]> git.lizzy.rs Git - rust.git/commitdiff
:arrow_up: rust-analyzer
authorLaurențiu Nicola <lnicola@dend.ro>
Tue, 30 Aug 2022 11:51:24 +0000 (14:51 +0300)
committerLaurențiu Nicola <lnicola@dend.ro>
Tue, 30 Aug 2022 11:51:24 +0000 (14:51 +0300)
74 files changed:
1  2 
src/tools/rust-analyzer/Cargo.lock
src/tools/rust-analyzer/crates/base-db/src/input.rs
src/tools/rust-analyzer/crates/base-db/src/lib.rs
src/tools/rust-analyzer/crates/flycheck/src/lib.rs
src/tools/rust-analyzer/crates/hir-def/src/data.rs
src/tools/rust-analyzer/crates/hir-def/src/db.rs
src/tools/rust-analyzer/crates/hir-def/src/nameres/diagnostics.rs
src/tools/rust-analyzer/crates/hir-def/src/test_db.rs
src/tools/rust-analyzer/crates/hir-ty/src/lower.rs
src/tools/rust-analyzer/crates/hir-ty/src/test_db.rs
src/tools/rust-analyzer/crates/hir-ty/src/tests/regression.rs
src/tools/rust-analyzer/crates/hir-ty/src/utils.rs
src/tools/rust-analyzer/crates/hir/src/lib.rs
src/tools/rust-analyzer/crates/ide-assists/src/handlers/extract_type_alias.rs
src/tools/rust-analyzer/crates/ide-assists/src/handlers/inline_call.rs
src/tools/rust-analyzer/crates/ide-completion/src/context.rs
src/tools/rust-analyzer/crates/ide-completion/src/context/analysis.rs
src/tools/rust-analyzer/crates/ide-completion/src/render.rs
src/tools/rust-analyzer/crates/ide-completion/src/render/function.rs
src/tools/rust-analyzer/crates/ide-completion/src/render/literal.rs
src/tools/rust-analyzer/crates/ide-db/src/lib.rs
src/tools/rust-analyzer/crates/ide-db/src/line_index.rs
src/tools/rust-analyzer/crates/ide-db/src/search.rs
src/tools/rust-analyzer/crates/ide-db/src/source_change.rs
src/tools/rust-analyzer/crates/ide-diagnostics/src/handlers/inactive_code.rs
src/tools/rust-analyzer/crates/ide-ssr/Cargo.toml
src/tools/rust-analyzer/crates/ide-ssr/src/lib.rs
src/tools/rust-analyzer/crates/ide/src/doc_links.rs
src/tools/rust-analyzer/crates/ide/src/lib.rs
src/tools/rust-analyzer/crates/ide/src/moniker.rs
src/tools/rust-analyzer/crates/ide/src/prime_caches.rs
src/tools/rust-analyzer/crates/ide/src/references.rs
src/tools/rust-analyzer/crates/ide/src/syntax_highlighting.rs
src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/html.rs
src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/inject.rs
src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tags.rs
src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/test_data/highlight_doctest.html
src/tools/rust-analyzer/crates/ide/src/syntax_highlighting/tests.rs
src/tools/rust-analyzer/crates/ide/src/view_crate_graph.rs
src/tools/rust-analyzer/crates/parser/src/grammar/patterns.rs
src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rast
src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0024_slice_pat.rs
src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rast
src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0026_tuple_pat_fields.rs
src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rast
src/tools/rust-analyzer/crates/parser/test_data/parser/inline/ok/0111_tuple_pat.rs
src/tools/rust-analyzer/crates/proc-macro-srv/Cargo.toml
src/tools/rust-analyzer/crates/proc-macro-srv/src/lib.rs
src/tools/rust-analyzer/crates/project-model/src/tests.rs
src/tools/rust-analyzer/crates/project-model/src/workspace.rs
src/tools/rust-analyzer/crates/rust-analyzer/Cargo.toml
src/tools/rust-analyzer/crates/rust-analyzer/src/bin/main.rs
src/tools/rust-analyzer/crates/rust-analyzer/src/cli.rs
src/tools/rust-analyzer/crates/rust-analyzer/src/cli/flags.rs
src/tools/rust-analyzer/crates/rust-analyzer/src/cli/scip.rs
src/tools/rust-analyzer/crates/rust-analyzer/src/config.rs
src/tools/rust-analyzer/crates/rust-analyzer/src/diagnostics.rs
src/tools/rust-analyzer/crates/rust-analyzer/src/global_state.rs
src/tools/rust-analyzer/crates/rust-analyzer/src/handlers.rs
src/tools/rust-analyzer/crates/rust-analyzer/src/main_loop.rs
src/tools/rust-analyzer/crates/rust-analyzer/src/semantic_tokens.rs
src/tools/rust-analyzer/crates/rust-analyzer/src/to_proto.rs
src/tools/rust-analyzer/crates/stdx/src/hash.rs
src/tools/rust-analyzer/crates/stdx/src/lib.rs
src/tools/rust-analyzer/crates/vfs/Cargo.toml
src/tools/rust-analyzer/crates/vfs/src/file_set.rs
src/tools/rust-analyzer/crates/vfs/src/lib.rs
src/tools/rust-analyzer/docs/user/generated_config.adoc
src/tools/rust-analyzer/docs/user/manual.adoc
src/tools/rust-analyzer/editors/code/package.json
src/tools/rust-analyzer/editors/code/src/client.ts
src/tools/rust-analyzer/editors/code/src/config.ts
src/tools/rust-analyzer/editors/code/src/main.ts
src/tools/rust-analyzer/lib/lsp-server/src/socket.rs

index 8a61ea1c9241422451a8a3ea1a0b1145159c50c4,0000000000000000000000000000000000000000..9f10d92c4e3ab0dc90c60ff09287ade92fbdff6d
mode 100644,000000..100644
--- /dev/null
@@@ -1,2093 -1,0 +1,2120 @@@
- [[package]]
- name = "crossbeam"
- version = "0.8.2"
- source = "registry+https://github.com/rust-lang/crates.io-index"
- checksum = "2801af0d36612ae591caa9568261fddce32ce6e08a7275ea334a06a4ad021a2c"
- dependencies = [
-  "cfg-if",
-  "crossbeam-channel",
-  "crossbeam-deque",
-  "crossbeam-epoch",
-  "crossbeam-queue",
-  "crossbeam-utils",
- ]
 +# This file is automatically @generated by Cargo.
 +# It is not intended for manual editing.
 +version = 3
 +
 +[[package]]
 +name = "addr2line"
 +version = "0.17.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b"
 +dependencies = [
 + "gimli",
 +]
 +
 +[[package]]
 +name = "adler"
 +version = "1.0.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
 +
 +[[package]]
 +name = "always-assert"
 +version = "0.1.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "fbf688625d06217d5b1bb0ea9d9c44a1635fd0ee3534466388d18203174f4d11"
 +dependencies = [
 + "log",
 +]
 +
 +[[package]]
 +name = "ansi_term"
 +version = "0.12.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
 +dependencies = [
 + "winapi",
 +]
 +
 +[[package]]
 +name = "anyhow"
 +version = "1.0.62"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "1485d4d2cc45e7b201ee3767015c96faa5904387c9d87c6efdd0fb511f12d305"
 +
 +[[package]]
 +name = "anymap"
 +version = "1.0.0-beta.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "8f1f8f5a6f3d50d89e3797d7593a50f96bb2aaa20ca0cc7be1fb673232c91d72"
 +
 +[[package]]
 +name = "arbitrary"
 +version = "1.1.3"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "5a7924531f38b1970ff630f03eb20a2fde69db5c590c93b0f3482e95dcc5fd60"
 +
 +[[package]]
 +name = "arrayvec"
 +version = "0.7.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6"
 +
 +[[package]]
 +name = "atty"
 +version = "0.2.14"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
 +dependencies = [
 + "hermit-abi",
 + "libc",
 + "winapi",
 +]
 +
 +[[package]]
 +name = "autocfg"
 +version = "1.1.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
 +
 +[[package]]
 +name = "backtrace"
 +version = "0.3.66"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7"
 +dependencies = [
 + "addr2line",
 + "cc",
 + "cfg-if",
 + "libc",
 + "miniz_oxide",
 + "object",
 + "rustc-demangle",
 +]
 +
 +[[package]]
 +name = "base-db"
 +version = "0.0.0"
 +dependencies = [
 + "cfg",
 + "profile",
 + "rustc-hash",
 + "salsa",
 + "stdx",
 + "syntax",
 + "test-utils",
 + "tt",
 + "vfs",
 +]
 +
 +[[package]]
 +name = "bitflags"
 +version = "1.3.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
 +
 +[[package]]
 +name = "camino"
 +version = "1.1.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "88ad0e1e3e88dd237a156ab9f571021b8a158caa0ae44b1968a241efb5144c1e"
 +dependencies = [
 + "serde",
 +]
 +
 +[[package]]
 +name = "cargo-platform"
 +version = "0.1.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "cbdb825da8a5df079a43676dbe042702f1707b1109f713a01420fbb4cc71fa27"
 +dependencies = [
 + "serde",
 +]
 +
 +[[package]]
 +name = "cargo_metadata"
 +version = "0.15.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "3abb7553d5b9b8421c6de7cb02606ff15e0c6eea7d8eadd75ef013fd636bec36"
 +dependencies = [
 + "camino",
 + "cargo-platform",
 + "semver",
 + "serde",
 + "serde_json",
 +]
 +
 +[[package]]
 +name = "cc"
 +version = "1.0.73"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11"
 +
 +[[package]]
 +name = "cfg"
 +version = "0.0.0"
 +dependencies = [
 + "arbitrary",
 + "derive_arbitrary",
 + "expect-test",
 + "mbe",
 + "oorandom",
 + "rustc-hash",
 + "syntax",
 + "tt",
 +]
 +
 +[[package]]
 +name = "cfg-if"
 +version = "1.0.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 +
 +[[package]]
 +name = "chalk-derive"
 +version = "0.84.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "cf29c109d57f8d57b0e7675391be37a9285d86dd93278bd5f14a0ad3c447a6c2"
 +dependencies = [
 + "proc-macro2",
 + "quote",
 + "syn",
 + "synstructure",
 +]
 +
 +[[package]]
 +name = "chalk-ir"
 +version = "0.84.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "d391763027b5e50a5e15caf6d2857ec585fd68160367bbeac9e1804209620918"
 +dependencies = [
 + "bitflags",
 + "chalk-derive",
 + "lazy_static",
 +]
 +
 +[[package]]
 +name = "chalk-recursive"
 +version = "0.84.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "afafd92dcdc7fe0ea940ee94bdd8cc5bd18f4a4a84c593d6d7025fe16c150478"
 +dependencies = [
 + "chalk-derive",
 + "chalk-ir",
 + "chalk-solve",
 + "rustc-hash",
 + "tracing",
 +]
 +
 +[[package]]
 +name = "chalk-solve"
 +version = "0.84.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "3af1d111f11c91c48ace02e93e470c5bae6d2631bd112e4545317da53660d7fc"
 +dependencies = [
 + "chalk-derive",
 + "chalk-ir",
 + "ena",
 + "indexmap",
 + "itertools",
 + "petgraph",
 + "rustc-hash",
 + "tracing",
 +]
 +
 +[[package]]
 +name = "countme"
 +version = "3.0.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636"
 +dependencies = [
 + "dashmap",
 + "once_cell",
 + "rustc-hash",
 +]
 +
 +[[package]]
 +name = "cov-mark"
 +version = "2.0.0-pre.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "0d48d8f76bd9331f19fe2aaf3821a9f9fb32c3963e1e3d6ce82a8c09cef7444a"
 +
 +[[package]]
 +name = "crc32fast"
 +version = "1.3.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
 +dependencies = [
 + "cfg-if",
 +]
 +
- [[package]]
- name = "crossbeam-queue"
- version = "0.3.6"
- source = "registry+https://github.com/rust-lang/crates.io-index"
- checksum = "1cd42583b04998a5363558e5f9291ee5a5ff6b49944332103f251e7479a82aa7"
- dependencies = [
-  "cfg-if",
-  "crossbeam-utils",
- ]
 +[[package]]
 +name = "crossbeam-channel"
 +version = "0.5.6"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521"
 +dependencies = [
 + "cfg-if",
 + "crossbeam-utils",
 +]
 +
 +[[package]]
 +name = "crossbeam-deque"
 +version = "0.8.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc"
 +dependencies = [
 + "cfg-if",
 + "crossbeam-epoch",
 + "crossbeam-utils",
 +]
 +
 +[[package]]
 +name = "crossbeam-epoch"
 +version = "0.9.10"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "045ebe27666471bb549370b4b0b3e51b07f56325befa4284db65fc89c02511b1"
 +dependencies = [
 + "autocfg",
 + "cfg-if",
 + "crossbeam-utils",
 + "memoffset",
 + "once_cell",
 + "scopeguard",
 +]
 +
- version = "0.93.0"
 +[[package]]
 +name = "crossbeam-utils"
 +version = "0.8.11"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "51887d4adc7b564537b15adcfb307936f8075dfcd5f00dde9a9f1d29383682bc"
 +dependencies = [
 + "cfg-if",
 + "once_cell",
 +]
 +
 +[[package]]
 +name = "dashmap"
 +version = "5.3.4"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "3495912c9c1ccf2e18976439f4443f3fee0fd61f424ff99fde6a66b15ecb448f"
 +dependencies = [
 + "cfg-if",
 + "hashbrown",
 + "lock_api",
 + "parking_lot_core 0.9.3",
 +]
 +
 +[[package]]
 +name = "derive_arbitrary"
 +version = "1.1.3"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "c9a577516173adb681466d517d39bd468293bc2c2a16439375ef0f35bba45f3d"
 +dependencies = [
 + "proc-macro2",
 + "quote",
 + "syn",
 +]
 +
 +[[package]]
 +name = "dissimilar"
 +version = "1.0.4"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "8c97b9233581d84b8e1e689cdd3a47b6f69770084fc246e86a7f78b0d9c1d4a5"
 +
 +[[package]]
 +name = "dot"
 +version = "0.1.4"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "a74b6c4d4a1cff5f454164363c16b72fa12463ca6b31f4b5f2035a65fa3d5906"
 +
 +[[package]]
 +name = "drop_bomb"
 +version = "0.1.5"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "9bda8e21c04aca2ae33ffc2fd8c23134f3cac46db123ba97bd9d3f3b8a4a85e1"
 +
 +[[package]]
 +name = "either"
 +version = "1.8.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797"
 +
 +[[package]]
 +name = "ena"
 +version = "0.14.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "d7402b94a93c24e742487327a7cd839dc9d36fec9de9fb25b09f2dae459f36c3"
 +dependencies = [
 + "log",
 +]
 +
 +[[package]]
 +name = "expect-test"
 +version = "1.4.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "1d4661aca38d826eb7c72fe128e4238220616de4c0cc00db7bfc38e2e1364dd3"
 +dependencies = [
 + "dissimilar",
 + "once_cell",
 +]
 +
 +[[package]]
 +name = "filetime"
 +version = "0.2.17"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "e94a7bbaa59354bc20dd75b67f23e2797b4490e9d6928203fb105c79e448c86c"
 +dependencies = [
 + "cfg-if",
 + "libc",
 + "redox_syscall",
 + "windows-sys 0.36.1",
 +]
 +
 +[[package]]
 +name = "fixedbitset"
 +version = "0.2.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d"
 +
 +[[package]]
 +name = "flate2"
 +version = "1.0.24"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6"
 +dependencies = [
 + "crc32fast",
 + "miniz_oxide",
 +]
 +
 +[[package]]
 +name = "flycheck"
 +version = "0.0.0"
 +dependencies = [
 + "cargo_metadata",
 + "crossbeam-channel",
 + "jod-thread",
 + "paths",
 + "serde",
 + "serde_json",
 + "stdx",
 + "toolchain",
 + "tracing",
 +]
 +
 +[[package]]
 +name = "form_urlencoded"
 +version = "1.0.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191"
 +dependencies = [
 + "matches",
 + "percent-encoding",
 +]
 +
 +[[package]]
 +name = "fs_extra"
 +version = "1.2.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "2022715d62ab30faffd124d40b76f4134a550a87792276512b18d63272333394"
 +
 +[[package]]
 +name = "fsevent-sys"
 +version = "4.1.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2"
 +dependencies = [
 + "libc",
 +]
 +
 +[[package]]
 +name = "fst"
 +version = "0.4.7"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "7ab85b9b05e3978cc9a9cf8fea7f01b494e1a09ed3037e16ba39edc7a29eb61a"
 +
 +[[package]]
 +name = "gimli"
 +version = "0.26.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d"
 +
 +[[package]]
 +name = "hashbrown"
 +version = "0.12.3"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
 +
 +[[package]]
 +name = "heck"
 +version = "0.3.3"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c"
 +dependencies = [
 + "unicode-segmentation",
 +]
 +
 +[[package]]
 +name = "hermit-abi"
 +version = "0.1.19"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
 +dependencies = [
 + "libc",
 +]
 +
 +[[package]]
 +name = "hir"
 +version = "0.0.0"
 +dependencies = [
 + "arrayvec",
 + "base-db",
 + "cfg",
 + "either",
 + "hir-def",
 + "hir-expand",
 + "hir-ty",
 + "itertools",
 + "once_cell",
 + "profile",
 + "rustc-hash",
 + "smallvec",
 + "stdx",
 + "syntax",
 + "tt",
 +]
 +
 +[[package]]
 +name = "hir-def"
 +version = "0.0.0"
 +dependencies = [
 + "anymap",
 + "arrayvec",
 + "base-db",
 + "bitflags",
 + "cfg",
 + "cov-mark",
 + "dashmap",
 + "drop_bomb",
 + "either",
 + "expect-test",
 + "fst",
 + "hashbrown",
 + "hir-expand",
 + "indexmap",
 + "itertools",
 + "la-arena",
 + "limit",
 + "mbe",
 + "once_cell",
 + "profile",
 + "rustc-hash",
 + "smallvec",
 + "stdx",
 + "syntax",
 + "test-utils",
 + "tracing",
 + "tt",
 +]
 +
 +[[package]]
 +name = "hir-expand"
 +version = "0.0.0"
 +dependencies = [
 + "base-db",
 + "cfg",
 + "cov-mark",
 + "either",
 + "expect-test",
 + "hashbrown",
 + "itertools",
 + "la-arena",
 + "limit",
 + "mbe",
 + "profile",
 + "rustc-hash",
 + "smallvec",
 + "stdx",
 + "syntax",
 + "tracing",
 + "tt",
 +]
 +
 +[[package]]
 +name = "hir-ty"
 +version = "0.0.0"
 +dependencies = [
 + "arrayvec",
 + "base-db",
 + "chalk-ir",
 + "chalk-recursive",
 + "chalk-solve",
 + "cov-mark",
 + "ena",
 + "expect-test",
 + "hir-def",
 + "hir-expand",
 + "itertools",
 + "la-arena",
 + "limit",
 + "once_cell",
 + "profile",
 + "rustc-hash",
 + "scoped-tls",
 + "smallvec",
 + "stdx",
 + "syntax",
 + "test-utils",
 + "tracing",
 + "tracing-subscriber",
 + "tracing-tree",
 + "typed-arena",
 +]
 +
 +[[package]]
 +name = "home"
 +version = "0.5.3"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "2456aef2e6b6a9784192ae780c0f15bc57df0e918585282325e8c8ac27737654"
 +dependencies = [
 + "winapi",
 +]
 +
 +[[package]]
 +name = "ide"
 +version = "0.0.0"
 +dependencies = [
 + "cfg",
 + "cov-mark",
 + "crossbeam-channel",
 + "dot",
 + "either",
 + "expect-test",
 + "hir",
 + "ide-assists",
 + "ide-completion",
 + "ide-db",
 + "ide-diagnostics",
 + "ide-ssr",
 + "itertools",
 + "oorandom",
 + "profile",
 + "pulldown-cmark",
 + "pulldown-cmark-to-cmark",
 + "stdx",
 + "syntax",
 + "test-utils",
 + "text-edit",
 + "toolchain",
 + "tracing",
 + "url",
 +]
 +
 +[[package]]
 +name = "ide-assists"
 +version = "0.0.0"
 +dependencies = [
 + "cov-mark",
 + "either",
 + "expect-test",
 + "hir",
 + "ide-db",
 + "itertools",
 + "profile",
 + "sourcegen",
 + "stdx",
 + "syntax",
 + "test-utils",
 + "text-edit",
 +]
 +
 +[[package]]
 +name = "ide-completion"
 +version = "0.0.0"
 +dependencies = [
 + "base-db",
 + "cov-mark",
 + "expect-test",
 + "hir",
 + "ide-db",
 + "itertools",
 + "once_cell",
 + "profile",
 + "smallvec",
 + "stdx",
 + "syntax",
 + "test-utils",
 + "text-edit",
 +]
 +
 +[[package]]
 +name = "ide-db"
 +version = "0.0.0"
 +dependencies = [
 + "arrayvec",
 + "base-db",
 + "cov-mark",
 + "either",
 + "expect-test",
 + "fst",
 + "hir",
 + "indexmap",
 + "itertools",
 + "limit",
 + "once_cell",
 + "parser",
 + "profile",
 + "rayon",
 + "rustc-hash",
 + "sourcegen",
 + "stdx",
 + "syntax",
 + "test-utils",
 + "text-edit",
 + "tracing",
 + "xshell",
 +]
 +
 +[[package]]
 +name = "ide-diagnostics"
 +version = "0.0.0"
 +dependencies = [
 + "cfg",
 + "cov-mark",
 + "either",
 + "expect-test",
 + "hir",
 + "ide-db",
 + "itertools",
 + "profile",
 + "serde_json",
 + "sourcegen",
 + "stdx",
 + "syntax",
 + "test-utils",
 + "text-edit",
 +]
 +
 +[[package]]
 +name = "ide-ssr"
 +version = "0.0.0"
 +dependencies = [
 + "cov-mark",
 + "expect-test",
 + "hir",
 + "ide-db",
 + "itertools",
 + "parser",
++ "stdx",
 + "syntax",
 + "test-utils",
 + "text-edit",
 +]
 +
 +[[package]]
 +name = "idna"
 +version = "0.2.3"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8"
 +dependencies = [
 + "matches",
 + "unicode-bidi",
 + "unicode-normalization",
 +]
 +
 +[[package]]
 +name = "indexmap"
 +version = "1.9.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e"
 +dependencies = [
 + "autocfg",
 + "hashbrown",
 +]
 +
 +[[package]]
 +name = "inotify"
 +version = "0.9.6"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff"
 +dependencies = [
 + "bitflags",
 + "inotify-sys",
 + "libc",
 +]
 +
 +[[package]]
 +name = "inotify-sys"
 +version = "0.1.5"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb"
 +dependencies = [
 + "libc",
 +]
 +
 +[[package]]
 +name = "instant"
 +version = "0.1.12"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
 +dependencies = [
 + "cfg-if",
 +]
 +
 +[[package]]
 +name = "itertools"
 +version = "0.10.3"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3"
 +dependencies = [
 + "either",
 +]
 +
 +[[package]]
 +name = "itoa"
 +version = "1.0.3"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754"
 +
 +[[package]]
 +name = "jod-thread"
 +version = "0.1.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "8b23360e99b8717f20aaa4598f5a6541efbe30630039fbc7706cf954a87947ae"
 +
 +[[package]]
 +name = "kqueue"
 +version = "1.0.6"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "4d6112e8f37b59803ac47a42d14f1f3a59bbf72fc6857ffc5be455e28a691f8e"
 +dependencies = [
 + "kqueue-sys",
 + "libc",
 +]
 +
 +[[package]]
 +name = "kqueue-sys"
 +version = "1.0.3"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "8367585489f01bc55dd27404dcf56b95e6da061a256a666ab23be9ba96a2e587"
 +dependencies = [
 + "bitflags",
 + "libc",
 +]
 +
 +[[package]]
 +name = "la-arena"
 +version = "0.3.0"
 +
 +[[package]]
 +name = "lazy_static"
 +version = "1.4.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
 +
 +[[package]]
 +name = "libc"
 +version = "0.2.132"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "8371e4e5341c3a96db127eb2465ac681ced4c433e01dd0e938adbef26ba93ba5"
 +
 +[[package]]
 +name = "libloading"
 +version = "0.7.3"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "efbc0f03f9a775e9f6aed295c6a1ba2253c5757a9e03d55c6caa46a681abcddd"
 +dependencies = [
 + "cfg-if",
 + "winapi",
 +]
 +
 +[[package]]
 +name = "libmimalloc-sys"
 +version = "0.1.25"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "11ca136052550448f55df7898c6dbe651c6b574fe38a0d9ea687a9f8088a2e2c"
 +dependencies = [
 + "cc",
 +]
 +
 +[[package]]
 +name = "limit"
 +version = "0.0.0"
 +
 +[[package]]
 +name = "lock_api"
 +version = "0.4.7"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53"
 +dependencies = [
 + "autocfg",
 + "scopeguard",
 +]
 +
 +[[package]]
 +name = "log"
 +version = "0.4.17"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
 +dependencies = [
 + "cfg-if",
 +]
 +
 +[[package]]
 +name = "lsp-server"
 +version = "0.6.0"
 +dependencies = [
 + "crossbeam-channel",
 + "log",
 + "lsp-types",
 + "serde",
 + "serde_json",
 +]
 +
 +[[package]]
 +name = "lsp-types"
- checksum = "70c74e2173b2b31f8655d33724b4b45ac13f439386f66290f539c22b144c2212"
++version = "0.93.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
-  "crossbeam",
++checksum = "a3bcfee315dde785ba887edb540b08765fd7df75a7d948844be6bf5712246734"
 +dependencies = [
 + "bitflags",
 + "serde",
 + "serde_json",
 + "serde_repr",
 + "url",
 +]
 +
 +[[package]]
 +name = "matchers"
 +version = "0.1.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
 +dependencies = [
 + "regex-automata",
 +]
 +
 +[[package]]
 +name = "matches"
 +version = "0.1.9"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f"
 +
 +[[package]]
 +name = "mbe"
 +version = "0.0.0"
 +dependencies = [
 + "cov-mark",
 + "parser",
 + "rustc-hash",
 + "smallvec",
 + "stdx",
 + "syntax",
 + "test-utils",
 + "tracing",
 + "tt",
 +]
 +
 +[[package]]
 +name = "memchr"
 +version = "2.5.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
 +
 +[[package]]
 +name = "memmap2"
 +version = "0.5.7"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "95af15f345b17af2efc8ead6080fb8bc376f8cec1b35277b935637595fe77498"
 +dependencies = [
 + "libc",
 +]
 +
 +[[package]]
 +name = "memoffset"
 +version = "0.6.5"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
 +dependencies = [
 + "autocfg",
 +]
 +
 +[[package]]
 +name = "mimalloc"
 +version = "0.1.29"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "2f64ad83c969af2e732e907564deb0d0ed393cec4af80776f77dd77a1a427698"
 +dependencies = [
 + "libmimalloc-sys",
 +]
 +
 +[[package]]
 +name = "miniz_oxide"
 +version = "0.5.3"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc"
 +dependencies = [
 + "adler",
 +]
 +
 +[[package]]
 +name = "mio"
 +version = "0.8.4"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "57ee1c23c7c63b0c9250c339ffdc69255f110b298b901b9f6c82547b7b87caaf"
 +dependencies = [
 + "libc",
 + "log",
 + "wasi",
 + "windows-sys 0.36.1",
 +]
 +
 +[[package]]
 +name = "miow"
 +version = "0.4.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "a7377f7792b3afb6a3cba68daa54ca23c032137010460d667fda53a8d66be00e"
 +dependencies = [
 + "windows-sys 0.28.0",
 +]
 +
 +[[package]]
 +name = "notify"
 +version = "5.0.0-pre.16"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "530f6314d6904508082f4ea424a0275cf62d341e118b313663f266429cb19693"
 +dependencies = [
 + "bitflags",
 + "crossbeam-channel",
 + "filetime",
 + "fsevent-sys",
 + "inotify",
 + "kqueue",
 + "libc",
 + "mio",
 + "walkdir",
 + "winapi",
 +]
 +
 +[[package]]
 +name = "num_cpus"
 +version = "1.13.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1"
 +dependencies = [
 + "hermit-abi",
 + "libc",
 +]
 +
 +[[package]]
 +name = "object"
 +version = "0.29.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53"
 +dependencies = [
 + "memchr",
 +]
 +
 +[[package]]
 +name = "once_cell"
 +version = "1.13.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "074864da206b4973b84eb91683020dbefd6a8c3f0f38e054d93954e891935e4e"
 +
 +[[package]]
 +name = "oorandom"
 +version = "11.1.3"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
 +
 +[[package]]
 +name = "parking_lot"
 +version = "0.11.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
 +dependencies = [
 + "instant",
 + "lock_api",
 + "parking_lot_core 0.8.5",
 +]
 +
 +[[package]]
 +name = "parking_lot"
 +version = "0.12.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
 +dependencies = [
 + "lock_api",
 + "parking_lot_core 0.9.3",
 +]
 +
 +[[package]]
 +name = "parking_lot_core"
 +version = "0.8.5"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216"
 +dependencies = [
 + "cfg-if",
 + "instant",
 + "libc",
 + "redox_syscall",
 + "smallvec",
 + "winapi",
 +]
 +
 +[[package]]
 +name = "parking_lot_core"
 +version = "0.9.3"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929"
 +dependencies = [
 + "cfg-if",
 + "libc",
 + "redox_syscall",
 + "smallvec",
 + "windows-sys 0.36.1",
 +]
 +
 +[[package]]
 +name = "parser"
 +version = "0.0.0"
 +dependencies = [
 + "drop_bomb",
 + "expect-test",
 + "limit",
 + "rustc-ap-rustc_lexer",
 + "sourcegen",
 +]
 +
 +[[package]]
 +name = "paste"
 +version = "1.0.8"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "9423e2b32f7a043629287a536f21951e8c6a82482d0acb1eeebfc90bc2225b22"
 +
 +[[package]]
 +name = "paths"
 +version = "0.0.0"
 +
 +[[package]]
 +name = "percent-encoding"
 +version = "2.1.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
 +
 +[[package]]
 +name = "perf-event"
 +version = "0.4.7"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "5396562cd2eaa828445d6d34258ae21ee1eb9d40fe626ca7f51c8dccb4af9d66"
 +dependencies = [
 + "libc",
 + "perf-event-open-sys",
 +]
 +
 +[[package]]
 +name = "perf-event-open-sys"
 +version = "1.0.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "ce9bedf5da2c234fdf2391ede2b90fabf585355f33100689bc364a3ea558561a"
 +dependencies = [
 + "libc",
 +]
 +
 +[[package]]
 +name = "petgraph"
 +version = "0.5.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "467d164a6de56270bd7c4d070df81d07beace25012d5103ced4e9ff08d6afdb7"
 +dependencies = [
 + "fixedbitset",
 + "indexmap",
 +]
 +
 +[[package]]
 +name = "pin-project-lite"
 +version = "0.2.9"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
 +
 +[[package]]
 +name = "proc-macro-api"
 +version = "0.0.0"
 +dependencies = [
 + "memmap2",
 + "object",
 + "paths",
 + "profile",
 + "serde",
 + "serde_json",
 + "snap",
 + "stdx",
 + "tracing",
 + "tt",
 +]
 +
 +[[package]]
 +name = "proc-macro-srv"
 +version = "0.0.0"
 +dependencies = [
 + "expect-test",
 + "libloading",
 + "mbe",
 + "memmap2",
 + "object",
 + "paths",
 + "proc-macro-api",
 + "proc-macro-test",
 + "tt",
 +]
 +
 +[[package]]
 +name = "proc-macro-srv-cli"
 +version = "0.0.0"
 +dependencies = [
 + "proc-macro-srv",
 +]
 +
 +[[package]]
 +name = "proc-macro-test"
 +version = "0.0.0"
 +dependencies = [
 + "cargo_metadata",
 + "proc-macro-test-impl",
 + "toolchain",
 +]
 +
 +[[package]]
 +name = "proc-macro-test-impl"
 +version = "0.0.0"
 +
 +[[package]]
 +name = "proc-macro2"
 +version = "1.0.43"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "0a2ca2c61bc9f3d74d2886294ab7b9853abd9c1ad903a3ac7815c58989bb7bab"
 +dependencies = [
 + "unicode-ident",
 +]
 +
 +[[package]]
 +name = "profile"
 +version = "0.0.0"
 +dependencies = [
 + "cfg-if",
 + "countme",
 + "la-arena",
 + "libc",
 + "once_cell",
 + "perf-event",
 + "tikv-jemalloc-ctl",
 + "winapi",
 +]
 +
 +[[package]]
 +name = "project-model"
 +version = "0.0.0"
 +dependencies = [
 + "anyhow",
 + "base-db",
 + "cargo_metadata",
 + "cfg",
 + "expect-test",
 + "la-arena",
 + "paths",
 + "profile",
 + "rustc-hash",
 + "semver",
 + "serde",
 + "serde_json",
 + "stdx",
 + "toolchain",
 + "tracing",
 +]
 +
++[[package]]
++name = "protobuf"
++version = "3.1.0"
++source = "registry+https://github.com/rust-lang/crates.io-index"
++checksum = "4ee4a7d8b91800c8f167a6268d1a1026607368e1adc84e98fe044aeb905302f7"
++dependencies = [
++ "once_cell",
++ "protobuf-support",
++ "thiserror",
++]
++
++[[package]]
++name = "protobuf-support"
++version = "3.1.0"
++source = "registry+https://github.com/rust-lang/crates.io-index"
++checksum = "8ca157fe12fc7ee2e315f2f735e27df41b3d97cdd70ea112824dac1ffb08ee1c"
++dependencies = [
++ "thiserror",
++]
++
 +[[package]]
 +name = "pulldown-cmark"
 +version = "0.9.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "2d9cc634bc78768157b5cbfe988ffcd1dcba95cd2b2f03a88316c08c6d00ed63"
 +dependencies = [
 + "bitflags",
 + "memchr",
 + "unicase",
 +]
 +
 +[[package]]
 +name = "pulldown-cmark-to-cmark"
 +version = "10.0.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "c1353ac408192fa925228d3e60ff746167d03f4f7e54835d78ef79e08225d913"
 +dependencies = [
 + "pulldown-cmark",
 +]
 +
 +[[package]]
 +name = "quote"
 +version = "1.0.21"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
 +dependencies = [
 + "proc-macro2",
 +]
 +
 +[[package]]
 +name = "rayon"
 +version = "1.5.3"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d"
 +dependencies = [
 + "autocfg",
 + "crossbeam-deque",
 + "either",
 + "rayon-core",
 +]
 +
 +[[package]]
 +name = "rayon-core"
 +version = "1.9.3"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f"
 +dependencies = [
 + "crossbeam-channel",
 + "crossbeam-deque",
 + "crossbeam-utils",
 + "num_cpus",
 +]
 +
 +[[package]]
 +name = "redox_syscall"
 +version = "0.2.16"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
 +dependencies = [
 + "bitflags",
 +]
 +
 +[[package]]
 +name = "regex"
 +version = "1.6.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"
 +dependencies = [
 + "regex-syntax",
 +]
 +
 +[[package]]
 +name = "regex-automata"
 +version = "0.1.10"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
 +dependencies = [
 + "regex-syntax",
 +]
 +
 +[[package]]
 +name = "regex-syntax"
 +version = "0.6.27"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
 +
 +[[package]]
 +name = "rowan"
 +version = "0.15.8"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "e88acf7b001007e9e8c989fe7449f6601d909e5dd2c56399fc158977ad6c56e8"
 +dependencies = [
 + "countme",
 + "hashbrown",
 + "memoffset",
 + "rustc-hash",
 + "text-size",
 +]
 +
 +[[package]]
 +name = "rust-analyzer"
 +version = "0.0.0"
 +dependencies = [
 + "always-assert",
 + "anyhow",
 + "cfg",
 + "crossbeam-channel",
 + "dissimilar",
 + "expect-test",
 + "flycheck",
 + "hir",
 + "hir-def",
 + "hir-ty",
 + "ide",
 + "ide-db",
 + "ide-ssr",
 + "itertools",
 + "jod-thread",
 + "lsp-server",
 + "lsp-types",
 + "mbe",
 + "mimalloc",
 + "num_cpus",
 + "oorandom",
 + "parking_lot 0.12.1",
 + "proc-macro-api",
 + "proc-macro-srv",
 + "profile",
 + "project-model",
 + "rayon",
 + "rustc-hash",
++ "scip",
 + "serde",
 + "serde_json",
 + "sourcegen",
 + "stdx",
 + "syntax",
 + "test-utils",
 + "threadpool",
 + "tikv-jemallocator",
 + "toolchain",
 + "tracing",
 + "tracing-log",
 + "tracing-subscriber",
 + "tracing-tree",
 + "tt",
 + "vfs",
 + "vfs-notify",
 + "winapi",
 + "xflags",
 + "xshell",
 +]
 +
 +[[package]]
 +name = "rustc-ap-rustc_lexer"
 +version = "725.0.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "f950742ef8a203aa7661aad3ab880438ddeb7f95d4b837c30d65db1a2c5df68e"
 +dependencies = [
 + "unicode-xid",
 +]
 +
 +[[package]]
 +name = "rustc-demangle"
 +version = "0.1.21"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342"
 +
 +[[package]]
 +name = "rustc-hash"
 +version = "1.1.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
 +
 +[[package]]
 +name = "ryu"
 +version = "1.0.11"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"
 +
 +[[package]]
 +name = "salsa"
 +version = "0.17.0-pre.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "9b223dccb46c32753144d0b51290da7230bb4aedcd8379d6b4c9a474c18bf17a"
 +dependencies = [
 + "crossbeam-utils",
 + "indexmap",
 + "lock_api",
 + "log",
 + "oorandom",
 + "parking_lot 0.11.2",
 + "rustc-hash",
 + "salsa-macros",
 + "smallvec",
 +]
 +
 +[[package]]
 +name = "salsa-macros"
 +version = "0.17.0-pre.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "ac6c2e352df550bf019da7b16164ed2f7fa107c39653d1311d1bba42d1582ff7"
 +dependencies = [
 + "heck",
 + "proc-macro2",
 + "quote",
 + "syn",
 +]
 +
 +[[package]]
 +name = "same-file"
 +version = "1.0.6"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
 +dependencies = [
 + "winapi-util",
 +]
 +
++[[package]]
++name = "scip"
++version = "0.1.1"
++source = "registry+https://github.com/rust-lang/crates.io-index"
++checksum = "b2bfbb10286f69fad7c78db71004b7839bf957788359fe0c479f029f9849136b"
++dependencies = [
++ "protobuf",
++]
++
 +[[package]]
 +name = "scoped-tls"
 +version = "1.0.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "ea6a9290e3c9cf0f18145ef7ffa62d68ee0bf5fcd651017e586dc7fd5da448c2"
 +
 +[[package]]
 +name = "scopeguard"
 +version = "1.1.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
 +
 +[[package]]
 +name = "semver"
 +version = "1.0.13"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "93f6841e709003d68bb2deee8c343572bf446003ec20a583e76f7b15cebf3711"
 +dependencies = [
 + "serde",
 +]
 +
 +[[package]]
 +name = "serde"
 +version = "1.0.143"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "53e8e5d5b70924f74ff5c6d64d9a5acd91422117c60f48c4e07855238a254553"
 +dependencies = [
 + "serde_derive",
 +]
 +
 +[[package]]
 +name = "serde_derive"
 +version = "1.0.143"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "d3d8e8de557aee63c26b85b947f5e59b690d0454c753f3adeb5cd7835ab88391"
 +dependencies = [
 + "proc-macro2",
 + "quote",
 + "syn",
 +]
 +
 +[[package]]
 +name = "serde_json"
 +version = "1.0.83"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "38dd04e3c8279e75b31ef29dbdceebfe5ad89f4d0937213c53f7d49d01b3d5a7"
 +dependencies = [
 + "indexmap",
 + "itoa",
 + "ryu",
 + "serde",
 +]
 +
 +[[package]]
 +name = "serde_repr"
 +version = "0.1.9"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "1fe39d9fbb0ebf5eb2c7cb7e2a47e4f462fad1379f1166b8ae49ad9eae89a7ca"
 +dependencies = [
 + "proc-macro2",
 + "quote",
 + "syn",
 +]
 +
 +[[package]]
 +name = "sharded-slab"
 +version = "0.1.4"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31"
 +dependencies = [
 + "lazy_static",
 +]
 +
 +[[package]]
 +name = "smallvec"
 +version = "1.9.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1"
 +
 +[[package]]
 +name = "smol_str"
 +version = "0.1.23"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "7475118a28b7e3a2e157ce0131ba8c5526ea96e90ee601d9f6bb2e286a35ab44"
 +dependencies = [
 + "serde",
 +]
 +
 +[[package]]
 +name = "snap"
 +version = "1.0.5"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "45456094d1983e2ee2a18fdfebce3189fa451699d0502cb8e3b49dba5ba41451"
 +
 +[[package]]
 +name = "sourcegen"
 +version = "0.0.0"
 +dependencies = [
 + "xshell",
 +]
 +
 +[[package]]
 +name = "stdx"
 +version = "0.0.0"
 +dependencies = [
 + "always-assert",
 + "backtrace",
 + "libc",
 + "miow",
 + "winapi",
 +]
 +
 +[[package]]
 +name = "syn"
 +version = "1.0.99"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "58dbef6ec655055e20b86b15a8cc6d439cca19b667537ac6a1369572d151ab13"
 +dependencies = [
 + "proc-macro2",
 + "quote",
 + "unicode-ident",
 +]
 +
 +[[package]]
 +name = "synstructure"
 +version = "0.12.6"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
 +dependencies = [
 + "proc-macro2",
 + "quote",
 + "syn",
 + "unicode-xid",
 +]
 +
 +[[package]]
 +name = "syntax"
 +version = "0.0.0"
 +dependencies = [
 + "cov-mark",
 + "expect-test",
 + "indexmap",
 + "itertools",
 + "once_cell",
 + "parser",
 + "proc-macro2",
 + "profile",
 + "quote",
 + "rayon",
 + "rowan",
 + "rustc-ap-rustc_lexer",
 + "rustc-hash",
 + "smol_str",
 + "sourcegen",
 + "stdx",
 + "test-utils",
 + "text-edit",
 + "ungrammar",
 +]
 +
 +[[package]]
 +name = "test-utils"
 +version = "0.0.0"
 +dependencies = [
 + "dissimilar",
 + "profile",
 + "rustc-hash",
 + "stdx",
 + "text-size",
 +]
 +
 +[[package]]
 +name = "text-edit"
 +version = "0.0.0"
 +dependencies = [
 + "itertools",
 + "text-size",
 +]
 +
 +[[package]]
 +name = "text-size"
 +version = "1.1.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a"
 +
++[[package]]
++name = "thiserror"
++version = "1.0.31"
++source = "registry+https://github.com/rust-lang/crates.io-index"
++checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a"
++dependencies = [
++ "thiserror-impl",
++]
++
++[[package]]
++name = "thiserror-impl"
++version = "1.0.31"
++source = "registry+https://github.com/rust-lang/crates.io-index"
++checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a"
++dependencies = [
++ "proc-macro2",
++ "quote",
++ "syn",
++]
++
 +[[package]]
 +name = "thread_local"
 +version = "1.1.4"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180"
 +dependencies = [
 + "once_cell",
 +]
 +
 +[[package]]
 +name = "threadpool"
 +version = "1.8.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa"
 +dependencies = [
 + "num_cpus",
 +]
 +
 +[[package]]
 +name = "tikv-jemalloc-ctl"
 +version = "0.5.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "e37706572f4b151dff7a0146e040804e9c26fe3a3118591112f05cf12a4216c1"
 +dependencies = [
 + "libc",
 + "paste",
 + "tikv-jemalloc-sys",
 +]
 +
 +[[package]]
 +name = "tikv-jemalloc-sys"
 +version = "0.5.1+5.3.0-patched"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "931e876f91fed0827f863a2d153897790da0b24d882c721a79cb3beb0b903261"
 +dependencies = [
 + "cc",
 + "fs_extra",
 + "libc",
 +]
 +
 +[[package]]
 +name = "tikv-jemallocator"
 +version = "0.5.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "20612db8a13a6c06d57ec83953694185a367e16945f66565e8028d2c0bd76979"
 +dependencies = [
 + "libc",
 + "tikv-jemalloc-sys",
 +]
 +
 +[[package]]
 +name = "tinyvec"
 +version = "1.6.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
 +dependencies = [
 + "tinyvec_macros",
 +]
 +
 +[[package]]
 +name = "tinyvec_macros"
 +version = "0.1.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
 +
 +[[package]]
 +name = "toolchain"
 +version = "0.0.0"
 +dependencies = [
 + "home",
 +]
 +
 +[[package]]
 +name = "tracing"
 +version = "0.1.36"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "2fce9567bd60a67d08a16488756721ba392f24f29006402881e43b19aac64307"
 +dependencies = [
 + "cfg-if",
 + "pin-project-lite",
 + "tracing-attributes",
 + "tracing-core",
 +]
 +
 +[[package]]
 +name = "tracing-attributes"
 +version = "0.1.22"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "11c75893af559bc8e10716548bdef5cb2b983f8e637db9d0e15126b61b484ee2"
 +dependencies = [
 + "proc-macro2",
 + "quote",
 + "syn",
 +]
 +
 +[[package]]
 +name = "tracing-core"
 +version = "0.1.29"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "5aeea4303076558a00714b823f9ad67d58a3bbda1df83d8827d21193156e22f7"
 +dependencies = [
 + "once_cell",
 + "valuable",
 +]
 +
 +[[package]]
 +name = "tracing-log"
 +version = "0.1.3"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922"
 +dependencies = [
 + "lazy_static",
 + "log",
 + "tracing-core",
 +]
 +
 +[[package]]
 +name = "tracing-subscriber"
 +version = "0.3.15"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "60db860322da191b40952ad9affe65ea23e7dd6a5c442c2c42865810c6ab8e6b"
 +dependencies = [
 + "matchers",
 + "once_cell",
 + "regex",
 + "sharded-slab",
 + "thread_local",
 + "tracing",
 + "tracing-core",
 + "tracing-log",
 +]
 +
 +[[package]]
 +name = "tracing-tree"
 +version = "0.2.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "d07e90b329c621ade432823988574e820212648aa40e7a2497777d58de0fb453"
 +dependencies = [
 + "ansi_term",
 + "atty",
 + "tracing-core",
 + "tracing-log",
 + "tracing-subscriber",
 +]
 +
 +[[package]]
 +name = "tt"
 +version = "0.0.0"
 +dependencies = [
 + "smol_str",
 + "stdx",
 +]
 +
 +[[package]]
 +name = "typed-arena"
 +version = "2.0.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "0685c84d5d54d1c26f7d3eb96cd41550adb97baed141a761cf335d3d33bcd0ae"
 +
 +[[package]]
 +name = "ungrammar"
 +version = "1.16.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "a3e5df347f0bf3ec1d670aad6ca5c6a1859cd9ea61d2113125794654ccced68f"
 +
 +[[package]]
 +name = "unicase"
 +version = "2.6.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
 +dependencies = [
 + "version_check",
 +]
 +
 +[[package]]
 +name = "unicode-bidi"
 +version = "0.3.8"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992"
 +
 +[[package]]
 +name = "unicode-ident"
 +version = "1.0.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "5bd2fe26506023ed7b5e1e315add59d6f584c621d037f9368fea9cfb988f368c"
 +
 +[[package]]
 +name = "unicode-normalization"
 +version = "0.1.21"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "854cbdc4f7bc6ae19c820d44abdc3277ac3e1b2b93db20a636825d9322fb60e6"
 +dependencies = [
 + "tinyvec",
 +]
 +
 +[[package]]
 +name = "unicode-segmentation"
 +version = "1.9.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99"
 +
 +[[package]]
 +name = "unicode-xid"
 +version = "0.2.3"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04"
 +
 +[[package]]
 +name = "url"
 +version = "2.2.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c"
 +dependencies = [
 + "form_urlencoded",
 + "idna",
 + "matches",
 + "percent-encoding",
 + "serde",
 +]
 +
 +[[package]]
 +name = "valuable"
 +version = "0.1.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
 +
 +[[package]]
 +name = "version_check"
 +version = "0.9.4"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
 +
 +[[package]]
 +name = "vfs"
 +version = "0.0.0"
 +dependencies = [
 + "fst",
 + "indexmap",
 + "paths",
 + "rustc-hash",
++ "stdx",
 +]
 +
 +[[package]]
 +name = "vfs-notify"
 +version = "0.0.0"
 +dependencies = [
 + "crossbeam-channel",
 + "jod-thread",
 + "notify",
 + "paths",
 + "tracing",
 + "vfs",
 + "walkdir",
 +]
 +
 +[[package]]
 +name = "walkdir"
 +version = "2.3.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56"
 +dependencies = [
 + "same-file",
 + "winapi",
 + "winapi-util",
 +]
 +
 +[[package]]
 +name = "wasi"
 +version = "0.11.0+wasi-snapshot-preview1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
 +
 +[[package]]
 +name = "winapi"
 +version = "0.3.9"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
 +dependencies = [
 + "winapi-i686-pc-windows-gnu",
 + "winapi-x86_64-pc-windows-gnu",
 +]
 +
 +[[package]]
 +name = "winapi-i686-pc-windows-gnu"
 +version = "0.4.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
 +
 +[[package]]
 +name = "winapi-util"
 +version = "0.1.5"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
 +dependencies = [
 + "winapi",
 +]
 +
 +[[package]]
 +name = "winapi-x86_64-pc-windows-gnu"
 +version = "0.4.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
 +
 +[[package]]
 +name = "windows-sys"
 +version = "0.28.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "82ca39602d5cbfa692c4b67e3bcbb2751477355141c1ed434c94da4186836ff6"
 +dependencies = [
 + "windows_aarch64_msvc 0.28.0",
 + "windows_i686_gnu 0.28.0",
 + "windows_i686_msvc 0.28.0",
 + "windows_x86_64_gnu 0.28.0",
 + "windows_x86_64_msvc 0.28.0",
 +]
 +
 +[[package]]
 +name = "windows-sys"
 +version = "0.36.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2"
 +dependencies = [
 + "windows_aarch64_msvc 0.36.1",
 + "windows_i686_gnu 0.36.1",
 + "windows_i686_msvc 0.36.1",
 + "windows_x86_64_gnu 0.36.1",
 + "windows_x86_64_msvc 0.36.1",
 +]
 +
 +[[package]]
 +name = "windows_aarch64_msvc"
 +version = "0.28.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "52695a41e536859d5308cc613b4a022261a274390b25bd29dfff4bf08505f3c2"
 +
 +[[package]]
 +name = "windows_aarch64_msvc"
 +version = "0.36.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47"
 +
 +[[package]]
 +name = "windows_i686_gnu"
 +version = "0.28.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "f54725ac23affef038fecb177de6c9bf065787c2f432f79e3c373da92f3e1d8a"
 +
 +[[package]]
 +name = "windows_i686_gnu"
 +version = "0.36.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6"
 +
 +[[package]]
 +name = "windows_i686_msvc"
 +version = "0.28.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "51d5158a43cc43623c0729d1ad6647e62fa384a3d135fd15108d37c683461f64"
 +
 +[[package]]
 +name = "windows_i686_msvc"
 +version = "0.36.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024"
 +
 +[[package]]
 +name = "windows_x86_64_gnu"
 +version = "0.28.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "bc31f409f565611535130cfe7ee8e6655d3fa99c1c61013981e491921b5ce954"
 +
 +[[package]]
 +name = "windows_x86_64_gnu"
 +version = "0.36.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1"
 +
 +[[package]]
 +name = "windows_x86_64_msvc"
 +version = "0.28.0"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "3f2b8c7cbd3bfdddd9ab98769f9746a7fad1bca236554cd032b78d768bc0e89f"
 +
 +[[package]]
 +name = "windows_x86_64_msvc"
 +version = "0.36.1"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680"
 +
 +[[package]]
 +name = "write-json"
 +version = "0.1.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "06069a848f95fceae3e5e03c0ddc8cb78452b56654ee0c8e68f938cf790fb9e3"
 +
 +[[package]]
 +name = "xflags"
 +version = "0.2.4"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "3f14fe1ed41a5a2b5ef3f565586c4a8a559ee55d3953faab360a771135bdee00"
 +dependencies = [
 + "xflags-macros",
 +]
 +
 +[[package]]
 +name = "xflags-macros"
 +version = "0.2.4"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "45d11d5fc2a97287eded8b170ca80533b3c42646dd7fa386a5eb045817921022"
 +
 +[[package]]
 +name = "xshell"
 +version = "0.2.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "6d47097dc5c85234b1e41851b3422dd6d19b3befdd35b4ae5ce386724aeca981"
 +dependencies = [
 + "xshell-macros",
 +]
 +
 +[[package]]
 +name = "xshell-macros"
 +version = "0.2.2"
 +source = "registry+https://github.com/rust-lang/crates.io-index"
 +checksum = "88301b56c26dd9bf5c43d858538f82d6f3f7764767defbc5d34e59459901c41a"
 +
 +[[package]]
 +name = "xtask"
 +version = "0.1.0"
 +dependencies = [
 + "anyhow",
 + "flate2",
 + "write-json",
 + "xflags",
 + "xshell",
 +]
index 9580ce8007c76a2224f5f2ba90a2acfbc6680d11,0000000000000000000000000000000000000000..b388e47dee6e4ecd4411158467ca7e39a4da34f5
mode 100644,000000..100644
--- /dev/null
@@@ -1,792 -1,0 +1,808 @@@
- use rustc_hash::{FxHashMap, FxHashSet};
 +//! This module specifies the input to rust-analyzer. In some sense, this is
 +//! **the** most important module, because all other fancy stuff is strictly
 +//! derived from this input.
 +//!
 +//! Note that neither this module, nor any other part of the analyzer's core do
 +//! actual IO. See `vfs` and `project_model` in the `rust-analyzer` crate for how
 +//! actual IO is done and lowered to input.
 +
 +use std::{fmt, ops, panic::RefUnwindSafe, str::FromStr, sync::Arc};
 +
 +use cfg::CfgOptions;
- use vfs::{file_set::FileSet, FileId, VfsPath};
++use rustc_hash::FxHashMap;
++use stdx::hash::{NoHashHashMap, NoHashHashSet};
 +use syntax::SmolStr;
 +use tt::Subtree;
-     pub(crate) file_set: FileSet,
++use vfs::{file_set::FileSet, AnchoredPath, FileId, VfsPath};
 +
 +/// Files are grouped into source roots. A source root is a directory on the
 +/// file systems which is watched for changes. Typically it corresponds to a
 +/// Rust crate. Source roots *might* be nested: in this case, a file belongs to
 +/// the nearest enclosing source root. Paths to files are always relative to a
 +/// source root, and the analyzer does not know the root path of the source root at
 +/// all. So, a file from one source root can't refer to a file in another source
 +/// root by path.
 +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
 +pub struct SourceRootId(pub u32);
 +
 +#[derive(Clone, Debug, PartialEq, Eq)]
 +pub struct SourceRoot {
 +    /// Sysroot or crates.io library.
 +    ///
 +    /// Libraries are considered mostly immutable, this assumption is used to
 +    /// optimize salsa's query structure
 +    pub is_library: bool,
-     arena: FxHashMap<CrateId, CrateData>,
++    file_set: FileSet,
 +}
 +
 +impl SourceRoot {
 +    pub fn new_local(file_set: FileSet) -> SourceRoot {
 +        SourceRoot { is_library: false, file_set }
 +    }
++
 +    pub fn new_library(file_set: FileSet) -> SourceRoot {
 +        SourceRoot { is_library: true, file_set }
 +    }
++
 +    pub fn path_for_file(&self, file: &FileId) -> Option<&VfsPath> {
 +        self.file_set.path_for_file(file)
 +    }
++
 +    pub fn file_for_path(&self, path: &VfsPath) -> Option<&FileId> {
 +        self.file_set.file_for_path(path)
 +    }
++
++    pub fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
++        self.file_set.resolve_path(path)
++    }
++
 +    pub fn iter(&self) -> impl Iterator<Item = FileId> + '_ {
 +        self.file_set.iter()
 +    }
 +}
 +
 +/// `CrateGraph` is a bit of information which turns a set of text files into a
 +/// number of Rust crates.
 +///
 +/// Each crate is defined by the `FileId` of its root module, the set of enabled
 +/// `cfg` flags and the set of dependencies.
 +///
 +/// Note that, due to cfg's, there might be several crates for a single `FileId`!
 +///
 +/// For the purposes of analysis, a crate does not have a name. Instead, names
 +/// are specified on dependency edges. That is, a crate might be known under
 +/// different names in different dependent crates.
 +///
 +/// Note that `CrateGraph` is build-system agnostic: it's a concept of the Rust
 +/// language proper, not a concept of the build system. In practice, we get
 +/// `CrateGraph` by lowering `cargo metadata` output.
 +///
 +/// `CrateGraph` is `!Serialize` by design, see
 +/// <https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/architecture.md#serialization>
 +#[derive(Debug, Clone, Default /* Serialize, Deserialize */)]
 +pub struct CrateGraph {
- #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
++    arena: NoHashHashMap<CrateId, CrateData>,
 +}
 +
-         if let Some(path) = self.find_path(&mut FxHashSet::default(), dep.crate_id, from) {
++#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
 +pub struct CrateId(pub u32);
 +
++impl stdx::hash::NoHashHashable for CrateId {}
++impl std::hash::Hash for CrateId {
++    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
++        self.0.hash(state);
++    }
++}
++
 +#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 +pub struct CrateName(SmolStr);
 +
 +impl CrateName {
 +    /// Creates a crate name, checking for dashes in the string provided.
 +    /// Dashes are not allowed in the crate names,
 +    /// hence the input string is returned as `Err` for those cases.
 +    pub fn new(name: &str) -> Result<CrateName, &str> {
 +        if name.contains('-') {
 +            Err(name)
 +        } else {
 +            Ok(Self(SmolStr::new(name)))
 +        }
 +    }
 +
 +    /// Creates a crate name, unconditionally replacing the dashes with underscores.
 +    pub fn normalize_dashes(name: &str) -> CrateName {
 +        Self(SmolStr::new(name.replace('-', "_")))
 +    }
 +
 +    pub fn as_smol_str(&self) -> &SmolStr {
 +        &self.0
 +    }
 +}
 +
 +impl fmt::Display for CrateName {
 +    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 +        self.0.fmt(f)
 +    }
 +}
 +
 +impl ops::Deref for CrateName {
 +    type Target = str;
 +    fn deref(&self) -> &str {
 +        &*self.0
 +    }
 +}
 +
 +/// Origin of the crates. It is used in emitting monikers.
 +#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 +pub enum CrateOrigin {
 +    /// Crates that are from crates.io official registry,
 +    CratesIo { repo: Option<String> },
 +    /// Crates that are provided by the language, like std, core, proc-macro, ...
 +    Lang(LangCrateOrigin),
 +}
 +
 +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 +pub enum LangCrateOrigin {
 +    Alloc,
 +    Core,
 +    ProcMacro,
 +    Std,
 +    Test,
 +    Other,
 +}
 +
 +impl From<&str> for LangCrateOrigin {
 +    fn from(s: &str) -> Self {
 +        match s {
 +            "alloc" => LangCrateOrigin::Alloc,
 +            "core" => LangCrateOrigin::Core,
 +            "proc-macro" => LangCrateOrigin::ProcMacro,
 +            "std" => LangCrateOrigin::Std,
 +            "test" => LangCrateOrigin::Test,
 +            _ => LangCrateOrigin::Other,
 +        }
 +    }
 +}
 +
 +impl fmt::Display for LangCrateOrigin {
 +    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 +        let text = match self {
 +            LangCrateOrigin::Alloc => "alloc",
 +            LangCrateOrigin::Core => "core",
 +            LangCrateOrigin::ProcMacro => "proc_macro",
 +            LangCrateOrigin::Std => "std",
 +            LangCrateOrigin::Test => "test",
 +            LangCrateOrigin::Other => "other",
 +        };
 +        f.write_str(text)
 +    }
 +}
 +
 +#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 +pub struct CrateDisplayName {
 +    // The name we use to display various paths (with `_`).
 +    crate_name: CrateName,
 +    // The name as specified in Cargo.toml (with `-`).
 +    canonical_name: String,
 +}
 +
impl CrateDisplayName {
    /// The name as specified in Cargo.toml (with `-`).
    pub fn canonical_name(&self) -> &str {
        &self.canonical_name
    }
    /// The name used to display paths (with `_`).
    pub fn crate_name(&self) -> &CrateName {
        &self.crate_name
    }
}
 +
 +impl From<CrateName> for CrateDisplayName {
 +    fn from(crate_name: CrateName) -> CrateDisplayName {
 +        let canonical_name = crate_name.to_string();
 +        CrateDisplayName { crate_name, canonical_name }
 +    }
 +}
 +
impl fmt::Display for CrateDisplayName {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Display shows the underscored crate name, not the canonical one.
        self.crate_name.fmt(f)
    }
}

/// Derefs to the underscored crate-name string.
impl ops::Deref for CrateDisplayName {
    type Target = str;
    fn deref(&self) -> &str {
        &*self.crate_name
    }
}
 +
 +impl CrateDisplayName {
 +    pub fn from_canonical_name(canonical_name: String) -> CrateDisplayName {
 +        let crate_name = CrateName::normalize_dashes(&canonical_name);
 +        CrateDisplayName { crate_name, canonical_name }
 +    }
 +}
 +
/// Identifier of a proc macro within the loaded set.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct ProcMacroId(pub u32);

/// The three kinds of procedural macros.
#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)]
pub enum ProcMacroKind {
    CustomDerive,
    FuncLike,
    Attr,
}

/// Interface for invoking a proc-macro expander on a token subtree.
pub trait ProcMacroExpander: fmt::Debug + Send + Sync + RefUnwindSafe {
    /// Expands `subtree` (with optional attribute input `attrs` and the
    /// crate's environment) into a new subtree, or reports why it failed.
    fn expand(
        &self,
        subtree: &Subtree,
        attrs: Option<&Subtree>,
        env: &Env,
    ) -> Result<Subtree, ProcMacroExpansionError>;
}
 +
/// Error produced by a failed proc-macro expansion.
///
/// Derives `Debug` so it can be logged and used with `Result::unwrap`/
/// `expect` — public error types should always be `Debug`.
#[derive(Debug)]
pub enum ProcMacroExpansionError {
    /// The macro itself panicked while expanding.
    Panic(String),
    /// Things like "proc macro server was killed by OOM".
    System(String),
}
 +
/// Result of loading the proc macros of a crate; `Err` carries a
/// human-readable error message.
pub type ProcMacroLoadResult = Result<Vec<ProcMacro>, String>;

/// A single loaded proc macro.
#[derive(Debug, Clone)]
pub struct ProcMacro {
    pub name: SmolStr,
    pub kind: ProcMacroKind,
    /// The object performing the actual expansion (shared, since
    /// `ProcMacro` is `Clone`).
    pub expander: Arc<dyn ProcMacroExpander>,
}
 +
/// All input facts about a single crate in the [`CrateGraph`].
#[derive(Debug, Clone)]
pub struct CrateData {
    /// The crate's root module file (e.g. `lib.rs`/`main.rs` for Cargo).
    pub root_file_id: FileId,
    pub edition: Edition,
    pub version: Option<String>,
    /// A name used in the package's project declaration: for Cargo projects,
    /// its `[package].name` can be different for other project types or even
    /// absent (a dummy crate for the code snippet, for example).
    ///
    /// For purposes of analysis, crates are anonymous (only names in
    /// `Dependency` matters), this name should only be used for UI.
    pub display_name: Option<CrateDisplayName>,
    pub cfg_options: CfgOptions,
    pub potential_cfg_options: CfgOptions,
    pub env: Env,
    pub dependencies: Vec<Dependency>,
    /// Proc macros loaded for this crate, or the reason loading failed.
    pub proc_macro: ProcMacroLoadResult,
    pub origin: CrateOrigin,
    pub is_proc_macro: bool,
}
 +
/// The Rust edition a crate is compiled under.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Edition {
    Edition2015,
    Edition2018,
    Edition2021,
}

impl Edition {
    /// Edition assumed when none is specified.
    /// NOTE(review): this is 2018, not the newest variant (2021) —
    /// presumably intentional as a conservative default; confirm before
    /// changing.
    pub const CURRENT: Edition = Edition::Edition2018;
}
 +
/// Environment variables visible to a crate (e.g. for `env!` expansion).
#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct Env {
    entries: FxHashMap<String, String>,
}

/// An edge in the crate graph: `name` is the name the depending crate uses
/// to refer to `crate_id`.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Dependency {
    pub crate_id: CrateId,
    pub name: CrateName,
    // Whether the dependency enters the extern prelude; see `is_prelude`.
    prelude: bool,
}
 +
 +impl Dependency {
 +    pub fn new(name: CrateName, crate_id: CrateId) -> Self {
 +        Self { name, crate_id, prelude: true }
 +    }
 +
 +    pub fn with_prelude(name: CrateName, crate_id: CrateId, prelude: bool) -> Self {
 +        Self { name, crate_id, prelude }
 +    }
 +
 +    /// Whether this dependency is to be added to the depending crate's extern prelude.
 +    pub fn is_prelude(&self) -> bool {
 +        self.prelude
 +    }
 +}
 +
impl CrateGraph {
    /// Registers a new crate and returns its id.
    ///
    /// The crate starts without dependencies; wire it up afterwards with
    /// [`CrateGraph::add_dep`].
    pub fn add_crate_root(
        &mut self,
        root_file_id: FileId,
        edition: Edition,
        display_name: Option<CrateDisplayName>,
        version: Option<String>,
        cfg_options: CfgOptions,
        potential_cfg_options: CfgOptions,
        env: Env,
        proc_macro: ProcMacroLoadResult,
        is_proc_macro: bool,
        origin: CrateOrigin,
    ) -> CrateId {
        let data = CrateData {
            root_file_id,
            edition,
            version,
            display_name,
            cfg_options,
            potential_cfg_options,
            env,
            proc_macro,
            dependencies: Vec::new(),
            origin,
            is_proc_macro,
        };
        // Ids are dense: the next id is the current number of crates.
        let crate_id = CrateId(self.arena.len() as u32);
        let prev = self.arena.insert(crate_id, data);
        assert!(prev.is_none());
        crate_id
    }

    /// Adds the dependency edge `from -> dep.crate_id`, rejecting edges that
    /// would make the graph cyclic.
    pub fn add_dep(
        &mut self,
        from: CrateId,
        dep: Dependency,
    ) -> Result<(), CyclicDependenciesError> {
        let _p = profile::span("add_dep");

        // Check if adding a dep from `from` to `to` creates a cycle. To figure
        // that out, look for a path in the *opposite* direction, from `to` to
        // `from`.
        if let Some(path) = self.find_path(&mut NoHashHashSet::default(), dep.crate_id, from) {
            let path = path.into_iter().map(|it| (it, self[it].display_name.clone())).collect();
            let err = CyclicDependenciesError { path };
            assert!(err.from().0 == from && err.to().0 == dep.crate_id);
            return Err(err);
        }

        self.arena.get_mut(&from).unwrap().add_dep(dep);
        Ok(())
    }

    pub fn is_empty(&self) -> bool {
        self.arena.is_empty()
    }

    /// Iterates over the ids of all crates in the graph, in no guaranteed order.
    pub fn iter(&self) -> impl Iterator<Item = CrateId> + '_ {
        self.arena.keys().copied()
    }

    /// Returns an iterator over all transitive dependencies of the given crate,
    /// including the crate itself.
    pub fn transitive_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> {
        // Worklist traversal; `deps` doubles as the visited set.
        let mut worklist = vec![of];
        let mut deps = NoHashHashSet::default();

        while let Some(krate) = worklist.pop() {
            if !deps.insert(krate) {
                continue;
            }

            worklist.extend(self[krate].dependencies.iter().map(|dep| dep.crate_id));
        }

        deps.into_iter()
    }

    /// Returns all transitive reverse dependencies of the given crate,
    /// including the crate itself.
    pub fn transitive_rev_deps(&self, of: CrateId) -> impl Iterator<Item = CrateId> {
        let mut worklist = vec![of];
        let mut rev_deps = NoHashHashSet::default();
        rev_deps.insert(of);

        // Build the reversed adjacency list once, then walk it.
        let mut inverted_graph = NoHashHashMap::<_, Vec<_>>::default();
        self.arena.iter().for_each(|(&krate, data)| {
            data.dependencies
                .iter()
                .for_each(|dep| inverted_graph.entry(dep.crate_id).or_default().push(krate))
        });

        while let Some(krate) = worklist.pop() {
            if let Some(krate_rev_deps) = inverted_graph.get(&krate) {
                krate_rev_deps
                    .iter()
                    .copied()
                    // `insert` returning true means "not seen before".
                    .filter(|&rev_dep| rev_deps.insert(rev_dep))
                    .for_each(|rev_dep| worklist.push(rev_dep));
            }
        }

        rev_deps.into_iter()
    }

    /// Returns all crates in the graph, sorted in topological order (ie. dependencies of a crate
    /// come before the crate itself).
    pub fn crates_in_topological_order(&self) -> Vec<CrateId> {
        let mut res = Vec::new();
        let mut visited = NoHashHashSet::default();

        for krate in self.arena.keys().copied() {
            go(self, &mut visited, &mut res, krate);
        }

        return res;

        // Post-order DFS: a crate is pushed after all of its dependencies.
        fn go(
            graph: &CrateGraph,
            visited: &mut NoHashHashSet<CrateId>,
            res: &mut Vec<CrateId>,
            source: CrateId,
        ) {
            if !visited.insert(source) {
                return;
            }
            for dep in graph[source].dependencies.iter() {
                go(graph, visited, res, dep.crate_id)
            }
            res.push(source)
        }
    }

    // FIXME: this only finds one crate with the given root; we could have multiple
    pub fn crate_id_for_crate_root(&self, file_id: FileId) -> Option<CrateId> {
        let (&crate_id, _) =
            self.arena.iter().find(|(_crate_id, data)| data.root_file_id == file_id)?;
        Some(crate_id)
    }

    /// Extends this crate graph by adding a complete disjoint second crate
    /// graph.
    ///
    /// The ids of the crates in the `other` graph are shifted by the return
    /// amount.
    pub fn extend(&mut self, other: CrateGraph) -> u32 {
        let start = self.arena.len() as u32;
        self.arena.extend(other.arena.into_iter().map(|(id, mut data)| {
            let new_id = id.shift(start);
            // Dependency edges must be remapped along with the crate ids.
            for dep in &mut data.dependencies {
                dep.crate_id = dep.crate_id.shift(start);
            }
            (new_id, data)
        }));
        start
    }

    /// DFS for a dependency path `from -> ... -> to`; `visited` prevents
    /// re-walking shared subgraphs. The returned path is in reverse order
    /// (`to` first).
    fn find_path(
        &self,
        visited: &mut NoHashHashSet<CrateId>,
        from: CrateId,
        to: CrateId,
    ) -> Option<Vec<CrateId>> {
        if !visited.insert(from) {
            return None;
        }

        if from == to {
            return Some(vec![to]);
        }

        for dep in &self[from].dependencies {
            let crate_id = dep.crate_id;
            if let Some(mut path) = self.find_path(visited, crate_id, to) {
                path.push(from);
                return Some(path);
            }
        }

        None
    }

    // Work around for https://github.com/rust-lang/rust-analyzer/issues/6038.
    // As hacky as it gets.
    pub fn patch_cfg_if(&mut self) -> bool {
        let cfg_if = self.hacky_find_crate("cfg_if");
        let std = self.hacky_find_crate("std");
        match (cfg_if, std) {
            (Some(cfg_if), Some(std)) => {
                // Invert the edge: std depends on cfg_if, not vice versa.
                self.arena.get_mut(&cfg_if).unwrap().dependencies.clear();
                self.arena
                    .get_mut(&std)
                    .unwrap()
                    .dependencies
                    .push(Dependency::new(CrateName::new("cfg_if").unwrap(), cfg_if));
                true
            }
            _ => false,
        }
    }

    /// Looks a crate up by display name; "hacky" because display names are
    /// for UI only and not guaranteed unique.
    fn hacky_find_crate(&self, display_name: &str) -> Option<CrateId> {
        self.iter().find(|it| self[*it].display_name.as_deref() == Some(display_name))
    }
}
 +
impl ops::Index<CrateId> for CrateGraph {
    type Output = CrateData;
    fn index(&self, crate_id: CrateId) -> &CrateData {
        // Panics if the id is not part of this graph.
        &self.arena[&crate_id]
    }
}

impl CrateId {
    /// Shifts the id by `amount`; used when merging graphs in
    /// [`CrateGraph::extend`].
    fn shift(self, amount: u32) -> CrateId {
        CrateId(self.0 + amount)
    }
}

impl CrateData {
    fn add_dep(&mut self, dep: Dependency) {
        self.dependencies.push(dep)
    }
}
 +
 +impl FromStr for Edition {
 +    type Err = ParseEditionError;
 +
 +    fn from_str(s: &str) -> Result<Self, Self::Err> {
 +        let res = match s {
 +            "2015" => Edition::Edition2015,
 +            "2018" => Edition::Edition2018,
 +            "2021" => Edition::Edition2021,
 +            _ => return Err(ParseEditionError { invalid_input: s.to_string() }),
 +        };
 +        Ok(res)
 +    }
 +}
 +
 +impl fmt::Display for Edition {
 +    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 +        f.write_str(match self {
 +            Edition::Edition2015 => "2015",
 +            Edition::Edition2018 => "2018",
 +            Edition::Edition2021 => "2021",
 +        })
 +    }
 +}
 +
 +impl FromIterator<(String, String)> for Env {
 +    fn from_iter<T: IntoIterator<Item = (String, String)>>(iter: T) -> Self {
 +        Env { entries: FromIterator::from_iter(iter) }
 +    }
 +}
 +
impl Env {
    /// Sets (or overwrites) an environment variable.
    pub fn set(&mut self, env: &str, value: String) {
        self.entries.insert(env.to_owned(), value);
    }

    /// Looks up a variable, cloning the value if present.
    pub fn get(&self, env: &str) -> Option<String> {
        self.entries.get(env).cloned()
    }

    /// Iterates over all `(key, value)` pairs as string slices.
    pub fn iter(&self) -> impl Iterator<Item = (&str, &str)> {
        self.entries.iter().map(|(k, v)| (k.as_str(), v.as_str()))
    }
}
 +
/// Error returned by `Edition::from_str` for unknown edition strings.
#[derive(Debug)]
pub struct ParseEditionError {
    // The rejected input, echoed back in the Display impl.
    invalid_input: String,
}

impl fmt::Display for ParseEditionError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "invalid edition: {:?}", self.invalid_input)
    }
}

impl std::error::Error for ParseEditionError {}
 +
/// Error returned by [`CrateGraph::add_dep`] when the new edge would close a
/// dependency cycle.
#[derive(Debug)]
pub struct CyclicDependenciesError {
    // The offending cycle, stored end-to-start (see the Display impl, which
    // reverses it for rendering).
    path: Vec<(CrateId, Option<CrateDisplayName>)>,
}

impl CyclicDependenciesError {
    // Invariant: `path` is built from a found cycle and is never empty,
    // so these unwraps cannot fail.
    fn from(&self) -> &(CrateId, Option<CrateDisplayName>) {
        self.path.first().unwrap()
    }
    fn to(&self) -> &(CrateId, Option<CrateDisplayName>) {
        self.path.last().unwrap()
    }
}
 +
 +impl fmt::Display for CyclicDependenciesError {
 +    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 +        let render = |(id, name): &(CrateId, Option<CrateDisplayName>)| match name {
 +            Some(it) => format!("{}({:?})", it, id),
 +            None => format!("{:?}", id),
 +        };
 +        let path = self.path.iter().rev().map(render).collect::<Vec<String>>().join(" -> ");
 +        write!(
 +            f,
 +            "cyclic deps: {} -> {}, alternative path: {}",
 +            render(self.from()),
 +            render(self.to()),
 +            path
 +        )
 +    }
 +}
 +
#[cfg(test)]
mod tests {
    use crate::CrateOrigin;

    use super::{
        CfgOptions, CrateGraph, CrateId, CrateName, Dependency, Edition::Edition2018, Env, FileId,
    };

    /// Adds a crate rooted at `file_id` with default/empty settings — these
    /// tests only care about dependency edges, not crate contents.
    fn add_crate(graph: &mut CrateGraph, file_id: u32) -> CrateId {
        graph.add_crate_root(
            FileId(file_id),
            Edition2018,
            None,
            None,
            CfgOptions::default(),
            CfgOptions::default(),
            Env::default(),
            Ok(Vec::new()),
            false,
            CrateOrigin::CratesIo { repo: None },
        )
    }

    #[test]
    fn detect_cyclic_dependency_indirect() {
        let mut graph = CrateGraph::default();
        let crate1 = add_crate(&mut graph, 1);
        let crate2 = add_crate(&mut graph, 2);
        let crate3 = add_crate(&mut graph, 3);
        assert!(graph
            .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
            .is_ok());
        assert!(graph
            .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3))
            .is_ok());
        // Closing the 1 -> 2 -> 3 -> 1 loop must be rejected.
        assert!(graph
            .add_dep(crate3, Dependency::new(CrateName::new("crate1").unwrap(), crate1))
            .is_err());
    }

    #[test]
    fn detect_cyclic_dependency_direct() {
        let mut graph = CrateGraph::default();
        let crate1 = add_crate(&mut graph, 1);
        let crate2 = add_crate(&mut graph, 2);
        assert!(graph
            .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
            .is_ok());
        // A self-edge is the smallest possible cycle.
        assert!(graph
            .add_dep(crate2, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
            .is_err());
    }

    #[test]
    fn it_works() {
        let mut graph = CrateGraph::default();
        let crate1 = add_crate(&mut graph, 1);
        let crate2 = add_crate(&mut graph, 2);
        let crate3 = add_crate(&mut graph, 3);
        assert!(graph
            .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2))
            .is_ok());
        assert!(graph
            .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3))
            .is_ok());
    }

    #[test]
    fn dashes_are_normalized() {
        let mut graph = CrateGraph::default();
        let crate1 = add_crate(&mut graph, 1);
        let crate2 = add_crate(&mut graph, 2);
        assert!(graph
            .add_dep(
                crate1,
                Dependency::new(CrateName::normalize_dashes("crate-name-with-dashes"), crate2)
            )
            .is_ok());
        // The stored dependency name has `-` replaced by `_`.
        assert_eq!(
            graph[crate1].dependencies,
            vec![Dependency::new(CrateName::new("crate_name_with_dashes").unwrap(), crate2)]
        );
    }
}
index 2d0a95b09d9a1a5b7d0bcb5116ad0572f4a1930f,0000000000000000000000000000000000000000..da11e4ae7bb96f36b2532b79423407e93786cfcf
mode 100644,000000..100644
--- /dev/null
@@@ -1,131 -1,0 +1,131 @@@
- use rustc_hash::FxHashSet;
 +//! base_db defines basic database traits. The concrete DB is defined by ide.
 +
 +#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
 +
 +mod input;
 +mod change;
 +pub mod fixture;
 +
 +use std::{panic, sync::Arc};
 +
-     fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>>;
++use stdx::hash::NoHashHashSet;
 +use syntax::{ast, Parse, SourceFile, TextRange, TextSize};
 +
 +pub use crate::{
 +    change::Change,
 +    input::{
 +        CrateData, CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency,
 +        Edition, Env, LangCrateOrigin, ProcMacro, ProcMacroExpander, ProcMacroExpansionError,
 +        ProcMacroId, ProcMacroKind, ProcMacroLoadResult, SourceRoot, SourceRootId,
 +    },
 +};
 +pub use salsa::{self, Cancelled};
 +pub use vfs::{file_set::FileSet, AnchoredPath, AnchoredPathBuf, FileId, VfsPath};
 +
/// Implements `salsa::InternKey` for a newtype wrapping `salsa::InternId`.
#[macro_export]
macro_rules! impl_intern_key {
    ($name:ident) => {
        impl $crate::salsa::InternKey for $name {
            fn from_intern_id(v: $crate::salsa::InternId) -> Self {
                $name(v)
            }
            fn as_intern_id(&self) -> $crate::salsa::InternId {
                self.0
            }
        }
    };
}
 +
/// Upcasting helper: lets a concrete database be viewed as one of its
/// super-trait objects.
pub trait Upcast<T: ?Sized> {
    fn upcast(&self) -> &T;
}

/// A position (byte offset) within a specific file.
#[derive(Clone, Copy, Debug)]
pub struct FilePosition {
    pub file_id: FileId,
    pub offset: TextSize,
}

/// A text range within a specific file.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)]
pub struct FileRange {
    pub file_id: FileId,
    pub range: TextRange,
}

/// Default capacity for query LRU caches.
pub const DEFAULT_LRU_CAP: usize = 128;
 +
/// File-system-ish operations the database needs but that live outside salsa.
pub trait FileLoader {
    /// Text of the file.
    fn file_text(&self, file_id: FileId) -> Arc<String>;
    /// Resolves a path relative to an anchor file, if it exists.
    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId>;
    /// Crates whose source root contains `file_id`.
    fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>>;
}
 +
/// Database which stores all significant input facts: source code and project
/// model. Everything else in rust-analyzer is derived from these queries.
#[salsa::query_group(SourceDatabaseStorage)]
pub trait SourceDatabase: FileLoader + std::fmt::Debug {
    /// Parses the file into the syntax tree.
    #[salsa::invoke(parse_query)]
    fn parse(&self, file_id: FileId) -> Parse<ast::SourceFile>;

    /// The crate graph.
    #[salsa::input]
    fn crate_graph(&self) -> Arc<CrateGraph>;
}
 +
/// Implementation of the `parse` query: reads the file text and parses it.
fn parse_query(db: &dyn SourceDatabase, file_id: FileId) -> Parse<ast::SourceFile> {
    let _p = profile::span("parse_query").detail(|| format!("{:?}", file_id));
    let text = db.file_text(file_id);
    SourceFile::parse(&*text)
}
 +
/// We don't want to give HIR knowledge of source roots, hence we extract these
/// methods into a separate DB.
#[salsa::query_group(SourceDatabaseExtStorage)]
pub trait SourceDatabaseExt: SourceDatabase {
    #[salsa::input]
    fn file_text(&self, file_id: FileId) -> Arc<String>;
    /// Source root of the file.
    #[salsa::input]
    fn file_source_root(&self, file_id: FileId) -> SourceRootId;
    /// Contents of the source root.
    #[salsa::input]
    fn source_root(&self, id: SourceRootId) -> Arc<SourceRoot>;

    /// Crates whose root file lives in source root `id` (derived query).
    fn source_root_crates(&self, id: SourceRootId) -> Arc<NoHashHashSet<CrateId>>;
}
 +
-         source_root.file_set.resolve_path(path)
/// Implementation of the `source_root_crates` query: scans the crate graph
/// for crates whose root file belongs to source root `id`.
fn source_root_crates(db: &dyn SourceDatabaseExt, id: SourceRootId) -> Arc<NoHashHashSet<CrateId>> {
    let graph = db.crate_graph();
    let res = graph
        .iter()
        .filter(|&krate| {
            let root_file = graph[krate].root_file_id;
            db.file_source_root(root_file) == id
        })
        .collect();
    Arc::new(res)
}
 +
/// Silly workaround for cyclic deps between the traits
pub struct FileLoaderDelegate<T>(pub T);

impl<T: SourceDatabaseExt> FileLoader for FileLoaderDelegate<&'_ T> {
    fn file_text(&self, file_id: FileId) -> Arc<String> {
        SourceDatabaseExt::file_text(self.0, file_id)
    }
    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
        // FIXME: this *somehow* should be platform agnostic...
        // Resolution happens within the anchor file's source root.
        let source_root = self.0.file_source_root(path.anchor);
        let source_root = self.0.source_root(source_root);
        source_root.resolve_path(path)
    }

    fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
        let _p = profile::span("relevant_crates");
        // Crates are looked up per source root, not per individual file.
        let source_root = self.0.file_source_root(file_id);
        self.0.source_root_crates(source_root)
    }
}
index c22945c81fcb93b1971721c6d5f6446cf950142c,0000000000000000000000000000000000000000..d9f4ef5b7ff578deceae41a0a1866b15d7dd9753
mode 100644,000000..100644
--- /dev/null
@@@ -1,419 -1,0 +1,421 @@@
-                             tracing::error!(
-                                 command = ?self.check_command(),
-                                 %error, "failed to restart flycheck"
-                             );
 +//! Flycheck provides the functionality needed to run `cargo check` or
 +//! another compatible command (f.x. clippy) in a background thread and provide
 +//! LSP diagnostics based on the output of the command.
 +
 +#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
 +
 +use std::{
 +    fmt, io,
 +    process::{ChildStderr, ChildStdout, Command, Stdio},
 +    time::Duration,
 +};
 +
 +use crossbeam_channel::{never, select, unbounded, Receiver, Sender};
 +use paths::AbsPathBuf;
 +use serde::Deserialize;
 +use stdx::{process::streaming_output, JodChild};
 +
 +pub use cargo_metadata::diagnostic::{
 +    Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan,
 +    DiagnosticSpanMacroExpansion,
 +};
 +
/// Configuration for the command flycheck runs.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum FlycheckConfig {
    /// Run a cargo subcommand (`check`, `clippy`, ...) with the given options.
    CargoCommand {
        command: String,
        target_triple: Option<String>,
        all_targets: bool,
        no_default_features: bool,
        all_features: bool,
        features: Vec<String>,
        extra_args: Vec<String>,
    },
    /// Run an arbitrary user-provided command.
    CustomCommand {
        command: String,
        args: Vec<String>,
    },
}
 +
 +impl fmt::Display for FlycheckConfig {
 +    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 +        match self {
 +            FlycheckConfig::CargoCommand { command, .. } => write!(f, "cargo {}", command),
 +            FlycheckConfig::CustomCommand { command, args } => {
 +                write!(f, "{} {}", command, args.join(" "))
 +            }
 +        }
 +    }
 +}
 +
/// Flycheck wraps the shared state and communication machinery used for
/// running `cargo check` (or other compatible command) and providing
/// diagnostics based on the output.
/// The spawned thread is shut down when this struct is dropped.
#[derive(Debug)]
pub struct FlycheckHandle {
    // XXX: drop order is significant: dropping `sender` first closes the
    // channel, which lets the actor's event loop terminate so the join in
    // `_thread`'s drop can complete.
    sender: Sender<Restart>,
    _thread: jod_thread::JoinHandle,
    id: usize,
}
 +
impl FlycheckHandle {
    /// Spawns the background flycheck actor thread and returns a handle to it.
    pub fn spawn(
        id: usize,
        sender: Box<dyn Fn(Message) + Send>,
        config: FlycheckConfig,
        workspace_root: AbsPathBuf,
    ) -> FlycheckHandle {
        let actor = FlycheckActor::new(id, sender, config, workspace_root);
        let (sender, receiver) = unbounded::<Restart>();
        let thread = jod_thread::Builder::new()
            .name("Flycheck".to_owned())
            .spawn(move || actor.run(receiver))
            .expect("failed to spawn thread");
        FlycheckHandle { id, sender, _thread: thread }
    }

    /// Schedule a re-start of the cargo check worker.
    pub fn restart(&self) {
        self.sender.send(Restart::Yes).unwrap();
    }

    /// Stop this cargo check worker.
    pub fn cancel(&self) {
        self.sender.send(Restart::No).unwrap();
    }

    /// Flycheck instance id, as passed to [`FlycheckHandle::spawn`].
    pub fn id(&self) -> usize {
        self.id
    }
}
 +
/// Messages the flycheck thread sends back to its consumer.
pub enum Message {
    /// Request adding a diagnostic with fixes included to a file
    AddDiagnostic { id: usize, workspace_root: AbsPathBuf, diagnostic: Diagnostic },

    /// Request check progress notification to client
    Progress {
        /// Flycheck instance ID
        id: usize,
        progress: Progress,
    },
}

impl fmt::Debug for Message {
    // Hand-written so the (potentially large) diagnostic is summarized by
    // its code rather than dumped in full.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Message::AddDiagnostic { id, workspace_root, diagnostic } => f
                .debug_struct("AddDiagnostic")
                .field("id", id)
                .field("workspace_root", workspace_root)
                .field("diagnostic_code", &diagnostic.code.as_ref().map(|it| &it.code))
                .finish(),
            Message::Progress { id, progress } => {
                f.debug_struct("Progress").field("id", id).field("progress", progress).finish()
            }
        }
    }
}
 +
/// Progress notifications emitted over the lifetime of one check run.
#[derive(Debug)]
pub enum Progress {
    DidStart,
    DidCheckCrate(String),
    DidFinish(io::Result<()>),
    DidCancel,
    /// Spawning the check command itself failed; carries the error message.
    DidFailToRestart(String),
}

/// Control message for the actor: restart the check, or cancel it.
enum Restart {
    Yes,
    No,
}
 +
/// The background actor that owns the (optional) running check process.
struct FlycheckActor {
    /// Flycheck instance id (mirrors [`FlycheckHandle::id`]).
    id: usize,
    /// Callback used to deliver [`Message`]s to the consumer.
    sender: Box<dyn Fn(Message) + Send>,
    config: FlycheckConfig,
    workspace_root: AbsPathBuf,
    /// CargoHandle exists to wrap around the communication needed to be able to
    /// run `cargo check` without blocking. Currently the Rust standard library
    /// doesn't provide a way to read sub-process output without blocking, so we
    /// have to wrap sub-processes output handling in a thread and pass messages
    /// back over a channel.
    cargo_handle: Option<CargoHandle>,
}

/// The two kinds of events the actor's select loop can observe.
enum Event {
    Restart(Restart),
    CheckEvent(Option<CargoMessage>),
}
 +
 +impl FlycheckActor {
    /// Creates the actor; no check process is spawned until a restart is
    /// requested.
    fn new(
        id: usize,
        sender: Box<dyn Fn(Message) + Send>,
        config: FlycheckConfig,
        workspace_root: AbsPathBuf,
    ) -> FlycheckActor {
        tracing::info!(%id, ?workspace_root, "Spawning flycheck");
        FlycheckActor { id, sender, config, workspace_root, cargo_handle: None }
    }
    /// Sends a progress notification tagged with this instance's id.
    fn progress(&self, progress: Progress) {
        self.send(Message::Progress { id: self.id, progress });
    }
    /// Blocks until either a restart request arrives or the running check
    /// process emits something; returns `None` when the inbox has closed
    /// (i.e. the handle was dropped).
    fn next_event(&self, inbox: &Receiver<Restart>) -> Option<Event> {
        let check_chan = self.cargo_handle.as_ref().map(|cargo| &cargo.receiver);
        select! {
            recv(inbox) -> msg => msg.ok().map(Event::Restart),
            // `never()` keeps this arm permanently silent while no check
            // process is running.
            recv(check_chan.unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())),
        }
    }
    /// The actor's event loop: reacts to restart/cancel requests and to
    /// output from the running check process until the inbox closes.
    fn run(mut self, inbox: Receiver<Restart>) {
        while let Some(event) = self.next_event(&inbox) {
            match event {
                Event::Restart(Restart::No) => {
                    self.cancel_check_process();
                }
                Event::Restart(Restart::Yes) => {
                    // Cancel the previously spawned process
                    self.cancel_check_process();
                    // Debounce: coalesce any restart requests that piled up.
                    while let Ok(_) = inbox.recv_timeout(Duration::from_millis(50)) {}

                    let command = self.check_command();
                    tracing::debug!(?command, "will restart flycheck");
                    match CargoHandle::spawn(command) {
                        Ok(cargo_handle) => {
                            tracing::debug!(
                                command = ?self.check_command(),
                                "did  restart flycheck"
                            );
                            self.cargo_handle = Some(cargo_handle);
                            self.progress(Progress::DidStart);
                        }
                        Err(error) => {
                            // Surface the spawn failure to the client rather
                            // than only logging it.
                            self.progress(Progress::DidFailToRestart(format!(
                                "Failed to run the following command: {:?} error={}",
                                self.check_command(),
                                error
                            )));
                        }
                    }
                }
                Event::CheckEvent(None) => {
                    tracing::debug!(flycheck_id = self.id, "flycheck finished");

                    // Watcher finished
                    let cargo_handle = self.cargo_handle.take().unwrap();
                    let res = cargo_handle.join();
                    if res.is_err() {
                        tracing::error!(
                            "Flycheck failed to run the following command: {:?}",
                            self.check_command()
                        );
                    }
                    self.progress(Progress::DidFinish(res));
                }
                Event::CheckEvent(Some(message)) => match message {
                    CargoMessage::CompilerArtifact(msg) => {
                        self.progress(Progress::DidCheckCrate(msg.target.name));
                    }

                    CargoMessage::Diagnostic(msg) => {
                        self.send(Message::AddDiagnostic {
                            id: self.id,
                            workspace_root: self.workspace_root.clone(),
                            diagnostic: msg,
                        });
                    }
                },
            }
        }
        // If we rerun the thread, we need to discard the previous check results first
        self.cancel_check_process();
    }
 +
 +    fn cancel_check_process(&mut self) {
 +        if let Some(cargo_handle) = self.cargo_handle.take() {
 +            tracing::debug!(
 +                command = ?self.check_command(),
 +                "did  cancel flycheck"
 +            );
 +            cargo_handle.cancel();
 +            self.progress(Progress::DidCancel);
 +        }
 +    }
 +
 +    fn check_command(&self) -> Command {
 +        let mut cmd = match &self.config {
 +            FlycheckConfig::CargoCommand {
 +                command,
 +                target_triple,
 +                no_default_features,
 +                all_targets,
 +                all_features,
 +                extra_args,
 +                features,
 +            } => {
 +                let mut cmd = Command::new(toolchain::cargo());
 +                cmd.arg(command);
 +                cmd.current_dir(&self.workspace_root);
 +                cmd.args(&["--workspace", "--message-format=json", "--manifest-path"])
 +                    .arg(self.workspace_root.join("Cargo.toml").as_os_str());
 +
 +                if let Some(target) = target_triple {
 +                    cmd.args(&["--target", target.as_str()]);
 +                }
 +                if *all_targets {
 +                    cmd.arg("--all-targets");
 +                }
 +                if *all_features {
 +                    cmd.arg("--all-features");
 +                } else {
 +                    if *no_default_features {
 +                        cmd.arg("--no-default-features");
 +                    }
 +                    if !features.is_empty() {
 +                        cmd.arg("--features");
 +                        cmd.arg(features.join(" "));
 +                    }
 +                }
 +                cmd.args(extra_args);
 +                cmd
 +            }
 +            FlycheckConfig::CustomCommand { command, args } => {
 +                let mut cmd = Command::new(command);
 +                cmd.args(args);
 +                cmd
 +            }
 +        };
 +        cmd.current_dir(&self.workspace_root);
 +        cmd
 +    }
 +
 +    fn send(&self, check_task: Message) {
 +        (self.sender)(check_task);
 +    }
 +}
 +
 +/// A handle to a cargo process used for fly-checking.
 +struct CargoHandle {
 +    /// The handle to the actual cargo process. As we cannot cancel directly from with
 +    /// a read syscall dropping and therefor terminating the process is our best option.
 +    child: JodChild,
 +    thread: jod_thread::JoinHandle<io::Result<(bool, String)>>,
 +    receiver: Receiver<CargoMessage>,
 +}
 +
 +impl CargoHandle {
 +    fn spawn(mut command: Command) -> std::io::Result<CargoHandle> {
 +        command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null());
 +        let mut child = JodChild::spawn(command)?;
 +
 +        let stdout = child.stdout.take().unwrap();
 +        let stderr = child.stderr.take().unwrap();
 +
 +        let (sender, receiver) = unbounded();
 +        let actor = CargoActor::new(sender, stdout, stderr);
 +        let thread = jod_thread::Builder::new()
 +            .name("CargoHandle".to_owned())
 +            .spawn(move || actor.run())
 +            .expect("failed to spawn thread");
 +        Ok(CargoHandle { child, thread, receiver })
 +    }
 +
 +    fn cancel(mut self) {
 +        let _ = self.child.kill();
 +        let _ = self.child.wait();
 +    }
 +
 +    fn join(mut self) -> io::Result<()> {
 +        let _ = self.child.kill();
 +        let exit_status = self.child.wait()?;
 +        let (read_at_least_one_message, error) = self.thread.join()?;
 +        if read_at_least_one_message || exit_status.success() {
 +            Ok(())
 +        } else {
 +            Err(io::Error::new(io::ErrorKind::Other, format!(
 +                "Cargo watcher failed, the command produced no valid metadata (exit code: {:?}):\n{}",
 +                exit_status, error
 +            )))
 +        }
 +    }
 +}
 +
 +struct CargoActor {
 +    sender: Sender<CargoMessage>,
 +    stdout: ChildStdout,
 +    stderr: ChildStderr,
 +}
 +
 +impl CargoActor {
 +    fn new(sender: Sender<CargoMessage>, stdout: ChildStdout, stderr: ChildStderr) -> CargoActor {
 +        CargoActor { sender, stdout, stderr }
 +    }
 +
 +    fn run(self) -> io::Result<(bool, String)> {
 +        // We manually read a line at a time, instead of using serde's
 +        // stream deserializers, because the deserializer cannot recover
 +        // from an error, resulting in it getting stuck, because we try to
 +        // be resilient against failures.
 +        //
 +        // Because cargo only outputs one JSON object per line, we can
 +        // simply skip a line if it doesn't parse, which just ignores any
 +        // erroneous output.
 +
 +        let mut error = String::new();
 +        let mut read_at_least_one_message = false;
 +        let output = streaming_output(
 +            self.stdout,
 +            self.stderr,
 +            &mut |line| {
 +                read_at_least_one_message = true;
 +
 +                // Try to deserialize a message from Cargo or Rustc.
 +                let mut deserializer = serde_json::Deserializer::from_str(line);
 +                deserializer.disable_recursion_limit();
 +                if let Ok(message) = JsonMessage::deserialize(&mut deserializer) {
 +                    match message {
 +                        // Skip certain kinds of messages to only spend time on what's useful
 +                        JsonMessage::Cargo(message) => match message {
 +                            cargo_metadata::Message::CompilerArtifact(artifact)
 +                                if !artifact.fresh =>
 +                            {
 +                                self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap();
 +                            }
 +                            cargo_metadata::Message::CompilerMessage(msg) => {
 +                                self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap();
 +                            }
 +                            _ => (),
 +                        },
 +                        JsonMessage::Rustc(message) => {
 +                            self.sender.send(CargoMessage::Diagnostic(message)).unwrap();
 +                        }
 +                    }
 +                }
 +            },
 +            &mut |line| {
 +                error.push_str(line);
 +                error.push('\n');
 +            },
 +        );
 +        match output {
 +            Ok(_) => Ok((read_at_least_one_message, error)),
 +            Err(e) => Err(io::Error::new(e.kind(), format!("{:?}: {}", e, error))),
 +        }
 +    }
 +}
 +
 +enum CargoMessage {
 +    CompilerArtifact(cargo_metadata::Artifact),
 +    Diagnostic(Diagnostic),
 +}
 +
 +#[derive(Deserialize)]
 +#[serde(untagged)]
 +enum JsonMessage {
 +    Cargo(cargo_metadata::Message),
 +    Rustc(Diagnostic),
 +}
index 35c8708955a77757b56635b9c28845e1c3916514,0000000000000000000000000000000000000000..631ae3cf11fa7e0fc9ddc2d57843ac383d1d6e65
mode 100644,000000..100644
--- /dev/null
@@@ -1,579 -1,0 +1,615 @@@
- use hir_expand::{name::Name, AstId, ExpandResult, HirFileId, MacroCallId, MacroDefKind};
 +//! Contains basic data about various HIR declarations.
 +
 +use std::sync::Arc;
 +
-     nameres::{attr_resolution::ResolvedAttr, proc_macro::ProcMacroKind, DefMap},
++use hir_expand::{name::Name, AstId, ExpandResult, HirFileId, InFile, MacroCallId, MacroDefKind};
 +use smallvec::SmallVec;
 +use syntax::ast;
 +
 +use crate::{
 +    attr::Attrs,
 +    body::{Expander, Mark},
 +    db::DefDatabase,
 +    intern::Interned,
 +    item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, ModItem, Param, TreeId},
-         let (items, attribute_calls) = collector.finish();
-         Arc::new(TraitData {
-             name,
-             attribute_calls,
-             items,
-             is_auto,
-             is_unsafe,
-             visibility,
-             skip_array_during_method_dispatch,
-         })
++    nameres::{
++        attr_resolution::ResolvedAttr, diagnostics::DefDiagnostic, proc_macro::ProcMacroKind,
++        DefMap,
++    },
 +    type_ref::{TraitRef, TypeBound, TypeRef},
 +    visibility::RawVisibility,
 +    AssocItemId, AstIdWithPath, ConstId, ConstLoc, FunctionId, FunctionLoc, HasModule, ImplId,
 +    Intern, ItemContainerId, ItemLoc, Lookup, Macro2Id, MacroRulesId, ModuleId, ProcMacroId,
 +    StaticId, TraitId, TypeAliasId, TypeAliasLoc,
 +};
 +
 +#[derive(Debug, Clone, PartialEq, Eq)]
 +pub struct FunctionData {
 +    pub name: Name,
 +    pub params: Vec<(Option<Name>, Interned<TypeRef>)>,
 +    pub ret_type: Interned<TypeRef>,
 +    pub async_ret_type: Option<Interned<TypeRef>>,
 +    pub attrs: Attrs,
 +    pub visibility: RawVisibility,
 +    pub abi: Option<Interned<str>>,
 +    pub legacy_const_generics_indices: Box<[u32]>,
 +    flags: FnFlags,
 +}
 +
 +impl FunctionData {
 +    pub(crate) fn fn_data_query(db: &dyn DefDatabase, func: FunctionId) -> Arc<FunctionData> {
 +        let loc = func.lookup(db);
 +        let krate = loc.container.module(db).krate;
 +        let crate_graph = db.crate_graph();
 +        let cfg_options = &crate_graph[krate].cfg_options;
 +        let item_tree = loc.id.item_tree(db);
 +        let func = &item_tree[loc.id.value];
 +        let visibility = if let ItemContainerId::TraitId(trait_id) = loc.container {
 +            db.trait_data(trait_id).visibility.clone()
 +        } else {
 +            item_tree[func.visibility].clone()
 +        };
 +
 +        let enabled_params = func
 +            .params
 +            .clone()
 +            .filter(|&param| item_tree.attrs(db, krate, param.into()).is_cfg_enabled(cfg_options));
 +
 +        // If last cfg-enabled param is a `...` param, it's a varargs function.
 +        let is_varargs = enabled_params
 +            .clone()
 +            .next_back()
 +            .map_or(false, |param| matches!(item_tree[param], Param::Varargs));
 +
 +        let mut flags = func.flags;
 +        if is_varargs {
 +            flags |= FnFlags::IS_VARARGS;
 +        }
 +        if flags.contains(FnFlags::HAS_SELF_PARAM) {
 +            // If there's a self param in the syntax, but it is cfg'd out, remove the flag.
 +            let is_cfgd_out = match func.params.clone().next() {
 +                Some(param) => {
 +                    !item_tree.attrs(db, krate, param.into()).is_cfg_enabled(cfg_options)
 +                }
 +                None => {
 +                    stdx::never!("fn HAS_SELF_PARAM but no parameters allocated");
 +                    true
 +                }
 +            };
 +            if is_cfgd_out {
 +                cov_mark::hit!(cfgd_out_self_param);
 +                flags.remove(FnFlags::HAS_SELF_PARAM);
 +            }
 +        }
 +
 +        let legacy_const_generics_indices = item_tree
 +            .attrs(db, krate, ModItem::from(loc.id.value).into())
 +            .by_key("rustc_legacy_const_generics")
 +            .tt_values()
 +            .next()
 +            .map(parse_rustc_legacy_const_generics)
 +            .unwrap_or_default();
 +
 +        Arc::new(FunctionData {
 +            name: func.name.clone(),
 +            params: enabled_params
 +                .clone()
 +                .filter_map(|id| match &item_tree[id] {
 +                    Param::Normal(name, ty) => Some((name.clone(), ty.clone())),
 +                    Param::Varargs => None,
 +                })
 +                .collect(),
 +            ret_type: func.ret_type.clone(),
 +            async_ret_type: func.async_ret_type.clone(),
 +            attrs: item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()),
 +            visibility,
 +            abi: func.abi.clone(),
 +            legacy_const_generics_indices,
 +            flags,
 +        })
 +    }
 +
 +    pub fn has_body(&self) -> bool {
 +        self.flags.contains(FnFlags::HAS_BODY)
 +    }
 +
 +    /// True if the first param is `self`. This is relevant to decide whether this
 +    /// can be called as a method.
 +    pub fn has_self_param(&self) -> bool {
 +        self.flags.contains(FnFlags::HAS_SELF_PARAM)
 +    }
 +
 +    pub fn has_default_kw(&self) -> bool {
 +        self.flags.contains(FnFlags::HAS_DEFAULT_KW)
 +    }
 +
 +    pub fn has_const_kw(&self) -> bool {
 +        self.flags.contains(FnFlags::HAS_CONST_KW)
 +    }
 +
 +    pub fn has_async_kw(&self) -> bool {
 +        self.flags.contains(FnFlags::HAS_ASYNC_KW)
 +    }
 +
 +    pub fn has_unsafe_kw(&self) -> bool {
 +        self.flags.contains(FnFlags::HAS_UNSAFE_KW)
 +    }
 +
 +    pub fn is_varargs(&self) -> bool {
 +        self.flags.contains(FnFlags::IS_VARARGS)
 +    }
 +}
 +
 +fn parse_rustc_legacy_const_generics(tt: &tt::Subtree) -> Box<[u32]> {
 +    let mut indices = Vec::new();
 +    for args in tt.token_trees.chunks(2) {
 +        match &args[0] {
 +            tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => match lit.text.parse() {
 +                Ok(index) => indices.push(index),
 +                Err(_) => break,
 +            },
 +            _ => break,
 +        }
 +
 +        if let Some(comma) = args.get(1) {
 +            match comma {
 +                tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.char == ',' => {}
 +                _ => break,
 +            }
 +        }
 +    }
 +
 +    indices.into_boxed_slice()
 +}
 +
 +#[derive(Debug, Clone, PartialEq, Eq)]
 +pub struct TypeAliasData {
 +    pub name: Name,
 +    pub type_ref: Option<Interned<TypeRef>>,
 +    pub visibility: RawVisibility,
 +    pub is_extern: bool,
 +    /// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl).
 +    pub bounds: Vec<Interned<TypeBound>>,
 +}
 +
 +impl TypeAliasData {
 +    pub(crate) fn type_alias_data_query(
 +        db: &dyn DefDatabase,
 +        typ: TypeAliasId,
 +    ) -> Arc<TypeAliasData> {
 +        let loc = typ.lookup(db);
 +        let item_tree = loc.id.item_tree(db);
 +        let typ = &item_tree[loc.id.value];
 +        let visibility = if let ItemContainerId::TraitId(trait_id) = loc.container {
 +            db.trait_data(trait_id).visibility.clone()
 +        } else {
 +            item_tree[typ.visibility].clone()
 +        };
 +
 +        Arc::new(TypeAliasData {
 +            name: typ.name.clone(),
 +            type_ref: typ.type_ref.clone(),
 +            visibility,
 +            is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)),
 +            bounds: typ.bounds.to_vec(),
 +        })
 +    }
 +}
 +
 +#[derive(Debug, Clone, PartialEq, Eq)]
 +pub struct TraitData {
 +    pub name: Name,
 +    pub items: Vec<(Name, AssocItemId)>,
 +    pub is_auto: bool,
 +    pub is_unsafe: bool,
 +    pub visibility: RawVisibility,
 +    /// Whether the trait has `#[rust_skip_array_during_method_dispatch]`. `hir_ty` will ignore
 +    /// method calls to this trait's methods when the receiver is an array and the crate edition is
 +    /// 2015 or 2018.
 +    pub skip_array_during_method_dispatch: bool,
 +    // box it as the vec is usually empty anyways
 +    pub attribute_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
 +}
 +
 +impl TraitData {
 +    pub(crate) fn trait_data_query(db: &dyn DefDatabase, tr: TraitId) -> Arc<TraitData> {
++        db.trait_data_with_diagnostics(tr).0
++    }
++
++    pub(crate) fn trait_data_with_diagnostics_query(
++        db: &dyn DefDatabase,
++        tr: TraitId,
++    ) -> (Arc<TraitData>, Arc<Vec<DefDiagnostic>>) {
 +        let tr_loc @ ItemLoc { container: module_id, id: tree_id } = tr.lookup(db);
 +        let item_tree = tree_id.item_tree(db);
 +        let tr_def = &item_tree[tree_id.value];
 +        let _cx = stdx::panic_context::enter(format!(
 +            "trait_data_query({:?} -> {:?} -> {:?})",
 +            tr, tr_loc, tr_def
 +        ));
 +        let name = tr_def.name.clone();
 +        let is_auto = tr_def.is_auto;
 +        let is_unsafe = tr_def.is_unsafe;
 +        let visibility = item_tree[tr_def.visibility].clone();
 +        let skip_array_during_method_dispatch = item_tree
 +            .attrs(db, module_id.krate(), ModItem::from(tree_id.value).into())
 +            .by_key("rustc_skip_array_during_method_dispatch")
 +            .exists();
 +
 +        let mut collector =
 +            AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::TraitId(tr));
 +        collector.collect(&item_tree, tree_id.tree_id(), &tr_def.items);
-         let _p = profile::span("impl_data_query");
++        let (items, attribute_calls, diagnostics) = collector.finish();
++
++        (
++            Arc::new(TraitData {
++                name,
++                attribute_calls,
++                items,
++                is_auto,
++                is_unsafe,
++                visibility,
++                skip_array_during_method_dispatch,
++            }),
++            Arc::new(diagnostics),
++        )
 +    }
 +
 +    pub fn associated_types(&self) -> impl Iterator<Item = TypeAliasId> + '_ {
 +        self.items.iter().filter_map(|(_name, item)| match item {
 +            AssocItemId::TypeAliasId(t) => Some(*t),
 +            _ => None,
 +        })
 +    }
 +
 +    pub fn associated_type_by_name(&self, name: &Name) -> Option<TypeAliasId> {
 +        self.items.iter().find_map(|(item_name, item)| match item {
 +            AssocItemId::TypeAliasId(t) if item_name == name => Some(*t),
 +            _ => None,
 +        })
 +    }
 +
 +    pub fn method_by_name(&self, name: &Name) -> Option<FunctionId> {
 +        self.items.iter().find_map(|(item_name, item)| match item {
 +            AssocItemId::FunctionId(t) if item_name == name => Some(*t),
 +            _ => None,
 +        })
 +    }
 +
 +    pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
 +        self.attribute_calls.iter().flat_map(|it| it.iter()).copied()
 +    }
 +}
 +
 +#[derive(Debug, Clone, PartialEq, Eq)]
 +pub struct ImplData {
 +    pub target_trait: Option<Interned<TraitRef>>,
 +    pub self_ty: Interned<TypeRef>,
 +    pub items: Vec<AssocItemId>,
 +    pub is_negative: bool,
 +    // box it as the vec is usually empty anyways
 +    pub attribute_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
 +}
 +
 +impl ImplData {
 +    pub(crate) fn impl_data_query(db: &dyn DefDatabase, id: ImplId) -> Arc<ImplData> {
-         let (items, attribute_calls) = collector.finish();
++        db.impl_data_with_diagnostics(id).0
++    }
++
++    pub(crate) fn impl_data_with_diagnostics_query(
++        db: &dyn DefDatabase,
++        id: ImplId,
++    ) -> (Arc<ImplData>, Arc<Vec<DefDiagnostic>>) {
++        let _p = profile::span("impl_data_with_diagnostics_query");
 +        let ItemLoc { container: module_id, id: tree_id } = id.lookup(db);
 +
 +        let item_tree = tree_id.item_tree(db);
 +        let impl_def = &item_tree[tree_id.value];
 +        let target_trait = impl_def.target_trait.clone();
 +        let self_ty = impl_def.self_ty.clone();
 +        let is_negative = impl_def.is_negative;
 +
 +        let mut collector =
 +            AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::ImplId(id));
 +        collector.collect(&item_tree, tree_id.tree_id(), &impl_def.items);
 +
-         Arc::new(ImplData { target_trait, self_ty, items, is_negative, attribute_calls })
++        let (items, attribute_calls, diagnostics) = collector.finish();
 +        let items = items.into_iter().map(|(_, item)| item).collect();
 +
-     ) -> (Vec<(Name, AssocItemId)>, Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>) {
++        (
++            Arc::new(ImplData { target_trait, self_ty, items, is_negative, attribute_calls }),
++            Arc::new(diagnostics),
++        )
 +    }
 +
 +    pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
 +        self.attribute_calls.iter().flat_map(|it| it.iter()).copied()
 +    }
 +}
 +
 +#[derive(Debug, Clone, PartialEq, Eq)]
 +pub struct Macro2Data {
 +    pub name: Name,
 +    pub visibility: RawVisibility,
 +}
 +
 +impl Macro2Data {
 +    pub(crate) fn macro2_data_query(db: &dyn DefDatabase, makro: Macro2Id) -> Arc<Macro2Data> {
 +        let loc = makro.lookup(db);
 +        let item_tree = loc.id.item_tree(db);
 +        let makro = &item_tree[loc.id.value];
 +
 +        Arc::new(Macro2Data {
 +            name: makro.name.clone(),
 +            visibility: item_tree[makro.visibility].clone(),
 +        })
 +    }
 +}
 +#[derive(Debug, Clone, PartialEq, Eq)]
 +pub struct MacroRulesData {
 +    pub name: Name,
 +    pub macro_export: bool,
 +}
 +
 +impl MacroRulesData {
 +    pub(crate) fn macro_rules_data_query(
 +        db: &dyn DefDatabase,
 +        makro: MacroRulesId,
 +    ) -> Arc<MacroRulesData> {
 +        let loc = makro.lookup(db);
 +        let item_tree = loc.id.item_tree(db);
 +        let makro = &item_tree[loc.id.value];
 +
 +        let macro_export = item_tree
 +            .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into())
 +            .by_key("macro_export")
 +            .exists();
 +
 +        Arc::new(MacroRulesData { name: makro.name.clone(), macro_export })
 +    }
 +}
 +#[derive(Debug, Clone, PartialEq, Eq)]
 +pub struct ProcMacroData {
 +    pub name: Name,
 +    /// Derive helpers, if this is a derive
 +    pub helpers: Option<Box<[Name]>>,
 +}
 +
 +impl ProcMacroData {
 +    pub(crate) fn proc_macro_data_query(
 +        db: &dyn DefDatabase,
 +        makro: ProcMacroId,
 +    ) -> Arc<ProcMacroData> {
 +        let loc = makro.lookup(db);
 +        let item_tree = loc.id.item_tree(db);
 +        let makro = &item_tree[loc.id.value];
 +
 +        let (name, helpers) = if let Some(def) = item_tree
 +            .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into())
 +            .parse_proc_macro_decl(&makro.name)
 +        {
 +            (
 +                def.name,
 +                match def.kind {
 +                    ProcMacroKind::CustomDerive { helpers } => Some(helpers),
 +                    ProcMacroKind::FnLike | ProcMacroKind::Attr => None,
 +                },
 +            )
 +        } else {
 +            // eeeh...
 +            stdx::never!("proc macro declaration is not a proc macro");
 +            (makro.name.clone(), None)
 +        };
 +        Arc::new(ProcMacroData { name, helpers })
 +    }
 +}
 +
 +#[derive(Debug, Clone, PartialEq, Eq)]
 +pub struct ConstData {
 +    /// `None` for `const _: () = ();`
 +    pub name: Option<Name>,
 +    pub type_ref: Interned<TypeRef>,
 +    pub visibility: RawVisibility,
 +}
 +
 +impl ConstData {
 +    pub(crate) fn const_data_query(db: &dyn DefDatabase, konst: ConstId) -> Arc<ConstData> {
 +        let loc = konst.lookup(db);
 +        let item_tree = loc.id.item_tree(db);
 +        let konst = &item_tree[loc.id.value];
 +        let visibility = if let ItemContainerId::TraitId(trait_id) = loc.container {
 +            db.trait_data(trait_id).visibility.clone()
 +        } else {
 +            item_tree[konst.visibility].clone()
 +        };
 +
 +        Arc::new(ConstData {
 +            name: konst.name.clone(),
 +            type_ref: konst.type_ref.clone(),
 +            visibility,
 +        })
 +    }
 +}
 +
 +#[derive(Debug, Clone, PartialEq, Eq)]
 +pub struct StaticData {
 +    pub name: Name,
 +    pub type_ref: Interned<TypeRef>,
 +    pub visibility: RawVisibility,
 +    pub mutable: bool,
 +    pub is_extern: bool,
 +}
 +
 +impl StaticData {
 +    pub(crate) fn static_data_query(db: &dyn DefDatabase, konst: StaticId) -> Arc<StaticData> {
 +        let loc = konst.lookup(db);
 +        let item_tree = loc.id.item_tree(db);
 +        let statik = &item_tree[loc.id.value];
 +
 +        Arc::new(StaticData {
 +            name: statik.name.clone(),
 +            type_ref: statik.type_ref.clone(),
 +            visibility: item_tree[statik.visibility].clone(),
 +            mutable: statik.mutable,
 +            is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)),
 +        })
 +    }
 +}
 +
 +struct AssocItemCollector<'a> {
 +    db: &'a dyn DefDatabase,
 +    module_id: ModuleId,
 +    def_map: Arc<DefMap>,
++    inactive_diagnostics: Vec<DefDiagnostic>,
 +    container: ItemContainerId,
 +    expander: Expander,
 +
 +    items: Vec<(Name, AssocItemId)>,
 +    attr_calls: Vec<(AstId<ast::Item>, MacroCallId)>,
 +}
 +
 +impl<'a> AssocItemCollector<'a> {
 +    fn new(
 +        db: &'a dyn DefDatabase,
 +        module_id: ModuleId,
 +        file_id: HirFileId,
 +        container: ItemContainerId,
 +    ) -> Self {
 +        Self {
 +            db,
 +            module_id,
 +            def_map: module_id.def_map(db),
 +            container,
 +            expander: Expander::new(db, file_id, module_id),
 +            items: Vec::new(),
 +            attr_calls: Vec::new(),
++            inactive_diagnostics: Vec::new(),
 +        }
 +    }
 +
 +    fn finish(
 +        self,
++    ) -> (
++        Vec<(Name, AssocItemId)>,
++        Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
++        Vec<DefDiagnostic>,
++    ) {
 +        (
 +            self.items,
 +            if self.attr_calls.is_empty() { None } else { Some(Box::new(self.attr_calls)) },
++            self.inactive_diagnostics,
 +        )
 +    }
 +
 +    // FIXME: proc-macro diagnostics
 +    fn collect(&mut self, item_tree: &ItemTree, tree_id: TreeId, assoc_items: &[AssocItem]) {
 +        let container = self.container;
 +        self.items.reserve(assoc_items.len());
 +
 +        'items: for &item in assoc_items {
 +            let attrs = item_tree.attrs(self.db, self.module_id.krate, ModItem::from(item).into());
 +            if !attrs.is_cfg_enabled(self.expander.cfg_options()) {
++                self.inactive_diagnostics.push(DefDiagnostic::unconfigured_code(
++                    self.module_id.local_id,
++                    InFile::new(self.expander.current_file_id(), item.ast_id(&item_tree).upcast()),
++                    attrs.cfg().unwrap(),
++                    self.expander.cfg_options().clone(),
++                ));
 +                continue;
 +            }
 +
 +            'attrs: for attr in &*attrs {
 +                let ast_id =
 +                    AstId::new(self.expander.current_file_id(), item.ast_id(&item_tree).upcast());
 +                let ast_id_with_path = AstIdWithPath { path: (*attr.path).clone(), ast_id };
 +
 +                if let Ok(ResolvedAttr::Macro(call_id)) = self.def_map.resolve_attr_macro(
 +                    self.db,
 +                    self.module_id.local_id,
 +                    ast_id_with_path,
 +                    attr,
 +                ) {
 +                    self.attr_calls.push((ast_id, call_id));
 +                    // If proc attribute macro expansion is disabled, skip expanding it here
 +                    if !self.db.enable_proc_attr_macros() {
 +                        continue 'attrs;
 +                    }
 +                    let loc = self.db.lookup_intern_macro_call(call_id);
 +                    if let MacroDefKind::ProcMacro(exp, ..) = loc.def.kind {
 +                        // If there's no expander for the proc macro (e.g. the
 +                        // proc macro is ignored, or building the proc macro
 +                        // crate failed), skip expansion like we would if it was
 +                        // disabled. This is analogous to the handling in
 +                        // `DefCollector::collect_macros`.
 +                        if exp.is_dummy() {
 +                            continue 'attrs;
 +                        }
 +                    }
 +                    match self.expander.enter_expand_id::<ast::MacroItems>(self.db, call_id) {
 +                        ExpandResult { value: Some((mark, _)), .. } => {
 +                            self.collect_macro_items(mark);
 +                            continue 'items;
 +                        }
 +                        ExpandResult { .. } => {}
 +                    }
 +                }
 +            }
 +
 +            match item {
 +                AssocItem::Function(id) => {
 +                    let item = &item_tree[id];
 +
 +                    let def =
 +                        FunctionLoc { container, id: ItemTreeId::new(tree_id, id) }.intern(self.db);
 +                    self.items.push((item.name.clone(), def.into()));
 +                }
 +                AssocItem::Const(id) => {
 +                    let item = &item_tree[id];
 +
 +                    let name = match item.name.clone() {
 +                        Some(name) => name,
 +                        None => continue,
 +                    };
 +                    let def =
 +                        ConstLoc { container, id: ItemTreeId::new(tree_id, id) }.intern(self.db);
 +                    self.items.push((name, def.into()));
 +                }
 +                AssocItem::TypeAlias(id) => {
 +                    let item = &item_tree[id];
 +
 +                    let def = TypeAliasLoc { container, id: ItemTreeId::new(tree_id, id) }
 +                        .intern(self.db);
 +                    self.items.push((item.name.clone(), def.into()));
 +                }
 +                AssocItem::MacroCall(call) => {
 +                    if let Some(root) = self.db.parse_or_expand(self.expander.current_file_id()) {
 +                        let call = &item_tree[call];
 +
 +                        let ast_id_map = self.db.ast_id_map(self.expander.current_file_id());
 +                        let call = ast_id_map.get(call.ast_id).to_node(&root);
 +                        let _cx = stdx::panic_context::enter(format!(
 +                            "collect_items MacroCall: {}",
 +                            call
 +                        ));
 +                        let res = self.expander.enter_expand::<ast::MacroItems>(self.db, call);
 +
 +                        if let Ok(ExpandResult { value: Some((mark, _)), .. }) = res {
 +                            self.collect_macro_items(mark);
 +                        }
 +                    }
 +                }
 +            }
 +        }
 +    }
 +
 +    fn collect_macro_items(&mut self, mark: Mark) {
 +        let tree_id = item_tree::TreeId::new(self.expander.current_file_id(), None);
 +        let item_tree = tree_id.item_tree(self.db);
 +        let iter: SmallVec<[_; 2]> =
 +            item_tree.top_level_items().iter().filter_map(ModItem::as_assoc_item).collect();
 +
 +        self.collect(&item_tree, tree_id, &iter);
 +
 +        self.expander.exit(self.db, mark);
 +    }
 +}
index df6dcb024b5eafb9b71b12d7517903b075da81de,0000000000000000000000000000000000000000..40b2f734b7117192afa1fb51b52de20be9a250c8
mode 100644,000000..100644
--- /dev/null
@@@ -1,243 -1,0 +1,250 @@@
-     nameres::DefMap,
 +//! Defines database & queries for name resolution.
 +use std::sync::Arc;
 +
 +use base_db::{salsa, CrateId, SourceDatabase, Upcast};
 +use either::Either;
 +use hir_expand::{db::AstDatabase, HirFileId};
 +use la_arena::ArenaMap;
 +use syntax::{ast, AstPtr, SmolStr};
 +
 +use crate::{
 +    adt::{EnumData, StructData},
 +    attr::{Attrs, AttrsWithOwner},
 +    body::{scope::ExprScopes, Body, BodySourceMap},
 +    data::{
 +        ConstData, FunctionData, ImplData, Macro2Data, MacroRulesData, ProcMacroData, StaticData,
 +        TraitData, TypeAliasData,
 +    },
 +    generics::GenericParams,
 +    import_map::ImportMap,
 +    intern::Interned,
 +    item_tree::{AttrOwner, ItemTree},
 +    lang_item::{LangItemTarget, LangItems},
++    nameres::{diagnostics::DefDiagnostic, DefMap},
 +    visibility::{self, Visibility},
 +    AttrDefId, BlockId, BlockLoc, ConstId, ConstLoc, DefWithBodyId, EnumId, EnumLoc, ExternBlockId,
 +    ExternBlockLoc, FunctionId, FunctionLoc, GenericDefId, ImplId, ImplLoc, LocalEnumVariantId,
 +    LocalFieldId, Macro2Id, Macro2Loc, MacroRulesId, MacroRulesLoc, ProcMacroId, ProcMacroLoc,
 +    StaticId, StaticLoc, StructId, StructLoc, TraitId, TraitLoc, TypeAliasId, TypeAliasLoc,
 +    UnionId, UnionLoc, VariantId,
 +};
 +
 +#[salsa::query_group(InternDatabaseStorage)]
 +pub trait InternDatabase: SourceDatabase {
 +    #[salsa::interned]
 +    fn intern_function(&self, loc: FunctionLoc) -> FunctionId;
 +    #[salsa::interned]
 +    fn intern_struct(&self, loc: StructLoc) -> StructId;
 +    #[salsa::interned]
 +    fn intern_union(&self, loc: UnionLoc) -> UnionId;
 +    #[salsa::interned]
 +    fn intern_enum(&self, loc: EnumLoc) -> EnumId;
 +    #[salsa::interned]
 +    fn intern_const(&self, loc: ConstLoc) -> ConstId;
 +    #[salsa::interned]
 +    fn intern_static(&self, loc: StaticLoc) -> StaticId;
 +    #[salsa::interned]
 +    fn intern_trait(&self, loc: TraitLoc) -> TraitId;
 +    #[salsa::interned]
 +    fn intern_type_alias(&self, loc: TypeAliasLoc) -> TypeAliasId;
 +    #[salsa::interned]
 +    fn intern_impl(&self, loc: ImplLoc) -> ImplId;
 +    #[salsa::interned]
 +    fn intern_extern_block(&self, loc: ExternBlockLoc) -> ExternBlockId;
 +    #[salsa::interned]
 +    fn intern_block(&self, loc: BlockLoc) -> BlockId;
 +    #[salsa::interned]
 +    fn intern_macro2(&self, loc: Macro2Loc) -> Macro2Id;
 +    #[salsa::interned]
 +    fn intern_proc_macro(&self, loc: ProcMacroLoc) -> ProcMacroId;
 +    #[salsa::interned]
 +    fn intern_macro_rules(&self, loc: MacroRulesLoc) -> MacroRulesId;
 +}
 +
 +#[salsa::query_group(DefDatabaseStorage)]
 +pub trait DefDatabase: InternDatabase + AstDatabase + Upcast<dyn AstDatabase> {
 +    #[salsa::input]
 +    fn enable_proc_attr_macros(&self) -> bool;
 +
 +    #[salsa::invoke(ItemTree::file_item_tree_query)]
 +    fn file_item_tree(&self, file_id: HirFileId) -> Arc<ItemTree>;
 +
 +    #[salsa::invoke(crate_def_map_wait)]
 +    #[salsa::transparent]
 +    fn crate_def_map(&self, krate: CrateId) -> Arc<DefMap>;
 +
 +    #[salsa::invoke(DefMap::crate_def_map_query)]
 +    fn crate_def_map_query(&self, krate: CrateId) -> Arc<DefMap>;
 +
 +    /// Computes the block-level `DefMap`, returning `None` when `block` doesn't contain any inner
 +    /// items directly.
 +    ///
 +    /// For example:
 +    ///
 +    /// ```
 +    /// fn f() { // (0)
 +    ///     { // (1)
 +    ///         fn inner() {}
 +    ///     }
 +    /// }
 +    /// ```
 +    ///
 +    /// The `block_def_map` for block 0 would return `None`, while `block_def_map` of block 1 would
 +    /// return a `DefMap` containing `inner`.
 +    #[salsa::invoke(DefMap::block_def_map_query)]
 +    fn block_def_map(&self, block: BlockId) -> Option<Arc<DefMap>>;
 +
 +    #[salsa::invoke(StructData::struct_data_query)]
 +    fn struct_data(&self, id: StructId) -> Arc<StructData>;
 +
 +    #[salsa::invoke(StructData::union_data_query)]
 +    fn union_data(&self, id: UnionId) -> Arc<StructData>;
 +
 +    #[salsa::invoke(EnumData::enum_data_query)]
 +    fn enum_data(&self, e: EnumId) -> Arc<EnumData>;
 +
 +    #[salsa::invoke(ImplData::impl_data_query)]
 +    fn impl_data(&self, e: ImplId) -> Arc<ImplData>;
 +
++    #[salsa::invoke(ImplData::impl_data_with_diagnostics_query)]
++    fn impl_data_with_diagnostics(&self, e: ImplId) -> (Arc<ImplData>, Arc<Vec<DefDiagnostic>>);
++
 +    #[salsa::invoke(TraitData::trait_data_query)]
 +    fn trait_data(&self, e: TraitId) -> Arc<TraitData>;
 +
++    #[salsa::invoke(TraitData::trait_data_with_diagnostics_query)]
++    fn trait_data_with_diagnostics(&self, tr: TraitId)
++        -> (Arc<TraitData>, Arc<Vec<DefDiagnostic>>);
++
 +    #[salsa::invoke(TypeAliasData::type_alias_data_query)]
 +    fn type_alias_data(&self, e: TypeAliasId) -> Arc<TypeAliasData>;
 +
 +    #[salsa::invoke(FunctionData::fn_data_query)]
 +    fn function_data(&self, func: FunctionId) -> Arc<FunctionData>;
 +
 +    #[salsa::invoke(ConstData::const_data_query)]
 +    fn const_data(&self, konst: ConstId) -> Arc<ConstData>;
 +
 +    #[salsa::invoke(StaticData::static_data_query)]
 +    fn static_data(&self, konst: StaticId) -> Arc<StaticData>;
 +
 +    #[salsa::invoke(Macro2Data::macro2_data_query)]
 +    fn macro2_data(&self, makro: Macro2Id) -> Arc<Macro2Data>;
 +
 +    #[salsa::invoke(MacroRulesData::macro_rules_data_query)]
 +    fn macro_rules_data(&self, makro: MacroRulesId) -> Arc<MacroRulesData>;
 +
 +    #[salsa::invoke(ProcMacroData::proc_macro_data_query)]
 +    fn proc_macro_data(&self, makro: ProcMacroId) -> Arc<ProcMacroData>;
 +
 +    #[salsa::invoke(Body::body_with_source_map_query)]
 +    fn body_with_source_map(&self, def: DefWithBodyId) -> (Arc<Body>, Arc<BodySourceMap>);
 +
 +    #[salsa::invoke(Body::body_query)]
 +    fn body(&self, def: DefWithBodyId) -> Arc<Body>;
 +
 +    #[salsa::invoke(ExprScopes::expr_scopes_query)]
 +    fn expr_scopes(&self, def: DefWithBodyId) -> Arc<ExprScopes>;
 +
 +    #[salsa::invoke(GenericParams::generic_params_query)]
 +    fn generic_params(&self, def: GenericDefId) -> Interned<GenericParams>;
 +
 +    #[salsa::invoke(Attrs::variants_attrs_query)]
 +    fn variants_attrs(&self, def: EnumId) -> Arc<ArenaMap<LocalEnumVariantId, Attrs>>;
 +
 +    #[salsa::invoke(Attrs::fields_attrs_query)]
 +    fn fields_attrs(&self, def: VariantId) -> Arc<ArenaMap<LocalFieldId, Attrs>>;
 +
 +    #[salsa::invoke(crate::attr::variants_attrs_source_map)]
 +    fn variants_attrs_source_map(
 +        &self,
 +        def: EnumId,
 +    ) -> Arc<ArenaMap<LocalEnumVariantId, AstPtr<ast::Variant>>>;
 +
 +    #[salsa::invoke(crate::attr::fields_attrs_source_map)]
 +    fn fields_attrs_source_map(
 +        &self,
 +        def: VariantId,
 +    ) -> Arc<ArenaMap<LocalFieldId, Either<AstPtr<ast::TupleField>, AstPtr<ast::RecordField>>>>;
 +
 +    #[salsa::invoke(AttrsWithOwner::attrs_query)]
 +    fn attrs(&self, def: AttrDefId) -> AttrsWithOwner;
 +
 +    #[salsa::invoke(LangItems::crate_lang_items_query)]
 +    fn crate_lang_items(&self, krate: CrateId) -> Arc<LangItems>;
 +
 +    #[salsa::invoke(LangItems::lang_item_query)]
 +    fn lang_item(&self, start_crate: CrateId, item: SmolStr) -> Option<LangItemTarget>;
 +
 +    #[salsa::invoke(ImportMap::import_map_query)]
 +    fn import_map(&self, krate: CrateId) -> Arc<ImportMap>;
 +
 +    #[salsa::invoke(visibility::field_visibilities_query)]
 +    fn field_visibilities(&self, var: VariantId) -> Arc<ArenaMap<LocalFieldId, Visibility>>;
 +
 +    // FIXME: unify function_visibility and const_visibility?
 +    #[salsa::invoke(visibility::function_visibility_query)]
 +    fn function_visibility(&self, def: FunctionId) -> Visibility;
 +
 +    #[salsa::invoke(visibility::const_visibility_query)]
 +    fn const_visibility(&self, def: ConstId) -> Visibility;
 +
 +    #[salsa::transparent]
 +    fn crate_limits(&self, crate_id: CrateId) -> CrateLimits;
 +
 +    fn crate_supports_no_std(&self, crate_id: CrateId) -> bool;
 +}
 +
 +fn crate_def_map_wait(db: &dyn DefDatabase, krate: CrateId) -> Arc<DefMap> {
 +    let _p = profile::span("crate_def_map:wait");
 +    db.crate_def_map_query(krate)
 +}
 +
 +pub struct CrateLimits {
 +    /// The maximum depth for potentially infinitely-recursive compile-time operations like macro expansion or auto-dereference.
 +    pub recursion_limit: u32,
 +}
 +
 +fn crate_limits(db: &dyn DefDatabase, crate_id: CrateId) -> CrateLimits {
 +    let def_map = db.crate_def_map(crate_id);
 +
 +    CrateLimits {
 +        // 128 is the default in rustc.
 +        recursion_limit: def_map.recursion_limit().unwrap_or(128),
 +    }
 +}
 +
 +fn crate_supports_no_std(db: &dyn DefDatabase, crate_id: CrateId) -> bool {
 +    let file = db.crate_graph()[crate_id].root_file_id;
 +    let item_tree = db.file_item_tree(file.into());
 +    let attrs = item_tree.raw_attrs(AttrOwner::TopLevel);
 +    for attr in &**attrs {
 +        match attr.path().as_ident().and_then(|id| id.as_text()) {
 +            Some(ident) if ident == "no_std" => return true,
 +            Some(ident) if ident == "cfg_attr" => {}
 +            _ => continue,
 +        }
 +
 +        // This is a `cfg_attr`; check if it could possibly expand to `no_std`.
 +        // Syntax is: `#[cfg_attr(condition(cfg, style), attr0, attr1, <...>)]`
 +        let tt = match attr.token_tree_value() {
 +            Some(tt) => &tt.token_trees,
 +            None => continue,
 +        };
 +
 +        let segments = tt.split(|tt| match tt {
 +            tt::TokenTree::Leaf(tt::Leaf::Punct(p)) if p.char == ',' => true,
 +            _ => false,
 +        });
 +        for output in segments.skip(1) {
 +            match output {
 +                [tt::TokenTree::Leaf(tt::Leaf::Ident(ident))] if ident.text == "no_std" => {
 +                    return true
 +                }
 +                _ => {}
 +            }
 +        }
 +    }
 +
 +    false
 +}
index 0d01f6d0aba3449517dca190319af8dedeb57b4e,0000000000000000000000000000000000000000..ed7e920fd2b83a00ac28b69fca443dc6723a83d1
mode 100644,000000..100644
--- /dev/null
@@@ -1,137 -1,0 +1,137 @@@
-     pub(super) fn unconfigured_code(
 +//! Diagnostics emitted during DefMap construction.
 +
 +use base_db::CrateId;
 +use cfg::{CfgExpr, CfgOptions};
 +use hir_expand::MacroCallKind;
 +use la_arena::Idx;
 +use syntax::ast;
 +
 +use crate::{
 +    attr::AttrId,
 +    item_tree::{self, ItemTreeId},
 +    nameres::LocalModuleId,
 +    path::ModPath,
 +    AstId,
 +};
 +
 +#[derive(Debug, PartialEq, Eq)]
 +pub enum DefDiagnosticKind {
 +    UnresolvedModule { ast: AstId<ast::Module>, candidates: Box<[String]> },
 +
 +    UnresolvedExternCrate { ast: AstId<ast::ExternCrate> },
 +
 +    UnresolvedImport { id: ItemTreeId<item_tree::Import>, index: Idx<ast::UseTree> },
 +
 +    UnconfiguredCode { ast: AstId<ast::Item>, cfg: CfgExpr, opts: CfgOptions },
 +
 +    UnresolvedProcMacro { ast: MacroCallKind, krate: CrateId },
 +
 +    UnresolvedMacroCall { ast: MacroCallKind, path: ModPath },
 +
 +    MacroError { ast: MacroCallKind, message: String },
 +
 +    UnimplementedBuiltinMacro { ast: AstId<ast::Macro> },
 +
 +    InvalidDeriveTarget { ast: AstId<ast::Item>, id: u32 },
 +
 +    MalformedDerive { ast: AstId<ast::Adt>, id: u32 },
 +}
 +
 +#[derive(Debug, PartialEq, Eq)]
 +pub struct DefDiagnostic {
 +    pub in_module: LocalModuleId,
 +    pub kind: DefDiagnosticKind,
 +}
 +
 +impl DefDiagnostic {
 +    pub(super) fn unresolved_module(
 +        container: LocalModuleId,
 +        declaration: AstId<ast::Module>,
 +        candidates: Box<[String]>,
 +    ) -> Self {
 +        Self {
 +            in_module: container,
 +            kind: DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates },
 +        }
 +    }
 +
 +    pub(super) fn unresolved_extern_crate(
 +        container: LocalModuleId,
 +        declaration: AstId<ast::ExternCrate>,
 +    ) -> Self {
 +        Self {
 +            in_module: container,
 +            kind: DefDiagnosticKind::UnresolvedExternCrate { ast: declaration },
 +        }
 +    }
 +
 +    pub(super) fn unresolved_import(
 +        container: LocalModuleId,
 +        id: ItemTreeId<item_tree::Import>,
 +        index: Idx<ast::UseTree>,
 +    ) -> Self {
 +        Self { in_module: container, kind: DefDiagnosticKind::UnresolvedImport { id, index } }
 +    }
 +
++    pub fn unconfigured_code(
 +        container: LocalModuleId,
 +        ast: AstId<ast::Item>,
 +        cfg: CfgExpr,
 +        opts: CfgOptions,
 +    ) -> Self {
 +        Self { in_module: container, kind: DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } }
 +    }
 +
 +    pub(super) fn unresolved_proc_macro(
 +        container: LocalModuleId,
 +        ast: MacroCallKind,
 +        krate: CrateId,
 +    ) -> Self {
 +        Self { in_module: container, kind: DefDiagnosticKind::UnresolvedProcMacro { ast, krate } }
 +    }
 +
 +    pub(super) fn macro_error(
 +        container: LocalModuleId,
 +        ast: MacroCallKind,
 +        message: String,
 +    ) -> Self {
 +        Self { in_module: container, kind: DefDiagnosticKind::MacroError { ast, message } }
 +    }
 +
 +    pub(super) fn unresolved_macro_call(
 +        container: LocalModuleId,
 +        ast: MacroCallKind,
 +        path: ModPath,
 +    ) -> Self {
 +        Self { in_module: container, kind: DefDiagnosticKind::UnresolvedMacroCall { ast, path } }
 +    }
 +
 +    pub(super) fn unimplemented_builtin_macro(
 +        container: LocalModuleId,
 +        ast: AstId<ast::Macro>,
 +    ) -> Self {
 +        Self { in_module: container, kind: DefDiagnosticKind::UnimplementedBuiltinMacro { ast } }
 +    }
 +
 +    pub(super) fn invalid_derive_target(
 +        container: LocalModuleId,
 +        ast: AstId<ast::Item>,
 +        id: AttrId,
 +    ) -> Self {
 +        Self {
 +            in_module: container,
 +            kind: DefDiagnosticKind::InvalidDeriveTarget { ast, id: id.ast_index },
 +        }
 +    }
 +
 +    pub(super) fn malformed_derive(
 +        container: LocalModuleId,
 +        ast: AstId<ast::Adt>,
 +        id: AttrId,
 +    ) -> Self {
 +        Self {
 +            in_module: container,
 +            kind: DefDiagnosticKind::MalformedDerive { ast, id: id.ast_index },
 +        }
 +    }
 +}
index 9cdc18d6b66fdfb07ba53e85dcb00a65cc4aede7,0000000000000000000000000000000000000000..b7908bddaa1cf785d392cf1e7f4449eee0f64e82
mode 100644,000000..100644
--- /dev/null
@@@ -1,245 -1,0 +1,245 @@@
- use rustc_hash::FxHashSet;
 +//! Database used for testing `hir_def`.
 +
 +use std::{
 +    fmt, panic,
 +    sync::{Arc, Mutex},
 +};
 +
 +use base_db::{
 +    salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, FilePosition,
 +    SourceDatabase, Upcast,
 +};
 +use hir_expand::{db::AstDatabase, InFile};
-     fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
++use stdx::hash::NoHashHashSet;
 +use syntax::{algo, ast, AstNode};
 +
 +use crate::{
 +    db::DefDatabase,
 +    nameres::{DefMap, ModuleSource},
 +    src::HasSource,
 +    LocalModuleId, Lookup, ModuleDefId, ModuleId,
 +};
 +
 +#[salsa::database(
 +    base_db::SourceDatabaseExtStorage,
 +    base_db::SourceDatabaseStorage,
 +    hir_expand::db::AstDatabaseStorage,
 +    crate::db::InternDatabaseStorage,
 +    crate::db::DefDatabaseStorage
 +)]
 +pub(crate) struct TestDB {
 +    storage: salsa::Storage<TestDB>,
 +    events: Mutex<Option<Vec<salsa::Event>>>,
 +}
 +
 +impl Default for TestDB {
 +    fn default() -> Self {
 +        let mut this = Self { storage: Default::default(), events: Default::default() };
 +        this.set_enable_proc_attr_macros(true);
 +        this
 +    }
 +}
 +
 +impl Upcast<dyn AstDatabase> for TestDB {
 +    fn upcast(&self) -> &(dyn AstDatabase + 'static) {
 +        &*self
 +    }
 +}
 +
 +impl Upcast<dyn DefDatabase> for TestDB {
 +    fn upcast(&self) -> &(dyn DefDatabase + 'static) {
 +        &*self
 +    }
 +}
 +
 +impl salsa::Database for TestDB {
 +    fn salsa_event(&self, event: salsa::Event) {
 +        let mut events = self.events.lock().unwrap();
 +        if let Some(events) = &mut *events {
 +            events.push(event);
 +        }
 +    }
 +}
 +
 +impl fmt::Debug for TestDB {
 +    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 +        f.debug_struct("TestDB").finish()
 +    }
 +}
 +
 +impl panic::RefUnwindSafe for TestDB {}
 +
 +impl FileLoader for TestDB {
 +    fn file_text(&self, file_id: FileId) -> Arc<String> {
 +        FileLoaderDelegate(self).file_text(file_id)
 +    }
 +    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
 +        FileLoaderDelegate(self).resolve_path(path)
 +    }
++    fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
 +        FileLoaderDelegate(self).relevant_crates(file_id)
 +    }
 +}
 +
 +impl TestDB {
 +    pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
 +        for &krate in self.relevant_crates(file_id).iter() {
 +            let crate_def_map = self.crate_def_map(krate);
 +            for (local_id, data) in crate_def_map.modules() {
 +                if data.origin.file_id() == Some(file_id) {
 +                    return crate_def_map.module_id(local_id);
 +                }
 +            }
 +        }
 +        panic!("Can't find module for file")
 +    }
 +
 +    pub(crate) fn module_at_position(&self, position: FilePosition) -> ModuleId {
 +        let file_module = self.module_for_file(position.file_id);
 +        let mut def_map = file_module.def_map(self);
 +        let module = self.mod_at_position(&def_map, position);
 +
 +        def_map = match self.block_at_position(&def_map, position) {
 +            Some(it) => it,
 +            None => return def_map.module_id(module),
 +        };
 +        loop {
 +            let new_map = self.block_at_position(&def_map, position);
 +            match new_map {
 +                Some(new_block) if !Arc::ptr_eq(&new_block, &def_map) => {
 +                    def_map = new_block;
 +                }
 +                _ => {
 +                    // FIXME: handle `mod` inside block expression
 +                    return def_map.module_id(def_map.root());
 +                }
 +            }
 +        }
 +    }
 +
 +    /// Finds the smallest/innermost module in `def_map` containing `position`.
 +    fn mod_at_position(&self, def_map: &DefMap, position: FilePosition) -> LocalModuleId {
 +        let mut size = None;
 +        let mut res = def_map.root();
 +        for (module, data) in def_map.modules() {
 +            let src = data.definition_source(self);
 +            if src.file_id != position.file_id.into() {
 +                continue;
 +            }
 +
 +            let range = match src.value {
 +                ModuleSource::SourceFile(it) => it.syntax().text_range(),
 +                ModuleSource::Module(it) => it.syntax().text_range(),
 +                ModuleSource::BlockExpr(it) => it.syntax().text_range(),
 +            };
 +
 +            if !range.contains(position.offset) {
 +                continue;
 +            }
 +
 +            let new_size = match size {
 +                None => range.len(),
 +                Some(size) => {
 +                    if range.len() < size {
 +                        range.len()
 +                    } else {
 +                        size
 +                    }
 +                }
 +            };
 +
 +            if size != Some(new_size) {
 +                cov_mark::hit!(submodule_in_testdb);
 +                size = Some(new_size);
 +                res = module;
 +            }
 +        }
 +
 +        res
 +    }
 +
 +    fn block_at_position(&self, def_map: &DefMap, position: FilePosition) -> Option<Arc<DefMap>> {
 +        // Find the smallest (innermost) function in `def_map` containing the cursor.
 +        let mut size = None;
 +        let mut fn_def = None;
 +        for (_, module) in def_map.modules() {
 +            let file_id = module.definition_source(self).file_id;
 +            if file_id != position.file_id.into() {
 +                continue;
 +            }
 +            for decl in module.scope.declarations() {
 +                if let ModuleDefId::FunctionId(it) = decl {
 +                    let range = it.lookup(self).source(self).value.syntax().text_range();
 +
 +                    if !range.contains(position.offset) {
 +                        continue;
 +                    }
 +
 +                    let new_size = match size {
 +                        None => range.len(),
 +                        Some(size) => {
 +                            if range.len() < size {
 +                                range.len()
 +                            } else {
 +                                size
 +                            }
 +                        }
 +                    };
 +                    if size != Some(new_size) {
 +                        size = Some(new_size);
 +                        fn_def = Some(it);
 +                    }
 +                }
 +            }
 +        }
 +
 +        // Find the innermost block expression that has a `DefMap`.
 +        let def_with_body = fn_def?.into();
 +        let (_, source_map) = self.body_with_source_map(def_with_body);
 +        let scopes = self.expr_scopes(def_with_body);
 +        let root = self.parse(position.file_id);
 +
 +        let scope_iter = algo::ancestors_at_offset(&root.syntax_node(), position.offset)
 +            .filter_map(|node| {
 +                let block = ast::BlockExpr::cast(node)?;
 +                let expr = ast::Expr::from(block);
 +                let expr_id = source_map.node_expr(InFile::new(position.file_id.into(), &expr))?;
 +                let scope = scopes.scope_for(expr_id).unwrap();
 +                Some(scope)
 +            });
 +
 +        for scope in scope_iter {
 +            let containing_blocks =
 +                scopes.scope_chain(Some(scope)).filter_map(|scope| scopes.block(scope));
 +
 +            for block in containing_blocks {
 +                if let Some(def_map) = self.block_def_map(block) {
 +                    return Some(def_map);
 +                }
 +            }
 +        }
 +
 +        None
 +    }
 +
 +    pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
 +        *self.events.lock().unwrap() = Some(Vec::new());
 +        f();
 +        self.events.lock().unwrap().take().unwrap()
 +    }
 +
 +    pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
 +        let events = self.log(f);
 +        events
 +            .into_iter()
 +            .filter_map(|e| match e.kind {
 +                // This is pretty horrible, but `Debug` is the only way to inspect
 +                // QueryDescriptor at the moment.
 +                salsa::EventKind::WillExecute { database_key } => {
 +                    Some(format!("{:?}", database_key.debug(self)))
 +                }
 +                _ => None,
 +            })
 +            .collect()
 +    }
 +}
index ae115c8c0da85ad7afa7a84dd4fbee7fbd01cc47,0000000000000000000000000000000000000000..3f6d0844e9c1f27afc10ece6dde6b3414c169937
mode 100644,000000..100644
--- /dev/null
@@@ -1,1769 -1,0 +1,1774 @@@
-     cell::{Cell, RefCell},
 +//! Methods for lowering the HIR to types. There are two main cases here:
 +//!
 +//!  - Lowering a type reference like `&usize` or `Option<foo::bar::Baz>` to a
 +//!    type: The entry point for this is `Ty::from_hir`.
 +//!  - Building the type for an item: This happens through the `type_for_def` query.
 +//!
 +//! This usually involves resolving names, collecting generic arguments etc.
 +use std::{
-                 let (expander, recursion_start) = {
-                     let mut expander = self.expander.borrow_mut();
-                     if expander.is_some() {
-                         (Some(expander), false)
-                     } else {
-                         *expander = Some(Expander::new(
-                             self.db.upcast(),
-                             macro_call.file_id,
-                             self.resolver.module(),
-                         ));
-                         (Some(expander), true)
++    cell::{Cell, RefCell, RefMut},
 +    iter,
 +    sync::Arc,
 +};
 +
 +use base_db::CrateId;
 +use chalk_ir::{
 +    cast::Cast, fold::Shift, fold::TypeFoldable, interner::HasInterner, Mutability, Safety,
 +};
 +
 +use hir_def::{
 +    adt::StructKind,
 +    body::{Expander, LowerCtx},
 +    builtin_type::BuiltinType,
 +    generics::{
 +        TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
 +    },
 +    intern::Interned,
 +    lang_item::lang_attr,
 +    path::{GenericArg, ModPath, Path, PathKind, PathSegment, PathSegments},
 +    resolver::{HasResolver, Resolver, TypeNs},
 +    type_ref::{
 +        ConstScalarOrPath, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef,
 +    },
 +    AdtId, AssocItemId, ConstId, ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId,
 +    HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup, StaticId, StructId, TraitId,
 +    TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId,
 +};
 +use hir_expand::{name::Name, ExpandResult};
 +use itertools::Either;
 +use la_arena::ArenaMap;
 +use rustc_hash::FxHashSet;
 +use smallvec::SmallVec;
 +use stdx::{impl_from, never};
 +use syntax::{ast, SmolStr};
 +
 +use crate::{
 +    all_super_traits,
 +    consteval::{intern_const_scalar, path_to_const, unknown_const, unknown_const_as_generic},
 +    db::HirDatabase,
 +    make_binders,
 +    mapping::ToChalk,
 +    static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
 +    utils::Generics,
 +    utils::{all_super_trait_refs, associated_type_by_name_including_super_traits, generics},
 +    AliasEq, AliasTy, Binders, BoundVar, CallableSig, Const, DebruijnIndex, DynTy, FnPointer,
 +    FnSig, FnSubst, GenericArgData, ImplTraitId, Interner, ParamKind, PolyFnSig, ProjectionTy,
 +    QuantifiedWhereClause, QuantifiedWhereClauses, ReturnTypeImplTrait, ReturnTypeImplTraits,
 +    Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyKind, WhereClause,
 +};
 +
 +#[derive(Debug)]
 +pub struct TyLoweringContext<'a> {
 +    pub db: &'a dyn HirDatabase,
 +    pub resolver: &'a Resolver,
 +    in_binders: DebruijnIndex,
 +    /// Note: Conceptually, it's thinkable that we could be in a location where
 +    /// some type params should be represented as placeholders, and others
 +    /// should be converted to variables. I think in practice, this isn't
 +    /// possible currently, so this should be fine for now.
 +    pub type_param_mode: ParamLoweringMode,
 +    pub impl_trait_mode: ImplTraitLoweringMode,
 +    impl_trait_counter: Cell<u16>,
 +    /// When turning `impl Trait` into opaque types, we have to collect the
 +    /// bounds at the same time to get the IDs correct (without becoming too
 +    /// complicated). I don't like using interior mutability (as for the
 +    /// counter), but I've tried and failed to make the lifetimes work for
 +    /// passing around a `&mut TyLoweringContext`. The core problem is that
 +    /// we're grouping the mutable data (the counter and this field) together
 +    /// with the immutable context (the references to the DB and resolver).
 +    /// Splitting this up would be a possible fix.
 +    opaque_type_data: RefCell<Vec<ReturnTypeImplTrait>>,
 +    expander: RefCell<Option<Expander>>,
 +    /// Tracks types with explicit `?Sized` bounds.
 +    pub(crate) unsized_types: RefCell<FxHashSet<Ty>>,
 +}
 +
 +impl<'a> TyLoweringContext<'a> {
 +    pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver) -> Self {
 +        let impl_trait_counter = Cell::new(0);
 +        let impl_trait_mode = ImplTraitLoweringMode::Disallowed;
 +        let type_param_mode = ParamLoweringMode::Placeholder;
 +        let in_binders = DebruijnIndex::INNERMOST;
 +        let opaque_type_data = RefCell::new(Vec::new());
 +        Self {
 +            db,
 +            resolver,
 +            in_binders,
 +            impl_trait_mode,
 +            impl_trait_counter,
 +            type_param_mode,
 +            opaque_type_data,
 +            expander: RefCell::new(None),
 +            unsized_types: RefCell::default(),
 +        }
 +    }
 +
 +    pub fn with_debruijn<T>(
 +        &self,
 +        debruijn: DebruijnIndex,
 +        f: impl FnOnce(&TyLoweringContext<'_>) -> T,
 +    ) -> T {
 +        let opaque_ty_data_vec = self.opaque_type_data.take();
 +        let expander = self.expander.take();
 +        let unsized_types = self.unsized_types.take();
 +        let new_ctx = Self {
 +            in_binders: debruijn,
 +            impl_trait_counter: Cell::new(self.impl_trait_counter.get()),
 +            opaque_type_data: RefCell::new(opaque_ty_data_vec),
 +            expander: RefCell::new(expander),
 +            unsized_types: RefCell::new(unsized_types),
 +            ..*self
 +        };
 +        let result = f(&new_ctx);
 +        self.impl_trait_counter.set(new_ctx.impl_trait_counter.get());
 +        self.opaque_type_data.replace(new_ctx.opaque_type_data.into_inner());
 +        self.expander.replace(new_ctx.expander.into_inner());
 +        self.unsized_types.replace(new_ctx.unsized_types.into_inner());
 +        result
 +    }
 +
 +    pub fn with_shifted_in<T>(
 +        &self,
 +        debruijn: DebruijnIndex,
 +        f: impl FnOnce(&TyLoweringContext<'_>) -> T,
 +    ) -> T {
 +        self.with_debruijn(self.in_binders.shifted_in_from(debruijn), f)
 +    }
 +
 +    pub fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self {
 +        Self { impl_trait_mode, ..self }
 +    }
 +
 +    pub fn with_type_param_mode(self, type_param_mode: ParamLoweringMode) -> Self {
 +        Self { type_param_mode, ..self }
 +    }
 +}
 +
 +#[derive(Copy, Clone, Debug, PartialEq, Eq)]
 +pub enum ImplTraitLoweringMode {
 +    /// `impl Trait` gets lowered into an opaque type that doesn't unify with
 +    /// anything except itself. This is used in places where values flow 'out',
 +    /// i.e. for arguments of the function we're currently checking, and return
 +    /// types of functions we're calling.
 +    Opaque,
 +    /// `impl Trait` gets lowered into a type variable. Used for argument
 +    /// position impl Trait when inside the respective function, since it allows
 +    /// us to support that without Chalk.
 +    Param,
 +    /// `impl Trait` gets lowered into a variable that can unify with some
 +    /// type. This is used in places where values flow 'in', i.e. for arguments
 +    /// of functions we're calling, and the return type of the function we're
 +    /// currently checking.
 +    Variable,
 +    /// `impl Trait` is disallowed and will be an error.
 +    Disallowed,
 +}
 +
 +#[derive(Copy, Clone, Debug, PartialEq, Eq)]
 +pub enum ParamLoweringMode {
 +    Placeholder,
 +    Variable,
 +}
 +
 +impl<'a> TyLoweringContext<'a> {
 +    pub fn lower_ty(&self, type_ref: &TypeRef) -> Ty {
 +        self.lower_ty_ext(type_ref).0
 +    }
 +
 +    fn generics(&self) -> Generics {
 +        generics(
 +            self.db.upcast(),
 +            self.resolver
 +                .generic_def()
 +                .expect("there should be generics if there's a generic param"),
 +        )
 +    }
 +
 +    pub fn lower_ty_ext(&self, type_ref: &TypeRef) -> (Ty, Option<TypeNs>) {
 +        let mut res = None;
 +        let ty = match type_ref {
 +            TypeRef::Never => TyKind::Never.intern(Interner),
 +            TypeRef::Tuple(inner) => {
 +                let inner_tys = inner.iter().map(|tr| self.lower_ty(tr));
 +                TyKind::Tuple(inner_tys.len(), Substitution::from_iter(Interner, inner_tys))
 +                    .intern(Interner)
 +            }
 +            TypeRef::Path(path) => {
 +                let (ty, res_) = self.lower_path(path);
 +                res = res_;
 +                ty
 +            }
 +            TypeRef::RawPtr(inner, mutability) => {
 +                let inner_ty = self.lower_ty(inner);
 +                TyKind::Raw(lower_to_chalk_mutability(*mutability), inner_ty).intern(Interner)
 +            }
 +            TypeRef::Array(inner, len) => {
 +                let inner_ty = self.lower_ty(inner);
 +                let const_len = const_or_path_to_chalk(
 +                    self.db,
 +                    self.resolver,
 +                    TyBuilder::usize(),
 +                    len,
 +                    self.type_param_mode,
 +                    || self.generics(),
 +                    self.in_binders,
 +                );
 +
 +                TyKind::Array(inner_ty, const_len).intern(Interner)
 +            }
 +            TypeRef::Slice(inner) => {
 +                let inner_ty = self.lower_ty(inner);
 +                TyKind::Slice(inner_ty).intern(Interner)
 +            }
 +            TypeRef::Reference(inner, _, mutability) => {
 +                let inner_ty = self.lower_ty(inner);
 +                let lifetime = static_lifetime();
 +                TyKind::Ref(lower_to_chalk_mutability(*mutability), lifetime, inner_ty)
 +                    .intern(Interner)
 +            }
 +            TypeRef::Placeholder => TyKind::Error.intern(Interner),
 +            TypeRef::Fn(params, is_varargs) => {
 +                let substs = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
 +                    Substitution::from_iter(Interner, params.iter().map(|(_, tr)| ctx.lower_ty(tr)))
 +                });
 +                TyKind::Function(FnPointer {
 +                    num_binders: 0, // FIXME lower `for<'a> fn()` correctly
 +                    sig: FnSig { abi: (), safety: Safety::Safe, variadic: *is_varargs },
 +                    substitution: FnSubst(substs),
 +                })
 +                .intern(Interner)
 +            }
 +            TypeRef::DynTrait(bounds) => self.lower_dyn_trait(bounds),
 +            TypeRef::ImplTrait(bounds) => {
 +                match self.impl_trait_mode {
 +                    ImplTraitLoweringMode::Opaque => {
 +                        let idx = self.impl_trait_counter.get();
 +                        self.impl_trait_counter.set(idx + 1);
 +                        let func = match self.resolver.generic_def() {
 +                            Some(GenericDefId::FunctionId(f)) => f,
 +                            _ => panic!("opaque impl trait lowering in non-function"),
 +                        };
 +
 +                        assert!(idx as usize == self.opaque_type_data.borrow().len());
 +                        // this dance is to make sure the data is in the right
 +                        // place even if we encounter more opaque types while
 +                        // lowering the bounds
 +                        self.opaque_type_data.borrow_mut().push(ReturnTypeImplTrait {
 +                            bounds: crate::make_single_type_binders(Vec::new()),
 +                        });
 +                        // We don't want to lower the bounds inside the binders
 +                        // we're currently in, because they don't end up inside
 +                        // those binders. E.g. when we have `impl Trait<impl
 +                        // OtherTrait<T>>`, the `impl OtherTrait<T>` can't refer
 +                        // to the self parameter from `impl Trait`, and the
 +                        // bounds aren't actually stored nested within each
 +                        // other, but separately. So if the `T` refers to a type
 +                        // parameter of the outer function, it's just one binder
 +                        // away instead of two.
 +                        let actual_opaque_type_data = self
 +                            .with_debruijn(DebruijnIndex::INNERMOST, |ctx| {
 +                                ctx.lower_impl_trait(bounds, func)
 +                            });
 +                        self.opaque_type_data.borrow_mut()[idx as usize] = actual_opaque_type_data;
 +
 +                        let impl_trait_id = ImplTraitId::ReturnTypeImplTrait(func, idx);
 +                        let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
 +                        let generics = generics(self.db.upcast(), func.into());
 +                        let parameters = generics.bound_vars_subst(self.db, self.in_binders);
 +                        TyKind::OpaqueType(opaque_ty_id, parameters).intern(Interner)
 +                    }
 +                    ImplTraitLoweringMode::Param => {
 +                        let idx = self.impl_trait_counter.get();
 +                        // FIXME we're probably doing something wrong here
 +                        self.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16);
 +                        if let Some(def) = self.resolver.generic_def() {
 +                            let generics = generics(self.db.upcast(), def);
 +                            let param = generics
 +                                .iter()
 +                                .filter(|(_, data)| {
 +                                    matches!(
 +                                        data,
 +                                        TypeOrConstParamData::TypeParamData(data)
 +                                        if data.provenance == TypeParamProvenance::ArgumentImplTrait
 +                                    )
 +                                })
 +                                .nth(idx as usize)
 +                                .map_or(TyKind::Error, |(id, _)| {
 +                                    TyKind::Placeholder(to_placeholder_idx(self.db, id))
 +                                });
 +                            param.intern(Interner)
 +                        } else {
 +                            TyKind::Error.intern(Interner)
 +                        }
 +                    }
 +                    ImplTraitLoweringMode::Variable => {
 +                        let idx = self.impl_trait_counter.get();
 +                        // FIXME we're probably doing something wrong here
 +                        self.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16);
 +                        let (
 +                            parent_params,
 +                            self_params,
 +                            list_params,
 +                            const_params,
 +                            _impl_trait_params,
 +                        ) = if let Some(def) = self.resolver.generic_def() {
 +                            let generics = generics(self.db.upcast(), def);
 +                            generics.provenance_split()
 +                        } else {
 +                            (0, 0, 0, 0, 0)
 +                        };
 +                        TyKind::BoundVar(BoundVar::new(
 +                            self.in_binders,
 +                            idx as usize + parent_params + self_params + list_params + const_params,
 +                        ))
 +                        .intern(Interner)
 +                    }
 +                    ImplTraitLoweringMode::Disallowed => {
 +                        // FIXME: report error
 +                        TyKind::Error.intern(Interner)
 +                    }
 +                }
 +            }
 +            TypeRef::Macro(macro_call) => {
-                 let ty = if let Some(mut expander) = expander {
-                     let expander_mut = expander.as_mut().unwrap();
++                let (mut expander, recursion_start) = {
++                    match RefMut::filter_map(self.expander.borrow_mut(), Option::as_mut) {
++                        Ok(expander) => (expander, false),
++                        Err(expander) => (
++                            RefMut::map(expander, |it| {
++                                it.insert(Expander::new(
++                                    self.db.upcast(),
++                                    macro_call.file_id,
++                                    self.resolver.module(),
++                                ))
++                            }),
++                            true,
++                        ),
 +                    }
 +                };
-                     match expander_mut.enter_expand::<ast::Type>(self.db.upcast(), macro_call) {
++                let ty = {
 +                    let macro_call = macro_call.to_node(self.db.upcast());
-                             let ctx =
-                                 LowerCtx::new(self.db.upcast(), expander_mut.current_file_id());
++                    match expander.enter_expand::<ast::Type>(self.db.upcast(), macro_call) {
 +                        Ok(ExpandResult { value: Some((mark, expanded)), .. }) => {
-                 } else {
-                     None
++                            let ctx = LowerCtx::new(self.db.upcast(), expander.current_file_id());
 +                            let type_ref = TypeRef::from_ast(&ctx, expanded);
 +
 +                            drop(expander);
 +                            let ty = self.lower_ty(&type_ref);
 +
 +                            self.expander
 +                                .borrow_mut()
 +                                .as_mut()
 +                                .unwrap()
 +                                .exit(self.db.upcast(), mark);
 +                            Some(ty)
 +                        }
 +                        _ => None,
 +                    }
-                         let idx = generics.param_idx(param_id.into()).expect("matching generics");
 +                };
 +                if recursion_start {
 +                    *self.expander.borrow_mut() = None;
 +                }
 +                ty.unwrap_or_else(|| TyKind::Error.intern(Interner))
 +            }
 +            TypeRef::Error => TyKind::Error.intern(Interner),
 +        };
 +        (ty, res)
 +    }
 +
 +    /// This is only for `generic_predicates_for_param`, where we can't just
 +    /// lower the self types of the predicates since that could lead to cycles.
 +    /// So we just check here if the `type_ref` resolves to a generic param, and which.
 +    fn lower_ty_only_param(&self, type_ref: &TypeRef) -> Option<TypeOrConstParamId> {
 +        let path = match type_ref {
 +            TypeRef::Path(path) => path,
 +            _ => return None,
 +        };
 +        if path.type_anchor().is_some() {
 +            return None;
 +        }
 +        if path.segments().len() > 1 {
 +            return None;
 +        }
 +        let resolution =
 +            match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
 +                Some((it, None)) => it,
 +                _ => return None,
 +            };
 +        match resolution {
 +            TypeNs::GenericParam(param_id) => Some(param_id.into()),
 +            _ => None,
 +        }
 +    }
 +
 +    pub(crate) fn lower_ty_relative_path(
 +        &self,
 +        ty: Ty,
 +        // We need the original resolution to lower `Self::AssocTy` correctly
 +        res: Option<TypeNs>,
 +        remaining_segments: PathSegments<'_>,
 +    ) -> (Ty, Option<TypeNs>) {
 +        match remaining_segments.len() {
 +            0 => (ty, res),
 +            1 => {
 +                // resolve unselected assoc types
 +                let segment = remaining_segments.first().unwrap();
 +                (self.select_associated_type(res, segment), None)
 +            }
 +            _ => {
 +                // FIXME report error (ambiguous associated type)
 +                (TyKind::Error.intern(Interner), None)
 +            }
 +        }
 +    }
 +
 +    pub(crate) fn lower_partly_resolved_path(
 +        &self,
 +        resolution: TypeNs,
 +        resolved_segment: PathSegment<'_>,
 +        remaining_segments: PathSegments<'_>,
 +        infer_args: bool,
 +    ) -> (Ty, Option<TypeNs>) {
 +        let ty = match resolution {
 +            TypeNs::TraitId(trait_) => {
 +                let ty = match remaining_segments.len() {
 +                    1 => {
 +                        let trait_ref =
 +                            self.lower_trait_ref_from_resolved_path(trait_, resolved_segment, None);
 +                        let segment = remaining_segments.first().unwrap();
 +                        let found = self
 +                            .db
 +                            .trait_data(trait_ref.hir_trait_id())
 +                            .associated_type_by_name(segment.name);
 +                        match found {
 +                            Some(associated_ty) => {
 +                                // FIXME handle type parameters on the segment
 +                                TyKind::Alias(AliasTy::Projection(ProjectionTy {
 +                                    associated_ty_id: to_assoc_type_id(associated_ty),
 +                                    substitution: trait_ref.substitution,
 +                                }))
 +                                .intern(Interner)
 +                            }
 +                            None => {
 +                                // FIXME: report error (associated type not found)
 +                                TyKind::Error.intern(Interner)
 +                            }
 +                        }
 +                    }
 +                    0 => {
 +                        // Trait object type without `dyn`; this should be handled upstream. See
 +                        // `lower_path()`.
 +                        stdx::never!("unexpected fully resolved trait path");
 +                        TyKind::Error.intern(Interner)
 +                    }
 +                    _ => {
 +                        // FIXME report error (ambiguous associated type)
 +                        TyKind::Error.intern(Interner)
 +                    }
 +                };
 +                return (ty, None);
 +            }
 +            TypeNs::GenericParam(param_id) => {
 +                let generics = generics(
 +                    self.db.upcast(),
 +                    self.resolver.generic_def().expect("generics in scope"),
 +                );
 +                match self.type_param_mode {
 +                    ParamLoweringMode::Placeholder => {
 +                        TyKind::Placeholder(to_placeholder_idx(self.db, param_id.into()))
 +                    }
 +                    ParamLoweringMode::Variable => {
++                        let idx = match generics.param_idx(param_id.into()) {
++                            None => {
++                                never!("no matching generics");
++                                return (TyKind::Error.intern(Interner), None);
++                            }
++                            Some(idx) => idx,
++                        };
++
 +                        TyKind::BoundVar(BoundVar::new(self.in_binders, idx))
 +                    }
 +                }
 +                .intern(Interner)
 +            }
 +            TypeNs::SelfType(impl_id) => {
 +                let generics = generics(self.db.upcast(), impl_id.into());
 +                let substs = match self.type_param_mode {
 +                    ParamLoweringMode::Placeholder => generics.placeholder_subst(self.db),
 +                    ParamLoweringMode::Variable => {
 +                        generics.bound_vars_subst(self.db, self.in_binders)
 +                    }
 +                };
 +                self.db.impl_self_ty(impl_id).substitute(Interner, &substs)
 +            }
 +            TypeNs::AdtSelfType(adt) => {
 +                let generics = generics(self.db.upcast(), adt.into());
 +                let substs = match self.type_param_mode {
 +                    ParamLoweringMode::Placeholder => generics.placeholder_subst(self.db),
 +                    ParamLoweringMode::Variable => {
 +                        generics.bound_vars_subst(self.db, self.in_binders)
 +                    }
 +                };
 +                self.db.ty(adt.into()).substitute(Interner, &substs)
 +            }
 +
 +            TypeNs::AdtId(it) => self.lower_path_inner(resolved_segment, it.into(), infer_args),
 +            TypeNs::BuiltinType(it) => {
 +                self.lower_path_inner(resolved_segment, it.into(), infer_args)
 +            }
 +            TypeNs::TypeAliasId(it) => {
 +                self.lower_path_inner(resolved_segment, it.into(), infer_args)
 +            }
 +            // FIXME: report error
 +            TypeNs::EnumVariantId(_) => return (TyKind::Error.intern(Interner), None),
 +        };
 +        self.lower_ty_relative_path(ty, Some(resolution), remaining_segments)
 +    }
 +
 +    pub(crate) fn lower_path(&self, path: &Path) -> (Ty, Option<TypeNs>) {
 +        // Resolve the path (in type namespace)
 +        if let Some(type_ref) = path.type_anchor() {
 +            let (ty, res) = self.lower_ty_ext(type_ref);
 +            return self.lower_ty_relative_path(ty, res, path.segments());
 +        }
 +
 +        let (resolution, remaining_index) =
 +            match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
 +                Some(it) => it,
 +                None => return (TyKind::Error.intern(Interner), None),
 +            };
 +
 +        if matches!(resolution, TypeNs::TraitId(_)) && remaining_index.is_none() {
 +            // trait object type without dyn
 +            let bound = TypeBound::Path(path.clone(), TraitBoundModifier::None);
 +            let ty = self.lower_dyn_trait(&[Interned::new(bound)]);
 +            return (ty, None);
 +        }
 +
 +        let (resolved_segment, remaining_segments) = match remaining_index {
 +            None => (
 +                path.segments().last().expect("resolved path has at least one element"),
 +                PathSegments::EMPTY,
 +            ),
 +            Some(i) => (path.segments().get(i - 1).unwrap(), path.segments().skip(i)),
 +        };
 +        self.lower_partly_resolved_path(resolution, resolved_segment, remaining_segments, false)
 +    }
 +
 +    fn select_associated_type(&self, res: Option<TypeNs>, segment: PathSegment<'_>) -> Ty {
 +        let (def, res) = match (self.resolver.generic_def(), res) {
 +            (Some(def), Some(res)) => (def, res),
 +            _ => return TyKind::Error.intern(Interner),
 +        };
 +        let ty = named_associated_type_shorthand_candidates(
 +            self.db,
 +            def,
 +            res,
 +            Some(segment.name.clone()),
 +            move |name, t, associated_ty| {
 +                if name == segment.name {
 +                    let substs = match self.type_param_mode {
 +                        ParamLoweringMode::Placeholder => {
 +                            // if we're lowering to placeholders, we have to put
 +                            // them in now
 +                            let generics = generics(
 +                                self.db.upcast(),
 +                                self.resolver
 +                                    .generic_def()
 +                                    .expect("there should be generics if there's a generic param"),
 +                            );
 +                            let s = generics.placeholder_subst(self.db);
 +                            s.apply(t.substitution.clone(), Interner)
 +                        }
 +                        ParamLoweringMode::Variable => t.substitution.clone(),
 +                    };
 +                    // We need to shift in the bound vars, since
 +                    // associated_type_shorthand_candidates does not do that
 +                    let substs = substs.shifted_in_from(Interner, self.in_binders);
 +                    // FIXME handle type parameters on the segment
 +                    Some(
 +                        TyKind::Alias(AliasTy::Projection(ProjectionTy {
 +                            associated_ty_id: to_assoc_type_id(associated_ty),
 +                            substitution: substs,
 +                        }))
 +                        .intern(Interner),
 +                    )
 +                } else {
 +                    None
 +                }
 +            },
 +        );
 +
 +        ty.unwrap_or_else(|| TyKind::Error.intern(Interner))
 +    }
 +
 +    fn lower_path_inner(
 +        &self,
 +        segment: PathSegment<'_>,
 +        typeable: TyDefId,
 +        infer_args: bool,
 +    ) -> Ty {
 +        let generic_def = match typeable {
 +            TyDefId::BuiltinType(_) => None,
 +            TyDefId::AdtId(it) => Some(it.into()),
 +            TyDefId::TypeAliasId(it) => Some(it.into()),
 +        };
 +        let substs = self.substs_from_path_segment(segment, generic_def, infer_args, None);
 +        self.db.ty(typeable).substitute(Interner, &substs)
 +    }
 +
 +    /// Collect generic arguments from a path into a `Substs`. See also
 +    /// `create_substs_for_ast_path` and `def_to_ty` in rustc.
 +    pub(super) fn substs_from_path(
 +        &self,
 +        path: &Path,
 +        // Note that we don't call `db.value_type(resolved)` here,
 +        // `ValueTyDefId` is just a convenient way to pass generics and
 +        // special-case enum variants
 +        resolved: ValueTyDefId,
 +        infer_args: bool,
 +    ) -> Substitution {
 +        let last = path.segments().last().expect("path should have at least one segment");
 +        let (segment, generic_def) = match resolved {
 +            ValueTyDefId::FunctionId(it) => (last, Some(it.into())),
 +            ValueTyDefId::StructId(it) => (last, Some(it.into())),
 +            ValueTyDefId::UnionId(it) => (last, Some(it.into())),
 +            ValueTyDefId::ConstId(it) => (last, Some(it.into())),
 +            ValueTyDefId::StaticId(_) => (last, None),
 +            ValueTyDefId::EnumVariantId(var) => {
 +                // the generic args for an enum variant may be either specified
 +                // on the segment referring to the enum, or on the segment
 +                // referring to the variant. So `Option::<T>::None` and
 +                // `Option::None::<T>` are both allowed (though the former is
 +                // preferred). See also `def_ids_for_path_segments` in rustc.
 +                let len = path.segments().len();
 +                let penultimate = len.checked_sub(2).and_then(|idx| path.segments().get(idx));
 +                let segment = match penultimate {
 +                    Some(segment) if segment.args_and_bindings.is_some() => segment,
 +                    _ => last,
 +                };
 +                (segment, Some(var.parent.into()))
 +            }
 +        };
 +        self.substs_from_path_segment(segment, generic_def, infer_args, None)
 +    }
 +
 +    fn substs_from_path_segment(
 +        &self,
 +        segment: PathSegment<'_>,
 +        def_generic: Option<GenericDefId>,
 +        infer_args: bool,
 +        explicit_self_ty: Option<Ty>,
 +    ) -> Substitution {
 +        let mut substs = Vec::new();
 +        let def_generics = if let Some(def) = def_generic {
 +            generics(self.db.upcast(), def)
 +        } else {
 +            return Substitution::empty(Interner);
 +        };
 +        let (parent_params, self_params, type_params, const_params, impl_trait_params) =
 +            def_generics.provenance_split();
 +        let total_len =
 +            parent_params + self_params + type_params + const_params + impl_trait_params;
 +
 +        let ty_error = GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner);
 +
 +        let mut def_generic_iter = def_generics.iter_id();
 +
 +        for _ in 0..parent_params {
 +            if let Some(eid) = def_generic_iter.next() {
 +                match eid {
 +                    Either::Left(_) => substs.push(ty_error.clone()),
 +                    Either::Right(x) => {
 +                        substs.push(unknown_const_as_generic(self.db.const_param_ty(x)))
 +                    }
 +                }
 +            }
 +        }
 +
 +        let fill_self_params = || {
 +            for x in explicit_self_ty
 +                .into_iter()
 +                .map(|x| GenericArgData::Ty(x).intern(Interner))
 +                .chain(iter::repeat(ty_error.clone()))
 +                .take(self_params)
 +            {
 +                if let Some(id) = def_generic_iter.next() {
 +                    assert!(id.is_left());
 +                    substs.push(x);
 +                }
 +            }
 +        };
 +        let mut had_explicit_args = false;
 +
 +        if let Some(generic_args) = &segment.args_and_bindings {
 +            if !generic_args.has_self_type {
 +                fill_self_params();
 +            }
 +            let expected_num = if generic_args.has_self_type {
 +                self_params + type_params + const_params
 +            } else {
 +                type_params + const_params
 +            };
 +            let skip = if generic_args.has_self_type && self_params == 0 { 1 } else { 0 };
 +            // if args are provided, it should be all of them, but we can't rely on that
 +            for arg in generic_args
 +                .args
 +                .iter()
 +                .filter(|arg| !matches!(arg, GenericArg::Lifetime(_)))
 +                .skip(skip)
 +                .take(expected_num)
 +            {
 +                if let Some(id) = def_generic_iter.next() {
 +                    if let Some(x) = generic_arg_to_chalk(
 +                        self.db,
 +                        id,
 +                        arg,
 +                        &mut (),
 +                        |_, type_ref| self.lower_ty(type_ref),
 +                        |_, c, ty| {
 +                            const_or_path_to_chalk(
 +                                self.db,
 +                                &self.resolver,
 +                                ty,
 +                                c,
 +                                self.type_param_mode,
 +                                || self.generics(),
 +                                self.in_binders,
 +                            )
 +                        },
 +                    ) {
 +                        had_explicit_args = true;
 +                        substs.push(x);
 +                    } else {
 +                        // we just filtered them out
 +                        never!("Unexpected lifetime argument");
 +                    }
 +                }
 +            }
 +        } else {
 +            fill_self_params();
 +        }
 +
 +        // handle defaults. In expression or pattern path segments without
 +        // explicitly specified type arguments, missing type arguments are inferred
 +        // (i.e. defaults aren't used).
 +        if !infer_args || had_explicit_args {
 +            if let Some(def_generic) = def_generic {
 +                let defaults = self.db.generic_defaults(def_generic);
 +                assert_eq!(total_len, defaults.len());
 +
 +                for default_ty in defaults.iter().skip(substs.len()) {
 +                    // each default can depend on the previous parameters
 +                    let substs_so_far = Substitution::from_iter(Interner, substs.clone());
 +                    if let Some(_id) = def_generic_iter.next() {
 +                        substs.push(default_ty.clone().substitute(Interner, &substs_so_far));
 +                    }
 +                }
 +            }
 +        }
 +
 +        // add placeholders for args that were not provided
 +        // FIXME: emit diagnostics in contexts where this is not allowed
 +        for eid in def_generic_iter {
 +            match eid {
 +                Either::Left(_) => substs.push(ty_error.clone()),
 +                Either::Right(x) => {
 +                    substs.push(unknown_const_as_generic(self.db.const_param_ty(x)))
 +                }
 +            }
 +        }
 +        // If this assert fails, it means you pushed into `substs` but didn't call `.next()` on `def_generic_iter`
 +        assert_eq!(substs.len(), total_len);
 +
 +        Substitution::from_iter(Interner, substs)
 +    }
 +
 +    fn lower_trait_ref_from_path(
 +        &self,
 +        path: &Path,
 +        explicit_self_ty: Option<Ty>,
 +    ) -> Option<TraitRef> {
 +        let resolved =
 +            match self.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), path.mod_path())? {
 +                TypeNs::TraitId(tr) => tr,
 +                _ => return None,
 +            };
 +        let segment = path.segments().last().expect("path should have at least one segment");
 +        Some(self.lower_trait_ref_from_resolved_path(resolved, segment, explicit_self_ty))
 +    }
 +
 +    pub(crate) fn lower_trait_ref_from_resolved_path(
 +        &self,
 +        resolved: TraitId,
 +        segment: PathSegment<'_>,
 +        explicit_self_ty: Option<Ty>,
 +    ) -> TraitRef {
 +        let substs = self.trait_ref_substs_from_path(segment, resolved, explicit_self_ty);
 +        TraitRef { trait_id: to_chalk_trait_id(resolved), substitution: substs }
 +    }
 +
 +    fn lower_trait_ref(
 +        &self,
 +        trait_ref: &HirTraitRef,
 +        explicit_self_ty: Option<Ty>,
 +    ) -> Option<TraitRef> {
 +        self.lower_trait_ref_from_path(&trait_ref.path, explicit_self_ty)
 +    }
 +
 +    fn trait_ref_substs_from_path(
 +        &self,
 +        segment: PathSegment<'_>,
 +        resolved: TraitId,
 +        explicit_self_ty: Option<Ty>,
 +    ) -> Substitution {
 +        self.substs_from_path_segment(segment, Some(resolved.into()), false, explicit_self_ty)
 +    }
 +
 +    pub(crate) fn lower_where_predicate(
 +        &'a self,
 +        where_predicate: &'a WherePredicate,
 +        ignore_bindings: bool,
 +    ) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
 +        match where_predicate {
 +            WherePredicate::ForLifetime { target, bound, .. }
 +            | WherePredicate::TypeBound { target, bound } => {
 +                let self_ty = match target {
 +                    WherePredicateTypeTarget::TypeRef(type_ref) => self.lower_ty(type_ref),
 +                    WherePredicateTypeTarget::TypeOrConstParam(param_id) => {
 +                        let generic_def = self.resolver.generic_def().expect("generics in scope");
 +                        let generics = generics(self.db.upcast(), generic_def);
 +                        let param_id = hir_def::TypeOrConstParamId {
 +                            parent: generic_def,
 +                            local_id: *param_id,
 +                        };
 +                        let placeholder = to_placeholder_idx(self.db, param_id);
 +                        match self.type_param_mode {
 +                            ParamLoweringMode::Placeholder => TyKind::Placeholder(placeholder),
 +                            ParamLoweringMode::Variable => {
 +                                let idx = generics.param_idx(param_id).expect("matching generics");
 +                                TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, idx))
 +                            }
 +                        }
 +                        .intern(Interner)
 +                    }
 +                };
 +                self.lower_type_bound(bound, self_ty, ignore_bindings)
 +                    .collect::<Vec<_>>()
 +                    .into_iter()
 +            }
 +            WherePredicate::Lifetime { .. } => vec![].into_iter(),
 +        }
 +    }
 +
 +    /// Lower a single `TypeBound` applied to `self_ty` into zero or more
 +    /// quantified where clauses.
 +    ///
 +    /// `ignore_bindings` suppresses lowering of associated type bindings
 +    /// (`Trait<Assoc = T>`); the `Implemented(..)` clause itself is still
 +    /// produced when applicable.
 +    pub(crate) fn lower_type_bound(
 +        &'a self,
 +        bound: &'a TypeBound,
 +        self_ty: Ty,
 +        ignore_bindings: bool,
 +    ) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
 +        // Keeps the lowered trait ref around so associated type bindings can
 +        // be lowered against it in the `chain` below.
 +        let mut bindings = None;
 +        let trait_ref = match bound {
 +            TypeBound::Path(path, TraitBoundModifier::None) => {
 +                bindings = self.lower_trait_ref_from_path(path, Some(self_ty));
 +                bindings
 +                    .clone()
 +                    .filter(|tr| {
 +                        // ignore `T: Drop` or `T: Destruct` bounds.
 +                        // - `T: ~const Drop` has a special meaning in Rust 1.61 that we don't implement.
 +                        //   (So ideally, we'd only ignore `~const Drop` here)
 +                        // - `Destruct` impls are built-in in 1.62 (current nightlies as of 08-04-2022), so until
 +                        //   the builtin impls are supported by Chalk, we ignore them here.
 +                        if let Some(lang) = lang_attr(self.db.upcast(), tr.hir_trait_id()) {
 +                            if lang == "drop" || lang == "destruct" {
 +                                return false;
 +                            }
 +                        }
 +                        true
 +                    })
 +                    .map(WhereClause::Implemented)
 +                    .map(crate::wrap_empty_binders)
 +            }
 +            TypeBound::Path(path, TraitBoundModifier::Maybe) => {
 +                let sized_trait = self
 +                    .db
 +                    .lang_item(self.resolver.krate(), SmolStr::new_inline("sized"))
 +                    .and_then(|lang_item| lang_item.as_trait());
 +                // Don't lower associated type bindings as the only possible relaxed trait bound
 +                // `?Sized` has none of them.
 +                // If we got another trait here, ignore the bound completely.
 +                let trait_id = self
 +                    .lower_trait_ref_from_path(path, Some(self_ty.clone()))
 +                    .map(|trait_ref| trait_ref.hir_trait_id());
 +                if trait_id == sized_trait {
 +                    // Record the type as explicitly unsized so that the
 +                    // implicit `Sized` predicate is not generated for it.
 +                    self.unsized_types.borrow_mut().insert(self_ty);
 +                }
 +                None
 +            }
 +            TypeBound::ForLifetime(_, path) => {
 +                // FIXME Don't silently drop the hrtb lifetimes here
 +                bindings = self.lower_trait_ref_from_path(path, Some(self_ty));
 +                bindings.clone().map(WhereClause::Implemented).map(crate::wrap_empty_binders)
 +            }
 +            TypeBound::Lifetime(_) => None,
 +            TypeBound::Error => None,
 +        };
 +        trait_ref.into_iter().chain(
 +            bindings
 +                .into_iter()
 +                .filter(move |_| !ignore_bindings)
 +                .flat_map(move |tr| self.assoc_type_bindings_from_type_bound(bound, tr)),
 +        )
 +    }
 +
 +    /// Lower the associated type bindings attached to the last path segment
 +    /// of `bound` (`Trait<Assoc = Ty>` and `Trait<Assoc: Bound>`), producing
 +    /// `AliasEq` clauses and, for `Assoc: Bound`, the bound clauses applied
 +    /// to the projection type.
 +    fn assoc_type_bindings_from_type_bound(
 +        &'a self,
 +        bound: &'a TypeBound,
 +        trait_ref: TraitRef,
 +    ) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
 +        // Only path-like bounds can carry bindings; `?Trait`, lifetimes and
 +        // error bounds never do.
 +        let last_segment = match bound {
 +            TypeBound::Path(path, TraitBoundModifier::None) | TypeBound::ForLifetime(_, path) => {
 +                path.segments().last()
 +            }
 +            TypeBound::Path(_, TraitBoundModifier::Maybe)
 +            | TypeBound::Error
 +            | TypeBound::Lifetime(_) => None,
 +        };
 +        last_segment
 +            .into_iter()
 +            .filter_map(|segment| segment.args_and_bindings)
 +            .flat_map(|args_and_bindings| &args_and_bindings.bindings)
 +            .flat_map(move |binding| {
 +                // Resolve the binding's name to an associated type, searching
 +                // the trait and its supertraits; unknown names yield nothing.
 +                let found = associated_type_by_name_including_super_traits(
 +                    self.db,
 +                    trait_ref.clone(),
 +                    &binding.name,
 +                );
 +                let (super_trait_ref, associated_ty) = match found {
 +                    None => return SmallVec::new(),
 +                    Some(t) => t,
 +                };
 +                let projection_ty = ProjectionTy {
 +                    associated_ty_id: to_assoc_type_id(associated_ty),
 +                    substitution: super_trait_ref.substitution,
 +                };
 +                // One predicate for `Assoc = Ty` (if present) plus one per
 +                // extra bound on the binding.
 +                let mut preds: SmallVec<[_; 1]> = SmallVec::with_capacity(
 +                    binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),
 +                );
 +                if let Some(type_ref) = &binding.type_ref {
 +                    let ty = self.lower_ty(type_ref);
 +                    let alias_eq =
 +                        AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
 +                    preds.push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
 +                }
 +                for bound in &binding.bounds {
 +                    preds.extend(self.lower_type_bound(
 +                        bound,
 +                        TyKind::Alias(AliasTy::Projection(projection_ty.clone())).intern(Interner),
 +                        false,
 +                    ));
 +                }
 +                preds
 +            })
 +    }
 +
 +    /// Lower the bounds of a `dyn Trait` type. The `dyn` self type is
 +    /// represented as bound variable 0 of a fresh binder, so the bounds are
 +    /// lowered with debruijn indices shifted in by one.
 +    fn lower_dyn_trait(&self, bounds: &[Interned<TypeBound>]) -> Ty {
 +        let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner);
 +        let bounds = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
 +            QuantifiedWhereClauses::from_iter(
 +                Interner,
 +                bounds.iter().flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false)),
 +            )
 +        });
 +        let bounds = crate::make_single_type_binders(bounds);
 +        // NOTE(review): the dyn type is given a 'static lifetime here; actual
 +        // lifetime bounds on the `dyn` are not represented.
 +        TyKind::Dyn(DynTy { bounds, lifetime: static_lifetime() }).intern(Interner)
 +    }
 +
 +    /// Lower the bounds of a return-position `impl Trait` in function `func`.
 +    /// As in `lower_dyn_trait`, the hidden self type is bound variable 0 of a
 +    /// fresh binder. An implicit `Sized` bound is added unless the self type
 +    /// was recorded as explicitly unsized (via a `?Sized` bound).
 +    fn lower_impl_trait(
 +        &self,
 +        bounds: &[Interned<TypeBound>],
 +        func: FunctionId,
 +    ) -> ReturnTypeImplTrait {
 +        cov_mark::hit!(lower_rpit);
 +        let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner);
 +        let predicates = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
 +            let mut predicates: Vec<_> = bounds
 +                .iter()
 +                .flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false))
 +                .collect();
 +
 +            if !ctx.unsized_types.borrow().contains(&self_ty) {
 +                // Look up the `Sized` lang item in the function's crate and,
 +                // if it exists, append `self_ty: Sized`.
 +                let krate = func.lookup(ctx.db.upcast()).module(ctx.db.upcast()).krate();
 +                let sized_trait = ctx
 +                    .db
 +                    .lang_item(krate, SmolStr::new_inline("sized"))
 +                    .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
 +                let sized_clause = sized_trait.map(|trait_id| {
 +                    let clause = WhereClause::Implemented(TraitRef {
 +                        trait_id,
 +                        substitution: Substitution::from1(Interner, self_ty.clone()),
 +                    });
 +                    crate::wrap_empty_binders(clause)
 +                });
 +                predicates.extend(sized_clause.into_iter());
 +                predicates.shrink_to_fit();
 +            }
 +            predicates
 +        });
 +        ReturnTypeImplTrait { bounds: crate::make_single_type_binders(predicates) }
 +    }
 +}
 +
 +/// Count the number of `impl Trait` nodes occurring anywhere inside
 +/// `type_ref` (the whole type is walked recursively).
 +fn count_impl_traits(type_ref: &TypeRef) -> usize {
 +    let mut count = 0;
 +    type_ref.walk(&mut |type_ref| {
 +        if matches!(type_ref, TypeRef::ImplTrait(_)) {
 +            count += 1;
 +        }
 +    });
 +    count
 +}
 +
 +/// Build the signature of a callable item (function, struct or enum variant).
 +/// Dispatches to the matching `fn_sig_for_*` constructor-signature builder.
 +pub(crate) fn callable_item_sig(db: &dyn HirDatabase, def: CallableDefId) -> PolyFnSig {
 +    match def {
 +        CallableDefId::FunctionId(f) => fn_sig_for_fn(db, f),
 +        CallableDefId::StructId(s) => fn_sig_for_struct_constructor(db, s),
 +        CallableDefId::EnumVariantId(e) => fn_sig_for_enum_variant_constructor(db, e),
 +    }
 +}
 +
 +/// Enumerate candidates for resolving an associated type shorthand like
 +/// `T::Item`, invoking `cb` for each candidate until it returns `Some`.
 +/// Convenience wrapper that performs no filtering by associated type name.
 +pub fn associated_type_shorthand_candidates<R>(
 +    db: &dyn HirDatabase,
 +    def: GenericDefId,
 +    res: TypeNs,
 +    cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option<R>,
 +) -> Option<R> {
 +    named_associated_type_shorthand_candidates(db, def, res, None, cb)
 +}
 +
 +/// Like `associated_type_shorthand_candidates`, but optionally restricted to
 +/// predicates that could define `assoc_name` (used to limit the predicate
 +/// query and avoid needless cycles).
 +fn named_associated_type_shorthand_candidates<R>(
 +    db: &dyn HirDatabase,
 +    // If the type parameter is defined in an impl and we're in a method, there
 +    // might be additional where clauses to consider
 +    def: GenericDefId,
 +    res: TypeNs,
 +    assoc_name: Option<Name>,
 +    mut cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option<R>,
 +) -> Option<R> {
 +    // Walk `t` and all of its supertrait refs, calling `cb` on every
 +    // associated type alias found; stop at the first `Some`.
 +    let mut search = |t| {
 +        for t in all_super_trait_refs(db, t) {
 +            let data = db.trait_data(t.hir_trait_id());
 +
 +            for (name, assoc_id) in &data.items {
 +                if let AssocItemId::TypeAliasId(alias) = assoc_id {
 +                    if let Some(result) = cb(name, &t, *alias) {
 +                        return Some(result);
 +                    }
 +                }
 +            }
 +        }
 +        None
 +    };
 +
 +    match res {
 +        TypeNs::SelfType(impl_id) => search(
 +            // we're _in_ the impl -- the binders get added back later. Correct,
 +            // but it would be nice to make this more explicit
 +            db.impl_trait(impl_id)?.into_value_and_skipped_binders().0,
 +        ),
 +        TypeNs::GenericParam(param_id) => {
 +            // Search the trait bounds declared on the parameter itself.
 +            let predicates = db.generic_predicates_for_param(def, param_id.into(), assoc_name);
 +            let res = predicates.iter().find_map(|pred| match pred.skip_binders().skip_binders() {
 +                // FIXME: how to correctly handle higher-ranked bounds here?
 +                WhereClause::Implemented(tr) => search(
 +                    tr.clone()
 +                        .shifted_out_to(Interner, DebruijnIndex::ONE)
 +                        .expect("FIXME unexpected higher-ranked trait bound"),
 +                ),
 +                _ => None,
 +            });
 +            if let Some(_) = res {
 +                return res;
 +            }
 +            // Handle `Self::Type` referring to own associated type in trait definitions
 +            if let GenericDefId::TraitId(trait_id) = param_id.parent() {
 +                let generics = generics(db.upcast(), trait_id.into());
 +                if generics.params.type_or_consts[param_id.local_id()].is_trait_self() {
 +                    let trait_ref = TyBuilder::trait_ref(db, trait_id)
 +                        .fill_with_bound_vars(DebruijnIndex::INNERMOST, 0)
 +                        .build();
 +                    return search(trait_ref);
 +                }
 +            }
 +            None
 +        }
 +        _ => None,
 +    }
 +}
 +
 +/// Build the type of all specific fields of a struct or enum variant.
 +/// Each field type is lowered with generic parameters as bound variables and
 +/// wrapped in the variant's binders.
 +pub(crate) fn field_types_query(
 +    db: &dyn HirDatabase,
 +    variant_id: VariantId,
 +) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>> {
 +    let var_data = variant_id.variant_data(db.upcast());
 +    // Enum variants resolve against (and are generic over) their parent enum.
 +    let (resolver, def): (_, GenericDefId) = match variant_id {
 +        VariantId::StructId(it) => (it.resolver(db.upcast()), it.into()),
 +        VariantId::UnionId(it) => (it.resolver(db.upcast()), it.into()),
 +        VariantId::EnumVariantId(it) => (it.parent.resolver(db.upcast()), it.parent.into()),
 +    };
 +    let generics = generics(db.upcast(), def);
 +    let mut res = ArenaMap::default();
 +    let ctx =
 +        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
 +    for (field_id, field_data) in var_data.fields().iter() {
 +        res.insert(field_id, make_binders(db, &generics, ctx.lower_ty(&field_data.type_ref)));
 +    }
 +    Arc::new(res)
 +}
 +
 +/// This query exists only to be used when resolving short-hand associated types
 +/// like `T::Item`.
 +///
 +/// See the analogous query in rustc and its comment:
 +/// <https://github.com/rust-lang/rust/blob/9150f844e2624eb013ec78ca08c1d416e6644026/src/librustc_typeck/astconv.rs#L46>
 +/// This is a query mostly to handle cycles somewhat gracefully; e.g. the
 +/// following bounds are disallowed: `T: Foo<U::Item>, U: Foo<T::Item>`, but
 +/// these are fine: `T: Foo<U::Item>, U: Foo<()>`.
 +pub(crate) fn generic_predicates_for_param_query(
 +    db: &dyn HirDatabase,
 +    def: GenericDefId,
 +    param_id: TypeOrConstParamId,
 +    assoc_name: Option<Name>,
 +) -> Arc<[Binders<QuantifiedWhereClause>]> {
 +    let resolver = def.resolver(db.upcast());
 +    let ctx =
 +        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
 +    let generics = generics(db.upcast(), def);
 +    let mut predicates: Vec<_> = resolver
 +        .where_predicates_in_scope()
 +        // we have to filter out all other predicates *first*, before attempting to lower them
 +        .filter(|pred| match pred {
 +            WherePredicate::ForLifetime { target, bound, .. }
 +            | WherePredicate::TypeBound { target, bound, .. } => {
 +                // Keep only predicates whose target is exactly `param_id`.
 +                match target {
 +                    WherePredicateTypeTarget::TypeRef(type_ref) => {
 +                        if ctx.lower_ty_only_param(type_ref) != Some(param_id) {
 +                            return false;
 +                        }
 +                    }
 +                    &WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
 +                        let target_id = TypeOrConstParamId { parent: def, local_id };
 +                        if target_id != param_id {
 +                            return false;
 +                        }
 +                    }
 +                };
 +
 +                match &**bound {
 +                    TypeBound::ForLifetime(_, path) | TypeBound::Path(path, _) => {
 +                        // Only lower the bound if the trait could possibly define the associated
 +                        // type we're looking for.
 +
 +                        let assoc_name = match &assoc_name {
 +                            Some(it) => it,
 +                            None => return true,
 +                        };
 +                        let tr = match resolver
 +                            .resolve_path_in_type_ns_fully(db.upcast(), path.mod_path())
 +                        {
 +                            Some(TypeNs::TraitId(tr)) => tr,
 +                            _ => return false,
 +                        };
 +
 +                        // Accept the bound if the trait or any supertrait
 +                        // declares a type alias with the requested name.
 +                        all_super_traits(db.upcast(), tr).iter().any(|tr| {
 +                            db.trait_data(*tr).items.iter().any(|(name, item)| {
 +                                matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name
 +                            })
 +                        })
 +                    }
 +                    TypeBound::Lifetime(_) | TypeBound::Error => false,
 +                }
 +            }
 +            WherePredicate::Lifetime { .. } => false,
 +        })
 +        .flat_map(|pred| {
 +            // `ignore_bindings = true`: associated type bindings are not
 +            // needed (and could cycle) for this query.
 +            ctx.lower_where_predicate(pred, true).map(|p| make_binders(db, &generics, p))
 +        })
 +        .collect();
 +
 +    // Append implicit `Sized` predicates for params without a `?Sized` bound.
 +    let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
 +    let explicitly_unsized_tys = ctx.unsized_types.into_inner();
 +    let implicitly_sized_predicates =
 +        implicitly_sized_clauses(db, param_id.parent, &explicitly_unsized_tys, &subst, &resolver)
 +            .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p)));
 +    predicates.extend(implicitly_sized_predicates);
 +    predicates.into()
 +}
 +
 +/// Cycle recovery for `generic_predicates_for_param_query`: on a query cycle,
 +/// pretend the parameter has no predicates.
 +pub(crate) fn generic_predicates_for_param_recover(
 +    _db: &dyn HirDatabase,
 +    _cycle: &[String],
 +    _def: &GenericDefId,
 +    _param_id: &TypeOrConstParamId,
 +    _assoc_name: &Option<Name>,
 +) -> Arc<[Binders<QuantifiedWhereClause>]> {
 +    Arc::new([])
 +}
 +
 +/// Build the chalk `TraitEnvironment` for `def`: all where-clauses in scope
 +/// (lowered in placeholder mode), plus `Self: Trait<..>` for items inside a
 +/// trait, plus implicit `Sized` clauses, turned into from-env program clauses.
 +pub(crate) fn trait_environment_query(
 +    db: &dyn HirDatabase,
 +    def: GenericDefId,
 +) -> Arc<TraitEnvironment> {
 +    let resolver = def.resolver(db.upcast());
 +    let ctx =
 +        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Placeholder);
 +    let mut traits_in_scope = Vec::new();
 +    let mut clauses = Vec::new();
 +    for pred in resolver.where_predicates_in_scope() {
 +        for pred in ctx.lower_where_predicate(pred, false) {
 +            // Remember `(self type, trait)` pairs for trait-in-scope lookups.
 +            if let WhereClause::Implemented(tr) = &pred.skip_binders() {
 +                traits_in_scope.push((tr.self_type_parameter(Interner).clone(), tr.hir_trait_id()));
 +            }
 +            let program_clause: chalk_ir::ProgramClause<Interner> = pred.cast(Interner);
 +            clauses.push(program_clause.into_from_env_clause(Interner));
 +        }
 +    }
 +
 +    let container: Option<ItemContainerId> = match def {
 +        // FIXME: is there a function for this?
 +        GenericDefId::FunctionId(f) => Some(f.lookup(db.upcast()).container),
 +        GenericDefId::AdtId(_) => None,
 +        GenericDefId::TraitId(_) => None,
 +        GenericDefId::TypeAliasId(t) => Some(t.lookup(db.upcast()).container),
 +        GenericDefId::ImplId(_) => None,
 +        GenericDefId::EnumVariantId(_) => None,
 +        GenericDefId::ConstId(c) => Some(c.lookup(db.upcast()).container),
 +    };
 +    if let Some(ItemContainerId::TraitId(trait_id)) = container {
 +        // add `Self: Trait<T1, T2, ...>` to the environment in trait
 +        // function default implementations (and speculative code
 +        // inside consts or type aliases)
 +        cov_mark::hit!(trait_self_implements_self);
 +        let substs = TyBuilder::placeholder_subst(db, trait_id);
 +        let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution: substs };
 +        let pred = WhereClause::Implemented(trait_ref);
 +        let program_clause: chalk_ir::ProgramClause<Interner> = pred.cast(Interner);
 +        clauses.push(program_clause.into_from_env_clause(Interner));
 +    }
 +
 +    // Implicit `Sized` clauses for all generics lacking a `?Sized` bound.
 +    let subst = generics(db.upcast(), def).placeholder_subst(db);
 +    let explicitly_unsized_tys = ctx.unsized_types.into_inner();
 +    let implicitly_sized_clauses =
 +        implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver).map(|pred| {
 +            let program_clause: chalk_ir::ProgramClause<Interner> = pred.cast(Interner);
 +            program_clause.into_from_env_clause(Interner)
 +        });
 +    clauses.extend(implicitly_sized_clauses);
 +
 +    let krate = def.module(db.upcast()).krate();
 +
 +    let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses);
 +
 +    Arc::new(TraitEnvironment { krate, traits_from_clauses: traits_in_scope, env })
 +}
 +
 +/// Resolve the where clause(s) of an item with generics.
 +/// Lowers every where-predicate in scope (with generics as bound variables)
 +/// and appends implicit `Sized` predicates for generics without `?Sized`.
 +pub(crate) fn generic_predicates_query(
 +    db: &dyn HirDatabase,
 +    def: GenericDefId,
 +) -> Arc<[Binders<QuantifiedWhereClause>]> {
 +    let resolver = def.resolver(db.upcast());
 +    let ctx =
 +        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
 +    let generics = generics(db.upcast(), def);
 +
 +    let mut predicates = resolver
 +        .where_predicates_in_scope()
 +        .flat_map(|pred| {
 +            ctx.lower_where_predicate(pred, false).map(|p| make_binders(db, &generics, p))
 +        })
 +        .collect::<Vec<_>>();
 +
 +    let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
 +    let explicitly_unsized_tys = ctx.unsized_types.into_inner();
 +    let implicitly_sized_predicates =
 +        implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver)
 +            .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p)));
 +    predicates.extend(implicitly_sized_predicates);
 +    predicates.into()
 +}
 +
 +/// Generate implicit `: Sized` predicates for all generics that have no `?Sized` bound.
 +/// The exception is `Self` of a trait def, which is skipped.
 +fn implicitly_sized_clauses<'a>(
 +    db: &dyn HirDatabase,
 +    def: GenericDefId,
 +    explicitly_unsized_tys: &'a FxHashSet<Ty>,
 +    substitution: &'a Substitution,
 +    resolver: &Resolver,
 +) -> impl Iterator<Item = WhereClause> + 'a {
 +    // For traits, the first substitution element is the implicit `Self`
 +    // parameter; skip it (it may legitimately be unsized).
 +    let is_trait_def = matches!(def, GenericDefId::TraitId(..));
 +    let generic_args = &substitution.as_slice(Interner)[is_trait_def as usize..];
 +    let sized_trait = db
 +        .lang_item(resolver.krate(), SmolStr::new_inline("sized"))
 +        .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
 +
 +    // If the `Sized` lang item is missing, produce no clauses at all.
 +    sized_trait.into_iter().flat_map(move |sized_trait| {
 +        let implicitly_sized_tys = generic_args
 +            .iter()
 +            .filter_map(|generic_arg| generic_arg.ty(Interner))
 +            .filter(move |&self_ty| !explicitly_unsized_tys.contains(self_ty));
 +        implicitly_sized_tys.map(move |self_ty| {
 +            WhereClause::Implemented(TraitRef {
 +                trait_id: sized_trait,
 +                substitution: Substitution::from1(Interner, self_ty.clone()),
 +            })
 +        })
 +    })
 +}
 +
 +/// Resolve the default type params from generics
 +pub(crate) fn generic_defaults_query(
 +    db: &dyn HirDatabase,
 +    def: GenericDefId,
 +) -> Arc<[Binders<chalk_ir::GenericArg<Interner>>]> {
 +    let resolver = def.resolver(db.upcast());
 +    let ctx =
 +        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
 +    let generic_params = generics(db.upcast(), def);
 +
 +    let defaults = generic_params
 +        .iter()
 +        .enumerate()
 +        .map(|(idx, (id, p))| {
 +            let p = match p {
 +                TypeOrConstParamData::TypeParamData(p) => p,
 +                TypeOrConstParamData::ConstParamData(_) => {
 +                    // FIXME: implement const generic defaults
 +                    let val = unknown_const_as_generic(
 +                        db.const_param_ty(ConstParamId::from_unchecked(id)),
 +                    );
 +                    return crate::make_binders_with_count(db, idx, &generic_params, val);
 +                }
 +            };
 +            // A parameter with no default lowers to an error type.
 +            let mut ty =
 +                p.default.as_ref().map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t));
 +
 +            // Each default may only refer to parameters that come *before*
 +            // it; a default referring to a later parameter is forbidden, so
 +            // `fallback_bound_vars` is used to replace such forward
 +            // references (FIXME: report a diagnostic).
 +            ty = fallback_bound_vars(ty, idx);
 +            let val = GenericArgData::Ty(ty).intern(Interner);
 +            crate::make_binders_with_count(db, idx, &generic_params, val)
 +        })
 +        .collect();
 +
 +    defaults
 +}
 +
 +/// Cycle recovery for `generic_defaults_query`: produce one error/unknown
 +/// default per parameter so downstream code still sees the right arity.
 +pub(crate) fn generic_defaults_recover(
 +    db: &dyn HirDatabase,
 +    _cycle: &[String],
 +    def: &GenericDefId,
 +) -> Arc<[Binders<crate::GenericArg>]> {
 +    let generic_params = generics(db.upcast(), *def);
 +    // FIXME: this code is not covered in tests.
 +    // we still need one default per parameter
 +    let defaults = generic_params
 +        .iter_id()
 +        .enumerate()
 +        .map(|(count, id)| {
 +            // Type params get an error type; const params an unknown const.
 +            let val = match id {
 +                itertools::Either::Left(_) => {
 +                    GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
 +                }
 +                itertools::Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
 +            };
 +            crate::make_binders_with_count(db, count, &generic_params, val)
 +        })
 +        .collect();
 +
 +    defaults
 +}
 +
 +/// Build the polymorphic signature of a function from its declared parameter
 +/// and return types. Parameters and return type use different impl-trait
 +/// lowering modes: argument-position impl trait becomes a type variable,
 +/// return-position impl trait becomes an opaque type.
 +fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
 +    let data = db.function_data(def);
 +    let resolver = def.resolver(db.upcast());
 +    let ctx_params = TyLoweringContext::new(db, &resolver)
 +        .with_impl_trait_mode(ImplTraitLoweringMode::Variable)
 +        .with_type_param_mode(ParamLoweringMode::Variable);
 +    let params = data.params.iter().map(|(_, tr)| ctx_params.lower_ty(tr)).collect::<Vec<_>>();
 +    let ctx_ret = TyLoweringContext::new(db, &resolver)
 +        .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
 +        .with_type_param_mode(ParamLoweringMode::Variable);
 +    let ret = ctx_ret.lower_ty(&data.ret_type);
 +    let generics = generics(db.upcast(), def.into());
 +    let sig = CallableSig::from_params_and_return(params, ret, data.is_varargs());
 +    make_binders(db, &generics, sig)
 +}
 +
 +/// Build the declared type of a function. This should not need to look at the
 +/// function body. The result is a zero-sized `FnDef` type applied to the
 +/// function's generics as bound variables.
 +fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> Binders<Ty> {
 +    let generics = generics(db.upcast(), def.into());
 +    let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
 +    make_binders(
 +        db,
 +        &generics,
 +        TyKind::FnDef(CallableDefId::FunctionId(def).to_chalk(db), substs).intern(Interner),
 +    )
 +}
 +
 +/// Build the declared type of a const by lowering its annotated type with
 +/// generic parameters as bound variables.
 +fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders<Ty> {
 +    let data = db.const_data(def);
 +    let generics = generics(db.upcast(), def.into());
 +    let resolver = def.resolver(db.upcast());
 +    let ctx =
 +        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
 +
 +    make_binders(db, &generics, ctx.lower_ty(&data.type_ref))
 +}
 +
 +/// Build the declared type of a static. Statics are never generic, so the
 +/// result is wrapped in empty binders.
 +fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> Binders<Ty> {
 +    let data = db.static_data(def);
 +    let resolver = def.resolver(db.upcast());
 +    let ctx = TyLoweringContext::new(db, &resolver);
 +
 +    Binders::empty(Interner, ctx.lower_ty(&data.type_ref))
 +}
 +
 +/// Build the signature of a (tuple) struct constructor: the field types as
 +/// parameters, the ADT type as the return type, under the struct's binders.
 +fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnSig {
 +    let struct_data = db.struct_data(def);
 +    let fields = struct_data.variant_data.fields();
 +    let resolver = def.resolver(db.upcast());
 +    let ctx =
 +        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
 +    let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)).collect::<Vec<_>>();
 +    let (ret, binders) = type_for_adt(db, def.into()).into_value_and_skipped_binders();
 +    Binders::new(binders, CallableSig::from_params_and_return(params, ret, false))
 +}
 +
 +/// Build the type of a tuple struct constructor.
 +/// Unit structs have no constructor function, so their value is the ADT type
 +/// itself; otherwise the value is a zero-sized `FnDef` type.
 +fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Binders<Ty> {
 +    let struct_data = db.struct_data(def);
 +    if let StructKind::Unit = struct_data.variant_data.kind() {
 +        return type_for_adt(db, def.into());
 +    }
 +    let generics = generics(db.upcast(), def.into());
 +    let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
 +    make_binders(
 +        db,
 +        &generics,
 +        TyKind::FnDef(CallableDefId::StructId(def).to_chalk(db), substs).intern(Interner),
 +    )
 +}
 +
 +/// Build the signature of an enum variant constructor: the variant's field
 +/// types as parameters, the parent enum type as the return type.
 +fn fn_sig_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> PolyFnSig {
 +    let enum_data = db.enum_data(def.parent);
 +    let var_data = &enum_data.variants[def.local_id];
 +    let fields = var_data.variant_data.fields();
 +    // Variants resolve in (and are generic over) the parent enum's scope.
 +    let resolver = def.parent.resolver(db.upcast());
 +    let ctx =
 +        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
 +    let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)).collect::<Vec<_>>();
 +    let (ret, binders) = type_for_adt(db, def.parent.into()).into_value_and_skipped_binders();
 +    Binders::new(binders, CallableSig::from_params_and_return(params, ret, false))
 +}
 +
 +/// Build the type of a tuple enum variant constructor.
 +/// Unit variants have no constructor function, so their value is the enum
 +/// type itself; otherwise the value is a zero-sized `FnDef` type.
 +fn type_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> Binders<Ty> {
 +    let enum_data = db.enum_data(def.parent);
 +    let var_data = &enum_data.variants[def.local_id].variant_data;
 +    if let StructKind::Unit = var_data.kind() {
 +        return type_for_adt(db, def.parent.into());
 +    }
 +    let generics = generics(db.upcast(), def.parent.into());
 +    let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
 +    make_binders(
 +        db,
 +        &generics,
 +        TyKind::FnDef(CallableDefId::EnumVariantId(def).to_chalk(db), substs).intern(Interner),
 +    )
 +}
 +
 +/// Build the type of an ADT (struct/union/enum) applied to its own generic
 +/// parameters as bound variables, wrapped in the matching binders.
 +fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
 +    let generics = generics(db.upcast(), adt.into());
 +    let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
 +    let ty = TyKind::Adt(crate::AdtId(adt), subst).intern(Interner);
 +    make_binders(db, &generics, ty)
 +}
 +
 +/// Build the declared type of a type alias. `extern` type aliases lower to a
 +/// foreign opaque type; ordinary aliases lower their right-hand side (a
 +/// missing RHS lowers to an error type).
 +fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
 +    let generics = generics(db.upcast(), t.into());
 +    let resolver = t.resolver(db.upcast());
 +    let ctx =
 +        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
 +    if db.type_alias_data(t).is_extern {
 +        Binders::empty(Interner, TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner))
 +    } else {
 +        let type_ref = &db.type_alias_data(t).type_ref;
 +        let inner = ctx.lower_ty(type_ref.as_deref().unwrap_or(&TypeRef::Error));
 +        make_binders(db, &generics, inner)
 +    }
 +
 +/// Identifier of an item that can be called like a function: a function, a
 +/// tuple struct constructor, or a tuple enum variant constructor.
 +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 +pub enum CallableDefId {
 +    FunctionId(FunctionId),
 +    StructId(StructId),
 +    EnumVariantId(EnumVariantId),
 +}
 +impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId);
 +
 +impl CallableDefId {
 +    /// Returns the crate the callable item is defined in, by resolving the
 +    /// item's containing module (struct/variant constructors go through
 +    /// their container).
 +    pub fn krate(self, db: &dyn HirDatabase) -> CrateId {
 +        let db = db.upcast();
 +        match self {
 +            CallableDefId::FunctionId(f) => f.lookup(db).module(db),
 +            CallableDefId::StructId(s) => s.lookup(db).container,
 +            CallableDefId::EnumVariantId(e) => e.parent.lookup(db).container,
 +        }
 +        .krate()
 +    }
 +}
 +
 +/// Every callable item is also a generic definition; convert by delegating
 +/// to the per-variant `From` impls.
 +impl From<CallableDefId> for GenericDefId {
 +    fn from(def: CallableDefId) -> GenericDefId {
 +        match def {
 +            CallableDefId::FunctionId(f) => f.into(),
 +            CallableDefId::StructId(s) => s.into(),
 +            CallableDefId::EnumVariantId(e) => e.into(),
 +        }
 +    }
 +}
 +
 +/// Identifier of an item that defines a type in the type namespace; the key
 +/// of `ty_query`.
 +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 +pub enum TyDefId {
 +    BuiltinType(BuiltinType),
 +    AdtId(AdtId),
 +    TypeAliasId(TypeAliasId),
 +}
 +impl_from!(BuiltinType, AdtId(StructId, EnumId, UnionId), TypeAliasId for TyDefId);
 +
 +/// Identifier of an item that has a type in the value namespace (functions,
 +/// constructors, consts, statics); the key of `value_ty_query`.
 +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 +pub enum ValueTyDefId {
 +    FunctionId(FunctionId),
 +    StructId(StructId),
 +    UnionId(UnionId),
 +    EnumVariantId(EnumVariantId),
 +    ConstId(ConstId),
 +    StaticId(StaticId),
 +}
 +impl_from!(FunctionId, StructId, UnionId, EnumVariantId, ConstId, StaticId for ValueTyDefId);
 +
 +/// Build the declared type of an item. This depends on the namespace; e.g. for
 +/// `struct Foo(usize)`, we have two types: The type of the struct itself, and
 +/// the constructor function `(usize) -> Foo` which lives in the values
 +/// namespace. This query covers the type namespace; see `value_ty_query` for
 +/// the value namespace.
 +pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders<Ty> {
 +    match def {
 +        TyDefId::BuiltinType(it) => Binders::empty(Interner, TyBuilder::builtin(it)),
 +        TyDefId::AdtId(it) => type_for_adt(db, it),
 +        TyDefId::TypeAliasId(it) => type_for_type_alias(db, it),
 +    }
 +}
 +
 +/// Cycle recovery for `ty_query` (e.g. a type alias referring to itself):
 +/// return an error type under the definition's binders.
 +pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &[String], def: &TyDefId) -> Binders<Ty> {
 +    let generics = match *def {
 +        TyDefId::BuiltinType(_) => return Binders::empty(Interner, TyKind::Error.intern(Interner)),
 +        TyDefId::AdtId(it) => generics(db.upcast(), it.into()),
 +        TyDefId::TypeAliasId(it) => generics(db.upcast(), it.into()),
 +    };
 +    make_binders(db, &generics, TyKind::Error.intern(Interner))
 +}
 +
 +/// Build the declared type of an item in the value namespace (the
 +/// counterpart of `ty_query`): constructor function types for structs and
 +/// enum variants, `FnDef` types for functions, and the annotated type for
 +/// consts and statics.
 +pub(crate) fn value_ty_query(db: &dyn HirDatabase, def: ValueTyDefId) -> Binders<Ty> {
 +    match def {
 +        ValueTyDefId::FunctionId(it) => type_for_fn(db, it),
 +        ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it),
 +        ValueTyDefId::UnionId(it) => type_for_adt(db, it.into()),
 +        ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it),
 +        ValueTyDefId::ConstId(it) => type_for_const(db, it),
 +        ValueTyDefId::StaticId(it) => type_for_static(db, it),
 +    }
 +}
 +
 +pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binders<Ty> {
 +    let impl_loc = impl_id.lookup(db.upcast());
 +    let impl_data = db.impl_data(impl_id);
 +    let resolver = impl_id.resolver(db.upcast());
 +    let _cx = stdx::panic_context::enter(format!(
 +        "impl_self_ty_query({:?} -> {:?} -> {:?})",
 +        impl_id, impl_loc, impl_data
 +    ));
 +    let generics = generics(db.upcast(), impl_id.into());
 +    let ctx =
 +        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
 +    make_binders(db, &generics, ctx.lower_ty(&impl_data.self_ty))
 +}
 +
 +// returns None if def is a type arg
 +pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> Ty {
 +    let parent_data = db.generic_params(def.parent());
 +    let data = &parent_data.type_or_consts[def.local_id()];
 +    let resolver = def.parent().resolver(db.upcast());
 +    let ctx = TyLoweringContext::new(db, &resolver);
 +    match data {
 +        TypeOrConstParamData::TypeParamData(_) => {
 +            never!();
 +            Ty::new(Interner, TyKind::Error)
 +        }
 +        TypeOrConstParamData::ConstParamData(d) => ctx.lower_ty(&d.ty),
 +    }
 +}
 +
 +pub(crate) fn impl_self_ty_recover(
 +    db: &dyn HirDatabase,
 +    _cycle: &[String],
 +    impl_id: &ImplId,
 +) -> Binders<Ty> {
 +    let generics = generics(db.upcast(), (*impl_id).into());
 +    make_binders(db, &generics, TyKind::Error.intern(Interner))
 +}
 +
 +pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option<Binders<TraitRef>> {
 +    let impl_loc = impl_id.lookup(db.upcast());
 +    let impl_data = db.impl_data(impl_id);
 +    let resolver = impl_id.resolver(db.upcast());
 +    let _cx = stdx::panic_context::enter(format!(
 +        "impl_trait_query({:?} -> {:?} -> {:?})",
 +        impl_id, impl_loc, impl_data
 +    ));
 +    let ctx =
 +        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
 +    let (self_ty, binders) = db.impl_self_ty(impl_id).into_value_and_skipped_binders();
 +    let target_trait = impl_data.target_trait.as_ref()?;
 +    Some(Binders::new(binders, ctx.lower_trait_ref(target_trait, Some(self_ty))?))
 +}
 +
 +pub(crate) fn return_type_impl_traits(
 +    db: &dyn HirDatabase,
 +    def: hir_def::FunctionId,
 +) -> Option<Arc<Binders<ReturnTypeImplTraits>>> {
 +    // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe
 +    let data = db.function_data(def);
 +    let resolver = def.resolver(db.upcast());
 +    let ctx_ret = TyLoweringContext::new(db, &resolver)
 +        .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
 +        .with_type_param_mode(ParamLoweringMode::Variable);
 +    let _ret = (&ctx_ret).lower_ty(&data.ret_type);
 +    let generics = generics(db.upcast(), def.into());
 +    let return_type_impl_traits =
 +        ReturnTypeImplTraits { impl_traits: ctx_ret.opaque_type_data.into_inner() };
 +    if return_type_impl_traits.impl_traits.is_empty() {
 +        None
 +    } else {
 +        Some(Arc::new(make_binders(db, &generics, return_type_impl_traits)))
 +    }
 +}
 +
 +pub(crate) fn lower_to_chalk_mutability(m: hir_def::type_ref::Mutability) -> Mutability {
 +    match m {
 +        hir_def::type_ref::Mutability::Shared => Mutability::Not,
 +        hir_def::type_ref::Mutability::Mut => Mutability::Mut,
 +    }
 +}
 +
 +/// Checks if the provided generic arg matches its expected kind, then lower them via
 +/// provided closures. Use unknown if there was kind mismatch.
 +///
 +/// Returns `Some` of the lowered generic arg. `None` if the provided arg is a lifetime.
 +pub(crate) fn generic_arg_to_chalk<'a, T>(
 +    db: &dyn HirDatabase,
 +    kind_id: Either<TypeParamId, ConstParamId>,
 +    arg: &'a GenericArg,
 +    this: &mut T,
 +    for_type: impl FnOnce(&mut T, &TypeRef) -> Ty + 'a,
 +    for_const: impl FnOnce(&mut T, &ConstScalarOrPath, Ty) -> Const + 'a,
 +) -> Option<crate::GenericArg> {
 +    let kind = match kind_id {
 +        Either::Left(_) => ParamKind::Type,
 +        Either::Right(id) => {
 +            let ty = db.const_param_ty(id);
 +            ParamKind::Const(ty)
 +        }
 +    };
 +    Some(match (arg, kind) {
 +        (GenericArg::Type(type_ref), ParamKind::Type) => {
 +            let ty = for_type(this, type_ref);
 +            GenericArgData::Ty(ty).intern(Interner)
 +        }
 +        (GenericArg::Const(c), ParamKind::Const(c_ty)) => {
 +            GenericArgData::Const(for_const(this, c, c_ty)).intern(Interner)
 +        }
 +        (GenericArg::Const(_), ParamKind::Type) => {
 +            GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
 +        }
 +        (GenericArg::Type(t), ParamKind::Const(c_ty)) => {
 +            // We want to recover simple idents, which parser detects them
 +            // as types. Maybe here is not the best place to do it, but
 +            // it works.
 +            if let TypeRef::Path(p) = t {
 +                let p = p.mod_path();
 +                if p.kind == PathKind::Plain {
 +                    if let [n] = p.segments() {
 +                        let c = ConstScalarOrPath::Path(n.clone());
 +                        return Some(
 +                            GenericArgData::Const(for_const(this, &c, c_ty)).intern(Interner),
 +                        );
 +                    }
 +                }
 +            }
 +            unknown_const_as_generic(c_ty)
 +        }
 +        (GenericArg::Lifetime(_), _) => return None,
 +    })
 +}
 +
 +pub(crate) fn const_or_path_to_chalk(
 +    db: &dyn HirDatabase,
 +    resolver: &Resolver,
 +    expected_ty: Ty,
 +    value: &ConstScalarOrPath,
 +    mode: ParamLoweringMode,
 +    args: impl FnOnce() -> Generics,
 +    debruijn: DebruijnIndex,
 +) -> Const {
 +    match value {
 +        ConstScalarOrPath::Scalar(s) => intern_const_scalar(s.clone(), expected_ty),
 +        ConstScalarOrPath::Path(n) => {
 +            let path = ModPath::from_segments(PathKind::Plain, Some(n.clone()));
 +            path_to_const(db, resolver, &path, mode, args, debruijn)
 +                .unwrap_or_else(|| unknown_const(expected_ty))
 +        }
 +    }
 +}
 +
 +/// This replaces any 'free' Bound vars in `s` (i.e. those with indices past
 +/// num_vars_to_keep) by `TyKind::Unknown`.
 +fn fallback_bound_vars<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>(
 +    s: T,
 +    num_vars_to_keep: usize,
 +) -> T {
 +    crate::fold_free_vars(
 +        s,
 +        |bound, binders| {
 +            if bound.index >= num_vars_to_keep && bound.debruijn == DebruijnIndex::INNERMOST {
 +                TyKind::Error.intern(Interner)
 +            } else {
 +                bound.shifted_in_from(binders).to_ty(Interner)
 +            }
 +        },
 +        |ty, bound, binders| {
 +            if bound.index >= num_vars_to_keep && bound.debruijn == DebruijnIndex::INNERMOST {
 +                unknown_const(ty.clone())
 +            } else {
 +                bound.shifted_in_from(binders).to_const(Interner, ty)
 +            }
 +        },
 +    )
 +}
index dc7252f7072d86a052eb2c2d365938bcf867bb28,0000000000000000000000000000000000000000..118e5311e9a6414ebf6ab084762257883bb2d4dd
mode 100644,000000..100644
--- /dev/null
@@@ -1,150 -1,0 +1,150 @@@
- use rustc_hash::{FxHashMap, FxHashSet};
 +//! Database used for testing `hir`.
 +
 +use std::{
 +    fmt, panic,
 +    sync::{Arc, Mutex},
 +};
 +
 +use base_db::{
 +    salsa, AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
 +};
 +use hir_def::{db::DefDatabase, ModuleId};
 +use hir_expand::db::AstDatabase;
-     fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
++use stdx::hash::{NoHashHashMap, NoHashHashSet};
 +use syntax::TextRange;
 +use test_utils::extract_annotations;
 +
 +#[salsa::database(
 +    base_db::SourceDatabaseExtStorage,
 +    base_db::SourceDatabaseStorage,
 +    hir_expand::db::AstDatabaseStorage,
 +    hir_def::db::InternDatabaseStorage,
 +    hir_def::db::DefDatabaseStorage,
 +    crate::db::HirDatabaseStorage
 +)]
 +pub(crate) struct TestDB {
 +    storage: salsa::Storage<TestDB>,
 +    events: Mutex<Option<Vec<salsa::Event>>>,
 +}
 +
 +impl Default for TestDB {
 +    fn default() -> Self {
 +        let mut this = Self { storage: Default::default(), events: Default::default() };
 +        this.set_enable_proc_attr_macros(true);
 +        this
 +    }
 +}
 +
 +impl fmt::Debug for TestDB {
 +    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 +        f.debug_struct("TestDB").finish()
 +    }
 +}
 +
 +impl Upcast<dyn AstDatabase> for TestDB {
 +    fn upcast(&self) -> &(dyn AstDatabase + 'static) {
 +        &*self
 +    }
 +}
 +
 +impl Upcast<dyn DefDatabase> for TestDB {
 +    fn upcast(&self) -> &(dyn DefDatabase + 'static) {
 +        &*self
 +    }
 +}
 +
 +impl salsa::Database for TestDB {
 +    fn salsa_event(&self, event: salsa::Event) {
 +        let mut events = self.events.lock().unwrap();
 +        if let Some(events) = &mut *events {
 +            events.push(event);
 +        }
 +    }
 +}
 +
 +impl salsa::ParallelDatabase for TestDB {
 +    fn snapshot(&self) -> salsa::Snapshot<TestDB> {
 +        salsa::Snapshot::new(TestDB {
 +            storage: self.storage.snapshot(),
 +            events: Default::default(),
 +        })
 +    }
 +}
 +
 +impl panic::RefUnwindSafe for TestDB {}
 +
 +impl FileLoader for TestDB {
 +    fn file_text(&self, file_id: FileId) -> Arc<String> {
 +        FileLoaderDelegate(self).file_text(file_id)
 +    }
 +    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
 +        FileLoaderDelegate(self).resolve_path(path)
 +    }
-     pub(crate) fn extract_annotations(&self) -> FxHashMap<FileId, Vec<(TextRange, String)>> {
++    fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
 +        FileLoaderDelegate(self).relevant_crates(file_id)
 +    }
 +}
 +
 +impl TestDB {
 +    pub(crate) fn module_for_file_opt(&self, file_id: FileId) -> Option<ModuleId> {
 +        for &krate in self.relevant_crates(file_id).iter() {
 +            let crate_def_map = self.crate_def_map(krate);
 +            for (local_id, data) in crate_def_map.modules() {
 +                if data.origin.file_id() == Some(file_id) {
 +                    return Some(crate_def_map.module_id(local_id));
 +                }
 +            }
 +        }
 +        None
 +    }
 +
 +    pub(crate) fn module_for_file(&self, file_id: FileId) -> ModuleId {
 +        self.module_for_file_opt(file_id).unwrap()
 +    }
 +
++    pub(crate) fn extract_annotations(&self) -> NoHashHashMap<FileId, Vec<(TextRange, String)>> {
 +        let mut files = Vec::new();
 +        let crate_graph = self.crate_graph();
 +        for krate in crate_graph.iter() {
 +            let crate_def_map = self.crate_def_map(krate);
 +            for (module_id, _) in crate_def_map.modules() {
 +                let file_id = crate_def_map[module_id].origin.file_id();
 +                files.extend(file_id)
 +            }
 +        }
 +        files
 +            .into_iter()
 +            .filter_map(|file_id| {
 +                let text = self.file_text(file_id);
 +                let annotations = extract_annotations(&text);
 +                if annotations.is_empty() {
 +                    return None;
 +                }
 +                Some((file_id, annotations))
 +            })
 +            .collect()
 +    }
 +}
 +
 +impl TestDB {
 +    pub(crate) fn log(&self, f: impl FnOnce()) -> Vec<salsa::Event> {
 +        *self.events.lock().unwrap() = Some(Vec::new());
 +        f();
 +        self.events.lock().unwrap().take().unwrap()
 +    }
 +
 +    pub(crate) fn log_executed(&self, f: impl FnOnce()) -> Vec<String> {
 +        let events = self.log(f);
 +        events
 +            .into_iter()
 +            .filter_map(|e| match e.kind {
 +                // This is pretty horrible, but `Debug` is the only way to inspect
 +                // QueryDescriptor at the moment.
 +                salsa::EventKind::WillExecute { database_key } => {
 +                    Some(format!("{:?}", database_key.debug(self)))
 +                }
 +                _ => None,
 +            })
 +            .collect()
 +    }
 +}
index 1b5ed0603bfd137696aeb4fd7c79cf86bf8cb46f,0000000000000000000000000000000000000000..c7895db1afbf5e523a3e6c9a03f09df74cd0aa73
mode 100644,000000..100644
--- /dev/null
@@@ -1,1667 -1,0 +1,1695 @@@
 +use expect_test::expect;
 +
 +use super::{check_infer, check_no_mismatches, check_types};
 +
 +#[test]
 +fn bug_484() {
 +    check_infer(
 +        r#"
 +        fn test() {
 +            let x = if true {};
 +        }
 +        "#,
 +        expect![[r#"
 +            10..37 '{     ... {}; }': ()
 +            20..21 'x': ()
 +            24..34 'if true {}': ()
 +            27..31 'true': bool
 +            32..34 '{}': ()
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn no_panic_on_field_of_enum() {
 +    check_infer(
 +        r#"
 +        enum X {}
 +
 +        fn test(x: X) {
 +            x.some_field;
 +        }
 +        "#,
 +        expect![[r#"
 +            19..20 'x': X
 +            25..46 '{     ...eld; }': ()
 +            31..32 'x': X
 +            31..43 'x.some_field': {unknown}
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn bug_585() {
 +    check_infer(
 +        r#"
 +        fn test() {
 +            X {};
 +            match x {
 +                A::B {} => (),
 +                A::Y() => (),
 +            }
 +        }
 +        "#,
 +        expect![[r#"
 +            10..88 '{     ...   } }': ()
 +            16..20 'X {}': {unknown}
 +            26..86 'match ...     }': ()
 +            32..33 'x': {unknown}
 +            44..51 'A::B {}': {unknown}
 +            55..57 '()': ()
 +            67..73 'A::Y()': {unknown}
 +            77..79 '()': ()
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn bug_651() {
 +    check_infer(
 +        r#"
 +        fn quux() {
 +            let y = 92;
 +            1 + y;
 +        }
 +        "#,
 +        expect![[r#"
 +            10..40 '{     ...+ y; }': ()
 +            20..21 'y': i32
 +            24..26 '92': i32
 +            32..33 '1': i32
 +            32..37 '1 + y': i32
 +            36..37 'y': i32
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn recursive_vars() {
 +    check_infer(
 +        r#"
 +        fn test() {
 +            let y = unknown;
 +            [y, &y];
 +        }
 +        "#,
 +        expect![[r#"
 +            10..47 '{     ...&y]; }': ()
 +            20..21 'y': {unknown}
 +            24..31 'unknown': {unknown}
 +            37..44 '[y, &y]': [{unknown}; 2]
 +            38..39 'y': {unknown}
 +            41..43 '&y': &{unknown}
 +            42..43 'y': {unknown}
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn recursive_vars_2() {
 +    check_infer(
 +        r#"
 +        fn test() {
 +            let x = unknown;
 +            let y = unknown;
 +            [(x, y), (&y, &x)];
 +        }
 +        "#,
 +        expect![[r#"
 +            10..79 '{     ...x)]; }': ()
 +            20..21 'x': &{unknown}
 +            24..31 'unknown': &{unknown}
 +            41..42 'y': {unknown}
 +            45..52 'unknown': {unknown}
 +            58..76 '[(x, y..., &x)]': [(&{unknown}, {unknown}); 2]
 +            59..65 '(x, y)': (&{unknown}, {unknown})
 +            60..61 'x': &{unknown}
 +            63..64 'y': {unknown}
 +            67..75 '(&y, &x)': (&{unknown}, {unknown})
 +            68..70 '&y': &{unknown}
 +            69..70 'y': {unknown}
 +            72..74 '&x': &&{unknown}
 +            73..74 'x': &{unknown}
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn array_elements_expected_type() {
 +    check_no_mismatches(
 +        r#"
 +        fn test() {
 +            let x: [[u32; 2]; 2] = [[1, 2], [3, 4]];
 +        }
 +        "#,
 +    );
 +}
 +
 +#[test]
 +fn infer_std_crash_1() {
 +    // caused stack overflow, taken from std
 +    check_infer(
 +        r#"
 +        enum Maybe<T> {
 +            Real(T),
 +            Fake,
 +        }
 +
 +        fn write() {
 +            match something_unknown {
 +                Maybe::Real(ref mut something) => (),
 +            }
 +        }
 +        "#,
 +        expect![[r#"
 +            53..138 '{     ...   } }': ()
 +            59..136 'match ...     }': ()
 +            65..82 'someth...nknown': Maybe<{unknown}>
 +            93..123 'Maybe:...thing)': Maybe<{unknown}>
 +            105..122 'ref mu...ething': &mut {unknown}
 +            127..129 '()': ()
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn infer_std_crash_2() {
 +    // caused "equating two type variables, ...", taken from std
 +    check_infer(
 +        r#"
 +        fn test_line_buffer() {
 +            &[0, b'\n', 1, b'\n'];
 +        }
 +        "#,
 +        expect![[r#"
 +            22..52 '{     ...n']; }': ()
 +            28..49 '&[0, b...b'\n']': &[u8; 4]
 +            29..49 '[0, b'...b'\n']': [u8; 4]
 +            30..31 '0': u8
 +            33..38 'b'\n'': u8
 +            40..41 '1': u8
 +            43..48 'b'\n'': u8
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn infer_std_crash_3() {
 +    // taken from rustc
 +    check_infer(
 +        r#"
 +        pub fn compute() {
 +            match nope!() {
 +                SizeSkeleton::Pointer { non_zero: true, tail } => {}
 +            }
 +        }
 +        "#,
 +        expect![[r#"
 +            17..107 '{     ...   } }': ()
 +            23..105 'match ...     }': ()
 +            29..36 'nope!()': {unknown}
 +            47..93 'SizeSk...tail }': {unknown}
 +            81..85 'true': bool
 +            81..85 'true': bool
 +            87..91 'tail': {unknown}
 +            97..99 '{}': ()
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn infer_std_crash_4() {
 +    // taken from rustc
 +    check_infer(
 +        r#"
 +        pub fn primitive_type() {
 +            match *self {
 +                BorrowedRef { type_: Primitive(p), ..} => {},
 +            }
 +        }
 +        "#,
 +        expect![[r#"
 +            24..105 '{     ...   } }': ()
 +            30..103 'match ...     }': ()
 +            36..41 '*self': {unknown}
 +            37..41 'self': {unknown}
 +            52..90 'Borrow...), ..}': {unknown}
 +            73..85 'Primitive(p)': {unknown}
 +            83..84 'p': {unknown}
 +            94..96 '{}': ()
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn infer_std_crash_5() {
 +    // taken from rustc
 +    check_infer(
 +        r#"
 +        fn extra_compiler_flags() {
 +            for content in doesnt_matter {
 +                let name = if doesnt_matter {
 +                    first
 +                } else {
 +                    &content
 +                };
 +
 +                let content = if ICE_REPORT_COMPILER_FLAGS_STRIP_VALUE.contains(&name) {
 +                    name
 +                } else {
 +                    content
 +                };
 +            }
 +        }
 +        "#,
 +        expect![[r#"
 +            26..322 '{     ...   } }': ()
 +            32..320 'for co...     }': ()
 +            36..43 'content': {unknown}
 +            47..60 'doesnt_matter': {unknown}
 +            61..320 '{     ...     }': ()
 +            75..79 'name': &{unknown}
 +            82..166 'if doe...     }': &{unknown}
 +            85..98 'doesnt_matter': bool
 +            99..128 '{     ...     }': &{unknown}
 +            113..118 'first': &{unknown}
 +            134..166 '{     ...     }': &{unknown}
 +            148..156 '&content': &{unknown}
 +            149..156 'content': {unknown}
 +            181..188 'content': &{unknown}
 +            191..313 'if ICE...     }': &{unknown}
 +            194..231 'ICE_RE..._VALUE': {unknown}
 +            194..247 'ICE_RE...&name)': bool
 +            241..246 '&name': &&{unknown}
 +            242..246 'name': &{unknown}
 +            248..276 '{     ...     }': &{unknown}
 +            262..266 'name': &{unknown}
 +            282..313 '{     ...     }': {unknown}
 +            296..303 'content': {unknown}
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn infer_nested_generics_crash() {
 +    // another crash found typechecking rustc
 +    check_infer(
 +        r#"
 +        struct Canonical<V> {
 +            value: V,
 +        }
 +        struct QueryResponse<V> {
 +            value: V,
 +        }
 +        fn test<R>(query_response: Canonical<QueryResponse<R>>) {
 +            &query_response.value;
 +        }
 +        "#,
 +        expect![[r#"
 +            91..105 'query_response': Canonical<QueryResponse<R>>
 +            136..166 '{     ...lue; }': ()
 +            142..163 '&query....value': &QueryResponse<R>
 +            143..157 'query_response': Canonical<QueryResponse<R>>
 +            143..163 'query_....value': QueryResponse<R>
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn infer_paren_macro_call() {
 +    check_infer(
 +        r#"
 +        macro_rules! bar { () => {0u32} }
 +        fn test() {
 +            let a = (bar!());
 +        }
 +        "#,
 +        expect![[r#"
 +            !0..4 '0u32': u32
 +            44..69 '{     ...()); }': ()
 +            54..55 'a': u32
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn infer_array_macro_call() {
 +    check_infer(
 +        r#"
 +        macro_rules! bar { () => {0u32} }
 +        fn test() {
 +            let a = [bar!()];
 +        }
 +        "#,
 +        expect![[r#"
 +            !0..4 '0u32': u32
 +            44..69 '{     ...()]; }': ()
 +            54..55 'a': [u32; 1]
 +            58..66 '[bar!()]': [u32; 1]
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn bug_1030() {
 +    check_infer(
 +        r#"
 +        struct HashSet<T, H>;
 +        struct FxHasher;
 +        type FxHashSet<T> = HashSet<T, FxHasher>;
 +
 +        impl<T, H> HashSet<T, H> {
 +            fn default() -> HashSet<T, H> {}
 +        }
 +
 +        pub fn main_loop() {
 +            FxHashSet::default();
 +        }
 +        "#,
 +        expect![[r#"
 +            143..145 '{}': HashSet<T, H>
 +            168..197 '{     ...t(); }': ()
 +            174..192 'FxHash...efault': fn default<{unknown}, FxHasher>() -> HashSet<{unknown}, FxHasher>
 +            174..194 'FxHash...ault()': HashSet<{unknown}, FxHasher>
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn issue_2669() {
 +    check_infer(
 +        r#"
 +        trait A {}
 +        trait Write {}
 +        struct Response<T> {}
 +
 +        trait D {
 +            fn foo();
 +        }
 +
 +        impl<T:A> D for Response<T> {
 +            fn foo() {
 +                end();
 +                fn end<W: Write>() {
 +                    let _x: T =  loop {};
 +                }
 +            }
 +        }
 +        "#,
 +        expect![[r#"
 +            119..214 '{     ...     }': ()
 +            129..132 'end': fn end<{unknown}>()
 +            129..134 'end()': ()
 +            163..208 '{     ...     }': ()
 +            181..183 '_x': !
 +            190..197 'loop {}': !
 +            195..197 '{}': ()
 +        "#]],
 +    )
 +}
 +
 +#[test]
 +fn issue_2705() {
 +    check_infer(
 +        r#"
 +        trait Trait {}
 +        fn test() {
 +            <Trait<u32>>::foo()
 +        }
 +        "#,
 +        expect![[r#"
 +            25..52 '{     ...oo() }': ()
 +            31..48 '<Trait...>::foo': {unknown}
 +            31..50 '<Trait...:foo()': ()
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn issue_2683_chars_impl() {
 +    check_types(
 +        r#"
 +//- minicore: iterator
 +pub struct Chars<'a> {}
 +impl<'a> Iterator for Chars<'a> {
 +    type Item = char;
 +    fn next(&mut self) -> Option<char> { loop {} }
 +}
 +
 +fn test() {
 +    let chars: Chars<'_>;
 +    (chars.next(), chars.nth(1));
 +} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^ (Option<char>, Option<char>)
 +"#,
 +    );
 +}
 +
 +#[test]
 +fn issue_3999_slice() {
 +    check_infer(
 +        r#"
 +        fn foo(params: &[usize]) {
 +            match params {
 +                [ps @ .., _] => {}
 +            }
 +        }
 +        "#,
 +        expect![[r#"
 +            7..13 'params': &[usize]
 +            25..80 '{     ...   } }': ()
 +            31..78 'match ...     }': ()
 +            37..43 'params': &[usize]
 +            54..66 '[ps @ .., _]': [usize]
 +            55..62 'ps @ ..': &[usize]
 +            60..62 '..': [usize]
 +            64..65 '_': usize
 +            70..72 '{}': ()
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn issue_3999_struct() {
 +    // rust-analyzer should not panic on seeing this malformed
 +    // record pattern.
 +    check_infer(
 +        r#"
 +        struct Bar {
 +            a: bool,
 +        }
 +        fn foo(b: Bar) {
 +            match b {
 +                Bar { a: .. } => {},
 +            }
 +        }
 +        "#,
 +        expect![[r#"
 +            35..36 'b': Bar
 +            43..95 '{     ...   } }': ()
 +            49..93 'match ...     }': ()
 +            55..56 'b': Bar
 +            67..80 'Bar { a: .. }': Bar
 +            76..78 '..': bool
 +            84..86 '{}': ()
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn issue_4235_name_conflicts() {
 +    check_infer(
 +        r#"
 +        struct FOO {}
 +        static FOO:FOO = FOO {};
 +
 +        impl FOO {
 +            fn foo(&self) {}
 +        }
 +
 +        fn main() {
 +            let a = &FOO;
 +            a.foo();
 +        }
 +        "#,
 +        expect![[r#"
 +            31..37 'FOO {}': FOO
 +            63..67 'self': &FOO
 +            69..71 '{}': ()
 +            85..119 '{     ...o(); }': ()
 +            95..96 'a': &FOO
 +            99..103 '&FOO': &FOO
 +            100..103 'FOO': FOO
 +            109..110 'a': &FOO
 +            109..116 'a.foo()': ()
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn issue_4465_dollar_crate_at_type() {
 +    check_infer(
 +        r#"
 +        pub struct Foo {}
 +        pub fn anything<T>() -> T {
 +            loop {}
 +        }
 +        macro_rules! foo {
 +            () => {{
 +                let r: $crate::Foo = anything();
 +                r
 +            }};
 +        }
 +        fn main() {
 +            let _a = foo!();
 +        }
 +        "#,
 +        expect![[r#"
 +            44..59 '{     loop {} }': T
 +            50..57 'loop {}': !
 +            55..57 '{}': ()
 +            !0..31 '{letr:...g();r}': Foo
 +            !4..5 'r': Foo
 +            !18..26 'anything': fn anything<Foo>() -> Foo
 +            !18..28 'anything()': Foo
 +            !29..30 'r': Foo
 +            163..187 '{     ...!(); }': ()
 +            173..175 '_a': Foo
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn issue_6811() {
 +    check_infer(
 +        r#"
 +        macro_rules! profile_function {
 +            () => {
 +                let _a = 1;
 +                let _b = 1;
 +            };
 +        }
 +        fn main() {
 +            profile_function!();
 +        }
 +        "#,
 +        expect![[r#"
 +            !0..16 'let_a=...t_b=1;': ()
 +            !3..5 '_a': i32
 +            !6..7 '1': i32
 +            !11..13 '_b': i32
 +            !14..15 '1': i32
 +            103..131 '{     ...!(); }': ()
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn issue_4053_diesel_where_clauses() {
 +    check_infer(
 +        r#"
 +        trait BoxedDsl<DB> {
 +            type Output;
 +            fn internal_into_boxed(self) -> Self::Output;
 +        }
 +
 +        struct SelectStatement<From, Select, Distinct, Where, Order, LimitOffset, GroupBy, Locking> {
 +            order: Order,
 +        }
 +
 +        trait QueryFragment<DB: Backend> {}
 +
 +        trait Into<T> { fn into(self) -> T; }
 +
 +        impl<F, S, D, W, O, LOf, DB> BoxedDsl<DB>
 +            for SelectStatement<F, S, D, W, O, LOf, G>
 +        where
 +            O: Into<dyn QueryFragment<DB>>,
 +        {
 +            type Output = XXX;
 +
 +            fn internal_into_boxed(self) -> Self::Output {
 +                self.order.into();
 +            }
 +        }
 +        "#,
 +        expect![[r#"
 +            65..69 'self': Self
 +            267..271 'self': Self
 +            466..470 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
 +            488..522 '{     ...     }': ()
 +            498..502 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
 +            498..508 'self.order': O
 +            498..515 'self.o...into()': dyn QueryFragment<DB>
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn issue_4953() {
 +    check_infer(
 +        r#"
 +        pub struct Foo(pub i64);
 +        impl Foo {
 +            fn test() -> Self { Self(0i64) }
 +        }
 +        "#,
 +        expect![[r#"
 +            58..72 '{ Self(0i64) }': Foo
 +            60..64 'Self': Foo(i64) -> Foo
 +            60..70 'Self(0i64)': Foo
 +            65..69 '0i64': i64
 +        "#]],
 +    );
 +    check_infer(
 +        r#"
 +        pub struct Foo<T>(pub T);
 +        impl Foo<i64> {
 +            fn test() -> Self { Self(0i64) }
 +        }
 +        "#,
 +        expect![[r#"
 +            64..78 '{ Self(0i64) }': Foo<i64>
 +            66..70 'Self': Foo<i64>(i64) -> Foo<i64>
 +            66..76 'Self(0i64)': Foo<i64>
 +            71..75 '0i64': i64
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn issue_4931() {
 +    check_infer(
 +        r#"
 +        trait Div<T> {
 +            type Output;
 +        }
 +
 +        trait CheckedDiv: Div<()> {}
 +
 +        trait PrimInt: CheckedDiv<Output = ()> {
 +            fn pow(self);
 +        }
 +
 +        fn check<T: PrimInt>(i: T) {
 +            i.pow();
 +        }
 +        "#,
 +        expect![[r#"
 +            117..121 'self': Self
 +            148..149 'i': T
 +            154..170 '{     ...w(); }': ()
 +            160..161 'i': T
 +            160..167 'i.pow()': ()
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn issue_4885() {
 +    check_infer(
 +        r#"
 +        //- minicore: coerce_unsized, future
 +        use core::future::Future;
 +        trait Foo<R> {
 +            type Bar;
 +        }
 +        fn foo<R, K>(key: &K) -> impl Future<Output = K::Bar>
 +        where
 +            K: Foo<R>,
 +        {
 +            bar(key)
 +        }
 +        fn bar<R, K>(key: &K) -> impl Future<Output = K::Bar>
 +        where
 +            K: Foo<R>,
 +        {
 +        }
 +        "#,
 +        expect![[r#"
 +            70..73 'key': &K
 +            132..148 '{     ...key) }': impl Future<Output = <K as Foo<R>>::Bar>
 +            138..141 'bar': fn bar<R, K>(&K) -> impl Future<Output = <K as Foo<R>>::Bar>
 +            138..146 'bar(key)': impl Future<Output = <K as Foo<R>>::Bar>
 +            142..145 'key': &K
 +            162..165 'key': &K
 +            224..227 '{ }': ()
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn issue_4800() {
 +    check_infer(
 +        r#"
 +        trait Debug {}
 +
 +        struct Foo<T>;
 +
 +        type E1<T> = (T, T, T);
 +        type E2<T> = E1<E1<E1<(T, T, T)>>>;
 +
 +        impl Debug for Foo<E2<()>> {}
 +
 +        struct Request;
 +
 +        pub trait Future {
 +            type Output;
 +        }
 +
 +        pub struct PeerSet<D>;
 +
 +        impl<D> Service<Request> for PeerSet<D>
 +        where
 +            D: Discover,
 +            D::Key: Debug,
 +        {
 +            type Error = ();
 +            type Future = dyn Future<Output = Self::Error>;
 +
 +            fn call(&mut self) -> Self::Future {
 +                loop {}
 +            }
 +        }
 +
 +        pub trait Discover {
 +            type Key;
 +        }
 +
 +        pub trait Service<Request> {
 +            type Error;
 +            type Future: Future<Output = Self::Error>;
 +            fn call(&mut self) -> Self::Future;
 +        }
 +        "#,
 +        expect![[r#"
 +            379..383 'self': &mut PeerSet<D>
 +            401..424 '{     ...     }': dyn Future<Output = ()>
 +            411..418 'loop {}': !
 +            416..418 '{}': ()
 +            575..579 'self': &mut Self
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn issue_4966() {
 +    check_infer(
 +        r#"
 +        //- minicore: deref
 +        pub trait IntoIterator {
 +            type Item;
 +        }
 +
 +        struct Repeat<A> { element: A }
 +
 +        struct Map<F> { f: F }
 +
 +        struct Vec<T> {}
 +
 +        impl<T> core::ops::Deref for Vec<T> {
 +            type Target = [T];
 +        }
 +
 +        fn from_iter<A, T: IntoIterator<Item = A>>(iter: T) -> Vec<A> {}
 +
 +        fn main() {
 +            let inner = Map { f: |_: &f64| 0.0 };
 +
 +            let repeat = Repeat { element: inner };
 +
 +            let vec = from_iter(repeat);
 +
 +            vec.foo_bar();
 +        }
 +        "#,
 +        expect![[r#"
 +            225..229 'iter': T
 +            244..246 '{}': Vec<A>
 +            258..402 '{     ...r(); }': ()
 +            268..273 'inner': Map<|&f64| -> f64>
 +            276..300 'Map { ... 0.0 }': Map<|&f64| -> f64>
 +            285..298 '|_: &f64| 0.0': |&f64| -> f64
 +            286..287 '_': &f64
 +            295..298 '0.0': f64
 +            311..317 'repeat': Repeat<Map<|&f64| -> f64>>
 +            320..345 'Repeat...nner }': Repeat<Map<|&f64| -> f64>>
 +            338..343 'inner': Map<|&f64| -> f64>
 +            356..359 'vec': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
 +            362..371 'from_iter': fn from_iter<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>, Repeat<Map<|&f64| -> f64>>>(Repeat<Map<|&f64| -> f64>>) -> Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
 +            362..379 'from_i...epeat)': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
 +            372..378 'repeat': Repeat<Map<|&f64| -> f64>>
 +            386..389 'vec': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
 +            386..399 'vec.foo_bar()': {unknown}
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn issue_6628() {
 +    check_infer(
 +        r#"
 +//- minicore: fn
 +struct S<T>();
 +impl<T> S<T> {
 +    fn f(&self, _t: T) {}
 +    fn g<F: FnOnce(&T)>(&self, _f: F) {}
 +}
 +fn main() {
 +    let s = S();
 +    s.g(|_x| {});
 +    s.f(10);
 +}
 +"#,
 +        expect![[r#"
 +            40..44 'self': &S<T>
 +            46..48 '_t': T
 +            53..55 '{}': ()
 +            81..85 'self': &S<T>
 +            87..89 '_f': F
 +            94..96 '{}': ()
 +            109..160 '{     ...10); }': ()
 +            119..120 's': S<i32>
 +            123..124 'S': S<i32>() -> S<i32>
 +            123..126 'S()': S<i32>
 +            132..133 's': S<i32>
 +            132..144 's.g(|_x| {})': ()
 +            136..143 '|_x| {}': |&i32| -> ()
 +            137..139 '_x': &i32
 +            141..143 '{}': ()
 +            150..151 's': S<i32>
 +            150..157 's.f(10)': ()
 +            154..156 '10': i32
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn issue_6852() {
 +    check_infer(
 +        r#"
 +//- minicore: deref
 +use core::ops::Deref;
 +
 +struct BufWriter {}
 +
 +struct Mutex<T> {}
 +struct MutexGuard<'a, T> {}
 +impl<T> Mutex<T> {
 +    fn lock(&self) -> MutexGuard<'_, T> {}
 +}
 +impl<'a, T: 'a> Deref for MutexGuard<'a, T> {
 +    type Target = T;
 +}
 +fn flush(&self) {
 +    let w: &Mutex<BufWriter>;
 +    *(w.lock());
 +}
 +"#,
 +        expect![[r#"
 +            123..127 'self': &Mutex<T>
 +            150..152 '{}': MutexGuard<T>
 +            234..238 'self': &{unknown}
 +            240..290 '{     ...()); }': ()
 +            250..251 'w': &Mutex<BufWriter>
 +            276..287 '*(w.lock())': BufWriter
 +            278..279 'w': &Mutex<BufWriter>
 +            278..286 'w.lock()': MutexGuard<BufWriter>
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn param_overrides_fn() {
 +    check_types(
 +        r#"
 +        fn example(example: i32) {
 +            fn f() {}
 +            example;
 +          //^^^^^^^ i32
 +        }
 +        "#,
 +    )
 +}
 +
 +#[test]
 +fn lifetime_from_chalk_during_deref() {
 +    check_types(
 +        r#"
 +//- minicore: deref
 +struct Box<T: ?Sized> {}
 +impl<T: ?Sized> core::ops::Deref for Box<T> {
 +    type Target = T;
 +
 +    fn deref(&self) -> &Self::Target {
 +        loop {}
 +    }
 +}
 +
 +trait Iterator {
 +    type Item;
 +}
 +
 +pub struct Iter<'a, T: 'a> {
 +    inner: Box<dyn IterTrait<'a, T, Item = &'a T> + 'a>,
 +}
 +
 +trait IterTrait<'a, T: 'a>: Iterator<Item = &'a T> {
 +    fn clone_box(&self);
 +}
 +
 +fn clone_iter<T>(s: Iter<T>) {
 +    s.inner.clone_box();
 +  //^^^^^^^^^^^^^^^^^^^ ()
 +}
 +"#,
 +    )
 +}
 +
 +#[test]
 +fn issue_8686() {
 +    check_infer(
 +        r#"
 +pub trait Try: FromResidual {
 +    type Output;
 +    type Residual;
 +}
 +pub trait FromResidual<R = <Self as Try>::Residual> {
 +     fn from_residual(residual: R) -> Self;
 +}
 +
 +struct ControlFlow<B, C>;
 +impl<B, C> Try for ControlFlow<B, C> {
 +    type Output = C;
 +    type Residual = ControlFlow<B, !>;
 +}
 +impl<B, C> FromResidual for ControlFlow<B, C> {
 +    fn from_residual(r: ControlFlow<B, !>) -> Self { ControlFlow }
 +}
 +
 +fn test() {
 +    ControlFlow::from_residual(ControlFlow::<u32, !>);
 +}
 +        "#,
 +        expect![[r#"
 +            144..152 'residual': R
 +            365..366 'r': ControlFlow<B, !>
 +            395..410 '{ ControlFlow }': ControlFlow<B, C>
 +            397..408 'ControlFlow': ControlFlow<B, C>
 +            424..482 '{     ...!>); }': ()
 +            430..456 'Contro...sidual': fn from_residual<ControlFlow<u32, {unknown}>, ControlFlow<u32, !>>(ControlFlow<u32, !>) -> ControlFlow<u32, {unknown}>
 +            430..479 'Contro...2, !>)': ControlFlow<u32, {unknown}>
 +            457..478 'Contro...32, !>': ControlFlow<u32, !>
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn cfg_tail() {
 +    // https://github.com/rust-lang/rust-analyzer/issues/8378
 +    check_infer(
 +        r#"
 +        fn fake_tail(){
 +            { "first" }
 +            #[cfg(never)] 9
 +        }
 +        fn multiple_fake(){
 +            { "fake" }
 +            { "fake" }
 +            { "second" }
 +            #[cfg(never)] { 11 }
 +            #[cfg(never)] 12;
 +            #[cfg(never)] 13
 +        }
 +        fn no_normal_tail(){
 +            { "third" }
 +            #[cfg(never)] 14;
 +            #[cfg(never)] 15;
 +        }
 +        fn no_actual_tail(){
 +            { "fourth" };
 +            #[cfg(never)] 14;
 +            #[cfg(never)] 15
 +        }
 +        "#,
 +        expect![[r#"
 +            14..53 '{     ...)] 9 }': ()
 +            20..31 '{ "first" }': ()
 +            22..29 '"first"': &str
 +            72..190 '{     ...] 13 }': ()
 +            78..88 '{ "fake" }': &str
 +            80..86 '"fake"': &str
 +            93..103 '{ "fake" }': &str
 +            95..101 '"fake"': &str
 +            108..120 '{ "second" }': ()
 +            110..118 '"second"': &str
 +            210..273 '{     ... 15; }': ()
 +            216..227 '{ "third" }': ()
 +            218..225 '"third"': &str
 +            293..357 '{     ...] 15 }': ()
 +            299..311 '{ "fourth" }': &str
 +            301..309 '"fourth"': &str
 +        "#]],
 +    )
 +}
 +
 +#[test]
 +fn impl_trait_in_option_9530() {
 +    check_types(
 +        r#"
 +//- minicore: sized
 +struct Option<T>;
 +impl<T> Option<T> {
 +    fn unwrap(self) -> T { loop {} }
 +}
 +fn make() -> Option<impl Copy> { Option }
 +trait Copy {}
 +fn test() {
 +    let o = make();
 +    o.unwrap();
 +  //^^^^^^^^^^ impl Copy
 +}
 +        "#,
 +    )
 +}
 +
 +#[test]
 +fn bare_dyn_trait_binders_9639() {
 +    check_no_mismatches(
 +        r#"
 +//- minicore: fn, coerce_unsized
 +fn infix_parse<T, S>(_state: S, _level_code: &Fn(S)) -> T {
 +    loop {}
 +}
 +
 +fn parse_arule() {
 +    infix_parse((), &(|_recurse| ()))
 +}
 +        "#,
 +    )
 +}
 +
 +#[test]
 +fn call_expected_type_closure() {
 +    check_types(
 +        r#"
 +//- minicore: fn, option
 +
 +fn map<T, U>(o: Option<T>, f: impl FnOnce(T) -> U) -> Option<U> { loop {} }
 +struct S {
 +    field: u32
 +}
 +
 +fn test() {
 +    let o = Some(S { field: 2 });
 +    let _: Option<()> = map(o, |s| { s.field; });
 +                                  // ^^^^^^^ u32
 +}
 +        "#,
 +    );
 +}
 +
 +#[test]
 +fn coerce_diesel_panic() {
 +    check_no_mismatches(
 +        r#"
 +//- minicore: option
 +
 +trait TypeMetadata {
 +    type MetadataLookup;
 +}
 +
 +pub struct Output<'a, T, DB>
 +where
 +    DB: TypeMetadata,
 +    DB::MetadataLookup: 'a,
 +{
 +    out: T,
 +    metadata_lookup: Option<&'a DB::MetadataLookup>,
 +}
 +
 +impl<'a, T, DB: TypeMetadata> Output<'a, T, DB> {
 +    pub fn new(out: T, metadata_lookup: &'a DB::MetadataLookup) -> Self {
 +        Output {
 +            out,
 +            metadata_lookup: Some(metadata_lookup),
 +        }
 +    }
 +}
 +        "#,
 +    );
 +}
 +
 +#[test]
 +fn bitslice_panic() {
 +    check_no_mismatches(
 +        r#"
 +//- minicore: option, deref
 +
 +pub trait BitView {
 +    type Store;
 +}
 +
 +pub struct Lsb0;
 +
 +pub struct BitArray<V: BitView> { }
 +
 +pub struct BitSlice<T> { }
 +
 +impl<V: BitView> core::ops::Deref for BitArray<V> {
 +    type Target = BitSlice<V::Store>;
 +}
 +
 +impl<T> BitSlice<T> {
 +    pub fn split_first(&self) -> Option<(T, &Self)> { loop {} }
 +}
 +
 +fn multiexp_inner() {
 +    let exp: &BitArray<Foo>;
 +    exp.split_first();
 +}
 +        "#,
 +    );
 +}
 +
 +#[test]
 +fn macro_expands_to_impl_trait() {
 +    check_no_mismatches(
 +        r#"
 +trait Foo {}
 +
 +macro_rules! ty {
 +    () => {
 +        impl Foo
 +    }
 +}
 +
 +fn foo(_: ty!()) {}
 +
 +fn bar() {
 +    foo(());
 +}
 +    "#,
 +    )
 +}
 +
 +#[test]
 +fn nested_macro_in_fn_params() {
 +    check_no_mismatches(
 +        r#"
 +macro_rules! U32Inner {
 +    () => {
 +        u32
 +    };
 +}
 +
 +macro_rules! U32 {
 +    () => {
 +        U32Inner!()
 +    };
 +}
 +
 +fn mamba(a: U32!(), p: u32) -> u32 {
 +    a
 +}
 +    "#,
 +    )
 +}
 +
 +#[test]
 +fn for_loop_block_expr_iterable() {
 +    check_infer(
 +        r#"
 +fn test() {
 +    for _ in { let x = 0; } {
 +        let y = 0;
 +    }
 +}
 +        "#,
 +        expect![[r#"
 +            10..68 '{     ...   } }': ()
 +            16..66 'for _ ...     }': ()
 +            20..21 '_': {unknown}
 +            25..39 '{ let x = 0; }': ()
 +            31..32 'x': i32
 +            35..36 '0': i32
 +            40..66 '{     ...     }': ()
 +            54..55 'y': i32
 +            58..59 '0': i32
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn while_loop_block_expr_iterable() {
 +    check_infer(
 +        r#"
 +fn test() {
 +    while { true } {
 +        let y = 0;
 +    }
 +}
 +        "#,
 +        expect![[r#"
 +            10..59 '{     ...   } }': ()
 +            16..57 'while ...     }': ()
 +            22..30 '{ true }': bool
 +            24..28 'true': bool
 +            31..57 '{     ...     }': ()
 +            45..46 'y': i32
 +            49..50 '0': i32
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn bug_11242() {
 +    // FIXME: wrong, should be u32
 +    check_types(
 +        r#"
 +fn foo<A, B>()
 +where
 +    A: IntoIterator<Item = u32>,
 +    B: IntoIterator<Item = usize>,
 +{
 +    let _x: <A as IntoIterator>::Item;
 +     // ^^ {unknown}
 +}
 +
 +pub trait Iterator {
 +    type Item;
 +}
 +
 +pub trait IntoIterator {
 +    type Item;
 +    type IntoIter: Iterator<Item = Self::Item>;
 +}
 +
 +impl<I: Iterator> IntoIterator for I {
 +    type Item = I::Item;
 +    type IntoIter = I;
 +}
 +"#,
 +    );
 +}
 +
 +#[test]
 +fn bug_11659() {
 +    check_no_mismatches(
 +        r#"
 +struct LinkArray<const N: usize, LD>(LD);
 +fn f<const N: usize, LD>(x: LD) -> LinkArray<N, LD> {
 +    let r = LinkArray::<N, LD>(x);
 +    r
 +}
 +
 +fn test() {
 +    let x = f::<2, i32>(5);
 +    let y = LinkArray::<52, LinkArray<2, i32>>(x);
 +}
 +        "#,
 +    );
 +    check_no_mismatches(
 +        r#"
 +struct LinkArray<LD, const N: usize>(LD);
 +fn f<const N: usize, LD>(x: LD) -> LinkArray<LD, N> {
 +    let r = LinkArray::<LD, N>(x);
 +    r
 +}
 +
 +fn test() {
 +    let x = f::<i32, 2>(5);
 +    let y = LinkArray::<LinkArray<i32, 2>, 52>(x);
 +}
 +        "#,
 +    );
 +}
 +
 +#[test]
 +fn const_generic_error_tolerance() {
 +    check_no_mismatches(
 +        r#"
 +#[lang = "sized"]
 +pub trait Sized {}
 +
 +struct CT<const N: usize, T>(T);
 +struct TC<T, const N: usize>(T);
 +fn f<const N: usize, T>(x: T) -> (CT<N, T>, TC<T, N>) {
 +    let l = CT::<N, T>(x);
 +    let r = TC::<N, T>(x);
 +    (l, r)
 +}
 +
 +trait TR1<const N: usize>;
 +trait TR2<const N: usize>;
 +
 +impl<const N: usize, T> TR1<N> for CT<N, T>;
 +impl<const N: usize, T> TR1<5> for TC<T, N>;
 +impl<const N: usize, T> TR2<N> for CT<T, N>;
 +
 +trait TR3<const N: usize> {
 +    fn tr3(&self) -> &Self;
 +}
 +
 +impl<const N: usize, T> TR3<5> for TC<T, N> {
 +    fn tr3(&self) -> &Self {
 +        self
 +    }
 +}
 +
 +impl<const N: usize, T> TR3<Item = 5> for TC<T, N> {}
 +impl<const N: usize, T> TR3<T> for TC<T, N> {}
 +
 +fn impl_trait<const N: usize>(inp: impl TR1<N>) {}
 +fn dyn_trait<const N: usize>(inp: &dyn TR2<N>) {}
 +fn impl_trait_bad<'a, const N: usize>(inp: impl TR1<i32>) -> impl TR1<'a, i32> {}
 +fn impl_trait_very_bad<const N: usize>(inp: impl TR1<Item = i32>) -> impl TR1<'a, Item = i32, 5, Foo = N> {}
 +
 +fn test() {
 +    f::<2, i32>(5);
 +    f::<2, 2>(5);
 +    f(5);
 +    f::<i32>(5);
 +    CT::<52, CT<2, i32>>(x);
 +    CT::<CT<2, i32>>(x);
 +    impl_trait_bad(5);
 +    impl_trait_bad(12);
 +    TR3<5>::tr3();
 +    TR3<{ 2+3 }>::tr3();
 +    TC::<i32, 10>(5).tr3();
 +    TC::<i32, 20>(5).tr3();
 +    TC::<i32, i32>(5).tr3();
 +    TC::<i32, { 7 + 3 }>(5).tr3();
 +}
 +        "#,
 +    );
 +}
 +
 +#[test]
 +fn const_generic_impl_trait() {
 +    check_no_mismatches(
 +        r#"
 +        //- minicore: from
 +
 +        struct Foo<T, const M: usize>;
 +
 +        trait Tr<T> {
 +            fn f(T) -> Self;
 +        }
 +
 +        impl<T, const M: usize> Tr<[T; M]> for Foo<T, M> {
 +            fn f(_: [T; M]) -> Self {
 +                Self
 +            }
 +        }
 +
 +        fn test() {
 +            Foo::f([1, 2, 7, 10]);
 +        }
 +        "#,
 +    );
 +}
 +
 +#[test]
 +fn nalgebra_factorial() {
 +    check_no_mismatches(
 +        r#"
 +        const FACTORIAL: [u128; 4] = [1, 1, 2, 6];
 +
 +        fn factorial(n: usize) -> u128 {
 +            match FACTORIAL.get(n) {
 +                Some(f) => *f,
 +                None => panic!("{}! is greater than u128::MAX", n),
 +            }
 +        }
 +        "#,
 +    )
 +}
 +
 +#[test]
 +fn regression_11688_1() {
 +    check_no_mismatches(
 +        r#"
 +        pub struct Buffer<T>(T);
 +        type Writer = Buffer<u8>;
 +        impl<T> Buffer<T> {
 +            fn extend_from_array<const N: usize>(&mut self, xs: &[T; N]) {
 +                loop {}
 +            }
 +        }
 +        trait Encode<S> {
 +            fn encode(self, w: &mut Writer, s: &mut S);
 +        }
 +        impl<S> Encode<S> for u8 {
 +            fn encode(self, w: &mut Writer, _: &mut S) {
 +                w.extend_from_array(&self.to_le_bytes());
 +            }
 +        }
 +        "#,
 +    );
 +}
 +
 +#[test]
 +fn regression_11688_2() {
 +    check_types(
 +        r#"
 +        union MaybeUninit<T> {
 +            uninit: (),
 +            value: T,
 +        }
 +
 +        impl<T> MaybeUninit<T> {
 +            fn uninit_array<const LEN: usize>() -> [Self; LEN] {
 +                loop {}
 +            }
 +        }
 +
 +        fn main() {
 +            let x = MaybeUninit::<i32>::uninit_array::<1>();
 +              //^ [MaybeUninit<i32>; 1]
 +        }
 +        "#,
 +    );
 +}
 +
 +#[test]
 +fn regression_11688_3() {
 +    check_types(
 +        r#"
 +        //- minicore: iterator
 +        struct Ar<T, const N: u8>(T);
 +        fn f<const LEN: usize, T, const BASE: u8>(
 +            num_zeros: usize,
 +        ) -> dyn Iterator<Item = [Ar<T, BASE>; LEN]> {
 +            loop {}
 +        }
 +        fn dynamic_programming() {
 +            for board in f::<9, u8, 7>(1) {
 +              //^^^^^ [Ar<u8, 7>; 9]
 +            }
 +        }
 +        "#,
 +    );
 +}
 +
 +#[test]
 +fn regression_11688_4() {
 +    check_types(
 +        r#"
 +        trait Bar<const C: usize> {
 +            fn baz(&self) -> [i32; C];
 +        }
 +
 +        fn foo(x: &dyn Bar<2>) {
 +            x.baz();
 +          //^^^^^^^ [i32; 2]
 +        }
 +        "#,
 +    )
 +}
 +
 +#[test]
 +fn gat_crash_1() {
 +    cov_mark::check!(ignore_gats);
 +    check_no_mismatches(
 +        r#"
 +trait ATrait {}
 +
 +trait Crash {
 +    type Member<const N: usize>: ATrait;
 +    fn new<const N: usize>() -> Self::Member<N>;
 +}
 +
 +fn test<T: Crash>() {
 +    T::new();
 +}
 +"#,
 +    );
 +}
 +
 +#[test]
 +fn gat_crash_2() {
 +    check_no_mismatches(
 +        r#"
 +pub struct InlineStorage {}
 +
 +pub struct InlineStorageHandle<T: ?Sized> {}
 +
 +pub unsafe trait Storage {
 +    type Handle<T: ?Sized>;
 +    fn create<T: ?Sized>() -> Self::Handle<T>;
 +}
 +
 +unsafe impl Storage for InlineStorage {
 +    type Handle<T: ?Sized> = InlineStorageHandle<T>;
 +}
 +"#,
 +    );
 +}
 +
++#[test]
++fn gat_crash_3() {
++    // FIXME: This test currently crashes rust-analyzer in a debug build but not in a
++    // release build (i.e. for the user). With the assumption that tests will always be run
++    // in debug mode, we catch the unwind and expect that it panicked. See the
++    // [`crate::utils::generics`] function for more information.
++    cov_mark::check!(ignore_gats);
++    std::panic::catch_unwind(|| {
++        check_no_mismatches(
++            r#"
++trait Collection {
++    type Item;
++    type Member<T>: Collection<Item = T>;
++    fn add(&mut self, value: Self::Item) -> Result<(), Self::Error>;
++}
++struct ConstGen<T, const N: usize> {
++    data: [T; N],
++}
++impl<T, const N: usize> Collection for ConstGen<T, N> {
++    type Item = T;
++    type Member<U> = ConstGen<U, N>;
++}
++        "#,
++        );
++    })
++    .expect_err("must panic");
++}
++
 +#[test]
 +fn cfgd_out_self_param() {
 +    cov_mark::check!(cfgd_out_self_param);
 +    check_no_mismatches(
 +        r#"
 +struct S;
 +impl S {
 +    fn f(#[cfg(never)] &self) {}
 +}
 +
 +fn f(s: S) {
 +    s.f();
 +}
 +"#,
 +    );
 +}
 +
 +#[test]
 +fn rust_161_option_clone() {
 +    check_types(
 +        r#"
 +//- minicore: option, drop
 +
 +fn test(o: &Option<i32>) {
 +    o.my_clone();
 +  //^^^^^^^^^^^^ Option<i32>
 +}
 +
 +pub trait MyClone: Sized {
 +    fn my_clone(&self) -> Self;
 +}
 +
 +impl<T> const MyClone for Option<T>
 +where
 +    T: ~const MyClone + ~const Drop + ~const Destruct,
 +{
 +    fn my_clone(&self) -> Self {
 +        match self {
 +            Some(x) => Some(x.my_clone()),
 +            None => None,
 +        }
 +    }
 +}
 +
 +impl const MyClone for i32 {
 +    fn my_clone(&self) -> Self {
 +        *self
 +    }
 +}
 +
 +pub trait Destruct {}
 +
 +impl<T: ?Sized> const Destruct for T {}
 +"#,
 +    );
 +}
 +
 +#[test]
 +fn rust_162_option_clone() {
 +    check_types(
 +        r#"
 +//- minicore: option, drop
 +
 +fn test(o: &Option<i32>) {
 +    o.my_clone();
 +  //^^^^^^^^^^^^ Option<i32>
 +}
 +
 +pub trait MyClone: Sized {
 +    fn my_clone(&self) -> Self;
 +}
 +
 +impl<T> const MyClone for Option<T>
 +where
 +    T: ~const MyClone + ~const Destruct,
 +{
 +    fn my_clone(&self) -> Self {
 +        match self {
 +            Some(x) => Some(x.my_clone()),
 +            None => None,
 +        }
 +    }
 +}
 +
 +impl const MyClone for i32 {
 +    fn my_clone(&self) -> Self {
 +        *self
 +    }
 +}
 +
 +#[lang = "destruct"]
 +pub trait Destruct {}
 +"#,
 +    );
 +}
 +
 +#[test]
 +fn tuple_struct_pattern_with_unmatched_args_crash() {
 +    check_infer(
 +        r#"
 +struct S(usize);
 +fn main() {
 +    let S(.., a, b) = S(1);
 +    let (.., a, b) = (1,);
 +}
 +        "#,
 +        expect![[r#"
 +        27..85 '{     ...1,); }': ()
 +        37..48 'S(.., a, b)': S
 +        43..44 'a': usize
 +        46..47 'b': {unknown}
 +        51..52 'S': S(usize) -> S
 +        51..55 'S(1)': S
 +        53..54 '1': usize
 +        65..75 '(.., a, b)': (i32, {unknown})
 +        70..71 'a': i32
 +        73..74 'b': {unknown}
 +        78..82 '(1,)': (i32,)
 +        79..80 '1': i32
 +        "#]],
 +    );
 +}
 +
 +#[test]
 +fn trailing_empty_macro() {
 +    cov_mark::check!(empty_macro_in_trailing_position_is_removed);
 +    check_no_mismatches(
 +        r#"
 +macro_rules! m2 {
 +    ($($t:tt)*) => {$($t)*};
 +}
 +
 +fn macrostmts() -> u8 {
 +    m2! { 0 }
 +    m2! {}
 +}
 +    "#,
 +    );
 +}
index 83319755da73aa5fbce84053edf06cbbcd9a7e8f,0000000000000000000000000000000000000000..d6638db02851183c5f080dc8cfc3fed5503ae76c
mode 100644,000000..100644
--- /dev/null
@@@ -1,408 -1,0 +1,413 @@@
-         return if has_consts {
-             // XXX: treat const generic associated types as not existing to avoid crashes (#11769)
 +//! Helper functions for working with def, which don't need to be a separate
 +//! query, but can't be computed directly from `*Data` (ie, which need a `db`).
 +
 +use std::iter;
 +
 +use base_db::CrateId;
 +use chalk_ir::{fold::Shift, BoundVar, DebruijnIndex};
 +use hir_def::{
 +    db::DefDatabase,
 +    generics::{
 +        GenericParams, TypeOrConstParamData, TypeParamProvenance, WherePredicate,
 +        WherePredicateTypeTarget,
 +    },
 +    intern::Interned,
 +    resolver::{HasResolver, TypeNs},
 +    type_ref::{TraitBoundModifier, TypeRef},
 +    ConstParamId, FunctionId, GenericDefId, ItemContainerId, Lookup, TraitId, TypeAliasId,
 +    TypeOrConstParamId, TypeParamId,
 +};
 +use hir_expand::name::{known, Name};
 +use itertools::Either;
 +use rustc_hash::FxHashSet;
 +use smallvec::{smallvec, SmallVec};
 +use syntax::SmolStr;
 +
 +use crate::{
 +    db::HirDatabase, ChalkTraitId, ConstData, ConstValue, GenericArgData, Interner, Substitution,
 +    TraitRef, TraitRefExt, TyKind, WhereClause,
 +};
 +
 +pub(crate) fn fn_traits(db: &dyn DefDatabase, krate: CrateId) -> impl Iterator<Item = TraitId> {
 +    [
 +        db.lang_item(krate, SmolStr::new_inline("fn")),
 +        db.lang_item(krate, SmolStr::new_inline("fn_mut")),
 +        db.lang_item(krate, SmolStr::new_inline("fn_once")),
 +    ]
 +    .into_iter()
 +    .flatten()
 +    .flat_map(|it| it.as_trait())
 +}
 +
 +fn direct_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> SmallVec<[TraitId; 4]> {
 +    let resolver = trait_.resolver(db);
 +    // returning the iterator directly doesn't easily work because of
 +    // lifetime problems, but since there usually shouldn't be more than a
  +    // few direct traits this should be fine (and we already collect into a
  +    // SmallVec to keep the common case allocation-free)
 +    let generic_params = db.generic_params(trait_.into());
 +    let trait_self = generic_params.find_trait_self_param();
 +    generic_params
 +        .where_predicates
 +        .iter()
 +        .filter_map(|pred| match pred {
 +            WherePredicate::ForLifetime { target, bound, .. }
 +            | WherePredicate::TypeBound { target, bound } => {
 +                let is_trait = match target {
 +                    WherePredicateTypeTarget::TypeRef(type_ref) => match &**type_ref {
 +                        TypeRef::Path(p) => p.is_self_type(),
 +                        _ => false,
 +                    },
 +                    WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
 +                        Some(*local_id) == trait_self
 +                    }
 +                };
 +                match is_trait {
 +                    true => bound.as_path(),
 +                    false => None,
 +                }
 +            }
 +            WherePredicate::Lifetime { .. } => None,
 +        })
 +        .filter(|(_, bound_modifier)| matches!(bound_modifier, TraitBoundModifier::None))
 +        .filter_map(|(path, _)| match resolver.resolve_path_in_type_ns_fully(db, path.mod_path()) {
 +            Some(TypeNs::TraitId(t)) => Some(t),
 +            _ => None,
 +        })
 +        .collect()
 +}
 +
 +fn direct_super_trait_refs(db: &dyn HirDatabase, trait_ref: &TraitRef) -> Vec<TraitRef> {
 +    // returning the iterator directly doesn't easily work because of
 +    // lifetime problems, but since there usually shouldn't be more than a
 +    // few direct traits this should be fine (we could even use some kind of
 +    // SmallVec if performance is a concern)
 +    let generic_params = db.generic_params(trait_ref.hir_trait_id().into());
 +    let trait_self = match generic_params.find_trait_self_param() {
 +        Some(p) => TypeOrConstParamId { parent: trait_ref.hir_trait_id().into(), local_id: p },
 +        None => return Vec::new(),
 +    };
 +    db.generic_predicates_for_param(trait_self.parent, trait_self, None)
 +        .iter()
 +        .filter_map(|pred| {
 +            pred.as_ref().filter_map(|pred| match pred.skip_binders() {
 +                // FIXME: how to correctly handle higher-ranked bounds here?
 +                WhereClause::Implemented(tr) => Some(
 +                    tr.clone()
 +                        .shifted_out_to(Interner, DebruijnIndex::ONE)
 +                        .expect("FIXME unexpected higher-ranked trait bound"),
 +                ),
 +                _ => None,
 +            })
 +        })
 +        .map(|pred| pred.substitute(Interner, &trait_ref.substitution))
 +        .collect()
 +}
 +
 +/// Returns an iterator over the whole super trait hierarchy (including the
 +/// trait itself).
 +pub fn all_super_traits(db: &dyn DefDatabase, trait_: TraitId) -> SmallVec<[TraitId; 4]> {
 +    // we need to take care a bit here to avoid infinite loops in case of cycles
 +    // (i.e. if we have `trait A: B; trait B: A;`)
 +
 +    let mut result = smallvec![trait_];
 +    let mut i = 0;
 +    while let Some(&t) = result.get(i) {
 +        // yeah this is quadratic, but trait hierarchies should be flat
 +        // enough that this doesn't matter
 +        for tt in direct_super_traits(db, t) {
 +            if !result.contains(&tt) {
 +                result.push(tt);
 +            }
 +        }
 +        i += 1;
 +    }
 +    result
 +}
 +
 +/// Given a trait ref (`Self: Trait`), builds all the implied trait refs for
  +/// super traits. The original trait ref will be included. So the difference from
 +/// `all_super_traits` is that we keep track of type parameters; for example if
 +/// we have `Self: Trait<u32, i32>` and `Trait<T, U>: OtherTrait<U>` we'll get
 +/// `Self: OtherTrait<i32>`.
 +pub(super) fn all_super_trait_refs(db: &dyn HirDatabase, trait_ref: TraitRef) -> SuperTraits<'_> {
 +    SuperTraits { db, seen: iter::once(trait_ref.trait_id).collect(), stack: vec![trait_ref] }
 +}
 +
 +pub(super) struct SuperTraits<'a> {
 +    db: &'a dyn HirDatabase,
 +    stack: Vec<TraitRef>,
 +    seen: FxHashSet<ChalkTraitId>,
 +}
 +
 +impl<'a> SuperTraits<'a> {
 +    fn elaborate(&mut self, trait_ref: &TraitRef) {
 +        let mut trait_refs = direct_super_trait_refs(self.db, trait_ref);
 +        trait_refs.retain(|tr| !self.seen.contains(&tr.trait_id));
 +        self.stack.extend(trait_refs);
 +    }
 +}
 +
 +impl<'a> Iterator for SuperTraits<'a> {
 +    type Item = TraitRef;
 +
 +    fn next(&mut self) -> Option<Self::Item> {
 +        if let Some(next) = self.stack.pop() {
 +            self.elaborate(&next);
 +            Some(next)
 +        } else {
 +            None
 +        }
 +    }
 +}
 +
 +pub(super) fn associated_type_by_name_including_super_traits(
 +    db: &dyn HirDatabase,
 +    trait_ref: TraitRef,
 +    name: &Name,
 +) -> Option<(TraitRef, TypeAliasId)> {
 +    all_super_trait_refs(db, trait_ref).find_map(|t| {
 +        let assoc_type = db.trait_data(t.hir_trait_id()).associated_type_by_name(name)?;
 +        Some((t, assoc_type))
 +    })
 +}
 +
 +pub(crate) fn generics(db: &dyn DefDatabase, def: GenericDefId) -> Generics {
 +    let parent_generics = parent_generic_def(db, def).map(|def| Box::new(generics(db, def)));
 +    if parent_generics.is_some() && matches!(def, GenericDefId::TypeAliasId(_)) {
 +        let params = db.generic_params(def);
++        let parent_params = &parent_generics.as_ref().unwrap().params;
 +        let has_consts =
 +            params.iter().any(|(_, x)| matches!(x, TypeOrConstParamData::ConstParamData(_)));
-             let (idx, (_local_id, data)) = self
-                 .params
-                 .iter()
-                 .enumerate()
-                 .find(|(_, (idx, _))| *idx == param.local_id)
-                 .unwrap();
++        let parent_has_consts =
++            parent_params.iter().any(|(_, x)| matches!(x, TypeOrConstParamData::ConstParamData(_)));
++        return if has_consts || parent_has_consts {
++            // XXX: treat const generic associated types as not existing to avoid crashes
++            // (#11769)
++            //
++            // Note: Also crashes when the parent has const generics (even if the GAT
++            // doesn't use them); see `tests::regression::gat_crash_3` for an example.
++            // Avoids that by disabling GATs when the parent (i.e. `impl` block) has
++            // const generics (#12193).
 +            //
 +            // Chalk expects the inner associated type's parameters to come
 +            // *before*, not after the trait's generics as we've always done it.
 +            // Adapting to this requires a larger refactoring
 +            cov_mark::hit!(ignore_gats);
 +            Generics { def, params: Interned::new(Default::default()), parent_generics }
 +        } else {
 +            Generics { def, params, parent_generics }
 +        };
 +    }
 +    Generics { def, params: db.generic_params(def), parent_generics }
 +}
 +
 +#[derive(Debug)]
 +pub(crate) struct Generics {
 +    def: GenericDefId,
 +    pub(crate) params: Interned<GenericParams>,
 +    parent_generics: Option<Box<Generics>>,
 +}
 +
 +impl Generics {
 +    pub(crate) fn iter_id<'a>(
 +        &'a self,
 +    ) -> impl Iterator<Item = Either<TypeParamId, ConstParamId>> + 'a {
 +        self.iter().map(|(id, data)| match data {
 +            TypeOrConstParamData::TypeParamData(_) => Either::Left(TypeParamId::from_unchecked(id)),
 +            TypeOrConstParamData::ConstParamData(_) => {
 +                Either::Right(ConstParamId::from_unchecked(id))
 +            }
 +        })
 +    }
 +
 +    /// Iterator over types and const params of parent, then self.
 +    pub(crate) fn iter<'a>(
 +        &'a self,
 +    ) -> impl DoubleEndedIterator<Item = (TypeOrConstParamId, &'a TypeOrConstParamData)> + 'a {
 +        let to_toc_id = |it: &'a Generics| {
 +            move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p)
 +        };
 +        self.parent_generics()
 +            .into_iter()
 +            .flat_map(move |it| it.params.iter().map(to_toc_id(it)))
 +            .chain(self.params.iter().map(to_toc_id(self)))
 +    }
 +
 +    /// Iterator over types and const params of parent.
 +    pub(crate) fn iter_parent<'a>(
 +        &'a self,
 +    ) -> impl Iterator<Item = (TypeOrConstParamId, &'a TypeOrConstParamData)> + 'a {
 +        self.parent_generics().into_iter().flat_map(|it| {
 +            let to_toc_id =
 +                move |(local_id, p)| (TypeOrConstParamId { parent: it.def, local_id }, p);
 +            it.params.iter().map(to_toc_id)
 +        })
 +    }
 +
 +    pub(crate) fn len(&self) -> usize {
 +        let parent = self.parent_generics().map_or(0, Generics::len);
 +        let child = self.params.type_or_consts.len();
 +        parent + child
 +    }
 +
 +    /// (parent total, self param, type param list, const param list, impl trait)
 +    pub(crate) fn provenance_split(&self) -> (usize, usize, usize, usize, usize) {
 +        let ty_iter = || self.params.iter().filter_map(|x| x.1.type_param());
 +
 +        let self_params =
 +            ty_iter().filter(|p| p.provenance == TypeParamProvenance::TraitSelf).count();
 +        let type_params =
 +            ty_iter().filter(|p| p.provenance == TypeParamProvenance::TypeParamList).count();
 +        let impl_trait_params =
 +            ty_iter().filter(|p| p.provenance == TypeParamProvenance::ArgumentImplTrait).count();
 +        let const_params = self.params.iter().filter_map(|x| x.1.const_param()).count();
 +
 +        let parent_len = self.parent_generics().map_or(0, Generics::len);
 +        (parent_len, self_params, type_params, const_params, impl_trait_params)
 +    }
 +
 +    pub(crate) fn param_idx(&self, param: TypeOrConstParamId) -> Option<usize> {
 +        Some(self.find_param(param)?.0)
 +    }
 +
 +    fn find_param(&self, param: TypeOrConstParamId) -> Option<(usize, &TypeOrConstParamData)> {
 +        if param.parent == self.def {
++            let (idx, (_local_id, data)) =
++                self.params.iter().enumerate().find(|(_, (idx, _))| *idx == param.local_id)?;
 +            let parent_len = self.parent_generics().map_or(0, Generics::len);
 +            Some((parent_len + idx, data))
 +        } else {
 +            self.parent_generics().and_then(|g| g.find_param(param))
 +        }
 +    }
 +
 +    fn parent_generics(&self) -> Option<&Generics> {
 +        self.parent_generics.as_ref().map(|it| &**it)
 +    }
 +
 +    /// Returns a Substitution that replaces each parameter by a bound variable.
 +    pub(crate) fn bound_vars_subst(
 +        &self,
 +        db: &dyn HirDatabase,
 +        debruijn: DebruijnIndex,
 +    ) -> Substitution {
 +        Substitution::from_iter(
 +            Interner,
 +            self.iter_id().enumerate().map(|(idx, id)| match id {
 +                Either::Left(_) => GenericArgData::Ty(
 +                    TyKind::BoundVar(BoundVar::new(debruijn, idx)).intern(Interner),
 +                )
 +                .intern(Interner),
 +                Either::Right(id) => GenericArgData::Const(
 +                    ConstData {
 +                        value: ConstValue::BoundVar(BoundVar::new(debruijn, idx)),
 +                        ty: db.const_param_ty(id),
 +                    }
 +                    .intern(Interner),
 +                )
 +                .intern(Interner),
 +            }),
 +        )
 +    }
 +
 +    /// Returns a Substitution that replaces each parameter by itself (i.e. `Ty::Param`).
 +    pub(crate) fn placeholder_subst(&self, db: &dyn HirDatabase) -> Substitution {
 +        Substitution::from_iter(
 +            Interner,
 +            self.iter_id().map(|id| match id {
 +                Either::Left(id) => GenericArgData::Ty(
 +                    TyKind::Placeholder(crate::to_placeholder_idx(db, id.into())).intern(Interner),
 +                )
 +                .intern(Interner),
 +                Either::Right(id) => GenericArgData::Const(
 +                    ConstData {
 +                        value: ConstValue::Placeholder(crate::to_placeholder_idx(db, id.into())),
 +                        ty: db.const_param_ty(id),
 +                    }
 +                    .intern(Interner),
 +                )
 +                .intern(Interner),
 +            }),
 +        )
 +    }
 +}
 +
 +fn parent_generic_def(db: &dyn DefDatabase, def: GenericDefId) -> Option<GenericDefId> {
 +    let container = match def {
 +        GenericDefId::FunctionId(it) => it.lookup(db).container,
 +        GenericDefId::TypeAliasId(it) => it.lookup(db).container,
 +        GenericDefId::ConstId(it) => it.lookup(db).container,
 +        GenericDefId::EnumVariantId(it) => return Some(it.parent.into()),
 +        GenericDefId::AdtId(_) | GenericDefId::TraitId(_) | GenericDefId::ImplId(_) => return None,
 +    };
 +
 +    match container {
 +        ItemContainerId::ImplId(it) => Some(it.into()),
 +        ItemContainerId::TraitId(it) => Some(it.into()),
 +        ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
 +    }
 +}
 +
 +pub fn is_fn_unsafe_to_call(db: &dyn HirDatabase, func: FunctionId) -> bool {
 +    let data = db.function_data(func);
 +    if data.has_unsafe_kw() {
 +        return true;
 +    }
 +
 +    match func.lookup(db.upcast()).container {
 +        hir_def::ItemContainerId::ExternBlockId(block) => {
 +            // Function in an `extern` block are always unsafe to call, except when it has
 +            // `"rust-intrinsic"` ABI there are a few exceptions.
 +            let id = block.lookup(db.upcast()).id;
 +            !matches!(
 +                id.item_tree(db.upcast())[id.value].abi.as_deref(),
 +                Some("rust-intrinsic") if !is_intrinsic_fn_unsafe(&data.name)
 +            )
 +        }
 +        _ => false,
 +    }
 +}
 +
 +/// Returns `true` if the given intrinsic is unsafe to call, or false otherwise.
 +fn is_intrinsic_fn_unsafe(name: &Name) -> bool {
 +    // Should be kept in sync with https://github.com/rust-lang/rust/blob/532d2b14c05f9bc20b2d27cbb5f4550d28343a36/compiler/rustc_typeck/src/check/intrinsic.rs#L72-L106
 +    ![
 +        known::abort,
 +        known::add_with_overflow,
 +        known::bitreverse,
 +        known::black_box,
 +        known::bswap,
 +        known::caller_location,
 +        known::ctlz,
 +        known::ctpop,
 +        known::cttz,
 +        known::discriminant_value,
 +        known::forget,
 +        known::likely,
 +        known::maxnumf32,
 +        known::maxnumf64,
 +        known::min_align_of,
 +        known::minnumf32,
 +        known::minnumf64,
 +        known::mul_with_overflow,
 +        known::needs_drop,
 +        known::ptr_guaranteed_eq,
 +        known::ptr_guaranteed_ne,
 +        known::rotate_left,
 +        known::rotate_right,
 +        known::rustc_peek,
 +        known::saturating_add,
 +        known::saturating_sub,
 +        known::size_of,
 +        known::sub_with_overflow,
 +        known::type_id,
 +        known::type_name,
 +        known::unlikely,
 +        known::variant_count,
 +        known::wrapping_add,
 +        known::wrapping_mul,
 +        known::wrapping_sub,
 +    ]
 +    .contains(name)
 +}
index aa019ca48381adc9230359094bdf6e69f23701f9,0000000000000000000000000000000000000000..6dccf2ed20b8e8e6dc00534678e42e7a558de526
mode 100644,000000..100644
--- /dev/null
@@@ -1,3652 -1,0 +1,3663 @@@
 +//! HIR (previously known as descriptors) provides a high-level object oriented
 +//! access to Rust code.
 +//!
 +//! The principal difference between HIR and syntax trees is that HIR is bound
 +//! to a particular crate instance. That is, it has cfg flags and features
 +//! applied. So, the relation between syntax and HIR is many-to-one.
 +//!
 +//! HIR is the public API of the all of the compiler logic above syntax trees.
 +//! It is written in "OO" style. Each type is self contained (as in, it knows it's
 +//! parents and full context). It should be "clean code".
 +//!
 +//! `hir_*` crates are the implementation of the compiler logic.
 +//! They are written in "ECS" style, with relatively little abstractions.
 +//! Many types are not self-contained, and explicitly use local indexes, arenas, etc.
 +//!
 +//! `hir` is what insulates the "we don't know how to actually write an incremental compiler"
 +//! from the ide with completions, hovers, etc. It is a (soft, internal) boundary:
 +//! <https://www.tedinski.com/2018/02/06/system-boundaries.html>.
 +
 +#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
 +#![recursion_limit = "512"]
 +
 +mod semantics;
 +mod source_analyzer;
 +
 +mod from_id;
 +mod attrs;
 +mod has_source;
 +
 +pub mod diagnostics;
 +pub mod db;
 +pub mod symbols;
 +
 +mod display;
 +
 +use std::{iter, ops::ControlFlow, sync::Arc};
 +
 +use arrayvec::ArrayVec;
 +use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId, ProcMacroKind};
 +use either::Either;
 +use hir_def::{
 +    adt::{ReprKind, VariantData},
 +    body::{BodyDiagnostic, SyntheticSyntax},
 +    expr::{BindingAnnotation, LabelId, Pat, PatId},
 +    generics::{TypeOrConstParamData, TypeParamProvenance},
 +    item_tree::ItemTreeNode,
 +    lang_item::LangItemTarget,
 +    nameres::{self, diagnostics::DefDiagnostic},
 +    per_ns::PerNs,
 +    resolver::{HasResolver, Resolver},
 +    src::HasSource as _,
 +    AdtId, AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, DefWithBodyId, EnumId,
 +    FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
 +    LocalEnumVariantId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId,
 +    TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
 +};
 +use hir_expand::{name::name, MacroCallKind};
 +use hir_ty::{
 +    all_super_traits, autoderef,
 +    consteval::{unknown_const_as_generic, ComputedExpr, ConstEvalError, ConstExt},
 +    diagnostics::BodyValidationDiagnostic,
 +    method_resolution::{self, TyFingerprint},
 +    primitive::UintTy,
 +    subst_prefix,
 +    traits::FnTrait,
 +    AliasEq, AliasTy, BoundVar, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast,
 +    ClosureId, DebruijnIndex, GenericArgData, InEnvironment, Interner, ParamKind,
 +    QuantifiedWhereClause, Scalar, Solution, Substitution, TraitEnvironment, TraitRefExt, Ty,
 +    TyBuilder, TyDefId, TyExt, TyKind, TyVariableKind, WhereClause,
 +};
 +use itertools::Itertools;
 +use nameres::diagnostics::DefDiagnosticKind;
 +use once_cell::unsync::Lazy;
 +use rustc_hash::FxHashSet;
 +use stdx::{impl_from, never};
 +use syntax::{
 +    ast::{self, HasAttrs as _, HasDocComments, HasName},
 +    AstNode, AstPtr, SmolStr, SyntaxNodePtr, TextRange, T,
 +};
 +
 +use crate::db::{DefDatabase, HirDatabase};
 +
 +pub use crate::{
 +    attrs::{HasAttrs, Namespace},
 +    diagnostics::{
 +        AnyDiagnostic, BreakOutsideOfLoop, InactiveCode, IncorrectCase, InvalidDeriveTarget,
 +        MacroError, MalformedDerive, MismatchedArgCount, MissingFields, MissingMatchArms,
 +        MissingUnsafe, NoSuchField, ReplaceFilterMapNextWithFindMap, TypeMismatch,
 +        UnimplementedBuiltinMacro, UnresolvedExternCrate, UnresolvedImport, UnresolvedMacroCall,
 +        UnresolvedModule, UnresolvedProcMacro,
 +    },
 +    has_source::HasSource,
 +    semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits},
 +};
 +
 +// Be careful with these re-exports.
 +//
 +// `hir` is the boundary between the compiler and the IDE. It should try hard to
 +// isolate the compiler from the ide, to allow the two to be refactored
 +// independently. Re-exporting something from the compiler is the sure way to
 +// breach the boundary.
 +//
 +// Generally, a refactoring which *removes* a name from this list is a good
 +// idea!
 +pub use {
 +    cfg::{CfgAtom, CfgExpr, CfgOptions},
 +    hir_def::{
 +        adt::StructKind,
 +        attr::{Attr, Attrs, AttrsWithOwner, Documentation},
 +        builtin_attr::AttributeTemplate,
 +        find_path::PrefixKind,
 +        import_map,
 +        nameres::ModuleSource,
 +        path::{ModPath, PathKind},
 +        type_ref::{Mutability, TypeRef},
 +        visibility::Visibility,
 +    },
 +    hir_expand::{
 +        name::{known, Name},
 +        ExpandResult, HirFileId, InFile, MacroFile, Origin,
 +    },
 +    hir_ty::display::HirDisplay,
 +};
 +
 +// These are negative re-exports: pub using these names is forbidden, they
 +// should remain private to hir internals.
 +#[allow(unused)]
 +use {
 +    hir_def::path::Path,
 +    hir_expand::{hygiene::Hygiene, name::AsName},
 +};
 +
 +/// hir::Crate describes a single crate. It's the main interface with which
 +/// a crate's dependencies interact. Mostly, it should be just a proxy for the
 +/// root module.
 +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 +pub struct Crate {
 +    pub(crate) id: CrateId,
 +}
 +
 +#[derive(Debug)]
 +pub struct CrateDependency {
 +    pub krate: Crate,
 +    pub name: Name,
 +}
 +
 +impl Crate {
 +    pub fn origin(self, db: &dyn HirDatabase) -> CrateOrigin {
 +        db.crate_graph()[self.id].origin.clone()
 +    }
 +
 +    pub fn is_builtin(self, db: &dyn HirDatabase) -> bool {
 +        matches!(self.origin(db), CrateOrigin::Lang(_))
 +    }
 +
 +    pub fn dependencies(self, db: &dyn HirDatabase) -> Vec<CrateDependency> {
 +        db.crate_graph()[self.id]
 +            .dependencies
 +            .iter()
 +            .map(|dep| {
 +                let krate = Crate { id: dep.crate_id };
 +                let name = dep.as_name();
 +                CrateDependency { krate, name }
 +            })
 +            .collect()
 +    }
 +
 +    pub fn reverse_dependencies(self, db: &dyn HirDatabase) -> Vec<Crate> {
 +        let crate_graph = db.crate_graph();
 +        crate_graph
 +            .iter()
 +            .filter(|&krate| {
 +                crate_graph[krate].dependencies.iter().any(|it| it.crate_id == self.id)
 +            })
 +            .map(|id| Crate { id })
 +            .collect()
 +    }
 +
 +    pub fn transitive_reverse_dependencies(
 +        self,
 +        db: &dyn HirDatabase,
 +    ) -> impl Iterator<Item = Crate> {
 +        db.crate_graph().transitive_rev_deps(self.id).map(|id| Crate { id })
 +    }
 +
 +    pub fn root_module(self, db: &dyn HirDatabase) -> Module {
 +        let def_map = db.crate_def_map(self.id);
 +        Module { id: def_map.module_id(def_map.root()) }
 +    }
 +
 +    pub fn modules(self, db: &dyn HirDatabase) -> Vec<Module> {
 +        let def_map = db.crate_def_map(self.id);
 +        def_map.modules().map(|(id, _)| def_map.module_id(id).into()).collect()
 +    }
 +
 +    pub fn root_file(self, db: &dyn HirDatabase) -> FileId {
 +        db.crate_graph()[self.id].root_file_id
 +    }
 +
 +    pub fn edition(self, db: &dyn HirDatabase) -> Edition {
 +        db.crate_graph()[self.id].edition
 +    }
 +
 +    pub fn version(self, db: &dyn HirDatabase) -> Option<String> {
 +        db.crate_graph()[self.id].version.clone()
 +    }
 +
 +    pub fn display_name(self, db: &dyn HirDatabase) -> Option<CrateDisplayName> {
 +        db.crate_graph()[self.id].display_name.clone()
 +    }
 +
 +    pub fn query_external_importables(
 +        self,
 +        db: &dyn DefDatabase,
 +        query: import_map::Query,
 +    ) -> impl Iterator<Item = Either<ModuleDef, Macro>> {
 +        let _p = profile::span("query_external_importables");
 +        import_map::search_dependencies(db, self.into(), query).into_iter().map(|item| {
 +            match ItemInNs::from(item) {
 +                ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id),
 +                ItemInNs::Macros(mac_id) => Either::Right(mac_id),
 +            }
 +        })
 +    }
 +
 +    pub fn all(db: &dyn HirDatabase) -> Vec<Crate> {
 +        db.crate_graph().iter().map(|id| Crate { id }).collect()
 +    }
 +
 +    /// Try to get the root URL of the documentation of a crate.
 +    pub fn get_html_root_url(self: &Crate, db: &dyn HirDatabase) -> Option<String> {
 +        // Look for #![doc(html_root_url = "...")]
 +        let attrs = db.attrs(AttrDefId::ModuleId(self.root_module(db).into()));
 +        let doc_url = attrs.by_key("doc").find_string_value_in_tt("html_root_url");
 +        doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
 +    }
 +
 +    pub fn cfg(&self, db: &dyn HirDatabase) -> CfgOptions {
 +        db.crate_graph()[self.id].cfg_options.clone()
 +    }
 +
 +    pub fn potential_cfg(&self, db: &dyn HirDatabase) -> CfgOptions {
 +        db.crate_graph()[self.id].potential_cfg_options.clone()
 +    }
 +}
 +
 +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 +pub struct Module {
 +    pub(crate) id: ModuleId,
 +}
 +
 +/// The defs which can be visible in the module.
 +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 +pub enum ModuleDef {
 +    Module(Module),
 +    Function(Function),
 +    Adt(Adt),
 +    // Can't be directly declared, but can be imported.
 +    Variant(Variant),
 +    Const(Const),
 +    Static(Static),
 +    Trait(Trait),
 +    TypeAlias(TypeAlias),
 +    BuiltinType(BuiltinType),
 +    Macro(Macro),
 +}
 +impl_from!(
 +    Module,
 +    Function,
 +    Adt(Struct, Enum, Union),
 +    Variant,
 +    Const,
 +    Static,
 +    Trait,
 +    TypeAlias,
 +    BuiltinType,
 +    Macro
 +    for ModuleDef
 +);
 +
 +impl From<VariantDef> for ModuleDef {
 +    fn from(var: VariantDef) -> Self {
 +        match var {
 +            VariantDef::Struct(t) => Adt::from(t).into(),
 +            VariantDef::Union(t) => Adt::from(t).into(),
 +            VariantDef::Variant(t) => t.into(),
 +        }
 +    }
 +}
 +
 +impl ModuleDef {
 +    pub fn module(self, db: &dyn HirDatabase) -> Option<Module> {
 +        match self {
 +            ModuleDef::Module(it) => it.parent(db),
 +            ModuleDef::Function(it) => Some(it.module(db)),
 +            ModuleDef::Adt(it) => Some(it.module(db)),
 +            ModuleDef::Variant(it) => Some(it.module(db)),
 +            ModuleDef::Const(it) => Some(it.module(db)),
 +            ModuleDef::Static(it) => Some(it.module(db)),
 +            ModuleDef::Trait(it) => Some(it.module(db)),
 +            ModuleDef::TypeAlias(it) => Some(it.module(db)),
 +            ModuleDef::Macro(it) => Some(it.module(db)),
 +            ModuleDef::BuiltinType(_) => None,
 +        }
 +    }
 +
 +    pub fn canonical_path(&self, db: &dyn HirDatabase) -> Option<String> {
 +        let mut segments = vec![self.name(db)?];
 +        for m in self.module(db)?.path_to_root(db) {
 +            segments.extend(m.name(db))
 +        }
 +        segments.reverse();
 +        Some(segments.into_iter().join("::"))
 +    }
 +
 +    pub fn canonical_module_path(
 +        &self,
 +        db: &dyn HirDatabase,
 +    ) -> Option<impl Iterator<Item = Module>> {
 +        self.module(db).map(|it| it.path_to_root(db).into_iter().rev())
 +    }
 +
 +    pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
 +        let name = match self {
 +            ModuleDef::Module(it) => it.name(db)?,
 +            ModuleDef::Const(it) => it.name(db)?,
 +            ModuleDef::Adt(it) => it.name(db),
 +            ModuleDef::Trait(it) => it.name(db),
 +            ModuleDef::Function(it) => it.name(db),
 +            ModuleDef::Variant(it) => it.name(db),
 +            ModuleDef::TypeAlias(it) => it.name(db),
 +            ModuleDef::Static(it) => it.name(db),
 +            ModuleDef::Macro(it) => it.name(db),
 +            ModuleDef::BuiltinType(it) => it.name(),
 +        };
 +        Some(name)
 +    }
 +
 +    pub fn diagnostics(self, db: &dyn HirDatabase) -> Vec<AnyDiagnostic> {
 +        let id = match self {
 +            ModuleDef::Adt(it) => match it {
 +                Adt::Struct(it) => it.id.into(),
 +                Adt::Enum(it) => it.id.into(),
 +                Adt::Union(it) => it.id.into(),
 +            },
 +            ModuleDef::Trait(it) => it.id.into(),
 +            ModuleDef::Function(it) => it.id.into(),
 +            ModuleDef::TypeAlias(it) => it.id.into(),
 +            ModuleDef::Module(it) => it.id.into(),
 +            ModuleDef::Const(it) => it.id.into(),
 +            ModuleDef::Static(it) => it.id.into(),
 +            _ => return Vec::new(),
 +        };
 +
 +        let module = match self.module(db) {
 +            Some(it) => it,
 +            None => return Vec::new(),
 +        };
 +
 +        let mut acc = Vec::new();
 +
 +        match self.as_def_with_body() {
 +            Some(def) => {
 +                def.diagnostics(db, &mut acc);
 +            }
 +            None => {
 +                for diag in hir_ty::diagnostics::incorrect_case(db, module.id.krate(), id) {
 +                    acc.push(diag.into())
 +                }
 +            }
 +        }
 +
 +        acc
 +    }
 +
 +    pub fn as_def_with_body(self) -> Option<DefWithBody> {
 +        match self {
 +            ModuleDef::Function(it) => Some(it.into()),
 +            ModuleDef::Const(it) => Some(it.into()),
 +            ModuleDef::Static(it) => Some(it.into()),
 +
 +            ModuleDef::Module(_)
 +            | ModuleDef::Adt(_)
 +            | ModuleDef::Variant(_)
 +            | ModuleDef::Trait(_)
 +            | ModuleDef::TypeAlias(_)
 +            | ModuleDef::Macro(_)
 +            | ModuleDef::BuiltinType(_) => None,
 +        }
 +    }
 +
 +    pub fn attrs(&self, db: &dyn HirDatabase) -> Option<AttrsWithOwner> {
 +        Some(match self {
 +            ModuleDef::Module(it) => it.attrs(db),
 +            ModuleDef::Function(it) => it.attrs(db),
 +            ModuleDef::Adt(it) => it.attrs(db),
 +            ModuleDef::Variant(it) => it.attrs(db),
 +            ModuleDef::Const(it) => it.attrs(db),
 +            ModuleDef::Static(it) => it.attrs(db),
 +            ModuleDef::Trait(it) => it.attrs(db),
 +            ModuleDef::TypeAlias(it) => it.attrs(db),
 +            ModuleDef::Macro(it) => it.attrs(db),
 +            ModuleDef::BuiltinType(_) => return None,
 +        })
 +    }
 +}
 +
 +impl HasVisibility for ModuleDef {
 +    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
 +        match *self {
 +            ModuleDef::Module(it) => it.visibility(db),
 +            ModuleDef::Function(it) => it.visibility(db),
 +            ModuleDef::Adt(it) => it.visibility(db),
 +            ModuleDef::Const(it) => it.visibility(db),
 +            ModuleDef::Static(it) => it.visibility(db),
 +            ModuleDef::Trait(it) => it.visibility(db),
 +            ModuleDef::TypeAlias(it) => it.visibility(db),
 +            ModuleDef::Variant(it) => it.visibility(db),
 +            ModuleDef::Macro(it) => it.visibility(db),
 +            ModuleDef::BuiltinType(_) => Visibility::Public,
 +        }
 +    }
 +}
 +
 +impl Module {
 +    /// Name of this module.
 +    pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
 +        let def_map = self.id.def_map(db.upcast());
 +        let parent = def_map[self.id.local_id].parent?;
 +        def_map[parent].children.iter().find_map(|(name, module_id)| {
 +            if *module_id == self.id.local_id {
 +                Some(name.clone())
 +            } else {
 +                None
 +            }
 +        })
 +    }
 +
 +    /// Returns the crate this module is part of.
 +    pub fn krate(self) -> Crate {
 +        Crate { id: self.id.krate() }
 +    }
 +
 +    /// Topmost parent of this module. Every module has a `crate_root`, but some
 +    /// might be missing `krate`. This can happen if a module's file is not included
 +    /// in the module tree of any target in `Cargo.toml`.
 +    pub fn crate_root(self, db: &dyn HirDatabase) -> Module {
 +        let def_map = db.crate_def_map(self.id.krate());
 +        Module { id: def_map.module_id(def_map.root()) }
 +    }
 +
 +    pub fn is_crate_root(self, db: &dyn HirDatabase) -> bool {
 +        let def_map = db.crate_def_map(self.id.krate());
 +        def_map.root() == self.id.local_id
 +    }
 +
 +    /// Iterates over all child modules.
 +    pub fn children(self, db: &dyn HirDatabase) -> impl Iterator<Item = Module> {
 +        let def_map = self.id.def_map(db.upcast());
 +        let children = def_map[self.id.local_id]
 +            .children
 +            .iter()
 +            .map(|(_, module_id)| Module { id: def_map.module_id(*module_id) })
 +            .collect::<Vec<_>>();
 +        children.into_iter()
 +    }
 +
 +    /// Finds a parent module.
 +    pub fn parent(self, db: &dyn HirDatabase) -> Option<Module> {
 +        // FIXME: handle block expressions as modules (their parent is in a different DefMap)
 +        let def_map = self.id.def_map(db.upcast());
 +        let parent_id = def_map[self.id.local_id].parent?;
 +        Some(Module { id: def_map.module_id(parent_id) })
 +    }
 +
 +    pub fn path_to_root(self, db: &dyn HirDatabase) -> Vec<Module> {
 +        let mut res = vec![self];
 +        let mut curr = self;
 +        while let Some(next) = curr.parent(db) {
 +            res.push(next);
 +            curr = next
 +        }
 +        res
 +    }
 +
 +    /// Returns a `ModuleScope`: a set of items, visible in this module.
 +    pub fn scope(
 +        self,
 +        db: &dyn HirDatabase,
 +        visible_from: Option<Module>,
 +    ) -> Vec<(Name, ScopeDef)> {
 +        self.id.def_map(db.upcast())[self.id.local_id]
 +            .scope
 +            .entries()
 +            .filter_map(|(name, def)| {
 +                if let Some(m) = visible_from {
 +                    let filtered =
 +                        def.filter_visibility(|vis| vis.is_visible_from(db.upcast(), m.id));
 +                    if filtered.is_none() && !def.is_none() {
 +                        None
 +                    } else {
 +                        Some((name, filtered))
 +                    }
 +                } else {
 +                    Some((name, def))
 +                }
 +            })
 +            .flat_map(|(name, def)| {
 +                ScopeDef::all_items(def).into_iter().map(move |item| (name.clone(), item))
 +            })
 +            .collect()
 +    }
 +
++    /// Fills `acc` with the module's diagnostics.
 +    pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
 +        let _p = profile::span("Module::diagnostics").detail(|| {
 +            format!("{:?}", self.name(db).map_or("<unknown>".into(), |name| name.to_string()))
 +        });
 +        let def_map = self.id.def_map(db.upcast());
 +        for diag in def_map.diagnostics() {
 +            if diag.in_module != self.id.local_id {
 +                // FIXME: This is accidentally quadratic.
 +                continue;
 +            }
 +            emit_def_diagnostic(db, acc, diag);
 +        }
 +        for decl in self.declarations(db) {
 +            match decl {
 +                ModuleDef::Module(m) => {
 +                    // Only add diagnostics from inline modules
 +                    if def_map[m.id.local_id].origin.is_inline() {
 +                        m.diagnostics(db, acc)
 +                    }
 +                }
++                ModuleDef::Trait(t) => {
++                    for diag in db.trait_data_with_diagnostics(t.id).1.iter() {
++                        emit_def_diagnostic(db, acc, diag);
++                    }
++                    acc.extend(decl.diagnostics(db))
++                }
 +                _ => acc.extend(decl.diagnostics(db)),
 +            }
 +        }
 +
 +        for impl_def in self.impl_defs(db) {
++            for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() {
++                emit_def_diagnostic(db, acc, diag);
++            }
++
 +            for item in impl_def.items(db) {
 +                let def: DefWithBody = match item {
 +                    AssocItem::Function(it) => it.into(),
 +                    AssocItem::Const(it) => it.into(),
 +                    AssocItem::TypeAlias(_) => continue,
 +                };
 +
 +                def.diagnostics(db, acc);
 +            }
 +        }
 +    }
 +
 +    pub fn declarations(self, db: &dyn HirDatabase) -> Vec<ModuleDef> {
 +        let def_map = self.id.def_map(db.upcast());
 +        let scope = &def_map[self.id.local_id].scope;
 +        scope
 +            .declarations()
 +            .map(ModuleDef::from)
 +            .chain(scope.unnamed_consts().map(|id| ModuleDef::Const(Const::from(id))))
 +            .collect()
 +    }
 +
 +    pub fn legacy_macros(self, db: &dyn HirDatabase) -> Vec<Macro> {
 +        let def_map = self.id.def_map(db.upcast());
 +        let scope = &def_map[self.id.local_id].scope;
 +        scope.legacy_macros().flat_map(|(_, it)| it).map(|&it| MacroId::from(it).into()).collect()
 +    }
 +
 +    pub fn impl_defs(self, db: &dyn HirDatabase) -> Vec<Impl> {
 +        let def_map = self.id.def_map(db.upcast());
 +        def_map[self.id.local_id].scope.impls().map(Impl::from).collect()
 +    }
 +
 +    /// Finds a path that can be used to refer to the given item from within
 +    /// this module, if possible.
 +    pub fn find_use_path(self, db: &dyn DefDatabase, item: impl Into<ItemInNs>) -> Option<ModPath> {
 +        hir_def::find_path::find_path(db, item.into().into(), self.into())
 +    }
 +
 +    /// Finds a path that can be used to refer to the given item from within
 +    /// this module, if possible. This is used for returning import paths for use-statements.
 +    pub fn find_use_path_prefixed(
 +        self,
 +        db: &dyn DefDatabase,
 +        item: impl Into<ItemInNs>,
 +        prefix_kind: PrefixKind,
 +    ) -> Option<ModPath> {
 +        hir_def::find_path::find_path_prefixed(db, item.into().into(), self.into(), prefix_kind)
 +    }
 +}
 +
 +fn emit_def_diagnostic(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, diag: &DefDiagnostic) {
 +    match &diag.kind {
 +        DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => {
 +            let decl = declaration.to_node(db.upcast());
 +            acc.push(
 +                UnresolvedModule {
 +                    decl: InFile::new(declaration.file_id, AstPtr::new(&decl)),
 +                    candidates: candidates.clone(),
 +                }
 +                .into(),
 +            )
 +        }
 +        DefDiagnosticKind::UnresolvedExternCrate { ast } => {
 +            let item = ast.to_node(db.upcast());
 +            acc.push(
 +                UnresolvedExternCrate { decl: InFile::new(ast.file_id, AstPtr::new(&item)) }.into(),
 +            );
 +        }
 +
 +        DefDiagnosticKind::UnresolvedImport { id, index } => {
 +            let file_id = id.file_id();
 +            let item_tree = id.item_tree(db.upcast());
 +            let import = &item_tree[id.value];
 +
 +            let use_tree = import.use_tree_to_ast(db.upcast(), file_id, *index);
 +            acc.push(
 +                UnresolvedImport { decl: InFile::new(file_id, AstPtr::new(&use_tree)) }.into(),
 +            );
 +        }
 +
 +        DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } => {
 +            let item = ast.to_node(db.upcast());
 +            acc.push(
 +                InactiveCode {
 +                    node: ast.with_value(AstPtr::new(&item).into()),
 +                    cfg: cfg.clone(),
 +                    opts: opts.clone(),
 +                }
 +                .into(),
 +            );
 +        }
 +
 +        DefDiagnosticKind::UnresolvedProcMacro { ast, krate } => {
 +            let (node, precise_location, macro_name, kind) = precise_macro_call_location(ast, db);
 +            acc.push(
 +                UnresolvedProcMacro { node, precise_location, macro_name, kind, krate: *krate }
 +                    .into(),
 +            );
 +        }
 +
 +        DefDiagnosticKind::UnresolvedMacroCall { ast, path } => {
 +            let (node, precise_location, _, _) = precise_macro_call_location(ast, db);
 +            acc.push(
 +                UnresolvedMacroCall {
 +                    macro_call: node,
 +                    precise_location,
 +                    path: path.clone(),
 +                    is_bang: matches!(ast, MacroCallKind::FnLike { .. }),
 +                }
 +                .into(),
 +            );
 +        }
 +
 +        DefDiagnosticKind::MacroError { ast, message } => {
 +            let (node, precise_location, _, _) = precise_macro_call_location(ast, db);
 +            acc.push(MacroError { node, precise_location, message: message.clone() }.into());
 +        }
 +
 +        DefDiagnosticKind::UnimplementedBuiltinMacro { ast } => {
 +            let node = ast.to_node(db.upcast());
 +            // Must have a name, otherwise we wouldn't emit it.
 +            let name = node.name().expect("unimplemented builtin macro with no name");
 +            acc.push(
 +                UnimplementedBuiltinMacro {
 +                    node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&name))),
 +                }
 +                .into(),
 +            );
 +        }
 +        DefDiagnosticKind::InvalidDeriveTarget { ast, id } => {
 +            let node = ast.to_node(db.upcast());
 +            let derive = node.attrs().nth(*id as usize);
 +            match derive {
 +                Some(derive) => {
 +                    acc.push(
 +                        InvalidDeriveTarget {
 +                            node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
 +                        }
 +                        .into(),
 +                    );
 +                }
 +                None => stdx::never!("derive diagnostic on item without derive attribute"),
 +            }
 +        }
 +        DefDiagnosticKind::MalformedDerive { ast, id } => {
 +            let node = ast.to_node(db.upcast());
 +            let derive = node.attrs().nth(*id as usize);
 +            match derive {
 +                Some(derive) => {
 +                    acc.push(
 +                        MalformedDerive {
 +                            node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
 +                        }
 +                        .into(),
 +                    );
 +                }
 +                None => stdx::never!("derive diagnostic on item without derive attribute"),
 +            }
 +        }
 +    }
 +}
 +
 +/// Resolves a `MacroCallKind` back to syntax and extracts the most precise
 +/// location information available for diagnostics.
 +///
 +/// Returns `(node, precise_location, macro_name, kind)`:
 +/// - `node` points at the whole call / derive target / attributed item,
 +/// - `precise_location` narrows it to the macro-name token when one is found,
 +/// - `macro_name` is the name's text when it can be read from the syntax,
 +/// - `kind` classifies the call as fn-like, derive, or attribute.
 +fn precise_macro_call_location(
 +    ast: &MacroCallKind,
 +    db: &dyn HirDatabase,
 +) -> (InFile<SyntaxNodePtr>, Option<TextRange>, Option<String>, MacroKind) {
 +    // FIXME: maybe we actually want slightly different ranges for the different macro diagnostics
 +    // - e.g. the full attribute for macro errors, but only the name for name resolution
 +    match ast {
 +        MacroCallKind::FnLike { ast_id, .. } => {
 +            let node = ast_id.to_node(db.upcast());
 +            // Narrow to the final path segment's name, e.g. `bar` in `foo::bar!(..)`.
 +            (
 +                ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
 +                node.path()
 +                    .and_then(|it| it.segment())
 +                    .and_then(|it| it.name_ref())
 +                    .map(|it| it.syntax().text_range()),
 +                node.path().and_then(|it| it.segment()).map(|it| it.to_string()),
 +                MacroKind::ProcMacro,
 +            )
 +        }
 +        MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => {
 +            let node = ast_id.to_node(db.upcast());
 +            // Compute the precise location of the macro name's token in the derive
 +            // list.
 +            let token = (|| {
 +                let derive_attr = node
 +                    .doc_comments_and_attrs()
 +                    .nth(*derive_attr_index as usize)
 +                    .and_then(Either::left)?;
 +                let token_tree = derive_attr.meta()?.token_tree()?;
 +                // Split the `#[derive(A, B, C)]` token tree on commas, pick the
 +                // `derive_index`-th non-comma run, then its identifier token.
 +                let group_by = token_tree
 +                    .syntax()
 +                    .children_with_tokens()
 +                    .filter_map(|elem| match elem {
 +                        syntax::NodeOrToken::Token(tok) => Some(tok),
 +                        _ => None,
 +                    })
 +                    .group_by(|t| t.kind() == T![,]);
 +                let (_, mut group) = group_by
 +                    .into_iter()
 +                    .filter(|&(comma, _)| !comma)
 +                    .nth(*derive_index as usize)?;
 +                group.find(|t| t.kind() == T![ident])
 +            })();
 +            (
 +                ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
 +                token.as_ref().map(|tok| tok.text_range()),
 +                token.as_ref().map(ToString::to_string),
 +                MacroKind::Derive,
 +            )
 +        }
 +        MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
 +            let node = ast_id.to_node(db.upcast());
 +            // The invoking attribute must exist on the item, otherwise this call
 +            // would not have been recorded in the first place.
 +            let attr = node
 +                .doc_comments_and_attrs()
 +                .nth((*invoc_attr_index) as usize)
 +                .and_then(Either::left)
 +                .unwrap_or_else(|| panic!("cannot find attribute #{}", invoc_attr_index));
 +
 +            (
 +                ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))),
 +                Some(attr.syntax().text_range()),
 +                attr.path()
 +                    .and_then(|path| path.segment())
 +                    .and_then(|seg| seg.name_ref())
 +                    .as_ref()
 +                    .map(ToString::to_string),
 +                MacroKind::Attr,
 +            )
 +        }
 +    }
 +}
 +
 +impl HasVisibility for Module {
 +    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
 +        let def_map = self.id.def_map(db.upcast());
 +        let module_data = &def_map[self.id.local_id];
 +        module_data.visibility
 +    }
 +}
 +
 +/// A field of a struct, union, or enum variant.
 +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 +pub struct Field {
 +    pub(crate) parent: VariantDef,
 +    // Index of the field within the parent's field arena.
 +    pub(crate) id: LocalFieldId,
 +}
 +
 +/// The syntax a [`Field`] was lowered from: either a named record field or a
 +/// positional tuple field.
 +#[derive(Debug, PartialEq, Eq)]
 +pub enum FieldSource {
 +    Named(ast::RecordField),
 +    Pos(ast::TupleField),
 +}
 +
 +impl Field {
 +    /// The field's name as written in the source.
 +    pub fn name(&self, db: &dyn HirDatabase) -> Name {
 +        self.parent.variant_data(db).fields()[self.id].name.clone()
 +    }
 +
 +    /// Returns the type as in the signature of the struct (i.e., with
 +    /// placeholder types for type parameters). Only use this in the context of
 +    /// the field definition.
 +    pub fn ty(&self, db: &dyn HirDatabase) -> Type {
 +        let var_id = self.parent.into();
 +        // Generics live on the enclosing ADT; for an enum variant that is the
 +        // parent enum, not the variant itself.
 +        let generic_def_id: GenericDefId = match self.parent {
 +            VariantDef::Struct(it) => it.id.into(),
 +            VariantDef::Union(it) => it.id.into(),
 +            VariantDef::Variant(it) => it.parent.id.into(),
 +        };
 +        let substs = TyBuilder::placeholder_subst(db, generic_def_id);
 +        let ty = db.field_types(var_id)[self.id].clone().substitute(Interner, &substs);
 +        Type::new(db, var_id, ty)
 +    }
 +
 +    /// The struct, union, or enum variant this field belongs to.
 +    pub fn parent_def(&self, _db: &dyn HirDatabase) -> VariantDef {
 +        self.parent
 +    }
 +}
 +
 +impl HasVisibility for Field {
 +    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
 +        let variant_data = self.parent.variant_data(db);
 +        let visibility = &variant_data.fields()[self.id].visibility;
 +        let parent_id: hir_def::VariantId = self.parent.into();
 +        visibility.resolve(db.upcast(), &parent_id.resolver(db.upcast()))
 +    }
 +}
 +
 +/// A struct definition.
 +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 +pub struct Struct {
 +    pub(crate) id: StructId,
 +}
 +
 +impl Struct {
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        Module { id: self.id.lookup(db.upcast()).container }
 +    }
 +
 +    pub fn name(self, db: &dyn HirDatabase) -> Name {
 +        db.struct_data(self.id).name.clone()
 +    }
 +
 +    pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
 +        db.struct_data(self.id)
 +            .variant_data
 +            .fields()
 +            .iter()
 +            .map(|(id, _)| Field { parent: self.into(), id })
 +            .collect()
 +    }
 +
 +    pub fn ty(self, db: &dyn HirDatabase) -> Type {
 +        Type::from_def(db, self.id)
 +    }
 +
 +    pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprKind> {
 +        db.struct_data(self.id).repr.clone()
 +    }
 +
 +    pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
 +        self.variant_data(db).kind()
 +    }
 +
 +    fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
 +        db.struct_data(self.id).variant_data.clone()
 +    }
 +}
 +
 +impl HasVisibility for Struct {
 +    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
 +        db.struct_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
 +    }
 +}
 +
 +/// A union definition.
 +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 +pub struct Union {
 +    pub(crate) id: UnionId,
 +}
 +
 +impl Union {
 +    pub fn name(self, db: &dyn HirDatabase) -> Name {
 +        db.union_data(self.id).name.clone()
 +    }
 +
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        Module { id: self.id.lookup(db.upcast()).container }
 +    }
 +
 +    pub fn ty(self, db: &dyn HirDatabase) -> Type {
 +        Type::from_def(db, self.id)
 +    }
 +
 +    pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
 +        db.union_data(self.id)
 +            .variant_data
 +            .fields()
 +            .iter()
 +            .map(|(id, _)| Field { parent: self.into(), id })
 +            .collect()
 +    }
 +
 +    fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
 +        db.union_data(self.id).variant_data.clone()
 +    }
 +}
 +
 +impl HasVisibility for Union {
 +    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
 +        db.union_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
 +    }
 +}
 +
 +/// An enum definition.
 +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 +pub struct Enum {
 +    pub(crate) id: EnumId,
 +}
 +
 +impl Enum {
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        Module { id: self.id.lookup(db.upcast()).container }
 +    }
 +
 +    pub fn name(self, db: &dyn HirDatabase) -> Name {
 +        db.enum_data(self.id).name.clone()
 +    }
 +
 +    pub fn variants(self, db: &dyn HirDatabase) -> Vec<Variant> {
 +        db.enum_data(self.id).variants.iter().map(|(id, _)| Variant { parent: self, id }).collect()
 +    }
 +
 +    pub fn ty(self, db: &dyn HirDatabase) -> Type {
 +        Type::from_def(db, self.id)
 +    }
 +}
 +
 +impl HasVisibility for Enum {
 +    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
 +        db.enum_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
 +    }
 +}
 +
 +/// A single variant of an enum.
 +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 +pub struct Variant {
 +    pub(crate) parent: Enum,
 +    // Index of the variant within the parent enum's variant arena.
 +    pub(crate) id: LocalEnumVariantId,
 +}
 +
 +impl Variant {
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        self.parent.module(db)
 +    }
 +
 +    pub fn parent_enum(self, _db: &dyn HirDatabase) -> Enum {
 +        self.parent
 +    }
 +
 +    pub fn name(self, db: &dyn HirDatabase) -> Name {
 +        db.enum_data(self.parent.id).variants[self.id].name.clone()
 +    }
 +
 +    pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
 +        self.variant_data(db)
 +            .fields()
 +            .iter()
 +            .map(|(id, _)| Field { parent: self.into(), id })
 +            .collect()
 +    }
 +
 +    pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
 +        self.variant_data(db).kind()
 +    }
 +
 +    pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
 +        db.enum_data(self.parent.id).variants[self.id].variant_data.clone()
 +    }
 +}
 +
 +/// Variants inherit visibility from the parent enum.
 +impl HasVisibility for Variant {
 +    // A variant carries no visibility of its own; delegate to the enum.
 +    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
 +        self.parent_enum(db).visibility(db)
 +    }
 +}
 +
 +/// A Data Type: a struct, union, or enum definition.
 +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 +pub enum Adt {
 +    Struct(Struct),
 +    Union(Union),
 +    Enum(Enum),
 +}
 +impl_from!(Struct, Union, Enum for Adt);
 +
 +impl Adt {
 +    /// Whether any of this ADT's generic parameters has no usable default.
 +    // NOTE(review): relies on `generic_defaults` yielding the unknown type for
 +    // parameters without a default — confirm against `hir_ty`.
 +    pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
 +        let subst = db.generic_defaults(self.into());
 +        subst.iter().any(|ty| match ty.skip_binders().data(Interner) {
 +            GenericArgData::Ty(x) => x.is_unknown(),
 +            _ => false,
 +        })
 +    }
 +
 +    /// Turns this ADT into a type. Any type parameters of the ADT will be
 +    /// turned into unknown types, which is good for e.g. finding the most
 +    /// general set of completions, but will not look very nice when printed.
 +    pub fn ty(self, db: &dyn HirDatabase) -> Type {
 +        let id = AdtId::from(self);
 +        Type::from_def(db, id)
 +    }
 +
 +    /// Turns this ADT into a type with the given type parameters. This isn't
 +    /// the greatest API, FIXME find a better one.
 +    pub fn ty_with_args(self, db: &dyn HirDatabase, args: &[Type]) -> Type {
 +        let id = AdtId::from(self);
 +        let mut it = args.iter().map(|t| t.ty.clone());
 +        let ty = TyBuilder::def_ty(db, id.into())
 +            .fill(|x| {
 +                // Missing type arguments become the error type; const
 +                // parameters are always filled with an unknown const.
 +                let r = it.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
 +                match x {
 +                    ParamKind::Type => GenericArgData::Ty(r).intern(Interner),
 +                    ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
 +                }
 +            })
 +            .build();
 +        Type::new(db, id, ty)
 +    }
 +
 +    /// The module the underlying struct/union/enum is declared in.
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        match self {
 +            Adt::Struct(s) => s.module(db),
 +            Adt::Union(s) => s.module(db),
 +            Adt::Enum(e) => e.module(db),
 +        }
 +    }
 +
 +    /// The name of the underlying struct/union/enum.
 +    pub fn name(self, db: &dyn HirDatabase) -> Name {
 +        match self {
 +            Adt::Struct(s) => s.name(db),
 +            Adt::Union(u) => u.name(db),
 +            Adt::Enum(e) => e.name(db),
 +        }
 +    }
 +
 +    /// Returns the inner [`Enum`] if this ADT is an enum.
 +    pub fn as_enum(&self) -> Option<Enum> {
 +        if let Self::Enum(v) = self {
 +            Some(*v)
 +        } else {
 +            None
 +        }
 +    }
 +}
 +
 +impl HasVisibility for Adt {
 +    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
 +        match self {
 +            Adt::Struct(it) => it.visibility(db),
 +            Adt::Union(it) => it.visibility(db),
 +            Adt::Enum(it) => it.visibility(db),
 +        }
 +    }
 +}
 +
 +/// A definition that owns fields: a struct, a union, or an enum variant.
 +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 +pub enum VariantDef {
 +    Struct(Struct),
 +    Union(Union),
 +    Variant(Variant),
 +}
 +impl_from!(Struct, Union, Variant for VariantDef);
 +
 +impl VariantDef {
 +    pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
 +        match self {
 +            VariantDef::Struct(it) => it.fields(db),
 +            VariantDef::Union(it) => it.fields(db),
 +            VariantDef::Variant(it) => it.fields(db),
 +        }
 +    }
 +
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        match self {
 +            VariantDef::Struct(it) => it.module(db),
 +            VariantDef::Union(it) => it.module(db),
 +            VariantDef::Variant(it) => it.module(db),
 +        }
 +    }
 +
 +    pub fn name(&self, db: &dyn HirDatabase) -> Name {
 +        match self {
 +            VariantDef::Struct(s) => s.name(db),
 +            VariantDef::Union(u) => u.name(db),
 +            VariantDef::Variant(e) => e.name(db),
 +        }
 +    }
 +
 +    pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
 +        match self {
 +            VariantDef::Struct(it) => it.variant_data(db),
 +            VariantDef::Union(it) => it.variant_data(db),
 +            VariantDef::Variant(it) => it.variant_data(db),
 +        }
 +    }
 +}
 +
 +/// The defs which have a body: functions, statics, and consts.
 +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 +pub enum DefWithBody {
 +    Function(Function),
 +    Static(Static),
 +    Const(Const),
 +}
 +impl_from!(Function, Const, Static for DefWithBody);
 +
 +impl DefWithBody {
 +    /// The module this def is declared in.
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        match self {
 +            DefWithBody::Const(c) => c.module(db),
 +            DefWithBody::Function(f) => f.module(db),
 +            DefWithBody::Static(s) => s.module(db),
 +        }
 +    }
 +
 +    /// The def's name, if it has one (of these, only consts can be nameless).
 +    pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
 +        match self {
 +            DefWithBody::Function(f) => Some(f.name(db)),
 +            DefWithBody::Static(s) => Some(s.name(db)),
 +            DefWithBody::Const(c) => c.name(db),
 +        }
 +    }
 +
 +    /// Returns the type this def's body has to evaluate to.
 +    pub fn body_type(self, db: &dyn HirDatabase) -> Type {
 +        match self {
 +            DefWithBody::Function(it) => it.ret_type(db),
 +            DefWithBody::Static(it) => it.ty(db),
 +            DefWithBody::Const(it) => it.ty(db),
 +        }
 +    }
 +
 +    // The `hir_def` id of this def, used for body/inference queries below.
 +    fn id(&self) -> DefWithBodyId {
 +        match self {
 +            DefWithBody::Function(it) => it.id.into(),
 +            DefWithBody::Static(it) => it.id.into(),
 +            DefWithBody::Const(it) => it.id.into(),
 +        }
 +    }
 +
 +    /// A textual representation of the HIR of this def's body for debugging purposes.
 +    pub fn debug_hir(self, db: &dyn HirDatabase) -> String {
 +        let body = db.body(self.id());
 +        body.pretty_print(db.upcast(), self.id())
 +    }
 +
 +    /// Collects every diagnostic for this body into `acc`: name-resolution
 +    /// diagnostics of nested block modules, body-lowering problems, inference
 +    /// diagnostics, type mismatches, missing-unsafe errors, body-validation
 +    /// lints, and incorrect-case warnings.
 +    pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
 +        let krate = self.module(db).id.krate();
 +
 +        let (body, source_map) = db.body_with_source_map(self.into());
 +
 +        // Name-resolution diagnostics from block expressions nested in this body.
 +        for (_, def_map) in body.blocks(db.upcast()) {
 +            for diag in def_map.diagnostics() {
 +                emit_def_diagnostic(db, acc, diag);
 +            }
 +        }
 +
 +        // Problems recorded while lowering the AST to the HIR body.
 +        for diag in source_map.diagnostics() {
 +            match diag {
 +                BodyDiagnostic::InactiveCode { node, cfg, opts } => acc.push(
 +                    InactiveCode { node: node.clone(), cfg: cfg.clone(), opts: opts.clone() }
 +                        .into(),
 +                ),
 +                BodyDiagnostic::MacroError { node, message } => acc.push(
 +                    MacroError {
 +                        node: node.clone().map(|it| it.into()),
 +                        precise_location: None,
 +                        message: message.to_string(),
 +                    }
 +                    .into(),
 +                ),
 +                BodyDiagnostic::UnresolvedProcMacro { node, krate } => acc.push(
 +                    UnresolvedProcMacro {
 +                        node: node.clone().map(|it| it.into()),
 +                        precise_location: None,
 +                        macro_name: None,
 +                        kind: MacroKind::ProcMacro,
 +                        krate: *krate,
 +                    }
 +                    .into(),
 +                ),
 +                BodyDiagnostic::UnresolvedMacroCall { node, path } => acc.push(
 +                    UnresolvedMacroCall {
 +                        macro_call: node.clone().map(|ast_ptr| ast_ptr.into()),
 +                        precise_location: None,
 +                        path: path.clone(),
 +                        is_bang: true,
 +                    }
 +                    .into(),
 +                ),
 +            }
 +        }
 +
 +        // Diagnostics produced during type inference.
 +        let infer = db.infer(self.into());
 +        // NOTE(review): shadows the eager `source_map` above with a lazy
 +        // re-query; presumably cached by the database — confirm.
 +        let source_map = Lazy::new(|| db.body_with_source_map(self.into()).1);
 +        for d in &infer.diagnostics {
 +            match d {
 +                hir_ty::InferenceDiagnostic::NoSuchField { expr } => {
 +                    let field = source_map.field_syntax(*expr);
 +                    acc.push(NoSuchField { field }.into())
 +                }
 +                hir_ty::InferenceDiagnostic::BreakOutsideOfLoop { expr } => {
 +                    let expr = source_map
 +                        .expr_syntax(*expr)
 +                        .expect("break outside of loop in synthetic syntax");
 +                    acc.push(BreakOutsideOfLoop { expr }.into())
 +                }
 +                hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
 +                    match source_map.expr_syntax(*call_expr) {
 +                        Ok(source_ptr) => acc.push(
 +                            MismatchedArgCount {
 +                                call_expr: source_ptr,
 +                                expected: *expected,
 +                                found: *found,
 +                            }
 +                            .into(),
 +                        ),
 +                        Err(SyntheticSyntax) => (),
 +                    }
 +                }
 +            }
 +        }
 +        // Expression type mismatches reported by inference.
 +        for (expr, mismatch) in infer.expr_type_mismatches() {
 +            let expr = match source_map.expr_syntax(expr) {
 +                Ok(expr) => expr,
 +                Err(SyntheticSyntax) => continue,
 +            };
 +            acc.push(
 +                TypeMismatch {
 +                    expr,
 +                    expected: Type::new(db, DefWithBodyId::from(self), mismatch.expected.clone()),
 +                    actual: Type::new(db, DefWithBodyId::from(self), mismatch.actual.clone()),
 +                }
 +                .into(),
 +            );
 +        }
 +
 +        // Unsafe operations used outside of an unsafe context.
 +        for expr in hir_ty::diagnostics::missing_unsafe(db, self.into()) {
 +            match source_map.expr_syntax(expr) {
 +                Ok(expr) => acc.push(MissingUnsafe { expr }.into()),
 +                Err(SyntheticSyntax) => {
 +                    // FIXME: Here and elsewhere in this file, the `expr` was
 +                    // desugared, report or assert that this doesn't happen.
 +                }
 +            }
 +        }
 +
 +        // Structured lints from body validation: missing record fields,
 +        // `filter_map(..).next()` simplification, and missing match arms.
 +        for diagnostic in BodyValidationDiagnostic::collect(db, self.into()) {
 +            match diagnostic {
 +                BodyValidationDiagnostic::RecordMissingFields {
 +                    record,
 +                    variant,
 +                    missed_fields,
 +                } => {
 +                    let variant_data = variant.variant_data(db.upcast());
 +                    let missed_fields = missed_fields
 +                        .into_iter()
 +                        .map(|idx| variant_data.fields()[idx].name.clone())
 +                        .collect();
 +
 +                    // `record` is either a record expression or a record pattern.
 +                    match record {
 +                        Either::Left(record_expr) => match source_map.expr_syntax(record_expr) {
 +                            Ok(source_ptr) => {
 +                                let root = source_ptr.file_syntax(db.upcast());
 +                                if let ast::Expr::RecordExpr(record_expr) =
 +                                    &source_ptr.value.to_node(&root)
 +                                {
 +                                    if record_expr.record_expr_field_list().is_some() {
 +                                        acc.push(
 +                                            MissingFields {
 +                                                file: source_ptr.file_id,
 +                                                field_list_parent: Either::Left(AstPtr::new(
 +                                                    record_expr,
 +                                                )),
 +                                                field_list_parent_path: record_expr
 +                                                    .path()
 +                                                    .map(|path| AstPtr::new(&path)),
 +                                                missed_fields,
 +                                            }
 +                                            .into(),
 +                                        )
 +                                    }
 +                                }
 +                            }
 +                            Err(SyntheticSyntax) => (),
 +                        },
 +                        Either::Right(record_pat) => match source_map.pat_syntax(record_pat) {
 +                            Ok(source_ptr) => {
 +                                if let Some(expr) = source_ptr.value.as_ref().left() {
 +                                    let root = source_ptr.file_syntax(db.upcast());
 +                                    if let ast::Pat::RecordPat(record_pat) = expr.to_node(&root) {
 +                                        if record_pat.record_pat_field_list().is_some() {
 +                                            acc.push(
 +                                                MissingFields {
 +                                                    file: source_ptr.file_id,
 +                                                    field_list_parent: Either::Right(AstPtr::new(
 +                                                        &record_pat,
 +                                                    )),
 +                                                    field_list_parent_path: record_pat
 +                                                        .path()
 +                                                        .map(|path| AstPtr::new(&path)),
 +                                                    missed_fields,
 +                                                }
 +                                                .into(),
 +                                            )
 +                                        }
 +                                    }
 +                                }
 +                            }
 +                            Err(SyntheticSyntax) => (),
 +                        },
 +                    }
 +                }
 +                BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap { method_call_expr } => {
 +                    if let Ok(next_source_ptr) = source_map.expr_syntax(method_call_expr) {
 +                        acc.push(
 +                            ReplaceFilterMapNextWithFindMap {
 +                                file: next_source_ptr.file_id,
 +                                next_expr: next_source_ptr.value,
 +                            }
 +                            .into(),
 +                        );
 +                    }
 +                }
 +                BodyValidationDiagnostic::MissingMatchArms { match_expr, uncovered_patterns } => {
 +                    match source_map.expr_syntax(match_expr) {
 +                        Ok(source_ptr) => {
 +                            let root = source_ptr.file_syntax(db.upcast());
 +                            if let ast::Expr::MatchExpr(match_expr) =
 +                                &source_ptr.value.to_node(&root)
 +                            {
 +                                if let Some(match_expr) = match_expr.expr() {
 +                                    acc.push(
 +                                        MissingMatchArms {
 +                                            file: source_ptr.file_id,
 +                                            match_expr: AstPtr::new(&match_expr),
 +                                            uncovered_patterns,
 +                                        }
 +                                        .into(),
 +                                    );
 +                                }
 +                            }
 +                        }
 +                        Err(SyntheticSyntax) => (),
 +                    }
 +                }
 +            }
 +        }
 +
 +        // Finally, name-case diagnostics for the def itself.
 +        let def: ModuleDef = match self {
 +            DefWithBody::Function(it) => it.into(),
 +            DefWithBody::Static(it) => it.into(),
 +            DefWithBody::Const(it) => it.into(),
 +        };
 +        for diag in hir_ty::diagnostics::incorrect_case(db, krate, def.into()) {
 +            acc.push(diag.into())
 +        }
 +    }
 +}
 +
 +/// A function or method definition.
 +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 +pub struct Function {
 +    pub(crate) id: FunctionId,
 +}
 +
 +impl Function {
 +    /// The module this function is declared in.
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        self.id.lookup(db.upcast()).module(db.upcast()).into()
 +    }
 +
 +    /// The function's name.
 +    pub fn name(self, db: &dyn HirDatabase) -> Name {
 +        db.function_data(self.id).name.clone()
 +    }
 +
 +    /// Get this function's return type
 +    pub fn ret_type(self, db: &dyn HirDatabase) -> Type {
 +        let resolver = self.id.resolver(db.upcast());
 +        // Substitute placeholders so the type is usable in the context of the
 +        // function definition itself.
 +        let substs = TyBuilder::placeholder_subst(db, self.id);
 +        let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
 +        let ty = callable_sig.ret().clone();
 +        Type::new_with_resolver_inner(db, &resolver, ty)
 +    }
 +
 +    /// For an `async fn`, the `Output` type of the returned future; `None`
 +    /// for non-async functions.
 +    pub fn async_ret_type(self, db: &dyn HirDatabase) -> Option<Type> {
 +        if !self.is_async(db) {
 +            return None;
 +        }
 +        let resolver = self.id.resolver(db.upcast());
 +        let substs = TyBuilder::placeholder_subst(db, self.id);
 +        let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
 +        let ret_ty = callable_sig.ret().clone();
 +        // The signature's return type is an `impl Future`; dig the `Output`
 +        // type out of its alias-eq bound.
 +        for pred in ret_ty.impl_trait_bounds(db).into_iter().flatten() {
 +            if let WhereClause::AliasEq(output_eq) = pred.into_value_and_skipped_binders().0 {
 +                return Type::new_with_resolver_inner(db, &resolver, output_eq.ty).into();
 +            }
 +        }
 +        never!("Async fn ret_type should be impl Future");
 +        None
 +    }
 +
 +    /// Whether the function has a `self` parameter.
 +    pub fn has_self_param(self, db: &dyn HirDatabase) -> bool {
 +        db.function_data(self.id).has_self_param()
 +    }
 +
 +    /// The `self` parameter, if present.
 +    pub fn self_param(self, db: &dyn HirDatabase) -> Option<SelfParam> {
 +        self.has_self_param(db).then(|| SelfParam { func: self.id })
 +    }
 +
 +    /// All parameters (including `self`, if any) with their types.
 +    pub fn assoc_fn_params(self, db: &dyn HirDatabase) -> Vec<Param> {
 +        let environment = db.trait_environment(self.id.into());
 +        let substs = TyBuilder::placeholder_subst(db, self.id);
 +        let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
 +        callable_sig
 +            .params()
 +            .iter()
 +            .enumerate()
 +            .map(|(idx, ty)| {
 +                let ty = Type { env: environment.clone(), ty: ty.clone() };
 +                Param { func: self, ty, idx }
 +            })
 +            .collect()
 +    }
 +
 +    /// Parameters after `self`; `None` if there is no `self` parameter.
 +    pub fn method_params(self, db: &dyn HirDatabase) -> Option<Vec<Param>> {
 +        if self.self_param(db).is_none() {
 +            return None;
 +        }
 +        Some(self.params_without_self(db))
 +    }
 +
 +    /// All parameters except `self`. Indices still count `self`, staying
 +    /// consistent with [`Function::assoc_fn_params`].
 +    pub fn params_without_self(self, db: &dyn HirDatabase) -> Vec<Param> {
 +        let environment = db.trait_environment(self.id.into());
 +        let substs = TyBuilder::placeholder_subst(db, self.id);
 +        let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
 +        let skip = if db.function_data(self.id).has_self_param() { 1 } else { 0 };
 +        callable_sig
 +            .params()
 +            .iter()
 +            .enumerate()
 +            .skip(skip)
 +            .map(|(idx, ty)| {
 +                let ty = Type { env: environment.clone(), ty: ty.clone() };
 +                Param { func: self, ty, idx }
 +            })
 +            .collect()
 +    }
 +
 +    /// Whether the function is declared `const`.
 +    pub fn is_const(self, db: &dyn HirDatabase) -> bool {
 +        db.function_data(self.id).has_const_kw()
 +    }
 +
 +    /// Whether the function is declared `async`.
 +    pub fn is_async(self, db: &dyn HirDatabase) -> bool {
 +        db.function_data(self.id).has_async_kw()
 +    }
 +
 +    /// Whether calling this function requires an `unsafe` context.
 +    pub fn is_unsafe_to_call(self, db: &dyn HirDatabase) -> bool {
 +        hir_ty::is_fn_unsafe_to_call(db, self.id)
 +    }
 +
 +    /// Whether this function declaration has a definition.
 +    ///
 +    /// This is false in the case of required (not provided) trait methods.
 +    pub fn has_body(self, db: &dyn HirDatabase) -> bool {
 +        db.function_data(self.id).has_body()
 +    }
 +
 +    /// If this function is a proc-macro entry point (`#[proc_macro]`,
 +    /// `#[proc_macro_attribute]`, or `#[proc_macro_derive]`), the macro it
 +    /// defines.
 +    pub fn as_proc_macro(self, db: &dyn HirDatabase) -> Option<Macro> {
 +        let function_data = db.function_data(self.id);
 +        let attrs = &function_data.attrs;
 +        // FIXME: Store this in FunctionData flags?
 +        if !(attrs.is_proc_macro()
 +            || attrs.is_proc_macro_attribute()
 +            || attrs.is_proc_macro_derive())
 +        {
 +            return None;
 +        }
 +        let loc = self.id.lookup(db.upcast());
 +        let def_map = db.crate_def_map(loc.krate(db).into());
 +        def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() })
 +    }
 +}
 +
// Note: logically, this belongs to `hir_ty`, but we are not using it there yet.
/// How a value (most importantly the `self` parameter) is accessed:
/// by shared reference, exclusive reference, or by value.
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum Access {
    Shared,
    Exclusive,
    Owned,
}
 +
 +impl From<hir_ty::Mutability> for Access {
 +    fn from(mutability: hir_ty::Mutability) -> Access {
 +        match mutability {
 +            hir_ty::Mutability::Not => Access::Shared,
 +            hir_ty::Mutability::Mut => Access::Exclusive,
 +        }
 +    }
 +}
 +
/// A single (non-`self`) parameter of a [`Function`].
#[derive(Clone, Debug)]
pub struct Param {
    func: Function,
    /// The index in parameter list, including self parameter.
    idx: usize,
    ty: Type,
}
 +
 +impl Param {
 +    pub fn ty(&self) -> &Type {
 +        &self.ty
 +    }
 +
 +    pub fn name(&self, db: &dyn HirDatabase) -> Option<Name> {
 +        db.function_data(self.func.id).params[self.idx].0.clone()
 +    }
 +
 +    pub fn as_local(&self, db: &dyn HirDatabase) -> Option<Local> {
 +        let parent = DefWithBodyId::FunctionId(self.func.into());
 +        let body = db.body(parent);
 +        let pat_id = body.params[self.idx];
 +        if let Pat::Bind { .. } = &body[pat_id] {
 +            Some(Local { parent, pat_id: body.params[self.idx] })
 +        } else {
 +            None
 +        }
 +    }
 +
 +    pub fn pattern_source(&self, db: &dyn HirDatabase) -> Option<ast::Pat> {
 +        self.source(db).and_then(|p| p.value.pat())
 +    }
 +
 +    pub fn source(&self, db: &dyn HirDatabase) -> Option<InFile<ast::Param>> {
 +        let InFile { file_id, value } = self.func.source(db)?;
 +        let params = value.param_list()?;
 +        if params.self_param().is_some() {
 +            params.params().nth(self.idx.checked_sub(1)?)
 +        } else {
 +            params.params().nth(self.idx)
 +        }
 +        .map(|value| InFile { file_id, value })
 +    }
 +}
 +
/// The `self` parameter of a function.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct SelfParam {
    func: FunctionId,
}
 +
 +impl SelfParam {
 +    pub fn access(self, db: &dyn HirDatabase) -> Access {
 +        let func_data = db.function_data(self.func);
 +        func_data
 +            .params
 +            .first()
 +            .map(|(_, param)| match &**param {
 +                TypeRef::Reference(.., mutability) => match mutability {
 +                    hir_def::type_ref::Mutability::Shared => Access::Shared,
 +                    hir_def::type_ref::Mutability::Mut => Access::Exclusive,
 +                },
 +                _ => Access::Owned,
 +            })
 +            .unwrap_or(Access::Owned)
 +    }
 +
 +    pub fn display(self, db: &dyn HirDatabase) -> &'static str {
 +        match self.access(db) {
 +            Access::Shared => "&self",
 +            Access::Exclusive => "&mut self",
 +            Access::Owned => "self",
 +        }
 +    }
 +
 +    pub fn source(&self, db: &dyn HirDatabase) -> Option<InFile<ast::SelfParam>> {
 +        let InFile { file_id, value } = Function::from(self.func).source(db)?;
 +        value
 +            .param_list()
 +            .and_then(|params| params.self_param())
 +            .map(|value| InFile { file_id, value })
 +    }
 +
 +    pub fn ty(&self, db: &dyn HirDatabase) -> Type {
 +        let substs = TyBuilder::placeholder_subst(db, self.func);
 +        let callable_sig =
 +            db.callable_item_signature(self.func.into()).substitute(Interner, &substs);
 +        let environment = db.trait_environment(self.func.into());
 +        let ty = callable_sig.params()[0].clone();
 +        Type { env: environment, ty }
 +    }
 +}
 +
impl HasVisibility for Function {
    /// Visibility as computed by the def database query.
    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
        db.function_visibility(self.id)
    }
}
 +
/// A `const` item (free-standing or associated).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Const {
    pub(crate) id: ConstId,
}
 +
 +impl Const {
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        Module { id: self.id.lookup(db.upcast()).module(db.upcast()) }
 +    }
 +
 +    pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
 +        db.const_data(self.id).name.clone()
 +    }
 +
 +    pub fn value(self, db: &dyn HirDatabase) -> Option<ast::Expr> {
 +        self.source(db)?.value.body()
 +    }
 +
 +    pub fn ty(self, db: &dyn HirDatabase) -> Type {
 +        let data = db.const_data(self.id);
 +        let resolver = self.id.resolver(db.upcast());
 +        let ctx = hir_ty::TyLoweringContext::new(db, &resolver);
 +        let ty = ctx.lower_ty(&data.type_ref);
 +        Type::new_with_resolver_inner(db, &resolver, ty)
 +    }
 +
 +    pub fn eval(self, db: &dyn HirDatabase) -> Result<ComputedExpr, ConstEvalError> {
 +        db.const_eval(self.id)
 +    }
 +}
 +
impl HasVisibility for Const {
    /// Visibility as computed by the def database query.
    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
        db.const_visibility(self.id)
    }
}
 +
/// A `static` item.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Static {
    pub(crate) id: StaticId,
}
 +
 +impl Static {
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        Module { id: self.id.lookup(db.upcast()).module(db.upcast()) }
 +    }
 +
 +    pub fn name(self, db: &dyn HirDatabase) -> Name {
 +        db.static_data(self.id).name.clone()
 +    }
 +
 +    pub fn is_mut(self, db: &dyn HirDatabase) -> bool {
 +        db.static_data(self.id).mutable
 +    }
 +
 +    pub fn value(self, db: &dyn HirDatabase) -> Option<ast::Expr> {
 +        self.source(db)?.value.body()
 +    }
 +
 +    pub fn ty(self, db: &dyn HirDatabase) -> Type {
 +        let data = db.static_data(self.id);
 +        let resolver = self.id.resolver(db.upcast());
 +        let ctx = hir_ty::TyLoweringContext::new(db, &resolver);
 +        let ty = ctx.lower_ty(&data.type_ref);
 +        Type::new_with_resolver_inner(db, &resolver, ty)
 +    }
 +}
 +
impl HasVisibility for Static {
    /// Resolves the raw declared visibility against the static's scope.
    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
        db.static_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
    }
}
 +
/// A trait definition.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Trait {
    pub(crate) id: TraitId,
}
 +
 +impl Trait {
 +    pub fn lang(db: &dyn HirDatabase, krate: Crate, name: &Name) -> Option<Trait> {
 +        db.lang_item(krate.into(), name.to_smol_str())
 +            .and_then(LangItemTarget::as_trait)
 +            .map(Into::into)
 +    }
 +
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        Module { id: self.id.lookup(db.upcast()).container }
 +    }
 +
 +    pub fn name(self, db: &dyn HirDatabase) -> Name {
 +        db.trait_data(self.id).name.clone()
 +    }
 +
 +    pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
 +        db.trait_data(self.id).items.iter().map(|(_name, it)| (*it).into()).collect()
 +    }
 +
 +    pub fn items_with_supertraits(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
 +        let traits = all_super_traits(db.upcast(), self.into());
 +        traits.iter().flat_map(|tr| Trait::from(*tr).items(db)).collect()
 +    }
 +
 +    pub fn is_auto(self, db: &dyn HirDatabase) -> bool {
 +        db.trait_data(self.id).is_auto
 +    }
 +
 +    pub fn is_unsafe(&self, db: &dyn HirDatabase) -> bool {
 +        db.trait_data(self.id).is_unsafe
 +    }
 +
 +    pub fn type_or_const_param_count(
 +        &self,
 +        db: &dyn HirDatabase,
 +        count_required_only: bool,
 +    ) -> usize {
 +        db.generic_params(GenericDefId::from(self.id))
 +            .type_or_consts
 +            .iter()
 +            .filter(|(_, ty)| match ty {
 +                TypeOrConstParamData::TypeParamData(ty)
 +                    if ty.provenance != TypeParamProvenance::TypeParamList =>
 +                {
 +                    false
 +                }
 +                _ => true,
 +            })
 +            .filter(|(_, ty)| !count_required_only || !ty.has_default())
 +            .count()
 +    }
 +}
 +
impl HasVisibility for Trait {
    /// Resolves the raw declared visibility against the trait's scope.
    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
        db.trait_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
    }
}
 +
/// A type alias (`type Foo = …;`), free-standing or associated.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct TypeAlias {
    pub(crate) id: TypeAliasId,
}
 +
 +impl TypeAlias {
 +    pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
 +        let subst = db.generic_defaults(self.id.into());
 +        subst.iter().any(|ty| match ty.skip_binders().data(Interner) {
 +            GenericArgData::Ty(x) => x.is_unknown(),
 +            _ => false,
 +        })
 +    }
 +
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        Module { id: self.id.lookup(db.upcast()).module(db.upcast()) }
 +    }
 +
 +    pub fn type_ref(self, db: &dyn HirDatabase) -> Option<TypeRef> {
 +        db.type_alias_data(self.id).type_ref.as_deref().cloned()
 +    }
 +
 +    pub fn ty(self, db: &dyn HirDatabase) -> Type {
 +        Type::from_def(db, self.id)
 +    }
 +
 +    pub fn name(self, db: &dyn HirDatabase) -> Name {
 +        db.type_alias_data(self.id).name.clone()
 +    }
 +}
 +
 +impl HasVisibility for TypeAlias {
 +    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
 +        let function_data = db.type_alias_data(self.id);
 +        let visibility = &function_data.visibility;
 +        visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
 +    }
 +}
 +
/// A builtin (primitive) type such as `i32`, `bool` or `str`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct BuiltinType {
    pub(crate) inner: hir_def::builtin_type::BuiltinType,
}
 +
 +impl BuiltinType {
 +    pub fn str() -> BuiltinType {
 +        BuiltinType { inner: hir_def::builtin_type::BuiltinType::Str }
 +    }
 +
 +    pub fn ty(self, db: &dyn HirDatabase) -> Type {
 +        Type::new_for_crate(db.crate_graph().iter().next().unwrap(), TyBuilder::builtin(self.inner))
 +    }
 +
 +    pub fn name(self) -> Name {
 +        self.inner.as_name()
 +    }
 +
 +    pub fn is_int(&self) -> bool {
 +        matches!(self.inner, hir_def::builtin_type::BuiltinType::Int(_))
 +    }
 +
 +    pub fn is_uint(&self) -> bool {
 +        matches!(self.inner, hir_def::builtin_type::BuiltinType::Uint(_))
 +    }
 +
 +    pub fn is_float(&self) -> bool {
 +        matches!(self.inner, hir_def::builtin_type::BuiltinType::Float(_))
 +    }
 +
 +    pub fn is_char(&self) -> bool {
 +        matches!(self.inner, hir_def::builtin_type::BuiltinType::Char)
 +    }
 +
 +    pub fn is_bool(&self) -> bool {
 +        matches!(self.inner, hir_def::builtin_type::BuiltinType::Bool)
 +    }
 +
 +    pub fn is_str(&self) -> bool {
 +        matches!(self.inner, hir_def::builtin_type::BuiltinType::Str)
 +    }
 +}
 +
/// The flavor of a macro, as relevant for completion/rendering.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum MacroKind {
    /// `macro_rules!` or Macros 2.0 macro.
    Declarative,
    /// A built-in or custom derive.
    Derive,
    /// A built-in function-like macro.
    BuiltIn,
    /// A procedural attribute macro.
    Attr,
    /// A function-like procedural macro.
    ProcMacro,
}
 +
/// A macro definition of any kind (declarative, built-in or procedural).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Macro {
    pub(crate) id: MacroId,
}
 +
 +impl Macro {
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        Module { id: self.id.module(db.upcast()) }
 +    }
 +
 +    pub fn name(self, db: &dyn HirDatabase) -> Name {
 +        match self.id {
 +            MacroId::Macro2Id(id) => db.macro2_data(id).name.clone(),
 +            MacroId::MacroRulesId(id) => db.macro_rules_data(id).name.clone(),
 +            MacroId::ProcMacroId(id) => db.proc_macro_data(id).name.clone(),
 +        }
 +    }
 +
 +    pub fn is_macro_export(self, db: &dyn HirDatabase) -> bool {
 +        matches!(self.id, MacroId::MacroRulesId(id) if db.macro_rules_data(id).macro_export)
 +    }
 +
 +    pub fn kind(&self, db: &dyn HirDatabase) -> MacroKind {
 +        match self.id {
 +            MacroId::Macro2Id(it) => match it.lookup(db.upcast()).expander {
 +                MacroExpander::Declarative => MacroKind::Declarative,
 +                MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => MacroKind::BuiltIn,
 +                MacroExpander::BuiltInAttr(_) => MacroKind::Attr,
 +                MacroExpander::BuiltInDerive(_) => MacroKind::Derive,
 +            },
 +            MacroId::MacroRulesId(it) => match it.lookup(db.upcast()).expander {
 +                MacroExpander::Declarative => MacroKind::Declarative,
 +                MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => MacroKind::BuiltIn,
 +                MacroExpander::BuiltInAttr(_) => MacroKind::Attr,
 +                MacroExpander::BuiltInDerive(_) => MacroKind::Derive,
 +            },
 +            MacroId::ProcMacroId(it) => match it.lookup(db.upcast()).kind {
 +                ProcMacroKind::CustomDerive => MacroKind::Derive,
 +                ProcMacroKind::FuncLike => MacroKind::ProcMacro,
 +                ProcMacroKind::Attr => MacroKind::Attr,
 +            },
 +        }
 +    }
 +
 +    pub fn is_fn_like(&self, db: &dyn HirDatabase) -> bool {
 +        match self.kind(db) {
 +            MacroKind::Declarative | MacroKind::BuiltIn | MacroKind::ProcMacro => true,
 +            MacroKind::Attr | MacroKind::Derive => false,
 +        }
 +    }
 +
 +    pub fn is_builtin_derive(&self, db: &dyn HirDatabase) -> bool {
 +        match self.id {
 +            MacroId::Macro2Id(it) => {
 +                matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInDerive(_))
 +            }
 +            MacroId::MacroRulesId(it) => {
 +                matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInDerive(_))
 +            }
 +            MacroId::ProcMacroId(_) => false,
 +        }
 +    }
 +
 +    pub fn is_attr(&self, db: &dyn HirDatabase) -> bool {
 +        matches!(self.kind(db), MacroKind::Attr)
 +    }
 +
 +    pub fn is_derive(&self, db: &dyn HirDatabase) -> bool {
 +        matches!(self.kind(db), MacroKind::Derive)
 +    }
 +}
 +
 +impl HasVisibility for Macro {
 +    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
 +        match self.id {
 +            MacroId::Macro2Id(id) => {
 +                let data = db.macro2_data(id);
 +                let visibility = &data.visibility;
 +                visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
 +            }
 +            MacroId::MacroRulesId(_) => Visibility::Public,
 +            MacroId::ProcMacroId(_) => Visibility::Public,
 +        }
 +    }
 +}
 +
/// An item together with the namespace it lives in: types, values, or macros.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub enum ItemInNs {
    Types(ModuleDef),
    Values(ModuleDef),
    Macros(Macro),
}
 +
impl From<Macro> for ItemInNs {
    /// Macros always live in the macro namespace.
    fn from(it: Macro) -> Self {
        Self::Macros(it)
    }
}
 +
 +impl From<ModuleDef> for ItemInNs {
 +    fn from(module_def: ModuleDef) -> Self {
 +        match module_def {
 +            ModuleDef::Static(_) | ModuleDef::Const(_) | ModuleDef::Function(_) => {
 +                ItemInNs::Values(module_def)
 +            }
 +            _ => ItemInNs::Types(module_def),
 +        }
 +    }
 +}
 +
 +impl ItemInNs {
 +    pub fn as_module_def(self) -> Option<ModuleDef> {
 +        match self {
 +            ItemInNs::Types(id) | ItemInNs::Values(id) => Some(id),
 +            ItemInNs::Macros(_) => None,
 +        }
 +    }
 +
 +    /// Returns the crate defining this item (or `None` if `self` is built-in).
 +    pub fn krate(&self, db: &dyn HirDatabase) -> Option<Crate> {
 +        match self {
 +            ItemInNs::Types(did) | ItemInNs::Values(did) => did.module(db).map(|m| m.krate()),
 +            ItemInNs::Macros(id) => Some(id.module(db).krate()),
 +        }
 +    }
 +
 +    pub fn attrs(&self, db: &dyn HirDatabase) -> Option<AttrsWithOwner> {
 +        match self {
 +            ItemInNs::Types(it) | ItemInNs::Values(it) => it.attrs(db),
 +            ItemInNs::Macros(it) => Some(it.attrs(db)),
 +        }
 +    }
 +}
 +
/// An item associated with a trait or an impl: a function, const or type
/// alias.
///
/// Invariant: `inner.as_assoc_item(db).is_some()`
/// We do not actively enforce this invariant.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum AssocItem {
    Function(Function),
    Const(Const),
    TypeAlias(TypeAlias),
}
/// What an associated item is declared in: a trait or an impl block.
#[derive(Debug)]
pub enum AssocItemContainer {
    Trait(Trait),
    Impl(Impl),
}
/// Conversion into [`AssocItem`] for defs that may be trait/impl members.
pub trait AsAssocItem {
    /// Returns `Some` only when the item's container is a trait or an impl.
    fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem>;
}
 +
impl AsAssocItem for Function {
    fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
        as_assoc_item(db, AssocItem::Function, self.id)
    }
}
impl AsAssocItem for Const {
    fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
        as_assoc_item(db, AssocItem::Const, self.id)
    }
}
impl AsAssocItem for TypeAlias {
    fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
        as_assoc_item(db, AssocItem::TypeAlias, self.id)
    }
}
impl AsAssocItem for ModuleDef {
    /// Delegates to the wrapped def for the kinds that can be associated
    /// items; everything else is never one.
    fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
        match self {
            ModuleDef::Function(it) => it.as_assoc_item(db),
            ModuleDef::Const(it) => it.as_assoc_item(db),
            ModuleDef::TypeAlias(it) => it.as_assoc_item(db),
            _ => None,
        }
    }
}
/// Wraps `id` in the `AssocItem` variant built by `ctor`, but only if its
/// container is a trait or an impl; free items (module or extern-block scope)
/// yield `None`.
fn as_assoc_item<ID, DEF, CTOR, AST>(db: &dyn HirDatabase, ctor: CTOR, id: ID) -> Option<AssocItem>
where
    ID: Lookup<Data = AssocItemLoc<AST>>,
    DEF: From<ID>,
    CTOR: FnOnce(DEF) -> AssocItem,
    AST: ItemTreeNode,
{
    match id.lookup(db.upcast()).container {
        ItemContainerId::TraitId(_) | ItemContainerId::ImplId(_) => Some(ctor(DEF::from(id))),
        ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
    }
}
 +
 +impl AssocItem {
 +    pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
 +        match self {
 +            AssocItem::Function(it) => Some(it.name(db)),
 +            AssocItem::Const(it) => it.name(db),
 +            AssocItem::TypeAlias(it) => Some(it.name(db)),
 +        }
 +    }
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        match self {
 +            AssocItem::Function(f) => f.module(db),
 +            AssocItem::Const(c) => c.module(db),
 +            AssocItem::TypeAlias(t) => t.module(db),
 +        }
 +    }
 +    pub fn container(self, db: &dyn HirDatabase) -> AssocItemContainer {
 +        let container = match self {
 +            AssocItem::Function(it) => it.id.lookup(db.upcast()).container,
 +            AssocItem::Const(it) => it.id.lookup(db.upcast()).container,
 +            AssocItem::TypeAlias(it) => it.id.lookup(db.upcast()).container,
 +        };
 +        match container {
 +            ItemContainerId::TraitId(id) => AssocItemContainer::Trait(id.into()),
 +            ItemContainerId::ImplId(id) => AssocItemContainer::Impl(id.into()),
 +            ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
 +                panic!("invalid AssocItem")
 +            }
 +        }
 +    }
 +
 +    pub fn containing_trait(self, db: &dyn HirDatabase) -> Option<Trait> {
 +        match self.container(db) {
 +            AssocItemContainer::Trait(t) => Some(t),
 +            _ => None,
 +        }
 +    }
 +
 +    pub fn containing_trait_impl(self, db: &dyn HirDatabase) -> Option<Trait> {
 +        match self.container(db) {
 +            AssocItemContainer::Impl(i) => i.trait_(db),
 +            _ => None,
 +        }
 +    }
 +
 +    pub fn containing_trait_or_trait_impl(self, db: &dyn HirDatabase) -> Option<Trait> {
 +        match self.container(db) {
 +            AssocItemContainer::Trait(t) => Some(t),
 +            AssocItemContainer::Impl(i) => i.trait_(db),
 +        }
 +    }
 +}
 +
impl HasVisibility for AssocItem {
    /// Delegates to the wrapped item's visibility.
    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
        match self {
            AssocItem::Function(f) => f.visibility(db),
            AssocItem::Const(c) => c.visibility(db),
            AssocItem::TypeAlias(t) => t.visibility(db),
        }
    }
}
 +
impl From<AssocItem> for ModuleDef {
    /// Every associated item is also a module-level def kind.
    fn from(assoc: AssocItem) -> Self {
        match assoc {
            AssocItem::Function(it) => ModuleDef::Function(it),
            AssocItem::Const(it) => ModuleDef::Const(it),
            AssocItem::TypeAlias(it) => ModuleDef::TypeAlias(it),
        }
    }
}
 +
/// Any definition that can carry generic parameters.
#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
pub enum GenericDef {
    Function(Function),
    Adt(Adt),
    Trait(Trait),
    TypeAlias(TypeAlias),
    Impl(Impl),
    // enum variants cannot have generics themselves, but their parent enums
    // can, and this makes some code easier to write
    Variant(Variant),
    // consts can have type parameters from their parents (i.e. associated consts of traits)
    Const(Const),
}
impl_from!(
    Function,
    Adt(Struct, Enum, Union),
    Trait,
    TypeAlias,
    Impl,
    Variant,
    Const
    for GenericDef
);
 +
 +impl GenericDef {
 +    pub fn params(self, db: &dyn HirDatabase) -> Vec<GenericParam> {
 +        let generics = db.generic_params(self.into());
 +        let ty_params = generics.type_or_consts.iter().map(|(local_id, _)| {
 +            let toc = TypeOrConstParam { id: TypeOrConstParamId { parent: self.into(), local_id } };
 +            match toc.split(db) {
 +                Either::Left(x) => GenericParam::ConstParam(x),
 +                Either::Right(x) => GenericParam::TypeParam(x),
 +            }
 +        });
 +        let lt_params = generics
 +            .lifetimes
 +            .iter()
 +            .map(|(local_id, _)| LifetimeParam {
 +                id: LifetimeParamId { parent: self.into(), local_id },
 +            })
 +            .map(GenericParam::LifetimeParam);
 +        lt_params.chain(ty_params).collect()
 +    }
 +
 +    pub fn type_params(self, db: &dyn HirDatabase) -> Vec<TypeOrConstParam> {
 +        let generics = db.generic_params(self.into());
 +        generics
 +            .type_or_consts
 +            .iter()
 +            .map(|(local_id, _)| TypeOrConstParam {
 +                id: TypeOrConstParamId { parent: self.into(), local_id },
 +            })
 +            .collect()
 +    }
 +}
 +
/// A single local definition.
///
/// If the definition of this is part of a "MultiLocal", that is a local that has multiple declarations due to or-patterns
/// then this only references a single one of those.
/// To retrieve the other locals you should use [`Local::associated_locals`]
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct Local {
    // The body (function/const/static) that owns this binding.
    pub(crate) parent: DefWithBodyId,
    // The binding pattern within that body.
    pub(crate) pat_id: PatId,
}
 +
 +impl Local {
 +    pub fn is_param(self, db: &dyn HirDatabase) -> bool {
 +        let src = self.source(db);
 +        match src.value {
 +            Either::Left(pat) => pat
 +                .syntax()
 +                .ancestors()
 +                .map(|it| it.kind())
 +                .take_while(|&kind| ast::Pat::can_cast(kind) || ast::Param::can_cast(kind))
 +                .any(ast::Param::can_cast),
 +            Either::Right(_) => true,
 +        }
 +    }
 +
 +    pub fn as_self_param(self, db: &dyn HirDatabase) -> Option<SelfParam> {
 +        match self.parent {
 +            DefWithBodyId::FunctionId(func) if self.is_self(db) => Some(SelfParam { func }),
 +            _ => None,
 +        }
 +    }
 +
 +    pub fn name(self, db: &dyn HirDatabase) -> Name {
 +        let body = db.body(self.parent);
 +        match &body[self.pat_id] {
 +            Pat::Bind { name, .. } => name.clone(),
 +            _ => {
 +                stdx::never!("hir::Local is missing a name!");
 +                Name::missing()
 +            }
 +        }
 +    }
 +
 +    pub fn is_self(self, db: &dyn HirDatabase) -> bool {
 +        self.name(db) == name![self]
 +    }
 +
 +    pub fn is_mut(self, db: &dyn HirDatabase) -> bool {
 +        let body = db.body(self.parent);
 +        matches!(&body[self.pat_id], Pat::Bind { mode: BindingAnnotation::Mutable, .. })
 +    }
 +
 +    pub fn is_ref(self, db: &dyn HirDatabase) -> bool {
 +        let body = db.body(self.parent);
 +        matches!(
 +            &body[self.pat_id],
 +            Pat::Bind { mode: BindingAnnotation::Ref | BindingAnnotation::RefMut, .. }
 +        )
 +    }
 +
 +    pub fn parent(self, _db: &dyn HirDatabase) -> DefWithBody {
 +        self.parent.into()
 +    }
 +
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        self.parent(db).module(db)
 +    }
 +
 +    pub fn ty(self, db: &dyn HirDatabase) -> Type {
 +        let def = self.parent;
 +        let infer = db.infer(def);
 +        let ty = infer[self.pat_id].clone();
 +        Type::new(db, def, ty)
 +    }
 +
 +    pub fn associated_locals(self, db: &dyn HirDatabase) -> Box<[Local]> {
 +        let body = db.body(self.parent);
 +        body.ident_patterns_for(&self.pat_id)
 +            .iter()
 +            .map(|&pat_id| Local { parent: self.parent, pat_id })
 +            .collect()
 +    }
 +
 +    /// If this local is part of a multi-local, retrieve the representative local.
 +    /// That is the local that references are being resolved to.
 +    pub fn representative(self, db: &dyn HirDatabase) -> Local {
 +        let body = db.body(self.parent);
 +        Local { pat_id: body.pattern_representative(self.pat_id), ..self }
 +    }
 +
 +    pub fn source(self, db: &dyn HirDatabase) -> InFile<Either<ast::IdentPat, ast::SelfParam>> {
 +        let (_body, source_map) = db.body_with_source_map(self.parent);
 +        let src = source_map.pat_syntax(self.pat_id).unwrap(); // Hmm...
 +        let root = src.file_syntax(db.upcast());
 +        src.map(|ast| match ast {
 +            // Suspicious unwrap
 +            Either::Left(it) => Either::Left(it.cast().unwrap().to_node(&root)),
 +            Either::Right(it) => Either::Right(it.to_node(&root)),
 +        })
 +    }
 +}
 +
/// A helper attribute declared by a derive macro (e.g. `#[serde(...)]`).
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct DeriveHelper {
    // The derive macro that declares this helper.
    pub(crate) derive: MacroId,
    // Index of the helper within the macro's helper list.
    pub(crate) idx: usize,
}
 +
 +impl DeriveHelper {
 +    pub fn derive(&self) -> Macro {
 +        Macro { id: self.derive.into() }
 +    }
 +
 +    pub fn name(&self, db: &dyn HirDatabase) -> Name {
 +        match self.derive {
 +            MacroId::Macro2Id(_) => None,
 +            MacroId::MacroRulesId(_) => None,
 +            MacroId::ProcMacroId(proc_macro) => db
 +                .proc_macro_data(proc_macro)
 +                .helpers
 +                .as_ref()
 +                .and_then(|it| it.get(self.idx))
 +                .cloned(),
 +        }
 +        .unwrap_or_else(|| Name::missing())
 +    }
 +}
 +
// FIXME: Wrong name? This is could also be a registered attribute
/// A compiler-known (inert) attribute, or one registered via
/// `#![register_attr]` in a specific crate (`krate: Some(_)`).
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct BuiltinAttr {
    // `None` for compiler built-ins; `Some` for crate-registered attributes.
    krate: Option<CrateId>,
    // Index into the built-in table or the crate's registered-attr list.
    idx: usize,
}
 +
 +impl BuiltinAttr {
 +    // FIXME: consider crates\hir_def\src\nameres\attr_resolution.rs?
 +    pub(crate) fn by_name(db: &dyn HirDatabase, krate: Crate, name: &str) -> Option<Self> {
 +        if let builtin @ Some(_) = Self::builtin(name) {
 +            return builtin;
 +        }
 +        let idx = db.crate_def_map(krate.id).registered_attrs().iter().position(|it| it == name)?;
 +        Some(BuiltinAttr { krate: Some(krate.id), idx })
 +    }
 +
 +    fn builtin(name: &str) -> Option<Self> {
 +        hir_def::builtin_attr::INERT_ATTRIBUTES
 +            .iter()
 +            .position(|tool| tool.name == name)
 +            .map(|idx| BuiltinAttr { krate: None, idx })
 +    }
 +
 +    pub fn name(&self, db: &dyn HirDatabase) -> SmolStr {
 +        // FIXME: Return a `Name` here
 +        match self.krate {
 +            Some(krate) => db.crate_def_map(krate).registered_attrs()[self.idx].clone(),
 +            None => SmolStr::new(hir_def::builtin_attr::INERT_ATTRIBUTES[self.idx].name),
 +        }
 +    }
 +
 +    pub fn template(&self, _: &dyn HirDatabase) -> Option<AttributeTemplate> {
 +        match self.krate {
 +            Some(_) => None,
 +            None => Some(hir_def::builtin_attr::INERT_ATTRIBUTES[self.idx].template),
 +        }
 +    }
 +}
 +
/// A tool namespace for attributes (e.g. `rustfmt`, `clippy`), either
/// compiler-known or registered via `#![register_tool]`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ToolModule {
    // `None` for compiler built-ins; `Some` for crate-registered tools.
    krate: Option<CrateId>,
    // Index into the built-in table or the crate's registered-tool list.
    idx: usize,
}
 +
 +impl ToolModule {
 +    // FIXME: consider crates\hir_def\src\nameres\attr_resolution.rs?
 +    pub(crate) fn by_name(db: &dyn HirDatabase, krate: Crate, name: &str) -> Option<Self> {
 +        if let builtin @ Some(_) = Self::builtin(name) {
 +            return builtin;
 +        }
 +        let idx = db.crate_def_map(krate.id).registered_tools().iter().position(|it| it == name)?;
 +        Some(ToolModule { krate: Some(krate.id), idx })
 +    }
 +
 +    fn builtin(name: &str) -> Option<Self> {
 +        hir_def::builtin_attr::TOOL_MODULES
 +            .iter()
 +            .position(|&tool| tool == name)
 +            .map(|idx| ToolModule { krate: None, idx })
 +    }
 +
 +    pub fn name(&self, db: &dyn HirDatabase) -> SmolStr {
 +        // FIXME: Return a `Name` here
 +        match self.krate {
 +            Some(krate) => db.crate_def_map(krate).registered_tools()[self.idx].clone(),
 +            None => SmolStr::new(hir_def::builtin_attr::TOOL_MODULES[self.idx]),
 +        }
 +    }
 +}
 +
/// A loop/block label (`'label:`) inside a body.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct Label {
    // The body (function/const/static) that contains this label.
    pub(crate) parent: DefWithBodyId,
    pub(crate) label_id: LabelId,
}
 +
 +impl Label {
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        self.parent(db).module(db)
 +    }
 +
 +    pub fn parent(self, _db: &dyn HirDatabase) -> DefWithBody {
 +        self.parent.into()
 +    }
 +
 +    pub fn name(self, db: &dyn HirDatabase) -> Name {
 +        let body = db.body(self.parent);
 +        body[self.label_id].name.clone()
 +    }
 +
 +    pub fn source(self, db: &dyn HirDatabase) -> InFile<ast::Label> {
 +        let (_body, source_map) = db.body_with_source_map(self.parent);
 +        let src = source_map.label_syntax(self.label_id);
 +        let root = src.file_syntax(db.upcast());
 +        src.map(|ast| ast.to_node(&root))
 +    }
 +}
 +
/// A single generic parameter: a type, const or lifetime parameter.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub enum GenericParam {
    TypeParam(TypeParam),
    ConstParam(ConstParam),
    LifetimeParam(LifetimeParam),
}
impl_from!(TypeParam, ConstParam, LifetimeParam for GenericParam);
 +
impl GenericParam {
    /// The module of the definition that declares this parameter.
    pub fn module(self, db: &dyn HirDatabase) -> Module {
        match self {
            GenericParam::TypeParam(it) => it.module(db),
            GenericParam::ConstParam(it) => it.module(db),
            GenericParam::LifetimeParam(it) => it.module(db),
        }
    }

    /// The parameter's name.
    pub fn name(self, db: &dyn HirDatabase) -> Name {
        match self {
            GenericParam::TypeParam(it) => it.name(db),
            GenericParam::ConstParam(it) => it.name(db),
            GenericParam::LifetimeParam(it) => it.name(db),
        }
    }
}
 +
/// A type parameter of a generic definition.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct TypeParam {
    pub(crate) id: TypeParamId,
}
 +
 +impl TypeParam {
    /// Widens this into the combined type-or-const parameter view.
    pub fn merge(self) -> TypeOrConstParam {
        TypeOrConstParam { id: self.id.into() }
    }
 +
    /// The parameter's name (delegates to the merged view).
    pub fn name(self, db: &dyn HirDatabase) -> Name {
        self.merge().name(db)
    }
 +
    /// The module of the definition that declares this parameter.
    pub fn module(self, db: &dyn HirDatabase) -> Module {
        self.id.parent().module(db.upcast()).into()
    }
 +
 +    /// Is this type parameter implicitly introduced (eg. `Self` in a trait or an `impl Trait`
 +    /// argument)?
 +    pub fn is_implicit(self, db: &dyn HirDatabase) -> bool {
 +        let params = db.generic_params(self.id.parent());
 +        let data = &params.type_or_consts[self.id.local_id()];
 +        match data.type_param().unwrap().provenance {
 +            hir_def::generics::TypeParamProvenance::TypeParamList => false,
 +            hir_def::generics::TypeParamProvenance::TraitSelf
 +            | hir_def::generics::TypeParamProvenance::ArgumentImplTrait => true,
 +        }
 +    }
 +
 +    pub fn ty(self, db: &dyn HirDatabase) -> Type {
 +        let resolver = self.id.parent().resolver(db.upcast());
 +        let ty =
 +            TyKind::Placeholder(hir_ty::to_placeholder_idx(db, self.id.into())).intern(Interner);
 +        Type::new_with_resolver_inner(db, &resolver, ty)
 +    }
 +
 +    /// FIXME: this only lists trait bounds from the item defining the type
 +    /// parameter, not additional bounds that might be added e.g. by a method if
 +    /// the parameter comes from an impl!
 +    pub fn trait_bounds(self, db: &dyn HirDatabase) -> Vec<Trait> {
 +        db.generic_predicates_for_param(self.id.parent(), self.id.into(), None)
 +            .iter()
 +            .filter_map(|pred| match &pred.skip_binders().skip_binders() {
 +                hir_ty::WhereClause::Implemented(trait_ref) => {
 +                    Some(Trait::from(trait_ref.hir_trait_id()))
 +                }
 +                _ => None,
 +            })
 +            .collect()
 +    }
 +
 +    pub fn default(self, db: &dyn HirDatabase) -> Option<Type> {
 +        let params = db.generic_defaults(self.id.parent());
 +        let local_idx = hir_ty::param_idx(db, self.id.into())?;
 +        let resolver = self.id.parent().resolver(db.upcast());
 +        let ty = params.get(local_idx)?.clone();
 +        let subst = TyBuilder::placeholder_subst(db, self.id.parent());
 +        let ty = ty.substitute(Interner, &subst_prefix(&subst, local_idx));
 +        match ty.data(Interner) {
 +            GenericArgData::Ty(x) => Some(Type::new_with_resolver_inner(db, &resolver, x.clone())),
 +            _ => None,
 +        }
 +    }
 +}
 +
 +/// A lifetime parameter of a generic item.
 +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 +pub struct LifetimeParam {
 +    pub(crate) id: LifetimeParamId,
 +}
 +
 +impl LifetimeParam {
 +    pub fn name(self, db: &dyn HirDatabase) -> Name {
 +        let params = db.generic_params(self.id.parent);
 +        params.lifetimes[self.id.local_id].name.clone()
 +    }
 +
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        self.id.parent.module(db.upcast()).into()
 +    }
 +
 +    pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
 +        self.id.parent.into()
 +    }
 +}
 +
 +/// A const generic parameter of a generic item.
 +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 +pub struct ConstParam {
 +    pub(crate) id: ConstParamId,
 +}
 +
 +impl ConstParam {
 +    pub fn merge(self) -> TypeOrConstParam {
 +        TypeOrConstParam { id: self.id.into() }
 +    }
 +
 +    pub fn name(self, db: &dyn HirDatabase) -> Name {
 +        let params = db.generic_params(self.id.parent());
 +        match params.type_or_consts[self.id.local_id()].name() {
 +            Some(x) => x.clone(),
 +            None => {
 +                never!();
 +                Name::missing()
 +            }
 +        }
 +    }
 +
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        self.id.parent().module(db.upcast()).into()
 +    }
 +
 +    pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
 +        self.id.parent().into()
 +    }
 +
 +    pub fn ty(self, db: &dyn HirDatabase) -> Type {
 +        Type::new(db, self.id.parent(), db.const_param_ty(self.id))
 +    }
 +}
 +
 +/// A generic parameter that is either a type or a const parameter
 +/// (lifetimes are tracked separately).
 +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
 +pub struct TypeOrConstParam {
 +    pub(crate) id: TypeOrConstParamId,
 +}
 +
 +impl TypeOrConstParam {
 +    pub fn name(self, db: &dyn HirDatabase) -> Name {
 +        let params = db.generic_params(self.id.parent);
 +        match params.type_or_consts[self.id.local_id].name() {
 +            Some(n) => n.clone(),
 +            _ => Name::missing(),
 +        }
 +    }
 +
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        self.id.parent.module(db.upcast()).into()
 +    }
 +
 +    pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
 +        self.id.parent.into()
 +    }
 +
 +    pub fn split(self, db: &dyn HirDatabase) -> Either<ConstParam, TypeParam> {
 +        let params = db.generic_params(self.id.parent);
 +        match &params.type_or_consts[self.id.local_id] {
 +            hir_def::generics::TypeOrConstParamData::TypeParamData(_) => {
 +                Either::Right(TypeParam { id: TypeParamId::from_unchecked(self.id) })
 +            }
 +            hir_def::generics::TypeOrConstParamData::ConstParamData(_) => {
 +                Either::Left(ConstParam { id: ConstParamId::from_unchecked(self.id) })
 +            }
 +        }
 +    }
 +
 +    pub fn ty(self, db: &dyn HirDatabase) -> Type {
 +        match self.split(db) {
 +            Either::Left(x) => x.ty(db),
 +            Either::Right(x) => x.ty(db),
 +        }
 +    }
 +}
 +
 +/// An `impl` block (inherent or trait impl).
 +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 +pub struct Impl {
 +    pub(crate) id: ImplId,
 +}
 +
 +impl Impl {
 +    /// All impls (inherent and trait) defined in the given crate.
 +    pub fn all_in_crate(db: &dyn HirDatabase, krate: Crate) -> Vec<Impl> {
 +        let inherent = db.inherent_impls_in_crate(krate.id);
 +        let trait_ = db.trait_impls_in_crate(krate.id);
 +
 +        inherent.all_impls().chain(trait_.all_impls()).map(Self::from).collect()
 +    }
 +
 +    /// All impls whose self type matches `ty`, collected from the crates that
 +    /// may define `ty` and (for trait impls) their reverse dependencies.
 +    pub fn all_for_type(db: &dyn HirDatabase, Type { ty, env }: Type) -> Vec<Impl> {
 +        let def_crates = match method_resolution::def_crates(db, &ty, env.krate) {
 +            Some(def_crates) => def_crates,
 +            None => return Vec::new(),
 +        };
 +
 +        // Keep only impls whose self type has the same type constructor as
 +        // `ty`, looking through one level of reference.
 +        let filter = |impl_def: &Impl| {
 +            let self_ty = impl_def.self_ty(db);
 +            let rref = self_ty.remove_ref();
 +            ty.equals_ctor(rref.as_ref().map_or(&self_ty.ty, |it| &it.ty))
 +        };
 +
 +        let fp = TyFingerprint::for_inherent_impl(&ty);
 +        let fp = match fp {
 +            Some(fp) => fp,
 +            None => return Vec::new(),
 +        };
 +
 +        // Inherent impls can only live in the crates defining the type.
 +        let mut all = Vec::new();
 +        def_crates.iter().for_each(|&id| {
 +            all.extend(
 +                db.inherent_impls_in_crate(id)
 +                    .for_self_ty(&ty)
 +                    .iter()
 +                    .cloned()
 +                    .map(Self::from)
 +                    .filter(filter),
 +            )
 +        });
 +        // Trait impls may also live in any crate depending on a defining
 +        // crate, so walk the reverse-dependency closure as well.
 +        for id in def_crates
 +            .iter()
 +            .flat_map(|&id| Crate { id }.transitive_reverse_dependencies(db))
 +            .map(|Crate { id }| id)
 +            .chain(def_crates.iter().copied())
 +            .unique()
 +        {
 +            all.extend(
 +                db.trait_impls_in_crate(id)
 +                    .for_self_ty_without_blanket_impls(fp)
 +                    .map(Self::from)
 +                    .filter(filter),
 +            );
 +        }
 +        all
 +    }
 +
 +    /// All impls of `trait_`, searched in every crate that can see the trait.
 +    pub fn all_for_trait(db: &dyn HirDatabase, trait_: Trait) -> Vec<Impl> {
 +        let krate = trait_.module(db).krate();
 +        let mut all = Vec::new();
 +        for Crate { id } in krate.transitive_reverse_dependencies(db).into_iter() {
 +            let impls = db.trait_impls_in_crate(id);
 +            all.extend(impls.for_trait(trait_.id).map(Self::from))
 +        }
 +        all
 +    }
 +
 +    // FIXME: the return type is wrong. This should be a hir version of
 +    // `TraitRef` (to account for parameters and qualifiers)
 +    pub fn trait_(self, db: &dyn HirDatabase) -> Option<Trait> {
 +        let trait_ref = db.impl_trait(self.id)?.skip_binders().clone();
 +        let id = hir_ty::from_chalk_trait_id(trait_ref.trait_id);
 +        Some(Trait { id })
 +    }
 +
 +    /// The impl's self type, with its generic parameters as placeholders.
 +    pub fn self_ty(self, db: &dyn HirDatabase) -> Type {
 +        let resolver = self.id.resolver(db.upcast());
 +        let substs = TyBuilder::placeholder_subst(db, self.id);
 +        let ty = db.impl_self_ty(self.id).substitute(Interner, &substs);
 +        Type::new_with_resolver_inner(db, &resolver, ty)
 +    }
 +
 +    /// The associated items declared inside the impl block.
 +    pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
 +        db.impl_data(self.id).items.iter().map(|it| (*it).into()).collect()
 +    }
 +
 +    /// Whether this is a negative impl (`impl !Trait for ...`).
 +    pub fn is_negative(self, db: &dyn HirDatabase) -> bool {
 +        db.impl_data(self.id).is_negative
 +    }
 +
 +    pub fn module(self, db: &dyn HirDatabase) -> Module {
 +        self.id.lookup(db.upcast()).container.into()
 +    }
 +
 +    /// If this impl was expanded from a builtin derive, the `#[derive]`
 +    /// attribute it came from.
 +    pub fn is_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> {
 +        let src = self.source(db)?;
 +        src.file_id.is_builtin_derive(db.upcast())
 +    }
 +}
 +
 +/// A semantic type, paired with the trait environment it is interpreted in.
 +#[derive(Clone, PartialEq, Eq, Debug)]
 +pub struct Type {
 +    env: Arc<TraitEnvironment>,
 +    ty: Ty,
 +}
 +
 +impl Type {
 +    /// Wraps `ty` with the trait environment derived from `resolver`.
 +    pub(crate) fn new_with_resolver(db: &dyn HirDatabase, resolver: &Resolver, ty: Ty) -> Type {
 +        Type::new_with_resolver_inner(db, resolver, ty)
 +    }
 +
 +    pub(crate) fn new_with_resolver_inner(
 +        db: &dyn HirDatabase,
 +        resolver: &Resolver,
 +        ty: Ty,
 +    ) -> Type {
 +        // Fall back to an empty environment when the resolver has no generic
 +        // definition to derive one from.
 +        let environment = resolver.generic_def().map_or_else(
 +            || Arc::new(TraitEnvironment::empty(resolver.krate())),
 +            |d| db.trait_environment(d),
 +        );
 +        Type { env: environment, ty }
 +    }
 +
 +    /// Wraps `ty` with an empty trait environment for `krate`.
 +    pub(crate) fn new_for_crate(krate: CrateId, ty: Ty) -> Type {
 +        Type { env: Arc::new(TraitEnvironment::empty(krate)), ty }
 +    }
 +
 +    /// Builds `&inner` or `&mut inner` (with the static lifetime).
 +    pub fn reference(inner: &Type, m: Mutability) -> Type {
 +        inner.derived(
 +            TyKind::Ref(
 +                if m.is_mut() { hir_ty::Mutability::Mut } else { hir_ty::Mutability::Not },
 +                hir_ty::static_lifetime(),
 +                inner.ty.clone(),
 +            )
 +            .intern(Interner),
 +        )
 +    }
 +
 +    fn new(db: &dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Type {
 +        let resolver = lexical_env.resolver(db.upcast());
 +        let environment = resolver.generic_def().map_or_else(
 +            || Arc::new(TraitEnvironment::empty(resolver.krate())),
 +            |d| db.trait_environment(d),
 +        );
 +        Type { env: environment, ty }
 +    }
 +
 +    /// The type of `def` with all generic arguments left unknown.
 +    fn from_def(db: &dyn HirDatabase, def: impl HasResolver + Into<TyDefId>) -> Type {
 +        let ty = TyBuilder::def_ty(db, def.into()).fill_with_unknown().build();
 +        Type::new(db, def, ty)
 +    }
 +
 +    /// Builds the slice type `[ty]`, keeping `ty`'s environment.
 +    pub fn new_slice(ty: Type) -> Type {
 +        Type { env: ty.env, ty: TyBuilder::slice(ty.ty) }
 +    }
 +
 +    /// Is this the unit type `()` (the zero-element tuple)?
 +    pub fn is_unit(&self) -> bool {
 +        matches!(self.ty.kind(Interner), TyKind::Tuple(0, ..))
 +    }
 +
 +    pub fn is_bool(&self) -> bool {
 +        matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Bool))
 +    }
 +
 +    pub fn is_never(&self) -> bool {
 +        matches!(self.ty.kind(Interner), TyKind::Never)
 +    }
 +
 +    pub fn is_mutable_reference(&self) -> bool {
 +        matches!(self.ty.kind(Interner), TyKind::Ref(hir_ty::Mutability::Mut, ..))
 +    }
 +
 +    pub fn is_reference(&self) -> bool {
 +        matches!(self.ty.kind(Interner), TyKind::Ref(..))
 +    }
 +
 +    /// If this is a reference, its referent type and mutability.
 +    pub fn as_reference(&self) -> Option<(Type, Mutability)> {
 +        let (ty, _lt, m) = self.ty.as_reference()?;
 +        let m = Mutability::from_mutable(matches!(m, hir_ty::Mutability::Mut));
 +        Some((self.derived(ty.clone()), m))
 +    }
 +
 +    pub fn is_slice(&self) -> bool {
 +        matches!(self.ty.kind(Interner), TyKind::Slice(..))
 +    }
 +
 +    pub fn is_usize(&self) -> bool {
 +        matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Uint(UintTy::Usize)))
 +    }
 +
 +    /// Strips exactly one reference, if there is one.
 +    pub fn remove_ref(&self) -> Option<Type> {
 +        match &self.ty.kind(Interner) {
 +            TyKind::Ref(.., ty) => Some(self.derived(ty.clone())),
 +            _ => None,
 +        }
 +    }
 +
 +    /// Strips all leading references (`&&&T` -> `T`).
 +    pub fn strip_references(&self) -> Type {
 +        self.derived(self.ty.strip_references().clone())
 +    }
 +
 +    /// Strips at most one leading reference (returns `self` unchanged otherwise).
 +    pub fn strip_reference(&self) -> Type {
 +        self.derived(self.ty.strip_reference().clone())
 +    }
 +
 +    pub fn is_unknown(&self) -> bool {
 +        self.ty.is_unknown()
 +    }
 +
 +    /// Checks that particular type `ty` implements `std::future::IntoFuture` or
 +    /// `std::future::Future`.
 +    /// This function is used in `.await` syntax completion.
 +    pub fn impls_into_future(&self, db: &dyn HirDatabase) -> bool {
 +        // Prefer the trait containing the `into_future` lang item; fall back
 +        // to the `future_trait` lang item when it is absent.
 +        let trait_ = db
 +            .lang_item(self.env.krate, SmolStr::new_inline("into_future"))
 +            .and_then(|it| {
 +                let into_future_fn = it.as_function()?;
 +                let assoc_item = as_assoc_item(db, AssocItem::Function, into_future_fn)?;
 +                let into_future_trait = assoc_item.containing_trait_or_trait_impl(db)?;
 +                Some(into_future_trait.id)
 +            })
 +            .or_else(|| {
 +                let future_trait =
 +                    db.lang_item(self.env.krate, SmolStr::new_inline("future_trait"))?;
 +                future_trait.as_trait()
 +            });
 +
 +        let trait_ = match trait_ {
 +            Some(it) => it,
 +            None => return false,
 +        };
 +
 +        let canonical_ty =
 +            Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) };
 +        method_resolution::implements_trait(&canonical_ty, db, self.env.clone(), trait_)
 +    }
 +
 +    /// Checks that particular type `ty` implements `std::ops::FnOnce`.
 +    ///
 +    /// This function can be used to check if a particular type is callable, since FnOnce is a
 +    /// supertrait of Fn and FnMut, so all callable types implements at least FnOnce.
 +    pub fn impls_fnonce(&self, db: &dyn HirDatabase) -> bool {
 +        let fnonce_trait = match FnTrait::FnOnce.get_id(db, self.env.krate) {
 +            Some(it) => it,
 +            None => return false,
 +        };
 +
 +        let canonical_ty =
 +            Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) };
 +        method_resolution::implements_trait_unique(
 +            &canonical_ty,
 +            db,
 +            self.env.clone(),
 +            fnonce_trait,
 +        )
 +    }
 +
 +    /// Does `self` implement `trait_` when its remaining generic arguments are
 +    /// filled with `args`? `args` must supply exactly the trait's non-self
 +    /// parameters.
 +    pub fn impls_trait(&self, db: &dyn HirDatabase, trait_: Trait, args: &[Type]) -> bool {
 +        let mut it = args.iter().map(|t| t.ty.clone());
 +        let trait_ref = TyBuilder::trait_ref(db, trait_.id)
 +            .push(self.ty.clone())
 +            .fill(|x| {
 +                let r = it.next().unwrap();
 +                match x {
 +                    ParamKind::Type => GenericArgData::Ty(r).intern(Interner),
 +                    ParamKind::Const(ty) => {
 +                        // FIXME: this code is not covered in tests.
 +                        unknown_const_as_generic(ty.clone())
 +                    }
 +                }
 +            })
 +            .build();
 +
 +        let goal = Canonical {
 +            value: hir_ty::InEnvironment::new(&self.env.env, trait_ref.cast(Interner)),
 +            binders: CanonicalVarKinds::empty(Interner),
 +        };
 +
 +        db.trait_solve(self.env.krate, goal).is_some()
 +    }
 +
 +    /// Normalizes the projection `<Self as Trait<args>>::Alias`, returning the
 +    /// resolved type if the solver finds a unique solution.
 +    pub fn normalize_trait_assoc_type(
 +        &self,
 +        db: &dyn HirDatabase,
 +        args: &[Type],
 +        alias: TypeAlias,
 +    ) -> Option<Type> {
 +        let mut args = args.iter();
 +        let projection = TyBuilder::assoc_type_projection(db, alias.id)
 +            .push(self.ty.clone())
 +            .fill(|x| {
 +                // FIXME: this code is not covered in tests.
 +                match x {
 +                    ParamKind::Type => {
 +                        GenericArgData::Ty(args.next().unwrap().ty.clone()).intern(Interner)
 +                    }
 +                    ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
 +                }
 +            })
 +            .build();
 +        // Solve `projection == ?0` for the single canonical variable ?0.
 +        let goal = hir_ty::make_canonical(
 +            InEnvironment::new(
 +                &self.env.env,
 +                AliasEq {
 +                    alias: AliasTy::Projection(projection),
 +                    ty: TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0))
 +                        .intern(Interner),
 +                }
 +                .cast(Interner),
 +            ),
 +            [TyVariableKind::General].into_iter(),
 +        );
 +
 +        match db.trait_solve(self.env.krate, goal)? {
 +            Solution::Unique(s) => s
 +                .value
 +                .subst
 +                .as_slice(Interner)
 +                .first()
 +                .map(|ty| self.derived(ty.assert_ty_ref(Interner).clone())),
 +            Solution::Ambig(_) => None,
 +        }
 +    }
 +
 +    /// Does this type implement the `Copy` trait (via the `copy` lang item)?
 +    pub fn is_copy(&self, db: &dyn HirDatabase) -> bool {
 +        let lang_item = db.lang_item(self.env.krate, SmolStr::new_inline("copy"));
 +        let copy_trait = match lang_item {
 +            Some(LangItemTarget::TraitId(it)) => it,
 +            _ => return false,
 +        };
 +        self.impls_trait(db, copy_trait.into(), &[])
 +    }
 +
 +    /// If this type is callable (closure, fn pointer, or fn-like def),
 +    /// its callable descriptor.
 +    pub fn as_callable(&self, db: &dyn HirDatabase) -> Option<Callable> {
 +        let callee = match self.ty.kind(Interner) {
 +            TyKind::Closure(id, _) => Callee::Closure(*id),
 +            TyKind::Function(_) => Callee::FnPtr,
 +            _ => Callee::Def(self.ty.callable_def(db)?),
 +        };
 +
 +        let sig = self.ty.callable_sig(db)?;
 +        Some(Callable { ty: self.clone(), sig, callee, is_bound_method: false })
 +    }
 +
 +    pub fn is_closure(&self) -> bool {
 +        matches!(&self.ty.kind(Interner), TyKind::Closure { .. })
 +    }
 +
 +    /// Is this an fn item or fn pointer type?
 +    pub fn is_fn(&self) -> bool {
 +        matches!(&self.ty.kind(Interner), TyKind::FnDef(..) | TyKind::Function { .. })
 +    }
 +
 +    pub fn is_array(&self) -> bool {
 +        matches!(&self.ty.kind(Interner), TyKind::Array(..))
 +    }
 +
 +    /// Is this a struct with `#[repr(packed)]`?
 +    pub fn is_packed(&self, db: &dyn HirDatabase) -> bool {
 +        let adt_id = match *self.ty.kind(Interner) {
 +            TyKind::Adt(hir_ty::AdtId(adt_id), ..) => adt_id,
 +            _ => return false,
 +        };
 +
 +        let adt = adt_id.into();
 +        match adt {
 +            Adt::Struct(s) => matches!(s.repr(db), Some(ReprKind::Packed)),
 +            _ => false,
 +        }
 +    }
 +
 +    pub fn is_raw_ptr(&self) -> bool {
 +        matches!(&self.ty.kind(Interner), TyKind::Raw(..))
 +    }
 +
 +    /// Does this type contain `TyKind::Error` (an unresolved/unknown type)
 +    /// anywhere in its structure?
 +    pub fn contains_unknown(&self) -> bool {
 +        return go(&self.ty);
 +
 +        fn go(ty: &Ty) -> bool {
 +            match ty.kind(Interner) {
 +                TyKind::Error => true,
 +
 +                // Recurse into substitutions of constructors that carry them.
 +                TyKind::Adt(_, substs)
 +                | TyKind::AssociatedType(_, substs)
 +                | TyKind::Tuple(_, substs)
 +                | TyKind::OpaqueType(_, substs)
 +                | TyKind::FnDef(_, substs)
 +                | TyKind::Closure(_, substs) => {
 +                    substs.iter(Interner).filter_map(|a| a.ty(Interner)).any(go)
 +                }
 +
 +                // An array with an unknown length counts as unknown too.
 +                TyKind::Array(_ty, len) if len.is_unknown() => true,
 +                TyKind::Array(ty, _)
 +                | TyKind::Slice(ty)
 +                | TyKind::Raw(_, ty)
 +                | TyKind::Ref(_, _, ty) => go(ty),
 +
 +                TyKind::Scalar(_)
 +                | TyKind::Str
 +                | TyKind::Never
 +                | TyKind::Placeholder(_)
 +                | TyKind::BoundVar(_)
 +                | TyKind::InferenceVar(_, _)
 +                | TyKind::Dyn(_)
 +                | TyKind::Function(_)
 +                | TyKind::Alias(_)
 +                | TyKind::Foreign(_)
 +                | TyKind::Generator(..)
 +                | TyKind::GeneratorWitness(..) => false,
 +            }
 +        }
 +    }
 +
 +    /// The named fields of a struct or union type, with their instantiated
 +    /// types; empty for any other kind of type.
 +    pub fn fields(&self, db: &dyn HirDatabase) -> Vec<(Field, Type)> {
 +        let (variant_id, substs) = match self.ty.kind(Interner) {
 +            TyKind::Adt(hir_ty::AdtId(AdtId::StructId(s)), substs) => ((*s).into(), substs),
 +            TyKind::Adt(hir_ty::AdtId(AdtId::UnionId(u)), substs) => ((*u).into(), substs),
 +            _ => return Vec::new(),
 +        };
 +
 +        db.field_types(variant_id)
 +            .iter()
 +            .map(|(local_id, ty)| {
 +                let def = Field { parent: variant_id.into(), id: local_id };
 +                let ty = ty.clone().substitute(Interner, substs);
 +                (def, self.derived(ty))
 +            })
 +            .collect()
 +    }
 +
 +    /// The element types of a tuple type; empty for non-tuples.
 +    pub fn tuple_fields(&self, _db: &dyn HirDatabase) -> Vec<Type> {
 +        if let TyKind::Tuple(_, substs) = &self.ty.kind(Interner) {
 +            substs
 +                .iter(Interner)
 +                .map(|ty| self.derived(ty.assert_ty_ref(Interner).clone()))
 +                .collect()
 +        } else {
 +            Vec::new()
 +        }
 +    }
 +
 +    /// The autoderef chain of this type (`self`, `*self`, `**self`, ...).
 +    pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a {
 +        self.autoderef_(db).map(move |ty| self.derived(ty))
 +    }
 +
 +    fn autoderef_<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Ty> + 'a {
 +        // There should be no inference vars in types passed here
 +        let canonical = hir_ty::replace_errors_with_variables(&self.ty);
 +        let environment = self.env.clone();
 +        autoderef(db, environment, canonical).map(|canonical| canonical.value)
 +    }
 +
 +    // This would be nicer if it just returned an iterator, but that runs into
 +    // lifetime problems, because we need to borrow temp `CrateImplDefs`.
 +    pub fn iterate_assoc_items<T>(
 +        &self,
 +        db: &dyn HirDatabase,
 +        krate: Crate,
 +        mut callback: impl FnMut(AssocItem) -> Option<T>,
 +    ) -> Option<T> {
 +        // Stop at the first item for which `callback` returns `Some`.
 +        let mut slot = None;
 +        self.iterate_assoc_items_dyn(db, krate, &mut |assoc_item_id| {
 +            slot = callback(assoc_item_id.into());
 +            slot.is_some()
 +        });
 +        slot
 +    }
 +
 +    /// Dyn-dispatch core of `iterate_assoc_items`; visits the items of every
 +    /// inherent impl for this self type. `callback` returns `true` to stop.
 +    fn iterate_assoc_items_dyn(
 +        &self,
 +        db: &dyn HirDatabase,
 +        krate: Crate,
 +        callback: &mut dyn FnMut(AssocItemId) -> bool,
 +    ) {
 +        let def_crates = match method_resolution::def_crates(db, &self.ty, krate.id) {
 +            Some(it) => it,
 +            None => return,
 +        };
 +        for krate in def_crates {
 +            let impls = db.inherent_impls_in_crate(krate);
 +
 +            for impl_def in impls.for_self_ty(&self.ty) {
 +                for &item in db.impl_data(*impl_def).items.iter() {
 +                    if callback(item) {
 +                        return;
 +                    }
 +                }
 +            }
 +        }
 +    }
 +
 +    /// The generic type arguments of this (possibly referenced) ADT type.
 +    pub fn type_arguments(&self) -> impl Iterator<Item = Type> + '_ {
 +        self.ty
 +            .strip_references()
 +            .as_adt()
 +            .into_iter()
 +            .flat_map(|(_, substs)| substs.iter(Interner))
 +            .filter_map(|arg| arg.ty(Interner).cloned())
 +            .map(move |ty| self.derived(ty))
 +    }
 +
 +    /// Iterates method candidates callable on this type (method-call lookup
 +    /// mode), stopping at the first one for which `callback` returns `Some`.
 +    pub fn iterate_method_candidates<T>(
 +        &self,
 +        db: &dyn HirDatabase,
 +        scope: &SemanticsScope<'_>,
 +        // FIXME this can be retrieved from `scope`, except autoimport uses this
 +        // to specify a different set, so the method needs to be split
 +        traits_in_scope: &FxHashSet<TraitId>,
 +        with_local_impls: Option<Module>,
 +        name: Option<&Name>,
 +        mut callback: impl FnMut(Function) -> Option<T>,
 +    ) -> Option<T> {
 +        let _p = profile::span("iterate_method_candidates");
 +        let mut slot = None;
 +
 +        self.iterate_method_candidates_dyn(
 +            db,
 +            scope,
 +            traits_in_scope,
 +            with_local_impls,
 +            name,
 +            &mut |assoc_item_id| {
 +                // Method-call resolution only yields functions.
 +                if let AssocItemId::FunctionId(func) = assoc_item_id {
 +                    if let Some(res) = callback(func.into()) {
 +                        slot = Some(res);
 +                        return ControlFlow::Break(());
 +                    }
 +                }
 +                ControlFlow::Continue(())
 +            },
 +        );
 +        slot
 +    }
 +
 +    fn iterate_method_candidates_dyn(
 +        &self,
 +        db: &dyn HirDatabase,
 +        scope: &SemanticsScope<'_>,
 +        traits_in_scope: &FxHashSet<TraitId>,
 +        with_local_impls: Option<Module>,
 +        name: Option<&Name>,
 +        callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>,
 +    ) {
 +        // There should be no inference vars in types passed here
 +        let canonical = hir_ty::replace_errors_with_variables(&self.ty);
 +
 +        let krate = scope.krate();
 +        let environment = scope.resolver().generic_def().map_or_else(
 +            || Arc::new(TraitEnvironment::empty(krate.id)),
 +            |d| db.trait_environment(d),
 +        );
 +
 +        method_resolution::iterate_method_candidates_dyn(
 +            &canonical,
 +            db,
 +            environment,
 +            traits_in_scope,
 +            with_local_impls.and_then(|b| b.id.containing_block()).into(),
 +            name,
 +            method_resolution::LookupMode::MethodCall,
 +            &mut |_adj, id| callback(id),
 +        );
 +    }
 +
 +    /// Iterates associated-item candidates reachable via path syntax
 +    /// (`Type::item`), stopping at the first `Some` from `callback`.
 +    pub fn iterate_path_candidates<T>(
 +        &self,
 +        db: &dyn HirDatabase,
 +        scope: &SemanticsScope<'_>,
 +        traits_in_scope: &FxHashSet<TraitId>,
 +        with_local_impls: Option<Module>,
 +        name: Option<&Name>,
 +        mut callback: impl FnMut(AssocItem) -> Option<T>,
 +    ) -> Option<T> {
 +        let _p = profile::span("iterate_path_candidates");
 +        let mut slot = None;
 +        self.iterate_path_candidates_dyn(
 +            db,
 +            scope,
 +            traits_in_scope,
 +            with_local_impls,
 +            name,
 +            &mut |assoc_item_id| {
 +                if let Some(res) = callback(assoc_item_id.into()) {
 +                    slot = Some(res);
 +                    return ControlFlow::Break(());
 +                }
 +                ControlFlow::Continue(())
 +            },
 +        );
 +        slot
 +    }
 +
 +    fn iterate_path_candidates_dyn(
 +        &self,
 +        db: &dyn HirDatabase,
 +        scope: &SemanticsScope<'_>,
 +        traits_in_scope: &FxHashSet<TraitId>,
 +        with_local_impls: Option<Module>,
 +        name: Option<&Name>,
 +        callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>,
 +    ) {
 +        let canonical = hir_ty::replace_errors_with_variables(&self.ty);
 +
 +        let krate = scope.krate();
 +        let environment = scope.resolver().generic_def().map_or_else(
 +            || Arc::new(TraitEnvironment::empty(krate.id)),
 +            |d| db.trait_environment(d),
 +        );
 +
 +        method_resolution::iterate_path_candidates(
 +            &canonical,
 +            db,
 +            environment,
 +            traits_in_scope,
 +            with_local_impls.and_then(|b| b.id.containing_block()).into(),
 +            name,
 +            &mut |id| callback(id),
 +        );
 +    }
 +
 +    /// The ADT (struct/enum/union) behind this type, if any.
 +    pub fn as_adt(&self) -> Option<Adt> {
 +        let (adt, _subst) = self.ty.as_adt()?;
 +        Some(adt.into())
 +    }
 +
 +    pub fn as_builtin(&self) -> Option<BuiltinType> {
 +        self.ty.as_builtin().map(|inner| BuiltinType { inner })
 +    }
 +
 +    /// The principal trait of a `dyn Trait` type, if this is one.
 +    pub fn as_dyn_trait(&self) -> Option<Trait> {
 +        self.ty.dyn_trait().map(Into::into)
 +    }
 +
 +    /// If a type can be represented as `dyn Trait`, returns all traits accessible via this type,
 +    /// or an empty iterator otherwise.
 +    pub fn applicable_inherent_traits<'a>(
 +        &'a self,
 +        db: &'a dyn HirDatabase,
 +    ) -> impl Iterator<Item = Trait> + 'a {
 +        let _p = profile::span("applicable_inherent_traits");
 +        self.autoderef_(db)
 +            .filter_map(|ty| ty.dyn_trait())
 +            .flat_map(move |dyn_trait_id| hir_ty::all_super_traits(db.upcast(), dyn_trait_id))
 +            .map(Trait::from)
 +    }
 +
 +    /// Traits (plus their supertraits) that environment clauses put in scope
 +    /// for placeholder types found along the autoderef chain.
 +    pub fn env_traits<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Trait> + 'a {
 +        let _p = profile::span("env_traits");
 +        self.autoderef_(db)
 +            .filter(|ty| matches!(ty.kind(Interner), TyKind::Placeholder(_)))
 +            .flat_map(|ty| {
 +                self.env
 +                    .traits_in_scope_from_clauses(ty)
 +                    .flat_map(|t| hir_ty::all_super_traits(db.upcast(), t))
 +            })
 +            .map(Trait::from)
 +    }
 +
 +    /// If this is an `impl Trait` type, the traits it is bounded by.
 +    pub fn as_impl_traits(&self, db: &dyn HirDatabase) -> Option<impl Iterator<Item = Trait>> {
 +        self.ty.impl_trait_bounds(db).map(|it| {
 +            it.into_iter().filter_map(|pred| match pred.skip_binders() {
 +                hir_ty::WhereClause::Implemented(trait_ref) => {
 +                    Some(Trait::from(trait_ref.hir_trait_id()))
 +                }
 +                _ => None,
 +            })
 +        })
 +    }
 +
 +    pub fn as_associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<Trait> {
 +        self.ty.associated_type_parent_trait(db).map(Into::into)
 +    }
 +
 +    /// A new `Type` for `ty`, sharing this type's trait environment.
 +    fn derived(&self, ty: Ty) -> Type {
 +        Type { env: self.env.clone(), ty }
 +    }
 +
 +    /// Visits this type and its structurally nested types, calling `cb` on
 +    /// each one (the outer type before its arguments).
 +    pub fn walk(&self, db: &dyn HirDatabase, mut cb: impl FnMut(Type)) {
 +        // TypeWalk::walk for a Ty at first visits parameters and only after that the Ty itself.
 +        // We need a different order here.
 +
 +        // Walks each type argument of a substitution.
 +        fn walk_substs(
 +            db: &dyn HirDatabase,
 +            type_: &Type,
 +            substs: &Substitution,
 +            cb: &mut impl FnMut(Type),
 +        ) {
 +            for ty in substs.iter(Interner).filter_map(|a| a.ty(Interner)) {
 +                walk_type(db, &type_.derived(ty.clone()), cb);
 +            }
 +        }
 +
 +        // Walks the trait-bound clauses of an opaque/placeholder/dyn type.
 +        fn walk_bounds(
 +            db: &dyn HirDatabase,
 +            type_: &Type,
 +            bounds: &[QuantifiedWhereClause],
 +            cb: &mut impl FnMut(Type),
 +        ) {
 +            for pred in bounds {
 +                if let WhereClause::Implemented(trait_ref) = pred.skip_binders() {
 +                    cb(type_.clone());
 +                    // skip the self type. it's likely the type we just got the bounds from
 +                    for ty in
 +                        trait_ref.substitution.iter(Interner).skip(1).filter_map(|a| a.ty(Interner))
 +                    {
 +                        walk_type(db, &type_.derived(ty.clone()), cb);
 +                    }
 +                }
 +            }
 +        }
 +
 +        fn walk_type(db: &dyn HirDatabase, type_: &Type, cb: &mut impl FnMut(Type)) {
 +            let ty = type_.ty.strip_references();
 +            match ty.kind(Interner) {
 +                TyKind::Adt(_, substs) => {
 +                    cb(type_.derived(ty.clone()));
 +                    walk_substs(db, type_, substs, cb);
 +                }
 +                TyKind::AssociatedType(_, substs) => {
 +                    if ty.associated_type_parent_trait(db).is_some() {
 +                        cb(type_.derived(ty.clone()));
 +                    }
 +                    walk_substs(db, type_, substs, cb);
 +                }
 +                TyKind::OpaqueType(_, subst) => {
 +                    if let Some(bounds) = ty.impl_trait_bounds(db) {
 +                        walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb);
 +                    }
 +
 +                    walk_substs(db, type_, subst, cb);
 +                }
 +                TyKind::Alias(AliasTy::Opaque(opaque_ty)) => {
 +                    if let Some(bounds) = ty.impl_trait_bounds(db) {
 +                        walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb);
 +                    }
 +
 +                    walk_substs(db, type_, &opaque_ty.substitution, cb);
 +                }
 +                TyKind::Placeholder(_) => {
 +                    if let Some(bounds) = ty.impl_trait_bounds(db) {
 +                        walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb);
 +                    }
 +                }
 +                TyKind::Dyn(bounds) => {
 +                    walk_bounds(
 +                        db,
 +                        &type_.derived(ty.clone()),
 +                        bounds.bounds.skip_binders().interned(),
 +                        cb,
 +                    );
 +                }
 +
 +                TyKind::Ref(_, _, ty)
 +                | TyKind::Raw(_, ty)
 +                | TyKind::Array(ty, _)
 +                | TyKind::Slice(ty) => {
 +                    walk_type(db, &type_.derived(ty.clone()), cb);
 +                }
 +
 +                TyKind::FnDef(_, substs)
 +                | TyKind::Tuple(_, substs)
 +                | TyKind::Closure(.., substs) => {
 +                    walk_substs(db, type_, substs, cb);
 +                }
 +                TyKind::Function(hir_ty::FnPointer { substitution, .. }) => {
 +                    walk_substs(db, type_, &substitution.0, cb);
 +                }
 +
 +                _ => {}
 +            }
 +        }
 +
 +        walk_type(db, self, &mut cb);
 +    }
 +
 +    pub fn could_unify_with(&self, db: &dyn HirDatabase, other: &Type) -> bool {
 +        let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone()));
 +        hir_ty::could_unify(db, self.env.clone(), &tys)
 +    }
 +
 +    pub fn could_coerce_to(&self, db: &dyn HirDatabase, to: &Type) -> bool {
 +        let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), to.ty.clone()));
 +        hir_ty::could_coerce(db, self.env.clone(), &tys)
 +    }
 +
 +    pub fn as_type_param(&self, db: &dyn HirDatabase) -> Option<TypeParam> {
 +        match self.ty.kind(Interner) {
 +            TyKind::Placeholder(p) => Some(TypeParam {
 +                id: TypeParamId::from_unchecked(hir_ty::from_placeholder_idx(db, *p)),
 +            }),
 +            _ => None,
 +        }
 +    }
 +}
 +
 +#[derive(Debug)]
 +pub struct Callable {
 +    ty: Type,
 +    sig: CallableSig,
 +    callee: Callee,
 +    pub(crate) is_bound_method: bool,
 +}
 +
 +#[derive(Debug)]
 +enum Callee {
 +    Def(CallableDefId),
 +    Closure(ClosureId),
 +    FnPtr,
 +}
 +
 +pub enum CallableKind {
 +    Function(Function),
 +    TupleStruct(Struct),
 +    TupleEnumVariant(Variant),
 +    Closure,
 +    FnPtr,
 +}
 +
 +impl Callable {
 +    pub fn kind(&self) -> CallableKind {
 +        use Callee::*;
 +        match self.callee {
 +            Def(CallableDefId::FunctionId(it)) => CallableKind::Function(it.into()),
 +            Def(CallableDefId::StructId(it)) => CallableKind::TupleStruct(it.into()),
 +            Def(CallableDefId::EnumVariantId(it)) => CallableKind::TupleEnumVariant(it.into()),
 +            Closure(_) => CallableKind::Closure,
 +            FnPtr => CallableKind::FnPtr,
 +        }
 +    }
 +    pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<ast::SelfParam> {
 +        let func = match self.callee {
 +            Callee::Def(CallableDefId::FunctionId(it)) if self.is_bound_method => it,
 +            _ => return None,
 +        };
 +        let src = func.lookup(db.upcast()).source(db.upcast());
 +        let param_list = src.value.param_list()?;
 +        param_list.self_param()
 +    }
 +    pub fn n_params(&self) -> usize {
 +        self.sig.params().len() - if self.is_bound_method { 1 } else { 0 }
 +    }
 +    pub fn params(
 +        &self,
 +        db: &dyn HirDatabase,
 +    ) -> Vec<(Option<Either<ast::SelfParam, ast::Pat>>, Type)> {
 +        let types = self
 +            .sig
 +            .params()
 +            .iter()
 +            .skip(if self.is_bound_method { 1 } else { 0 })
 +            .map(|ty| self.ty.derived(ty.clone()));
 +        let map_param = |it: ast::Param| it.pat().map(Either::Right);
 +        let patterns = match self.callee {
 +            Callee::Def(CallableDefId::FunctionId(func)) => {
 +                let src = func.lookup(db.upcast()).source(db.upcast());
 +                src.value.param_list().map(|param_list| {
 +                    param_list
 +                        .self_param()
 +                        .map(|it| Some(Either::Left(it)))
 +                        .filter(|_| !self.is_bound_method)
 +                        .into_iter()
 +                        .chain(param_list.params().map(map_param))
 +                })
 +            }
 +            Callee::Closure(closure_id) => match closure_source(db, closure_id) {
 +                Some(src) => src.param_list().map(|param_list| {
 +                    param_list
 +                        .self_param()
 +                        .map(|it| Some(Either::Left(it)))
 +                        .filter(|_| !self.is_bound_method)
 +                        .into_iter()
 +                        .chain(param_list.params().map(map_param))
 +                }),
 +                None => None,
 +            },
 +            _ => None,
 +        };
 +        patterns.into_iter().flatten().chain(iter::repeat(None)).zip(types).collect()
 +    }
 +    pub fn return_type(&self) -> Type {
 +        self.ty.derived(self.sig.ret().clone())
 +    }
 +}
 +
 +fn closure_source(db: &dyn HirDatabase, closure: ClosureId) -> Option<ast::ClosureExpr> {
 +    let (owner, expr_id) = db.lookup_intern_closure(closure.into());
 +    let (_, source_map) = db.body_with_source_map(owner);
 +    let ast = source_map.expr_syntax(expr_id).ok()?;
 +    let root = ast.file_syntax(db.upcast());
 +    let expr = ast.value.to_node(&root);
 +    match expr {
 +        ast::Expr::ClosureExpr(it) => Some(it),
 +        _ => None,
 +    }
 +}
 +
 +#[derive(Copy, Clone, Debug, Eq, PartialEq)]
 +pub enum BindingMode {
 +    Move,
 +    Ref(Mutability),
 +}
 +
 +/// For IDE only
 +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
 +pub enum ScopeDef {
 +    ModuleDef(ModuleDef),
 +    GenericParam(GenericParam),
 +    ImplSelfType(Impl),
 +    AdtSelfType(Adt),
 +    Local(Local),
 +    Label(Label),
 +    Unknown,
 +}
 +
 +impl ScopeDef {
 +    pub fn all_items(def: PerNs) -> ArrayVec<Self, 3> {
 +        let mut items = ArrayVec::new();
 +
 +        match (def.take_types(), def.take_values()) {
 +            (Some(m1), None) => items.push(ScopeDef::ModuleDef(m1.into())),
 +            (None, Some(m2)) => items.push(ScopeDef::ModuleDef(m2.into())),
 +            (Some(m1), Some(m2)) => {
 +                // Some items, like unit structs and enum variants, are
 +                // returned as both a type and a value. Here we want
 +                // to de-duplicate them.
 +                if m1 != m2 {
 +                    items.push(ScopeDef::ModuleDef(m1.into()));
 +                    items.push(ScopeDef::ModuleDef(m2.into()));
 +                } else {
 +                    items.push(ScopeDef::ModuleDef(m1.into()));
 +                }
 +            }
 +            (None, None) => {}
 +        };
 +
 +        if let Some(macro_def_id) = def.take_macros() {
 +            items.push(ScopeDef::ModuleDef(ModuleDef::Macro(macro_def_id.into())));
 +        }
 +
 +        if items.is_empty() {
 +            items.push(ScopeDef::Unknown);
 +        }
 +
 +        items
 +    }
 +
 +    pub fn attrs(&self, db: &dyn HirDatabase) -> Option<AttrsWithOwner> {
 +        match self {
 +            ScopeDef::ModuleDef(it) => it.attrs(db),
 +            ScopeDef::GenericParam(it) => Some(it.attrs(db)),
 +            ScopeDef::ImplSelfType(_)
 +            | ScopeDef::AdtSelfType(_)
 +            | ScopeDef::Local(_)
 +            | ScopeDef::Label(_)
 +            | ScopeDef::Unknown => None,
 +        }
 +    }
 +
 +    pub fn krate(&self, db: &dyn HirDatabase) -> Option<Crate> {
 +        match self {
 +            ScopeDef::ModuleDef(it) => it.module(db).map(|m| m.krate()),
 +            ScopeDef::GenericParam(it) => Some(it.module(db).krate()),
 +            ScopeDef::ImplSelfType(_) => None,
 +            ScopeDef::AdtSelfType(it) => Some(it.module(db).krate()),
 +            ScopeDef::Local(it) => Some(it.module(db).krate()),
 +            ScopeDef::Label(it) => Some(it.module(db).krate()),
 +            ScopeDef::Unknown => None,
 +        }
 +    }
 +}
 +
 +impl From<ItemInNs> for ScopeDef {
 +    fn from(item: ItemInNs) -> Self {
 +        match item {
 +            ItemInNs::Types(id) => ScopeDef::ModuleDef(id),
 +            ItemInNs::Values(id) => ScopeDef::ModuleDef(id),
 +            ItemInNs::Macros(id) => ScopeDef::ModuleDef(ModuleDef::Macro(id)),
 +        }
 +    }
 +}
 +
 +pub trait HasVisibility {
 +    fn visibility(&self, db: &dyn HirDatabase) -> Visibility;
 +    fn is_visible_from(&self, db: &dyn HirDatabase, module: Module) -> bool {
 +        let vis = self.visibility(db);
 +        vis.is_visible_from(db.upcast(), module.id)
 +    }
 +}
 +
 +/// Trait for obtaining the defining crate of an item.
 +pub trait HasCrate {
 +    fn krate(&self, db: &dyn HirDatabase) -> Crate;
 +}
 +
 +impl<T: hir_def::HasModule> HasCrate for T {
 +    fn krate(&self, db: &dyn HirDatabase) -> Crate {
 +        self.module(db.upcast()).krate().into()
 +    }
 +}
 +
 +impl HasCrate for AssocItem {
 +    fn krate(&self, db: &dyn HirDatabase) -> Crate {
 +        self.module(db).krate()
 +    }
 +}
 +
 +impl HasCrate for Struct {
 +    fn krate(&self, db: &dyn HirDatabase) -> Crate {
 +        self.module(db).krate()
 +    }
 +}
 +
 +impl HasCrate for Union {
 +    fn krate(&self, db: &dyn HirDatabase) -> Crate {
 +        self.module(db).krate()
 +    }
 +}
 +
 +impl HasCrate for Field {
 +    fn krate(&self, db: &dyn HirDatabase) -> Crate {
 +        self.parent_def(db).module(db).krate()
 +    }
 +}
 +
 +impl HasCrate for Variant {
 +    fn krate(&self, db: &dyn HirDatabase) -> Crate {
 +        self.module(db).krate()
 +    }
 +}
 +
 +impl HasCrate for Function {
 +    fn krate(&self, db: &dyn HirDatabase) -> Crate {
 +        self.module(db).krate()
 +    }
 +}
 +
 +impl HasCrate for Const {
 +    fn krate(&self, db: &dyn HirDatabase) -> Crate {
 +        self.module(db).krate()
 +    }
 +}
 +
 +impl HasCrate for TypeAlias {
 +    fn krate(&self, db: &dyn HirDatabase) -> Crate {
 +        self.module(db).krate()
 +    }
 +}
 +
 +impl HasCrate for Type {
 +    fn krate(&self, _db: &dyn HirDatabase) -> Crate {
 +        self.env.krate.into()
 +    }
 +}
 +
 +impl HasCrate for Macro {
 +    fn krate(&self, db: &dyn HirDatabase) -> Crate {
 +        self.module(db).krate()
 +    }
 +}
 +
 +impl HasCrate for Trait {
 +    fn krate(&self, db: &dyn HirDatabase) -> Crate {
 +        self.module(db).krate()
 +    }
 +}
 +
 +impl HasCrate for Static {
 +    fn krate(&self, db: &dyn HirDatabase) -> Crate {
 +        self.module(db).krate()
 +    }
 +}
 +
 +impl HasCrate for Adt {
 +    fn krate(&self, db: &dyn HirDatabase) -> Crate {
 +        self.module(db).krate()
 +    }
 +}
 +
 +impl HasCrate for Module {
 +    fn krate(&self, _: &dyn HirDatabase) -> Crate {
 +        Module::krate(*self)
 +    }
 +}
index af584cdb4384aa96b508775bfaec88932b14b456,0000000000000000000000000000000000000000..03aa8601d14e1ef3e34207da916dd4574d175bbf
mode 100644,000000..100644
--- /dev/null
@@@ -1,360 -1,0 +1,404 @@@
 +use either::Either;
 +use ide_db::syntax_helpers::node_ext::walk_ty;
 +use itertools::Itertools;
 +use syntax::{
 +    ast::{self, edit::IndentLevel, AstNode, HasGenericParams, HasName},
 +    match_ast,
 +};
 +
 +use crate::{AssistContext, AssistId, AssistKind, Assists};
 +
 +// Assist: extract_type_alias
 +//
 +// Extracts the selected type as a type alias.
 +//
 +// ```
 +// struct S {
 +//     field: $0(u8, u8, u8)$0,
 +// }
 +// ```
 +// ->
 +// ```
 +// type $0Type = (u8, u8, u8);
 +//
 +// struct S {
 +//     field: Type,
 +// }
 +// ```
 +pub(crate) fn extract_type_alias(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
 +    if ctx.has_empty_selection() {
 +        return None;
 +    }
 +
 +    let ty = ctx.find_node_at_range::<ast::Type>()?;
 +    let item = ty.syntax().ancestors().find_map(ast::Item::cast)?;
 +    let assoc_owner = item.syntax().ancestors().nth(2).and_then(|it| {
 +        match_ast! {
 +            match it {
 +                ast::Trait(tr) => Some(Either::Left(tr)),
 +                ast::Impl(impl_) => Some(Either::Right(impl_)),
 +                _ => None,
 +            }
 +        }
 +    });
 +    let node = assoc_owner.as_ref().map_or_else(
 +        || item.syntax(),
 +        |impl_| impl_.as_ref().either(AstNode::syntax, AstNode::syntax),
 +    );
 +    let insert_pos = node.text_range().start();
 +    let target = ty.syntax().text_range();
 +
 +    acc.add(
 +        AssistId("extract_type_alias", AssistKind::RefactorExtract),
 +        "Extract type as type alias",
 +        target,
 +        |builder| {
 +            let mut known_generics = match item.generic_param_list() {
 +                Some(it) => it.generic_params().collect(),
 +                None => Vec::new(),
 +            };
 +            if let Some(it) = assoc_owner.as_ref().and_then(|it| match it {
 +                Either::Left(it) => it.generic_param_list(),
 +                Either::Right(it) => it.generic_param_list(),
 +            }) {
 +                known_generics.extend(it.generic_params());
 +            }
 +            let generics = collect_used_generics(&ty, &known_generics);
 +
 +            let replacement = if !generics.is_empty() {
 +                format!(
 +                    "Type<{}>",
 +                    generics.iter().format_with(", ", |generic, f| {
 +                        match generic {
 +                            ast::GenericParam::ConstParam(cp) => f(&cp.name().unwrap()),
 +                            ast::GenericParam::LifetimeParam(lp) => f(&lp.lifetime().unwrap()),
 +                            ast::GenericParam::TypeParam(tp) => f(&tp.name().unwrap()),
 +                        }
 +                    })
 +                )
 +            } else {
 +                String::from("Type")
 +            };
 +            builder.replace(target, replacement);
 +
 +            let indent = IndentLevel::from_node(node);
 +            let generics = if !generics.is_empty() {
 +                format!("<{}>", generics.iter().format(", "))
 +            } else {
 +                String::new()
 +            };
 +            match ctx.config.snippet_cap {
 +                Some(cap) => {
 +                    builder.insert_snippet(
 +                        cap,
 +                        insert_pos,
 +                        format!("type $0Type{} = {};\n\n{}", generics, ty, indent),
 +                    );
 +                }
 +                None => {
 +                    builder.insert(
 +                        insert_pos,
 +                        format!("type Type{} = {};\n\n{}", generics, ty, indent),
 +                    );
 +                }
 +            }
 +        },
 +    )
 +}
 +
 +fn collect_used_generics<'gp>(
 +    ty: &ast::Type,
 +    known_generics: &'gp [ast::GenericParam],
 +) -> Vec<&'gp ast::GenericParam> {
 +    // can't use a closure -> closure here cause lifetime inference fails for that
 +    fn find_lifetime(text: &str) -> impl Fn(&&ast::GenericParam) -> bool + '_ {
 +        move |gp: &&ast::GenericParam| match gp {
 +            ast::GenericParam::LifetimeParam(lp) => {
 +                lp.lifetime().map_or(false, |lt| lt.text() == text)
 +            }
 +            _ => false,
 +        }
 +    }
 +
 +    let mut generics = Vec::new();
 +    walk_ty(ty, &mut |ty| match ty {
 +        ast::Type::PathType(ty) => {
 +            if let Some(path) = ty.path() {
 +                if let Some(name_ref) = path.as_single_name_ref() {
 +                    if let Some(param) = known_generics.iter().find(|gp| {
 +                        match gp {
 +                            ast::GenericParam::ConstParam(cp) => cp.name(),
 +                            ast::GenericParam::TypeParam(tp) => tp.name(),
 +                            _ => None,
 +                        }
 +                        .map_or(false, |n| n.text() == name_ref.text())
 +                    }) {
 +                        generics.push(param);
 +                    }
 +                }
 +                generics.extend(
 +                    path.segments()
 +                        .filter_map(|seg| seg.generic_arg_list())
 +                        .flat_map(|it| it.generic_args())
 +                        .filter_map(|it| match it {
 +                            ast::GenericArg::LifetimeArg(lt) => {
 +                                let lt = lt.lifetime()?;
 +                                known_generics.iter().find(find_lifetime(&lt.text()))
 +                            }
 +                            _ => None,
 +                        }),
 +                );
 +            }
 +        }
 +        ast::Type::ImplTraitType(impl_ty) => {
 +            if let Some(it) = impl_ty.type_bound_list() {
 +                generics.extend(
 +                    it.bounds()
 +                        .filter_map(|it| it.lifetime())
 +                        .filter_map(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
 +                );
 +            }
 +        }
 +        ast::Type::DynTraitType(dyn_ty) => {
 +            if let Some(it) = dyn_ty.type_bound_list() {
 +                generics.extend(
 +                    it.bounds()
 +                        .filter_map(|it| it.lifetime())
 +                        .filter_map(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
 +                );
 +            }
 +        }
 +        ast::Type::RefType(ref_) => generics.extend(
 +            ref_.lifetime().and_then(|lt| known_generics.iter().find(find_lifetime(&lt.text()))),
 +        ),
++        ast::Type::ArrayType(ar) => {
++            if let Some(expr) = ar.expr() {
++                if let ast::Expr::PathExpr(p) = expr {
++                    if let Some(path) = p.path() {
++                        if let Some(name_ref) = path.as_single_name_ref() {
++                            if let Some(param) = known_generics.iter().find(|gp| {
++                                if let ast::GenericParam::ConstParam(cp) = gp {
++                                    cp.name().map_or(false, |n| n.text() == name_ref.text())
++                                } else {
++                                    false
++                                }
++                            }) {
++                                generics.push(param);
++                            }
++                        }
++                    }
++                }
++            }
++        }
 +        _ => (),
 +    });
 +    // stable resort to lifetime, type, const
 +    generics.sort_by_key(|gp| match gp {
 +        ast::GenericParam::ConstParam(_) => 2,
 +        ast::GenericParam::LifetimeParam(_) => 0,
 +        ast::GenericParam::TypeParam(_) => 1,
 +    });
 +    generics
 +}
 +
 +#[cfg(test)]
 +mod tests {
 +    use crate::tests::{check_assist, check_assist_not_applicable};
 +
 +    use super::*;
 +
 +    #[test]
 +    fn test_not_applicable_without_selection() {
 +        check_assist_not_applicable(
 +            extract_type_alias,
 +            r"
 +struct S {
 +    field: $0(u8, u8, u8),
 +}
 +            ",
 +        );
 +    }
 +
 +    #[test]
 +    fn test_simple_types() {
 +        check_assist(
 +            extract_type_alias,
 +            r"
 +struct S {
 +    field: $0u8$0,
 +}
 +            ",
 +            r#"
 +type $0Type = u8;
 +
 +struct S {
 +    field: Type,
 +}
 +            "#,
 +        );
 +    }
 +
 +    #[test]
 +    fn test_generic_type_arg() {
 +        check_assist(
 +            extract_type_alias,
 +            r"
 +fn generic<T>() {}
 +
 +fn f() {
 +    generic::<$0()$0>();
 +}
 +            ",
 +            r#"
 +fn generic<T>() {}
 +
 +type $0Type = ();
 +
 +fn f() {
 +    generic::<Type>();
 +}
 +            "#,
 +        );
 +    }
 +
 +    #[test]
 +    fn test_inner_type_arg() {
 +        check_assist(
 +            extract_type_alias,
 +            r"
 +struct Vec<T> {}
 +struct S {
 +    v: Vec<Vec<$0Vec<u8>$0>>,
 +}
 +            ",
 +            r#"
 +struct Vec<T> {}
 +type $0Type = Vec<u8>;
 +
 +struct S {
 +    v: Vec<Vec<Type>>,
 +}
 +            "#,
 +        );
 +    }
 +
 +    #[test]
 +    fn test_extract_inner_type() {
 +        check_assist(
 +            extract_type_alias,
 +            r"
 +struct S {
 +    field: ($0u8$0,),
 +}
 +            ",
 +            r#"
 +type $0Type = u8;
 +
 +struct S {
 +    field: (Type,),
 +}
 +            "#,
 +        );
 +    }
 +
 +    #[test]
 +    fn extract_from_impl_or_trait() {
 +        // When invoked in an impl/trait, extracted type alias should be placed next to the
 +        // impl/trait, not inside.
 +        check_assist(
 +            extract_type_alias,
 +            r#"
 +impl S {
 +    fn f() -> $0(u8, u8)$0 {}
 +}
 +            "#,
 +            r#"
 +type $0Type = (u8, u8);
 +
 +impl S {
 +    fn f() -> Type {}
 +}
 +            "#,
 +        );
 +        check_assist(
 +            extract_type_alias,
 +            r#"
 +trait Tr {
 +    fn f() -> $0(u8, u8)$0 {}
 +}
 +            "#,
 +            r#"
 +type $0Type = (u8, u8);
 +
 +trait Tr {
 +    fn f() -> Type {}
 +}
 +            "#,
 +        );
 +    }
 +
 +    #[test]
 +    fn indentation() {
 +        check_assist(
 +            extract_type_alias,
 +            r#"
 +mod m {
 +    fn f() -> $0u8$0 {}
 +}
 +            "#,
 +            r#"
 +mod m {
 +    type $0Type = u8;
 +
 +    fn f() -> Type {}
 +}
 +            "#,
 +        );
 +    }
 +
 +    #[test]
 +    fn generics() {
 +        check_assist(
 +            extract_type_alias,
 +            r#"
 +struct Struct<const C: usize>;
 +impl<'outer, Outer, const OUTER: usize> () {
 +    fn func<'inner, Inner, const INNER: usize>(_: $0&(Struct<INNER>, Struct<OUTER>, Outer, &'inner (), Inner, &'outer ())$0) {}
 +}
 +"#,
 +            r#"
 +struct Struct<const C: usize>;
 +type $0Type<'inner, 'outer, Outer, Inner, const INNER: usize, const OUTER: usize> = &(Struct<INNER>, Struct<OUTER>, Outer, &'inner (), Inner, &'outer ());
 +
 +impl<'outer, Outer, const OUTER: usize> () {
 +    fn func<'inner, Inner, const INNER: usize>(_: Type<'inner, 'outer, Outer, Inner, INNER, OUTER>) {}
 +}
 +"#,
 +        );
 +    }
++
++    #[test]
++    fn issue_11197() {
++        check_assist(
++            extract_type_alias,
++            r#"
++struct Foo<T, const N: usize>
++where
++    [T; N]: Sized,
++{
++    arr: $0[T; N]$0,
++}
++            "#,
++            r#"
++type $0Type<T, const N: usize> = [T; N];
++
++struct Foo<T, const N: usize>
++where
++    [T; N]: Sized,
++{
++    arr: Type<T, N>,
++}
++            "#,
++        );
++    }
 +}
index b5d092e39b02972b24da8cdfaedf9432e8e42691,0000000000000000000000000000000000000000..96890ad51a6f941e3399fea772058ab19d653af9
mode 100644,000000..100644
--- /dev/null
@@@ -1,1227 -1,0 +1,1256 @@@
-     if let Some(t) = body.syntax().ancestors().find_map(ast::Impl::cast).and_then(|i| i.self_ty()) {
-         body.syntax()
-             .descendants_with_tokens()
-             .filter_map(NodeOrToken::into_token)
-             .filter(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW)
-             .for_each(|tok| ted::replace(tok, t.syntax()));
 +use ast::make;
 +use either::Either;
 +use hir::{db::HirDatabase, PathResolution, Semantics, TypeInfo};
 +use ide_db::{
 +    base_db::{FileId, FileRange},
 +    defs::Definition,
 +    imports::insert_use::remove_path_if_in_use_stmt,
 +    path_transform::PathTransform,
 +    search::{FileReference, SearchScope},
 +    syntax_helpers::{insert_whitespace_into_node::insert_ws_into, node_ext::expr_as_name_ref},
 +    RootDatabase,
 +};
 +use itertools::{izip, Itertools};
 +use syntax::{
 +    ast::{self, edit_in_place::Indent, HasArgList, PathExpr},
 +    ted, AstNode, NodeOrToken, SyntaxKind,
 +};
 +
 +use crate::{
 +    assist_context::{AssistContext, Assists},
 +    AssistId, AssistKind,
 +};
 +
 +// Assist: inline_into_callers
 +//
 +// Inline a function or method body into all of its callers where possible, creating a `let` statement per parameter
 +// unless the parameter can be inlined. The parameter will be inlined either if it the supplied argument is a simple local
 +// or if the parameter is only accessed inside the function body once.
 +// If all calls can be inlined the function will be removed.
 +//
 +// ```
 +// fn print(_: &str) {}
 +// fn foo$0(word: &str) {
 +//     if !word.is_empty() {
 +//         print(word);
 +//     }
 +// }
 +// fn bar() {
 +//     foo("안녕하세요");
 +//     foo("여러분");
 +// }
 +// ```
 +// ->
 +// ```
 +// fn print(_: &str) {}
 +//
 +// fn bar() {
 +//     {
 +//         let word = "안녕하세요";
 +//         if !word.is_empty() {
 +//             print(word);
 +//         }
 +//     };
 +//     {
 +//         let word = "여러분";
 +//         if !word.is_empty() {
 +//             print(word);
 +//         }
 +//     };
 +// }
 +// ```
 +pub(crate) fn inline_into_callers(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
 +    let def_file = ctx.file_id();
 +    let name = ctx.find_node_at_offset::<ast::Name>()?;
 +    let ast_func = name.syntax().parent().and_then(ast::Fn::cast)?;
 +    let func_body = ast_func.body()?;
 +    let param_list = ast_func.param_list()?;
 +
 +    let function = ctx.sema.to_def(&ast_func)?;
 +
 +    let params = get_fn_params(ctx.sema.db, function, &param_list)?;
 +
 +    let usages = Definition::Function(function).usages(&ctx.sema);
 +    if !usages.at_least_one() {
 +        return None;
 +    }
 +
 +    let is_recursive_fn = usages
 +        .clone()
 +        .in_scope(SearchScope::file_range(FileRange {
 +            file_id: def_file,
 +            range: func_body.syntax().text_range(),
 +        }))
 +        .at_least_one();
 +    if is_recursive_fn {
 +        cov_mark::hit!(inline_into_callers_recursive);
 +        return None;
 +    }
 +
 +    acc.add(
 +        AssistId("inline_into_callers", AssistKind::RefactorInline),
 +        "Inline into all callers",
 +        name.syntax().text_range(),
 +        |builder| {
 +            let mut usages = usages.all();
 +            let current_file_usage = usages.references.remove(&def_file);
 +
 +            let mut remove_def = true;
 +            let mut inline_refs_for_file = |file_id, refs: Vec<FileReference>| {
 +                builder.edit_file(file_id);
 +                let count = refs.len();
 +                // The collects are required as we are otherwise iterating while mutating 🙅‍♀️🙅‍♂️
 +                let (name_refs, name_refs_use): (Vec<_>, Vec<_>) = refs
 +                    .into_iter()
 +                    .filter_map(|file_ref| match file_ref.name {
 +                        ast::NameLike::NameRef(name_ref) => Some(name_ref),
 +                        _ => None,
 +                    })
 +                    .partition_map(|name_ref| {
 +                        match name_ref.syntax().ancestors().find_map(ast::UseTree::cast) {
 +                            Some(use_tree) => Either::Right(builder.make_mut(use_tree)),
 +                            None => Either::Left(name_ref),
 +                        }
 +                    });
 +                let call_infos: Vec<_> = name_refs
 +                    .into_iter()
 +                    .filter_map(CallInfo::from_name_ref)
 +                    .map(|call_info| {
 +                        let mut_node = builder.make_syntax_mut(call_info.node.syntax().clone());
 +                        (call_info, mut_node)
 +                    })
 +                    .collect();
 +                let replaced = call_infos
 +                    .into_iter()
 +                    .map(|(call_info, mut_node)| {
 +                        let replacement =
 +                            inline(&ctx.sema, def_file, function, &func_body, &params, &call_info);
 +                        ted::replace(mut_node, replacement.syntax());
 +                    })
 +                    .count();
 +                if replaced + name_refs_use.len() == count {
 +                    // we replaced all usages in this file, so we can remove the imports
 +                    name_refs_use.into_iter().for_each(|use_tree| {
 +                        if let Some(path) = use_tree.path() {
 +                            remove_path_if_in_use_stmt(&path);
 +                        }
 +                    })
 +                } else {
 +                    remove_def = false;
 +                }
 +            };
 +            for (file_id, refs) in usages.into_iter() {
 +                inline_refs_for_file(file_id, refs);
 +            }
 +            match current_file_usage {
 +                Some(refs) => inline_refs_for_file(def_file, refs),
 +                None => builder.edit_file(def_file),
 +            }
 +            if remove_def {
 +                builder.delete(ast_func.syntax().text_range());
 +            }
 +        },
 +    )
 +}
 +
 +// Assist: inline_call
 +//
 +// Inlines a function or method body creating a `let` statement per parameter unless the parameter
 +// can be inlined. The parameter will be inlined either if it the supplied argument is a simple local
 +// or if the parameter is only accessed inside the function body once.
 +//
 +// ```
 +// # //- minicore: option
 +// fn foo(name: Option<&str>) {
 +//     let name = name.unwrap$0();
 +// }
 +// ```
 +// ->
 +// ```
 +// fn foo(name: Option<&str>) {
 +//     let name = match name {
 +//             Some(val) => val,
 +//             None => panic!("called `Option::unwrap()` on a `None` value"),
 +//         };
 +// }
 +// ```
 +pub(crate) fn inline_call(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
 +    let name_ref: ast::NameRef = ctx.find_node_at_offset()?;
 +    let call_info = CallInfo::from_name_ref(name_ref.clone())?;
 +    let (function, label) = match &call_info.node {
 +        ast::CallableExpr::Call(call) => {
 +            let path = match call.expr()? {
 +                ast::Expr::PathExpr(path) => path.path(),
 +                _ => None,
 +            }?;
 +            let function = match ctx.sema.resolve_path(&path)? {
 +                PathResolution::Def(hir::ModuleDef::Function(f)) => f,
 +                _ => return None,
 +            };
 +            (function, format!("Inline `{}`", path))
 +        }
 +        ast::CallableExpr::MethodCall(call) => {
 +            (ctx.sema.resolve_method_call(call)?, format!("Inline `{}`", name_ref))
 +        }
 +    };
 +
 +    let fn_source = ctx.sema.source(function)?;
 +    let fn_body = fn_source.value.body()?;
 +    let param_list = fn_source.value.param_list()?;
 +
 +    let FileRange { file_id, range } = fn_source.syntax().original_file_range(ctx.sema.db);
 +    if file_id == ctx.file_id() && range.contains(ctx.offset()) {
 +        cov_mark::hit!(inline_call_recursive);
 +        return None;
 +    }
 +    let params = get_fn_params(ctx.sema.db, function, &param_list)?;
 +
 +    if call_info.arguments.len() != params.len() {
 +        // Can't inline the function because they've passed the wrong number of
 +        // arguments to this function
 +        cov_mark::hit!(inline_call_incorrect_number_of_arguments);
 +        return None;
 +    }
 +
 +    let syntax = call_info.node.syntax().clone();
 +    acc.add(
 +        AssistId("inline_call", AssistKind::RefactorInline),
 +        label,
 +        syntax.text_range(),
 +        |builder| {
 +            let replacement = inline(&ctx.sema, file_id, function, &fn_body, &params, &call_info);
 +
 +            builder.replace_ast(
 +                match call_info.node {
 +                    ast::CallableExpr::Call(it) => ast::Expr::CallExpr(it),
 +                    ast::CallableExpr::MethodCall(it) => ast::Expr::MethodCallExpr(it),
 +                },
 +                replacement,
 +            );
 +        },
 +    )
 +}
 +
 +struct CallInfo {
 +    node: ast::CallableExpr,
 +    arguments: Vec<ast::Expr>,
 +    generic_arg_list: Option<ast::GenericArgList>,
 +}
 +
 +impl CallInfo {
 +    fn from_name_ref(name_ref: ast::NameRef) -> Option<CallInfo> {
 +        let parent = name_ref.syntax().parent()?;
 +        if let Some(call) = ast::MethodCallExpr::cast(parent.clone()) {
 +            let receiver = call.receiver()?;
 +            let mut arguments = vec![receiver];
 +            arguments.extend(call.arg_list()?.args());
 +            Some(CallInfo {
 +                generic_arg_list: call.generic_arg_list(),
 +                node: ast::CallableExpr::MethodCall(call),
 +                arguments,
 +            })
 +        } else if let Some(segment) = ast::PathSegment::cast(parent) {
 +            let path = segment.syntax().parent().and_then(ast::Path::cast)?;
 +            let path = path.syntax().parent().and_then(ast::PathExpr::cast)?;
 +            let call = path.syntax().parent().and_then(ast::CallExpr::cast)?;
 +
 +            Some(CallInfo {
 +                arguments: call.arg_list()?.args().collect(),
 +                node: ast::CallableExpr::Call(call),
 +                generic_arg_list: segment.generic_arg_list(),
 +            })
 +        } else {
 +            None
 +        }
 +    }
 +}
 +
 +fn get_fn_params(
 +    db: &dyn HirDatabase,
 +    function: hir::Function,
 +    param_list: &ast::ParamList,
 +) -> Option<Vec<(ast::Pat, Option<ast::Type>, hir::Param)>> {
 +    let mut assoc_fn_params = function.assoc_fn_params(db).into_iter();
 +
 +    let mut params = Vec::new();
 +    if let Some(self_param) = param_list.self_param() {
 +        // FIXME this should depend on the receiver as well as the self_param
 +        params.push((
 +            make::ident_pat(
 +                self_param.amp_token().is_some(),
 +                self_param.mut_token().is_some(),
 +                make::name("this"),
 +            )
 +            .into(),
 +            None,
 +            assoc_fn_params.next()?,
 +        ));
 +    }
 +    for param in param_list.params() {
 +        params.push((param.pat()?, param.ty(), assoc_fn_params.next()?));
 +    }
 +
 +    Some(params)
 +}
 +
 +fn inline(
 +    sema: &Semantics<'_, RootDatabase>,
 +    function_def_file_id: FileId,
 +    function: hir::Function,
 +    fn_body: &ast::BlockExpr,
 +    params: &[(ast::Pat, Option<ast::Type>, hir::Param)],
 +    CallInfo { node, arguments, generic_arg_list }: &CallInfo,
 +) -> ast::Expr {
 +    let body = if sema.hir_file_for(fn_body.syntax()).is_macro() {
 +        cov_mark::hit!(inline_call_defined_in_macro);
 +        if let Some(body) = ast::BlockExpr::cast(insert_ws_into(fn_body.syntax().clone())) {
 +            body
 +        } else {
 +            fn_body.clone_for_update()
 +        }
 +    } else {
 +        fn_body.clone_for_update()
 +    };
++    if let Some(imp) = body.syntax().ancestors().find_map(ast::Impl::cast) {
++        if !node.syntax().ancestors().any(|anc| &anc == imp.syntax()) {
++            if let Some(t) = imp.self_ty() {
++                body.syntax()
++                    .descendants_with_tokens()
++                    .filter_map(NodeOrToken::into_token)
++                    .filter(|tok| tok.kind() == SyntaxKind::SELF_TYPE_KW)
++                    .for_each(|tok| ted::replace(tok, t.syntax()));
++            }
++        }
 +    }
 +    let usages_for_locals = |local| {
 +        Definition::Local(local)
 +            .usages(sema)
 +            .all()
 +            .references
 +            .remove(&function_def_file_id)
 +            .unwrap_or_default()
 +            .into_iter()
 +    };
 +    let param_use_nodes: Vec<Vec<_>> = params
 +        .iter()
 +        .map(|(pat, _, param)| {
 +            if !matches!(pat, ast::Pat::IdentPat(pat) if pat.is_simple_ident()) {
 +                return Vec::new();
 +            }
 +            // FIXME: we need to fetch all locals declared in the parameter here
 +            // not only the local if it is a simple binding
 +            match param.as_local(sema.db) {
 +                Some(l) => usages_for_locals(l)
 +                    .map(|FileReference { name, range, .. }| match name {
 +                        ast::NameLike::NameRef(_) => body
 +                            .syntax()
 +                            .covering_element(range)
 +                            .ancestors()
 +                            .nth(3)
 +                            .and_then(ast::PathExpr::cast),
 +                        _ => None,
 +                    })
 +                    .collect::<Option<Vec<_>>>()
 +                    .unwrap_or_default(),
 +                None => Vec::new(),
 +            }
 +        })
 +        .collect();
 +
 +    if function.self_param(sema.db).is_some() {
 +        let this = || make::name_ref("this").syntax().clone_for_update();
 +        if let Some(self_local) = params[0].2.as_local(sema.db) {
 +            usages_for_locals(self_local)
 +                .flat_map(|FileReference { name, range, .. }| match name {
 +                    ast::NameLike::NameRef(_) => Some(body.syntax().covering_element(range)),
 +                    _ => None,
 +                })
 +                .for_each(|it| {
 +                    ted::replace(it, &this());
 +                })
 +        }
 +    }
 +    // Inline parameter expressions or generate `let` statements depending on whether inlining works or not.
 +    for ((pat, param_ty, _), usages, expr) in izip!(params, param_use_nodes, arguments).rev() {
 +        let inline_direct = |usage, replacement: &ast::Expr| {
 +            if let Some(field) = path_expr_as_record_field(usage) {
 +                cov_mark::hit!(inline_call_inline_direct_field);
 +                field.replace_expr(replacement.clone_for_update());
 +            } else {
 +                ted::replace(usage.syntax(), &replacement.syntax().clone_for_update());
 +            }
 +        };
 +        // izip confuses RA due to our lack of hygiene info currently losing us type info causing incorrect errors
 +        let usages: &[ast::PathExpr] = &*usages;
 +        let expr: &ast::Expr = expr;
 +        match usages {
 +            // inline single use closure arguments
 +            [usage]
 +                if matches!(expr, ast::Expr::ClosureExpr(_))
 +                    && usage.syntax().parent().and_then(ast::Expr::cast).is_some() =>
 +            {
 +                cov_mark::hit!(inline_call_inline_closure);
 +                let expr = make::expr_paren(expr.clone());
 +                inline_direct(usage, &expr);
 +            }
 +            // inline single use literals
 +            [usage] if matches!(expr, ast::Expr::Literal(_)) => {
 +                cov_mark::hit!(inline_call_inline_literal);
 +                inline_direct(usage, expr);
 +            }
 +            // inline direct local arguments
 +            [_, ..] if expr_as_name_ref(expr).is_some() => {
 +                cov_mark::hit!(inline_call_inline_locals);
 +                usages.iter().for_each(|usage| inline_direct(usage, expr));
 +            }
 +            // can't inline, emit a let statement
 +            _ => {
 +                let ty =
 +                    sema.type_of_expr(expr).filter(TypeInfo::has_adjustment).and(param_ty.clone());
 +                if let Some(stmt_list) = body.stmt_list() {
 +                    stmt_list.push_front(
 +                        make::let_stmt(pat.clone(), ty, Some(expr.clone()))
 +                            .clone_for_update()
 +                            .into(),
 +                    )
 +                }
 +            }
 +        }
 +    }
 +    if let Some(generic_arg_list) = generic_arg_list.clone() {
 +        if let Some((target, source)) = &sema.scope(node.syntax()).zip(sema.scope(fn_body.syntax()))
 +        {
 +            PathTransform::function_call(target, source, function, generic_arg_list)
 +                .apply(body.syntax());
 +        }
 +    }
 +
 +    let original_indentation = match node {
 +        ast::CallableExpr::Call(it) => it.indent_level(),
 +        ast::CallableExpr::MethodCall(it) => it.indent_level(),
 +    };
 +    body.reindent_to(original_indentation);
 +
 +    match body.tail_expr() {
 +        Some(expr) if body.statements().next().is_none() => expr,
 +        _ => match node
 +            .syntax()
 +            .parent()
 +            .and_then(ast::BinExpr::cast)
 +            .and_then(|bin_expr| bin_expr.lhs())
 +        {
 +            Some(lhs) if lhs.syntax() == node.syntax() => {
 +                make::expr_paren(ast::Expr::BlockExpr(body)).clone_for_update()
 +            }
 +            _ => ast::Expr::BlockExpr(body),
 +        },
 +    }
 +}
 +
 +fn path_expr_as_record_field(usage: &PathExpr) -> Option<ast::RecordExprField> {
 +    let path = usage.path()?;
 +    let name_ref = path.as_single_name_ref()?;
 +    ast::RecordExprField::for_name_ref(&name_ref)
 +}
 +
 +#[cfg(test)]
 +mod tests {
 +    use crate::tests::{check_assist, check_assist_not_applicable};
 +
 +    use super::*;
 +
 +    #[test]
 +    fn no_args_or_return_value_gets_inlined_without_block() {
 +        check_assist(
 +            inline_call,
 +            r#"
 +fn foo() { println!("Hello, World!"); }
 +fn main() {
 +    fo$0o();
 +}
 +"#,
 +            r#"
 +fn foo() { println!("Hello, World!"); }
 +fn main() {
 +    { println!("Hello, World!"); };
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn not_applicable_when_incorrect_number_of_parameters_are_provided() {
 +        cov_mark::check!(inline_call_incorrect_number_of_arguments);
 +        check_assist_not_applicable(
 +            inline_call,
 +            r#"
 +fn add(a: u32, b: u32) -> u32 { a + b }
 +fn main() { let x = add$0(42); }
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn args_with_side_effects() {
 +        check_assist(
 +            inline_call,
 +            r#"
 +fn foo(name: String) {
 +    println!("Hello, {}!", name);
 +}
 +fn main() {
 +    foo$0(String::from("Michael"));
 +}
 +"#,
 +            r#"
 +fn foo(name: String) {
 +    println!("Hello, {}!", name);
 +}
 +fn main() {
 +    {
 +        let name = String::from("Michael");
 +        println!("Hello, {}!", name);
 +    };
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn function_with_multiple_statements() {
 +        check_assist(
 +            inline_call,
 +            r#"
 +fn foo(a: u32, b: u32) -> u32 {
 +    let x = a + b;
 +    let y = x - b;
 +    x * y
 +}
 +
 +fn main() {
 +    let x = foo$0(1, 2);
 +}
 +"#,
 +            r#"
 +fn foo(a: u32, b: u32) -> u32 {
 +    let x = a + b;
 +    let y = x - b;
 +    x * y
 +}
 +
 +fn main() {
 +    let x = {
 +        let b = 2;
 +        let x = 1 + b;
 +        let y = x - b;
 +        x * y
 +    };
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn function_with_self_param() {
 +        check_assist(
 +            inline_call,
 +            r#"
 +struct Foo(u32);
 +
 +impl Foo {
 +    fn add(self, a: u32) -> Self {
 +        Foo(self.0 + a)
 +    }
 +}
 +
 +fn main() {
 +    let x = Foo::add$0(Foo(3), 2);
 +}
 +"#,
 +            r#"
 +struct Foo(u32);
 +
 +impl Foo {
 +    fn add(self, a: u32) -> Self {
 +        Foo(self.0 + a)
 +    }
 +}
 +
 +fn main() {
 +    let x = {
 +        let this = Foo(3);
 +        Foo(this.0 + 2)
 +    };
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn method_by_val() {
 +        check_assist(
 +            inline_call,
 +            r#"
 +struct Foo(u32);
 +
 +impl Foo {
 +    fn add(self, a: u32) -> Self {
 +        Foo(self.0 + a)
 +    }
 +}
 +
 +fn main() {
 +    let x = Foo(3).add$0(2);
 +}
 +"#,
 +            r#"
 +struct Foo(u32);
 +
 +impl Foo {
 +    fn add(self, a: u32) -> Self {
 +        Foo(self.0 + a)
 +    }
 +}
 +
 +fn main() {
 +    let x = {
 +        let this = Foo(3);
 +        Foo(this.0 + 2)
 +    };
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn method_by_ref() {
 +        check_assist(
 +            inline_call,
 +            r#"
 +struct Foo(u32);
 +
 +impl Foo {
 +    fn add(&self, a: u32) -> Self {
 +        Foo(self.0 + a)
 +    }
 +}
 +
 +fn main() {
 +    let x = Foo(3).add$0(2);
 +}
 +"#,
 +            r#"
 +struct Foo(u32);
 +
 +impl Foo {
 +    fn add(&self, a: u32) -> Self {
 +        Foo(self.0 + a)
 +    }
 +}
 +
 +fn main() {
 +    let x = {
 +        let ref this = Foo(3);
 +        Foo(this.0 + 2)
 +    };
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn method_by_ref_mut() {
 +        check_assist(
 +            inline_call,
 +            r#"
 +struct Foo(u32);
 +
 +impl Foo {
 +    fn clear(&mut self) {
 +        self.0 = 0;
 +    }
 +}
 +
 +fn main() {
 +    let mut foo = Foo(3);
 +    foo.clear$0();
 +}
 +"#,
 +            r#"
 +struct Foo(u32);
 +
 +impl Foo {
 +    fn clear(&mut self) {
 +        self.0 = 0;
 +    }
 +}
 +
 +fn main() {
 +    let mut foo = Foo(3);
 +    {
 +        let ref mut this = foo;
 +        this.0 = 0;
 +    };
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn function_multi_use_expr_in_param() {
 +        check_assist(
 +            inline_call,
 +            r#"
 +fn square(x: u32) -> u32 {
 +    x * x
 +}
 +fn main() {
 +    let x = 51;
 +    let y = square$0(10 + x);
 +}
 +"#,
 +            r#"
 +fn square(x: u32) -> u32 {
 +    x * x
 +}
 +fn main() {
 +    let x = 51;
 +    let y = {
 +        let x = 10 + x;
 +        x * x
 +    };
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn function_use_local_in_param() {
 +        cov_mark::check!(inline_call_inline_locals);
 +        check_assist(
 +            inline_call,
 +            r#"
 +fn square(x: u32) -> u32 {
 +    x * x
 +}
 +fn main() {
 +    let local = 51;
 +    let y = square$0(local);
 +}
 +"#,
 +            r#"
 +fn square(x: u32) -> u32 {
 +    x * x
 +}
 +fn main() {
 +    let local = 51;
 +    let y = local * local;
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn method_in_impl() {
 +        check_assist(
 +            inline_call,
 +            r#"
 +struct Foo;
 +impl Foo {
 +    fn foo(&self) {
 +        self;
 +        self;
 +    }
 +    fn bar(&self) {
 +        self.foo$0();
 +    }
 +}
 +"#,
 +            r#"
 +struct Foo;
 +impl Foo {
 +    fn foo(&self) {
 +        self;
 +        self;
 +    }
 +    fn bar(&self) {
 +        {
 +            let ref this = self;
 +            this;
 +            this;
 +        };
 +    }
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn wraps_closure_in_paren() {
 +        cov_mark::check!(inline_call_inline_closure);
 +        check_assist(
 +            inline_call,
 +            r#"
 +fn foo(x: fn()) {
 +    x();
 +}
 +
 +fn main() {
 +    foo$0(|| {})
 +}
 +"#,
 +            r#"
 +fn foo(x: fn()) {
 +    x();
 +}
 +
 +fn main() {
 +    {
 +        (|| {})();
 +    }
 +}
 +"#,
 +        );
 +        check_assist(
 +            inline_call,
 +            r#"
 +fn foo(x: fn()) {
 +    x();
 +}
 +
 +fn main() {
 +    foo$0(main)
 +}
 +"#,
 +            r#"
 +fn foo(x: fn()) {
 +    x();
 +}
 +
 +fn main() {
 +    {
 +        main();
 +    }
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn inline_single_literal_expr() {
 +        cov_mark::check!(inline_call_inline_literal);
 +        check_assist(
 +            inline_call,
 +            r#"
 +fn foo(x: u32) -> u32{
 +    x
 +}
 +
 +fn main() {
 +    foo$0(222);
 +}
 +"#,
 +            r#"
 +fn foo(x: u32) -> u32{
 +    x
 +}
 +
 +fn main() {
 +    222;
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn inline_emits_type_for_coercion() {
 +        check_assist(
 +            inline_call,
 +            r#"
 +fn foo(x: *const u32) -> u32 {
 +    x as u32
 +}
 +
 +fn main() {
 +    foo$0(&222);
 +}
 +"#,
 +            r#"
 +fn foo(x: *const u32) -> u32 {
 +    x as u32
 +}
 +
 +fn main() {
 +    {
 +        let x: *const u32 = &222;
 +        x as u32
 +    };
 +}
 +"#,
 +        );
 +    }
 +
 +    // FIXME: const generics aren't being substituted, this is blocked on better support for them
 +    #[test]
 +    fn inline_substitutes_generics() {
 +        check_assist(
 +            inline_call,
 +            r#"
 +fn foo<T, const N: usize>() {
 +    bar::<T, N>()
 +}
 +
 +fn bar<U, const M: usize>() {}
 +
 +fn main() {
 +    foo$0::<usize, {0}>();
 +}
 +"#,
 +            r#"
 +fn foo<T, const N: usize>() {
 +    bar::<T, N>()
 +}
 +
 +fn bar<U, const M: usize>() {}
 +
 +fn main() {
 +    bar::<usize, N>();
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn inline_callers() {
 +        check_assist(
 +            inline_into_callers,
 +            r#"
 +fn do_the_math$0(b: u32) -> u32 {
 +    let foo = 10;
 +    foo * b + foo
 +}
 +fn foo() {
 +    do_the_math(0);
 +    let bar = 10;
 +    do_the_math(bar);
 +}
 +"#,
 +            r#"
 +
 +fn foo() {
 +    {
 +        let foo = 10;
 +        foo * 0 + foo
 +    };
 +    let bar = 10;
 +    {
 +        let foo = 10;
 +        foo * bar + foo
 +    };
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn inline_callers_across_files() {
 +        check_assist(
 +            inline_into_callers,
 +            r#"
 +//- /lib.rs
 +mod foo;
 +fn do_the_math$0(b: u32) -> u32 {
 +    let foo = 10;
 +    foo * b + foo
 +}
 +//- /foo.rs
 +use super::do_the_math;
 +fn foo() {
 +    do_the_math(0);
 +    let bar = 10;
 +    do_the_math(bar);
 +}
 +"#,
 +            r#"
 +//- /lib.rs
 +mod foo;
 +
 +//- /foo.rs
 +fn foo() {
 +    {
 +        let foo = 10;
 +        foo * 0 + foo
 +    };
 +    let bar = 10;
 +    {
 +        let foo = 10;
 +        foo * bar + foo
 +    };
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn inline_callers_across_files_with_def_file() {
 +        check_assist(
 +            inline_into_callers,
 +            r#"
 +//- /lib.rs
 +mod foo;
 +fn do_the_math$0(b: u32) -> u32 {
 +    let foo = 10;
 +    foo * b + foo
 +}
 +fn bar(a: u32, b: u32) -> u32 {
 +    do_the_math(0);
 +}
 +//- /foo.rs
 +use super::do_the_math;
 +fn foo() {
 +    do_the_math(0);
 +}
 +"#,
 +            r#"
 +//- /lib.rs
 +mod foo;
 +
 +fn bar(a: u32, b: u32) -> u32 {
 +    {
 +        let foo = 10;
 +        foo * 0 + foo
 +    };
 +}
 +//- /foo.rs
 +fn foo() {
 +    {
 +        let foo = 10;
 +        foo * 0 + foo
 +    };
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn inline_callers_recursive() {
 +        cov_mark::check!(inline_into_callers_recursive);
 +        check_assist_not_applicable(
 +            inline_into_callers,
 +            r#"
 +fn foo$0() {
 +    foo();
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn inline_call_recursive() {
 +        cov_mark::check!(inline_call_recursive);
 +        check_assist_not_applicable(
 +            inline_call,
 +            r#"
 +fn foo() {
 +    foo$0();
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn inline_call_field_shorthand() {
 +        cov_mark::check!(inline_call_inline_direct_field);
 +        check_assist(
 +            inline_call,
 +            r#"
 +struct Foo {
 +    field: u32,
 +    field1: u32,
 +    field2: u32,
 +    field3: u32,
 +}
 +fn foo(field: u32, field1: u32, val2: u32, val3: u32) -> Foo {
 +    Foo {
 +        field,
 +        field1,
 +        field2: val2,
 +        field3: val3,
 +    }
 +}
 +fn main() {
 +    let bar = 0;
 +    let baz = 0;
 +    foo$0(bar, 0, baz, 0);
 +}
 +"#,
 +            r#"
 +struct Foo {
 +    field: u32,
 +    field1: u32,
 +    field2: u32,
 +    field3: u32,
 +}
 +fn foo(field: u32, field1: u32, val2: u32, val3: u32) -> Foo {
 +    Foo {
 +        field,
 +        field1,
 +        field2: val2,
 +        field3: val3,
 +    }
 +}
 +fn main() {
 +    let bar = 0;
 +    let baz = 0;
 +    Foo {
 +            field: bar,
 +            field1: 0,
 +            field2: baz,
 +            field3: 0,
 +        };
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn inline_callers_wrapped_in_parentheses() {
 +        check_assist(
 +            inline_into_callers,
 +            r#"
 +fn foo$0() -> u32 {
 +    let x = 0;
 +    x
 +}
 +fn bar() -> u32 {
 +    foo() + foo()
 +}
 +"#,
 +            r#"
 +
 +fn bar() -> u32 {
 +    ({
 +        let x = 0;
 +        x
 +    }) + {
 +        let x = 0;
 +        x
 +    }
 +}
 +"#,
 +        )
 +    }
 +
 +    #[test]
 +    fn inline_call_wrapped_in_parentheses() {
 +        check_assist(
 +            inline_call,
 +            r#"
 +fn foo() -> u32 {
 +    let x = 0;
 +    x
 +}
 +fn bar() -> u32 {
 +    foo$0() + foo()
 +}
 +"#,
 +            r#"
 +fn foo() -> u32 {
 +    let x = 0;
 +    x
 +}
 +fn bar() -> u32 {
 +    ({
 +        let x = 0;
 +        x
 +    }) + foo()
 +}
 +"#,
 +        )
 +    }
 +
 +    #[test]
 +    fn inline_call_defined_in_macro() {
 +        cov_mark::check!(inline_call_defined_in_macro);
 +        check_assist(
 +            inline_call,
 +            r#"
 +macro_rules! define_foo {
 +    () => { fn foo() -> u32 {
 +        let x = 0;
 +        x
 +    } };
 +}
 +define_foo!();
 +fn bar() -> u32 {
 +    foo$0()
 +}
 +"#,
 +            r#"
 +macro_rules! define_foo {
 +    () => { fn foo() -> u32 {
 +        let x = 0;
 +        x
 +    } };
 +}
 +define_foo!();
 +fn bar() -> u32 {
 +    {
 +      let x = 0;
 +      x
 +    }
 +}
 +"#,
 +        )
 +    }
 +
 +    #[test]
 +    fn inline_call_with_self_type() {
 +        check_assist(
 +            inline_call,
 +            r#"
 +struct A(u32);
 +impl A {
 +    fn f() -> Self { Self(114514) }
 +}
 +fn main() {
 +    A::f$0();
 +}
 +"#,
 +            r#"
 +struct A(u32);
 +impl A {
 +    fn f() -> Self { Self(114514) }
 +}
 +fn main() {
 +    A(114514);
 +}
++"#,
++        )
++    }
++
++    #[test]
++    fn inline_call_with_self_type_but_within_same_impl() {
++        check_assist(
++            inline_call,
++            r#"
++struct A(u32);
++impl A {
++    fn f() -> Self { Self(1919810) }
++    fn main() {
++        Self::f$0();
++    }
++}
++"#,
++            r#"
++struct A(u32);
++impl A {
++    fn f() -> Self { Self(1919810) }
++    fn main() {
++        Self(1919810);
++    }
++}
 +"#,
 +        )
 +    }
 +}
index 759742d347237520b0de7d09410ac5b92916f5b9,0000000000000000000000000000000000000000..a5e854b74df9d268504f2b4e39684c6002e4c335
mode 100644,000000..100644
--- /dev/null
@@@ -1,640 -1,0 +1,643 @@@
 +//! See `CompletionContext` structure.
 +
 +mod analysis;
 +#[cfg(test)]
 +mod tests;
 +
 +use std::iter;
 +
 +use base_db::SourceDatabaseExt;
 +use hir::{
 +    HasAttrs, Local, Name, PathResolution, ScopeDef, Semantics, SemanticsScope, Type, TypeInfo,
 +};
 +use ide_db::{
 +    base_db::{FilePosition, SourceDatabase},
 +    famous_defs::FamousDefs,
 +    FxHashMap, FxHashSet, RootDatabase,
 +};
 +use syntax::{
 +    ast::{self, AttrKind, NameOrNameRef},
 +    AstNode,
 +    SyntaxKind::{self, *},
 +    SyntaxToken, TextRange, TextSize,
 +};
 +use text_edit::Indel;
 +
 +use crate::CompletionConfig;
 +
 +const COMPLETION_MARKER: &str = "intellijRulezz";
 +
 +#[derive(Copy, Clone, Debug, PartialEq, Eq)]
 +pub(crate) enum PatternRefutability {
 +    Refutable,
 +    Irrefutable,
 +}
 +
 +#[derive(Debug)]
 +pub(crate) enum Visible {
 +    Yes,
 +    Editable,
 +    No,
 +}
 +
 +/// Existing qualifiers for the thing we are currently completing.
 +#[derive(Debug, Default)]
 +pub(super) struct QualifierCtx {
 +    pub(super) unsafe_tok: Option<SyntaxToken>,
 +    pub(super) vis_node: Option<ast::Visibility>,
 +}
 +
 +impl QualifierCtx {
 +    pub(super) fn none(&self) -> bool {
 +        self.unsafe_tok.is_none() && self.vis_node.is_none()
 +    }
 +}
 +
 +/// The state of the path we are currently completing.
 +#[derive(Debug)]
 +pub(crate) struct PathCompletionCtx {
 +    /// If this is a call with () already there (or {} in case of record patterns)
 +    pub(super) has_call_parens: bool,
 +    /// If this has a macro call bang !
 +    pub(super) has_macro_bang: bool,
 +    /// The qualifier of the current path.
 +    pub(super) qualified: Qualified,
 +    /// The parent of the path we are completing.
 +    pub(super) parent: Option<ast::Path>,
++    #[allow(dead_code)]
 +    /// The path of which we are completing the segment
 +    pub(super) path: ast::Path,
++    /// The path of which we are completing the segment in the original file
++    pub(crate) original_path: Option<ast::Path>,
 +    pub(super) kind: PathKind,
 +    /// Whether the path segment has type args or not.
 +    pub(super) has_type_args: bool,
 +    /// Whether the qualifier comes from a use tree parent or not
 +    pub(crate) use_tree_parent: bool,
 +}
 +
 +impl PathCompletionCtx {
 +    pub(super) fn is_trivial_path(&self) -> bool {
 +        matches!(
 +            self,
 +            PathCompletionCtx {
 +                has_call_parens: false,
 +                has_macro_bang: false,
 +                qualified: Qualified::No,
 +                parent: None,
 +                has_type_args: false,
 +                ..
 +            }
 +        )
 +    }
 +}
 +
 +/// The kind of path we are completing right now.
 +#[derive(Debug, PartialEq, Eq)]
 +pub(super) enum PathKind {
 +    Expr {
 +        expr_ctx: ExprCtx,
 +    },
 +    Type {
 +        location: TypeLocation,
 +    },
 +    Attr {
 +        attr_ctx: AttrCtx,
 +    },
 +    Derive {
 +        existing_derives: ExistingDerives,
 +    },
 +    /// Path in item position, that is inside an (Assoc)ItemList
 +    Item {
 +        kind: ItemListKind,
 +    },
 +    Pat {
 +        pat_ctx: PatternContext,
 +    },
 +    Vis {
 +        has_in_token: bool,
 +    },
 +    Use,
 +}
 +
 +pub(crate) type ExistingDerives = FxHashSet<hir::Macro>;
 +
 +#[derive(Debug, PartialEq, Eq)]
 +pub(crate) struct AttrCtx {
 +    pub(crate) kind: AttrKind,
 +    pub(crate) annotated_item_kind: Option<SyntaxKind>,
 +}
 +
 +#[derive(Debug, PartialEq, Eq)]
 +pub(crate) struct ExprCtx {
 +    pub(crate) in_block_expr: bool,
 +    pub(crate) in_loop_body: bool,
 +    pub(crate) after_if_expr: bool,
 +    /// Whether this expression is the direct condition of an if or while expression
 +    pub(crate) in_condition: bool,
 +    pub(crate) incomplete_let: bool,
 +    pub(crate) ref_expr_parent: Option<ast::RefExpr>,
 +    /// The surrounding RecordExpression we are completing a functional update
 +    pub(crate) is_func_update: Option<ast::RecordExpr>,
 +    pub(crate) self_param: Option<hir::SelfParam>,
 +    pub(crate) innermost_ret_ty: Option<hir::Type>,
 +    pub(crate) impl_: Option<ast::Impl>,
 +    /// Whether this expression occurs in match arm guard position: before the
 +    /// fat arrow token
 +    pub(crate) in_match_guard: bool,
 +}
 +
 +/// Original file ast nodes
 +#[derive(Clone, Debug, PartialEq, Eq)]
 +pub(crate) enum TypeLocation {
 +    TupleField,
 +    TypeAscription(TypeAscriptionTarget),
 +    GenericArgList(Option<ast::GenericArgList>),
 +    TypeBound,
 +    ImplTarget,
 +    ImplTrait,
 +    Other,
 +}
 +
 +#[derive(Clone, Debug, PartialEq, Eq)]
 +pub(crate) enum TypeAscriptionTarget {
 +    Let(Option<ast::Pat>),
 +    FnParam(Option<ast::Pat>),
 +    RetType(Option<ast::Expr>),
 +    Const(Option<ast::Expr>),
 +}
 +
 +/// The kind of item list a [`PathKind::Item`] belongs to.
 +#[derive(Debug, PartialEq, Eq)]
 +pub(super) enum ItemListKind {
 +    SourceFile,
 +    Module,
 +    Impl,
 +    TraitImpl(Option<ast::Impl>),
 +    Trait,
 +    ExternBlock,
 +}
 +
 +#[derive(Debug)]
 +pub(super) enum Qualified {
 +    No,
 +    With {
 +        path: ast::Path,
 +        resolution: Option<PathResolution>,
 +        /// How many `super` segments are present in the path
 +        ///
 +        /// This would be None, if path is not solely made of
 +        /// `super` segments, e.g.
 +        ///
 +        /// ```rust
 +        ///   use super::foo;
 +        /// ```
 +        ///
 +        /// Otherwise it should be Some(count of `super`)
 +        super_chain_len: Option<usize>,
 +    },
 +    /// <_>::
 +    TypeAnchor {
 +        ty: Option<hir::Type>,
 +        trait_: Option<hir::Trait>,
 +    },
 +    /// Whether the path is an absolute path
 +    Absolute,
 +}
 +
 +/// The state of the pattern we are completing.
 +#[derive(Debug, Clone, PartialEq, Eq)]
 +pub(super) struct PatternContext {
 +    pub(super) refutability: PatternRefutability,
 +    pub(super) param_ctx: Option<ParamContext>,
 +    pub(super) has_type_ascription: bool,
 +    pub(super) parent_pat: Option<ast::Pat>,
 +    pub(super) ref_token: Option<SyntaxToken>,
 +    pub(super) mut_token: Option<SyntaxToken>,
 +    /// The record pattern this name or ref is a field of
 +    pub(super) record_pat: Option<ast::RecordPat>,
 +    pub(super) impl_: Option<ast::Impl>,
 +}
 +
 +#[derive(Debug, Clone, PartialEq, Eq)]
 +pub(super) struct ParamContext {
 +    pub(super) param_list: ast::ParamList,
 +    pub(super) param: ast::Param,
 +    pub(super) kind: ParamKind,
 +}
 +
 +/// The state of the lifetime we are completing.
 +#[derive(Debug)]
 +pub(super) struct LifetimeContext {
 +    pub(super) lifetime: Option<ast::Lifetime>,
 +    pub(super) kind: LifetimeKind,
 +}
 +
 +/// The kind of lifetime we are completing.
 +#[derive(Debug)]
 +pub(super) enum LifetimeKind {
 +    LifetimeParam { is_decl: bool, param: ast::LifetimeParam },
 +    Lifetime,
 +    LabelRef,
 +    LabelDef,
 +}
 +
 +/// The state of the name we are completing.
 +#[derive(Debug)]
 +pub(super) struct NameContext {
 +    #[allow(dead_code)]
 +    pub(super) name: Option<ast::Name>,
 +    pub(super) kind: NameKind,
 +}
 +
 +/// The kind of the name we are completing.
 +#[derive(Debug)]
 +#[allow(dead_code)]
 +pub(super) enum NameKind {
 +    Const,
 +    ConstParam,
 +    Enum,
 +    Function,
 +    IdentPat(PatternContext),
 +    MacroDef,
 +    MacroRules,
 +    /// Fake node
 +    Module(ast::Module),
 +    RecordField,
 +    Rename,
 +    SelfParam,
 +    Static,
 +    Struct,
 +    Trait,
 +    TypeAlias,
 +    TypeParam,
 +    Union,
 +    Variant,
 +}
 +
 +/// The state of the NameRef we are completing.
 +#[derive(Debug)]
 +pub(super) struct NameRefContext {
 +    /// NameRef syntax in the original file
 +    pub(super) nameref: Option<ast::NameRef>,
 +    pub(super) kind: NameRefKind,
 +}
 +
 +/// The kind of the NameRef we are completing.
 +#[derive(Debug)]
 +pub(super) enum NameRefKind {
 +    Path(PathCompletionCtx),
 +    DotAccess(DotAccess),
 +    /// Position where we are only interested in keyword completions
 +    Keyword(ast::Item),
 +    /// The record expression this nameref is a field of and whether a dot precedes the completion identifier.
 +    RecordExpr {
 +        dot_prefix: bool,
 +        expr: ast::RecordExpr,
 +    },
 +    Pattern(PatternContext),
 +}
 +
 +/// The identifier we are currently completing.
 +#[derive(Debug)]
 +pub(super) enum CompletionAnalysis {
 +    Name(NameContext),
 +    NameRef(NameRefContext),
 +    Lifetime(LifetimeContext),
 +    /// The string the cursor is currently inside
 +    String {
 +        /// original token
 +        original: ast::String,
 +        /// fake token
 +        expanded: Option<ast::String>,
 +    },
 +    /// Set if we are currently completing in an unexpanded attribute, this usually implies a builtin attribute like `allow($0)`
 +    UnexpandedAttrTT {
 +        colon_prefix: bool,
 +        fake_attribute_under_caret: Option<ast::Attr>,
 +    },
 +}
 +
/// Information about the field or method access we are completing.
#[derive(Debug)]
pub(super) struct DotAccess {
    /// The receiver expression (the part before the `.`), if present.
    pub(super) receiver: Option<ast::Expr>,
    /// The inferred type of the receiver, if type inference succeeded.
    pub(super) receiver_ty: Option<TypeInfo>,
    /// Whether this is a field-access or method-call position.
    pub(super) kind: DotAccessKind,
}
 +
/// Distinguishes `recv.field$0` positions from `recv.method$0(…)` positions.
#[derive(Debug)]
pub(super) enum DotAccessKind {
    Field {
        /// True if the receiver is an integer and there is no ident in the original file after it yet
        /// like `0.$0`
        receiver_is_ambiguous_float_literal: bool,
    },
    Method {
        /// Presumably whether argument parentheses already follow the method
        /// name — NOTE(review): confirm at the construction site.
        has_parens: bool,
    },
}
 +
/// The kind of callable a parameter being completed belongs to.
#[derive(Clone, Debug, PartialEq, Eq)]
pub(crate) enum ParamKind {
    /// Parameter of a named function.
    Function(ast::Fn),
    /// Parameter of a closure.
    Closure(ast::ClosureExpr),
}
 +
/// `CompletionContext` is created early during completion to figure out, where
/// exactly is the cursor, syntax-wise.
#[derive(Debug)]
pub(crate) struct CompletionContext<'a> {
    /// Semantic analysis layer over the database.
    pub(super) sema: Semantics<'a, RootDatabase>,
    /// The semantic scope at the completion offset.
    pub(super) scope: SemanticsScope<'a>,
    pub(super) db: &'a RootDatabase,
    /// User-facing completion configuration.
    pub(super) config: &'a CompletionConfig,
    /// File and offset the completion was invoked at.
    pub(super) position: FilePosition,

    /// The token before the cursor, in the original file.
    pub(super) original_token: SyntaxToken,
    /// The token before the cursor, in the macro-expanded file.
    pub(super) token: SyntaxToken,
    /// The crate of the current file.
    pub(super) krate: hir::Crate,
    /// The module of the `scope`.
    pub(super) module: hir::Module,

    /// The expected name of what we are completing.
    /// This is usually the parameter name of the function argument we are completing.
    pub(super) expected_name: Option<NameOrNameRef>,
    /// The expected type of what we are completing.
    pub(super) expected_type: Option<Type>,

    /// Context of qualifiers preceding the completion position; see [`QualifierCtx`].
    pub(super) qualifier_ctx: QualifierCtx,

    /// All local bindings in scope at the completion position, keyed by name.
    pub(super) locals: FxHashMap<Name, Local>,

    /// The module depth of the current module of the cursor position.
    /// - crate-root
    ///  - mod foo
    ///   - mod bar
    /// Here depth will be 2
    pub(super) depth_from_crate_root: usize,
}
 +
 +impl<'a> CompletionContext<'a> {
 +    /// The range of the identifier that is being completed.
 +    pub(crate) fn source_range(&self) -> TextRange {
 +        // check kind of macro-expanded token, but use range of original token
 +        let kind = self.token.kind();
 +        match kind {
 +            CHAR => {
 +                // assume we are completing a lifetime but the user has only typed the '
 +                cov_mark::hit!(completes_if_lifetime_without_idents);
 +                TextRange::at(self.original_token.text_range().start(), TextSize::from(1))
 +            }
 +            IDENT | LIFETIME_IDENT | UNDERSCORE => self.original_token.text_range(),
 +            _ if kind.is_keyword() => self.original_token.text_range(),
 +            _ => TextRange::empty(self.position.offset),
 +        }
 +    }
 +
 +    pub(crate) fn famous_defs(&self) -> FamousDefs<'_, '_> {
 +        FamousDefs(&self.sema, self.krate)
 +    }
 +
 +    /// Checks if an item is visible and not `doc(hidden)` at the completion site.
 +    pub(crate) fn def_is_visible(&self, item: &ScopeDef) -> Visible {
 +        match item {
 +            ScopeDef::ModuleDef(def) => match def {
 +                hir::ModuleDef::Module(it) => self.is_visible(it),
 +                hir::ModuleDef::Function(it) => self.is_visible(it),
 +                hir::ModuleDef::Adt(it) => self.is_visible(it),
 +                hir::ModuleDef::Variant(it) => self.is_visible(it),
 +                hir::ModuleDef::Const(it) => self.is_visible(it),
 +                hir::ModuleDef::Static(it) => self.is_visible(it),
 +                hir::ModuleDef::Trait(it) => self.is_visible(it),
 +                hir::ModuleDef::TypeAlias(it) => self.is_visible(it),
 +                hir::ModuleDef::Macro(it) => self.is_visible(it),
 +                hir::ModuleDef::BuiltinType(_) => Visible::Yes,
 +            },
 +            ScopeDef::GenericParam(_)
 +            | ScopeDef::ImplSelfType(_)
 +            | ScopeDef::AdtSelfType(_)
 +            | ScopeDef::Local(_)
 +            | ScopeDef::Label(_)
 +            | ScopeDef::Unknown => Visible::Yes,
 +        }
 +    }
 +
 +    /// Checks if an item is visible and not `doc(hidden)` at the completion site.
 +    pub(crate) fn is_visible<I>(&self, item: &I) -> Visible
 +    where
 +        I: hir::HasVisibility + hir::HasAttrs + hir::HasCrate + Copy,
 +    {
 +        let vis = item.visibility(self.db);
 +        let attrs = item.attrs(self.db);
 +        self.is_visible_impl(&vis, &attrs, item.krate(self.db))
 +    }
 +
 +    /// Check if an item is `#[doc(hidden)]`.
 +    pub(crate) fn is_item_hidden(&self, item: &hir::ItemInNs) -> bool {
 +        let attrs = item.attrs(self.db);
 +        let krate = item.krate(self.db);
 +        match (attrs, krate) {
 +            (Some(attrs), Some(krate)) => self.is_doc_hidden(&attrs, krate),
 +            _ => false,
 +        }
 +    }
 +
 +    /// Whether the given trait is an operator trait or not.
 +    pub(crate) fn is_ops_trait(&self, trait_: hir::Trait) -> bool {
 +        match trait_.attrs(self.db).lang() {
 +            Some(lang) => OP_TRAIT_LANG_NAMES.contains(&lang.as_str()),
 +            None => false,
 +        }
 +    }
 +
 +    /// Returns the traits in scope, with the [`Drop`] trait removed.
 +    pub(crate) fn traits_in_scope(&self) -> hir::VisibleTraits {
 +        let mut traits_in_scope = self.scope.visible_traits();
 +        if let Some(drop) = self.famous_defs().core_ops_Drop() {
 +            traits_in_scope.0.remove(&drop.into());
 +        }
 +        traits_in_scope
 +    }
 +
 +    pub(crate) fn iterate_path_candidates(
 +        &self,
 +        ty: &hir::Type,
 +        mut cb: impl FnMut(hir::AssocItem),
 +    ) {
 +        let mut seen = FxHashSet::default();
 +        ty.iterate_path_candidates(
 +            self.db,
 +            &self.scope,
 +            &self.traits_in_scope(),
 +            Some(self.module),
 +            None,
 +            |item| {
 +                // We might iterate candidates of a trait multiple times here, so deduplicate
 +                // them.
 +                if seen.insert(item) {
 +                    cb(item)
 +                }
 +                None::<()>
 +            },
 +        );
 +    }
 +
 +    /// A version of [`SemanticsScope::process_all_names`] that filters out `#[doc(hidden)]` items.
 +    pub(crate) fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
 +        let _p = profile::span("CompletionContext::process_all_names");
 +        self.scope.process_all_names(&mut |name, def| {
 +            if self.is_scope_def_hidden(def) {
 +                return;
 +            }
 +
 +            f(name, def);
 +        });
 +    }
 +
 +    pub(crate) fn process_all_names_raw(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
 +        let _p = profile::span("CompletionContext::process_all_names_raw");
 +        self.scope.process_all_names(&mut |name, def| f(name, def));
 +    }
 +
 +    fn is_scope_def_hidden(&self, scope_def: ScopeDef) -> bool {
 +        if let (Some(attrs), Some(krate)) = (scope_def.attrs(self.db), scope_def.krate(self.db)) {
 +            return self.is_doc_hidden(&attrs, krate);
 +        }
 +
 +        false
 +    }
 +
 +    fn is_visible_impl(
 +        &self,
 +        vis: &hir::Visibility,
 +        attrs: &hir::Attrs,
 +        defining_crate: hir::Crate,
 +    ) -> Visible {
 +        if !vis.is_visible_from(self.db, self.module.into()) {
 +            if !self.config.enable_private_editable {
 +                return Visible::No;
 +            }
 +            // If the definition location is editable, also show private items
 +            let root_file = defining_crate.root_file(self.db);
 +            let source_root_id = self.db.file_source_root(root_file);
 +            let is_editable = !self.db.source_root(source_root_id).is_library;
 +            return if is_editable { Visible::Editable } else { Visible::No };
 +        }
 +
 +        if self.is_doc_hidden(attrs, defining_crate) {
 +            Visible::No
 +        } else {
 +            Visible::Yes
 +        }
 +    }
 +
 +    fn is_doc_hidden(&self, attrs: &hir::Attrs, defining_crate: hir::Crate) -> bool {
 +        // `doc(hidden)` items are only completed within the defining crate.
 +        self.krate != defining_crate && attrs.has_doc_hidden()
 +    }
 +}
 +
// CompletionContext construction
impl<'a> CompletionContext<'a> {
    /// Builds the completion context for the cursor at `position` and runs the
    /// syntactic/semantic analysis of the surrounding code.
    ///
    /// Returns `None` when no completion is possible at the position (no token
    /// at the offset, no semantic scope, or the analysis bails out).
    pub(super) fn new(
        db: &'a RootDatabase,
        position @ FilePosition { file_id, offset }: FilePosition,
        config: &'a CompletionConfig,
    ) -> Option<(CompletionContext<'a>, CompletionAnalysis)> {
        let _p = profile::span("CompletionContext::new");
        let sema = Semantics::new(db);

        let original_file = sema.parse(file_id);

        // Insert a fake ident to get a valid parse tree. We will use this file
        // to determine context, though the original_file will be used for
        // actual completion.
        let file_with_fake_ident = {
            let parse = db.parse(file_id);
            let edit = Indel::insert(offset, COMPLETION_MARKER.to_string());
            parse.reparse(&edit).tree()
        };
        // In the speculative file the token *after* the cursor is the marker we
        // just inserted, hence `right_biased`.
        let fake_ident_token =
            file_with_fake_ident.syntax().token_at_offset(offset).right_biased()?;

        // In the original file the interesting token is the one *before* the
        // cursor, hence `left_biased`.
        let original_token = original_file.syntax().token_at_offset(offset).left_biased()?;
        let token = sema.descend_into_macros_single(original_token.clone());

        // adjust for macro input, this still fails if there is no token written yet
        let scope_offset = if original_token == token { offset } else { token.text_range().end() };
        let scope = sema.scope_at_offset(&token.parent()?, scope_offset)?;

        let krate = scope.krate();
        let module = scope.module();

        // Collect every local binding visible at the cursor, keyed by name.
        let mut locals = FxHashMap::default();
        scope.process_all_names(&mut |name, scope| {
            if let ScopeDef::Local(local) = scope {
                locals.insert(name, local);
            }
        });

        // Number of modules between the cursor's module and the crate root.
        let depth_from_crate_root = iter::successors(module.parent(db), |m| m.parent(db)).count();

        let mut ctx = CompletionContext {
            sema,
            scope,
            db,
            config,
            position,
            original_token,
            token,
            krate,
            module,
            // `expected_name`/`expected_type` are filled in by `expand_and_analyze`.
            expected_name: None,
            expected_type: None,
            qualifier_ctx: Default::default(),
            locals,
            depth_from_crate_root,
        };
        // Failure here means completion is not applicable at this position at all.
        let ident_ctx = ctx.expand_and_analyze(
            original_file.syntax().clone(),
            file_with_fake_ident.syntax().clone(),
            offset,
            fake_ident_token,
        )?;
        Some((ctx, ident_ctx))
    }
}
 +
/// Lang-item names of the operator traits (`core::ops` / `core::cmp`), used by
/// `is_ops_trait` to recognize operator traits via their `#[lang = "…"]`
/// attribute.
const OP_TRAIT_LANG_NAMES: &[&str] = &[
    "add_assign",
    "add",
    "bitand_assign",
    "bitand",
    "bitor_assign",
    "bitor",
    "bitxor_assign",
    "bitxor",
    "deref_mut",
    "deref",
    "div_assign",
    "div",
    "eq",
    "fn_mut",
    "fn_once",
    "fn",
    "index_mut",
    "index",
    "mul_assign",
    "mul",
    "neg",
    "not",
    "partial_ord",
    "rem_assign",
    "rem",
    "shl_assign",
    "shl",
    "shr_assign",
    "shr",
    // Fix: `SubAssign` was missing even though every other binary operator
    // lists its `_assign` counterpart.
    "sub_assign",
    "sub",
];
index 22ec7cead4988cc54d33b9f8c49e6c4b34cd1d88,0000000000000000000000000000000000000000..01dd9a234f553c354a47eb3487fc610a37b8df26
mode 100644,000000..100644
--- /dev/null
@@@ -1,1293 -1,0 +1,1296 @@@
 +//! Module responsible for analyzing the code surrounding the cursor for completion.
 +use std::iter;
 +
 +use hir::{Semantics, Type, TypeInfo};
 +use ide_db::{active_parameter::ActiveParameter, RootDatabase};
 +use syntax::{
 +    algo::{find_node_at_offset, non_trivia_sibling},
 +    ast::{self, AttrKind, HasArgList, HasLoopBody, HasName, NameOrNameRef},
 +    match_ast, AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode,
 +    SyntaxToken, TextRange, TextSize, T,
 +};
 +
 +use crate::context::{
 +    AttrCtx, CompletionAnalysis, CompletionContext, DotAccess, DotAccessKind, ExprCtx,
 +    ItemListKind, LifetimeContext, LifetimeKind, NameContext, NameKind, NameRefContext,
 +    NameRefKind, ParamContext, ParamKind, PathCompletionCtx, PathKind, PatternContext,
 +    PatternRefutability, Qualified, QualifierCtx, TypeAscriptionTarget, TypeLocation,
 +    COMPLETION_MARKER,
 +};
 +
 +impl<'a> CompletionContext<'a> {
 +    /// Expand attributes and macro calls at the current cursor position for both the original file
 +    /// and fake file repeatedly. As soon as one of the two expansions fail we stop so the original
 +    /// and speculative states stay in sync.
 +    pub(super) fn expand_and_analyze(
 +        &mut self,
 +        mut original_file: SyntaxNode,
 +        mut speculative_file: SyntaxNode,
 +        mut offset: TextSize,
 +        mut fake_ident_token: SyntaxToken,
 +    ) -> Option<CompletionAnalysis> {
 +        let _p = profile::span("CompletionContext::expand_and_fill");
 +        let mut derive_ctx = None;
 +
 +        'expansion: loop {
 +            let parent_item =
 +                |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
 +            let ancestor_items = iter::successors(
 +                Option::zip(
 +                    find_node_at_offset::<ast::Item>(&original_file, offset),
 +                    find_node_at_offset::<ast::Item>(&speculative_file, offset),
 +                ),
 +                |(a, b)| parent_item(a).zip(parent_item(b)),
 +            );
 +
 +            // first try to expand attributes as these are always the outermost macro calls
 +            'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items {
 +                match (
 +                    self.sema.expand_attr_macro(&actual_item),
 +                    self.sema.speculative_expand_attr_macro(
 +                        &actual_item,
 +                        &item_with_fake_ident,
 +                        fake_ident_token.clone(),
 +                    ),
 +                ) {
 +                    // maybe parent items have attributes, so continue walking the ancestors
 +                    (None, None) => continue 'ancestors,
 +                    // successful expansions
 +                    (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
 +                        let new_offset = fake_mapped_token.text_range().start();
 +                        if new_offset > actual_expansion.text_range().end() {
 +                            // offset outside of bounds from the original expansion,
 +                            // stop here to prevent problems from happening
 +                            break 'expansion;
 +                        }
 +                        original_file = actual_expansion;
 +                        speculative_file = fake_expansion;
 +                        fake_ident_token = fake_mapped_token;
 +                        offset = new_offset;
 +                        continue 'expansion;
 +                    }
 +                    // exactly one expansion failed, inconsistent state so stop expanding completely
 +                    _ => break 'expansion,
 +                }
 +            }
 +
 +            // No attributes have been expanded, so look for macro_call! token trees or derive token trees
 +            let orig_tt = match find_node_at_offset::<ast::TokenTree>(&original_file, offset) {
 +                Some(it) => it,
 +                None => break 'expansion,
 +            };
 +            let spec_tt = match find_node_at_offset::<ast::TokenTree>(&speculative_file, offset) {
 +                Some(it) => it,
 +                None => break 'expansion,
 +            };
 +
 +            // Expand pseudo-derive expansion
 +            if let (Some(orig_attr), Some(spec_attr)) = (
 +                orig_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
 +                spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
 +            ) {
 +                if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = (
 +                    self.sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
 +                    self.sema.speculative_expand_derive_as_pseudo_attr_macro(
 +                        &orig_attr,
 +                        &spec_attr,
 +                        fake_ident_token.clone(),
 +                    ),
 +                ) {
 +                    derive_ctx = Some((
 +                        actual_expansion,
 +                        fake_expansion,
 +                        fake_mapped_token.text_range().start(),
 +                        orig_attr,
 +                    ));
 +                }
 +                // at this point we won't have any more successful expansions, so stop
 +                break 'expansion;
 +            }
 +
 +            // Expand fn-like macro calls
 +            if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
 +                orig_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
 +                spec_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
 +            ) {
 +                let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
 +                let mac_call_path1 =
 +                    macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());
 +
 +                // inconsistent state, stop expanding
 +                if mac_call_path0 != mac_call_path1 {
 +                    break 'expansion;
 +                }
 +                let speculative_args = match macro_call_with_fake_ident.token_tree() {
 +                    Some(tt) => tt,
 +                    None => break 'expansion,
 +                };
 +
 +                match (
 +                    self.sema.expand(&actual_macro_call),
 +                    self.sema.speculative_expand(
 +                        &actual_macro_call,
 +                        &speculative_args,
 +                        fake_ident_token.clone(),
 +                    ),
 +                ) {
 +                    // successful expansions
 +                    (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
 +                        let new_offset = fake_mapped_token.text_range().start();
 +                        if new_offset > actual_expansion.text_range().end() {
 +                            // offset outside of bounds from the original expansion,
 +                            // stop here to prevent problems from happening
 +                            break 'expansion;
 +                        }
 +                        original_file = actual_expansion;
 +                        speculative_file = fake_expansion;
 +                        fake_ident_token = fake_mapped_token;
 +                        offset = new_offset;
 +                        continue 'expansion;
 +                    }
 +                    // at least on expansion failed, we won't have anything to expand from this point
 +                    // onwards so break out
 +                    _ => break 'expansion,
 +                }
 +            }
 +
 +            // none of our states have changed so stop the loop
 +            break 'expansion;
 +        }
 +
 +        self.analyze(&original_file, speculative_file, offset, derive_ctx)
 +    }
 +
    /// Calculate the expected type and name of the cursor position.
    ///
    /// Walks up the ancestors of the cursor token until a node is found whose
    /// semantics determine an expected type (let bindings, call arguments,
    /// record fields, match arms, …). Returns `(None, None)` when nothing can
    /// be inferred.
    fn expected_type_and_name(
        &self,
        name_like: &ast::NameLike,
    ) -> (Option<Type>, Option<NameOrNameRef>) {
        let mut node = match self.token.parent() {
            Some(it) => it,
            None => return (None, None),
        };

        // For a NameRef wrapped in reference expressions (e.g. `foo(&&x$0)`),
        // peel one reference off the expected type per enclosing `RefExpr`.
        let strip_refs = |mut ty: Type| match name_like {
            ast::NameLike::NameRef(n) => {
                let p = match n.syntax().parent() {
                    Some(it) => it,
                    None => return ty,
                };
                // Find the outermost field-access / path expression this NameRef is part of.
                let top_syn = match_ast! {
                    match p {
                        ast::FieldExpr(e) => e
                            .syntax()
                            .ancestors()
                            .map_while(ast::FieldExpr::cast)
                            .last()
                            .map(|it| it.syntax().clone()),
                        ast::PathSegment(e) => e
                            .syntax()
                            .ancestors()
                            .skip(1)
                            .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind()))
                            .find_map(ast::PathExpr::cast)
                            .map(|it| it.syntax().clone()),
                        _ => None
                    }
                };
                let top_syn = match top_syn {
                    Some(it) => it,
                    None => return ty,
                };
                for _ in top_syn.ancestors().skip(1).map_while(ast::RefExpr::cast) {
                    cov_mark::hit!(expected_type_fn_param_ref);
                    ty = ty.strip_reference();
                }
                ty
            }
            _ => ty,
        };

        // Climb the ancestor chain; each arm either produces a result or
        // continues with the parent node.
        loop {
            break match_ast! {
                match node {
                    ast::LetStmt(it) => {
                        cov_mark::hit!(expected_type_let_with_leading_char);
                        cov_mark::hit!(expected_type_let_without_leading_char);
                        // Prefer the pattern's type; fall back to the initializer's.
                        let ty = it.pat()
                            .and_then(|pat| self.sema.type_of_pat(&pat))
                            .or_else(|| it.initializer().and_then(|it| self.sema.type_of_expr(&it)))
                            .map(TypeInfo::original);
                        let name = match it.pat() {
                            Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name),
                            Some(_) | None => None,
                        };

                        (ty, name)
                    },
                    ast::LetExpr(it) => {
                        cov_mark::hit!(expected_type_if_let_without_leading_char);
                        let ty = it.pat()
                            .and_then(|pat| self.sema.type_of_pat(&pat))
                            .or_else(|| it.expr().and_then(|it| self.sema.type_of_expr(&it)))
                            .map(TypeInfo::original);
                        (ty, None)
                    },
                    ast::ArgList(_) => {
                        cov_mark::hit!(expected_type_fn_param);
                        // Expected type/name come from the parameter the cursor argument maps to.
                        ActiveParameter::at_token(
                            &self.sema,
                            self.token.clone(),
                        ).map(|ap| {
                            let name = ap.ident().map(NameOrNameRef::Name);

                            let ty = strip_refs(ap.ty);
                            (Some(ty), name)
                        })
                        .unwrap_or((None, None))
                    },
                    ast::RecordExprFieldList(it) => {
                        // wouldn't try {} be nice...
                        (|| {
                            if self.token.kind() == T![..]
                                || self.token.prev_token().map(|t| t.kind()) == Some(T![..])
                            {
                                cov_mark::hit!(expected_type_struct_func_update);
                                let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?;
                                let ty = self.sema.type_of_expr(&record_expr.into())?;
                                Some((
                                    Some(ty.original),
                                    None
                                ))
                            } else {
                                cov_mark::hit!(expected_type_struct_field_without_leading_char);
                                let expr_field = self.token.prev_sibling_or_token()?
                                    .into_node()
                                    .and_then(ast::RecordExprField::cast)?;
                                let (_, _, ty) = self.sema.resolve_record_field(&expr_field)?;
                                Some((
                                    Some(ty),
                                    expr_field.field_name().map(NameOrNameRef::NameRef),
                                ))
                            }
                        })().unwrap_or((None, None))
                    },
                    ast::RecordExprField(it) => {
                        if let Some(expr) = it.expr() {
                            cov_mark::hit!(expected_type_struct_field_with_leading_char);
                            (
                                self.sema.type_of_expr(&expr).map(TypeInfo::original),
                                it.field_name().map(NameOrNameRef::NameRef),
                            )
                        } else {
                            cov_mark::hit!(expected_type_struct_field_followed_by_comma);
                            let ty = self.sema.resolve_record_field(&it)
                                .map(|(_, _, ty)| ty);
                            (
                                ty,
                                it.field_name().map(NameOrNameRef::NameRef),
                            )
                        }
                    },
                    // match foo { $0 }
                    // match foo { ..., pat => $0 }
                    ast::MatchExpr(it) => {
                        let on_arrow = previous_non_trivia_token(self.token.clone()).map_or(false, |it| T![=>] == it.kind());

                        let ty = if on_arrow {
                            // match foo { ..., pat => $0 }
                            cov_mark::hit!(expected_type_match_arm_body_without_leading_char);
                            cov_mark::hit!(expected_type_match_arm_body_with_leading_char);
                            self.sema.type_of_expr(&it.into())
                        } else {
                            // match foo { $0 }
                            cov_mark::hit!(expected_type_match_arm_without_leading_char);
                            it.expr().and_then(|e| self.sema.type_of_expr(&e))
                        }.map(TypeInfo::original);
                        (ty, None)
                    },
                    ast::IfExpr(it) => {
                        let ty = it.condition()
                            .and_then(|e| self.sema.type_of_expr(&e))
                            .map(TypeInfo::original);
                        (ty, None)
                    },
                    ast::IdentPat(it) => {
                        cov_mark::hit!(expected_type_if_let_with_leading_char);
                        cov_mark::hit!(expected_type_match_arm_with_leading_char);
                        let ty = self.sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original);
                        (ty, None)
                    },
                    ast::Fn(it) => {
                        cov_mark::hit!(expected_type_fn_ret_with_leading_char);
                        cov_mark::hit!(expected_type_fn_ret_without_leading_char);
                        let def = self.sema.to_def(&it);
                        (def.map(|def| def.ret_type(self.db)), None)
                    },
                    ast::ClosureExpr(it) => {
                        let ty = self.sema.type_of_expr(&it.into());
                        ty.and_then(|ty| ty.original.as_callable(self.db))
                            .map(|c| (Some(c.return_type()), None))
                            .unwrap_or((None, None))
                    },
                    // Stop the walk at statement/item boundaries: nothing above
                    // them constrains the expected type.
                    ast::ParamList(_) => (None, None),
                    ast::Stmt(_) => (None, None),
                    ast::Item(_) => (None, None),
                    _ => {
                        match node.parent() {
                            Some(n) => {
                                node = n;
                                continue;
                            },
                            None => (None, None),
                        }
                    },
                }
            };
        }
    }
 +
    /// Fill the completion context, this is what does semantic reasoning about the surrounding context
    /// of the completion location.
    ///
    /// Returns `None` when the position does not support completion at all.
    fn analyze(
        &mut self,
        original_file: &SyntaxNode,
        file_with_fake_ident: SyntaxNode,
        offset: TextSize,
        derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
    ) -> Option<CompletionAnalysis> {
        let fake_ident_token = file_with_fake_ident.token_at_offset(offset).right_biased()?;
        let syntax_element = NodeOrToken::Token(fake_ident_token);
        if is_in_token_of_for_loop(syntax_element.clone()) {
            // for pat $0
            // there is nothing to complete here except `in` keyword
            // don't bother populating the context
            // FIXME: the completion calculations should end up good enough
            // such that this special case becomes unnecessary
            return None;
        }

        // Overwrite the path kind for derives
        if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
            if let Some(ast::NameLike::NameRef(name_ref)) =
                find_node_at_offset(&file_with_fake_ident, offset)
            {
                let parent = name_ref.syntax().parent()?;
                let (mut nameref_ctx, _) =
                    Self::classify_name_ref(&self.sema, &original_file, name_ref, parent)?;
                if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
                    // Record the derives already present so they are not offered again.
                    path_ctx.kind = PathKind::Derive {
                        existing_derives: self
                            .sema
                            .resolve_derive_macro(&origin_attr)
                            .into_iter()
                            .flatten()
                            .flatten()
                            .collect(),
                    };
                }
                return Some(CompletionAnalysis::NameRef(nameref_ctx));
            }
            return None;
        }

        let name_like = match find_node_at_offset(&file_with_fake_ident, offset) {
            Some(it) => it,
            None => {
                // No name-like node at the cursor: we are either inside a
                // string literal or inside an unexpanded attribute token tree.
                let analysis =
                    if let Some(original) = ast::String::cast(self.original_token.clone()) {
                        CompletionAnalysis::String {
                            original,
                            expanded: ast::String::cast(self.token.clone()),
                        }
                    } else {
                        // Fix up trailing whitespace problem
                        // #[attr(foo = $0
                        let token =
                            syntax::algo::skip_trivia_token(self.token.clone(), Direction::Prev)?;
                        let p = token.parent()?;
                        if p.kind() == SyntaxKind::TOKEN_TREE
                            && p.ancestors().any(|it| it.kind() == SyntaxKind::META)
                        {
                            let colon_prefix = previous_non_trivia_token(self.token.clone())
                                .map_or(false, |it| T![:] == it.kind());
                            CompletionAnalysis::UnexpandedAttrTT {
                                fake_attribute_under_caret: syntax_element
                                    .ancestors()
                                    .find_map(ast::Attr::cast),
                                colon_prefix,
                            }
                        } else {
                            return None;
                        }
                    };
                return Some(analysis);
            }
        };
        // Side effect: stores the inferred expectation on the context for renderers.
        (self.expected_type, self.expected_name) = self.expected_type_and_name(&name_like);
        let analysis = match name_like {
            ast::NameLike::Lifetime(lifetime) => CompletionAnalysis::Lifetime(
                Self::classify_lifetime(&self.sema, original_file, lifetime)?,
            ),
            ast::NameLike::NameRef(name_ref) => {
                let parent = name_ref.syntax().parent()?;
                let (nameref_ctx, qualifier_ctx) =
                    Self::classify_name_ref(&self.sema, &original_file, name_ref, parent.clone())?;

                self.qualifier_ctx = qualifier_ctx;
                CompletionAnalysis::NameRef(nameref_ctx)
            }
            ast::NameLike::Name(name) => {
                let name_ctx = Self::classify_name(&self.sema, original_file, name)?;
                CompletionAnalysis::Name(name_ctx)
            }
        };
        Some(analysis)
    }
 +
 +    fn classify_lifetime(
 +        _sema: &Semantics<'_, RootDatabase>,
 +        original_file: &SyntaxNode,
 +        lifetime: ast::Lifetime,
 +    ) -> Option<LifetimeContext> {
 +        let parent = lifetime.syntax().parent()?;
 +        if parent.kind() == SyntaxKind::ERROR {
 +            return None;
 +        }
 +
 +        let kind = match_ast! {
 +            match parent {
 +                ast::LifetimeParam(param) => LifetimeKind::LifetimeParam {
 +                    is_decl: param.lifetime().as_ref() == Some(&lifetime),
 +                    param
 +                },
 +                ast::BreakExpr(_) => LifetimeKind::LabelRef,
 +                ast::ContinueExpr(_) => LifetimeKind::LabelRef,
 +                ast::Label(_) => LifetimeKind::LabelDef,
 +                _ => LifetimeKind::Lifetime,
 +            }
 +        };
 +        let lifetime = find_node_at_offset(&original_file, lifetime.syntax().text_range().start());
 +
 +        Some(LifetimeContext { lifetime, kind })
 +    }
 +
    /// Classify an `ast::Name` (a defining occurrence) by the node that declares it.
    ///
    /// The resulting `NameContext` carries the kind of the enclosing declaration
    /// and the corresponding `Name` node looked up in `original_file` (which may
    /// not exist there, hence the `Option`-typed field).
    fn classify_name(
        sema: &Semantics<'_, RootDatabase>,
        original_file: &SyntaxNode,
        name: ast::Name,
    ) -> Option<NameContext> {
        let parent = name.syntax().parent()?;
        // Dispatch purely on the syntactic parent; names whose parent is none of
        // these kinds yield no completion context.
        let kind = match_ast! {
            match parent {
                ast::Const(_) => NameKind::Const,
                ast::ConstParam(_) => NameKind::ConstParam,
                ast::Enum(_) => NameKind::Enum,
                ast::Fn(_) => NameKind::Function,
                ast::IdentPat(bind_pat) => {
                    let mut pat_ctx = pattern_context_for(sema, original_file, bind_pat.into());
                    // Shorthand record-pattern field (`Foo { bar$0 }`): also record
                    // the surrounding record pattern, remapped to the original file.
                    if let Some(record_field) = ast::RecordPatField::for_field_name(&name) {
                        pat_ctx.record_pat = find_node_in_file_compensated(sema, original_file, &record_field.parent_record_pat());
                    }

                    NameKind::IdentPat(pat_ctx)
                },
                ast::MacroDef(_) => NameKind::MacroDef,
                ast::MacroRules(_) => NameKind::MacroRules,
                ast::Module(module) => NameKind::Module(module),
                ast::RecordField(_) => NameKind::RecordField,
                ast::Rename(_) => NameKind::Rename,
                ast::SelfParam(_) => NameKind::SelfParam,
                ast::Static(_) => NameKind::Static,
                ast::Struct(_) => NameKind::Struct,
                ast::Trait(_) => NameKind::Trait,
                ast::TypeAlias(_) => NameKind::TypeAlias,
                ast::TypeParam(_) => NameKind::TypeParam,
                ast::Union(_) => NameKind::Union,
                ast::Variant(_) => NameKind::Variant,
                _ => return None,
            }
        };
        // Remap the (potentially fake) name node back into the original file.
        let name = find_node_at_offset(&original_file, name.syntax().text_range().start());
        Some(NameContext { name, kind })
    }
 +
    /// Classify an `ast::NameRef` at the completion position.
    ///
    /// This is the workhorse of completion analysis: given the name ref and its
    /// `parent` from the file with the fake ident inserted, it builds the
    /// `NameRefContext` — most importantly a `PathCompletionCtx` describing what
    /// kind of path (expression, type, pattern, attribute, use-tree, visibility,
    /// item position, …) is being completed — plus a `QualifierCtx` for any
    /// leading tokens (e.g. `unsafe`, a visibility) that parser recovery split
    /// off into a preceding error node. `original_file` is used throughout to
    /// remap fake-ident nodes back to their unmodified counterparts.
    fn classify_name_ref(
        sema: &Semantics<'_, RootDatabase>,
        original_file: &SyntaxNode,
        name_ref: ast::NameRef,
        parent: SyntaxNode,
    ) -> Option<(NameRefContext, QualifierCtx)> {
        // The name ref as it exists in the original (unmodified) file, if any.
        let nameref = find_node_at_offset(&original_file, name_ref.syntax().text_range().start());

        // Helper to package a kind with the remapped name ref and an empty qualifier ctx.
        let make_res =
            |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());

        // Record-literal field position (`Foo { bar$0 }`) — not a path, handle early.
        if let Some(record_field) = ast::RecordExprField::for_field_name(&name_ref) {
            let dot_prefix = previous_non_trivia_token(name_ref.syntax().clone())
                .map_or(false, |it| T![.] == it.kind());

            return find_node_in_file_compensated(
                sema,
                original_file,
                &record_field.parent_record_lit(),
            )
            .map(|expr| NameRefKind::RecordExpr { expr, dot_prefix })
            .map(make_res);
        }
        // Record-pattern field position (`Foo { bar$0 } =>`) — also not a path.
        if let Some(record_field) = ast::RecordPatField::for_field_name_ref(&name_ref) {
            let kind = NameRefKind::Pattern(PatternContext {
                param_ctx: None,
                has_type_ascription: false,
                ref_token: None,
                mut_token: None,
                record_pat: find_node_in_file_compensated(
                    sema,
                    original_file,
                    &record_field.parent_record_pat(),
                ),
                ..pattern_context_for(
                    sema,
                    original_file,
                    record_field.parent_record_pat().clone().into(),
                )
            });
            return Some(make_res(kind));
        }

        // Everything below deals with path-like positions; field/method access is
        // classified as `DotAccess` right here and returns early.
        let segment = match_ast! {
            match parent {
                ast::PathSegment(segment) => segment,
                ast::FieldExpr(field) => {
                    let receiver = find_opt_node_in_file(original_file, field.expr());
                    // `1.$0` parses as a float literal ending in `.`; flag that so
                    // completions can compensate.
                    let receiver_is_ambiguous_float_literal = match &receiver {
                        Some(ast::Expr::Literal(l)) => matches! {
                            l.kind(),
                            ast::LiteralKind::FloatNumber { .. } if l.syntax().last_token().map_or(false, |it| it.text().ends_with('.'))
                        },
                        _ => false,
                    };
                    let kind = NameRefKind::DotAccess(DotAccess {
                        receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
                        kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal },
                        receiver
                    });
                    return Some(make_res(kind));
                },
                ast::MethodCallExpr(method) => {
                    let receiver = find_opt_node_in_file(original_file, method.receiver());
                    let kind = NameRefKind::DotAccess(DotAccess {
                        receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
                        kind: DotAccessKind::Method { has_parens: method.arg_list().map_or(false, |it| it.l_paren_token().is_some()) },
                        receiver
                    });
                    return Some(make_res(kind));
                },
                _ => return None,
            }
        };

        let path = segment.parent_path();
        let original_path = find_node_in_file_compensated(sema, original_file, &path);

        // Start from a default item-position path context; `kind` and the other
        // fields are refined below.
        let mut path_ctx = PathCompletionCtx {
            has_call_parens: false,
            has_macro_bang: false,
            qualified: Qualified::No,
            parent: None,
            path: path.clone(),
            original_path,
            kind: PathKind::Item { kind: ItemListKind::SourceFile },
            has_type_args: false,
            use_tree_parent: false,
        };

        // Whether the node is directly inside a statement list / expression statement.
        let is_in_block = |it: &SyntaxNode| {
            it.parent()
                .map(|node| {
                    ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind())
                })
                .unwrap_or(false)
        };
        // The record expression two ancestors up, remapped to the original file,
        // if any — used for functional-update (`..`) positions.
        let func_update_record = |syn: &SyntaxNode| {
            if let Some(record_expr) = syn.ancestors().nth(2).and_then(ast::RecordExpr::cast) {
                find_node_in_file_compensated(sema, original_file, &record_expr)
            } else {
                None
            }
        };
        // Whether the previous non-trivia sibling is an expression statement
        // holding an `if` expression (relevant for e.g. `else` completions).
        let after_if_expr = |node: SyntaxNode| {
            let prev_expr = (|| {
                let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?;
                ast::ExprStmt::cast(prev_sibling)?.expr()
            })();
            matches!(prev_expr, Some(ast::Expr::IfExpr(_)))
        };

        // We do not want to generate path completions when we are sandwiched between an item decl signature and its body.
        // ex. trait Foo $0 {}
        // in these cases parser recovery usually kicks in for our inserted identifier, causing it
        // to either be parsed as an ExprStmt or a MacroCall, depending on whether it is in a block
        // expression or an item list.
        // The following code checks if the body is missing, if it is we either cut off the body
        // from the item or it was missing in the first place
        let inbetween_body_and_decl_check = |node: SyntaxNode| {
            if let Some(NodeOrToken::Node(n)) =
                syntax::algo::non_trivia_sibling(node.into(), syntax::Direction::Prev)
            {
                if let Some(item) = ast::Item::cast(n) {
                    // An item counts as "in between" when its body/list part is absent.
                    let is_inbetween = match &item {
                        ast::Item::Const(it) => it.body().is_none(),
                        ast::Item::Enum(it) => it.variant_list().is_none(),
                        ast::Item::ExternBlock(it) => it.extern_item_list().is_none(),
                        ast::Item::Fn(it) => it.body().is_none(),
                        ast::Item::Impl(it) => it.assoc_item_list().is_none(),
                        ast::Item::Module(it) => it.item_list().is_none(),
                        ast::Item::Static(it) => it.body().is_none(),
                        ast::Item::Struct(it) => it.field_list().is_none(),
                        ast::Item::Trait(it) => it.assoc_item_list().is_none(),
                        ast::Item::TypeAlias(it) => it.ty().is_none(),
                        ast::Item::Union(it) => it.record_field_list().is_none(),
                        _ => false,
                    };
                    if is_inbetween {
                        return Some(item);
                    }
                }
            }
            None
        };

        // Determine the syntactic location of a type path (ascriptions, return
        // types, impl headers, bounds, generic args, tuple fields, …).
        let type_location = |node: &SyntaxNode| {
            let parent = node.parent()?;
            let res = match_ast! {
                match parent {
                    ast::Const(it) => {
                        let name = find_opt_node_in_file(original_file, it.name())?;
                        let original = ast::Const::cast(name.syntax().parent()?)?;
                        TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
                    },
                    ast::RetType(it) => {
                        if it.thin_arrow_token().is_none() {
                            return None;
                        }
                        // The return type belongs either to a fn or a closure.
                        let parent = match ast::Fn::cast(parent.parent()?) {
                            Some(x) => x.param_list(),
                            None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(),
                        };

                        let parent = find_opt_node_in_file(original_file, parent)?.syntax().parent()?;
                        TypeLocation::TypeAscription(TypeAscriptionTarget::RetType(match_ast! {
                            match parent {
                                ast::ClosureExpr(it) => {
                                    it.body()
                                },
                                ast::Fn(it) => {
                                    it.body().map(ast::Expr::BlockExpr)
                                },
                                _ => return None,
                            }
                        }))
                    },
                    ast::Param(it) => {
                        if it.colon_token().is_none() {
                            return None;
                        }
                        TypeLocation::TypeAscription(TypeAscriptionTarget::FnParam(find_opt_node_in_file(original_file, it.pat())))
                    },
                    ast::LetStmt(it) => {
                        if it.colon_token().is_none() {
                            return None;
                        }
                        TypeLocation::TypeAscription(TypeAscriptionTarget::Let(find_opt_node_in_file(original_file, it.pat())))
                    },
                    ast::Impl(it) => {
                        match it.trait_() {
                            Some(t) if t.syntax() == node => TypeLocation::ImplTrait,
                            _ => match it.self_ty() {
                                Some(t) if t.syntax() == node => TypeLocation::ImplTarget,
                                _ => return None,
                            },
                        }
                    },
                    ast::TypeBound(_) => TypeLocation::TypeBound,
                    // is this case needed?
                    ast::TypeBoundList(_) => TypeLocation::TypeBound,
                    ast::GenericArg(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, it.syntax().parent().and_then(ast::GenericArgList::cast))),
                    // is this case needed?
                    ast::GenericArgList(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, Some(it))),
                    ast::TupleField(_) => TypeLocation::TupleField,
                    _ => return None,
                }
            };
            Some(res)
        };

        // Whether the expression is the condition of an enclosing `if`/`while`.
        let is_in_condition = |it: &ast::Expr| {
            (|| {
                let parent = it.syntax().parent()?;
                if let Some(expr) = ast::WhileExpr::cast(parent.clone()) {
                    Some(expr.condition()? == *it)
                } else if let Some(expr) = ast::IfExpr::cast(parent) {
                    Some(expr.condition()? == *it)
                } else {
                    None
                }
            })()
            .unwrap_or(false)
        };

        // Build the `PathKind::Expr` context for a path in expression position,
        // gathering all the flags expression completions care about.
        let make_path_kind_expr = |expr: ast::Expr| {
            let it = expr.syntax();
            let in_block_expr = is_in_block(it);
            let in_loop_body = is_in_loop_body(it);
            let after_if_expr = after_if_expr(it.clone());
            let ref_expr_parent =
                path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast);
            // Compute the return type of the innermost enclosing fn/closure and
            // the enclosing fn's self param, walking ancestors through macros.
            let (innermost_ret_ty, self_param) = {
                let find_ret_ty = |it: SyntaxNode| {
                    if let Some(item) = ast::Item::cast(it.clone()) {
                        match item {
                            ast::Item::Fn(f) => {
                                Some(sema.to_def(&f).map(|it| it.ret_type(sema.db)))
                            }
                            // Macro calls are transparent: keep walking upwards.
                            ast::Item::MacroCall(_) => None,
                            // Any other item ends the search without a return type.
                            _ => Some(None),
                        }
                    } else {
                        let expr = ast::Expr::cast(it)?;
                        let callable = match expr {
                            // FIXME
                            // ast::Expr::BlockExpr(b) if b.async_token().is_some() || b.try_token().is_some() => sema.type_of_expr(b),
                            ast::Expr::ClosureExpr(_) => sema.type_of_expr(&expr),
                            _ => return None,
                        };
                        Some(
                            callable
                                .and_then(|c| c.adjusted().as_callable(sema.db))
                                .map(|it| it.return_type()),
                        )
                    }
                };
                let find_fn_self_param = |it| match it {
                    ast::Item::Fn(fn_) => {
                        Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db)))
                    }
                    // Macro calls are transparent: keep walking upwards.
                    ast::Item::MacroCall(_) => None,
                    _ => Some(None),
                };

                match find_node_in_file_compensated(sema, original_file, &expr) {
                    Some(it) => {
                        let innermost_ret_ty = sema
                            .ancestors_with_macros(it.syntax().clone())
                            .find_map(find_ret_ty)
                            .flatten();

                        let self_param = sema
                            .ancestors_with_macros(it.syntax().clone())
                            .filter_map(ast::Item::cast)
                            .find_map(find_fn_self_param)
                            .flatten();
                        (innermost_ret_ty, self_param)
                    }
                    None => (None, None),
                }
            };
            let is_func_update = func_update_record(it);
            let in_condition = is_in_condition(&expr);
            // A `let` statement missing its semicolon (still being typed).
            let incomplete_let = it
                .parent()
                .and_then(ast::LetStmt::cast)
                .map_or(false, |it| it.semicolon_token().is_none());
            let impl_ = fetch_immediate_impl(sema, original_file, expr.syntax());

            // In a match arm, we are in the guard when the cursor precedes `=>`
            // (or the arrow is missing entirely).
            let in_match_guard = match it.parent().and_then(ast::MatchArm::cast) {
                Some(arm) => arm
                    .fat_arrow_token()
                    .map_or(true, |arrow| it.text_range().start() < arrow.text_range().start()),
                None => false,
            };

            PathKind::Expr {
                expr_ctx: ExprCtx {
                    in_block_expr,
                    in_loop_body,
                    after_if_expr,
                    in_condition,
                    ref_expr_parent,
                    is_func_update,
                    innermost_ret_ty,
                    self_param,
                    incomplete_let,
                    impl_,
                    in_match_guard,
                },
            }
        };
        // Build the `PathKind::Type` context for a path in type position.
        let make_path_kind_type = |ty: ast::Type| {
            let location = type_location(ty.syntax());
            PathKind::Type { location: location.unwrap_or(TypeLocation::Other) }
        };

        // Classify a path wrapped in a macro call; note this mutates `path_ctx`
        // to record the `!` token.
        let mut kind_macro_call = |it: ast::MacroCall| {
            path_ctx.has_macro_bang = it.excl_token().is_some();
            let parent = it.syntax().parent()?;
            // Any path in an item list will be treated as a macro call by the parser
            let kind = match_ast! {
                match parent {
                    ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
                    ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
                    ast::MacroType(ty) => make_path_kind_type(ty.into()),
                    ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module },
                    ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() {
                        Some(it) => match_ast! {
                            match it {
                                ast::Trait(_) => ItemListKind::Trait,
                                ast::Impl(it) => if it.trait_().is_some() {
                                    ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it))
                                } else {
                                    ItemListKind::Impl
                                },
                                _ => return None
                            }
                        },
                        None => return None,
                    } },
                    ast::ExternItemList(_) => PathKind::Item { kind: ItemListKind::ExternBlock },
                    ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile },
                    _ => return None,
                }
            };
            Some(kind)
        };
        // Classify a path inside an attribute's meta item.
        let make_path_kind_attr = |meta: ast::Meta| {
            let attr = meta.parent_attr()?;
            let kind = attr.kind();
            let attached = attr.syntax().parent()?;
            // A trailing outer attribute has nothing after it to annotate.
            let is_trailing_outer_attr = kind != AttrKind::Inner
                && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next)
                    .is_none();
            let annotated_item_kind =
                if is_trailing_outer_attr { None } else { Some(attached.kind()) };
            Some(PathKind::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind } })
        };

        // Infer the path kind
        let parent = path.syntax().parent()?;
        let kind = match_ast! {
            match parent {
                ast::PathType(it) => make_path_kind_type(it.into()),
                ast::PathExpr(it) => {
                    // Parser recovery can turn "signature $0 { }" into an ExprStmt;
                    // detect that and offer the appropriate keyword instead.
                    if let Some(p) = it.syntax().parent() {
                        if ast::ExprStmt::can_cast(p.kind()) {
                            if let Some(kind) = inbetween_body_and_decl_check(p) {
                                return Some(make_res(NameRefKind::Keyword(kind)));
                            }
                        }
                    }

                    path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));

                    make_path_kind_expr(it.into())
                },
                ast::TupleStructPat(it) => {
                    path_ctx.has_call_parens = true;
                    PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
                },
                ast::RecordPat(it) => {
                    path_ctx.has_call_parens = true;
                    PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
                },
                ast::PathPat(it) => {
                    PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
                },
                ast::MacroCall(it) => {
                    // A macro call in this position is usually a result of parsing recovery, so check that
                    if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) {
                        return Some(make_res(NameRefKind::Keyword(kind)));
                    }

                    kind_macro_call(it)?
                },
                ast::Meta(meta) => make_path_kind_attr(meta)?,
                ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
                ast::UseTree(_) => PathKind::Use,
                // completing inside a qualifier
                ast::Path(parent) => {
                    path_ctx.parent = Some(parent.clone());
                    // Classify by the parent of the outermost path this segment is part of.
                    let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?;
                    match_ast! {
                        match parent {
                            ast::PathType(it) => make_path_kind_type(it.into()),
                            ast::PathExpr(it) => {
                                path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));

                                make_path_kind_expr(it.into())
                            },
                            ast::TupleStructPat(it) => {
                                path_ctx.has_call_parens = true;
                                PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
                            },
                            ast::RecordPat(it) => {
                                path_ctx.has_call_parens = true;
                                PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
                            },
                            ast::PathPat(it) => {
                                PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
                            },
                            ast::MacroCall(it) => {
                                kind_macro_call(it)?
                            },
                            ast::Meta(meta) => make_path_kind_attr(meta)?,
                            ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
                            ast::UseTree(_) => PathKind::Use,
                            ast::RecordExpr(it) => make_path_kind_expr(it.into()),
                            _ => return None,
                        }
                    }
                },
                ast::RecordExpr(it) => make_path_kind_expr(it.into()),
                _ => return None,
            }
        };

        path_ctx.kind = kind;
        path_ctx.has_type_args = segment.generic_arg_list().is_some();

        // calculate the qualifier context
        if let Some((qualifier, use_tree_parent)) = path_or_use_tree_qualifier(&path) {
            path_ctx.use_tree_parent = use_tree_parent;
            if !use_tree_parent && segment.coloncolon_token().is_some() {
                // Leading `::` — an absolute path.
                path_ctx.qualified = Qualified::Absolute;
            } else {
                // Remap the qualifier into the original file before resolving it.
                let qualifier = qualifier
                    .segment()
                    .and_then(|it| find_node_in_file(original_file, &it))
                    .map(|it| it.parent_path());
                if let Some(qualifier) = qualifier {
                    // `<Ty as Trait>::` / `<Ty>::` style qualifiers.
                    let type_anchor = match qualifier.segment().and_then(|it| it.kind()) {
                        Some(ast::PathSegmentKind::Type {
                            type_ref: Some(type_ref),
                            trait_ref,
                        }) if qualifier.qualifier().is_none() => Some((type_ref, trait_ref)),
                        _ => None,
                    };

                    path_ctx.qualified = if let Some((ty, trait_ref)) = type_anchor {
                        let ty = match ty {
                            ast::Type::InferType(_) => None,
                            ty => sema.resolve_type(&ty),
                        };
                        let trait_ = trait_ref.and_then(|it| sema.resolve_trait(&it.path()?));
                        Qualified::TypeAnchor { ty, trait_ }
                    } else {
                        let res = sema.resolve_path(&qualifier);

                        // For understanding how and why super_chain_len is calculated the way it
                        // is check the documentation at it's definition
                        let mut segment_count = 0;
                        let super_count =
                            iter::successors(Some(qualifier.clone()), |p| p.qualifier())
                                .take_while(|p| {
                                    p.segment()
                                        .and_then(|s| {
                                            segment_count += 1;
                                            s.super_token()
                                        })
                                        .is_some()
                                })
                                .count();

                        let super_chain_len =
                            if segment_count > super_count { None } else { Some(super_count) };

                        Qualified::With { path: qualifier, resolution: res, super_chain_len }
                    }
                };
            }
        } else if let Some(segment) = path.segment() {
            if segment.coloncolon_token().is_some() {
                path_ctx.qualified = Qualified::Absolute;
            }
        }

        let mut qualifier_ctx = QualifierCtx::default();
        if path_ctx.is_trivial_path() {
            // fetch the full expression that may have qualifiers attached to it
            let top_node = match path_ctx.kind {
                PathKind::Expr { expr_ctx: ExprCtx { in_block_expr: true, .. } } => {
                    parent.ancestors().find(|it| ast::PathExpr::can_cast(it.kind())).and_then(|p| {
                        let parent = p.parent()?;
                        if ast::StmtList::can_cast(parent.kind()) {
                            Some(p)
                        } else if ast::ExprStmt::can_cast(parent.kind()) {
                            Some(parent)
                        } else {
                            None
                        }
                    })
                }
                PathKind::Item { .. } => {
                    parent.ancestors().find(|it| ast::MacroCall::can_cast(it.kind()))
                }
                _ => None,
            };
            if let Some(top) = top_node {
                // Parser recovery dumps stray leading tokens (`unsafe`, `pub`, …)
                // into an ERROR node preceding the expression; recover them here.
                if let Some(NodeOrToken::Node(error_node)) =
                    syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev)
                {
                    if error_node.kind() == SyntaxKind::ERROR {
                        qualifier_ctx.unsafe_tok = error_node
                            .children_with_tokens()
                            .filter_map(NodeOrToken::into_token)
                            .find(|it| it.kind() == T![unsafe]);
                        qualifier_ctx.vis_node =
                            error_node.children().find_map(ast::Visibility::cast);
                    }
                }

                if let PathKind::Item { .. } = path_ctx.kind {
                    if qualifier_ctx.none() {
                        if let Some(t) = top.first_token() {
                            if let Some(prev) = t
                                .prev_token()
                                .and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev))
                            {
                                if ![T![;], T!['}'], T!['{']].contains(&prev.kind()) {
                                    // This was inferred to be an item position path, but it seems
                                    // to be part of some other broken node which leaked into an item
                                    // list
                                    return None;
                                }
                            }
                        }
                    }
                }
            }
        }
        Some((NameRefContext { nameref, kind: NameRefKind::Path(path_ctx) }, qualifier_ctx))
    }
 +}
 +
// Builds a `PatternContext` for a completion inside `pat`: decides whether the
// position is refutable, whether the pattern has a type ascription, and — for
// function/closure parameters — recovers the real `ParamContext` from the
// original (non-fake-ident) file.
 +fn pattern_context_for(
 +    sema: &Semantics<'_, RootDatabase>,
 +    original_file: &SyntaxNode,
 +    pat: ast::Pat,
 +) -> PatternContext {
 +    let mut param_ctx = None;
    // Walk up past all nested pattern nodes to the first non-pattern ancestor;
    // that node determines refutability.
    // NOTE(review): `.skip_while(..).next()` is the clippy `skip_while_next`
    // pattern — `.find(|it| !ast::Pat::can_cast(it.kind()))` would be the
    // idiomatic spelling (left unchanged here).
 +    let (refutability, has_type_ascription) =
 +    pat
 +        .syntax()
 +        .ancestors()
 +        .skip_while(|it| ast::Pat::can_cast(it.kind()))
 +        .next()
 +        .map_or((PatternRefutability::Irrefutable, false), |node| {
 +            let refutability = match_ast! {
 +                match node {
 +                    ast::LetStmt(let_) => return (PatternRefutability::Irrefutable, let_.ty().is_some()),
 +                    ast::Param(param) => {
 +                        let has_type_ascription = param.ty().is_some();
                        // The pattern lives in the speculative file (with the fake
                        // ident inserted); map the param list back into the original
                        // file so later stages see real syntax nodes.
 +                        param_ctx = (|| {
 +                            let fake_param_list = param.syntax().parent().and_then(ast::ParamList::cast)?;
 +                            let param_list = find_node_in_file_compensated(sema, original_file, &fake_param_list)?;
 +                            let param_list_owner = param_list.syntax().parent()?;
 +                            let kind = match_ast! {
 +                                match param_list_owner {
 +                                    ast::ClosureExpr(closure) => ParamKind::Closure(closure),
 +                                    ast::Fn(fn_) => ParamKind::Function(fn_),
 +                                    _ => return None,
 +                                }
 +                            };
 +                            Some(ParamContext {
 +                                param_list, param, kind
 +                            })
 +                        })();
 +                        return (PatternRefutability::Irrefutable, has_type_ascription)
 +                    },
 +                    ast::MatchArm(_) => PatternRefutability::Refutable,
 +                    ast::LetExpr(_) => PatternRefutability::Refutable,
 +                    ast::ForExpr(_) => PatternRefutability::Irrefutable,
 +                    _ => PatternRefutability::Irrefutable,
 +                }
 +            };
            // Non-early-return arms never carry a type ascription.
 +            (refutability, false)
 +        });
    // `ref`/`mut` are only observable on identifier patterns.
 +    let (ref_token, mut_token) = match &pat {
 +        ast::Pat::IdentPat(it) => (it.ref_token(), it.mut_token()),
 +        _ => (None, None),
 +    };
 +
 +    PatternContext {
 +        refutability,
 +        param_ctx,
 +        has_type_ascription,
 +        parent_pat: pat.syntax().parent().and_then(ast::Pat::cast),
 +        mut_token,
 +        ref_token,
        // Record-pattern specifics are filled in elsewhere, not here.
 +        record_pat: None,
 +        impl_: fetch_immediate_impl(sema, original_file, pat.syntax()),
 +    }
 +}
 +
// Returns the `impl` block immediately enclosing `node`, looking through at most
// one associated-item level (const/fn/type alias). Macro-call items are skipped;
// any other intervening item means the node is not "immediately" inside an impl.
 +fn fetch_immediate_impl(
 +    sema: &Semantics<'_, RootDatabase>,
 +    original_file: &SyntaxNode,
 +    node: &SyntaxNode,
 +) -> Option<ast::Impl> {
 +    let mut ancestors = ancestors_in_file_compensated(sema, original_file, node)?
 +        .filter_map(ast::Item::cast)
 +        .filter(|it| !matches!(it, ast::Item::MacroCall(_)));
 +
    // First item ancestor: either the impl itself, or an assoc item we may look past.
 +    match ancestors.next()? {
 +        ast::Item::Const(_) | ast::Item::Fn(_) | ast::Item::TypeAlias(_) => (),
 +        ast::Item::Impl(it) => return Some(it),
 +        _ => return None,
 +    }
    // Second item ancestor must then be the impl, otherwise there is none.
 +    match ancestors.next()? {
 +        ast::Item::Impl(it) => Some(it),
 +        _ => None,
 +    }
 +}
 +
 +/// Attempts to find `node` inside `syntax` via `node`'s text range.
 +/// If the fake identifier has been inserted after this node or inside of this node use the `_compensated` version instead.
// Convenience wrapper over `find_node_in_file` for an optional node.
 +fn find_opt_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: Option<N>) -> Option<N> {
 +    find_node_in_file(syntax, &node?)
 +}
 +
 +/// Attempts to find `node` inside `syntax` via `node`'s text range.
 +/// If the fake identifier has been inserted after this node or inside of this node use the `_compensated` version instead.
 +fn find_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
 +    let syntax_range = syntax.text_range();
 +    let range = node.syntax().text_range();
    // The node may extend past `syntax` (different file revisions) — clamp first.
 +    let intersection = range.intersect(syntax_range)?;
 +    syntax.covering_element(intersection).ancestors().find_map(N::cast)
 +}
 +
 +/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
 +/// for the offset introduced by the fake ident.
 +/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
 +fn find_node_in_file_compensated<N: AstNode>(
 +    sema: &Semantics<'_, RootDatabase>,
 +    in_file: &SyntaxNode,
 +    node: &N,
 +) -> Option<N> {
    // Delegate the range arithmetic to `ancestors_in_file_compensated`, then cast.
 +    ancestors_in_file_compensated(sema, in_file, node.syntax())?.find_map(N::cast)
 +}
 +
// Maps `node` (from the speculative file containing the fake completion ident)
// back into `in_file` by shrinking its range by `COMPLETION_MARKER.len()`, then
// yields that node's ancestors (macro-aware via `Semantics`).
 +fn ancestors_in_file_compensated<'sema>(
 +    sema: &'sema Semantics<'_, RootDatabase>,
 +    in_file: &SyntaxNode,
 +    node: &SyntaxNode,
 +) -> Option<impl Iterator<Item = SyntaxNode> + 'sema> {
 +    let syntax_range = in_file.text_range();
 +    let range = node.text_range();
    // Drop the fake ident's length off the end; bail if the node was shorter
    // than the marker itself.
 +    let end = range.end().checked_sub(TextSize::try_from(COMPLETION_MARKER.len()).ok()?)?;
 +    if end < range.start() {
 +        return None;
 +    }
 +    let range = TextRange::new(range.start(), end);
 +    // our inserted ident could cause `range` to go outside of the original syntax, so cap it
 +    let intersection = range.intersect(syntax_range)?;
 +    let node = match in_file.covering_element(intersection) {
 +        NodeOrToken::Node(node) => node,
 +        NodeOrToken::Token(tok) => tok.parent()?,
 +    };
 +    Some(sema.ancestors_with_macros(node))
 +}
 +
 +/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
 +/// for the offset introduced by the fake ident.
 +/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
// Convenience wrapper over `find_node_in_file_compensated` for an optional node.
 +fn find_opt_node_in_file_compensated<N: AstNode>(
 +    sema: &Semantics<'_, RootDatabase>,
 +    syntax: &SyntaxNode,
 +    node: Option<N>,
 +) -> Option<N> {
 +    find_node_in_file_compensated(sema, syntax, &node?)
 +}
 +
// Returns the effective qualifier of `path` and whether it came from a
// surrounding use-tree: `(qual, false)` for an inline qualifier, or the parent
// use-tree's path with `true` for paths inside a `use a::{b, c}` list.
 +fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {
 +    if let Some(qual) = path.qualifier() {
 +        return Some((qual, false));
 +    }
 +    let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
 +    let use_tree = use_tree_list.syntax().parent().and_then(ast::UseTree::cast)?;
 +    Some((use_tree.path()?, true))
 +}
 +
// Heuristically detects whether `element` sits where the `in` keyword of a
// `for` loop belongs — including broken trees where `in` was not yet typed.
 +pub(crate) fn is_in_token_of_for_loop(element: SyntaxElement) -> bool {
 +    // oh my ...
 +    (|| {
 +        let syntax_token = element.into_token()?;
 +        let range = syntax_token.text_range();
 +        let for_expr = syntax_token.parent_ancestors().find_map(ast::ForExpr::cast)?;
 +
 +        // check if the current token is the `in` token of a for loop
 +        if let Some(token) = for_expr.in_token() {
 +            return Some(syntax_token == token);
 +        }
        // No `in` token in the tree: infer the position from the pattern instead.
 +        let pat = for_expr.pat()?;
 +        if range.end() < pat.syntax().text_range().end() {
 +            // if we are inside or before the pattern we can't be at the `in` token position
 +            return None;
 +        }
 +        let next_sibl = next_non_trivia_sibling(pat.syntax().clone().into())?;
 +        Some(match next_sibl {
 +            // the loop body is some node, if our token is at the start we are at the `in` position,
 +            // otherwise we could be in a recovered expression, we don't wanna ruin completions there
 +            syntax::NodeOrToken::Node(n) => n.text_range().start() == range.start(),
 +            // the loop body consists of a single token, if we are this we are certainly at the `in` token position
 +            syntax::NodeOrToken::Token(t) => t == syntax_token,
 +        })
 +    })()
 +    .unwrap_or(false)
 +}
 +
// Regression test: cursor after `for i ` (no `in` yet) is recognized as the
// `in`-token position.
 +#[test]
 +fn test_for_is_prev2() {
 +    crate::tests::check_pattern_is_applicable(r"fn __() { for i i$0 }", is_in_token_of_for_loop);
 +}
 +
// True if `node` lies inside the body of a `for`/`while`/`loop` expression,
// without crossing a function or closure boundary (those reset the loop scope).
 +pub(crate) fn is_in_loop_body(node: &SyntaxNode) -> bool {
 +    node.ancestors()
 +        .take_while(|it| it.kind() != SyntaxKind::FN && it.kind() != SyntaxKind::CLOSURE_EXPR)
 +        .find_map(|it| {
 +            let loop_body = match_ast! {
 +                match it {
 +                    ast::ForExpr(it) => it.loop_body(),
 +                    ast::WhileExpr(it) => it.loop_body(),
 +                    ast::LoopExpr(it) => it.loop_body(),
 +                    _ => None,
 +                }
 +            };
            // Being inside the loop *expression* is not enough — the node must be
            // within the body block (e.g. not in the loop condition).
 +            loop_body.filter(|it| it.syntax().text_range().contains_range(node.text_range()))
 +        })
 +        .is_some()
 +}
 +
// Returns the nearest preceding token that is not trivia (whitespace/comments),
// starting from the element's first token (for nodes) or the token itself.
 +fn previous_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken> {
 +    let mut token = match e.into() {
 +        SyntaxElement::Node(n) => n.first_token()?,
 +        SyntaxElement::Token(t) => t,
 +    }
 +    .prev_token();
 +    while let Some(inner) = token {
 +        if !inner.kind().is_trivia() {
 +            return Some(inner);
 +        } else {
 +            token = inner.prev_token();
 +        }
 +    }
 +    None
 +}
 +
// Returns the next sibling element (node or token) that is not trivia.
 +fn next_non_trivia_sibling(ele: SyntaxElement) -> Option<SyntaxElement> {
 +    let mut e = ele.next_sibling_or_token();
 +    while let Some(inner) = e {
 +        if !inner.kind().is_trivia() {
 +            return Some(inner);
 +        } else {
 +            e = inner.next_sibling_or_token();
 +        }
 +    }
 +    None
 +}
index a2cf6d68e5b3a08529ece70ce8b455a232cf5010,0000000000000000000000000000000000000000..86302cb0678f198687dfe2b4eed26997c642efa6
mode 100644,000000..100644
--- /dev/null
@@@ -1,1913 -1,0 +1,1934 @@@
-         if let Some(ref_match) = compute_ref_match(completion, &ty) {
-             item.ref_match(ref_match, path_ctx.path.syntax().text_range().start());
-         }
 +//! `render` module provides utilities for rendering completion suggestions
 +//! into code pieces that will be presented to user.
 +
 +pub(crate) mod macro_;
 +pub(crate) mod function;
 +pub(crate) mod const_;
 +pub(crate) mod pattern;
 +pub(crate) mod type_alias;
 +pub(crate) mod variant;
 +pub(crate) mod union_literal;
 +pub(crate) mod literal;
 +
 +use hir::{AsAssocItem, HasAttrs, HirDisplay, ScopeDef};
 +use ide_db::{
 +    helpers::item_name, imports::import_assets::LocatedImport, RootDatabase, SnippetCap, SymbolKind,
 +};
 +use syntax::{AstNode, SmolStr, SyntaxKind, TextRange};
 +
 +use crate::{
 +    context::{DotAccess, PathCompletionCtx, PathKind, PatternContext},
 +    item::{Builder, CompletionRelevanceTypeMatch},
 +    render::{
 +        function::render_fn,
 +        literal::render_variant_lit,
 +        macro_::{render_macro, render_macro_pat},
 +    },
 +    CompletionContext, CompletionItem, CompletionItemKind, CompletionRelevance,
 +};
 +/// Interface for data and methods required for items rendering.
 +#[derive(Debug, Clone)]
 +pub(crate) struct RenderContext<'a> {
    // The completion session this render belongs to.
 +    completion: &'a CompletionContext<'a>,
    // Whether the rendered item resolves to a private-but-editable definition.
 +    is_private_editable: bool,
    // Import to insert alongside the completion, if any.
 +    import_to_add: Option<LocatedImport>,
 +}
 +
 +impl<'a> RenderContext<'a> {
 +    pub(crate) fn new(completion: &'a CompletionContext<'a>) -> RenderContext<'a> {
 +        RenderContext { completion, is_private_editable: false, import_to_add: None }
 +    }
 +
    // Builder-style setter: marks the item as private-editable.
 +    pub(crate) fn private_editable(mut self, private_editable: bool) -> Self {
 +        self.is_private_editable = private_editable;
 +        self
 +    }
 +
    // Builder-style setter: attaches an import edit to the render.
 +    pub(crate) fn import_to_add(mut self, import_to_add: Option<LocatedImport>) -> Self {
 +        self.import_to_add = import_to_add;
 +        self
 +    }
 +
 +    fn snippet_cap(&self) -> Option<SnippetCap> {
 +        self.completion.config.snippet_cap
 +    }
 +
 +    fn db(&self) -> &'a RootDatabase {
 +        self.completion.db
 +    }
 +
 +    fn source_range(&self) -> TextRange {
 +        self.completion.source_range()
 +    }
 +
    // Base relevance derived from context flags alone (type/name matches are
    // computed per item by the callers).
 +    fn completion_relevance(&self) -> CompletionRelevance {
 +        CompletionRelevance {
 +            is_private_editable: self.is_private_editable,
 +            requires_import: self.import_to_add.is_some(),
 +            ..Default::default()
 +        }
 +    }
 +
    // True when the cursor token is the `!` of a macro call.
 +    fn is_immediately_after_macro_bang(&self) -> bool {
 +        self.completion.token.kind() == SyntaxKind::BANG
 +            && self
 +                .completion
 +                .token
 +                .parent()
 +                .map_or(false, |it| it.kind() == SyntaxKind::MACRO_CALL)
 +    }
 +
 +    fn is_deprecated(&self, def: impl HasAttrs) -> bool {
 +        let attrs = def.attrs(self.db());
 +        attrs.by_key("deprecated").exists()
 +    }
 +
    // An assoc item counts as deprecated if it or its containing trait
    // (or trait impl) carries `#[deprecated]`.
 +    fn is_deprecated_assoc_item(&self, as_assoc_item: impl AsAssocItem) -> bool {
 +        let db = self.db();
 +        let assoc = match as_assoc_item.as_assoc_item(db) {
 +            Some(assoc) => assoc,
 +            None => return false,
 +        };
 +
 +        let is_assoc_deprecated = match assoc {
 +            hir::AssocItem::Function(it) => self.is_deprecated(it),
 +            hir::AssocItem::Const(it) => self.is_deprecated(it),
 +            hir::AssocItem::TypeAlias(it) => self.is_deprecated(it),
 +        };
 +        is_assoc_deprecated
 +            || assoc
 +                .containing_trait_or_trait_impl(db)
 +                .map(|trait_| self.is_deprecated(trait_))
 +                .unwrap_or(false)
 +    }
 +
 +    // FIXME: remove this
 +    fn docs(&self, def: impl HasAttrs) -> Option<hir::Documentation> {
 +        def.docs(self.db())
 +    }
 +}
 +
// Renders a struct/union field completion for a dot access, optionally prefixed
// with the receiver name (`recv.field`). Sets type/name-match relevance and a
// `&`/`&mut` ref-match when the expected type is a reference to the field type.
 +pub(crate) fn render_field(
 +    ctx: RenderContext<'_>,
 +    dot_access: &DotAccess,
 +    receiver: Option<hir::Name>,
 +    field: hir::Field,
 +    ty: &hir::Type,
 +) -> CompletionItem {
 +    let is_deprecated = ctx.is_deprecated(field);
 +    let name = field.name(ctx.db());
    // Label/lookup use the unescaped name; the inserted text uses the escaped
    // (raw-ident-safe) form.
 +    let (name, escaped_name) = (name.unescaped().to_smol_str(), name.to_smol_str());
 +    let mut item = CompletionItem::new(
 +        SymbolKind::Field,
 +        ctx.source_range(),
 +        field_with_receiver(receiver.as_ref(), &name),
 +    );
 +    item.set_relevance(CompletionRelevance {
 +        type_match: compute_type_match(ctx.completion, ty),
 +        exact_name_match: compute_exact_name_match(ctx.completion, name.as_str()),
 +        ..CompletionRelevance::default()
 +    });
 +    item.detail(ty.display(ctx.db()).to_string())
 +        .set_documentation(field.docs(ctx.db()))
 +        .set_deprecated(is_deprecated)
 +        .lookup_by(name.clone());
 +    item.insert_text(field_with_receiver(receiver.as_ref(), &escaped_name));
    // Anchor the ref-match edit at the *original* receiver node, not the
    // speculative one with the fake ident.
 +    if let Some(receiver) = &dot_access.receiver {
 +        if let Some(original) = ctx.completion.sema.original_ast_node(receiver.clone()) {
 +            if let Some(ref_match) = compute_ref_match(ctx.completion, ty) {
 +                item.ref_match(ref_match, original.syntax().text_range().start());
 +            }
 +        }
 +    }
 +    item.build()
 +}
 +
// `field_name` alone, or `receiver.field_name` when a receiver is given.
 +fn field_with_receiver(receiver: Option<&hir::Name>, field_name: &str) -> SmolStr {
 +    receiver
 +        .map_or_else(|| field_name.into(), |receiver| format!("{}.{}", receiver, field_name).into())
 +}
 +
// Renders a positional tuple-field completion (`0`, `1`, …), optionally
// prefixed with the receiver name.
 +pub(crate) fn render_tuple_field(
 +    ctx: RenderContext<'_>,
 +    receiver: Option<hir::Name>,
 +    field: usize,
 +    ty: &hir::Type,
 +) -> CompletionItem {
 +    let mut item = CompletionItem::new(
 +        SymbolKind::Field,
 +        ctx.source_range(),
 +        field_with_receiver(receiver.as_ref(), &field.to_string()),
 +    );
 +    item.detail(ty.display(ctx.db()).to_string()).lookup_by(field.to_string());
 +    item.build()
 +}
 +
// Renders the inferred-type suggestion (e.g. for `let x: $0`); marked
// `is_definite` so it ranks above ordinary completions.
 +pub(crate) fn render_type_inference(
 +    ty_string: String,
 +    ctx: &CompletionContext<'_>,
 +) -> CompletionItem {
 +    let mut builder =
 +        CompletionItem::new(CompletionItemKind::InferredType, ctx.source_range(), ty_string);
 +    builder.set_relevance(CompletionRelevance { is_definite: true, ..Default::default() });
 +    builder.build()
 +}
 +
// Entry point for rendering a resolved name in path position (no import edit).
 +pub(crate) fn render_path_resolution(
 +    ctx: RenderContext<'_>,
 +    path_ctx: &PathCompletionCtx,
 +    local_name: hir::Name,
 +    resolution: ScopeDef,
 +) -> Builder {
 +    render_resolution_path(ctx, path_ctx, local_name, None, resolution)
 +}
 +
// Entry point for rendering a resolved name in pattern position (no import edit).
 +pub(crate) fn render_pattern_resolution(
 +    ctx: RenderContext<'_>,
 +    pattern_ctx: &PatternContext,
 +    local_name: hir::Name,
 +    resolution: ScopeDef,
 +) -> Builder {
 +    render_resolution_pat(ctx, pattern_ctx, local_name, None, resolution)
 +}
 +
// Renders a path completion for an item coming from auto-import; `None` when no
// usable name can be derived for the imported item.
 +pub(crate) fn render_resolution_with_import(
 +    ctx: RenderContext<'_>,
 +    path_ctx: &PathCompletionCtx,
 +    import_edit: LocatedImport,
 +) -> Option<Builder> {
 +    let resolution = ScopeDef::from(import_edit.original_item);
 +    let local_name = scope_def_to_name(resolution, &ctx, &import_edit)?;
 +
 +    Some(render_resolution_path(ctx, path_ctx, local_name, Some(import_edit), resolution))
 +}
 +
// Pattern-position counterpart of `render_resolution_with_import`.
 +pub(crate) fn render_resolution_with_import_pat(
 +    ctx: RenderContext<'_>,
 +    pattern_ctx: &PatternContext,
 +    import_edit: LocatedImport,
 +) -> Option<Builder> {
 +    let resolution = ScopeDef::from(import_edit.original_item);
 +    let local_name = scope_def_to_name(resolution, &ctx, &import_edit)?;
 +    Some(render_resolution_pat(ctx, pattern_ctx, local_name, Some(import_edit), resolution))
 +}
 +
// Derives the display name for an auto-imported item: definition-specific name
// lookups for fn/const/type alias, otherwise the generic `item_name` helper.
 +fn scope_def_to_name(
 +    resolution: ScopeDef,
 +    ctx: &RenderContext<'_>,
 +    import_edit: &LocatedImport,
 +) -> Option<hir::Name> {
 +    Some(match resolution {
 +        ScopeDef::ModuleDef(hir::ModuleDef::Function(f)) => f.name(ctx.completion.db),
 +        ScopeDef::ModuleDef(hir::ModuleDef::Const(c)) => c.name(ctx.completion.db)?,
 +        ScopeDef::ModuleDef(hir::ModuleDef::TypeAlias(t)) => t.name(ctx.completion.db),
 +        _ => item_name(ctx.db(), import_edit.original_item)?,
 +    })
 +}
 +
// Renders a resolution in pattern position. Macros get special pattern
// rendering; everything else falls through to the simple renderer.
// NOTE(review): the profile span label "render_resolution" is shared with
// `render_resolution_path` and `render_resolution_simple_`, which makes the
// three indistinguishable in profiles — consider per-function labels.
 +fn render_resolution_pat(
 +    ctx: RenderContext<'_>,
 +    pattern_ctx: &PatternContext,
 +    local_name: hir::Name,
 +    import_to_add: Option<LocatedImport>,
 +    resolution: ScopeDef,
 +) -> Builder {
 +    let _p = profile::span("render_resolution");
 +    use hir::ModuleDef::*;
 +
 +    match resolution {
 +        ScopeDef::ModuleDef(Macro(mac)) => {
 +            let ctx = ctx.import_to_add(import_to_add);
 +            return render_macro_pat(ctx, pattern_ctx, local_name, mac);
 +        }
 +        _ => (),
 +    }
 +
 +    render_resolution_simple_(ctx, &local_name, import_to_add, resolution)
 +}
 +
// Renders a resolution in path position. Macros, functions, and enum variants
// each have dedicated renderers; other definitions get the simple renderer plus
// path-specific extras (generic-argument snippets, local-variable relevance).
 +fn render_resolution_path(
 +    ctx: RenderContext<'_>,
 +    path_ctx: &PathCompletionCtx,
 +    local_name: hir::Name,
 +    import_to_add: Option<LocatedImport>,
 +    resolution: ScopeDef,
 +) -> Builder {
 +    let _p = profile::span("render_resolution");
 +    use hir::ModuleDef::*;
 +
 +    match resolution {
 +        ScopeDef::ModuleDef(Macro(mac)) => {
 +            let ctx = ctx.import_to_add(import_to_add);
 +            return render_macro(ctx, path_ctx, local_name, mac);
 +        }
 +        ScopeDef::ModuleDef(Function(func)) => {
 +            let ctx = ctx.import_to_add(import_to_add);
 +            return render_fn(ctx, path_ctx, Some(local_name), func);
 +        }
 +        ScopeDef::ModuleDef(Variant(var)) => {
            // Variant literal rendering can decline (returns None); in that
            // case fall through to the simple renderer below, which is why
            // `ctx`/`import_to_add` are cloned here.
 +            let ctx = ctx.clone().import_to_add(import_to_add.clone());
 +            if let Some(item) =
 +                render_variant_lit(ctx, path_ctx, Some(local_name.clone()), var, None)
 +            {
 +                return item;
 +            }
 +        }
 +        _ => (),
 +    }
 +
 +    let completion = ctx.completion;
 +    let cap = ctx.snippet_cap();
 +    let db = completion.db;
 +    let config = completion.config;
 +
 +    let name = local_name.to_smol_str();
 +    let mut item = render_resolution_simple_(ctx, &local_name, import_to_add, resolution);
    // Escaped (raw-ident) names must be inserted escaped, even though the label
    // shows the unescaped form.
 +    if local_name.is_escaped() {
 +        item.insert_text(local_name.to_smol_str());
 +    }
 +    // Add `<>` for generic types
 +    let type_path_no_ty_args = matches!(
 +        path_ctx,
 +        PathCompletionCtx { kind: PathKind::Type { .. }, has_type_args: false, .. }
 +    ) && config.callable.is_some();
 +    if type_path_no_ty_args {
 +        if let Some(cap) = cap {
 +            let has_non_default_type_params = match resolution {
 +                ScopeDef::ModuleDef(hir::ModuleDef::Adt(it)) => it.has_non_default_type_params(db),
 +                ScopeDef::ModuleDef(hir::ModuleDef::TypeAlias(it)) => {
 +                    it.has_non_default_type_params(db)
 +                }
 +                _ => false,
 +            };
 +
 +            if has_non_default_type_params {
 +                cov_mark::hit!(inserts_angle_brackets_for_generics);
 +                item.lookup_by(name.clone())
 +                    .label(SmolStr::from_iter([&name, "<…>"]))
 +                    .trigger_call_info()
 +                    .insert_snippet(cap, format!("{}<$0>", local_name));
 +            }
 +        }
 +    }
    // Locals additionally get their type as detail plus type/name relevance and
    // a possible `&`/`&mut` ref-match.
 +    if let ScopeDef::Local(local) = resolution {
 +        let ty = local.ty(db);
 +        if !ty.is_unknown() {
 +            item.detail(ty.display(db).to_string());
 +        }
 +
 +        item.set_relevance(CompletionRelevance {
 +            type_match: compute_type_match(completion, &ty),
 +            exact_name_match: compute_exact_name_match(completion, &name),
 +            is_local: true,
 +            ..CompletionRelevance::default()
 +        });
 +
++        path_ref_match(completion, path_ctx, &ty, &mut item);
 +    };
 +    item
 +}
 +
// Shared tail of the resolution renderers: builds the bare completion item
// (kind, label, docs, deprecation, base relevance, optional import edit).
 +fn render_resolution_simple_(
 +    ctx: RenderContext<'_>,
 +    local_name: &hir::Name,
 +    import_to_add: Option<LocatedImport>,
 +    resolution: ScopeDef,
 +) -> Builder {
 +    let _p = profile::span("render_resolution");
 +
 +    let db = ctx.db();
 +    let ctx = ctx.import_to_add(import_to_add);
 +    let kind = res_to_kind(resolution);
 +
 +    let mut item =
 +        CompletionItem::new(kind, ctx.source_range(), local_name.unescaped().to_smol_str());
 +    item.set_relevance(ctx.completion_relevance())
 +        .set_documentation(scope_def_docs(db, resolution))
 +        .set_deprecated(scope_def_is_deprecated(&ctx, resolution));
 +
 +    if let Some(import_to_add) = ctx.import_to_add {
 +        item.add_import(import_to_add);
 +    }
 +    item
 +}
 +
// Maps a resolved definition to the `CompletionItemKind` shown in the UI.
 +fn res_to_kind(resolution: ScopeDef) -> CompletionItemKind {
 +    use hir::ModuleDef::*;
 +    match resolution {
 +        ScopeDef::Unknown => CompletionItemKind::UnresolvedReference,
 +        ScopeDef::ModuleDef(Function(_)) => CompletionItemKind::SymbolKind(SymbolKind::Function),
 +        ScopeDef::ModuleDef(Variant(_)) => CompletionItemKind::SymbolKind(SymbolKind::Variant),
 +        ScopeDef::ModuleDef(Macro(_)) => CompletionItemKind::SymbolKind(SymbolKind::Macro),
 +        ScopeDef::ModuleDef(Module(..)) => CompletionItemKind::SymbolKind(SymbolKind::Module),
 +        ScopeDef::ModuleDef(Adt(adt)) => CompletionItemKind::SymbolKind(match adt {
 +            hir::Adt::Struct(_) => SymbolKind::Struct,
 +            hir::Adt::Union(_) => SymbolKind::Union,
 +            hir::Adt::Enum(_) => SymbolKind::Enum,
 +        }),
 +        ScopeDef::ModuleDef(Const(..)) => CompletionItemKind::SymbolKind(SymbolKind::Const),
 +        ScopeDef::ModuleDef(Static(..)) => CompletionItemKind::SymbolKind(SymbolKind::Static),
 +        ScopeDef::ModuleDef(Trait(..)) => CompletionItemKind::SymbolKind(SymbolKind::Trait),
 +        ScopeDef::ModuleDef(TypeAlias(..)) => CompletionItemKind::SymbolKind(SymbolKind::TypeAlias),
 +        ScopeDef::ModuleDef(BuiltinType(..)) => CompletionItemKind::BuiltinType,
 +        ScopeDef::GenericParam(param) => CompletionItemKind::SymbolKind(match param {
 +            hir::GenericParam::TypeParam(_) => SymbolKind::TypeParam,
 +            hir::GenericParam::ConstParam(_) => SymbolKind::ConstParam,
 +            hir::GenericParam::LifetimeParam(_) => SymbolKind::LifetimeParam,
 +        }),
 +        ScopeDef::Local(..) => CompletionItemKind::SymbolKind(SymbolKind::Local),
 +        ScopeDef::Label(..) => CompletionItemKind::SymbolKind(SymbolKind::Label),
 +        ScopeDef::AdtSelfType(..) | ScopeDef::ImplSelfType(..) => {
 +            CompletionItemKind::SymbolKind(SymbolKind::SelfParam)
 +        }
 +    }
 +}
 +
// Documentation for the definition, where it has any (module-level defs only;
// locals, labels, generic params etc. have none).
 +fn scope_def_docs(db: &RootDatabase, resolution: ScopeDef) -> Option<hir::Documentation> {
 +    use hir::ModuleDef::*;
 +    match resolution {
 +        ScopeDef::ModuleDef(Module(it)) => it.docs(db),
 +        ScopeDef::ModuleDef(Adt(it)) => it.docs(db),
 +        ScopeDef::ModuleDef(Variant(it)) => it.docs(db),
 +        ScopeDef::ModuleDef(Const(it)) => it.docs(db),
 +        ScopeDef::ModuleDef(Static(it)) => it.docs(db),
 +        ScopeDef::ModuleDef(Trait(it)) => it.docs(db),
 +        ScopeDef::ModuleDef(TypeAlias(it)) => it.docs(db),
 +        _ => None,
 +    }
 +}
 +
// Whether the resolved definition carries `#[deprecated]` (for assoc items this
// also considers the containing trait — see `is_deprecated_assoc_item`).
 +fn scope_def_is_deprecated(ctx: &RenderContext<'_>, resolution: ScopeDef) -> bool {
 +    match resolution {
 +        ScopeDef::ModuleDef(it) => ctx.is_deprecated_assoc_item(it),
 +        ScopeDef::GenericParam(it) => ctx.is_deprecated(it),
 +        ScopeDef::AdtSelfType(it) => ctx.is_deprecated(it),
 +        _ => false,
 +    }
 +}
 +
// Compares the candidate's type against the context's expected type:
// `Exact` on equality, `CouldUnify` when unification is possible, else `None`.
 +fn compute_type_match(
 +    ctx: &CompletionContext<'_>,
 +    completion_ty: &hir::Type,
 +) -> Option<CompletionRelevanceTypeMatch> {
 +    let expected_type = ctx.expected_type.as_ref()?;
 +
 +    // We don't ever consider unit type to be an exact type match, since
 +    // nearly always this is not meaningful to the user.
 +    if expected_type.is_unit() {
 +        return None;
 +    }
 +
 +    if completion_ty == expected_type {
 +        Some(CompletionRelevanceTypeMatch::Exact)
 +    } else if expected_type.could_unify_with(ctx.db, completion_ty) {
 +        Some(CompletionRelevanceTypeMatch::CouldUnify)
 +    } else {
 +        None
 +    }
 +}
 +
// True when the candidate's name equals the name expected at the cursor.
 +fn compute_exact_name_match(ctx: &CompletionContext<'_>, completion_name: &str) -> bool {
 +    ctx.expected_name.as_ref().map_or(false, |name| name.text() == completion_name)
 +}
 +
// If the expected type is `&T`/`&mut T` and the candidate derefs to `T`,
// returns the mutability to prepend (`&`/`&mut`) so the item can offer a
// ref-match variant; `None` when no referencing would help.
 +fn compute_ref_match(
 +    ctx: &CompletionContext<'_>,
 +    completion_ty: &hir::Type,
 +) -> Option<hir::Mutability> {
 +    let expected_type = ctx.expected_type.as_ref()?;
 +    if completion_ty != expected_type {
 +        let expected_type_without_ref = expected_type.remove_ref()?;
 +        if completion_ty.autoderef(ctx.db).any(|deref_ty| deref_ty == expected_type_without_ref) {
 +            cov_mark::hit!(suggest_ref);
 +            let mutability = if expected_type.is_mutable_reference() {
 +                hir::Mutability::Mut
 +            } else {
 +                hir::Mutability::Shared
 +            };
 +            return Some(mutability);
 +        };
 +    }
 +    None
 +}
 +
// Attaches a ref-match (`&`/`&mut` prefix variant) to `item` for a path
// completion, anchored at the original path when one exists, otherwise at the
// cursor offset.
++fn path_ref_match(
++    completion: &CompletionContext<'_>,
++    path_ctx: &PathCompletionCtx,
++    ty: &hir::Type,
++    item: &mut Builder,
++) {
++    if let Some(original_path) = &path_ctx.original_path {
++        // At least one char was typed by the user already, in that case look for the original path
++        if let Some(original_path) = completion.sema.original_ast_node(original_path.clone()) {
++            if let Some(ref_match) = compute_ref_match(completion, ty) {
++                item.ref_match(ref_match, original_path.syntax().text_range().start());
++            }
++        }
++    } else {
++        // completion requested on an empty identifier, there is no path here yet.
++        // FIXME: This might create inconsistent completions where we show a ref match in macro inputs
++        // as long as nothing was typed yet
++        if let Some(ref_match) = compute_ref_match(completion, ty) {
++            item.ref_match(ref_match, completion.position.offset);
++        }
++    }
++}
++
 +#[cfg(test)]
 +mod tests {
 +    use std::cmp;
 +
 +    use expect_test::{expect, Expect};
 +    use ide_db::SymbolKind;
 +    use itertools::Itertools;
 +
 +    use crate::{
 +        item::CompletionRelevanceTypeMatch,
 +        tests::{check_edit, do_completion, get_all_items, TEST_CONFIG},
 +        CompletionItem, CompletionItemKind, CompletionRelevance, CompletionRelevancePostfixMatch,
 +    };
 +
    // Completes the fixture at `$0` and snapshot-compares items of `kind`.
 +    #[track_caller]
 +    fn check(ra_fixture: &str, kind: impl Into<CompletionItemKind>, expect: Expect) {
 +        let actual = do_completion(ra_fixture, kind.into());
 +        expect.assert_debug_eq(&actual);
 +    }
 +
    // Like `check`, but collects completions for several item kinds at once.
 +    #[track_caller]
 +    fn check_kinds(ra_fixture: &str, kinds: &[CompletionItemKind], expect: Expect) {
 +        let actual: Vec<_> =
 +            kinds.iter().flat_map(|&kind| do_completion(ra_fixture, kind)).collect();
 +        expect.assert_debug_eq(&actual);
 +    }
 +
    // Snapshot of relevance-ranked items, filtered to the given kinds.
 +    #[track_caller]
 +    fn check_relevance_for_kinds(ra_fixture: &str, kinds: &[CompletionItemKind], expect: Expect) {
 +        let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None);
 +        actual.retain(|it| kinds.contains(&it.kind()));
 +        actual.sort_by_key(|it| cmp::Reverse(it.relevance().score()));
 +        check_relevance_(actual, expect);
 +    }
 +
    // Snapshot of relevance-ranked items, with noise kinds (snippets, keywords,
    // builtin types) filtered out.
 +    #[track_caller]
 +    fn check_relevance(ra_fixture: &str, expect: Expect) {
 +        let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None);
 +        actual.retain(|it| it.kind() != CompletionItemKind::Snippet);
 +        actual.retain(|it| it.kind() != CompletionItemKind::Keyword);
 +        actual.retain(|it| it.kind() != CompletionItemKind::BuiltinType);
 +        actual.sort_by_key(|it| cmp::Reverse(it.relevance().score()));
 +        check_relevance_(actual, expect);
 +    }
 +
    // Formats each item (and its ref-match variant, if any) as
    // `<kind-tag> <label> [relevance-factors]` and compares against `expect`.
 +    #[track_caller]
 +    fn check_relevance_(actual: Vec<CompletionItem>, expect: Expect) {
 +        let actual = actual
 +            .into_iter()
 +            .flat_map(|it| {
 +                let mut items = vec![];
 +
 +                let tag = it.kind().tag();
 +                let relevance = display_relevance(it.relevance());
 +                items.push(format!("{} {} {}\n", tag, it.label(), relevance));
 +
                // A ref-match renders as a second line with `&`/`&mut` prefixed
                // and its own relevance.
 +                if let Some((mutability, _offset, relevance)) = it.ref_match() {
 +                    let label = format!("&{}{}", mutability.as_keyword_for_ref(), it.label());
 +                    let relevance = display_relevance(relevance);
 +
 +                    items.push(format!("{} {} {}\n", tag, label, relevance));
 +                }
 +
 +                items
 +            })
 +            .collect::<String>();
 +
 +        expect.assert_eq(&actual);
 +
        // Renders the set of active relevance flags as e.g. "[type+name]".
 +        fn display_relevance(relevance: CompletionRelevance) -> String {
 +            let relevance_factors = vec![
 +                (relevance.type_match == Some(CompletionRelevanceTypeMatch::Exact), "type"),
 +                (
 +                    relevance.type_match == Some(CompletionRelevanceTypeMatch::CouldUnify),
 +                    "type_could_unify",
 +                ),
 +                (relevance.exact_name_match, "name"),
 +                (relevance.is_local, "local"),
 +                (
 +                    relevance.postfix_match == Some(CompletionRelevancePostfixMatch::Exact),
 +                    "snippet",
 +                ),
 +                (relevance.is_op_method, "op_method"),
 +                (relevance.requires_import, "requires_import"),
 +            ]
 +            .into_iter()
 +            .filter_map(|(cond, desc)| if cond { Some(desc) } else { None })
 +            .join("+");
 +
 +            format!("[{}]", relevance_factors)
 +        }
 +    }
 +
    // Record-variant completions should show the field list in their detail.
 +    #[test]
 +    fn enum_detail_includes_record_fields() {
 +        check(
 +            r#"
 +enum Foo { Foo { x: i32, y: i32 } }
 +
 +fn main() { Foo::Fo$0 }
 +"#,
 +            SymbolKind::Variant,
 +            expect![[r#"
 +                [
 +                    CompletionItem {
 +                        label: "Foo {…}",
 +                        source_range: 54..56,
 +                        delete: 54..56,
 +                        insert: "Foo { x: ${1:()}, y: ${2:()} }$0",
 +                        kind: SymbolKind(
 +                            Variant,
 +                        ),
 +                        lookup: "Foo{}",
 +                        detail: "Foo { x: i32, y: i32 }",
 +                    },
 +                ]
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn enum_detail_includes_tuple_fields() {
 +        check(
 +            r#"
 +enum Foo { Foo (i32, i32) }
 +
 +fn main() { Foo::Fo$0 }
 +"#,
 +            SymbolKind::Variant,
 +            expect![[r#"
 +                [
 +                    CompletionItem {
 +                        label: "Foo(…)",
 +                        source_range: 46..48,
 +                        delete: 46..48,
 +                        insert: "Foo(${1:()}, ${2:()})$0",
 +                        kind: SymbolKind(
 +                            Variant,
 +                        ),
 +                        lookup: "Foo()",
 +                        detail: "Foo(i32, i32)",
 +                    },
 +                ]
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn fn_detail_includes_args_and_return_type() {
 +        check(
 +            r#"
 +fn foo<T>(a: u32, b: u32, t: T) -> (u32, T) { (a, t) }
 +
 +fn main() { fo$0 }
 +"#,
 +            SymbolKind::Function,
 +            expect![[r#"
 +                [
 +                    CompletionItem {
 +                        label: "foo(…)",
 +                        source_range: 68..70,
 +                        delete: 68..70,
 +                        insert: "foo(${1:a}, ${2:b}, ${3:t})$0",
 +                        kind: SymbolKind(
 +                            Function,
 +                        ),
 +                        lookup: "foo",
 +                        detail: "fn(u32, u32, T) -> (u32, T)",
 +                        trigger_call_info: true,
 +                    },
 +                    CompletionItem {
 +                        label: "main()",
 +                        source_range: 68..70,
 +                        delete: 68..70,
 +                        insert: "main()$0",
 +                        kind: SymbolKind(
 +                            Function,
 +                        ),
 +                        lookup: "main",
 +                        detail: "fn()",
 +                    },
 +                ]
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn enum_detail_just_name_for_unit() {
 +        check(
 +            r#"
 +enum Foo { Foo }
 +
 +fn main() { Foo::Fo$0 }
 +"#,
 +            SymbolKind::Variant,
 +            expect![[r#"
 +                [
 +                    CompletionItem {
 +                        label: "Foo",
 +                        source_range: 35..37,
 +                        delete: 35..37,
 +                        insert: "Foo$0",
 +                        kind: SymbolKind(
 +                            Variant,
 +                        ),
 +                        detail: "Foo",
 +                    },
 +                ]
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn lookup_enums_by_two_qualifiers() {
 +        check_kinds(
 +            r#"
 +mod m {
 +    pub enum Spam { Foo, Bar(i32) }
 +}
 +fn main() { let _: m::Spam = S$0 }
 +"#,
 +            &[
 +                CompletionItemKind::SymbolKind(SymbolKind::Function),
 +                CompletionItemKind::SymbolKind(SymbolKind::Module),
 +                CompletionItemKind::SymbolKind(SymbolKind::Variant),
 +            ],
 +            expect![[r#"
 +                [
 +                    CompletionItem {
 +                        label: "main()",
 +                        source_range: 75..76,
 +                        delete: 75..76,
 +                        insert: "main()$0",
 +                        kind: SymbolKind(
 +                            Function,
 +                        ),
 +                        lookup: "main",
 +                        detail: "fn()",
 +                    },
 +                    CompletionItem {
 +                        label: "m",
 +                        source_range: 75..76,
 +                        delete: 75..76,
 +                        insert: "m",
 +                        kind: SymbolKind(
 +                            Module,
 +                        ),
 +                    },
 +                    CompletionItem {
 +                        label: "m::Spam::Bar(…)",
 +                        source_range: 75..76,
 +                        delete: 75..76,
 +                        insert: "m::Spam::Bar(${1:()})$0",
 +                        kind: SymbolKind(
 +                            Variant,
 +                        ),
 +                        lookup: "Spam::Bar()",
 +                        detail: "m::Spam::Bar(i32)",
 +                        relevance: CompletionRelevance {
 +                            exact_name_match: false,
 +                            type_match: Some(
 +                                Exact,
 +                            ),
 +                            is_local: false,
 +                            is_item_from_trait: false,
 +                            is_name_already_imported: false,
 +                            requires_import: false,
 +                            is_op_method: false,
 +                            is_private_editable: false,
 +                            postfix_match: None,
 +                            is_definite: false,
 +                        },
 +                    },
 +                    CompletionItem {
 +                        label: "m::Spam::Foo",
 +                        source_range: 75..76,
 +                        delete: 75..76,
 +                        insert: "m::Spam::Foo$0",
 +                        kind: SymbolKind(
 +                            Variant,
 +                        ),
 +                        lookup: "Spam::Foo",
 +                        detail: "m::Spam::Foo",
 +                        relevance: CompletionRelevance {
 +                            exact_name_match: false,
 +                            type_match: Some(
 +                                Exact,
 +                            ),
 +                            is_local: false,
 +                            is_item_from_trait: false,
 +                            is_name_already_imported: false,
 +                            requires_import: false,
 +                            is_op_method: false,
 +                            is_private_editable: false,
 +                            postfix_match: None,
 +                            is_definite: false,
 +                        },
 +                    },
 +                ]
 +            "#]],
 +        )
 +    }
 +
 +    #[test]
 +    fn sets_deprecated_flag_in_items() {
 +        check(
 +            r#"
 +#[deprecated]
 +fn something_deprecated() {}
 +
 +fn main() { som$0 }
 +"#,
 +            SymbolKind::Function,
 +            expect![[r#"
 +                [
 +                    CompletionItem {
 +                        label: "main()",
 +                        source_range: 56..59,
 +                        delete: 56..59,
 +                        insert: "main()$0",
 +                        kind: SymbolKind(
 +                            Function,
 +                        ),
 +                        lookup: "main",
 +                        detail: "fn()",
 +                    },
 +                    CompletionItem {
 +                        label: "something_deprecated()",
 +                        source_range: 56..59,
 +                        delete: 56..59,
 +                        insert: "something_deprecated()$0",
 +                        kind: SymbolKind(
 +                            Function,
 +                        ),
 +                        lookup: "something_deprecated",
 +                        detail: "fn()",
 +                        deprecated: true,
 +                    },
 +                ]
 +            "#]],
 +        );
 +
 +        check(
 +            r#"
 +struct A { #[deprecated] the_field: u32 }
 +fn foo() { A { the$0 } }
 +"#,
 +            SymbolKind::Field,
 +            expect![[r#"
 +                [
 +                    CompletionItem {
 +                        label: "the_field",
 +                        source_range: 57..60,
 +                        delete: 57..60,
 +                        insert: "the_field",
 +                        kind: SymbolKind(
 +                            Field,
 +                        ),
 +                        detail: "u32",
 +                        deprecated: true,
 +                        relevance: CompletionRelevance {
 +                            exact_name_match: false,
 +                            type_match: Some(
 +                                CouldUnify,
 +                            ),
 +                            is_local: false,
 +                            is_item_from_trait: false,
 +                            is_name_already_imported: false,
 +                            requires_import: false,
 +                            is_op_method: false,
 +                            is_private_editable: false,
 +                            postfix_match: None,
 +                            is_definite: false,
 +                        },
 +                    },
 +                ]
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn renders_docs() {
 +        check_kinds(
 +            r#"
 +struct S {
 +    /// Field docs
 +    foo:
 +}
 +impl S {
 +    /// Method docs
 +    fn bar(self) { self.$0 }
 +}"#,
 +            &[CompletionItemKind::Method, CompletionItemKind::SymbolKind(SymbolKind::Field)],
 +            expect![[r#"
 +                [
 +                    CompletionItem {
 +                        label: "bar()",
 +                        source_range: 94..94,
 +                        delete: 94..94,
 +                        insert: "bar()$0",
 +                        kind: Method,
 +                        lookup: "bar",
 +                        detail: "fn(self)",
 +                        documentation: Documentation(
 +                            "Method docs",
 +                        ),
 +                    },
 +                    CompletionItem {
 +                        label: "foo",
 +                        source_range: 94..94,
 +                        delete: 94..94,
 +                        insert: "foo",
 +                        kind: SymbolKind(
 +                            Field,
 +                        ),
 +                        detail: "{unknown}",
 +                        documentation: Documentation(
 +                            "Field docs",
 +                        ),
 +                    },
 +                ]
 +            "#]],
 +        );
 +
 +        check_kinds(
 +            r#"
 +use self::my$0;
 +
 +/// mod docs
 +mod my { }
 +
 +/// enum docs
 +enum E {
 +    /// variant docs
 +    V
 +}
 +use self::E::*;
 +"#,
 +            &[
 +                CompletionItemKind::SymbolKind(SymbolKind::Module),
 +                CompletionItemKind::SymbolKind(SymbolKind::Variant),
 +                CompletionItemKind::SymbolKind(SymbolKind::Enum),
 +            ],
 +            expect![[r#"
 +                [
 +                    CompletionItem {
 +                        label: "my",
 +                        source_range: 10..12,
 +                        delete: 10..12,
 +                        insert: "my",
 +                        kind: SymbolKind(
 +                            Module,
 +                        ),
 +                        documentation: Documentation(
 +                            "mod docs",
 +                        ),
 +                    },
 +                    CompletionItem {
 +                        label: "V",
 +                        source_range: 10..12,
 +                        delete: 10..12,
 +                        insert: "V$0",
 +                        kind: SymbolKind(
 +                            Variant,
 +                        ),
 +                        detail: "V",
 +                        documentation: Documentation(
 +                            "variant docs",
 +                        ),
 +                    },
 +                    CompletionItem {
 +                        label: "E",
 +                        source_range: 10..12,
 +                        delete: 10..12,
 +                        insert: "E",
 +                        kind: SymbolKind(
 +                            Enum,
 +                        ),
 +                        documentation: Documentation(
 +                            "enum docs",
 +                        ),
 +                    },
 +                ]
 +            "#]],
 +        )
 +    }
 +
 +    #[test]
 +    fn dont_render_attrs() {
 +        check(
 +            r#"
 +struct S;
 +impl S {
 +    #[inline]
 +    fn the_method(&self) { }
 +}
 +fn foo(s: S) { s.$0 }
 +"#,
 +            CompletionItemKind::Method,
 +            expect![[r#"
 +                [
 +                    CompletionItem {
 +                        label: "the_method()",
 +                        source_range: 81..81,
 +                        delete: 81..81,
 +                        insert: "the_method()$0",
 +                        kind: Method,
 +                        lookup: "the_method",
 +                        detail: "fn(&self)",
 +                    },
 +                ]
 +            "#]],
 +        )
 +    }
 +
 +    #[test]
 +    fn no_call_parens_if_fn_ptr_needed() {
 +        cov_mark::check!(no_call_parens_if_fn_ptr_needed);
 +        check_edit(
 +            "foo",
 +            r#"
 +fn foo(foo: u8, bar: u8) {}
 +struct ManualVtable { f: fn(u8, u8) }
 +
 +fn main() -> ManualVtable {
 +    ManualVtable { f: f$0 }
 +}
 +"#,
 +            r#"
 +fn foo(foo: u8, bar: u8) {}
 +struct ManualVtable { f: fn(u8, u8) }
 +
 +fn main() -> ManualVtable {
 +    ManualVtable { f: foo }
 +}
 +"#,
 +        );
 +        check_edit(
 +            "type",
 +            r#"
 +struct RawIdentTable { r#type: u32 }
 +
 +fn main() -> RawIdentTable {
 +    RawIdentTable { t$0: 42 }
 +}
 +"#,
 +            r#"
 +struct RawIdentTable { r#type: u32 }
 +
 +fn main() -> RawIdentTable {
 +    RawIdentTable { r#type: 42 }
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn no_parens_in_use_item() {
 +        check_edit(
 +            "foo",
 +            r#"
 +mod m { pub fn foo() {} }
 +use crate::m::f$0;
 +"#,
 +            r#"
 +mod m { pub fn foo() {} }
 +use crate::m::foo;
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn no_parens_in_call() {
 +        check_edit(
 +            "foo",
 +            r#"
 +fn foo(x: i32) {}
 +fn main() { f$0(); }
 +"#,
 +            r#"
 +fn foo(x: i32) {}
 +fn main() { foo(); }
 +"#,
 +        );
 +        check_edit(
 +            "foo",
 +            r#"
 +struct Foo;
 +impl Foo { fn foo(&self){} }
 +fn f(foo: &Foo) { foo.f$0(); }
 +"#,
 +            r#"
 +struct Foo;
 +impl Foo { fn foo(&self){} }
 +fn f(foo: &Foo) { foo.foo(); }
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn inserts_angle_brackets_for_generics() {
 +        cov_mark::check!(inserts_angle_brackets_for_generics);
 +        check_edit(
 +            "Vec",
 +            r#"
 +struct Vec<T> {}
 +fn foo(xs: Ve$0)
 +"#,
 +            r#"
 +struct Vec<T> {}
 +fn foo(xs: Vec<$0>)
 +"#,
 +        );
 +        check_edit(
 +            "Vec",
 +            r#"
 +type Vec<T> = (T,);
 +fn foo(xs: Ve$0)
 +"#,
 +            r#"
 +type Vec<T> = (T,);
 +fn foo(xs: Vec<$0>)
 +"#,
 +        );
 +        check_edit(
 +            "Vec",
 +            r#"
 +struct Vec<T = i128> {}
 +fn foo(xs: Ve$0)
 +"#,
 +            r#"
 +struct Vec<T = i128> {}
 +fn foo(xs: Vec)
 +"#,
 +        );
 +        check_edit(
 +            "Vec",
 +            r#"
 +struct Vec<T> {}
 +fn foo(xs: Ve$0<i128>)
 +"#,
 +            r#"
 +struct Vec<T> {}
 +fn foo(xs: Vec<i128>)
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn active_param_relevance() {
 +        check_relevance(
 +            r#"
 +struct S { foo: i64, bar: u32, baz: u32 }
 +fn test(bar: u32) { }
 +fn foo(s: S) { test(s.$0) }
 +"#,
 +            expect![[r#"
 +                fd bar [type+name]
 +                fd baz [type]
 +                fd foo []
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn record_field_relevances() {
 +        check_relevance(
 +            r#"
 +struct A { foo: i64, bar: u32, baz: u32 }
 +struct B { x: (), y: f32, bar: u32 }
 +fn foo(a: A) { B { bar: a.$0 }; }
 +"#,
 +            expect![[r#"
 +                fd bar [type+name]
 +                fd baz [type]
 +                fd foo []
 +            "#]],
 +        )
 +    }
 +
 +    #[test]
 +    fn record_field_and_call_relevances() {
 +        check_relevance(
 +            r#"
 +struct A { foo: i64, bar: u32, baz: u32 }
 +struct B { x: (), y: f32, bar: u32 }
 +fn f(foo: i64) {  }
 +fn foo(a: A) { B { bar: f(a.$0) }; }
 +"#,
 +            expect![[r#"
 +                fd foo [type+name]
 +                fd bar []
 +                fd baz []
 +            "#]],
 +        );
 +        check_relevance(
 +            r#"
 +struct A { foo: i64, bar: u32, baz: u32 }
 +struct B { x: (), y: f32, bar: u32 }
 +fn f(foo: i64) {  }
 +fn foo(a: A) { f(B { bar: a.$0 }); }
 +"#,
 +            expect![[r#"
 +                fd bar [type+name]
 +                fd baz [type]
 +                fd foo []
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn prioritize_exact_ref_match() {
 +        check_relevance(
 +            r#"
 +struct WorldSnapshot { _f: () };
 +fn go(world: &WorldSnapshot) { go(w$0) }
 +"#,
 +            expect![[r#"
 +                lc world [type+name+local]
 +                st WorldSnapshot {…} []
 +                st &WorldSnapshot {…} [type]
 +                st WorldSnapshot []
 +                fn go(…) []
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn too_many_arguments() {
 +        cov_mark::check!(too_many_arguments);
 +        check_relevance(
 +            r#"
 +struct Foo;
 +fn f(foo: &Foo) { f(foo, w$0) }
 +"#,
 +            expect![[r#"
 +                lc foo [local]
 +                st Foo []
 +                fn f(…) []
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn score_fn_type_and_name_match() {
 +        check_relevance(
 +            r#"
 +struct A { bar: u8 }
 +fn baz() -> u8 { 0 }
 +fn bar() -> u8 { 0 }
 +fn f() { A { bar: b$0 }; }
 +"#,
 +            expect![[r#"
 +                fn bar() [type+name]
 +                fn baz() [type]
 +                st A []
 +                fn f() []
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn score_method_type_and_name_match() {
 +        check_relevance(
 +            r#"
 +fn baz(aaa: u32){}
 +struct Foo;
 +impl Foo {
 +fn aaa(&self) -> u32 { 0 }
 +fn bbb(&self) -> u32 { 0 }
 +fn ccc(&self) -> u64 { 0 }
 +}
 +fn f() {
 +    baz(Foo.$0
 +}
 +"#,
 +            expect![[r#"
 +                me aaa() [type+name]
 +                me bbb() [type]
 +                me ccc() []
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn score_method_name_match_only() {
 +        check_relevance(
 +            r#"
 +fn baz(aaa: u32){}
 +struct Foo;
 +impl Foo {
 +fn aaa(&self) -> u64 { 0 }
 +}
 +fn f() {
 +    baz(Foo.$0
 +}
 +"#,
 +            expect![[r#"
 +                me aaa() [name]
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn suggest_ref_mut() {
 +        cov_mark::check!(suggest_ref);
 +        check_relevance(
 +            r#"
 +struct S;
 +fn foo(s: &mut S) {}
 +fn main() {
 +    let mut s = S;
 +    foo($0);
 +}
 +            "#,
 +            expect![[r#"
 +                lc s [name+local]
 +                lc &mut s [type+name+local]
 +                st S []
 +                st &mut S [type]
 +                st S []
 +                fn foo(…) []
 +                fn main() []
 +            "#]],
 +        );
 +        check_relevance(
 +            r#"
 +struct S;
 +fn foo(s: &mut S) {}
 +fn main() {
 +    let mut s = S;
 +    foo(&mut $0);
 +}
 +            "#,
 +            expect![[r#"
 +                lc s [type+name+local]
 +                st S [type]
 +                st S []
 +                fn foo(…) []
 +                fn main() []
 +            "#]],
 +        );
 +        check_relevance(
 +            r#"
 +struct S;
 +fn foo(s: &mut S) {}
 +fn main() {
 +    let mut ssss = S;
 +    foo(&mut s$0);
 +}
 +            "#,
 +            expect![[r#"
 +                lc ssss [type+local]
 +                st S [type]
 +                st S []
 +                fn foo(…) []
 +                fn main() []
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn suggest_deref() {
 +        check_relevance(
 +            r#"
 +//- minicore: deref
 +struct S;
 +struct T(S);
 +
 +impl core::ops::Deref for T {
 +    type Target = S;
 +
 +    fn deref(&self) -> &Self::Target {
 +        &self.0
 +    }
 +}
 +
 +fn foo(s: &S) {}
 +
 +fn main() {
 +    let t = T(S);
 +    let m = 123;
 +
 +    foo($0);
 +}
 +            "#,
 +            expect![[r#"
 +                lc m [local]
 +                lc t [local]
 +                lc &t [type+local]
 +                st S []
 +                st &S [type]
 +                st S []
 +                st T []
 +                fn foo(…) []
 +                fn main() []
 +                md core []
 +            "#]],
 +        )
 +    }
 +
 +    #[test]
 +    fn suggest_deref_mut() {
 +        check_relevance(
 +            r#"
 +//- minicore: deref_mut
 +struct S;
 +struct T(S);
 +
 +impl core::ops::Deref for T {
 +    type Target = S;
 +
 +    fn deref(&self) -> &Self::Target {
 +        &self.0
 +    }
 +}
 +
 +impl core::ops::DerefMut for T {
 +    fn deref_mut(&mut self) -> &mut Self::Target {
 +        &mut self.0
 +    }
 +}
 +
 +fn foo(s: &mut S) {}
 +
 +fn main() {
 +    let t = T(S);
 +    let m = 123;
 +
 +    foo($0);
 +}
 +            "#,
 +            expect![[r#"
 +                lc m [local]
 +                lc t [local]
 +                lc &mut t [type+local]
 +                st S []
 +                st &mut S [type]
 +                st S []
 +                st T []
 +                fn foo(…) []
 +                fn main() []
 +                md core []
 +            "#]],
 +        )
 +    }
 +
 +    #[test]
 +    fn locals() {
 +        check_relevance(
 +            r#"
 +fn foo(bar: u32) {
 +    let baz = 0;
 +
 +    f$0
 +}
 +"#,
 +            expect![[r#"
 +                lc baz [local]
 +                lc bar [local]
 +                fn foo(…) []
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn enum_owned() {
 +        check_relevance(
 +            r#"
 +enum Foo { A, B }
 +fn foo() {
 +    bar($0);
 +}
 +fn bar(t: Foo) {}
 +"#,
 +            expect![[r#"
 +                ev Foo::A [type]
 +                ev Foo::B [type]
 +                en Foo []
 +                fn bar(…) []
 +                fn foo() []
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn enum_ref() {
 +        check_relevance(
 +            r#"
 +enum Foo { A, B }
 +fn foo() {
 +    bar($0);
 +}
 +fn bar(t: &Foo) {}
 +"#,
 +            expect![[r#"
 +                ev Foo::A []
 +                ev &Foo::A [type]
 +                ev Foo::B []
 +                ev &Foo::B [type]
 +                en Foo []
 +                fn bar(…) []
 +                fn foo() []
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn suggest_deref_fn_ret() {
 +        check_relevance(
 +            r#"
 +//- minicore: deref
 +struct S;
 +struct T(S);
 +
 +impl core::ops::Deref for T {
 +    type Target = S;
 +
 +    fn deref(&self) -> &Self::Target {
 +        &self.0
 +    }
 +}
 +
 +fn foo(s: &S) {}
 +fn bar() -> T {}
 +
 +fn main() {
 +    foo($0);
 +}
 +"#,
 +            expect![[r#"
 +                st S []
 +                st &S [type]
 +                st S []
 +                st T []
 +                fn bar() []
 +                fn &bar() [type]
 +                fn foo(…) []
 +                fn main() []
 +                md core []
 +            "#]],
 +        )
 +    }
 +
 +    #[test]
 +    fn op_function_relevances() {
 +        check_relevance(
 +            r#"
 +#[lang = "sub"]
 +trait Sub {
 +    fn sub(self, other: Self) -> Self { self }
 +}
 +impl Sub for u32 {}
 +fn foo(a: u32) { a.$0 }
 +"#,
 +            expect![[r#"
 +                me sub(…) (as Sub) [op_method]
 +            "#]],
 +        );
 +        check_relevance(
 +            r#"
 +struct Foo;
 +impl Foo {
 +    fn new() -> Self {}
 +}
 +#[lang = "eq"]
 +pub trait PartialEq<Rhs: ?Sized = Self> {
 +    fn eq(&self, other: &Rhs) -> bool;
 +    fn ne(&self, other: &Rhs) -> bool;
 +}
 +
 +impl PartialEq for Foo {}
 +fn main() {
 +    Foo::$0
 +}
 +"#,
 +            expect![[r#"
 +                fn new() []
 +                me eq(…) (as PartialEq) [op_method]
 +                me ne(…) (as PartialEq) [op_method]
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn struct_field_method_ref() {
 +        check_kinds(
 +            r#"
 +struct Foo { bar: u32 }
 +impl Foo { fn baz(&self) -> u32 { 0 } }
 +
 +fn foo(f: Foo) { let _: &u32 = f.b$0 }
 +"#,
 +            &[CompletionItemKind::Method, CompletionItemKind::SymbolKind(SymbolKind::Field)],
 +            expect![[r#"
 +                [
 +                    CompletionItem {
 +                        label: "baz()",
 +                        source_range: 98..99,
 +                        delete: 98..99,
 +                        insert: "baz()$0",
 +                        kind: Method,
 +                        lookup: "baz",
 +                        detail: "fn(&self) -> u32",
 +                        ref_match: "&@96",
 +                    },
 +                    CompletionItem {
 +                        label: "bar",
 +                        source_range: 98..99,
 +                        delete: 98..99,
 +                        insert: "bar",
 +                        kind: SymbolKind(
 +                            Field,
 +                        ),
 +                        detail: "u32",
 +                        ref_match: "&@96",
 +                    },
 +                ]
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn qualified_path_ref() {
 +        check_kinds(
 +            r#"
 +struct S;
 +
 +struct T;
 +impl T {
 +    fn foo() -> S {}
 +}
 +
 +fn bar(s: &S) {}
 +
 +fn main() {
 +    bar(T::$0);
 +}
 +"#,
 +            &[CompletionItemKind::SymbolKind(SymbolKind::Function)],
 +            expect![[r#"
 +                [
 +                    CompletionItem {
 +                        label: "foo()",
 +                        source_range: 95..95,
 +                        delete: 95..95,
 +                        insert: "foo()$0",
 +                        kind: SymbolKind(
 +                            Function,
 +                        ),
 +                        lookup: "foo",
 +                        detail: "fn() -> S",
 +                        ref_match: "&@92",
 +                    },
 +                ]
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn generic_enum() {
 +        check_relevance(
 +            r#"
 +enum Foo<T> { A(T), B }
 +// bar() should not be an exact type match
 +// because the generic parameters are different
 +fn bar() -> Foo<u8> { Foo::B }
 +// FIXME baz() should be an exact type match
 +// because the types could unify, but it currently
 +// is not. This is due to the T here being
 +// TyKind::Placeholder rather than TyKind::Missing.
 +fn baz<T>() -> Foo<T> { Foo::B }
 +fn foo() {
 +    let foo: Foo<u32> = Foo::B;
 +    let _: Foo<u32> = f$0;
 +}
 +"#,
 +            expect![[r#"
 +                lc foo [type+local]
 +                ev Foo::A(…) [type_could_unify]
 +                ev Foo::B [type_could_unify]
 +                fn foo() []
 +                en Foo []
 +                fn bar() []
 +                fn baz() []
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn postfix_exact_match_is_high_priority() {
 +        cov_mark::check!(postfix_exact_match_is_high_priority);
 +        check_relevance_for_kinds(
 +            r#"
 +mod ops {
 +    pub trait Not {
 +        type Output;
 +        fn not(self) -> Self::Output;
 +    }
 +
 +    impl Not for bool {
 +        type Output = bool;
 +        fn not(self) -> bool { if self { false } else { true }}
 +    }
 +}
 +
 +fn main() {
 +    let _: bool = (9 > 2).not$0;
 +}
 +    "#,
 +            &[CompletionItemKind::Snippet, CompletionItemKind::Method],
 +            expect![[r#"
 +                sn not [snippet]
 +                me not() (use ops::Not) [type_could_unify+requires_import]
 +                sn if []
 +                sn while []
 +                sn ref []
 +                sn refm []
 +                sn match []
 +                sn box []
 +                sn dbg []
 +                sn dbgr []
 +                sn call []
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn postfix_inexact_match_is_low_priority() {
 +        cov_mark::check!(postfix_inexact_match_is_low_priority);
 +        check_relevance_for_kinds(
 +            r#"
 +struct S;
 +impl S {
 +    fn f(&self) {}
 +}
 +fn main() {
 +    S.$0
 +}
 +    "#,
 +            &[CompletionItemKind::Snippet, CompletionItemKind::Method],
 +            expect![[r#"
 +                me f() []
 +                sn ref []
 +                sn refm []
 +                sn match []
 +                sn box []
 +                sn dbg []
 +                sn dbgr []
 +                sn call []
 +                sn let []
 +                sn letm []
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn flyimport_reduced_relevance() {
 +        check_relevance(
 +            r#"
 +mod std {
 +    pub mod io {
 +        pub trait BufRead {}
 +        pub struct BufReader;
 +        pub struct BufWriter;
 +    }
 +}
 +struct Buffer;
 +
 +fn f() {
 +    Buf$0
 +}
 +"#,
 +            expect![[r#"
 +                st Buffer []
 +                fn f() []
 +                md std []
 +                tt BufRead (use std::io::BufRead) [requires_import]
 +                st BufReader (use std::io::BufReader) [requires_import]
 +                st BufWriter (use std::io::BufWriter) [requires_import]
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn completes_struct_with_raw_identifier() {
 +        check_edit(
 +            "type",
 +            r#"
 +mod m { pub struct r#type {} }
 +fn main() {
 +    let r#type = m::t$0;
 +}
 +"#,
 +            r#"
 +mod m { pub struct r#type {} }
 +fn main() {
 +    let r#type = m::r#type;
 +}
 +"#,
 +        )
 +    }
 +
 +    #[test]
 +    fn completes_fn_with_raw_identifier() {
 +        check_edit(
 +            "type",
 +            r#"
 +mod m { pub fn r#type {} }
 +fn main() {
 +    m::t$0
 +}
 +"#,
 +            r#"
 +mod m { pub fn r#type {} }
 +fn main() {
 +    m::r#type()$0
 +}
 +"#,
 +        )
 +    }
 +
 +    #[test]
 +    fn completes_macro_with_raw_identifier() {
 +        check_edit(
 +            "let!",
 +            r#"
 +macro_rules! r#let { () => {} }
 +fn main() {
 +    $0
 +}
 +"#,
 +            r#"
 +macro_rules! r#let { () => {} }
 +fn main() {
 +    r#let!($0)
 +}
 +"#,
 +        )
 +    }
 +
 +    #[test]
 +    fn completes_variant_with_raw_identifier() {
 +        check_edit(
 +            "type",
 +            r#"
 +enum A { r#type }
 +fn main() {
 +    let a = A::t$0
 +}
 +"#,
 +            r#"
 +enum A { r#type }
 +fn main() {
 +    let a = A::r#type$0
 +}
 +"#,
 +        )
 +    }
 +
 +    #[test]
 +    fn completes_field_with_raw_identifier() {
 +        check_edit(
 +            "fn",
 +            r#"
 +mod r#type {
 +    pub struct r#struct {
 +        pub r#fn: u32
 +    }
 +}
 +
 +fn main() {
 +    let a = r#type::r#struct {};
 +    a.$0
 +}
 +"#,
 +            r#"
 +mod r#type {
 +    pub struct r#struct {
 +        pub r#fn: u32
 +    }
 +}
 +
 +fn main() {
 +    let a = r#type::r#struct {};
 +    a.r#fn
 +}
 +"#,
 +        )
 +    }
 +
 +    #[test]
 +    fn completes_const_with_raw_identifier() {
 +        check_edit(
 +            "type",
 +            r#"
 +struct r#struct {}
 +impl r#struct { pub const r#type: u8 = 1; }
 +fn main() {
 +    r#struct::t$0
 +}
 +"#,
 +            r#"
 +struct r#struct {}
 +impl r#struct { pub const r#type: u8 = 1; }
 +fn main() {
 +    r#struct::r#type
 +}
 +"#,
 +        )
 +    }
 +
 +    #[test]
 +    fn completes_type_alias_with_raw_identifier() {
 +        check_edit(
 +            "type type",
 +            r#"
 +struct r#struct {}
 +trait r#trait { type r#type; }
 +impl r#trait for r#struct { type t$0 }
 +"#,
 +            r#"
 +struct r#struct {}
 +trait r#trait { type r#type; }
 +impl r#trait for r#struct { type r#type = $0; }
 +"#,
 +        )
 +    }
 +
 +    #[test]
 +    fn field_access_includes_self() {
 +        check_edit(
 +            "length",
 +            r#"
 +struct S {
 +    length: i32
 +}
 +
 +impl S {
 +    fn some_fn(&self) {
 +        let l = len$0
 +    }
 +}
 +"#,
 +            r#"
 +struct S {
 +    length: i32
 +}
 +
 +impl S {
 +    fn some_fn(&self) {
 +        let l = self.length
 +    }
 +}
 +"#,
 +        )
 +    }
 +}
index 9cf64691298ebcfe74576bbbbbc844edd217d3b4,0000000000000000000000000000000000000000..37612084604764eee2c66ec568d9dc7fda0c236f
mode 100644,000000..100644
--- /dev/null
@@@ -1,671 -1,0 +1,671 @@@
-     if let Some(ref_match) = compute_ref_match(completion, &ret_type) {
-         match func_kind {
-             FuncKind::Function(path_ctx) => {
-                 item.ref_match(ref_match, path_ctx.path.syntax().text_range().start());
-             }
-             FuncKind::Method(DotAccess { receiver: Some(receiver), .. }, _) => {
-                 if let Some(original_expr) = completion.sema.original_ast_node(receiver.clone()) {
 +//! Renderer for function calls.
 +
 +use hir::{db::HirDatabase, AsAssocItem, HirDisplay};
 +use ide_db::{SnippetCap, SymbolKind};
 +use itertools::Itertools;
 +use stdx::{format_to, to_lower_snake_case};
 +use syntax::{AstNode, SmolStr};
 +
 +use crate::{
 +    context::{CompletionContext, DotAccess, DotAccessKind, PathCompletionCtx, PathKind},
 +    item::{Builder, CompletionItem, CompletionItemKind, CompletionRelevance},
 +    render::{compute_exact_name_match, compute_ref_match, compute_type_match, RenderContext},
 +    CallableSnippets,
 +};
 +
 +#[derive(Debug)]
 +enum FuncKind<'ctx> {
 +    Function(&'ctx PathCompletionCtx),
 +    Method(&'ctx DotAccess, Option<hir::Name>),
 +}
 +
 +pub(crate) fn render_fn(
 +    ctx: RenderContext<'_>,
 +    path_ctx: &PathCompletionCtx,
 +    local_name: Option<hir::Name>,
 +    func: hir::Function,
 +) -> Builder {
 +    let _p = profile::span("render_fn");
 +    render(ctx, local_name, func, FuncKind::Function(path_ctx))
 +}
 +
 +pub(crate) fn render_method(
 +    ctx: RenderContext<'_>,
 +    dot_access: &DotAccess,
 +    receiver: Option<hir::Name>,
 +    local_name: Option<hir::Name>,
 +    func: hir::Function,
 +) -> Builder {
 +    let _p = profile::span("render_method");
 +    render(ctx, local_name, func, FuncKind::Method(dot_access, receiver))
 +}
 +
 +fn render(
 +    ctx @ RenderContext { completion, .. }: RenderContext<'_>,
 +    local_name: Option<hir::Name>,
 +    func: hir::Function,
 +    func_kind: FuncKind<'_>,
 +) -> Builder {
 +    let db = completion.db;
 +
 +    let name = local_name.unwrap_or_else(|| func.name(db));
 +
 +    let (call, escaped_call) = match &func_kind {
 +        FuncKind::Method(_, Some(receiver)) => (
 +            format!("{}.{}", receiver.unescaped(), name.unescaped()).into(),
 +            format!("{}.{}", receiver, name).into(),
 +        ),
 +        _ => (name.unescaped().to_smol_str(), name.to_smol_str()),
 +    };
 +    let mut item = CompletionItem::new(
 +        if func.self_param(db).is_some() {
 +            CompletionItemKind::Method
 +        } else {
 +            CompletionItemKind::SymbolKind(SymbolKind::Function)
 +        },
 +        ctx.source_range(),
 +        call.clone(),
 +    );
 +
 +    let ret_type = func.ret_type(db);
 +    let is_op_method = func
 +        .as_assoc_item(ctx.db())
 +        .and_then(|trait_| trait_.containing_trait_or_trait_impl(ctx.db()))
 +        .map_or(false, |trait_| completion.is_ops_trait(trait_));
 +    item.set_relevance(CompletionRelevance {
 +        type_match: compute_type_match(completion, &ret_type),
 +        exact_name_match: compute_exact_name_match(completion, &call),
 +        is_op_method,
 +        ..ctx.completion_relevance()
 +    });
 +
-             _ => (),
++    match func_kind {
++        FuncKind::Function(path_ctx) => {
++            super::path_ref_match(completion, path_ctx, &ret_type, &mut item);
++        }
++        FuncKind::Method(DotAccess { receiver: Some(receiver), .. }, _) => {
++            if let Some(original_expr) = completion.sema.original_ast_node(receiver.clone()) {
++                if let Some(ref_match) = compute_ref_match(completion, &ret_type) {
 +                    item.ref_match(ref_match, original_expr.syntax().text_range().start());
 +                }
 +            }
 +        }
++        _ => (),
 +    }
 +
 +    item.set_documentation(ctx.docs(func))
 +        .set_deprecated(ctx.is_deprecated(func) || ctx.is_deprecated_assoc_item(func))
 +        .detail(detail(db, func))
 +        .lookup_by(name.unescaped().to_smol_str());
 +
 +    match ctx.completion.config.snippet_cap {
 +        Some(cap) => {
 +            let complete_params = match func_kind {
 +                FuncKind::Function(PathCompletionCtx {
 +                    kind: PathKind::Expr { .. },
 +                    has_call_parens: false,
 +                    ..
 +                }) => Some(false),
 +                FuncKind::Method(
 +                    DotAccess {
 +                        kind:
 +                            DotAccessKind::Method { has_parens: false } | DotAccessKind::Field { .. },
 +                        ..
 +                    },
 +                    _,
 +                ) => Some(true),
 +                _ => None,
 +            };
 +            if let Some(has_dot_receiver) = complete_params {
 +                if let Some((self_param, params)) =
 +                    params(ctx.completion, func, &func_kind, has_dot_receiver)
 +                {
 +                    add_call_parens(
 +                        &mut item,
 +                        completion,
 +                        cap,
 +                        call,
 +                        escaped_call,
 +                        self_param,
 +                        params,
 +                    );
 +                }
 +            }
 +        }
 +        _ => (),
 +    };
 +
 +    match ctx.import_to_add {
 +        Some(import_to_add) => {
 +            item.add_import(import_to_add);
 +        }
 +        None => {
 +            if let Some(actm) = func.as_assoc_item(db) {
 +                if let Some(trt) = actm.containing_trait_or_trait_impl(db) {
 +                    item.trait_name(trt.name(db).to_smol_str());
 +                }
 +            }
 +        }
 +    }
 +    item
 +}
 +
 +pub(super) fn add_call_parens<'b>(
 +    builder: &'b mut Builder,
 +    ctx: &CompletionContext<'_>,
 +    cap: SnippetCap,
 +    name: SmolStr,
 +    escaped_name: SmolStr,
 +    self_param: Option<hir::SelfParam>,
 +    params: Vec<hir::Param>,
 +) -> &'b mut Builder {
 +    cov_mark::hit!(inserts_parens_for_function_calls);
 +
 +    let (snippet, label_suffix) = if self_param.is_none() && params.is_empty() {
 +        (format!("{}()$0", escaped_name), "()")
 +    } else {
 +        builder.trigger_call_info();
 +        let snippet = if let Some(CallableSnippets::FillArguments) = ctx.config.callable {
 +            let offset = if self_param.is_some() { 2 } else { 1 };
 +            let function_params_snippet =
 +                params.iter().enumerate().format_with(", ", |(index, param), f| {
 +                    match param.name(ctx.db) {
 +                        Some(n) => {
 +                            let smol_str = n.to_smol_str();
 +                            let text = smol_str.as_str().trim_start_matches('_');
 +                            let ref_ = ref_of_param(ctx, text, param.ty());
 +                            f(&format_args!("${{{}:{}{}}}", index + offset, ref_, text))
 +                        }
 +                        None => {
 +                            let name = match param.ty().as_adt() {
 +                                None => "_".to_string(),
 +                                Some(adt) => adt
 +                                    .name(ctx.db)
 +                                    .as_text()
 +                                    .map(|s| to_lower_snake_case(s.as_str()))
 +                                    .unwrap_or_else(|| "_".to_string()),
 +                            };
 +                            f(&format_args!("${{{}:{}}}", index + offset, name))
 +                        }
 +                    }
 +                });
 +            match self_param {
 +                Some(self_param) => {
 +                    format!(
 +                        "{}(${{1:{}}}{}{})$0",
 +                        escaped_name,
 +                        self_param.display(ctx.db),
 +                        if params.is_empty() { "" } else { ", " },
 +                        function_params_snippet
 +                    )
 +                }
 +                None => {
 +                    format!("{}({})$0", escaped_name, function_params_snippet)
 +                }
 +            }
 +        } else {
 +            cov_mark::hit!(suppress_arg_snippets);
 +            format!("{}($0)", escaped_name)
 +        };
 +
 +        (snippet, "(…)")
 +    };
 +    builder.label(SmolStr::from_iter([&name, label_suffix])).insert_snippet(cap, snippet)
 +}
 +
 +fn ref_of_param(ctx: &CompletionContext<'_>, arg: &str, ty: &hir::Type) -> &'static str {
 +    if let Some(derefed_ty) = ty.remove_ref() {
 +        for (name, local) in ctx.locals.iter() {
 +            if name.as_text().as_deref() == Some(arg) {
 +                return if local.ty(ctx.db) == derefed_ty {
 +                    if ty.is_mutable_reference() {
 +                        "&mut "
 +                    } else {
 +                        "&"
 +                    }
 +                } else {
 +                    ""
 +                };
 +            }
 +        }
 +    }
 +    ""
 +}
 +
 +fn detail(db: &dyn HirDatabase, func: hir::Function) -> String {
 +    let mut ret_ty = func.ret_type(db);
 +    let mut detail = String::new();
 +
 +    if func.is_const(db) {
 +        format_to!(detail, "const ");
 +    }
 +    if func.is_async(db) {
 +        format_to!(detail, "async ");
 +        if let Some(async_ret) = func.async_ret_type(db) {
 +            ret_ty = async_ret;
 +        }
 +    }
 +    if func.is_unsafe_to_call(db) {
 +        format_to!(detail, "unsafe ");
 +    }
 +
 +    format_to!(detail, "fn({})", params_display(db, func));
 +    if !ret_ty.is_unit() {
 +        format_to!(detail, " -> {}", ret_ty.display(db));
 +    }
 +    detail
 +}
 +
 +fn params_display(db: &dyn HirDatabase, func: hir::Function) -> String {
 +    if let Some(self_param) = func.self_param(db) {
 +        let assoc_fn_params = func.assoc_fn_params(db);
 +        let params = assoc_fn_params
 +            .iter()
 +            .skip(1) // skip the self param because we are manually handling that
 +            .map(|p| p.ty().display(db));
 +        format!(
 +            "{}{}",
 +            self_param.display(db),
 +            params.format_with("", |display, f| {
 +                f(&", ")?;
 +                f(&display)
 +            })
 +        )
 +    } else {
 +        let assoc_fn_params = func.assoc_fn_params(db);
 +        assoc_fn_params.iter().map(|p| p.ty().display(db)).join(", ")
 +    }
 +}
 +
 +fn params(
 +    ctx: &CompletionContext<'_>,
 +    func: hir::Function,
 +    func_kind: &FuncKind<'_>,
 +    has_dot_receiver: bool,
 +) -> Option<(Option<hir::SelfParam>, Vec<hir::Param>)> {
 +    if ctx.config.callable.is_none() {
 +        return None;
 +    }
 +
 +    // Don't add parentheses if the expected type is some function reference.
 +    if let Some(ty) = &ctx.expected_type {
 +        // FIXME: check signature matches?
 +        if ty.is_fn() {
 +            cov_mark::hit!(no_call_parens_if_fn_ptr_needed);
 +            return None;
 +        }
 +    }
 +
 +    let self_param = if has_dot_receiver || matches!(func_kind, FuncKind::Method(_, Some(_))) {
 +        None
 +    } else {
 +        func.self_param(ctx.db)
 +    };
 +    Some((self_param, func.params_without_self(ctx.db)))
 +}
 +
 +#[cfg(test)]
 +mod tests {
 +    use crate::{
 +        tests::{check_edit, check_edit_with_config, TEST_CONFIG},
 +        CallableSnippets, CompletionConfig,
 +    };
 +
 +    #[test]
 +    fn inserts_parens_for_function_calls() {
 +        cov_mark::check!(inserts_parens_for_function_calls);
 +        check_edit(
 +            "no_args",
 +            r#"
 +fn no_args() {}
 +fn main() { no_$0 }
 +"#,
 +            r#"
 +fn no_args() {}
 +fn main() { no_args()$0 }
 +"#,
 +        );
 +
 +        check_edit(
 +            "with_args",
 +            r#"
 +fn with_args(x: i32, y: String) {}
 +fn main() { with_$0 }
 +"#,
 +            r#"
 +fn with_args(x: i32, y: String) {}
 +fn main() { with_args(${1:x}, ${2:y})$0 }
 +"#,
 +        );
 +
 +        check_edit(
 +            "foo",
 +            r#"
 +struct S;
 +impl S {
 +    fn foo(&self) {}
 +}
 +fn bar(s: &S) { s.f$0 }
 +"#,
 +            r#"
 +struct S;
 +impl S {
 +    fn foo(&self) {}
 +}
 +fn bar(s: &S) { s.foo()$0 }
 +"#,
 +        );
 +
 +        check_edit(
 +            "foo",
 +            r#"
 +struct S {}
 +impl S {
 +    fn foo(&self, x: i32) {}
 +}
 +fn bar(s: &S) {
 +    s.f$0
 +}
 +"#,
 +            r#"
 +struct S {}
 +impl S {
 +    fn foo(&self, x: i32) {}
 +}
 +fn bar(s: &S) {
 +    s.foo(${1:x})$0
 +}
 +"#,
 +        );
 +
 +        check_edit(
 +            "foo",
 +            r#"
 +struct S {}
 +impl S {
 +    fn foo(&self, x: i32) {
 +        $0
 +    }
 +}
 +"#,
 +            r#"
 +struct S {}
 +impl S {
 +    fn foo(&self, x: i32) {
 +        self.foo(${1:x})$0
 +    }
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn parens_for_method_call_as_assoc_fn() {
 +        check_edit(
 +            "foo",
 +            r#"
 +struct S;
 +impl S {
 +    fn foo(&self) {}
 +}
 +fn main() { S::f$0 }
 +"#,
 +            r#"
 +struct S;
 +impl S {
 +    fn foo(&self) {}
 +}
 +fn main() { S::foo(${1:&self})$0 }
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn suppress_arg_snippets() {
 +        cov_mark::check!(suppress_arg_snippets);
 +        check_edit_with_config(
 +            CompletionConfig { callable: Some(CallableSnippets::AddParentheses), ..TEST_CONFIG },
 +            "with_args",
 +            r#"
 +fn with_args(x: i32, y: String) {}
 +fn main() { with_$0 }
 +"#,
 +            r#"
 +fn with_args(x: i32, y: String) {}
 +fn main() { with_args($0) }
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn strips_underscores_from_args() {
 +        check_edit(
 +            "foo",
 +            r#"
 +fn foo(_foo: i32, ___bar: bool, ho_ge_: String) {}
 +fn main() { f$0 }
 +"#,
 +            r#"
 +fn foo(_foo: i32, ___bar: bool, ho_ge_: String) {}
 +fn main() { foo(${1:foo}, ${2:bar}, ${3:ho_ge_})$0 }
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn insert_ref_when_matching_local_in_scope() {
 +        check_edit(
 +            "ref_arg",
 +            r#"
 +struct Foo {}
 +fn ref_arg(x: &Foo) {}
 +fn main() {
 +    let x = Foo {};
 +    ref_ar$0
 +}
 +"#,
 +            r#"
 +struct Foo {}
 +fn ref_arg(x: &Foo) {}
 +fn main() {
 +    let x = Foo {};
 +    ref_arg(${1:&x})$0
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn insert_mut_ref_when_matching_local_in_scope() {
 +        check_edit(
 +            "ref_arg",
 +            r#"
 +struct Foo {}
 +fn ref_arg(x: &mut Foo) {}
 +fn main() {
 +    let x = Foo {};
 +    ref_ar$0
 +}
 +"#,
 +            r#"
 +struct Foo {}
 +fn ref_arg(x: &mut Foo) {}
 +fn main() {
 +    let x = Foo {};
 +    ref_arg(${1:&mut x})$0
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn insert_ref_when_matching_local_in_scope_for_method() {
 +        check_edit(
 +            "apply_foo",
 +            r#"
 +struct Foo {}
 +struct Bar {}
 +impl Bar {
 +    fn apply_foo(&self, x: &Foo) {}
 +}
 +
 +fn main() {
 +    let x = Foo {};
 +    let y = Bar {};
 +    y.$0
 +}
 +"#,
 +            r#"
 +struct Foo {}
 +struct Bar {}
 +impl Bar {
 +    fn apply_foo(&self, x: &Foo) {}
 +}
 +
 +fn main() {
 +    let x = Foo {};
 +    let y = Bar {};
 +    y.apply_foo(${1:&x})$0
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn trim_mut_keyword_in_func_completion() {
 +        check_edit(
 +            "take_mutably",
 +            r#"
 +fn take_mutably(mut x: &i32) {}
 +
 +fn main() {
 +    take_m$0
 +}
 +"#,
 +            r#"
 +fn take_mutably(mut x: &i32) {}
 +
 +fn main() {
 +    take_mutably(${1:x})$0
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn complete_pattern_args_with_type_name_if_adt() {
 +        check_edit(
 +            "qux",
 +            r#"
 +struct Foo {
 +    bar: i32
 +}
 +
 +fn qux(Foo { bar }: Foo) {
 +    println!("{}", bar);
 +}
 +
 +fn main() {
 +  qu$0
 +}
 +"#,
 +            r#"
 +struct Foo {
 +    bar: i32
 +}
 +
 +fn qux(Foo { bar }: Foo) {
 +    println!("{}", bar);
 +}
 +
 +fn main() {
 +  qux(${1:foo})$0
 +}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn complete_fn_param() {
 +        // has mut kw
 +        check_edit(
 +            "mut bar: u32",
 +            r#"
 +fn f(foo: (), mut bar: u32) {}
 +fn g(foo: (), mut ba$0)
 +"#,
 +            r#"
 +fn f(foo: (), mut bar: u32) {}
 +fn g(foo: (), mut bar: u32)
 +"#,
 +        );
 +
 +        // has type param
 +        check_edit(
 +            "mut bar: u32",
 +            r#"
 +fn g(foo: (), mut ba$0: u32)
 +fn f(foo: (), mut bar: u32) {}
 +"#,
 +            r#"
 +fn g(foo: (), mut bar: u32)
 +fn f(foo: (), mut bar: u32) {}
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn complete_fn_mut_param_add_comma() {
 +        // add leading and trailing comma
 +        check_edit(
 +            ", mut bar: u32,",
 +            r#"
 +fn f(foo: (), mut bar: u32) {}
 +fn g(foo: ()mut ba$0 baz: ())
 +"#,
 +            r#"
 +fn f(foo: (), mut bar: u32) {}
 +fn g(foo: (), mut bar: u32, baz: ())
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn complete_fn_mut_param_has_attribute() {
 +        check_edit(
 +            r#"#[baz = "qux"] mut bar: u32"#,
 +            r#"
 +fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
 +fn g(foo: (), mut ba$0)
 +"#,
 +            r#"
 +fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
 +fn g(foo: (), #[baz = "qux"] mut bar: u32)
 +"#,
 +        );
 +
 +        check_edit(
 +            r#"#[baz = "qux"] mut bar: u32"#,
 +            r#"
 +fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
 +fn g(foo: (), #[baz = "qux"] mut ba$0)
 +"#,
 +            r#"
 +fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
 +fn g(foo: (), #[baz = "qux"] mut bar: u32)
 +"#,
 +        );
 +
 +        check_edit(
 +            r#", #[baz = "qux"] mut bar: u32"#,
 +            r#"
 +fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
 +fn g(foo: ()#[baz = "qux"] mut ba$0)
 +"#,
 +            r#"
 +fn f(foo: (), #[baz = "qux"] mut bar: u32) {}
 +fn g(foo: (), #[baz = "qux"] mut bar: u32)
 +"#,
 +        );
 +    }
 +}
index af9c88a7e0a6cc87cbcb1f11ea22e9f4c207238b,0000000000000000000000000000000000000000..0c791ac570c566f95f511c1a94805ffe03b96af1
mode 100644,000000..100644
--- /dev/null
@@@ -1,195 -1,0 +1,193 @@@
- use syntax::AstNode;
 +//! Renderer for `enum` variants.
 +
 +use hir::{db::HirDatabase, Documentation, HasAttrs, StructKind};
 +use ide_db::SymbolKind;
-         compute_ref_match, compute_type_match,
 +
 +use crate::{
 +    context::{CompletionContext, PathCompletionCtx, PathKind},
 +    item::{Builder, CompletionItem},
 +    render::{
-     if let Some(ref_match) = compute_ref_match(completion, &ty) {
-         item.ref_match(ref_match, path_ctx.path.syntax().text_range().start());
-     }
++        compute_type_match,
 +        variant::{
 +            format_literal_label, format_literal_lookup, render_record_lit, render_tuple_lit,
 +            visible_fields, RenderedLiteral,
 +        },
 +        RenderContext,
 +    },
 +    CompletionItemKind, CompletionRelevance,
 +};
 +
 +pub(crate) fn render_variant_lit(
 +    ctx: RenderContext<'_>,
 +    path_ctx: &PathCompletionCtx,
 +    local_name: Option<hir::Name>,
 +    variant: hir::Variant,
 +    path: Option<hir::ModPath>,
 +) -> Option<Builder> {
 +    let _p = profile::span("render_enum_variant");
 +    let db = ctx.db();
 +
 +    let name = local_name.unwrap_or_else(|| variant.name(db));
 +    render(ctx, path_ctx, Variant::EnumVariant(variant), name, path)
 +}
 +
 +pub(crate) fn render_struct_literal(
 +    ctx: RenderContext<'_>,
 +    path_ctx: &PathCompletionCtx,
 +    strukt: hir::Struct,
 +    path: Option<hir::ModPath>,
 +    local_name: Option<hir::Name>,
 +) -> Option<Builder> {
 +    let _p = profile::span("render_struct_literal");
 +    let db = ctx.db();
 +
 +    let name = local_name.unwrap_or_else(|| strukt.name(db));
 +    render(ctx, path_ctx, Variant::Struct(strukt), name, path)
 +}
 +
 +fn render(
 +    ctx @ RenderContext { completion, .. }: RenderContext<'_>,
 +    path_ctx: &PathCompletionCtx,
 +    thing: Variant,
 +    name: hir::Name,
 +    path: Option<hir::ModPath>,
 +) -> Option<Builder> {
 +    let db = completion.db;
 +    let mut kind = thing.kind(db);
 +    let should_add_parens = match &path_ctx {
 +        PathCompletionCtx { has_call_parens: true, .. } => false,
 +        PathCompletionCtx { kind: PathKind::Use | PathKind::Type { .. }, .. } => false,
 +        _ => true,
 +    };
 +
 +    let fields = thing.fields(completion)?;
 +    let (qualified_name, short_qualified_name, qualified) = match path {
 +        Some(path) => {
 +            let short = hir::ModPath::from_segments(
 +                hir::PathKind::Plain,
 +                path.segments().iter().skip(path.segments().len().saturating_sub(2)).cloned(),
 +            );
 +            (path, short, true)
 +        }
 +        None => (name.clone().into(), name.into(), false),
 +    };
 +    let (qualified_name, escaped_qualified_name) =
 +        (qualified_name.unescaped().to_string(), qualified_name.to_string());
 +    let snippet_cap = ctx.snippet_cap();
 +
 +    let mut rendered = match kind {
 +        StructKind::Tuple if should_add_parens => {
 +            render_tuple_lit(db, snippet_cap, &fields, &escaped_qualified_name)
 +        }
 +        StructKind::Record if should_add_parens => {
 +            render_record_lit(db, snippet_cap, &fields, &escaped_qualified_name)
 +        }
 +        _ => RenderedLiteral {
 +            literal: escaped_qualified_name.clone(),
 +            detail: escaped_qualified_name.clone(),
 +        },
 +    };
 +
 +    if snippet_cap.is_some() {
 +        rendered.literal.push_str("$0");
 +    }
 +
 +    // only show name in label if not adding parens
 +    if !should_add_parens {
 +        kind = StructKind::Unit;
 +    }
 +    let label = format_literal_label(&qualified_name, kind);
 +    let lookup = if qualified {
 +        format_literal_lookup(&short_qualified_name.to_string(), kind)
 +    } else {
 +        format_literal_lookup(&qualified_name, kind)
 +    };
 +
 +    let mut item = CompletionItem::new(
 +        CompletionItemKind::SymbolKind(thing.symbol_kind()),
 +        ctx.source_range(),
 +        label,
 +    );
 +
 +    item.lookup_by(lookup);
 +    item.detail(rendered.detail);
 +
 +    match snippet_cap {
 +        Some(snippet_cap) => item.insert_snippet(snippet_cap, rendered.literal),
 +        None => item.insert_text(rendered.literal),
 +    };
 +
 +    item.set_documentation(thing.docs(db)).set_deprecated(thing.is_deprecated(&ctx));
 +
 +    let ty = thing.ty(db);
 +    item.set_relevance(CompletionRelevance {
 +        type_match: compute_type_match(ctx.completion, &ty),
 +        ..ctx.completion_relevance()
 +    });
++
++    super::path_ref_match(completion, path_ctx, &ty, &mut item);
 +
 +    if let Some(import_to_add) = ctx.import_to_add {
 +        item.add_import(import_to_add);
 +    }
 +    Some(item)
 +}
 +
 +#[derive(Clone, Copy)]
 +enum Variant {
 +    Struct(hir::Struct),
 +    EnumVariant(hir::Variant),
 +}
 +
 +impl Variant {
 +    fn fields(self, ctx: &CompletionContext<'_>) -> Option<Vec<hir::Field>> {
 +        let fields = match self {
 +            Variant::Struct(it) => it.fields(ctx.db),
 +            Variant::EnumVariant(it) => it.fields(ctx.db),
 +        };
 +        let (visible_fields, fields_omitted) = match self {
 +            Variant::Struct(it) => visible_fields(ctx, &fields, it)?,
 +            Variant::EnumVariant(it) => visible_fields(ctx, &fields, it)?,
 +        };
 +        if !fields_omitted {
 +            Some(visible_fields)
 +        } else {
 +            None
 +        }
 +    }
 +
 +    fn kind(self, db: &dyn HirDatabase) -> StructKind {
 +        match self {
 +            Variant::Struct(it) => it.kind(db),
 +            Variant::EnumVariant(it) => it.kind(db),
 +        }
 +    }
 +
 +    fn symbol_kind(self) -> SymbolKind {
 +        match self {
 +            Variant::Struct(_) => SymbolKind::Struct,
 +            Variant::EnumVariant(_) => SymbolKind::Variant,
 +        }
 +    }
 +
 +    fn docs(self, db: &dyn HirDatabase) -> Option<Documentation> {
 +        match self {
 +            Variant::Struct(it) => it.docs(db),
 +            Variant::EnumVariant(it) => it.docs(db),
 +        }
 +    }
 +
 +    fn is_deprecated(self, ctx: &RenderContext<'_>) -> bool {
 +        match self {
 +            Variant::Struct(it) => ctx.is_deprecated(it),
 +            Variant::EnumVariant(it) => ctx.is_deprecated(it),
 +        }
 +    }
 +
 +    fn ty(self, db: &dyn HirDatabase) -> hir::Type {
 +        match self {
 +            Variant::Struct(it) => it.ty(db),
 +            Variant::EnumVariant(it) => it.parent_enum(db).ty(db),
 +        }
 +    }
 +}
index 966bba616f6277aa3a97d9dbbec808927fce05b8,0000000000000000000000000000000000000000..1ec62a8425a39b3fcb5f1fb6287047e7cf1b4002
mode 100644,000000..100644
--- /dev/null
@@@ -1,246 -1,0 +1,247 @@@
-     fn relevant_crates(&self, file_id: FileId) -> Arc<FxHashSet<CrateId>> {
 +//! This crate defines the core data structure representing IDE state -- `RootDatabase`.
 +//!
 +//! It is mainly a `HirDatabase` for semantic analysis, plus a `SymbolsDatabase`, for fuzzy search.
 +
 +#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
 +
 +mod apply_change;
 +
 +pub mod active_parameter;
 +pub mod assists;
 +pub mod defs;
 +pub mod famous_defs;
 +pub mod helpers;
 +pub mod items_locator;
 +pub mod label;
 +pub mod line_index;
 +pub mod path_transform;
 +pub mod rename;
 +pub mod rust_doc;
 +pub mod search;
 +pub mod source_change;
 +pub mod symbol_index;
 +pub mod traits;
 +pub mod ty_filter;
 +pub mod use_trivial_contructor;
 +
 +pub mod imports {
 +    pub mod import_assets;
 +    pub mod insert_use;
 +    pub mod merge_imports;
 +}
 +
 +pub mod generated {
 +    pub mod lints;
 +}
 +
 +pub mod syntax_helpers {
 +    pub mod node_ext;
 +    pub mod insert_whitespace_into_node;
 +    pub mod format_string;
 +
 +    pub use parser::LexedStr;
 +}
 +
 +use std::{fmt, mem::ManuallyDrop, sync::Arc};
 +
 +use base_db::{
 +    salsa::{self, Durability},
 +    AnchoredPath, CrateId, FileId, FileLoader, FileLoaderDelegate, SourceDatabase, Upcast,
 +};
 +use hir::{
 +    db::{AstDatabase, DefDatabase, HirDatabase},
 +    symbols::FileSymbolKind,
 +};
++use stdx::hash::NoHashHashSet;
 +
 +use crate::{line_index::LineIndex, symbol_index::SymbolsDatabase};
 +pub use rustc_hash::{FxHashMap, FxHashSet, FxHasher};
 +
 +/// `base_db` is normally also needed in places where `ide_db` is used, so this re-export is for convenience.
 +pub use base_db;
 +
 +pub type FxIndexSet<T> = indexmap::IndexSet<T, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
 +pub type FxIndexMap<K, V> =
 +    indexmap::IndexMap<K, V, std::hash::BuildHasherDefault<rustc_hash::FxHasher>>;
 +
 +#[salsa::database(
 +    base_db::SourceDatabaseExtStorage,
 +    base_db::SourceDatabaseStorage,
 +    hir::db::AstDatabaseStorage,
 +    hir::db::DefDatabaseStorage,
 +    hir::db::HirDatabaseStorage,
 +    hir::db::InternDatabaseStorage,
 +    LineIndexDatabaseStorage,
 +    symbol_index::SymbolsDatabaseStorage
 +)]
 +pub struct RootDatabase {
 +    // We use `ManuallyDrop` here because every codegen unit that contains a
 +    // `&RootDatabase -> &dyn OtherDatabase` cast will instantiate its drop glue in the vtable,
 +    // which duplicates `Weak::drop` and `Arc::drop` tens of thousands of times, which makes
 +    // compile times of all `ide_*` and downstream crates suffer greatly.
 +    storage: ManuallyDrop<salsa::Storage<RootDatabase>>,
 +}
 +
 +impl Drop for RootDatabase {
 +    fn drop(&mut self) {
 +        unsafe { ManuallyDrop::drop(&mut self.storage) };
 +    }
 +}
 +
 +impl fmt::Debug for RootDatabase {
 +    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 +        f.debug_struct("RootDatabase").finish()
 +    }
 +}
 +
 +impl Upcast<dyn AstDatabase> for RootDatabase {
 +    fn upcast(&self) -> &(dyn AstDatabase + 'static) {
 +        &*self
 +    }
 +}
 +
 +impl Upcast<dyn DefDatabase> for RootDatabase {
 +    fn upcast(&self) -> &(dyn DefDatabase + 'static) {
 +        &*self
 +    }
 +}
 +
 +impl Upcast<dyn HirDatabase> for RootDatabase {
 +    fn upcast(&self) -> &(dyn HirDatabase + 'static) {
 +        &*self
 +    }
 +}
 +
 +impl FileLoader for RootDatabase {
 +    fn file_text(&self, file_id: FileId) -> Arc<String> {
 +        FileLoaderDelegate(self).file_text(file_id)
 +    }
 +    fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
 +        FileLoaderDelegate(self).resolve_path(path)
 +    }
++    fn relevant_crates(&self, file_id: FileId) -> Arc<NoHashHashSet<CrateId>> {
 +        FileLoaderDelegate(self).relevant_crates(file_id)
 +    }
 +}
 +
 +impl salsa::Database for RootDatabase {}
 +
 +impl Default for RootDatabase {
 +    fn default() -> RootDatabase {
 +        RootDatabase::new(None)
 +    }
 +}
 +
 +impl RootDatabase {
 +    pub fn new(lru_capacity: Option<usize>) -> RootDatabase {
 +        let mut db = RootDatabase { storage: ManuallyDrop::new(salsa::Storage::default()) };
 +        db.set_crate_graph_with_durability(Default::default(), Durability::HIGH);
 +        db.set_local_roots_with_durability(Default::default(), Durability::HIGH);
 +        db.set_library_roots_with_durability(Default::default(), Durability::HIGH);
 +        db.set_enable_proc_attr_macros(false);
 +        db.update_lru_capacity(lru_capacity);
 +        db
 +    }
 +
 +    pub fn update_lru_capacity(&mut self, lru_capacity: Option<usize>) {
 +        let lru_capacity = lru_capacity.unwrap_or(base_db::DEFAULT_LRU_CAP);
 +        base_db::ParseQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
 +        hir::db::ParseMacroExpansionQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
 +        hir::db::MacroExpandQuery.in_db_mut(self).set_lru_capacity(lru_capacity);
 +    }
 +}
 +
 +impl salsa::ParallelDatabase for RootDatabase {
 +    fn snapshot(&self) -> salsa::Snapshot<RootDatabase> {
 +        salsa::Snapshot::new(RootDatabase { storage: ManuallyDrop::new(self.storage.snapshot()) })
 +    }
 +}
 +
 +#[salsa::query_group(LineIndexDatabaseStorage)]
 +pub trait LineIndexDatabase: base_db::SourceDatabase {
 +    fn line_index(&self, file_id: FileId) -> Arc<LineIndex>;
 +}
 +
 +fn line_index(db: &dyn LineIndexDatabase, file_id: FileId) -> Arc<LineIndex> {
 +    let text = db.file_text(file_id);
 +    Arc::new(LineIndex::new(&*text))
 +}
 +
 +#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
 +pub enum SymbolKind {
 +    Attribute,
 +    BuiltinAttr,
 +    Const,
 +    ConstParam,
 +    Derive,
 +    DeriveHelper,
 +    Enum,
 +    Field,
 +    Function,
 +    Impl,
 +    Label,
 +    LifetimeParam,
 +    Local,
 +    Macro,
 +    Module,
 +    SelfParam,
 +    SelfType,
 +    Static,
 +    Struct,
 +    ToolModule,
 +    Trait,
 +    TypeAlias,
 +    TypeParam,
 +    Union,
 +    ValueParam,
 +    Variant,
 +}
 +
 +impl From<hir::MacroKind> for SymbolKind {
 +    fn from(it: hir::MacroKind) -> Self {
 +        match it {
 +            hir::MacroKind::Declarative | hir::MacroKind::BuiltIn | hir::MacroKind::ProcMacro => {
 +                SymbolKind::Macro
 +            }
 +            hir::MacroKind::Derive => SymbolKind::Derive,
 +            hir::MacroKind::Attr => SymbolKind::Attribute,
 +        }
 +    }
 +}
 +
 +impl From<FileSymbolKind> for SymbolKind {
 +    fn from(it: FileSymbolKind) -> Self {
 +        match it {
 +            FileSymbolKind::Const => SymbolKind::Const,
 +            FileSymbolKind::Enum => SymbolKind::Enum,
 +            FileSymbolKind::Function => SymbolKind::Function,
 +            FileSymbolKind::Macro => SymbolKind::Macro,
 +            FileSymbolKind::Module => SymbolKind::Module,
 +            FileSymbolKind::Static => SymbolKind::Static,
 +            FileSymbolKind::Struct => SymbolKind::Struct,
 +            FileSymbolKind::Trait => SymbolKind::Trait,
 +            FileSymbolKind::TypeAlias => SymbolKind::TypeAlias,
 +            FileSymbolKind::Union => SymbolKind::Union,
 +        }
 +    }
 +}
 +
 +#[derive(Clone, Copy, Debug, PartialEq, Eq)]
 +pub struct SnippetCap {
 +    _private: (),
 +}
 +
 +impl SnippetCap {
 +    pub const fn new(allow_snippets: bool) -> Option<SnippetCap> {
 +        if allow_snippets {
 +            Some(SnippetCap { _private: () })
 +        } else {
 +            None
 +        }
 +    }
 +}
 +
 +#[cfg(test)]
 +mod tests {
 +    mod sourcegen_lints;
 +}
index 68ad07ee83fde307cbf5f754fae1872a48063fcb,0000000000000000000000000000000000000000..75d49ff2fd77fc040779ee51238811c3b4348777
mode 100644,000000..100644
--- /dev/null
@@@ -1,300 -1,0 +1,300 @@@
- use rustc_hash::FxHashMap;
 +//! `LineIndex` maps flat `TextSize` offsets into `(Line, Column)`
 +//! representation.
 +use std::{iter, mem};
 +
-     pub(crate) utf16_lines: FxHashMap<u32, Vec<Utf16Char>>,
++use stdx::hash::NoHashHashMap;
 +use syntax::{TextRange, TextSize};
 +
/// Pre-computed line/column index for one file's text.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct LineIndex {
    /// Offset of the beginning of each line, zero-based
    pub(crate) newlines: Vec<TextSize>,
    /// List of non-ASCII characters on each line, keyed by zero-based line
    /// number; lines that are pure ASCII have no entry.
    pub(crate) utf16_lines: NoHashHashMap<u32, Vec<Utf16Char>>,
}
 +
/// A line/column position where `col` counts UTF-16 code units.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct LineColUtf16 {
    /// Zero-based
    pub line: u32,
    /// Zero-based
    pub col: u32,
}
 +
/// A line/column position where `col` is a UTF-8 byte offset within the line.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct LineCol {
    /// Zero-based
    pub line: u32,
    /// Zero-based utf8 offset
    pub col: u32,
}
 +
/// One non-ASCII character, recorded by its byte range within its line.
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub(crate) struct Utf16Char {
    /// Start offset of a character inside a line, zero-based
    pub(crate) start: TextSize,
    /// End offset of a character inside a line, zero-based
    pub(crate) end: TextSize,
}
 +
 +impl Utf16Char {
 +    /// Returns the length in 8-bit UTF-8 code units.
 +    fn len(&self) -> TextSize {
 +        self.end - self.start
 +    }
 +
 +    /// Returns the length in 16-bit UTF-16 code units.
 +    fn len_utf16(&self) -> usize {
 +        if self.len() == TextSize::from(4) {
 +            2
 +        } else {
 +            1
 +        }
 +    }
 +}
 +
impl LineIndex {
    /// Builds the index by scanning `text` once, recording every newline
    /// offset and, per line, the byte ranges of all non-ASCII characters.
    pub fn new(text: &str) -> LineIndex {
        let mut utf16_lines = NoHashHashMap::default();
        let mut utf16_chars = Vec::new();

        // `newlines[i]` is the offset at which line `i` starts; line 0
        // always starts at offset 0.
        let mut newlines = vec![0.into()];
        // `curr_row` tracks the absolute offset, `curr_col` the offset
        // within the current line.
        let mut curr_row @ mut curr_col = 0.into();
        let mut line = 0;
        for c in text.chars() {
            let c_len = TextSize::of(c);
            curr_row += c_len;
            if c == '\n' {
                newlines.push(curr_row);

                // Save any utf-16 characters seen in the previous line
                if !utf16_chars.is_empty() {
                    utf16_lines.insert(line, mem::take(&mut utf16_chars));
                }

                // Prepare for processing the next line
                curr_col = 0.into();
                line += 1;
                continue;
            }

            if !c.is_ascii() {
                utf16_chars.push(Utf16Char { start: curr_col, end: curr_col + c_len });
            }

            curr_col += c_len;
        }

        // Save any utf-16 characters seen in the last line
        if !utf16_chars.is_empty() {
            utf16_lines.insert(line, utf16_chars);
        }

        LineIndex { newlines, utf16_lines }
    }

    /// Maps a byte offset to its (line, UTF-8 column) position.
    pub fn line_col(&self, offset: TextSize) -> LineCol {
        // `partition_point` yields the first line starting *after* `offset`;
        // the containing line is the one just before it.
        let line = self.newlines.partition_point(|&it| it <= offset) - 1;
        let line_start_offset = self.newlines[line];
        let col = offset - line_start_offset;
        LineCol { line: line as u32, col: col.into() }
    }

    /// Maps a (line, UTF-8 column) position back to a byte offset; `None`
    /// when the line number is out of range.
    /// NOTE(review): the column is not validated against the line's length.
    pub fn offset(&self, line_col: LineCol) -> Option<TextSize> {
        self.newlines
            .get(line_col.line as usize)
            .map(|offset| offset + TextSize::from(line_col.col))
    }

    /// Converts a UTF-8 column position to its UTF-16 equivalent.
    pub fn to_utf16(&self, line_col: LineCol) -> LineColUtf16 {
        let col = self.utf8_to_utf16_col(line_col.line, line_col.col.into());
        LineColUtf16 { line: line_col.line, col: col as u32 }
    }

    /// Converts a UTF-16 column position to its UTF-8 equivalent.
    pub fn to_utf8(&self, line_col: LineColUtf16) -> LineCol {
        let col = self.utf16_to_utf8_col(line_col.line, line_col.col);
        LineCol { line: line_col.line, col: col.into() }
    }

    /// Splits `range` at line boundaries, yielding one non-empty sub-range
    /// per (partial) line it covers.
    pub fn lines(&self, range: TextRange) -> impl Iterator<Item = TextRange> + '_ {
        let lo = self.newlines.partition_point(|&it| it < range.start());
        let hi = self.newlines.partition_point(|&it| it <= range.end());
        // Boundary points: range start, every newline inside, range end;
        // adjacent pairs form the sub-ranges.
        let all = iter::once(range.start())
            .chain(self.newlines[lo..hi].iter().copied())
            .chain(iter::once(range.end()));

        all.clone()
            .zip(all.skip(1))
            .map(|(lo, hi)| TextRange::new(lo, hi))
            .filter(|it| !it.is_empty())
    }

    fn utf8_to_utf16_col(&self, line: u32, col: TextSize) -> usize {
        let mut res: usize = col.into();
        if let Some(utf16_chars) = self.utf16_lines.get(&line) {
            for c in utf16_chars {
                if c.end <= col {
                    // Each wide character before `col` shrinks the column by
                    // the difference between its UTF-8 and UTF-16 widths.
                    res -= usize::from(c.len()) - c.len_utf16();
                } else {
                    // From here on, all utf16 characters come *after* the character we are mapping,
                    // so we don't need to take them into account
                    break;
                }
            }
        }
        res
    }

    fn utf16_to_utf8_col(&self, line: u32, mut col: u32) -> TextSize {
        if let Some(utf16_chars) = self.utf16_lines.get(&line) {
            for c in utf16_chars {
                if col > u32::from(c.start) {
                    // Inverse of `utf8_to_utf16_col`: widen the column by the
                    // UTF-8/UTF-16 width difference of each preceding wide char.
                    col += u32::from(c.len()) - c.len_utf16() as u32;
                } else {
                    // From here on, all utf16 characters come *after* the character we are mapping,
                    // so we don't need to take them into account
                    break;
                }
            }
        }

        col.into()
    }
}
 +
#[cfg(test)]
mod tests {
    // Note: all integer offsets below are byte offsets; the multi-line
    // string fixtures deliberately start with a leading newline so the
    // interesting content sits on line 1.
    use super::*;

    #[test]
    fn test_line_index() {
        let text = "hello\nworld";
        let table = [
            (00, 0, 0),
            (01, 0, 1),
            (05, 0, 5),
            (06, 1, 0),
            (07, 1, 1),
            (08, 1, 2),
            (10, 1, 4),
            (11, 1, 5),
            (12, 1, 6),
        ];

        let index = LineIndex::new(text);
        for &(offset, line, col) in &table {
            assert_eq!(index.line_col(offset.into()), LineCol { line, col });
        }

        let text = "\nhello\nworld";
        let table = [(0, 0, 0), (1, 1, 0), (2, 1, 1), (6, 1, 5), (7, 2, 0)];
        let index = LineIndex::new(text);
        for &(offset, line, col) in &table {
            assert_eq!(index.line_col(offset.into()), LineCol { line, col });
        }
    }

    #[test]
    fn test_char_len() {
        // Sanity-check the UTF-8/UTF-16 width assumption used by `Utf16Char`.
        assert_eq!('メ'.len_utf8(), 3);
        assert_eq!('メ'.len_utf16(), 1);
    }

    #[test]
    fn test_empty_index() {
        // Pure-ASCII text must not allocate any utf16 line entries.
        let col_index = LineIndex::new(
            "
const C: char = 'x';
",
        );
        assert_eq!(col_index.utf16_lines.len(), 0);
    }

    #[test]
    fn test_single_char() {
        let col_index = LineIndex::new(
            "
const C: char = 'メ';
",
        );

        assert_eq!(col_index.utf16_lines.len(), 1);
        assert_eq!(col_index.utf16_lines[&1].len(), 1);
        assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });

        // UTF-8 to UTF-16, no changes
        assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);

        // UTF-8 to UTF-16
        assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20);

        // UTF-16 to UTF-8, no changes
        assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));

        // UTF-16 to UTF-8
        assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21));

        // 𐐏 is outside the BMP: 4 bytes in UTF-8, a surrogate pair in UTF-16.
        let col_index = LineIndex::new("a𐐏b");
        assert_eq!(col_index.utf16_to_utf8_col(0, 3), TextSize::from(5));
    }

    #[test]
    fn test_string() {
        let col_index = LineIndex::new(
            "
const C: char = \"メ メ\";
",
        );

        assert_eq!(col_index.utf16_lines.len(), 1);
        assert_eq!(col_index.utf16_lines[&1].len(), 2);
        assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });
        assert_eq!(col_index.utf16_lines[&1][1], Utf16Char { start: 21.into(), end: 24.into() });

        // UTF-8 to UTF-16
        assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);

        assert_eq!(col_index.utf8_to_utf16_col(1, 21.into()), 19);
        assert_eq!(col_index.utf8_to_utf16_col(1, 25.into()), 21);

        // Lines with no utf16 entry pass columns through unchanged.
        assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15);

        // UTF-16 to UTF-8
        assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));

        // メ UTF-8: 0xE3 0x83 0xA1, UTF-16: 0x30E1
        assert_eq!(col_index.utf16_to_utf8_col(1, 17), TextSize::from(17)); // first メ at 17..20
        assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextSize::from(20)); // space
        assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21)); // second メ at 21..24

        assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from(15));
    }

    #[test]
    fn test_splitlines() {
        fn r(lo: u32, hi: u32) -> TextRange {
            TextRange::new(lo.into(), hi.into())
        }

        let text = "a\nbb\nccc\n";
        let line_index = LineIndex::new(text);

        let actual = line_index.lines(r(0, 9)).collect::<Vec<_>>();
        let expected = vec![r(0, 2), r(2, 5), r(5, 9)];
        assert_eq!(actual, expected);

        let text = "";
        let line_index = LineIndex::new(text);

        // An empty range yields no lines.
        let actual = line_index.lines(r(0, 0)).collect::<Vec<_>>();
        let expected = vec![];
        assert_eq!(actual, expected);

        let text = "\n";
        let line_index = LineIndex::new(text);

        let actual = line_index.lines(r(0, 1)).collect::<Vec<_>>();
        let expected = vec![r(0, 1)];
        assert_eq!(actual, expected)
    }
}
index 2f4aa113170a6f6865a3b6e560db8aa2727cfcce,0000000000000000000000000000000000000000..7deffe8e0f637917c2ab05cac45ee069037caf69
mode 100644,000000..100644
--- /dev/null
@@@ -1,787 -1,0 +1,785 @@@
- use rustc_hash::FxHashMap;
 +//! Implementation of find-usages functionality.
 +//!
 +//! It is based on the standard ide trick: first, we run a fast text search to
 +//! get a super-set of matches. Then, we confirm each match using precise
 +//! name resolution.
 +
 +use std::{mem, sync::Arc};
 +
 +use base_db::{FileId, FileRange, SourceDatabase, SourceDatabaseExt};
 +use hir::{DefWithBody, HasAttrs, HasSource, InFile, ModuleSource, Semantics, Visibility};
 +use once_cell::unsync::Lazy;
-     pub references: FxHashMap<FileId, Vec<FileReference>>,
++use stdx::hash::NoHashHashMap;
 +use syntax::{ast, match_ast, AstNode, TextRange, TextSize};
 +
 +use crate::{
 +    defs::{Definition, NameClass, NameRefClass},
 +    traits::{as_trait_assoc_def, convert_to_def_in_trait},
 +    RootDatabase,
 +};
 +
/// Result of a find-usages search: all found references, grouped by file.
#[derive(Debug, Default, Clone)]
pub struct UsageSearchResult {
    pub references: NoHashHashMap<FileId, Vec<FileReference>>,
}
 +
 +impl UsageSearchResult {
 +    pub fn is_empty(&self) -> bool {
 +        self.references.is_empty()
 +    }
 +
 +    pub fn len(&self) -> usize {
 +        self.references.len()
 +    }
 +
 +    pub fn iter(&self) -> impl Iterator<Item = (&FileId, &[FileReference])> + '_ {
 +        self.references.iter().map(|(file_id, refs)| (file_id, &**refs))
 +    }
 +
 +    pub fn file_ranges(&self) -> impl Iterator<Item = FileRange> + '_ {
 +        self.references.iter().flat_map(|(&file_id, refs)| {
 +            refs.iter().map(move |&FileReference { range, .. }| FileRange { file_id, range })
 +        })
 +    }
 +}
 +
// Consumes the result, yielding `(file, references)` pairs in arbitrary
// (hash-map) order.
impl IntoIterator for UsageSearchResult {
    type Item = (FileId, Vec<FileReference>);
    type IntoIter = <NoHashHashMap<FileId, Vec<FileReference>> as IntoIterator>::IntoIter;

    fn into_iter(self) -> Self::IntoIter {
        self.references.into_iter()
    }
}
 +
/// A single reference to the searched-for definition.
#[derive(Debug, Clone)]
pub struct FileReference {
    /// The range of the reference in the original file
    pub range: TextRange,
    /// The node of the reference in the (macro-)file
    pub name: ast::NameLike,
    /// Read/write classification of the usage, when it could be determined.
    pub category: Option<ReferenceCategory>,
}
 +
/// How a reference uses the definition: write access or read access.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum ReferenceCategory {
    // FIXME: Add this variant and delete the `retain_adt_literal_usages` function.
    // Create
    Write,
    Read,
    // FIXME: Some day should be able to search in doc comments. Would probably
    // need to switch from enum to bitflags then?
    // DocComment
}
 +
/// Generally, `search_scope` returns files that might contain references for the element.
/// For `pub(crate)` things it's a crate, for `pub` things it's a crate and dependent crates.
/// In some cases, the location of the references is known to within a `TextRange`,
/// e.g. for things like local variables.
#[derive(Clone, Debug)]
pub struct SearchScope {
    /// Files in scope; a `None` range means "search the whole file".
    entries: NoHashHashMap<FileId, Option<TextRange>>,
}
 +
impl SearchScope {
    fn new(entries: NoHashHashMap<FileId, Option<TextRange>>) -> SearchScope {
        SearchScope { entries }
    }

    /// Build a search scope spanning the entire crate graph of files.
    fn crate_graph(db: &RootDatabase) -> SearchScope {
        let mut entries = NoHashHashMap::default();

        // Collect every file of every crate's source root, whole-file scoped.
        let graph = db.crate_graph();
        for krate in graph.iter() {
            let root_file = graph[krate].root_file_id;
            let source_root_id = db.file_source_root(root_file);
            let source_root = db.source_root(source_root_id);
            entries.extend(source_root.iter().map(|id| (id, None)));
        }
        SearchScope { entries }
    }

    /// Build a search scope spanning all the reverse dependencies of the given crate.
    fn reverse_dependencies(db: &RootDatabase, of: hir::Crate) -> SearchScope {
        let mut entries = NoHashHashMap::default();
        for rev_dep in of.transitive_reverse_dependencies(db) {
            let root_file = rev_dep.root_file(db);
            let source_root_id = db.file_source_root(root_file);
            let source_root = db.source_root(source_root_id);
            entries.extend(source_root.iter().map(|id| (id, None)));
        }
        SearchScope { entries }
    }

    /// Build a search scope spanning the given crate.
    fn krate(db: &RootDatabase, of: hir::Crate) -> SearchScope {
        let root_file = of.root_file(db);
        let source_root_id = db.file_source_root(root_file);
        let source_root = db.source_root(source_root_id);
        SearchScope { entries: source_root.iter().map(|id| (id, None)).collect() }
    }

    /// Build a search scope spanning the given module and all its submodules.
    fn module_and_children(db: &RootDatabase, module: hir::Module) -> SearchScope {
        let mut entries = NoHashHashMap::default();

        // The module itself may be restricted to a sub-range of its file
        // (inline `mod` / block) or mapped back to a macro call site.
        let (file_id, range) = {
            let InFile { file_id, value } = module.definition_source(db);
            if let Some((file_id, call_source)) = file_id.original_call_node(db) {
                (file_id, Some(call_source.text_range()))
            } else {
                (
                    file_id.original_file(db),
                    match value {
                        ModuleSource::SourceFile(_) => None,
                        ModuleSource::Module(it) => Some(it.syntax().text_range()),
                        ModuleSource::BlockExpr(it) => Some(it.syntax().text_range()),
                    },
                )
            }
        };
        entries.insert(file_id, range);

        // Walk all descendant modules; only file-backed children contribute
        // additional whole files to the scope.
        let mut to_visit: Vec<_> = module.children(db).collect();
        while let Some(module) = to_visit.pop() {
            if let InFile { file_id, value: ModuleSource::SourceFile(_) } =
                module.definition_source(db)
            {
                entries.insert(file_id.original_file(db), None);
            }
            to_visit.extend(module.children(db));
        }
        SearchScope { entries }
    }

    /// Build an empty search scope.
    pub fn empty() -> SearchScope {
        SearchScope::new(NoHashHashMap::default())
    }

    /// Build a search scope spanning the whole of the given file.
    pub fn single_file(file: FileId) -> SearchScope {
        SearchScope::new(std::iter::once((file, None)).collect())
    }

    /// Build a search scope spanning only the given text range of its file.
    pub fn file_range(range: FileRange) -> SearchScope {
        SearchScope::new(std::iter::once((range.file_id, Some(range.range))).collect())
    }

    /// Build a search scope spanning the given files in full.
    pub fn files(files: &[FileId]) -> SearchScope {
        SearchScope::new(files.iter().map(|f| (*f, None)).collect())
    }

    /// Intersects two scopes file-by-file; per-file ranges are intersected,
    /// with `None` acting as "whole file". Files missing from either side
    /// (or with disjoint ranges) drop out.
    pub fn intersection(&self, other: &SearchScope) -> SearchScope {
        // Iterate the smaller map and probe the larger one.
        let (mut small, mut large) = (&self.entries, &other.entries);
        if small.len() > large.len() {
            mem::swap(&mut small, &mut large)
        }

        let intersect_ranges =
            |r1: Option<TextRange>, r2: Option<TextRange>| -> Option<Option<TextRange>> {
                match (r1, r2) {
                    (None, r) | (r, None) => Some(r),
                    (Some(r1), Some(r2)) => r1.intersect(r2).map(Some),
                }
            };
        let res = small
            .iter()
            .filter_map(|(&file_id, &r1)| {
                let &r2 = large.get(&file_id)?;
                let r = intersect_ranges(r1, r2)?;
                Some((file_id, r))
            })
            .collect();

        SearchScope::new(res)
    }
}
 +
// Consumes the scope, yielding each in-scope file with its optional
// restriction range (`None` = whole file).
impl IntoIterator for SearchScope {
    type Item = (FileId, Option<TextRange>);
    type IntoIter = std::collections::hash_map::IntoIter<FileId, Option<TextRange>>;

    fn into_iter(self) -> Self::IntoIter {
        self.entries.into_iter()
    }
}
 +
impl Definition {
    /// Computes the set of files (and ranges within them) that could
    /// possibly contain references to this definition, based on its kind
    /// and visibility. The special cases are checked most-specific first;
    /// plain visibility-based scoping is the fallback at the bottom.
    fn search_scope(&self, db: &RootDatabase) -> SearchScope {
        let _p = profile::span("search_scope");

        if let Definition::BuiltinType(_) = self {
            return SearchScope::crate_graph(db);
        }

        // def is crate root
        // FIXME: We don't do searches for crates currently, as a crate does not actually have a single name
        if let &Definition::Module(module) = self {
            if module.is_crate_root(db) {
                return SearchScope::reverse_dependencies(db, module.krate());
            }
        }

        let module = match self.module(db) {
            Some(it) => it,
            None => return SearchScope::empty(),
        };
        let InFile { file_id, value: module_source } = module.definition_source(db);
        let file_id = file_id.original_file(db);

        // Locals are only referable from within their enclosing body.
        if let Definition::Local(var) = self {
            let def = match var.parent(db) {
                DefWithBody::Function(f) => f.source(db).map(|src| src.syntax().cloned()),
                DefWithBody::Const(c) => c.source(db).map(|src| src.syntax().cloned()),
                DefWithBody::Static(s) => s.source(db).map(|src| src.syntax().cloned()),
            };
            return match def {
                Some(def) => SearchScope::file_range(def.as_ref().original_file_range(db)),
                None => SearchScope::single_file(file_id),
            };
        }

        // `Self` (the impl's self type) is only referable inside the impl.
        if let Definition::SelfType(impl_) = self {
            return match impl_.source(db).map(|src| src.syntax().cloned()) {
                Some(def) => SearchScope::file_range(def.as_ref().original_file_range(db)),
                None => SearchScope::single_file(file_id),
            };
        }

        // Lifetime params are scoped to the item that declares them.
        if let Definition::GenericParam(hir::GenericParam::LifetimeParam(param)) = self {
            let def = match param.parent(db) {
                hir::GenericDef::Function(it) => it.source(db).map(|src| src.syntax().cloned()),
                hir::GenericDef::Adt(it) => it.source(db).map(|src| src.syntax().cloned()),
                hir::GenericDef::Trait(it) => it.source(db).map(|src| src.syntax().cloned()),
                hir::GenericDef::TypeAlias(it) => it.source(db).map(|src| src.syntax().cloned()),
                hir::GenericDef::Impl(it) => it.source(db).map(|src| src.syntax().cloned()),
                hir::GenericDef::Variant(it) => it.source(db).map(|src| src.syntax().cloned()),
                hir::GenericDef::Const(it) => it.source(db).map(|src| src.syntax().cloned()),
            };
            return match def {
                Some(def) => SearchScope::file_range(def.as_ref().original_file_range(db)),
                None => SearchScope::single_file(file_id),
            };
        }

        if let Definition::Macro(macro_def) = self {
            return match macro_def.kind(db) {
                hir::MacroKind::Declarative => {
                    // `#[macro_export]` makes a declarative macro visible to
                    // dependent crates; otherwise it stays crate-local.
                    if macro_def.attrs(db).by_key("macro_export").exists() {
                        SearchScope::reverse_dependencies(db, module.krate())
                    } else {
                        SearchScope::krate(db, module.krate())
                    }
                }
                hir::MacroKind::BuiltIn => SearchScope::crate_graph(db),
                hir::MacroKind::Derive | hir::MacroKind::Attr | hir::MacroKind::ProcMacro => {
                    SearchScope::reverse_dependencies(db, module.krate())
                }
            };
        }

        if let Definition::DeriveHelper(_) = self {
            return SearchScope::reverse_dependencies(db, module.krate());
        }

        // Fallback: scope by the definition's declared visibility.
        let vis = self.visibility(db);
        if let Some(Visibility::Public) = vis {
            return SearchScope::reverse_dependencies(db, module.krate());
        }
        if let Some(Visibility::Module(module)) = vis {
            return SearchScope::module_and_children(db, module.into());
        }

        // Private: restrict to the enclosing module's own text range.
        let range = match module_source {
            ModuleSource::Module(m) => Some(m.syntax().text_range()),
            ModuleSource::BlockExpr(b) => Some(b.syntax().text_range()),
            ModuleSource::SourceFile(_) => None,
        };
        match range {
            Some(range) => SearchScope::file_range(FileRange { file_id, range }),
            None => SearchScope::single_file(file_id),
        }
    }

    /// Starts a usage search for this definition; configure it through the
    /// returned [`FindUsages`] builder before running.
    pub fn usages<'a>(self, sema: &'a Semantics<'_, RootDatabase>) -> FindUsages<'a> {
        FindUsages {
            local_repr: match self {
                Definition::Local(local) => Some(local.representative(sema.db)),
                _ => None,
            },
            def: self,
            trait_assoc_def: as_trait_assoc_def(sema.db, self),
            sema,
            scope: None,
            include_self_kw_refs: None,
            search_self_mod: false,
        }
    }
}
 +
/// Builder-style search for all usages of a [`Definition`].
#[derive(Clone)]
pub struct FindUsages<'a> {
    def: Definition,
    /// If def is an assoc item from a trait or trait impl, this is the corresponding item of the trait definition
    trait_assoc_def: Option<Definition>,
    sema: &'a Semantics<'a, RootDatabase>,
    /// Optional user-imposed restriction, intersected with the def's natural scope.
    scope: Option<SearchScope>,
    /// When set, `Self` occurrences referring to this type are also matched.
    include_self_kw_refs: Option<hir::Type>,
    // Populated from `Local::representative` when `def` is a local; see `usages`.
    local_repr: Option<hir::Local>,
    /// When true, module `self` references are also searched for.
    search_self_mod: bool,
}
 +
 +impl<'a> FindUsages<'a> {
 +    /// Enable searching for `Self` when the definition is a type or `self` for modules.
 +    pub fn include_self_refs(mut self) -> FindUsages<'a> {
 +        self.include_self_kw_refs = def_to_ty(self.sema, &self.def);
 +        self.search_self_mod = true;
 +        self
 +    }
 +
    /// Limit the search to the given [`SearchScope`]. Convenience wrapper
    /// over [`Self::set_scope`].
    pub fn in_scope(self, scope: SearchScope) -> FindUsages<'a> {
        self.set_scope(Some(scope))
    }
 +
    /// Limit the search to the given scope, if any.
    ///
    /// May only be called once per search; setting a scope twice is a
    /// caller bug (enforced by the assert).
    pub fn set_scope(mut self, scope: Option<SearchScope>) -> FindUsages<'a> {
        assert!(self.scope.is_none());
        self.scope = scope;
        self
    }
 +
 +    pub fn at_least_one(&self) -> bool {
 +        let mut found = false;
 +        self.search(&mut |_, _| {
 +            found = true;
 +            true
 +        });
 +        found
 +    }
 +
 +    pub fn all(self) -> UsageSearchResult {
 +        let mut res = UsageSearchResult::default();
 +        self.search(&mut |file_id, reference| {
 +            res.references.entry(file_id).or_default().push(reference);
 +            false
 +        });
 +        res
 +    }
 +
    /// Core search driver: textually scans every file in scope for candidate
    /// occurrences of the definition's name, then semantically classifies
    /// each hit, passing confirmed references to `sink`. A `true` return
    /// from `sink` aborts the whole search early.
    fn search(&self, sink: &mut dyn FnMut(FileId, FileReference) -> bool) {
        let _p = profile::span("FindUsages:search");
        let sema = self.sema;

        // Intersect the definition's natural scope with any user-set one.
        let search_scope = {
            let base = self.trait_assoc_def.unwrap_or(self.def).search_scope(sema.db);
            match &self.scope {
                None => base,
                Some(scope) => base.intersection(scope),
            }
        };

        let name = match self.def {
            // special case crate modules as these do not have a proper name
            Definition::Module(module) if module.is_crate_root(self.sema.db) => {
                // FIXME: This assumes the crate name is always equal to its display name when it really isn't
                module
                    .krate()
                    .display_name(self.sema.db)
                    .map(|crate_name| crate_name.crate_name().as_smol_str().clone())
            }
            _ => {
                let self_kw_refs = || {
                    self.include_self_kw_refs.as_ref().and_then(|ty| {
                        ty.as_adt()
                            .map(|adt| adt.name(self.sema.db))
                            .or_else(|| ty.as_builtin().map(|builtin| builtin.name()))
                    })
                };
                // We need to unescape the name in case it is written without "r#" in earlier
                // editions of Rust where it isn't a keyword.
                self.def.name(sema.db).or_else(self_kw_refs).map(|it| it.unescaped().to_smol_str())
            }
        };
        // No usable name means nothing can be found textually — bail out.
        let name = match &name {
            Some(s) => s.as_str(),
            None => return,
        };

        // these can't be closures because rust infers the lifetimes wrong ...
        fn match_indices<'a>(
            text: &'a str,
            name: &'a str,
            search_range: TextRange,
        ) -> impl Iterator<Item = TextSize> + 'a {
            text.match_indices(name).filter_map(move |(idx, _)| {
                let offset: TextSize = idx.try_into().unwrap();
                if !search_range.contains_inclusive(offset) {
                    return None;
                }
                Some(offset)
            })
        }

        // Resolves each scope entry to (file text, file id, effective range),
        // defaulting a missing range to the whole file.
        fn scope_files<'a>(
            sema: &'a Semantics<'_, RootDatabase>,
            scope: &'a SearchScope,
        ) -> impl Iterator<Item = (Arc<String>, FileId, TextRange)> + 'a {
            scope.entries.iter().map(|(&file_id, &search_range)| {
                let text = sema.db.file_text(file_id);
                let search_range =
                    search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(text.as_str())));

                (text, file_id, search_range)
            })
        }

        // FIXME: There should be optimization potential here
        // Currently we try to descend everything we find which
        // means we call `Semantics::descend_into_macros` on
        // every textual hit. That function is notoriously
        // expensive even for things that do not get down mapped
        // into macros.
        for (text, file_id, search_range) in scope_files(sema, &search_scope) {
            // Parse lazily: many in-scope files will have no textual hit.
            let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());

            // Search for occurrences of the items name
            for offset in match_indices(&text, name, search_range) {
                for name in sema.find_nodes_at_offset_with_descend(&tree, offset) {
                    if match name {
                        ast::NameLike::NameRef(name_ref) => self.found_name_ref(&name_ref, sink),
                        ast::NameLike::Name(name) => self.found_name(&name, sink),
                        ast::NameLike::Lifetime(lifetime) => self.found_lifetime(&lifetime, sink),
                    } {
                        return;
                    }
                }
            }
            // Search for occurrences of the `Self` referring to our type
            if let Some(self_ty) = &self.include_self_kw_refs {
                for offset in match_indices(&text, "Self", search_range) {
                    for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
                        if self.found_self_ty_name_ref(self_ty, &name_ref, sink) {
                            return;
                        }
                    }
                }
            }
        }

        // Search for `super` and `crate` resolving to our module
        match self.def {
            Definition::Module(module) => {
                // `super`/`crate` can only refer to this module from inside
                // the module tree below it, so narrow the scope first.
                let scope = search_scope
                    .intersection(&SearchScope::module_and_children(self.sema.db, module));

                let is_crate_root = module.is_crate_root(self.sema.db);

                for (text, file_id, search_range) in scope_files(sema, &scope) {
                    let tree = Lazy::new(move || sema.parse(file_id).syntax().clone());

                    for offset in match_indices(&text, "super", search_range) {
                        for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
                            if self.found_name_ref(&name_ref, sink) {
                                return;
                            }
                        }
                    }
                    if is_crate_root {
                        for offset in match_indices(&text, "crate", search_range) {
                            for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
                                if self.found_name_ref(&name_ref, sink) {
                                    return;
                                }
                            }
                        }
                    }
                }
            }
            _ => (),
        }

        // search for module `self` references in our module's definition source
        match self.def {
            Definition::Module(module) if self.search_self_mod => {
                let src = module.definition_source(sema.db);
                let file_id = src.file_id.original_file(sema.db);
                let (file_id, search_range) = match src.value {
                    ModuleSource::Module(m) => (file_id, Some(m.syntax().text_range())),
                    ModuleSource::BlockExpr(b) => (file_id, Some(b.syntax().text_range())),
                    ModuleSource::SourceFile(_) => (file_id, None),
                };

                // Clip the module's own range against the active search scope;
                // a disjoint or out-of-scope file means nothing to do.
                let search_range = if let Some(&range) = search_scope.entries.get(&file_id) {
                    match (range, search_range) {
                        (None, range) | (range, None) => range,
                        (Some(range), Some(search_range)) => match range.intersect(search_range) {
                            Some(range) => Some(range),
                            None => return,
                        },
                    }
                } else {
                    return;
                };

                let text = sema.db.file_text(file_id);
                let search_range =
                    search_range.unwrap_or_else(|| TextRange::up_to(TextSize::of(text.as_str())));

                let tree = Lazy::new(|| sema.parse(file_id).syntax().clone());

                for offset in match_indices(&text, "self", search_range) {
                    for name_ref in sema.find_nodes_at_offset_with_descend(&tree, offset) {
                        if self.found_self_module_name_ref(&name_ref, sink) {
                            return;
                        }
                    }
                }
            }
            _ => {}
        }
    }
 +
 +    fn found_self_ty_name_ref(
 +        &self,
 +        self_ty: &hir::Type,
 +        name_ref: &ast::NameRef,
 +        sink: &mut dyn FnMut(FileId, FileReference) -> bool,
 +    ) -> bool {
 +        match NameRefClass::classify(self.sema, name_ref) {
 +            Some(NameRefClass::Definition(Definition::SelfType(impl_)))
 +                if impl_.self_ty(self.sema.db) == *self_ty =>
 +            {
 +                let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
 +                let reference = FileReference {
 +                    range,
 +                    name: ast::NameLike::NameRef(name_ref.clone()),
 +                    category: None,
 +                };
 +                sink(file_id, reference)
 +            }
 +            _ => false,
 +        }
 +    }
 +
 +    fn found_self_module_name_ref(
 +        &self,
 +        name_ref: &ast::NameRef,
 +        sink: &mut dyn FnMut(FileId, FileReference) -> bool,
 +    ) -> bool {
 +        match NameRefClass::classify(self.sema, name_ref) {
 +            Some(NameRefClass::Definition(def @ Definition::Module(_))) if def == self.def => {
 +                let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
 +                let reference = FileReference {
 +                    range,
 +                    name: ast::NameLike::NameRef(name_ref.clone()),
 +                    category: None,
 +                };
 +                sink(file_id, reference)
 +            }
 +            _ => false,
 +        }
 +    }
 +
 +    fn found_lifetime(
 +        &self,
 +        lifetime: &ast::Lifetime,
 +        sink: &mut dyn FnMut(FileId, FileReference) -> bool,
 +    ) -> bool {
 +        match NameRefClass::classify_lifetime(self.sema, lifetime) {
 +            Some(NameRefClass::Definition(def)) if def == self.def => {
 +                let FileRange { file_id, range } = self.sema.original_range(lifetime.syntax());
 +                let reference = FileReference {
 +                    range,
 +                    name: ast::NameLike::Lifetime(lifetime.clone()),
 +                    category: None,
 +                };
 +                sink(file_id, reference)
 +            }
 +            _ => false,
 +        }
 +    }
 +
 +    fn found_name_ref(
 +        &self,
 +        name_ref: &ast::NameRef,
 +        sink: &mut dyn FnMut(FileId, FileReference) -> bool,
 +    ) -> bool {
 +        match NameRefClass::classify(self.sema, name_ref) {
 +            Some(NameRefClass::Definition(def @ Definition::Local(local)))
 +                if matches!(
 +                    self.local_repr, Some(repr) if repr == local.representative(self.sema.db)
 +                ) =>
 +            {
 +                let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
 +                let reference = FileReference {
 +                    range,
 +                    name: ast::NameLike::NameRef(name_ref.clone()),
 +                    category: ReferenceCategory::new(&def, name_ref),
 +                };
 +                sink(file_id, reference)
 +            }
 +            Some(NameRefClass::Definition(def))
 +                if match self.trait_assoc_def {
 +                    Some(trait_assoc_def) => {
 +                        // we have a trait assoc item, so force resolve all assoc items to their trait version
 +                        convert_to_def_in_trait(self.sema.db, def) == trait_assoc_def
 +                    }
 +                    None => self.def == def,
 +                } =>
 +            {
 +                let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
 +                let reference = FileReference {
 +                    range,
 +                    name: ast::NameLike::NameRef(name_ref.clone()),
 +                    category: ReferenceCategory::new(&def, name_ref),
 +                };
 +                sink(file_id, reference)
 +            }
 +            Some(NameRefClass::Definition(def)) if self.include_self_kw_refs.is_some() => {
 +                if self.include_self_kw_refs == def_to_ty(self.sema, &def) {
 +                    let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
 +                    let reference = FileReference {
 +                        range,
 +                        name: ast::NameLike::NameRef(name_ref.clone()),
 +                        category: ReferenceCategory::new(&def, name_ref),
 +                    };
 +                    sink(file_id, reference)
 +                } else {
 +                    false
 +                }
 +            }
 +            Some(NameRefClass::FieldShorthand { local_ref: local, field_ref: field }) => {
 +                let field = Definition::Field(field);
 +                let FileRange { file_id, range } = self.sema.original_range(name_ref.syntax());
 +                let access = match self.def {
 +                    Definition::Field(_) if field == self.def => {
 +                        ReferenceCategory::new(&field, name_ref)
 +                    }
 +                    Definition::Local(_) if matches!(self.local_repr, Some(repr) if repr == local.representative(self.sema.db)) => {
 +                        ReferenceCategory::new(&Definition::Local(local), name_ref)
 +                    }
 +                    _ => return false,
 +                };
 +                let reference = FileReference {
 +                    range,
 +                    name: ast::NameLike::NameRef(name_ref.clone()),
 +                    category: access,
 +                };
 +                sink(file_id, reference)
 +            }
 +            _ => false,
 +        }
 +    }
 +
 +    fn found_name(
 +        &self,
 +        name: &ast::Name,
 +        sink: &mut dyn FnMut(FileId, FileReference) -> bool,
 +    ) -> bool {
 +        match NameClass::classify(self.sema, name) {
 +            Some(NameClass::PatFieldShorthand { local_def: _, field_ref })
 +                if matches!(
 +                    self.def, Definition::Field(_) if Definition::Field(field_ref) == self.def
 +                ) =>
 +            {
 +                let FileRange { file_id, range } = self.sema.original_range(name.syntax());
 +                let reference = FileReference {
 +                    range,
 +                    name: ast::NameLike::Name(name.clone()),
 +                    // FIXME: mutable patterns should have `Write` access
 +                    category: Some(ReferenceCategory::Read),
 +                };
 +                sink(file_id, reference)
 +            }
 +            Some(NameClass::ConstReference(def)) if self.def == def => {
 +                let FileRange { file_id, range } = self.sema.original_range(name.syntax());
 +                let reference = FileReference {
 +                    range,
 +                    name: ast::NameLike::Name(name.clone()),
 +                    category: None,
 +                };
 +                sink(file_id, reference)
 +            }
 +            Some(NameClass::Definition(def @ Definition::Local(local))) if def != self.def => {
 +                if matches!(
 +                    self.local_repr,
 +                    Some(repr) if local.representative(self.sema.db) == repr
 +                ) {
 +                    let FileRange { file_id, range } = self.sema.original_range(name.syntax());
 +                    let reference = FileReference {
 +                        range,
 +                        name: ast::NameLike::Name(name.clone()),
 +                        category: None,
 +                    };
 +                    return sink(file_id, reference);
 +                }
 +                false
 +            }
 +            Some(NameClass::Definition(def)) if def != self.def => {
 +                // if the def we are looking for is a trait (impl) assoc item, we'll have to resolve the items to trait definition assoc item
 +                if !matches!(
 +                    self.trait_assoc_def,
 +                    Some(trait_assoc_def)
 +                        if convert_to_def_in_trait(self.sema.db, def) == trait_assoc_def
 +                ) {
 +                    return false;
 +                }
 +                let FileRange { file_id, range } = self.sema.original_range(name.syntax());
 +                let reference = FileReference {
 +                    range,
 +                    name: ast::NameLike::Name(name.clone()),
 +                    category: None,
 +                };
 +                sink(file_id, reference)
 +            }
 +            _ => false,
 +        }
 +    }
 +}
 +
 +fn def_to_ty(sema: &Semantics<'_, RootDatabase>, def: &Definition) -> Option<hir::Type> {
 +    match def {
 +        Definition::Adt(adt) => Some(adt.ty(sema.db)),
 +        Definition::TypeAlias(it) => Some(it.ty(sema.db)),
 +        Definition::BuiltinType(it) => Some(it.ty(sema.db)),
 +        Definition::SelfType(it) => Some(it.self_ty(sema.db)),
 +        _ => None,
 +    }
 +}
 +
 +impl ReferenceCategory {
 +    fn new(def: &Definition, r: &ast::NameRef) -> Option<ReferenceCategory> {
 +        // Only Locals and Fields have accesses for now.
 +        if !matches!(def, Definition::Local(_) | Definition::Field(_)) {
 +            return None;
 +        }
 +
 +        let mode = r.syntax().ancestors().find_map(|node| {
 +        match_ast! {
 +            match node {
 +                ast::BinExpr(expr) => {
 +                    if matches!(expr.op_kind()?, ast::BinaryOp::Assignment { .. }) {
 +                        // If the variable or field ends on the LHS's end then it's a Write (covers fields and locals).
 +                        // FIXME: This is not terribly accurate.
 +                        if let Some(lhs) = expr.lhs() {
 +                            if lhs.syntax().text_range().end() == r.syntax().text_range().end() {
 +                                return Some(ReferenceCategory::Write);
 +                            }
 +                        }
 +                    }
 +                    Some(ReferenceCategory::Read)
 +                },
 +                _ => None
 +            }
 +        }
 +    });
 +
 +        // Default Locals and Fields to read
 +        mode.or(Some(ReferenceCategory::Read))
 +    }
 +}
index 21314ad74ef9387eee0e0b81d8d8bc3d4e5a10bf,0000000000000000000000000000000000000000..8e338061df43345c6d18b2cd6eb1d7fefb41a707
mode 100644,000000..100644
--- /dev/null
@@@ -1,231 -1,0 +1,230 @@@
- use rustc_hash::FxHashMap;
- use stdx::never;
 +//! This module defines types to represent changes to the source code that flow
 +//! from the server to the client.
 +//!
 +//! It can be viewed as a dual for `Change`.
 +
 +use std::{collections::hash_map::Entry, iter, mem};
 +
 +use base_db::{AnchoredPathBuf, FileId};
-     pub source_file_edits: FxHashMap<FileId, TextEdit>,
++use stdx::{hash::NoHashHashMap, never};
 +use syntax::{algo, AstNode, SyntaxNode, SyntaxNodePtr, TextRange, TextSize};
 +use text_edit::{TextEdit, TextEditBuilder};
 +
 +use crate::SnippetCap;
 +
 +#[derive(Default, Debug, Clone)]
 +pub struct SourceChange {
-         source_file_edits: FxHashMap<FileId, TextEdit>,
++    pub source_file_edits: NoHashHashMap<FileId, TextEdit>,
 +    pub file_system_edits: Vec<FileSystemEdit>,
 +    pub is_snippet: bool,
 +}
 +
 +impl SourceChange {
 +    /// Creates a new SourceChange with the given label
 +    /// from the edits.
 +    pub fn from_edits(
- impl From<FxHashMap<FileId, TextEdit>> for SourceChange {
-     fn from(source_file_edits: FxHashMap<FileId, TextEdit>) -> SourceChange {
++        source_file_edits: NoHashHashMap<FileId, TextEdit>,
 +        file_system_edits: Vec<FileSystemEdit>,
 +    ) -> Self {
 +        SourceChange { source_file_edits, file_system_edits, is_snippet: false }
 +    }
 +
 +    pub fn from_text_edit(file_id: FileId, edit: TextEdit) -> Self {
 +        SourceChange {
 +            source_file_edits: iter::once((file_id, edit)).collect(),
 +            ..Default::default()
 +        }
 +    }
 +
 +    /// Inserts a [`TextEdit`] for the given [`FileId`]. This properly handles merging existing
 +    /// edits for a file if some already exist.
 +    pub fn insert_source_edit(&mut self, file_id: FileId, edit: TextEdit) {
 +        match self.source_file_edits.entry(file_id) {
 +            Entry::Occupied(mut entry) => {
 +                never!(entry.get_mut().union(edit).is_err(), "overlapping edits for same file");
 +            }
 +            Entry::Vacant(entry) => {
 +                entry.insert(edit);
 +            }
 +        }
 +    }
 +
 +    pub fn push_file_system_edit(&mut self, edit: FileSystemEdit) {
 +        self.file_system_edits.push(edit);
 +    }
 +
 +    pub fn get_source_edit(&self, file_id: FileId) -> Option<&TextEdit> {
 +        self.source_file_edits.get(&file_id)
 +    }
 +
 +    pub fn merge(mut self, other: SourceChange) -> SourceChange {
 +        self.extend(other.source_file_edits);
 +        self.extend(other.file_system_edits);
 +        self.is_snippet |= other.is_snippet;
 +        self
 +    }
 +}
 +
 +impl Extend<(FileId, TextEdit)> for SourceChange {
 +    fn extend<T: IntoIterator<Item = (FileId, TextEdit)>>(&mut self, iter: T) {
 +        iter.into_iter().for_each(|(file_id, edit)| self.insert_source_edit(file_id, edit));
 +    }
 +}
 +
 +impl Extend<FileSystemEdit> for SourceChange {
 +    fn extend<T: IntoIterator<Item = FileSystemEdit>>(&mut self, iter: T) {
 +        iter.into_iter().for_each(|edit| self.push_file_system_edit(edit));
 +    }
 +}
 +
++impl From<NoHashHashMap<FileId, TextEdit>> for SourceChange {
++    fn from(source_file_edits: NoHashHashMap<FileId, TextEdit>) -> SourceChange {
 +        SourceChange { source_file_edits, file_system_edits: Vec::new(), is_snippet: false }
 +    }
 +}
 +
 +pub struct SourceChangeBuilder {
 +    pub edit: TextEditBuilder,
 +    pub file_id: FileId,
 +    pub source_change: SourceChange,
 +    pub trigger_signature_help: bool,
 +
 +    /// Maps the original, immutable `SyntaxNode` to a `clone_for_update` twin.
 +    pub mutated_tree: Option<TreeMutator>,
 +}
 +
 +pub struct TreeMutator {
 +    immutable: SyntaxNode,
 +    mutable_clone: SyntaxNode,
 +}
 +
 +impl TreeMutator {
 +    pub fn new(immutable: &SyntaxNode) -> TreeMutator {
 +        let immutable = immutable.ancestors().last().unwrap();
 +        let mutable_clone = immutable.clone_for_update();
 +        TreeMutator { immutable, mutable_clone }
 +    }
 +
 +    pub fn make_mut<N: AstNode>(&self, node: &N) -> N {
 +        N::cast(self.make_syntax_mut(node.syntax())).unwrap()
 +    }
 +
 +    pub fn make_syntax_mut(&self, node: &SyntaxNode) -> SyntaxNode {
 +        let ptr = SyntaxNodePtr::new(node);
 +        ptr.to_node(&self.mutable_clone)
 +    }
 +}
 +
 +impl SourceChangeBuilder {
 +    pub fn new(file_id: FileId) -> SourceChangeBuilder {
 +        SourceChangeBuilder {
 +            edit: TextEdit::builder(),
 +            file_id,
 +            source_change: SourceChange::default(),
 +            trigger_signature_help: false,
 +            mutated_tree: None,
 +        }
 +    }
 +
 +    pub fn edit_file(&mut self, file_id: FileId) {
 +        self.commit();
 +        self.file_id = file_id;
 +    }
 +
 +    fn commit(&mut self) {
 +        if let Some(tm) = self.mutated_tree.take() {
 +            algo::diff(&tm.immutable, &tm.mutable_clone).into_text_edit(&mut self.edit)
 +        }
 +
 +        let edit = mem::take(&mut self.edit).finish();
 +        if !edit.is_empty() {
 +            self.source_change.insert_source_edit(self.file_id, edit);
 +        }
 +    }
 +
 +    pub fn make_mut<N: AstNode>(&mut self, node: N) -> N {
 +        self.mutated_tree.get_or_insert_with(|| TreeMutator::new(node.syntax())).make_mut(&node)
 +    }
 +    /// Returns a copy of the `node`, suitable for mutation.
 +    ///
 +    /// Syntax trees in rust-analyzer are typically immutable, and mutating
 +    /// operations panic at runtime. However, it is possible to make a copy of
 +    /// the tree and mutate the copy freely. Mutation is based on interior
 +    /// mutability, and different nodes in the same tree see the same mutations.
 +    ///
 +    /// The typical pattern for an assist is to find specific nodes in the read
 +    /// phase, and then get their mutable counterparts using `make_mut` in the
 +    /// mutable state.
 +    pub fn make_syntax_mut(&mut self, node: SyntaxNode) -> SyntaxNode {
 +        self.mutated_tree.get_or_insert_with(|| TreeMutator::new(&node)).make_syntax_mut(&node)
 +    }
 +
 +    /// Remove specified `range` of text.
 +    pub fn delete(&mut self, range: TextRange) {
 +        self.edit.delete(range)
 +    }
 +    /// Append specified `text` at the given `offset`
 +    pub fn insert(&mut self, offset: TextSize, text: impl Into<String>) {
 +        self.edit.insert(offset, text.into())
 +    }
 +    /// Append specified `snippet` at the given `offset`
 +    pub fn insert_snippet(
 +        &mut self,
 +        _cap: SnippetCap,
 +        offset: TextSize,
 +        snippet: impl Into<String>,
 +    ) {
 +        self.source_change.is_snippet = true;
 +        self.insert(offset, snippet);
 +    }
 +    /// Replaces specified `range` of text with a given string.
 +    pub fn replace(&mut self, range: TextRange, replace_with: impl Into<String>) {
 +        self.edit.replace(range, replace_with.into())
 +    }
 +    /// Replaces specified `range` of text with a given `snippet`.
 +    pub fn replace_snippet(
 +        &mut self,
 +        _cap: SnippetCap,
 +        range: TextRange,
 +        snippet: impl Into<String>,
 +    ) {
 +        self.source_change.is_snippet = true;
 +        self.replace(range, snippet);
 +    }
 +    pub fn replace_ast<N: AstNode>(&mut self, old: N, new: N) {
 +        algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit)
 +    }
 +    pub fn create_file(&mut self, dst: AnchoredPathBuf, content: impl Into<String>) {
 +        let file_system_edit = FileSystemEdit::CreateFile { dst, initial_contents: content.into() };
 +        self.source_change.push_file_system_edit(file_system_edit);
 +    }
 +    pub fn move_file(&mut self, src: FileId, dst: AnchoredPathBuf) {
 +        let file_system_edit = FileSystemEdit::MoveFile { src, dst };
 +        self.source_change.push_file_system_edit(file_system_edit);
 +    }
 +    pub fn trigger_signature_help(&mut self) {
 +        self.trigger_signature_help = true;
 +    }
 +
 +    pub fn finish(mut self) -> SourceChange {
 +        self.commit();
 +        mem::take(&mut self.source_change)
 +    }
 +}
 +
 +#[derive(Debug, Clone)]
 +pub enum FileSystemEdit {
 +    CreateFile { dst: AnchoredPathBuf, initial_contents: String },
 +    MoveFile { src: FileId, dst: AnchoredPathBuf },
 +    MoveDir { src: AnchoredPathBuf, src_id: FileId, dst: AnchoredPathBuf },
 +}
 +
 +impl From<FileSystemEdit> for SourceChange {
 +    fn from(edit: FileSystemEdit) -> SourceChange {
 +        SourceChange {
 +            source_file_edits: Default::default(),
 +            file_system_edits: vec![edit],
 +            is_snippet: false,
 +        }
 +    }
 +}
index 5694f33525e0114890f3ebd29e7fb16c4e65af81,0000000000000000000000000000000000000000..04918891b254ca4a45f7cd12ef8187fc60d9c2c2
mode 100644,000000..100644
--- /dev/null
@@@ -1,144 -1,0 +1,143 @@@
-         // FIXME these currently don't work, hence the *
 +use cfg::DnfExpr;
 +use stdx::format_to;
 +
 +use crate::{Diagnostic, DiagnosticsContext, Severity};
 +
 +// Diagnostic: inactive-code
 +//
 +// This diagnostic is shown for code with inactive `#[cfg]` attributes.
 +pub(crate) fn inactive_code(
 +    ctx: &DiagnosticsContext<'_>,
 +    d: &hir::InactiveCode,
 +) -> Option<Diagnostic> {
 +    // If there's inactive code somewhere in a macro, don't propagate to the call-site.
 +    if d.node.file_id.is_macro() {
 +        return None;
 +    }
 +
 +    let inactive = DnfExpr::new(d.cfg.clone()).why_inactive(&d.opts);
 +    let mut message = "code is inactive due to #[cfg] directives".to_string();
 +
 +    if let Some(inactive) = inactive {
 +        let inactive_reasons = inactive.to_string();
 +
 +        if inactive_reasons.is_empty() {
 +            format_to!(message);
 +        } else {
 +            format_to!(message, ": {}", inactive);
 +        }
 +    }
 +
 +    let res = Diagnostic::new(
 +        "inactive-code",
 +        message,
 +        ctx.sema.diagnostics_display_range(d.node.clone()).range,
 +    )
 +    .severity(Severity::WeakWarning)
 +    .with_unused(true);
 +    Some(res)
 +}
 +
 +#[cfg(test)]
 +mod tests {
 +    use crate::{tests::check_diagnostics_with_config, DiagnosticsConfig};
 +
 +    pub(crate) fn check(ra_fixture: &str) {
 +        let config = DiagnosticsConfig::test_sample();
 +        check_diagnostics_with_config(config, ra_fixture)
 +    }
 +
 +    #[test]
 +    fn cfg_diagnostics() {
 +        check(
 +            r#"
 +fn f() {
 +    // The three g̶e̶n̶d̶e̶r̶s̶ statements:
 +
 +    #[cfg(a)] fn f() {}  // Item statement
 +  //^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
 +    #[cfg(a)] {}         // Expression statement
 +  //^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
 +    #[cfg(a)] let x = 0; // let statement
 +  //^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
 +
 +    abc(#[cfg(a)] 0);
 +      //^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
 +    let x = Struct {
 +        #[cfg(a)] f: 0,
 +      //^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
 +    };
 +    match () {
 +        () => (),
 +        #[cfg(a)] () => (),
 +      //^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
 +    }
 +
 +    #[cfg(a)] 0          // Trailing expression of block
 +  //^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled
 +}
 +        "#,
 +        );
 +    }
 +
 +    #[test]
 +    fn inactive_item() {
 +        // Additional tests in `cfg` crate. This only tests disabled cfgs.
 +
 +        check(
 +            r#"
 +    #[cfg(no)] pub fn f() {}
 +  //^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled
 +
 +    #[cfg(no)] #[cfg(no2)] mod m;
 +  //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no and no2 are disabled
 +
 +    #[cfg(all(not(a), b))] enum E {}
 +  //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: b is disabled
 +
 +    #[cfg(feature = "std")] use std;
 +  //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: feature = "std" is disabled
 +
 +    #[cfg(any())] pub fn f() {}
 +  //^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives
 +"#,
 +        );
 +    }
 +
 +    #[test]
 +    fn inactive_assoc_item() {
-   //*************************** weak: code is inactive due to #[cfg] directives
 +        check(
 +            r#"
 +struct Foo;
 +impl Foo {
 +    #[cfg(any())] pub fn f() {}
-   //*************************** weak: code is inactive due to #[cfg] directives
++  //^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives
 +}
 +
 +trait Bar {
 +    #[cfg(any())] pub fn f() {}
++  //^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives
 +}
 +"#,
 +        );
 +    }
 +
 +    /// Tests that `cfg` attributes behind `cfg_attr` is handled properly.
 +    #[test]
 +    fn inactive_via_cfg_attr() {
 +        cov_mark::check!(cfg_attr_active);
 +        check(
 +            r#"
 +    #[cfg_attr(not(never), cfg(no))] fn f() {}
 +  //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled
 +
 +    #[cfg_attr(not(never), cfg(not(no)))] fn f() {}
 +
 +    #[cfg_attr(never, cfg(no))] fn g() {}
 +
 +    #[cfg_attr(not(never), inline, cfg(no))] fn h() {}
 +  //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: no is disabled
 +"#,
 +        );
 +    }
 +}
index d36dd02d45c5976a5f2576cbbbbe8ae1fe6f24e2,0000000000000000000000000000000000000000..73314e0f330bc67a30b291e55bdb588ff30bdde7
mode 100644,000000..100644
--- /dev/null
@@@ -1,26 -1,0 +1,27 @@@
 +[package]
 +name = "ide-ssr"
 +version = "0.0.0"
 +description = "Structural search and replace of Rust code"
 +license = "MIT OR Apache-2.0"
 +repository = "https://github.com/rust-lang/rust-analyzer"
 +edition = "2021"
 +rust-version = "1.57"
 +
 +[lib]
 +doctest = false
 +
 +[dependencies]
 +cov-mark = "2.0.0-pre.1"
 +
 +itertools = "0.10.3"
 +
 +text-edit = { path = "../text-edit", version = "0.0.0" }
 +parser = { path = "../parser", version = "0.0.0" }
 +syntax = { path = "../syntax", version = "0.0.0" }
 +ide-db = { path = "../ide-db", version = "0.0.0" }
 +hir = { path = "../hir", version = "0.0.0" }
++stdx = { path = "../stdx", version = "0.0.0" }
 +
 +[dev-dependencies]
 +test-utils = { path = "../test-utils" }
 +expect-test = "1.4.0"
index 739e0ccb436dbeebad6112430c610ca79a93688e,0000000000000000000000000000000000000000..d9834ee63adccf1368681f8397672e93ef0c3e66
mode 100644,000000..100644
--- /dev/null
@@@ -1,358 -1,0 +1,356 @@@
- use ide_db::{
-     base_db::{FileId, FilePosition, FileRange},
-     FxHashMap,
- };
 +//! Structural Search Replace
 +//!
 +//! Allows searching the AST for code that matches one or more patterns and then replacing that code
 +//! based on a template.
 +
 +#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
 +
 +// Feature: Structural Search and Replace
 +//
 +// Search and replace with named wildcards that will match any expression, type, path, pattern or item.
 +// The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`.
 +// A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement.
 +// Within a macro call, a placeholder will match up until whatever token follows the placeholder.
 +//
 +// All paths in both the search pattern and the replacement template must resolve in the context
 +// in which this command is invoked. Paths in the search pattern will then match the code if they
 +// resolve to the same item, even if they're written differently. For example if we invoke the
 +// command in the module `foo` with a pattern of `Bar`, then code in the parent module that refers
 +// to `foo::Bar` will match.
 +//
 +// Paths in the replacement template will be rendered appropriately for the context in which the
 +// replacement occurs. For example if our replacement template is `foo::Bar` and we match some
 +// code in the `foo` module, we'll insert just `Bar`.
 +//
 +// Inherent method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will
 +// match `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`. When a
 +// placeholder is the receiver of a method call in the search pattern (e.g. `$s.foo()`), but not in
 +// the replacement template (e.g. `bar($s)`), then *, & and &mut will be added as needed to mirror
 +// whatever autoderef and autoref was happening implicitly in the matched code.
 +//
 +// The scope of the search / replace will be restricted to the current selection if any, otherwise
 +// it will apply to the whole workspace.
 +//
 +// Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`.
 +//
 +// Supported constraints:
 +//
 +// |===
 +// | Constraint    | Restricts placeholder
 +//
 +// | kind(literal) | Is a literal (e.g. `42` or `"forty two"`)
 +// | not(a)        | Negates the constraint `a`
 +// |===
 +//
 +// Available via the command `rust-analyzer.ssr`.
 +//
 +// ```rust
 +// // Using structural search replace command [foo($a, $b) ==>> ($a).foo($b)]
 +//
 +// // BEFORE
 +// String::from(foo(y + 5, z))
 +//
 +// // AFTER
 +// String::from((y + 5).foo(z))
 +// ```
 +//
 +// |===
 +// | Editor  | Action Name
 +//
 +// | VS Code | **rust-analyzer: Structural Search Replace**
 +// |===
 +//
 +// Also available as an assist, by writing a comment containing the structural
 +// search and replace rule. You will only see the assist if the comment can
 +// be parsed as a valid structural search and replace rule.
 +//
 +// ```rust
 +// // Place the cursor on the line below to see the assist 💡.
 +// // foo($a, $b) ==>> ($a).foo($b)
 +// ```
 +
 +mod from_comment;
 +mod matching;
 +mod nester;
 +mod parsing;
 +mod fragments;
 +mod replacing;
 +mod resolving;
 +mod search;
 +#[macro_use]
 +mod errors;
 +#[cfg(test)]
 +mod tests;
 +
 +pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Match};
 +
 +use crate::{errors::bail, matching::MatchFailureReason};
 +use hir::Semantics;
-     pub fn edits(&self) -> FxHashMap<FileId, TextEdit> {
++use ide_db::base_db::{FileId, FilePosition, FileRange};
 +use resolving::ResolvedRule;
++use stdx::hash::NoHashHashMap;
 +use syntax::{ast, AstNode, SyntaxNode, TextRange};
 +use text_edit::TextEdit;
 +
 +// A structured search replace rule. Create by calling `parse` on a str.
 +#[derive(Debug)]
 +pub struct SsrRule {
 +    /// A structured pattern that we're searching for.
 +    pattern: parsing::RawPattern,
 +    /// What we'll replace it with.
 +    template: parsing::RawPattern,
 +    parsed_rules: Vec<parsing::ParsedRule>,
 +}
 +
 +#[derive(Debug)]
 +pub struct SsrPattern {
 +    parsed_rules: Vec<parsing::ParsedRule>,
 +}
 +
 +#[derive(Debug, Default)]
 +pub struct SsrMatches {
 +    pub matches: Vec<Match>,
 +}
 +
 +/// Searches a crate for pattern matches and possibly replaces them with something else.
 +pub struct MatchFinder<'db> {
 +    /// Our source of information about the user's code.
 +    sema: Semantics<'db, ide_db::RootDatabase>,
 +    rules: Vec<ResolvedRule>,
 +    resolution_scope: resolving::ResolutionScope<'db>,
 +    restrict_ranges: Vec<FileRange>,
 +}
 +
 +impl<'db> MatchFinder<'db> {
 +    /// Constructs a new instance where names will be looked up as if they appeared at
 +    /// `lookup_context`.
 +    pub fn in_context(
 +        db: &'db ide_db::RootDatabase,
 +        lookup_context: FilePosition,
 +        mut restrict_ranges: Vec<FileRange>,
 +    ) -> Result<MatchFinder<'db>, SsrError> {
 +        restrict_ranges.retain(|range| !range.range.is_empty());
 +        let sema = Semantics::new(db);
 +        let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context)
 +            .ok_or_else(|| SsrError("no resolution scope for file".into()))?;
 +        Ok(MatchFinder { sema, rules: Vec::new(), resolution_scope, restrict_ranges })
 +    }
 +
 +    /// Constructs an instance using the start of the first file in `db` as the lookup context.
 +    pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> {
 +        use ide_db::base_db::SourceDatabaseExt;
 +        use ide_db::symbol_index::SymbolsDatabase;
 +        if let Some(first_file_id) =
 +            db.local_roots().iter().next().and_then(|root| db.source_root(*root).iter().next())
 +        {
 +            MatchFinder::in_context(
 +                db,
 +                FilePosition { file_id: first_file_id, offset: 0.into() },
 +                vec![],
 +            )
 +        } else {
 +            bail!("No files to search");
 +        }
 +    }
 +
 +    /// Adds a rule to be applied. The order in which rules are added matters. Earlier rules take
 +    /// precedence. If a node is matched by an earlier rule, then later rules won't be permitted to
 +    /// match to it.
 +    pub fn add_rule(&mut self, rule: SsrRule) -> Result<(), SsrError> {
 +        for parsed_rule in rule.parsed_rules {
 +            self.rules.push(ResolvedRule::new(
 +                parsed_rule,
 +                &self.resolution_scope,
 +                self.rules.len(),
 +            )?);
 +        }
 +        Ok(())
 +    }
 +
 +    /// Finds matches for all added rules and returns edits for all found matches.
-         let mut matches_by_file = FxHashMap::default();
++    pub fn edits(&self) -> NoHashHashMap<FileId, TextEdit> {
 +        use ide_db::base_db::SourceDatabaseExt;
++        let mut matches_by_file = NoHashHashMap::default();
 +        for m in self.matches().matches {
 +            matches_by_file
 +                .entry(m.range.file_id)
 +                .or_insert_with(SsrMatches::default)
 +                .matches
 +                .push(m);
 +        }
 +        matches_by_file
 +            .into_iter()
 +            .map(|(file_id, matches)| {
 +                (
 +                    file_id,
 +                    replacing::matches_to_edit(
 +                        &matches,
 +                        &self.sema.db.file_text(file_id),
 +                        &self.rules,
 +                    ),
 +                )
 +            })
 +            .collect()
 +    }
 +
 +    /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you
 +    /// intend to do replacement, use `add_rule` instead.
 +    pub fn add_search_pattern(&mut self, pattern: SsrPattern) -> Result<(), SsrError> {
 +        for parsed_rule in pattern.parsed_rules {
 +            self.rules.push(ResolvedRule::new(
 +                parsed_rule,
 +                &self.resolution_scope,
 +                self.rules.len(),
 +            )?);
 +        }
 +        Ok(())
 +    }
 +
 +    /// Returns matches for all added rules.
 +    pub fn matches(&self) -> SsrMatches {
 +        let mut matches = Vec::new();
 +        let mut usage_cache = search::UsageCache::default();
 +        for rule in &self.rules {
 +            self.find_matches_for_rule(rule, &mut usage_cache, &mut matches);
 +        }
 +        nester::nest_and_remove_collisions(matches, &self.sema)
 +    }
 +
 +    /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match
 +    /// them, while recording reasons why they don't match. This API is useful for command
 +    /// line-based debugging where providing a range is difficult.
 +    pub fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec<MatchDebugInfo> {
 +        use ide_db::base_db::SourceDatabaseExt;
 +        let file = self.sema.parse(file_id);
 +        let mut res = Vec::new();
 +        let file_text = self.sema.db.file_text(file_id);
 +        let mut remaining_text = file_text.as_str();
 +        let mut base = 0;
 +        let len = snippet.len() as u32;
 +        while let Some(offset) = remaining_text.find(snippet) {
 +            let start = base + offset as u32;
 +            let end = start + len;
 +            self.output_debug_for_nodes_at_range(
 +                file.syntax(),
 +                FileRange { file_id, range: TextRange::new(start.into(), end.into()) },
 +                &None,
 +                &mut res,
 +            );
 +            remaining_text = &remaining_text[offset + snippet.len()..];
 +            base = end;
 +        }
 +        res
 +    }
 +
 +    fn output_debug_for_nodes_at_range(
 +        &self,
 +        node: &SyntaxNode,
 +        range: FileRange,
 +        restrict_range: &Option<FileRange>,
 +        out: &mut Vec<MatchDebugInfo>,
 +    ) {
 +        for node in node.children() {
 +            let node_range = self.sema.original_range(&node);
 +            if node_range.file_id != range.file_id || !node_range.range.contains_range(range.range)
 +            {
 +                continue;
 +            }
 +            if node_range.range == range.range {
 +                for rule in &self.rules {
 +                    // For now we ignore rules that have a different kind than our node, otherwise
 +                    // we get lots of noise. If at some point we add support for restricting rules
 +                    // to a particular kind of thing (e.g. only match type references), then we can
 +                    // relax this. We special-case expressions, since function calls can match
 +                    // method calls.
 +                    if rule.pattern.node.kind() != node.kind()
 +                        && !(ast::Expr::can_cast(rule.pattern.node.kind())
 +                            && ast::Expr::can_cast(node.kind()))
 +                    {
 +                        continue;
 +                    }
 +                    out.push(MatchDebugInfo {
 +                        matched: matching::get_match(true, rule, &node, restrict_range, &self.sema)
 +                            .map_err(|e| MatchFailureReason {
 +                                reason: e.reason.unwrap_or_else(|| {
 +                                    "Match failed, but no reason was given".to_owned()
 +                                }),
 +                            }),
 +                        pattern: rule.pattern.node.clone(),
 +                        node: node.clone(),
 +                    });
 +                }
 +            } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) {
 +                if let Some(expanded) = self.sema.expand(&macro_call) {
 +                    if let Some(tt) = macro_call.token_tree() {
 +                        self.output_debug_for_nodes_at_range(
 +                            &expanded,
 +                            range,
 +                            &Some(self.sema.original_range(tt.syntax())),
 +                            out,
 +                        );
 +                    }
 +                }
 +            }
 +            self.output_debug_for_nodes_at_range(&node, range, restrict_range, out);
 +        }
 +    }
 +}
 +
 +pub struct MatchDebugInfo {
 +    node: SyntaxNode,
 +    /// Our search pattern parsed as an expression or item, etc
 +    pattern: SyntaxNode,
 +    matched: Result<Match, MatchFailureReason>,
 +}
 +
 +impl std::fmt::Debug for MatchDebugInfo {
 +    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 +        match &self.matched {
 +            Ok(_) => writeln!(f, "Node matched")?,
 +            Err(reason) => writeln!(f, "Node failed to match because: {}", reason.reason)?,
 +        }
 +        writeln!(
 +            f,
 +            "============ AST ===========\n\
 +            {:#?}",
 +            self.node
 +        )?;
 +        writeln!(f, "========= PATTERN ==========")?;
 +        writeln!(f, "{:#?}", self.pattern)?;
 +        writeln!(f, "============================")?;
 +        Ok(())
 +    }
 +}
 +
 +impl SsrMatches {
 +    /// Returns `self` with any nested matches removed and made into top-level matches.
 +    pub fn flattened(self) -> SsrMatches {
 +        let mut out = SsrMatches::default();
 +        self.flatten_into(&mut out);
 +        out
 +    }
 +
 +    fn flatten_into(self, out: &mut SsrMatches) {
 +        for mut m in self.matches {
 +            for p in m.placeholder_values.values_mut() {
 +                std::mem::take(&mut p.inner_matches).flatten_into(out);
 +            }
 +            out.matches.push(m);
 +        }
 +    }
 +}
 +
 +impl Match {
 +    pub fn matched_text(&self) -> String {
 +        self.matched_node.text().to_string()
 +    }
 +}
 +
 +impl std::error::Error for SsrError {}
 +
 +#[cfg(test)]
 +impl MatchDebugInfo {
 +    pub(crate) fn match_failure_reason(&self) -> Option<&str> {
 +        self.matched.as_ref().err().map(|r| r.reason.as_str())
 +    }
 +}
index 582e9fe7e808c05b933de4293d5e66bc719dba70,0000000000000000000000000000000000000000..92ce26b422e1dab0f13af86930c8c62e674fd7fd
mode 100644,000000..100644
--- /dev/null
@@@ -1,549 -1,0 +1,549 @@@
-         | Definition::SelfType(_)
 +//! Extracts, resolves and rewrites links and intra-doc links in markdown documentation.
 +
 +#[cfg(test)]
 +mod tests;
 +
 +mod intra_doc_links;
 +
 +use pulldown_cmark::{BrokenLink, CowStr, Event, InlineStr, LinkType, Options, Parser, Tag};
 +use pulldown_cmark_to_cmark::{cmark_resume_with_options, Options as CMarkOptions};
 +use stdx::format_to;
 +use url::Url;
 +
 +use hir::{db::HirDatabase, Adt, AsAssocItem, AssocItem, AssocItemContainer, HasAttrs};
 +use ide_db::{
 +    base_db::{CrateOrigin, LangCrateOrigin, SourceDatabase},
 +    defs::{Definition, NameClass, NameRefClass},
 +    helpers::pick_best_token,
 +    RootDatabase,
 +};
 +use syntax::{
 +    ast::{self, IsString},
 +    match_ast, AstNode, AstToken,
 +    SyntaxKind::*,
 +    SyntaxNode, SyntaxToken, TextRange, TextSize, T,
 +};
 +
 +use crate::{
 +    doc_links::intra_doc_links::{parse_intra_doc_link, strip_prefixes_suffixes},
 +    FilePosition, Semantics,
 +};
 +
 +/// Weblink to an item's documentation.
 +pub(crate) type DocumentationLink = String;
 +
 +const MARKDOWN_OPTIONS: Options =
 +    Options::ENABLE_FOOTNOTES.union(Options::ENABLE_TABLES).union(Options::ENABLE_TASKLISTS);
 +
 +/// Rewrite documentation links in markdown to point to an online host (e.g. docs.rs)
 +pub(crate) fn rewrite_links(db: &RootDatabase, markdown: &str, definition: Definition) -> String {
 +    let mut cb = broken_link_clone_cb;
 +    let doc = Parser::new_with_broken_link_callback(markdown, MARKDOWN_OPTIONS, Some(&mut cb));
 +
 +    let doc = map_links(doc, |target, title| {
 +        // This check is imperfect, there's some overlap between valid intra-doc links
 +        // and valid URLs so we choose to be too eager to try to resolve what might be
 +        // a URL.
 +        if target.contains("://") {
 +            (Some(LinkType::Inline), target.to_string(), title.to_string())
 +        } else {
 +            // Two possibilities:
 +            // * path-based links: `../../module/struct.MyStruct.html`
 +            // * module-based links (AKA intra-doc links): `super::super::module::MyStruct`
 +            if let Some((target, title)) = rewrite_intra_doc_link(db, definition, target, title) {
 +                return (None, target, title);
 +            }
 +            if let Some(target) = rewrite_url_link(db, definition, target) {
 +                return (Some(LinkType::Inline), target, title.to_string());
 +            }
 +
 +            (None, target.to_string(), title.to_string())
 +        }
 +    });
 +    let mut out = String::new();
 +    cmark_resume_with_options(
 +        doc,
 +        &mut out,
 +        None,
 +        CMarkOptions { code_block_token_count: 3, ..Default::default() },
 +    )
 +    .ok();
 +    out
 +}
 +
 +/// Remove all links in markdown documentation.
 +pub(crate) fn remove_links(markdown: &str) -> String {
 +    let mut drop_link = false;
 +
 +    let mut cb = |_: BrokenLink<'_>| {
 +        let empty = InlineStr::try_from("").unwrap();
 +        Some((CowStr::Inlined(empty), CowStr::Inlined(empty)))
 +    };
 +    let doc = Parser::new_with_broken_link_callback(markdown, MARKDOWN_OPTIONS, Some(&mut cb));
 +    let doc = doc.filter_map(move |evt| match evt {
 +        Event::Start(Tag::Link(link_type, target, title)) => {
 +            if link_type == LinkType::Inline && target.contains("://") {
 +                Some(Event::Start(Tag::Link(link_type, target, title)))
 +            } else {
 +                drop_link = true;
 +                None
 +            }
 +        }
 +        Event::End(_) if drop_link => {
 +            drop_link = false;
 +            None
 +        }
 +        _ => Some(evt),
 +    });
 +
 +    let mut out = String::new();
 +    cmark_resume_with_options(
 +        doc,
 +        &mut out,
 +        None,
 +        CMarkOptions { code_block_token_count: 3, ..Default::default() },
 +    )
 +    .ok();
 +    out
 +}
 +
 +/// Retrieve a link to documentation for the given symbol.
 +pub(crate) fn external_docs(
 +    db: &RootDatabase,
 +    position: &FilePosition,
 +) -> Option<DocumentationLink> {
 +    let sema = &Semantics::new(db);
 +    let file = sema.parse(position.file_id).syntax().clone();
 +    let token = pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
 +        IDENT | INT_NUMBER | T![self] => 3,
 +        T!['('] | T![')'] => 2,
 +        kind if kind.is_trivia() => 0,
 +        _ => 1,
 +    })?;
 +    let token = sema.descend_into_macros_single(token);
 +
 +    let node = token.parent()?;
 +    let definition = match_ast! {
 +        match node {
 +            ast::NameRef(name_ref) => match NameRefClass::classify(sema, &name_ref)? {
 +                NameRefClass::Definition(def) => def,
 +                NameRefClass::FieldShorthand { local_ref: _, field_ref } => {
 +                    Definition::Field(field_ref)
 +                }
 +            },
 +            ast::Name(name) => match NameClass::classify(sema, &name)? {
 +                NameClass::Definition(it) | NameClass::ConstReference(it) => it,
 +                NameClass::PatFieldShorthand { local_def: _, field_ref } => Definition::Field(field_ref),
 +            },
 +            _ => return None,
 +        }
 +    };
 +
 +    get_doc_link(db, definition)
 +}
 +
 +/// Extracts all links from a given markdown text returning the definition text range, link-text
 +/// and the namespace if known.
 +pub(crate) fn extract_definitions_from_docs(
 +    docs: &hir::Documentation,
 +) -> Vec<(TextRange, String, Option<hir::Namespace>)> {
 +    Parser::new_with_broken_link_callback(
 +        docs.as_str(),
 +        MARKDOWN_OPTIONS,
 +        Some(&mut broken_link_clone_cb),
 +    )
 +    .into_offset_iter()
 +    .filter_map(|(event, range)| match event {
 +        Event::Start(Tag::Link(_, target, _)) => {
 +            let (link, ns) = parse_intra_doc_link(&target);
 +            Some((
 +                TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?),
 +                link.to_string(),
 +                ns,
 +            ))
 +        }
 +        _ => None,
 +    })
 +    .collect()
 +}
 +
 +pub(crate) fn resolve_doc_path_for_def(
 +    db: &dyn HirDatabase,
 +    def: Definition,
 +    link: &str,
 +    ns: Option<hir::Namespace>,
 +) -> Option<Definition> {
 +    match def {
 +        Definition::Module(it) => it.resolve_doc_path(db, link, ns),
 +        Definition::Function(it) => it.resolve_doc_path(db, link, ns),
 +        Definition::Adt(it) => it.resolve_doc_path(db, link, ns),
 +        Definition::Variant(it) => it.resolve_doc_path(db, link, ns),
 +        Definition::Const(it) => it.resolve_doc_path(db, link, ns),
 +        Definition::Static(it) => it.resolve_doc_path(db, link, ns),
 +        Definition::Trait(it) => it.resolve_doc_path(db, link, ns),
 +        Definition::TypeAlias(it) => it.resolve_doc_path(db, link, ns),
 +        Definition::Macro(it) => it.resolve_doc_path(db, link, ns),
 +        Definition::Field(it) => it.resolve_doc_path(db, link, ns),
++        Definition::SelfType(it) => it.resolve_doc_path(db, link, ns),
 +        Definition::BuiltinAttr(_)
 +        | Definition::ToolModule(_)
 +        | Definition::BuiltinType(_)
 +        | Definition::Local(_)
 +        | Definition::GenericParam(_)
 +        | Definition::Label(_)
 +        | Definition::DeriveHelper(_) => None,
 +    }
 +    .map(Definition::from)
 +}
 +
 +pub(crate) fn doc_attributes(
 +    sema: &Semantics<'_, RootDatabase>,
 +    node: &SyntaxNode,
 +) -> Option<(hir::AttrsWithOwner, Definition)> {
 +    match_ast! {
 +        match node {
 +            ast::SourceFile(it)  => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Module(def))),
 +            ast::Module(it)      => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Module(def))),
 +            ast::Fn(it)          => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Function(def))),
 +            ast::Struct(it)      => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Adt(hir::Adt::Struct(def)))),
 +            ast::Union(it)       => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Adt(hir::Adt::Union(def)))),
 +            ast::Enum(it)        => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Adt(hir::Adt::Enum(def)))),
 +            ast::Variant(it)     => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Variant(def))),
 +            ast::Trait(it)       => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Trait(def))),
 +            ast::Static(it)      => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Static(def))),
 +            ast::Const(it)       => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Const(def))),
 +            ast::TypeAlias(it)   => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::TypeAlias(def))),
 +            ast::Impl(it)        => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::SelfType(def))),
 +            ast::RecordField(it) => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Field(def))),
 +            ast::TupleField(it)  => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Field(def))),
 +            ast::Macro(it)       => sema.to_def(&it).map(|def| (def.attrs(sema.db), Definition::Macro(def))),
 +            // ast::Use(it) => sema.to_def(&it).map(|def| (Box::new(it) as _, def.attrs(sema.db))),
 +            _ => None
 +        }
 +    }
 +}
 +
 +pub(crate) struct DocCommentToken {
 +    doc_token: SyntaxToken,
 +    prefix_len: TextSize,
 +}
 +
 +pub(crate) fn token_as_doc_comment(doc_token: &SyntaxToken) -> Option<DocCommentToken> {
 +    (match_ast! {
 +        match doc_token {
 +            ast::Comment(comment) => TextSize::try_from(comment.prefix().len()).ok(),
 +            ast::String(string) => doc_token.parent_ancestors().find_map(ast::Attr::cast)
 +                .filter(|attr| attr.simple_name().as_deref() == Some("doc")).and_then(|_| string.open_quote_text_range().map(|it| it.len())),
 +            _ => None,
 +        }
 +    }).map(|prefix_len| DocCommentToken { prefix_len, doc_token: doc_token.clone() })
 +}
 +
 +impl DocCommentToken {
 +    pub(crate) fn get_definition_with_descend_at<T>(
 +        self,
 +        sema: &Semantics<'_, RootDatabase>,
 +        offset: TextSize,
 +        // Definition, CommentOwner, range of intra doc link in original file
 +        mut cb: impl FnMut(Definition, SyntaxNode, TextRange) -> Option<T>,
 +    ) -> Option<T> {
 +        let DocCommentToken { prefix_len, doc_token } = self;
 +        // offset relative to the comments contents
 +        let original_start = doc_token.text_range().start();
 +        let relative_comment_offset = offset - original_start - prefix_len;
 +
 +        sema.descend_into_macros(doc_token).into_iter().find_map(|t| {
 +            let (node, descended_prefix_len) = match_ast! {
 +                match t {
 +                    ast::Comment(comment) => (t.parent()?, TextSize::try_from(comment.prefix().len()).ok()?),
 +                    ast::String(string) => (t.parent_ancestors().skip_while(|n| n.kind() != ATTR).nth(1)?, string.open_quote_text_range()?.len()),
 +                    _ => return None,
 +                }
 +            };
 +            let token_start = t.text_range().start();
 +            let abs_in_expansion_offset = token_start + relative_comment_offset + descended_prefix_len;
 +
 +            let (attributes, def) = doc_attributes(sema, &node)?;
 +            let (docs, doc_mapping) = attributes.docs_with_rangemap(sema.db)?;
 +            let (in_expansion_range, link, ns) =
 +                extract_definitions_from_docs(&docs).into_iter().find_map(|(range, link, ns)| {
 +                    let mapped = doc_mapping.map(range)?;
 +                    (mapped.value.contains(abs_in_expansion_offset)).then(|| (mapped.value, link, ns))
 +                })?;
 +            // get the relative range to the doc/attribute in the expansion
 +            let in_expansion_relative_range = in_expansion_range - descended_prefix_len - token_start;
 +            // Apply relative range to the original input comment
 +            let absolute_range = in_expansion_relative_range + original_start + prefix_len;
 +            let def = resolve_doc_path_for_def(sema.db, def, &link, ns)?;
 +            cb(def, node, absolute_range)
 +        })
 +    }
 +}
 +
 +fn broken_link_clone_cb<'a>(link: BrokenLink<'a>) -> Option<(CowStr<'a>, CowStr<'a>)> {
 +    Some((/*url*/ link.reference.clone(), /*title*/ link.reference))
 +}
 +
 +// FIXME:
 +// BUG: For Option::Some
 +// Returns https://doc.rust-lang.org/nightly/core/prelude/v1/enum.Option.html#variant.Some
 +// Instead of https://doc.rust-lang.org/nightly/core/option/enum.Option.html
 +//
 +// This should cease to be a problem if RFC2988 (Stable Rustdoc URLs) is implemented
 +// https://github.com/rust-lang/rfcs/pull/2988
 +fn get_doc_link(db: &RootDatabase, def: Definition) -> Option<String> {
 +    let (target, file, frag) = filename_and_frag_for_def(db, def)?;
 +
 +    let mut url = get_doc_base_url(db, target)?;
 +
 +    if let Some(path) = mod_path_of_def(db, target) {
 +        url = url.join(&path).ok()?;
 +    }
 +
 +    url = url.join(&file).ok()?;
 +    url.set_fragment(frag.as_deref());
 +
 +    Some(url.into())
 +}
 +
 +fn rewrite_intra_doc_link(
 +    db: &RootDatabase,
 +    def: Definition,
 +    target: &str,
 +    title: &str,
 +) -> Option<(String, String)> {
 +    let (link, ns) = parse_intra_doc_link(target);
 +
 +    let resolved = resolve_doc_path_for_def(db, def, link, ns)?;
 +    let mut url = get_doc_base_url(db, resolved)?;
 +
 +    let (_, file, frag) = filename_and_frag_for_def(db, resolved)?;
 +    if let Some(path) = mod_path_of_def(db, resolved) {
 +        url = url.join(&path).ok()?;
 +    }
 +
 +    url = url.join(&file).ok()?;
 +    url.set_fragment(frag.as_deref());
 +
 +    Some((url.into(), strip_prefixes_suffixes(title).to_string()))
 +}
 +
 +/// Try to resolve path to local documentation via path-based links (i.e. `../gateway/struct.Shard.html`).
 +fn rewrite_url_link(db: &RootDatabase, def: Definition, target: &str) -> Option<String> {
 +    if !(target.contains('#') || target.contains(".html")) {
 +        return None;
 +    }
 +
 +    let mut url = get_doc_base_url(db, def)?;
 +    let (def, file, frag) = filename_and_frag_for_def(db, def)?;
 +
 +    if let Some(path) = mod_path_of_def(db, def) {
 +        url = url.join(&path).ok()?;
 +    }
 +
 +    url = url.join(&file).ok()?;
 +    url.set_fragment(frag.as_deref());
 +    url.join(target).ok().map(Into::into)
 +}
 +
 +fn mod_path_of_def(db: &RootDatabase, def: Definition) -> Option<String> {
 +    def.canonical_module_path(db).map(|it| {
 +        let mut path = String::new();
 +        it.flat_map(|it| it.name(db)).for_each(|name| format_to!(path, "{}/", name));
 +        path
 +    })
 +}
 +
 +/// Rewrites a markdown document, applying 'callback' to each link.
 +fn map_links<'e>(
 +    events: impl Iterator<Item = Event<'e>>,
 +    callback: impl Fn(&str, &str) -> (Option<LinkType>, String, String),
 +) -> impl Iterator<Item = Event<'e>> {
 +    let mut in_link = false;
 +    // holds the origin link target on start event and the rewritten one on end event
 +    let mut end_link_target: Option<CowStr<'_>> = None;
 +    // normally link's type is determined by the type of link tag in the end event,
 +    // however in some cases we want to change the link type, for example,
 +    // `Shortcut` type parsed from Start/End tags doesn't make sense for url links
 +    let mut end_link_type: Option<LinkType> = None;
 +
 +    events.map(move |evt| match evt {
 +        Event::Start(Tag::Link(link_type, ref target, _)) => {
 +            in_link = true;
 +            end_link_target = Some(target.clone());
 +            end_link_type = Some(link_type);
 +            evt
 +        }
 +        Event::End(Tag::Link(link_type, target, _)) => {
 +            in_link = false;
 +            Event::End(Tag::Link(
 +                end_link_type.unwrap_or(link_type),
 +                end_link_target.take().unwrap_or(target),
 +                CowStr::Borrowed(""),
 +            ))
 +        }
 +        Event::Text(s) if in_link => {
 +            let (link_type, link_target_s, link_name) =
 +                callback(&end_link_target.take().unwrap(), &s);
 +            end_link_target = Some(CowStr::Boxed(link_target_s.into()));
 +            if !matches!(end_link_type, Some(LinkType::Autolink)) {
 +                end_link_type = link_type;
 +            }
 +            Event::Text(CowStr::Boxed(link_name.into()))
 +        }
 +        Event::Code(s) if in_link => {
 +            let (link_type, link_target_s, link_name) =
 +                callback(&end_link_target.take().unwrap(), &s);
 +            end_link_target = Some(CowStr::Boxed(link_target_s.into()));
 +            if !matches!(end_link_type, Some(LinkType::Autolink)) {
 +                end_link_type = link_type;
 +            }
 +            Event::Code(CowStr::Boxed(link_name.into()))
 +        }
 +        _ => evt,
 +    })
 +}
 +
 +/// Get the root URL for the documentation of a definition.
 +///
 +/// ```ignore
 +/// https://doc.rust-lang.org/std/iter/trait.Iterator.html#tymethod.next
 +/// ^^^^^^^^^^^^^^^^^^^^^^^^^^
 +/// ```
 +fn get_doc_base_url(db: &RootDatabase, def: Definition) -> Option<Url> {
 +    // special case base url of `BuiltinType` to core
 +    // https://github.com/rust-lang/rust-analyzer/issues/12250
 +    if let Definition::BuiltinType(..) = def {
 +        return Url::parse("https://doc.rust-lang.org/nightly/core/").ok();
 +    };
 +
 +    let krate = def.krate(db)?;
 +    let display_name = krate.display_name(db)?;
 +
 +    let base = match db.crate_graph()[krate.into()].origin {
 +        // std and co do not specify `html_root_url` any longer so we gotta handwrite this ourself.
 +        // FIXME: Use the toolchains channel instead of nightly
 +        CrateOrigin::Lang(
 +            origin @ (LangCrateOrigin::Alloc
 +            | LangCrateOrigin::Core
 +            | LangCrateOrigin::ProcMacro
 +            | LangCrateOrigin::Std
 +            | LangCrateOrigin::Test),
 +        ) => {
 +            format!("https://doc.rust-lang.org/nightly/{origin}")
 +        }
 +        _ => {
 +            krate.get_html_root_url(db).or_else(|| {
 +                let version = krate.version(db);
 +                // Fallback to docs.rs. This uses `display_name` and can never be
 +                // correct, but that's what fallbacks are about.
 +                //
 +                // FIXME: clicking on the link should just open the file in the editor,
 +                // instead of falling back to external urls.
 +                Some(format!(
 +                    "https://docs.rs/{krate}/{version}/",
 +                    krate = display_name,
 +                    version = version.as_deref().unwrap_or("*")
 +                ))
 +            })?
 +        }
 +    };
 +    Url::parse(&base).ok()?.join(&format!("{}/", display_name)).ok()
 +}
 +
 +/// Get the filename and extension generated for a symbol by rustdoc.
 +///
 +/// ```ignore
 +/// https://doc.rust-lang.org/std/iter/trait.Iterator.html#tymethod.next
 +///                                    ^^^^^^^^^^^^^^^^^^^
 +/// ```
 +fn filename_and_frag_for_def(
 +    db: &dyn HirDatabase,
 +    def: Definition,
 +) -> Option<(Definition, String, Option<String>)> {
 +    if let Some(assoc_item) = def.as_assoc_item(db) {
 +        let def = match assoc_item.container(db) {
 +            AssocItemContainer::Trait(t) => t.into(),
 +            AssocItemContainer::Impl(i) => i.self_ty(db).as_adt()?.into(),
 +        };
 +        let (_, file, _) = filename_and_frag_for_def(db, def)?;
 +        let frag = get_assoc_item_fragment(db, assoc_item)?;
 +        return Some((def, file, Some(frag)));
 +    }
 +
 +    let res = match def {
 +        Definition::Adt(adt) => match adt {
 +            Adt::Struct(s) => format!("struct.{}.html", s.name(db)),
 +            Adt::Enum(e) => format!("enum.{}.html", e.name(db)),
 +            Adt::Union(u) => format!("union.{}.html", u.name(db)),
 +        },
 +        Definition::Module(m) => match m.name(db) {
 +            // `#[doc(keyword = "...")]` is internal used only by rust compiler
 +            Some(name) => match m.attrs(db).by_key("doc").find_string_value_in_tt("keyword") {
 +                Some(kw) => {
 +                    format!("keyword.{}.html", kw.trim_matches('"'))
 +                }
 +                None => format!("{}/index.html", name),
 +            },
 +            None => String::from("index.html"),
 +        },
 +        Definition::Trait(t) => format!("trait.{}.html", t.name(db)),
 +        Definition::TypeAlias(t) => format!("type.{}.html", t.name(db)),
 +        Definition::BuiltinType(t) => format!("primitive.{}.html", t.name()),
 +        Definition::Function(f) => format!("fn.{}.html", f.name(db)),
 +        Definition::Variant(ev) => {
 +            format!("enum.{}.html#variant.{}", ev.parent_enum(db).name(db), ev.name(db))
 +        }
 +        Definition::Const(c) => format!("const.{}.html", c.name(db)?),
 +        Definition::Static(s) => format!("static.{}.html", s.name(db)),
 +        Definition::Macro(mac) => format!("macro.{}.html", mac.name(db)),
 +        Definition::Field(field) => {
 +            let def = match field.parent_def(db) {
 +                hir::VariantDef::Struct(it) => Definition::Adt(it.into()),
 +                hir::VariantDef::Union(it) => Definition::Adt(it.into()),
 +                hir::VariantDef::Variant(it) => Definition::Variant(it),
 +            };
 +            let (_, file, _) = filename_and_frag_for_def(db, def)?;
 +            return Some((def, file, Some(format!("structfield.{}", field.name(db)))));
 +        }
 +        Definition::SelfType(impl_) => {
 +            let adt = impl_.self_ty(db).as_adt()?.into();
 +            let (_, file, _) = filename_and_frag_for_def(db, adt)?;
 +            // FIXME fragment numbering
 +            return Some((adt, file, Some(String::from("impl"))));
 +        }
 +        Definition::Local(_)
 +        | Definition::GenericParam(_)
 +        | Definition::Label(_)
 +        | Definition::BuiltinAttr(_)
 +        | Definition::ToolModule(_)
 +        | Definition::DeriveHelper(_) => return None,
 +    };
 +
 +    Some((def, res, None))
 +}
 +
 +/// Get the fragment required to link to a specific field, method, associated type, or associated constant.
 +///
 +/// ```ignore
 +/// https://doc.rust-lang.org/std/iter/trait.Iterator.html#tymethod.next
 +///                                                       ^^^^^^^^^^^^^^
 +/// ```
 +fn get_assoc_item_fragment(db: &dyn HirDatabase, assoc_item: hir::AssocItem) -> Option<String> {
 +    Some(match assoc_item {
 +        AssocItem::Function(function) => {
 +            let is_trait_method =
 +                function.as_assoc_item(db).and_then(|assoc| assoc.containing_trait(db)).is_some();
 +            // This distinction may get more complicated when specialization is available.
 +            // Rustdoc makes this decision based on whether a method 'has defaultness'.
 +            // Currently this is only the case for provided trait methods.
 +            if is_trait_method && !function.has_body(db) {
 +                format!("tymethod.{}", function.name(db))
 +            } else {
 +                format!("method.{}", function.name(db))
 +            }
 +        }
 +        AssocItem::Const(constant) => format!("associatedconstant.{}", constant.name(db)?),
 +        AssocItem::TypeAlias(ty) => format!("associatedtype.{}", ty.name(db)),
 +    })
 +}
index dd108fa799970879d333b0d2d4de7b25d99f4d1e,0000000000000000000000000000000000000000..d61d69a090b33850049acfa36b8327ba7b887965
mode 100644,000000..100644
--- /dev/null
@@@ -1,702 -1,0 +1,710 @@@
-     moniker::{MonikerKind, MonikerResult, PackageInformation},
 +//! ide crate provides "ide-centric" APIs for the rust-analyzer. That is,
 +//! it generally operates with files and text ranges, and returns results as
 +//! Strings, suitable for displaying to the human.
 +//!
 +//! What powers this API are the `RootDatabase` struct, which defines a `salsa`
 +//! database, and the `hir` crate, where majority of the analysis happens.
 +//! However, IDE specific bits of the analysis (most notably completion) happen
 +//! in this crate.
 +
 +// For proving that RootDatabase is RefUnwindSafe.
 +#![recursion_limit = "128"]
 +#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
 +
 +#[allow(unused)]
 +macro_rules! eprintln {
 +    ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
 +}
 +
 +#[cfg(test)]
 +mod fixture;
 +
 +mod markup;
 +mod prime_caches;
 +mod navigation_target;
 +
 +mod annotations;
 +mod call_hierarchy;
 +mod signature_help;
 +mod doc_links;
 +mod highlight_related;
 +mod expand_macro;
 +mod extend_selection;
 +mod file_structure;
 +mod fn_references;
 +mod folding_ranges;
 +mod goto_declaration;
 +mod goto_definition;
 +mod goto_implementation;
 +mod goto_type_definition;
 +mod hover;
 +mod inlay_hints;
 +mod join_lines;
 +mod markdown_remove;
 +mod matching_brace;
 +mod moniker;
 +mod move_item;
 +mod parent_module;
 +mod references;
 +mod rename;
 +mod runnables;
 +mod ssr;
 +mod static_index;
 +mod status;
 +mod syntax_highlighting;
 +mod syntax_tree;
 +mod typing;
 +mod view_crate_graph;
 +mod view_hir;
 +mod view_item_tree;
 +mod shuffle_crate_graph;
 +
 +use std::sync::Arc;
 +
 +use cfg::CfgOptions;
 +use ide_db::{
 +    base_db::{
 +        salsa::{self, ParallelDatabase},
 +        CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, VfsPath,
 +    },
 +    symbol_index, LineIndexDatabase,
 +};
 +use syntax::SourceFile;
 +
 +use crate::navigation_target::{ToNav, TryToNav};
 +
 +pub use crate::{
 +    annotations::{Annotation, AnnotationConfig, AnnotationKind},
 +    call_hierarchy::CallItem,
 +    expand_macro::ExpandedMacro,
 +    file_structure::{StructureNode, StructureNodeKind},
 +    folding_ranges::{Fold, FoldKind},
 +    highlight_related::{HighlightRelatedConfig, HighlightedRange},
 +    hover::{HoverAction, HoverConfig, HoverDocFormat, HoverGotoTypeData, HoverResult},
 +    inlay_hints::{
 +        ClosureReturnTypeHints, InlayHint, InlayHintsConfig, InlayKind, InlayTooltip,
 +        LifetimeElisionHints, ReborrowHints,
 +    },
 +    join_lines::JoinLinesConfig,
 +    markup::Markup,
-         HlRange,
++    moniker::{MonikerDescriptorKind, MonikerKind, MonikerResult, PackageInformation},
 +    move_item::Direction,
 +    navigation_target::NavigationTarget,
 +    prime_caches::ParallelPrimeCachesProgress,
 +    references::ReferenceSearchResult,
 +    rename::RenameError,
 +    runnables::{Runnable, RunnableKind, TestId},
 +    signature_help::SignatureHelp,
 +    static_index::{StaticIndex, StaticIndexedFile, TokenId, TokenStaticData},
 +    syntax_highlighting::{
 +        tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
-     pub fn highlight(&self, file_id: FileId) -> Cancellable<Vec<HlRange>> {
-         self.with_db(|db| syntax_highlighting::highlight(db, file_id, None, false))
++        HighlightConfig, HlRange,
 +    },
 +};
 +pub use hir::{Documentation, Semantics};
 +pub use ide_assists::{
 +    Assist, AssistConfig, AssistId, AssistKind, AssistResolveStrategy, SingleResolve,
 +};
 +pub use ide_completion::{
 +    CallableSnippets, CompletionConfig, CompletionItem, CompletionItemKind, CompletionRelevance,
 +    Snippet, SnippetScope,
 +};
 +pub use ide_db::{
 +    base_db::{
 +        Cancelled, Change, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange,
 +        SourceRoot, SourceRootId,
 +    },
 +    label::Label,
 +    line_index::{LineCol, LineColUtf16, LineIndex},
 +    search::{ReferenceCategory, SearchScope},
 +    source_change::{FileSystemEdit, SourceChange},
 +    symbol_index::Query,
 +    RootDatabase, SymbolKind,
 +};
 +pub use ide_diagnostics::{Diagnostic, DiagnosticsConfig, ExprFillDefaultMode, Severity};
 +pub use ide_ssr::SsrError;
 +pub use syntax::{TextRange, TextSize};
 +pub use text_edit::{Indel, TextEdit};
 +
 +pub type Cancellable<T> = Result<T, Cancelled>;
 +
 +/// Info associated with a text range.
 +#[derive(Debug)]
 +pub struct RangeInfo<T> {
 +    pub range: TextRange,
 +    pub info: T,
 +}
 +
 +impl<T> RangeInfo<T> {
 +    pub fn new(range: TextRange, info: T) -> RangeInfo<T> {
 +        RangeInfo { range, info }
 +    }
 +}
 +
 +/// `AnalysisHost` stores the current state of the world.
 +#[derive(Debug)]
 +pub struct AnalysisHost {
 +    db: RootDatabase,
 +}
 +
 +impl AnalysisHost {
 +    pub fn new(lru_capacity: Option<usize>) -> AnalysisHost {
 +        AnalysisHost { db: RootDatabase::new(lru_capacity) }
 +    }
 +
 +    pub fn update_lru_capacity(&mut self, lru_capacity: Option<usize>) {
 +        self.db.update_lru_capacity(lru_capacity);
 +    }
 +
 +    /// Returns a snapshot of the current state, which you can query for
 +    /// semantic information.
 +    pub fn analysis(&self) -> Analysis {
 +        Analysis { db: self.db.snapshot() }
 +    }
 +
 +    /// Applies changes to the current state of the world. If there are
 +    /// outstanding snapshots, they will be canceled.
 +    pub fn apply_change(&mut self, change: Change) {
 +        self.db.apply_change(change)
 +    }
 +
 +    /// NB: this clears the database
 +    pub fn per_query_memory_usage(&mut self) -> Vec<(String, profile::Bytes)> {
 +        self.db.per_query_memory_usage()
 +    }
 +    pub fn request_cancellation(&mut self) {
 +        self.db.request_cancellation();
 +    }
 +    pub fn raw_database(&self) -> &RootDatabase {
 +        &self.db
 +    }
 +    pub fn raw_database_mut(&mut self) -> &mut RootDatabase {
 +        &mut self.db
 +    }
 +
 +    pub fn shuffle_crate_graph(&mut self) {
 +        shuffle_crate_graph::shuffle_crate_graph(&mut self.db);
 +    }
 +}
 +
 +impl Default for AnalysisHost {
 +    fn default() -> AnalysisHost {
 +        AnalysisHost::new(None)
 +    }
 +}
 +
 +/// Analysis is a snapshot of a world state at a moment in time. It is the main
 +/// entry point for asking semantic information about the world. When the world
 +/// state is advanced using `AnalysisHost::apply_change` method, all existing
 +/// `Analysis` are canceled (most method return `Err(Canceled)`).
 +#[derive(Debug)]
 +pub struct Analysis {
 +    db: salsa::Snapshot<RootDatabase>,
 +}
 +
 +// As a general design guideline, `Analysis` API are intended to be independent
 +// from the language server protocol. That is, when exposing some functionality
 +// we should think in terms of "what API makes most sense" and not in terms of
 +// "what types LSP uses". Although currently LSP is the only consumer of the
 +// API, the API should in theory be usable as a library, or via a different
 +// protocol.
 +impl Analysis {
 +    // Creates an analysis instance for a single file, without any external
 +    // dependencies, stdlib support or ability to apply changes. See
 +    // `AnalysisHost` for creating a fully-featured analysis.
 +    pub fn from_single_file(text: String) -> (Analysis, FileId) {
 +        let mut host = AnalysisHost::default();
 +        let file_id = FileId(0);
 +        let mut file_set = FileSet::default();
 +        file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_string()));
 +        let source_root = SourceRoot::new_local(file_set);
 +
 +        let mut change = Change::new();
 +        change.set_roots(vec![source_root]);
 +        let mut crate_graph = CrateGraph::default();
 +        // FIXME: cfg options
 +        // Default to enable test for single file.
 +        let mut cfg_options = CfgOptions::default();
 +        cfg_options.insert_atom("test".into());
 +        crate_graph.add_crate_root(
 +            file_id,
 +            Edition::CURRENT,
 +            None,
 +            None,
 +            cfg_options.clone(),
 +            cfg_options,
 +            Env::default(),
 +            Ok(Vec::new()),
 +            false,
 +            CrateOrigin::CratesIo { repo: None },
 +        );
 +        change.change_file(file_id, Some(Arc::new(text)));
 +        change.set_crate_graph(crate_graph);
 +        host.apply_change(change);
 +        (host.analysis(), file_id)
 +    }
 +
 +    /// Debug info about the current state of the analysis.
 +    pub fn status(&self, file_id: Option<FileId>) -> Cancellable<String> {
 +        self.with_db(|db| status::status(&*db, file_id))
 +    }
 +
 +    pub fn parallel_prime_caches<F>(&self, num_worker_threads: u8, cb: F) -> Cancellable<()>
 +    where
 +        F: Fn(ParallelPrimeCachesProgress) + Sync + std::panic::UnwindSafe,
 +    {
 +        self.with_db(move |db| prime_caches::parallel_prime_caches(db, num_worker_threads, &cb))
 +    }
 +
 +    /// Gets the text of the source file.
 +    pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<String>> {
 +        self.with_db(|db| db.file_text(file_id))
 +    }
 +
 +    /// Gets the syntax tree of the file.
 +    pub fn parse(&self, file_id: FileId) -> Cancellable<SourceFile> {
 +        self.with_db(|db| db.parse(file_id).tree())
 +    }
 +
 +    /// Returns true if this file belongs to an immutable library.
 +    pub fn is_library_file(&self, file_id: FileId) -> Cancellable<bool> {
 +        use ide_db::base_db::SourceDatabaseExt;
 +        self.with_db(|db| db.source_root(db.file_source_root(file_id)).is_library)
 +    }
 +
 +    /// Gets the file's `LineIndex`: data structure to convert between absolute
 +    /// offsets and line/column representation.
 +    pub fn file_line_index(&self, file_id: FileId) -> Cancellable<Arc<LineIndex>> {
 +        self.with_db(|db| db.line_index(file_id))
 +    }
 +
 +    /// Selects the next syntactic nodes encompassing the range.
 +    pub fn extend_selection(&self, frange: FileRange) -> Cancellable<TextRange> {
 +        self.with_db(|db| extend_selection::extend_selection(db, frange))
 +    }
 +
 +    /// Returns position of the matching brace (all types of braces are
 +    /// supported).
 +    pub fn matching_brace(&self, position: FilePosition) -> Cancellable<Option<TextSize>> {
 +        self.with_db(|db| {
 +            let parse = db.parse(position.file_id);
 +            let file = parse.tree();
 +            matching_brace::matching_brace(&file, position.offset)
 +        })
 +    }
 +
 +    /// Returns a syntax tree represented as `String`, for debug purposes.
 +    // FIXME: use a better name here.
 +    pub fn syntax_tree(
 +        &self,
 +        file_id: FileId,
 +        text_range: Option<TextRange>,
 +    ) -> Cancellable<String> {
 +        self.with_db(|db| syntax_tree::syntax_tree(db, file_id, text_range))
 +    }
 +
 +    pub fn view_hir(&self, position: FilePosition) -> Cancellable<String> {
 +        self.with_db(|db| view_hir::view_hir(db, position))
 +    }
 +
 +    pub fn view_item_tree(&self, file_id: FileId) -> Cancellable<String> {
 +        self.with_db(|db| view_item_tree::view_item_tree(db, file_id))
 +    }
 +
 +    /// Renders the crate graph to GraphViz "dot" syntax.
 +    pub fn view_crate_graph(&self, full: bool) -> Cancellable<Result<String, String>> {
 +        self.with_db(|db| view_crate_graph::view_crate_graph(db, full))
 +    }
 +
 +    pub fn expand_macro(&self, position: FilePosition) -> Cancellable<Option<ExpandedMacro>> {
 +        self.with_db(|db| expand_macro::expand_macro(db, position))
 +    }
 +
 +    /// Returns an edit to remove all newlines in the range, cleaning up minor
 +    /// stuff like trailing commas.
 +    pub fn join_lines(&self, config: &JoinLinesConfig, frange: FileRange) -> Cancellable<TextEdit> {
 +        self.with_db(|db| {
 +            let parse = db.parse(frange.file_id);
 +            join_lines::join_lines(config, &parse.tree(), frange.range)
 +        })
 +    }
 +
 +    /// Returns an edit which should be applied when opening a new line, fixing
 +    /// up minor stuff like continuing the comment.
 +    /// The edit will be a snippet (with `$0`).
 +    pub fn on_enter(&self, position: FilePosition) -> Cancellable<Option<TextEdit>> {
 +        self.with_db(|db| typing::on_enter(db, position))
 +    }
 +
 +    /// Returns an edit which should be applied after a character was typed.
 +    ///
 +    /// This is useful for some on-the-fly fixups, like adding `;` to `let =`
 +    /// automatically.
 +    pub fn on_char_typed(
 +        &self,
 +        position: FilePosition,
 +        char_typed: char,
 +        autoclose: bool,
 +    ) -> Cancellable<Option<SourceChange>> {
 +        // Fast path to not even parse the file.
 +        if !typing::TRIGGER_CHARS.contains(char_typed) {
 +            return Ok(None);
 +        }
 +        if char_typed == '<' && !autoclose {
 +            return Ok(None);
 +        }
 +
 +        self.with_db(|db| typing::on_char_typed(db, position, char_typed))
 +    }
 +
 +    /// Returns a tree representation of symbols in the file. Useful to draw a
 +    /// file outline.
 +    pub fn file_structure(&self, file_id: FileId) -> Cancellable<Vec<StructureNode>> {
 +        self.with_db(|db| file_structure::file_structure(&db.parse(file_id).tree()))
 +    }
 +
 +    /// Returns a list of the places in the file where type hints can be displayed.
 +    pub fn inlay_hints(
 +        &self,
 +        config: &InlayHintsConfig,
 +        file_id: FileId,
 +        range: Option<FileRange>,
 +    ) -> Cancellable<Vec<InlayHint>> {
 +        self.with_db(|db| inlay_hints::inlay_hints(db, file_id, range, config))
 +    }
 +
 +    /// Returns the set of folding ranges.
 +    pub fn folding_ranges(&self, file_id: FileId) -> Cancellable<Vec<Fold>> {
 +        self.with_db(|db| folding_ranges::folding_ranges(&db.parse(file_id).tree()))
 +    }
 +
 +    /// Fuzzy searches for a symbol.
 +    pub fn symbol_search(&self, query: Query) -> Cancellable<Vec<NavigationTarget>> {
 +        self.with_db(|db| {
 +            symbol_index::world_symbols(db, query)
 +                .into_iter() // xx: should we make this a par iter?
 +                .filter_map(|s| s.try_to_nav(db))
 +                .collect::<Vec<_>>()
 +        })
 +    }
 +
 +    /// Returns the definitions from the symbol at `position`.
 +    pub fn goto_definition(
 +        &self,
 +        position: FilePosition,
 +    ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
 +        self.with_db(|db| goto_definition::goto_definition(db, position))
 +    }
 +
 +    /// Returns the declaration from the symbol at `position`.
 +    pub fn goto_declaration(
 +        &self,
 +        position: FilePosition,
 +    ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
 +        self.with_db(|db| goto_declaration::goto_declaration(db, position))
 +    }
 +
 +    /// Returns the impls from the symbol at `position`.
 +    pub fn goto_implementation(
 +        &self,
 +        position: FilePosition,
 +    ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
 +        self.with_db(|db| goto_implementation::goto_implementation(db, position))
 +    }
 +
 +    /// Returns the type definitions for the symbol at `position`.
 +    pub fn goto_type_definition(
 +        &self,
 +        position: FilePosition,
 +    ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
 +        self.with_db(|db| goto_type_definition::goto_type_definition(db, position))
 +    }
 +
 +    /// Finds all usages of the reference at point.
 +    pub fn find_all_refs(
 +        &self,
 +        position: FilePosition,
 +        search_scope: Option<SearchScope>,
 +    ) -> Cancellable<Option<Vec<ReferenceSearchResult>>> {
 +        self.with_db(|db| references::find_all_refs(&Semantics::new(db), position, search_scope))
 +    }
 +
 +    /// Finds all methods and free functions for the file. Does not return tests!
 +    pub fn find_all_methods(&self, file_id: FileId) -> Cancellable<Vec<FileRange>> {
 +        self.with_db(|db| fn_references::find_all_methods(db, file_id))
 +    }
 +
 +    /// Returns a short text describing element at position.
 +    pub fn hover(
 +        &self,
 +        config: &HoverConfig,
 +        range: FileRange,
 +    ) -> Cancellable<Option<RangeInfo<HoverResult>>> {
 +        self.with_db(|db| hover::hover(db, range, config))
 +    }
 +
 +    /// Returns moniker of symbol at position.
 +    pub fn moniker(
 +        &self,
 +        position: FilePosition,
 +    ) -> Cancellable<Option<RangeInfo<Vec<moniker::MonikerResult>>>> {
 +        self.with_db(|db| moniker::moniker(db, position))
 +    }
 +
 +    /// Return URL(s) for the documentation of the symbol under the cursor.
 +    pub fn external_docs(
 +        &self,
 +        position: FilePosition,
 +    ) -> Cancellable<Option<doc_links::DocumentationLink>> {
 +        self.with_db(|db| doc_links::external_docs(db, &position))
 +    }
 +
 +    /// Computes parameter information at the given position.
 +    pub fn signature_help(&self, position: FilePosition) -> Cancellable<Option<SignatureHelp>> {
 +        self.with_db(|db| signature_help::signature_help(db, position))
 +    }
 +
 +    /// Computes call hierarchy candidates for the given file position.
 +    pub fn call_hierarchy(
 +        &self,
 +        position: FilePosition,
 +    ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
 +        self.with_db(|db| call_hierarchy::call_hierarchy(db, position))
 +    }
 +
 +    /// Computes incoming calls for the given file position.
 +    pub fn incoming_calls(&self, position: FilePosition) -> Cancellable<Option<Vec<CallItem>>> {
 +        self.with_db(|db| call_hierarchy::incoming_calls(db, position))
 +    }
 +
 +    /// Computes outgoing calls for the given file position.
 +    pub fn outgoing_calls(&self, position: FilePosition) -> Cancellable<Option<Vec<CallItem>>> {
 +        self.with_db(|db| call_hierarchy::outgoing_calls(db, position))
 +    }
 +
 +    /// Returns a `mod name;` declaration which created the current module.
 +    pub fn parent_module(&self, position: FilePosition) -> Cancellable<Vec<NavigationTarget>> {
 +        self.with_db(|db| parent_module::parent_module(db, position))
 +    }
 +
 +    /// Returns crates this file belongs too.
 +    pub fn crate_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> {
 +        self.with_db(|db| parent_module::crate_for(db, file_id))
 +    }
 +
 +    /// Returns the edition of the given crate.
 +    pub fn crate_edition(&self, crate_id: CrateId) -> Cancellable<Edition> {
 +        self.with_db(|db| db.crate_graph()[crate_id].edition)
 +    }
 +
 +    /// Returns the root file of the given crate.
 +    pub fn crate_root(&self, crate_id: CrateId) -> Cancellable<FileId> {
 +        self.with_db(|db| db.crate_graph()[crate_id].root_file_id)
 +    }
 +
 +    /// Returns the set of possible targets to run for the current file.
 +    pub fn runnables(&self, file_id: FileId) -> Cancellable<Vec<Runnable>> {
 +        self.with_db(|db| runnables::runnables(db, file_id))
 +    }
 +
 +    /// Returns the set of tests for the given file position.
 +    pub fn related_tests(
 +        &self,
 +        position: FilePosition,
 +        search_scope: Option<SearchScope>,
 +    ) -> Cancellable<Vec<Runnable>> {
 +        self.with_db(|db| runnables::related_tests(db, position, search_scope))
 +    }
 +
 +    /// Computes syntax highlighting for the given file
-     pub fn highlight_range(&self, frange: FileRange) -> Cancellable<Vec<HlRange>> {
++    pub fn highlight(
++        &self,
++        highlight_config: HighlightConfig,
++        file_id: FileId,
++    ) -> Cancellable<Vec<HlRange>> {
++        self.with_db(|db| syntax_highlighting::highlight(db, highlight_config, file_id, None))
 +    }
 +
 +    /// Computes all ranges to highlight for a given item in a file.
 +    pub fn highlight_related(
 +        &self,
 +        config: HighlightRelatedConfig,
 +        position: FilePosition,
 +    ) -> Cancellable<Option<Vec<HighlightedRange>>> {
 +        self.with_db(|db| {
 +            highlight_related::highlight_related(&Semantics::new(db), config, position)
 +        })
 +    }
 +
 +    /// Computes syntax highlighting for the given file range.
-             syntax_highlighting::highlight(db, frange.file_id, Some(frange.range), false)
++    pub fn highlight_range(
++        &self,
++        highlight_config: HighlightConfig,
++        frange: FileRange,
++    ) -> Cancellable<Vec<HlRange>> {
 +        self.with_db(|db| {
++            syntax_highlighting::highlight(db, highlight_config, frange.file_id, Some(frange.range))
 +        })
 +    }
 +
 +    /// Computes syntax highlighting for the given file.
 +    pub fn highlight_as_html(&self, file_id: FileId, rainbow: bool) -> Cancellable<String> {
 +        self.with_db(|db| syntax_highlighting::highlight_as_html(db, file_id, rainbow))
 +    }
 +
 +    /// Computes completions at the given position.
 +    pub fn completions(
 +        &self,
 +        config: &CompletionConfig,
 +        position: FilePosition,
 +        trigger_character: Option<char>,
 +    ) -> Cancellable<Option<Vec<CompletionItem>>> {
 +        self.with_db(|db| {
 +            ide_completion::completions(db, config, position, trigger_character).map(Into::into)
 +        })
 +    }
 +
 +    /// Resolves additional completion data at the position given.
 +    pub fn resolve_completion_edits(
 +        &self,
 +        config: &CompletionConfig,
 +        position: FilePosition,
 +        imports: impl IntoIterator<Item = (String, String)> + std::panic::UnwindSafe,
 +    ) -> Cancellable<Vec<TextEdit>> {
 +        Ok(self
 +            .with_db(|db| ide_completion::resolve_completion_edits(db, config, position, imports))?
 +            .unwrap_or_default())
 +    }
 +
 +    /// Computes the set of diagnostics for the given file.
 +    pub fn diagnostics(
 +        &self,
 +        config: &DiagnosticsConfig,
 +        resolve: AssistResolveStrategy,
 +        file_id: FileId,
 +    ) -> Cancellable<Vec<Diagnostic>> {
 +        self.with_db(|db| ide_diagnostics::diagnostics(db, config, &resolve, file_id))
 +    }
 +
 +    /// Convenience function to return assists + quick fixes for diagnostics
 +    pub fn assists_with_fixes(
 +        &self,
 +        assist_config: &AssistConfig,
 +        diagnostics_config: &DiagnosticsConfig,
 +        resolve: AssistResolveStrategy,
 +        frange: FileRange,
 +    ) -> Cancellable<Vec<Assist>> {
 +        let include_fixes = match &assist_config.allowed {
 +            Some(it) => it.iter().any(|&it| it == AssistKind::None || it == AssistKind::QuickFix),
 +            None => true,
 +        };
 +
 +        self.with_db(|db| {
 +            let diagnostic_assists = if include_fixes {
 +                ide_diagnostics::diagnostics(db, diagnostics_config, &resolve, frange.file_id)
 +                    .into_iter()
 +                    .flat_map(|it| it.fixes.unwrap_or_default())
 +                    .filter(|it| it.target.intersect(frange.range).is_some())
 +                    .collect()
 +            } else {
 +                Vec::new()
 +            };
 +            let ssr_assists = ssr::ssr_assists(db, &resolve, frange);
 +            let assists = ide_assists::assists(db, assist_config, resolve, frange);
 +
 +            let mut res = diagnostic_assists;
 +            res.extend(ssr_assists.into_iter());
 +            res.extend(assists.into_iter());
 +
 +            res
 +        })
 +    }
 +
 +    /// Returns the edit required to rename reference at the position to the new
 +    /// name.
 +    pub fn rename(
 +        &self,
 +        position: FilePosition,
 +        new_name: &str,
 +    ) -> Cancellable<Result<SourceChange, RenameError>> {
 +        self.with_db(|db| rename::rename(db, position, new_name))
 +    }
 +
 +    pub fn prepare_rename(
 +        &self,
 +        position: FilePosition,
 +    ) -> Cancellable<Result<RangeInfo<()>, RenameError>> {
 +        self.with_db(|db| rename::prepare_rename(db, position))
 +    }
 +
 +    pub fn will_rename_file(
 +        &self,
 +        file_id: FileId,
 +        new_name_stem: &str,
 +    ) -> Cancellable<Option<SourceChange>> {
 +        self.with_db(|db| rename::will_rename_file(db, file_id, new_name_stem))
 +    }
 +
 +    pub fn structural_search_replace(
 +        &self,
 +        query: &str,
 +        parse_only: bool,
 +        resolve_context: FilePosition,
 +        selections: Vec<FileRange>,
 +    ) -> Cancellable<Result<SourceChange, SsrError>> {
 +        self.with_db(|db| {
 +            let rule: ide_ssr::SsrRule = query.parse()?;
 +            let mut match_finder =
 +                ide_ssr::MatchFinder::in_context(db, resolve_context, selections)?;
 +            match_finder.add_rule(rule)?;
 +            let edits = if parse_only { Default::default() } else { match_finder.edits() };
 +            Ok(SourceChange::from(edits))
 +        })
 +    }
 +
 +    pub fn annotations(
 +        &self,
 +        config: &AnnotationConfig,
 +        file_id: FileId,
 +    ) -> Cancellable<Vec<Annotation>> {
 +        self.with_db(|db| annotations::annotations(db, config, file_id))
 +    }
 +
 +    pub fn resolve_annotation(&self, annotation: Annotation) -> Cancellable<Annotation> {
 +        self.with_db(|db| annotations::resolve_annotation(db, annotation))
 +    }
 +
 +    pub fn move_item(
 +        &self,
 +        range: FileRange,
 +        direction: Direction,
 +    ) -> Cancellable<Option<TextEdit>> {
 +        self.with_db(|db| move_item::move_item(db, range, direction))
 +    }
 +
 +    /// Performs an operation on the database that may be canceled.
 +    ///
 +    /// rust-analyzer needs to be able to answer semantic questions about the
 +    /// code while the code is being modified. A common problem is that a
 +    /// long-running query is being calculated when a new change arrives.
 +    ///
 +    /// We can't just apply the change immediately: this will cause the pending
 +    /// query to see inconsistent state (it will observe an absence of
 +    /// repeatable read). So what we do is we **cancel** all pending queries
 +    /// before applying the change.
 +    ///
 +    /// Salsa implements cancellation by unwinding with a special value and
 +    /// catching it on the API boundary.
 +    fn with_db<F, T>(&self, f: F) -> Cancellable<T>
 +    where
 +        F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe,
 +    {
 +        Cancelled::catch(|| f(&self.db))
 +    }
 +}
 +
 +#[test]
 +fn analysis_is_send() {
 +    fn is_send<T: Send>() {}
 +    is_send::<Analysis>();
 +}
index 4f758967b46194538d41c753da10372f997ba7de,0000000000000000000000000000000000000000..600a526300c76a10594fe7f4b8b5c28ec4a072b8
mode 100644,000000..100644
--- /dev/null
@@@ -1,342 -1,0 +1,447 @@@
-     crate_name: String,
-     path: Vec<Name>,
 +//! This module generates [moniker](https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/#exportsImports)
 +//! for LSIF and LSP.
 +
 +use hir::{db::DefDatabase, AsAssocItem, AssocItemContainer, Crate, Name, Semantics};
 +use ide_db::{
 +    base_db::{CrateOrigin, FileId, FileLoader, FilePosition, LangCrateOrigin},
 +    defs::{Definition, IdentClass},
 +    helpers::pick_best_token,
 +    RootDatabase,
 +};
 +use itertools::Itertools;
 +use syntax::{AstNode, SyntaxKind::*, T};
 +
 +use crate::{doc_links::token_as_doc_comment, RangeInfo};
 +
++#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
++pub enum MonikerDescriptorKind {
++    Namespace,
++    Type,
++    Term,
++    Method,
++    TypeParameter,
++    Parameter,
++    Macro,
++    Meta,
++}
++
++#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
++pub struct MonikerDescriptor {
++    pub name: Name,
++    pub desc: MonikerDescriptorKind,
++}
++
 +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
 +pub struct MonikerIdentifier {
-             MonikerIdentifier { path, crate_name } => {
-                 format!("{}::{}", crate_name, path.iter().map(|x| x.to_string()).join("::"))
++    pub crate_name: String,
++    pub description: Vec<MonikerDescriptor>,
 +}
 +
 +impl ToString for MonikerIdentifier {
 +    fn to_string(&self) -> String {
 +        match self {
-     if matches!(def, Definition::GenericParam(_) | Definition::SelfType(_) | Definition::Local(_)) {
++            MonikerIdentifier { description, crate_name } => {
++                format!(
++                    "{}::{}",
++                    crate_name,
++                    description.iter().map(|x| x.name.to_string()).join("::")
++                )
 +            }
 +        }
 +    }
 +}
 +
 +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
 +pub enum MonikerKind {
 +    Import,
 +    Export,
 +}
 +
 +#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 +pub struct MonikerResult {
 +    pub identifier: MonikerIdentifier,
 +    pub kind: MonikerKind,
 +    pub package_information: PackageInformation,
 +}
 +
++impl MonikerResult {
++    pub fn from_def(db: &RootDatabase, def: Definition, from_crate: Crate) -> Option<Self> {
++        def_to_moniker(db, def, from_crate)
++    }
++}
++
 +#[derive(Debug, Clone, PartialEq, Eq, Hash)]
 +pub struct PackageInformation {
 +    pub name: String,
 +    pub repo: String,
 +    pub version: String,
 +}
 +
 +pub(crate) fn crate_for_file(db: &RootDatabase, file_id: FileId) -> Option<Crate> {
 +    for &krate in db.relevant_crates(file_id).iter() {
 +        let crate_def_map = db.crate_def_map(krate);
 +        for (_, data) in crate_def_map.modules() {
 +            if data.origin.file_id() == Some(file_id) {
 +                return Some(krate.into());
 +            }
 +        }
 +    }
 +    None
 +}
 +
 +pub(crate) fn moniker(
 +    db: &RootDatabase,
 +    FilePosition { file_id, offset }: FilePosition,
 +) -> Option<RangeInfo<Vec<MonikerResult>>> {
 +    let sema = &Semantics::new(db);
 +    let file = sema.parse(file_id).syntax().clone();
 +    let current_crate = crate_for_file(db, file_id)?;
 +    let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
 +        IDENT
 +        | INT_NUMBER
 +        | LIFETIME_IDENT
 +        | T![self]
 +        | T![super]
 +        | T![crate]
 +        | T![Self]
 +        | COMMENT => 2,
 +        kind if kind.is_trivia() => 0,
 +        _ => 1,
 +    })?;
 +    if let Some(doc_comment) = token_as_doc_comment(&original_token) {
 +        return doc_comment.get_definition_with_descend_at(sema, offset, |def, _, _| {
 +            let m = def_to_moniker(db, def, current_crate)?;
 +            Some(RangeInfo::new(original_token.text_range(), vec![m]))
 +        });
 +    }
 +    let navs = sema
 +        .descend_into_macros(original_token.clone())
 +        .into_iter()
 +        .filter_map(|token| {
 +            IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| {
 +                it.into_iter().flat_map(|def| def_to_moniker(sema.db, def, current_crate))
 +            })
 +        })
 +        .flatten()
 +        .unique()
 +        .collect::<Vec<_>>();
 +    Some(RangeInfo::new(original_token.text_range(), navs))
 +}
 +
 +pub(crate) fn def_to_moniker(
 +    db: &RootDatabase,
 +    def: Definition,
 +    from_crate: Crate,
 +) -> Option<MonikerResult> {
-     let mut path = vec![];
-     path.extend(module.path_to_root(db).into_iter().filter_map(|x| x.name(db)));
++    if matches!(
++        def,
++        Definition::GenericParam(_)
++            | Definition::Label(_)
++            | Definition::DeriveHelper(_)
++            | Definition::BuiltinAttr(_)
++            | Definition::ToolModule(_)
++    ) {
 +        return None;
 +    }
++
 +    let module = def.module(db)?;
 +    let krate = module.krate();
-                 path.push(trait_.name(db));
++    let mut description = vec![];
++    description.extend(module.path_to_root(db).into_iter().filter_map(|x| {
++        Some(MonikerDescriptor { name: x.name(db)?, desc: MonikerDescriptorKind::Namespace })
++    }));
 +
 +    // Handle associated items within a trait
 +    if let Some(assoc) = def.as_assoc_item(db) {
 +        let container = assoc.container(db);
 +        match container {
 +            AssocItemContainer::Trait(trait_) => {
 +                // Because different traits can have functions with the same name,
 +                // we have to include the trait name as part of the moniker for uniqueness.
-                     path.push(adt.name(db));
++                description.push(MonikerDescriptor {
++                    name: trait_.name(db),
++                    desc: MonikerDescriptorKind::Type,
++                });
 +            }
 +            AssocItemContainer::Impl(impl_) => {
 +                // Because a struct can implement multiple traits, for implementations
 +                // we add both the struct name and the trait name to the path
 +                if let Some(adt) = impl_.self_ty(db).as_adt() {
-                     path.push(trait_.name(db));
++                    description.push(MonikerDescriptor {
++                        name: adt.name(db),
++                        desc: MonikerDescriptorKind::Type,
++                    });
 +                }
 +
 +                if let Some(trait_) = impl_.trait_(db) {
-         path.push(it.parent_def(db).name(db));
++                    description.push(MonikerDescriptor {
++                        name: trait_.name(db),
++                        desc: MonikerDescriptorKind::Type,
++                    });
 +                }
 +            }
 +        }
 +    }
 +
 +    if let Definition::Field(it) = def {
-     path.push(def.name(db)?);
++        description.push(MonikerDescriptor {
++            name: it.parent_def(db).name(db),
++            desc: MonikerDescriptorKind::Type,
++        });
 +    }
 +
-             path,
++    let name_desc = match def {
++        // These are handled by top-level guard (for performance).
++        Definition::GenericParam(_)
++        | Definition::Label(_)
++        | Definition::DeriveHelper(_)
++        | Definition::BuiltinAttr(_)
++        | Definition::ToolModule(_) => return None,
++
++        Definition::Local(local) => {
++            if !local.is_param(db) {
++                return None;
++            }
++
++            MonikerDescriptor { name: local.name(db), desc: MonikerDescriptorKind::Parameter }
++        }
++        Definition::Macro(m) => {
++            MonikerDescriptor { name: m.name(db), desc: MonikerDescriptorKind::Macro }
++        }
++        Definition::Function(f) => {
++            MonikerDescriptor { name: f.name(db), desc: MonikerDescriptorKind::Method }
++        }
++        Definition::Variant(v) => {
++            MonikerDescriptor { name: v.name(db), desc: MonikerDescriptorKind::Type }
++        }
++        Definition::Const(c) => {
++            MonikerDescriptor { name: c.name(db)?, desc: MonikerDescriptorKind::Term }
++        }
++        Definition::Trait(trait_) => {
++            MonikerDescriptor { name: trait_.name(db), desc: MonikerDescriptorKind::Type }
++        }
++        Definition::TypeAlias(ta) => {
++            MonikerDescriptor { name: ta.name(db), desc: MonikerDescriptorKind::TypeParameter }
++        }
++        Definition::Module(m) => {
++            MonikerDescriptor { name: m.name(db)?, desc: MonikerDescriptorKind::Namespace }
++        }
++        Definition::BuiltinType(b) => {
++            MonikerDescriptor { name: b.name(), desc: MonikerDescriptorKind::Type }
++        }
++        Definition::SelfType(imp) => MonikerDescriptor {
++            name: imp.self_ty(db).as_adt()?.name(db),
++            desc: MonikerDescriptorKind::Type,
++        },
++        Definition::Field(it) => {
++            MonikerDescriptor { name: it.name(db), desc: MonikerDescriptorKind::Term }
++        }
++        Definition::Adt(adt) => {
++            MonikerDescriptor { name: adt.name(db), desc: MonikerDescriptorKind::Type }
++        }
++        Definition::Static(s) => {
++            MonikerDescriptor { name: s.name(db), desc: MonikerDescriptorKind::Meta }
++        }
++    };
++
++    description.push(name_desc);
++
 +    Some(MonikerResult {
 +        identifier: MonikerIdentifier {
 +            crate_name: krate.display_name(db)?.crate_name().to_string(),
++            description,
 +        },
 +        kind: if krate == from_crate { MonikerKind::Export } else { MonikerKind::Import },
 +        package_information: {
 +            let name = krate.display_name(db)?.to_string();
 +            let (repo, version) = match krate.origin(db) {
 +                CrateOrigin::CratesIo { repo } => (repo?, krate.version(db)?),
 +                CrateOrigin::Lang(lang) => (
 +                    "https://github.com/rust-lang/rust/".to_string(),
 +                    match lang {
 +                        LangCrateOrigin::Other => {
 +                            "https://github.com/rust-lang/rust/library/".into()
 +                        }
 +                        lang => format!("https://github.com/rust-lang/rust/library/{lang}",),
 +                    },
 +                ),
 +            };
 +            PackageInformation { name, repo, version }
 +        },
 +    })
 +}
 +
 +#[cfg(test)]
 +mod tests {
 +    use crate::fixture;
 +
 +    use super::MonikerKind;
 +
 +    #[track_caller]
 +    fn no_moniker(ra_fixture: &str) {
 +        let (analysis, position) = fixture::position(ra_fixture);
 +        if let Some(x) = analysis.moniker(position).unwrap() {
 +            assert_eq!(x.info.len(), 0, "Moniker founded but no moniker expected: {:?}", x);
 +        }
 +    }
 +
 +    #[track_caller]
 +    fn check_moniker(ra_fixture: &str, identifier: &str, package: &str, kind: MonikerKind) {
 +        let (analysis, position) = fixture::position(ra_fixture);
 +        let x = analysis.moniker(position).unwrap().expect("no moniker found").info;
 +        assert_eq!(x.len(), 1);
 +        let x = x.into_iter().next().unwrap();
 +        assert_eq!(identifier, x.identifier.to_string());
 +        assert_eq!(package, format!("{:?}", x.package_information));
 +        assert_eq!(kind, x.kind);
 +    }
 +
 +    #[test]
 +    fn basic() {
 +        check_moniker(
 +            r#"
 +//- /lib.rs crate:main deps:foo
 +use foo::module::func;
 +fn main() {
 +    func$0();
 +}
 +//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
 +pub mod module {
 +    pub fn func() {}
 +}
 +"#,
 +            "foo::module::func",
 +            r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
 +            MonikerKind::Import,
 +        );
 +        check_moniker(
 +            r#"
 +//- /lib.rs crate:main deps:foo
 +use foo::module::func;
 +fn main() {
 +    func();
 +}
 +//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
 +pub mod module {
 +    pub fn func$0() {}
 +}
 +"#,
 +            "foo::module::func",
 +            r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
 +            MonikerKind::Export,
 +        );
 +    }
 +
 +    #[test]
 +    fn moniker_for_trait() {
 +        check_moniker(
 +            r#"
 +//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
 +pub mod module {
 +    pub trait MyTrait {
 +        pub fn func$0() {}
 +    }
 +}
 +"#,
 +            "foo::module::MyTrait::func",
 +            r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
 +            MonikerKind::Export,
 +        );
 +    }
 +
 +    #[test]
 +    fn moniker_for_trait_constant() {
 +        check_moniker(
 +            r#"
 +//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
 +pub mod module {
 +    pub trait MyTrait {
 +        const MY_CONST$0: u8;
 +    }
 +}
 +"#,
 +            "foo::module::MyTrait::MY_CONST",
 +            r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
 +            MonikerKind::Export,
 +        );
 +    }
 +
 +    #[test]
 +    fn moniker_for_trait_type() {
 +        check_moniker(
 +            r#"
 +//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
 +pub mod module {
 +    pub trait MyTrait {
 +        type MyType$0;
 +    }
 +}
 +"#,
 +            "foo::module::MyTrait::MyType",
 +            r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
 +            MonikerKind::Export,
 +        );
 +    }
 +
 +    #[test]
 +    fn moniker_for_trait_impl_function() {
 +        check_moniker(
 +            r#"
 +//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
 +pub mod module {
 +    pub trait MyTrait {
 +        pub fn func() {}
 +    }
 +
 +    struct MyStruct {}
 +
 +    impl MyTrait for MyStruct {
 +        pub fn func$0() {}
 +    }
 +}
 +"#,
 +            "foo::module::MyStruct::MyTrait::func",
 +            r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
 +            MonikerKind::Export,
 +        );
 +    }
 +
 +    #[test]
 +    fn moniker_for_field() {
 +        check_moniker(
 +            r#"
 +//- /lib.rs crate:main deps:foo
 +use foo::St;
 +fn main() {
 +    let x = St { a$0: 2 };
 +}
 +//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
 +pub struct St {
 +    pub a: i32,
 +}
 +"#,
 +            "foo::St::a",
 +            r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
 +            MonikerKind::Import,
 +        );
 +    }
 +
 +    #[test]
 +    fn no_moniker_for_local() {
 +        no_moniker(
 +            r#"
 +//- /lib.rs crate:main deps:foo
 +use foo::module::func;
 +fn main() {
 +    func();
 +}
 +//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
 +pub mod module {
 +    pub fn func() {
 +        let x$0 = 2;
 +    }
 +}
 +"#,
 +        );
 +    }
 +}
index 296270036002b906a8ec950621291a11bdca9486,0000000000000000000000000000000000000000..87b3ef380c5943e0986ea4521be48591c74c9317
mode 100644,000000..100644
--- /dev/null
@@@ -1,158 -1,0 +1,159 @@@
-     FxHashSet, FxIndexMap,
 +//! rust-analyzer is lazy and doesn't compute anything unless asked. This
 +//! sometimes is counter productive when, for example, the first goto definition
 +//! request takes longer to compute. This modules implemented prepopulation of
 +//! various caches, it's not really advanced at the moment.
 +mod topologic_sort;
 +
 +use std::time::Duration;
 +
 +use hir::db::DefDatabase;
 +use ide_db::{
 +    base_db::{
 +        salsa::{Database, ParallelDatabase, Snapshot},
 +        Cancelled, CrateGraph, CrateId, SourceDatabase, SourceDatabaseExt,
 +    },
- fn compute_crates_to_prime(db: &RootDatabase, graph: &CrateGraph) -> FxHashSet<CrateId> {
++    FxIndexMap,
 +};
++use stdx::hash::NoHashHashSet;
 +
 +use crate::RootDatabase;
 +
 +/// We're indexing many crates.
 +#[derive(Debug)]
 +pub struct ParallelPrimeCachesProgress {
 +    /// the crates that we are currently priming.
 +    pub crates_currently_indexing: Vec<String>,
 +    /// the total number of crates we want to prime.
 +    pub crates_total: usize,
 +    /// the total number of crates that have finished priming
 +    pub crates_done: usize,
 +}
 +
 +pub(crate) fn parallel_prime_caches(
 +    db: &RootDatabase,
 +    num_worker_threads: u8,
 +    cb: &(dyn Fn(ParallelPrimeCachesProgress) + Sync),
 +) {
 +    let _p = profile::span("prime_caches");
 +
 +    let graph = db.crate_graph();
 +    let mut crates_to_prime = {
 +        let crate_ids = compute_crates_to_prime(db, &graph);
 +
 +        let mut builder = topologic_sort::TopologicalSortIter::builder();
 +
 +        for &crate_id in &crate_ids {
 +            let crate_data = &graph[crate_id];
 +            let dependencies = crate_data
 +                .dependencies
 +                .iter()
 +                .map(|d| d.crate_id)
 +                .filter(|i| crate_ids.contains(i));
 +
 +            builder.add(crate_id, dependencies);
 +        }
 +
 +        builder.build()
 +    };
 +
 +    enum ParallelPrimeCacheWorkerProgress {
 +        BeginCrate { crate_id: CrateId, crate_name: String },
 +        EndCrate { crate_id: CrateId },
 +    }
 +
 +    let (work_sender, progress_receiver) = {
 +        let (progress_sender, progress_receiver) = crossbeam_channel::unbounded();
 +        let (work_sender, work_receiver) = crossbeam_channel::unbounded();
 +        let prime_caches_worker = move |db: Snapshot<RootDatabase>| {
 +            while let Ok((crate_id, crate_name)) = work_receiver.recv() {
 +                progress_sender
 +                    .send(ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name })?;
 +
 +                // This also computes the DefMap
 +                db.import_map(crate_id);
 +
 +                progress_sender.send(ParallelPrimeCacheWorkerProgress::EndCrate { crate_id })?;
 +            }
 +
 +            Ok::<_, crossbeam_channel::SendError<_>>(())
 +        };
 +
 +        for _ in 0..num_worker_threads {
 +            let worker = prime_caches_worker.clone();
 +            let db = db.snapshot();
 +            std::thread::spawn(move || Cancelled::catch(|| worker(db)));
 +        }
 +
 +        (work_sender, progress_receiver)
 +    };
 +
 +    let crates_total = crates_to_prime.pending();
 +    let mut crates_done = 0;
 +
 +    // an index map is used to preserve ordering so we can sort the progress report in order of
 +    // "longest crate to index" first
 +    let mut crates_currently_indexing =
 +        FxIndexMap::with_capacity_and_hasher(num_worker_threads as _, Default::default());
 +
 +    while crates_done < crates_total {
 +        db.unwind_if_cancelled();
 +
 +        for crate_id in &mut crates_to_prime {
 +            work_sender
 +                .send((
 +                    crate_id,
 +                    graph[crate_id].display_name.as_deref().unwrap_or_default().to_string(),
 +                ))
 +                .ok();
 +        }
 +
 +        // recv_timeout is somewhat a hack, we need a way to from this thread check to see if the current salsa revision
 +        // is cancelled on a regular basis. workers will only exit if they are processing a task that is cancelled, or
 +        // if this thread exits, and closes the work channel.
 +        let worker_progress = match progress_receiver.recv_timeout(Duration::from_millis(10)) {
 +            Ok(p) => p,
 +            Err(crossbeam_channel::RecvTimeoutError::Timeout) => {
 +                continue;
 +            }
 +            Err(crossbeam_channel::RecvTimeoutError::Disconnected) => {
 +                // our workers may have died from a cancelled task, so we'll check and re-raise here.
 +                db.unwind_if_cancelled();
 +                break;
 +            }
 +        };
 +        match worker_progress {
 +            ParallelPrimeCacheWorkerProgress::BeginCrate { crate_id, crate_name } => {
 +                crates_currently_indexing.insert(crate_id, crate_name);
 +            }
 +            ParallelPrimeCacheWorkerProgress::EndCrate { crate_id } => {
 +                crates_currently_indexing.remove(&crate_id);
 +                crates_to_prime.mark_done(crate_id);
 +                crates_done += 1;
 +            }
 +        };
 +
 +        let progress = ParallelPrimeCachesProgress {
 +            crates_currently_indexing: crates_currently_indexing.values().cloned().collect(),
 +            crates_done,
 +            crates_total,
 +        };
 +
 +        cb(progress);
 +    }
 +}
 +
++fn compute_crates_to_prime(db: &RootDatabase, graph: &CrateGraph) -> NoHashHashSet<CrateId> {
 +    // We're only interested in the workspace crates and the `ImportMap`s of their direct
 +    // dependencies, though in practice the latter also compute the `DefMap`s.
 +    // We don't prime transitive dependencies because they're generally not visible in
 +    // the current workspace.
 +    graph
 +        .iter()
 +        .filter(|&id| {
 +            let file_id = graph[id].root_file_id;
 +            let root_id = db.file_source_root(file_id);
 +            !db.source_root(root_id).is_library
 +        })
 +        .flat_map(|id| graph[id].dependencies.iter().map(|krate| krate.crate_id))
 +        .collect()
 +}
index 1a6beec1881b00faf061a2e3b52b54429cda90ec,0000000000000000000000000000000000000000..99614b645e48ff02160b38fcf377f12bf01b4705
mode 100644,000000..100644
--- /dev/null
@@@ -1,1636 -1,0 +1,1637 @@@
-     FxHashMap, RootDatabase,
 +//! This module implements a reference search.
 +//! First, the element at the cursor position must be either an `ast::Name`
 +//! or `ast::NameRef`. If it's an `ast::NameRef`, at the classification step we
 +//! try to resolve the direct tree parent of this element, otherwise we
 +//! already have a definition and just need to get its HIR together with
 +//! some information that is needed for further steps of searching.
 +//! After that, we collect files that might contain references and look
 +//! for text occurrences of the identifier. If there's an `ast::NameRef`
 +//! at the index that the match starts at and its tree parent is
 +//! resolved to the search element definition, we get a reference.
 +
 +use hir::{PathResolution, Semantics};
 +use ide_db::{
 +    base_db::FileId,
 +    defs::{Definition, NameClass, NameRefClass},
 +    search::{ReferenceCategory, SearchScope, UsageSearchResult},
-     pub references: FxHashMap<FileId, Vec<(TextRange, Option<ReferenceCategory>)>>,
++    RootDatabase,
 +};
++use stdx::hash::NoHashHashMap;
 +use syntax::{
 +    algo::find_node_at_offset,
 +    ast::{self, HasName},
 +    match_ast, AstNode,
 +    SyntaxKind::*,
 +    SyntaxNode, TextRange, TextSize, T,
 +};
 +
 +use crate::{FilePosition, NavigationTarget, TryToNav};
 +
 +#[derive(Debug, Clone)]
 +pub struct ReferenceSearchResult {
 +    pub declaration: Option<Declaration>,
++    pub references: NoHashHashMap<FileId, Vec<(TextRange, Option<ReferenceCategory>)>>,
 +}
 +
 +#[derive(Debug, Clone)]
 +pub struct Declaration {
 +    pub nav: NavigationTarget,
 +    pub is_mut: bool,
 +}
 +
 +// Feature: Find All References
 +//
 +// Shows all references of the item at the cursor location
 +//
 +// |===
 +// | Editor  | Shortcut
 +//
 +// | VS Code | kbd:[Shift+Alt+F12]
 +// |===
 +//
 +// image::https://user-images.githubusercontent.com/48062697/113020670-b7c34f00-917a-11eb-8003-370ac5f2b3cb.gif[]
 +pub(crate) fn find_all_refs(
 +    sema: &Semantics<'_, RootDatabase>,
 +    position: FilePosition,
 +    search_scope: Option<SearchScope>,
 +) -> Option<Vec<ReferenceSearchResult>> {
 +    let _p = profile::span("find_all_refs");
 +    let syntax = sema.parse(position.file_id).syntax().clone();
 +    let make_searcher = |literal_search: bool| {
 +        move |def: Definition| {
 +            let declaration = match def {
 +                Definition::Module(module) => {
 +                    Some(NavigationTarget::from_module_to_decl(sema.db, module))
 +                }
 +                def => def.try_to_nav(sema.db),
 +            }
 +            .map(|nav| {
 +                let decl_range = nav.focus_or_full_range();
 +                Declaration {
 +                    is_mut: decl_mutability(&def, sema.parse(nav.file_id).syntax(), decl_range),
 +                    nav,
 +                }
 +            });
 +            let mut usages =
 +                def.usages(sema).set_scope(search_scope.clone()).include_self_refs().all();
 +
 +            if literal_search {
 +                retain_adt_literal_usages(&mut usages, def, sema);
 +            }
 +
 +            let references = usages
 +                .into_iter()
 +                .map(|(file_id, refs)| {
 +                    (
 +                        file_id,
 +                        refs.into_iter()
 +                            .map(|file_ref| (file_ref.range, file_ref.category))
 +                            .collect(),
 +                    )
 +                })
 +                .collect();
 +
 +            ReferenceSearchResult { declaration, references }
 +        }
 +    };
 +
 +    match name_for_constructor_search(&syntax, position) {
 +        Some(name) => {
 +            let def = match NameClass::classify(sema, &name)? {
 +                NameClass::Definition(it) | NameClass::ConstReference(it) => it,
 +                NameClass::PatFieldShorthand { local_def: _, field_ref } => {
 +                    Definition::Field(field_ref)
 +                }
 +            };
 +            Some(vec![make_searcher(true)(def)])
 +        }
 +        None => {
 +            let search = make_searcher(false);
 +            Some(find_defs(sema, &syntax, position.offset)?.map(search).collect())
 +        }
 +    }
 +}
 +
 +pub(crate) fn find_defs<'a>(
 +    sema: &'a Semantics<'_, RootDatabase>,
 +    syntax: &SyntaxNode,
 +    offset: TextSize,
 +) -> Option<impl Iterator<Item = Definition> + 'a> {
 +    let token = syntax.token_at_offset(offset).find(|t| {
 +        matches!(
 +            t.kind(),
 +            IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self]
 +        )
 +    });
 +    token.map(|token| {
 +        sema.descend_into_macros_with_same_text(token)
 +            .into_iter()
 +            .filter_map(|it| ast::NameLike::cast(it.parent()?))
 +            .filter_map(move |name_like| {
 +                let def = match name_like {
 +                    ast::NameLike::NameRef(name_ref) => {
 +                        match NameRefClass::classify(sema, &name_ref)? {
 +                            NameRefClass::Definition(def) => def,
 +                            NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
 +                                Definition::Local(local_ref)
 +                            }
 +                        }
 +                    }
 +                    ast::NameLike::Name(name) => match NameClass::classify(sema, &name)? {
 +                        NameClass::Definition(it) | NameClass::ConstReference(it) => it,
 +                        NameClass::PatFieldShorthand { local_def, field_ref: _ } => {
 +                            Definition::Local(local_def)
 +                        }
 +                    },
 +                    ast::NameLike::Lifetime(lifetime) => {
 +                        NameRefClass::classify_lifetime(sema, &lifetime)
 +                            .and_then(|class| match class {
 +                                NameRefClass::Definition(it) => Some(it),
 +                                _ => None,
 +                            })
 +                            .or_else(|| {
 +                                NameClass::classify_lifetime(sema, &lifetime)
 +                                    .and_then(NameClass::defined)
 +                            })?
 +                    }
 +                };
 +                Some(def)
 +            })
 +    })
 +}
 +
 +pub(crate) fn decl_mutability(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> bool {
 +    match def {
 +        Definition::Local(_) | Definition::Field(_) => {}
 +        _ => return false,
 +    };
 +
 +    match find_node_at_offset::<ast::LetStmt>(syntax, range.start()) {
 +        Some(stmt) if stmt.initializer().is_some() => match stmt.pat() {
 +            Some(ast::Pat::IdentPat(it)) => it.mut_token().is_some(),
 +            _ => false,
 +        },
 +        _ => false,
 +    }
 +}
 +
 +/// Filter out all non-literal usages for adt-defs
 +fn retain_adt_literal_usages(
 +    usages: &mut UsageSearchResult,
 +    def: Definition,
 +    sema: &Semantics<'_, RootDatabase>,
 +) {
 +    let refs = usages.references.values_mut();
 +    match def {
 +        Definition::Adt(hir::Adt::Enum(enum_)) => {
 +            refs.for_each(|it| {
 +                it.retain(|reference| {
 +                    reference
 +                        .name
 +                        .as_name_ref()
 +                        .map_or(false, |name_ref| is_enum_lit_name_ref(sema, enum_, name_ref))
 +                })
 +            });
 +            usages.references.retain(|_, it| !it.is_empty());
 +        }
 +        Definition::Adt(_) | Definition::Variant(_) => {
 +            refs.for_each(|it| {
 +                it.retain(|reference| reference.name.as_name_ref().map_or(false, is_lit_name_ref))
 +            });
 +            usages.references.retain(|_, it| !it.is_empty());
 +        }
 +        _ => {}
 +    }
 +}
 +
 +/// Returns `Some` if the cursor is at a position for an item to search for all its constructor/literal usages
 +fn name_for_constructor_search(syntax: &SyntaxNode, position: FilePosition) -> Option<ast::Name> {
 +    let token = syntax.token_at_offset(position.offset).right_biased()?;
 +    let token_parent = token.parent()?;
 +    let kind = token.kind();
 +    if kind == T![;] {
 +        ast::Struct::cast(token_parent)
 +            .filter(|struct_| struct_.field_list().is_none())
 +            .and_then(|struct_| struct_.name())
 +    } else if kind == T!['{'] {
 +        match_ast! {
 +            match token_parent {
 +                ast::RecordFieldList(rfl) => match_ast! {
 +                    match (rfl.syntax().parent()?) {
 +                        ast::Variant(it) => it.name(),
 +                        ast::Struct(it) => it.name(),
 +                        ast::Union(it) => it.name(),
 +                        _ => None,
 +                    }
 +                },
 +                ast::VariantList(vl) => ast::Enum::cast(vl.syntax().parent()?)?.name(),
 +                _ => None,
 +            }
 +        }
 +    } else if kind == T!['('] {
 +        let tfl = ast::TupleFieldList::cast(token_parent)?;
 +        match_ast! {
 +            match (tfl.syntax().parent()?) {
 +                ast::Variant(it) => it.name(),
 +                ast::Struct(it) => it.name(),
 +                _ => None,
 +            }
 +        }
 +    } else {
 +        None
 +    }
 +}
 +
 +fn is_enum_lit_name_ref(
 +    sema: &Semantics<'_, RootDatabase>,
 +    enum_: hir::Enum,
 +    name_ref: &ast::NameRef,
 +) -> bool {
 +    let path_is_variant_of_enum = |path: ast::Path| {
 +        matches!(
 +            sema.resolve_path(&path),
 +            Some(PathResolution::Def(hir::ModuleDef::Variant(variant)))
 +                if variant.parent_enum(sema.db) == enum_
 +        )
 +    };
 +    name_ref
 +        .syntax()
 +        .ancestors()
 +        .find_map(|ancestor| {
 +            match_ast! {
 +                match ancestor {
 +                    ast::PathExpr(path_expr) => path_expr.path().map(path_is_variant_of_enum),
 +                    ast::RecordExpr(record_expr) => record_expr.path().map(path_is_variant_of_enum),
 +                    _ => None,
 +                }
 +            }
 +        })
 +        .unwrap_or(false)
 +}
 +
 +fn path_ends_with(path: Option<ast::Path>, name_ref: &ast::NameRef) -> bool {
 +    path.and_then(|path| path.segment())
 +        .and_then(|segment| segment.name_ref())
 +        .map_or(false, |segment| segment == *name_ref)
 +}
 +
 +fn is_lit_name_ref(name_ref: &ast::NameRef) -> bool {
 +    name_ref.syntax().ancestors().find_map(|ancestor| {
 +        match_ast! {
 +            match ancestor {
 +                ast::PathExpr(path_expr) => Some(path_ends_with(path_expr.path(), name_ref)),
 +                ast::RecordExpr(record_expr) => Some(path_ends_with(record_expr.path(), name_ref)),
 +                _ => None,
 +            }
 +        }
 +    }).unwrap_or(false)
 +}
 +
 +#[cfg(test)]
 +mod tests {
 +    use expect_test::{expect, Expect};
 +    use ide_db::{base_db::FileId, search::ReferenceCategory};
 +    use stdx::format_to;
 +
 +    use crate::{fixture, SearchScope};
 +
 +    #[test]
 +    fn test_struct_literal_after_space() {
 +        check(
 +            r#"
 +struct Foo $0{
 +    a: i32,
 +}
 +impl Foo {
 +    fn f() -> i32 { 42 }
 +}
 +fn main() {
 +    let f: Foo;
 +    f = Foo {a: Foo::f()};
 +}
 +"#,
 +            expect![[r#"
 +                Foo Struct FileId(0) 0..26 7..10
 +
 +                FileId(0) 101..104
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_struct_literal_before_space() {
 +        check(
 +            r#"
 +struct Foo$0 {}
 +    fn main() {
 +    let f: Foo;
 +    f = Foo {};
 +}
 +"#,
 +            expect![[r#"
 +                Foo Struct FileId(0) 0..13 7..10
 +
 +                FileId(0) 41..44
 +                FileId(0) 54..57
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_struct_literal_with_generic_type() {
 +        check(
 +            r#"
 +struct Foo<T> $0{}
 +    fn main() {
 +    let f: Foo::<i32>;
 +    f = Foo {};
 +}
 +"#,
 +            expect![[r#"
 +                Foo Struct FileId(0) 0..16 7..10
 +
 +                FileId(0) 64..67
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_struct_literal_for_tuple() {
 +        check(
 +            r#"
 +struct Foo$0(i32);
 +
 +fn main() {
 +    let f: Foo;
 +    f = Foo(1);
 +}
 +"#,
 +            expect![[r#"
 +                Foo Struct FileId(0) 0..16 7..10
 +
 +                FileId(0) 54..57
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_struct_literal_for_union() {
 +        check(
 +            r#"
 +union Foo $0{
 +    x: u32
 +}
 +
 +fn main() {
 +    let f: Foo;
 +    f = Foo { x: 1 };
 +}
 +"#,
 +            expect![[r#"
 +                Foo Union FileId(0) 0..24 6..9
 +
 +                FileId(0) 62..65
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_enum_after_space() {
 +        check(
 +            r#"
 +enum Foo $0{
 +    A,
 +    B(),
 +    C{},
 +}
 +fn main() {
 +    let f: Foo;
 +    f = Foo::A;
 +    f = Foo::B();
 +    f = Foo::C{};
 +}
 +"#,
 +            expect![[r#"
 +                Foo Enum FileId(0) 0..37 5..8
 +
 +                FileId(0) 74..77
 +                FileId(0) 90..93
 +                FileId(0) 108..111
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_variant_record_after_space() {
 +        check(
 +            r#"
 +enum Foo {
 +    A $0{ n: i32 },
 +    B,
 +}
 +fn main() {
 +    let f: Foo;
 +    f = Foo::B;
 +    f = Foo::A { n: 92 };
 +}
 +"#,
 +            expect![[r#"
 +                A Variant FileId(0) 15..27 15..16
 +
 +                FileId(0) 95..96
 +            "#]],
 +        );
 +    }
 +    #[test]
 +    fn test_variant_tuple_before_paren() {
 +        check(
 +            r#"
 +enum Foo {
 +    A$0(i32),
 +    B,
 +}
 +fn main() {
 +    let f: Foo;
 +    f = Foo::B;
 +    f = Foo::A(92);
 +}
 +"#,
 +            expect![[r#"
 +                A Variant FileId(0) 15..21 15..16
 +
 +                FileId(0) 89..90
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_enum_before_space() {
 +        check(
 +            r#"
 +enum Foo$0 {
 +    A,
 +    B,
 +}
 +fn main() {
 +    let f: Foo;
 +    f = Foo::A;
 +}
 +"#,
 +            expect![[r#"
 +                Foo Enum FileId(0) 0..26 5..8
 +
 +                FileId(0) 50..53
 +                FileId(0) 63..66
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_enum_with_generic_type() {
 +        check(
 +            r#"
 +enum Foo<T> $0{
 +    A(T),
 +    B,
 +}
 +fn main() {
 +    let f: Foo<i8>;
 +    f = Foo::A(1);
 +}
 +"#,
 +            expect![[r#"
 +                Foo Enum FileId(0) 0..32 5..8
 +
 +                FileId(0) 73..76
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_enum_for_tuple() {
 +        check(
 +            r#"
 +enum Foo$0{
 +    A(i8),
 +    B(i8),
 +}
 +fn main() {
 +    let f: Foo;
 +    f = Foo::A(1);
 +}
 +"#,
 +            expect![[r#"
 +                Foo Enum FileId(0) 0..33 5..8
 +
 +                FileId(0) 70..73
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_all_refs_for_local() {
 +        check(
 +            r#"
 +fn main() {
 +    let mut i = 1;
 +    let j = 1;
 +    i = i$0 + j;
 +
 +    {
 +        i = 0;
 +    }
 +
 +    i = 5;
 +}"#,
 +            expect![[r#"
 +                i Local FileId(0) 20..25 24..25 Write
 +
 +                FileId(0) 50..51 Write
 +                FileId(0) 54..55 Read
 +                FileId(0) 76..77 Write
 +                FileId(0) 94..95 Write
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn search_filters_by_range() {
 +        check(
 +            r#"
 +fn foo() {
 +    let spam$0 = 92;
 +    spam + spam
 +}
 +fn bar() {
 +    let spam = 92;
 +    spam + spam
 +}
 +"#,
 +            expect![[r#"
 +                spam Local FileId(0) 19..23 19..23
 +
 +                FileId(0) 34..38 Read
 +                FileId(0) 41..45 Read
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_all_refs_for_param_inside() {
 +        check(
 +            r#"
 +fn foo(i : u32) -> u32 { i$0 }
 +"#,
 +            expect![[r#"
 +                i ValueParam FileId(0) 7..8 7..8
 +
 +                FileId(0) 25..26 Read
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_all_refs_for_fn_param() {
 +        check(
 +            r#"
 +fn foo(i$0 : u32) -> u32 { i }
 +"#,
 +            expect![[r#"
 +                i ValueParam FileId(0) 7..8 7..8
 +
 +                FileId(0) 25..26 Read
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_all_refs_field_name() {
 +        check(
 +            r#"
 +//- /lib.rs
 +struct Foo {
 +    pub spam$0: u32,
 +}
 +
 +fn main(s: Foo) {
 +    let f = s.spam;
 +}
 +"#,
 +            expect![[r#"
 +                spam Field FileId(0) 17..30 21..25
 +
 +                FileId(0) 67..71 Read
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_all_refs_impl_item_name() {
 +        check(
 +            r#"
 +struct Foo;
 +impl Foo {
 +    fn f$0(&self) {  }
 +}
 +"#,
 +            expect![[r#"
 +                f Function FileId(0) 27..43 30..31
 +
 +                (no references)
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_all_refs_enum_var_name() {
 +        check(
 +            r#"
 +enum Foo {
 +    A,
 +    B$0,
 +    C,
 +}
 +"#,
 +            expect![[r#"
 +                B Variant FileId(0) 22..23 22..23
 +
 +                (no references)
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_all_refs_enum_var_field() {
 +        check(
 +            r#"
 +enum Foo {
 +    A,
 +    B { field$0: u8 },
 +    C,
 +}
 +"#,
 +            expect![[r#"
 +                field Field FileId(0) 26..35 26..31
 +
 +                (no references)
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_all_refs_two_modules() {
 +        check(
 +            r#"
 +//- /lib.rs
 +pub mod foo;
 +pub mod bar;
 +
 +fn f() {
 +    let i = foo::Foo { n: 5 };
 +}
 +
 +//- /foo.rs
 +use crate::bar;
 +
 +pub struct Foo {
 +    pub n: u32,
 +}
 +
 +fn f() {
 +    let i = bar::Bar { n: 5 };
 +}
 +
 +//- /bar.rs
 +use crate::foo;
 +
 +pub struct Bar {
 +    pub n: u32,
 +}
 +
 +fn f() {
 +    let i = foo::Foo$0 { n: 5 };
 +}
 +"#,
 +            expect![[r#"
 +                Foo Struct FileId(1) 17..51 28..31
 +
 +                FileId(0) 53..56
 +                FileId(2) 79..82
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_all_refs_decl_module() {
 +        check(
 +            r#"
 +//- /lib.rs
 +mod foo$0;
 +
 +use foo::Foo;
 +
 +fn f() {
 +    let i = Foo { n: 5 };
 +}
 +
 +//- /foo.rs
 +pub struct Foo {
 +    pub n: u32,
 +}
 +"#,
 +            expect![[r#"
 +                foo Module FileId(0) 0..8 4..7
 +
 +                FileId(0) 14..17
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_all_refs_decl_module_on_self() {
 +        check(
 +            r#"
 +//- /lib.rs
 +mod foo;
 +
 +//- /foo.rs
 +use self$0;
 +"#,
 +            expect![[r#"
 +                foo Module FileId(0) 0..8 4..7
 +
 +                FileId(1) 4..8
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_all_refs_decl_module_on_self_crate_root() {
 +        check(
 +            r#"
 +//- /lib.rs
 +use self$0;
 +"#,
 +            expect![[r#"
 +                Module FileId(0) 0..10
 +
 +                FileId(0) 4..8
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_all_refs_super_mod_vis() {
 +        check(
 +            r#"
 +//- /lib.rs
 +mod foo;
 +
 +//- /foo.rs
 +mod some;
 +use some::Foo;
 +
 +fn f() {
 +    let i = Foo { n: 5 };
 +}
 +
 +//- /foo/some.rs
 +pub(super) struct Foo$0 {
 +    pub n: u32,
 +}
 +"#,
 +            expect![[r#"
 +                Foo Struct FileId(2) 0..41 18..21
 +
 +                FileId(1) 20..23
 +                FileId(1) 47..50
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_all_refs_with_scope() {
 +        let code = r#"
 +            //- /lib.rs
 +            mod foo;
 +            mod bar;
 +
 +            pub fn quux$0() {}
 +
 +            //- /foo.rs
 +            fn f() { super::quux(); }
 +
 +            //- /bar.rs
 +            fn f() { super::quux(); }
 +        "#;
 +
 +        check_with_scope(
 +            code,
 +            None,
 +            expect![[r#"
 +                quux Function FileId(0) 19..35 26..30
 +
 +                FileId(1) 16..20
 +                FileId(2) 16..20
 +            "#]],
 +        );
 +
 +        check_with_scope(
 +            code,
 +            Some(SearchScope::single_file(FileId(2))),
 +            expect![[r#"
 +                quux Function FileId(0) 19..35 26..30
 +
 +                FileId(2) 16..20
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_all_refs_macro_def() {
 +        check(
 +            r#"
 +#[macro_export]
 +macro_rules! m1$0 { () => (()) }
 +
 +fn foo() {
 +    m1();
 +    m1();
 +}
 +"#,
 +            expect![[r#"
 +                m1 Macro FileId(0) 0..46 29..31
 +
 +                FileId(0) 63..65
 +                FileId(0) 73..75
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_basic_highlight_read_write() {
 +        check(
 +            r#"
 +fn foo() {
 +    let mut i$0 = 0;
 +    i = i + 1;
 +}
 +"#,
 +            expect![[r#"
 +                i Local FileId(0) 19..24 23..24 Write
 +
 +                FileId(0) 34..35 Write
 +                FileId(0) 38..39 Read
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_basic_highlight_field_read_write() {
 +        check(
 +            r#"
 +struct S {
 +    f: u32,
 +}
 +
 +fn foo() {
 +    let mut s = S{f: 0};
 +    s.f$0 = 0;
 +}
 +"#,
 +            expect![[r#"
 +                f Field FileId(0) 15..21 15..16
 +
 +                FileId(0) 55..56 Read
 +                FileId(0) 68..69 Write
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_basic_highlight_decl_no_write() {
 +        check(
 +            r#"
 +fn foo() {
 +    let i$0;
 +    i = 1;
 +}
 +"#,
 +            expect![[r#"
 +                i Local FileId(0) 19..20 19..20
 +
 +                FileId(0) 26..27 Write
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_struct_function_refs_outside_module() {
 +        check(
 +            r#"
 +mod foo {
 +    pub struct Foo;
 +
 +    impl Foo {
 +        pub fn new$0() -> Foo { Foo }
 +    }
 +}
 +
 +fn main() {
 +    let _f = foo::Foo::new();
 +}
 +"#,
 +            expect![[r#"
 +                new Function FileId(0) 54..81 61..64
 +
 +                FileId(0) 126..129
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_all_refs_nested_module() {
 +        check(
 +            r#"
 +//- /lib.rs
 +mod foo { mod bar; }
 +
 +fn f$0() {}
 +
 +//- /foo/bar.rs
 +use crate::f;
 +
 +fn g() { f(); }
 +"#,
 +            expect![[r#"
 +                f Function FileId(0) 22..31 25..26
 +
 +                FileId(1) 11..12
 +                FileId(1) 24..25
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_all_refs_struct_pat() {
 +        check(
 +            r#"
 +struct S {
 +    field$0: u8,
 +}
 +
 +fn f(s: S) {
 +    match s {
 +        S { field } => {}
 +    }
 +}
 +"#,
 +            expect![[r#"
 +                field Field FileId(0) 15..24 15..20
 +
 +                FileId(0) 68..73 Read
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_all_refs_enum_var_pat() {
 +        check(
 +            r#"
 +enum En {
 +    Variant {
 +        field$0: u8,
 +    }
 +}
 +
 +fn f(e: En) {
 +    match e {
 +        En::Variant { field } => {}
 +    }
 +}
 +"#,
 +            expect![[r#"
 +                field Field FileId(0) 32..41 32..37
 +
 +                FileId(0) 102..107 Read
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_all_refs_enum_var_privacy() {
 +        check(
 +            r#"
 +mod m {
 +    pub enum En {
 +        Variant {
 +            field$0: u8,
 +        }
 +    }
 +}
 +
 +fn f() -> m::En {
 +    m::En::Variant { field: 0 }
 +}
 +"#,
 +            expect![[r#"
 +                field Field FileId(0) 56..65 56..61
 +
 +                FileId(0) 125..130 Read
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_self_refs() {
 +        check(
 +            r#"
 +struct Foo { bar: i32 }
 +
 +impl Foo {
 +    fn foo(self) {
 +        let x = self$0.bar;
 +        if true {
 +            let _ = match () {
 +                () => self,
 +            };
 +        }
 +    }
 +}
 +"#,
 +            expect![[r#"
 +                self SelfParam FileId(0) 47..51 47..51
 +
 +                FileId(0) 71..75 Read
 +                FileId(0) 152..156 Read
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_self_refs_decl() {
 +        check(
 +            r#"
 +struct Foo { bar: i32 }
 +
 +impl Foo {
 +    fn foo(self$0) {
 +        self;
 +    }
 +}
 +"#,
 +            expect![[r#"
 +                self SelfParam FileId(0) 47..51 47..51
 +
 +                FileId(0) 63..67 Read
 +            "#]],
 +        );
 +    }
 +
 +    fn check(ra_fixture: &str, expect: Expect) {
 +        check_with_scope(ra_fixture, None, expect)
 +    }
 +
 +    fn check_with_scope(ra_fixture: &str, search_scope: Option<SearchScope>, expect: Expect) {
 +        let (analysis, pos) = fixture::position(ra_fixture);
 +        let refs = analysis.find_all_refs(pos, search_scope).unwrap().unwrap();
 +
 +        let mut actual = String::new();
 +        for refs in refs {
 +            actual += "\n\n";
 +
 +            if let Some(decl) = refs.declaration {
 +                format_to!(actual, "{}", decl.nav.debug_render());
 +                if decl.is_mut {
 +                    format_to!(actual, " {:?}", ReferenceCategory::Write)
 +                }
 +                actual += "\n\n";
 +            }
 +
 +            for (file_id, references) in &refs.references {
 +                for (range, access) in references {
 +                    format_to!(actual, "{:?} {:?}", file_id, range);
 +                    if let Some(access) = access {
 +                        format_to!(actual, " {:?}", access);
 +                    }
 +                    actual += "\n";
 +                }
 +            }
 +
 +            if refs.references.is_empty() {
 +                actual += "(no references)\n";
 +            }
 +        }
 +        expect.assert_eq(actual.trim_start())
 +    }
 +
 +    #[test]
 +    fn test_find_lifetimes_function() {
 +        check(
 +            r#"
 +trait Foo<'a> {}
 +impl<'a> Foo<'a> for &'a () {}
 +fn foo<'a, 'b: 'a>(x: &'a$0 ()) -> &'a () where &'a (): Foo<'a> {
 +    fn bar<'a>(_: &'a ()) {}
 +    x
 +}
 +"#,
 +            expect![[r#"
 +                'a LifetimeParam FileId(0) 55..57 55..57
 +
 +                FileId(0) 63..65
 +                FileId(0) 71..73
 +                FileId(0) 82..84
 +                FileId(0) 95..97
 +                FileId(0) 106..108
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_lifetimes_type_alias() {
 +        check(
 +            r#"
 +type Foo<'a, T> where T: 'a$0 = &'a T;
 +"#,
 +            expect![[r#"
 +                'a LifetimeParam FileId(0) 9..11 9..11
 +
 +                FileId(0) 25..27
 +                FileId(0) 31..33
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_lifetimes_trait_impl() {
 +        check(
 +            r#"
 +trait Foo<'a> {
 +    fn foo() -> &'a ();
 +}
 +impl<'a> Foo<'a> for &'a () {
 +    fn foo() -> &'a$0 () {
 +        unimplemented!()
 +    }
 +}
 +"#,
 +            expect![[r#"
 +                'a LifetimeParam FileId(0) 47..49 47..49
 +
 +                FileId(0) 55..57
 +                FileId(0) 64..66
 +                FileId(0) 89..91
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_map_range_to_original() {
 +        check(
 +            r#"
 +macro_rules! foo {($i:ident) => {$i} }
 +fn main() {
 +    let a$0 = "test";
 +    foo!(a);
 +}
 +"#,
 +            expect![[r#"
 +                a Local FileId(0) 59..60 59..60
 +
 +                FileId(0) 80..81 Read
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_map_range_to_original_ref() {
 +        check(
 +            r#"
 +macro_rules! foo {($i:ident) => {$i} }
 +fn main() {
 +    let a = "test";
 +    foo!(a$0);
 +}
 +"#,
 +            expect![[r#"
 +                a Local FileId(0) 59..60 59..60
 +
 +                FileId(0) 80..81 Read
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_labels() {
 +        check(
 +            r#"
 +fn foo<'a>() -> &'a () {
 +    'a: loop {
 +        'b: loop {
 +            continue 'a$0;
 +        }
 +        break 'a;
 +    }
 +}
 +"#,
 +            expect![[r#"
 +                'a Label FileId(0) 29..32 29..31
 +
 +                FileId(0) 80..82
 +                FileId(0) 108..110
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_find_const_param() {
 +        check(
 +            r#"
 +fn foo<const FOO$0: usize>() -> usize {
 +    FOO
 +}
 +"#,
 +            expect![[r#"
 +                FOO ConstParam FileId(0) 7..23 13..16
 +
 +                FileId(0) 42..45
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_trait() {
 +        check(
 +            r#"
 +trait Foo$0 where Self: {}
 +
 +impl Foo for () {}
 +"#,
 +            expect![[r#"
 +                Foo Trait FileId(0) 0..24 6..9
 +
 +                FileId(0) 31..34
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_trait_self() {
 +        check(
 +            r#"
 +trait Foo where Self$0 {
 +    fn f() -> Self;
 +}
 +
 +impl Foo for () {}
 +"#,
 +            expect![[r#"
 +                Self TypeParam FileId(0) 6..9 6..9
 +
 +                FileId(0) 16..20
 +                FileId(0) 37..41
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_self_ty() {
 +        check(
 +            r#"
 +        struct $0Foo;
 +
 +        impl Foo where Self: {
 +            fn f() -> Self;
 +        }
 +        "#,
 +            expect![[r#"
 +                Foo Struct FileId(0) 0..11 7..10
 +
 +                FileId(0) 18..21
 +                FileId(0) 28..32
 +                FileId(0) 50..54
 +            "#]],
 +        );
 +        check(
 +            r#"
 +struct Foo;
 +
 +impl Foo where Self: {
 +    fn f() -> Self$0;
 +}
 +"#,
 +            expect![[r#"
 +                impl Impl FileId(0) 13..57 18..21
 +
 +                FileId(0) 18..21
 +                FileId(0) 28..32
 +                FileId(0) 50..54
 +            "#]],
 +        );
 +    }
 +    #[test]
 +    fn test_self_variant_with_payload() {
 +        check(
 +            r#"
 +enum Foo { Bar() }
 +
 +impl Foo {
 +    fn foo(self) {
 +        match self {
 +            Self::Bar$0() => (),
 +        }
 +    }
 +}
 +
 +"#,
 +            expect![[r#"
 +                Bar Variant FileId(0) 11..16 11..14
 +
 +                FileId(0) 89..92
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_attr_differs_from_fn_with_same_name() {
 +        check(
 +            r#"
 +#[test]
 +fn test$0() {
 +    test();
 +}
 +"#,
 +            expect![[r#"
 +                test Function FileId(0) 0..33 11..15
 +
 +                FileId(0) 24..28
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_const_in_pattern() {
 +        check(
 +            r#"
 +const A$0: i32 = 42;
 +
 +fn main() {
 +    match A {
 +        A => (),
 +        _ => (),
 +    }
 +    if let A = A {}
 +}
 +"#,
 +            expect![[r#"
 +                A Const FileId(0) 0..18 6..7
 +
 +                FileId(0) 42..43
 +                FileId(0) 54..55
 +                FileId(0) 97..98
 +                FileId(0) 101..102
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_primitives() {
 +        check(
 +            r#"
 +fn foo(_: bool) -> bo$0ol { true }
 +"#,
 +            expect![[r#"
 +                FileId(0) 10..14
 +                FileId(0) 19..23
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_transitive() {
 +        check(
 +            r#"
 +//- /level3.rs new_source_root:local crate:level3
 +pub struct Fo$0o;
 +//- /level2.rs new_source_root:local crate:level2 deps:level3
 +pub use level3::Foo;
 +//- /level1.rs new_source_root:local crate:level1 deps:level2
 +pub use level2::Foo;
 +//- /level0.rs new_source_root:local crate:level0 deps:level1
 +pub use level1::Foo;
 +"#,
 +            expect![[r#"
 +                Foo Struct FileId(0) 0..15 11..14
 +
 +                FileId(1) 16..19
 +                FileId(2) 16..19
 +                FileId(3) 16..19
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn test_decl_macro_references() {
 +        check(
 +            r#"
 +//- /lib.rs crate:lib
 +#[macro_use]
 +mod qux;
 +mod bar;
 +
 +pub use self::foo;
 +//- /qux.rs
 +#[macro_export]
 +macro_rules! foo$0 {
 +    () => {struct Foo;};
 +}
 +//- /bar.rs
 +foo!();
 +//- /other.rs crate:other deps:lib new_source_root:local
 +lib::foo!();
 +"#,
 +            expect![[r#"
 +                foo Macro FileId(1) 0..61 29..32
 +
 +                FileId(0) 46..49
 +                FileId(2) 0..3
 +                FileId(3) 5..8
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn macro_doesnt_reference_attribute_on_call() {
 +        check(
 +            r#"
 +macro_rules! m {
 +    () => {};
 +}
 +
 +#[proc_macro_test::attr_noop]
 +m$0!();
 +
 +"#,
 +            expect![[r#"
 +                m Macro FileId(0) 0..32 13..14
 +
 +                FileId(0) 64..65
 +            "#]],
 +        );
 +    }
 +
 +    #[test]
 +    fn multi_def() {
 +        check(
 +            r#"
 +macro_rules! m {
 +    ($name:ident) => {
 +        mod module {
 +            pub fn $name() {}
 +        }
 +
 +        pub fn $name() {}
 +    }
 +}
 +
 +m!(func$0);
 +
 +fn f() {
 +    func();
 +    module::func();
 +}
 +            "#,
 +            expect![[r#"
 +                func Function FileId(0) 137..146 140..144
 +
 +                FileId(0) 161..165
 +
 +
 +                func Function FileId(0) 137..146 140..144
 +
 +                FileId(0) 181..185
 +            "#]],
 +        )
 +    }
 +
 +    #[test]
 +    fn attr_expanded() {
 +        check(
 +            r#"
 +//- proc_macros: identity
 +#[proc_macros::identity]
 +fn func$0() {
 +    func();
 +}
 +"#,
 +            expect![[r#"
 +                func Function FileId(0) 25..50 28..32
 +
 +                FileId(0) 41..45
 +            "#]],
 +        )
 +    }
 +
 +    #[test]
 +    fn attr_assoc_item() {
 +        check(
 +            r#"
 +//- proc_macros: identity
 +
 +trait Trait {
 +    #[proc_macros::identity]
 +    fn func() {
 +        Self::func$0();
 +    }
 +}
 +"#,
 +            expect![[r#"
 +                func Function FileId(0) 48..87 51..55
 +
 +                FileId(0) 74..78
 +            "#]],
 +        )
 +    }
 +
 +    // FIXME: import is classified as function
 +    #[test]
 +    fn attr() {
 +        check(
 +            r#"
 +//- proc_macros: identity
 +use proc_macros::identity;
 +
 +#[proc_macros::$0identity]
 +fn func() {}
 +"#,
 +            expect![[r#"
 +                identity Attribute FileId(1) 1..107 32..40
 +
 +                FileId(0) 43..51
 +            "#]],
 +        );
 +        check(
 +            r#"
 +#![crate_type="proc-macro"]
 +#[proc_macro_attribute]
 +fn func$0() {}
 +"#,
 +            expect![[r#"
 +                func Attribute FileId(0) 28..64 55..59
 +
 +                (no references)
 +            "#]],
 +        );
 +    }
 +
 +    // FIXME: import is classified as function
 +    #[test]
 +    fn proc_macro() {
 +        check(
 +            r#"
 +//- proc_macros: mirror
 +use proc_macros::mirror;
 +
 +mirror$0! {}
 +"#,
 +            expect![[r#"
 +                mirror Macro FileId(1) 1..77 22..28
 +
 +                FileId(0) 26..32
 +            "#]],
 +        )
 +    }
 +
 +    #[test]
 +    fn derive() {
 +        check(
 +            r#"
 +//- proc_macros: derive_identity
 +//- minicore: derive
 +use proc_macros::DeriveIdentity;
 +
 +#[derive(proc_macros::DeriveIdentity$0)]
 +struct Foo;
 +"#,
 +            expect![[r#"
 +                derive_identity Derive FileId(2) 1..107 45..60
 +
 +                FileId(0) 17..31
 +                FileId(0) 56..70
 +            "#]],
 +        );
 +        check(
 +            r#"
 +#![crate_type="proc-macro"]
 +#[proc_macro_derive(Derive, attributes(x))]
 +pub fn deri$0ve(_stream: TokenStream) -> TokenStream {}
 +"#,
 +            expect![[r#"
 +                derive Derive FileId(0) 28..125 79..85
 +
 +                (no references)
 +            "#]],
 +        );
 +    }
 +}
index 3fb49b45d9888ebb7fcac3d4e4431b3f9f596e75,0000000000000000000000000000000000000000..50371d620eb2a41954412a3f1dd79bc33c18756e
mode 100644,000000..100644
--- /dev/null
@@@ -1,449 -1,0 +1,487 @@@
- use ide_db::{FxHashMap, RootDatabase};
 +pub(crate) mod tags;
 +
 +mod highlights;
 +mod injector;
 +
 +mod highlight;
 +mod format;
 +mod macro_;
 +mod inject;
 +mod escape;
 +
 +mod html;
 +#[cfg(test)]
 +mod tests;
 +
 +use hir::{Name, Semantics};
-     FileId, HlMod, HlTag,
++use ide_db::{FxHashMap, RootDatabase, SymbolKind};
 +use syntax::{
 +    ast, AstNode, AstToken, NodeOrToken, SyntaxKind::*, SyntaxNode, TextRange, WalkEvent, T,
 +};
 +
 +use crate::{
 +    syntax_highlighting::{
 +        escape::highlight_escape_string, format::highlight_format_string, highlights::Highlights,
 +        macro_::MacroHighlighter, tags::Highlight,
 +    },
-     syntactic_name_ref_highlighting: bool,
++    FileId, HlMod, HlOperator, HlPunct, HlTag,
 +};
 +
 +pub(crate) use html::highlight_as_html;
 +
 +#[derive(Debug, Clone, Copy)]
 +pub struct HlRange {
 +    pub range: TextRange,
 +    pub highlight: Highlight,
 +    pub binding_hash: Option<u64>,
 +}
 +
++#[derive(Copy, Clone, Debug, PartialEq, Eq)]
++pub struct HighlightConfig {
++    /// Whether to highlight strings
++    pub strings: bool,
++    /// Whether to highlight punctuation
++    pub punctuation: bool,
++    /// Whether to specialize punctuation highlights
++    pub specialize_punctuation: bool,
++    /// Whether to highlight operators
++    pub operator: bool,
++    /// Whether to specialize operator highlights
++    pub specialize_operator: bool,
++    /// Whether to inject highlights into doc comments
++    pub inject_doc_comment: bool,
++    /// Whether to highlight the macro call bang
++    pub macro_bang: bool,
++    /// Whether to highlight unresolved things by their syntax
++    pub syntactic_name_ref_highlighting: bool,
++}
++
 +// Feature: Semantic Syntax Highlighting
 +//
 +// rust-analyzer highlights the code semantically.
 +// For example, `Bar` in `foo::Bar` might be colored differently depending on whether `Bar` is an enum or a trait.
 +// rust-analyzer does not specify colors directly, instead it assigns a tag (like `struct`) and a set of modifiers (like `declaration`) to each token.
 +// It's up to the client to map those to specific colors.
 +//
 +// The general rule is that a reference to an entity gets colored the same way as the entity itself.
 +// We also give special modifier for `mut` and `&mut` local variables.
 +//
 +//
 +// .Token Tags
 +//
 +// Rust-analyzer currently emits the following token tags:
 +//
 +// - For items:
 +// +
 +// [horizontal]
 +// attribute:: Emitted for attribute macros.
 +// enum:: Emitted for enums.
 +// function:: Emitted for free-standing functions.
 +// derive:: Emitted for derive macros.
 +// macro:: Emitted for function-like macros.
 +// method:: Emitted for associated functions, also known as methods.
 +// namespace:: Emitted for modules.
 +// struct:: Emitted for structs.
 +// trait:: Emitted for traits.
 +// typeAlias:: Emitted for type aliases and `Self` in `impl`s.
 +// union:: Emitted for unions.
 +//
 +// - For literals:
 +// +
 +// [horizontal]
 +// boolean:: Emitted for the boolean literals `true` and `false`.
 +// character:: Emitted for character literals.
 +// number:: Emitted for numeric literals.
 +// string:: Emitted for string literals.
 +// escapeSequence:: Emitted for escaped sequences inside strings like `\n`.
 +// formatSpecifier:: Emitted for format specifiers `{:?}` in `format!`-like macros.
 +//
 +// - For operators:
 +// +
 +// [horizontal]
 +// operator:: Emitted for general operators.
 +// arithmetic:: Emitted for the arithmetic operators `+`, `-`, `*`, `/`, `+=`, `-=`, `*=`, `/=`.
 +// bitwise:: Emitted for the bitwise operators `|`, `&`, `!`, `^`, `|=`, `&=`, `^=`.
 +// comparison:: Emitted for the comparison operators `>`, `<`, `==`, `>=`, `<=`, `!=`.
 +// logical:: Emitted for the logical operators `||`, `&&`, `!`.
 +//
 +// - For punctuation:
 +// +
 +// [horizontal]
 +// punctuation:: Emitted for general punctuation.
 +// attributeBracket:: Emitted for attribute invocation brackets, that is the `#[` and `]` tokens.
 +// angle:: Emitted for `<>` angle brackets.
 +// brace:: Emitted for `{}` braces.
 +// bracket:: Emitted for `[]` brackets.
 +// parenthesis:: Emitted for `()` parentheses.
 +// colon:: Emitted for the `:` token.
 +// comma:: Emitted for the `,` token.
 +// dot:: Emitted for the `.` token.
 +// semi:: Emitted for the `;` token.
 +// macroBang:: Emitted for the `!` token in macro calls.
 +//
 +// //-
 +//
 +// [horizontal]
 +// builtinAttribute:: Emitted for names of builtin attributes in attribute paths, the `repr` in `#[repr(u8)]` for example.
 +// builtinType:: Emitted for builtin types like `u32`, `str` and `f32`.
 +// comment:: Emitted for comments.
 +// constParameter:: Emitted for const parameters.
 +// deriveHelper:: Emitted for derive helper attributes.
 +// enumMember:: Emitted for enum variants.
 +// generic:: Emitted for generic tokens that have no mapping.
 +// keyword:: Emitted for keywords.
 +// label:: Emitted for labels.
 +// lifetime:: Emitted for lifetimes.
 +// parameter:: Emitted for non-self function parameters.
 +// property:: Emitted for struct and union fields.
 +// selfKeyword:: Emitted for the self function parameter and self path-specifier.
 +// selfTypeKeyword:: Emitted for the Self type parameter.
 +// toolModule:: Emitted for tool modules.
 +// typeParameter:: Emitted for type parameters.
 +// unresolvedReference:: Emitted for unresolved references, names that rust-analyzer can't find the definition of.
 +// variable:: Emitted for locals, constants and statics.
 +//
 +//
 +// .Token Modifiers
 +//
 +// Token modifiers allow styling some elements in the source code more precisely.
 +//
 +// Rust-analyzer currently emits the following token modifiers:
 +//
 +// [horizontal]
 +// async:: Emitted for async functions and the `async` and `await` keywords.
 +// attribute:: Emitted for tokens inside attributes.
 +// callable:: Emitted for locals whose types implements one of the `Fn*` traits.
 +// constant:: Emitted for consts.
 +// consuming:: Emitted for locals that are being consumed when used in a function call.
 +// controlFlow:: Emitted for control-flow related tokens, this includes the `?` operator.
 +// crateRoot:: Emitted for crate names, like `serde` and `crate`.
 +// declaration:: Emitted for names of definitions, like `foo` in `fn foo() {}`.
 +// defaultLibrary:: Emitted for items from built-in crates (std, core, alloc, test and proc_macro).
 +// documentation:: Emitted for documentation comments.
 +// injected:: Emitted for doc-string injected highlighting like rust source blocks in documentation.
 +// intraDocLink:: Emitted for intra doc links in doc-strings.
 +// library:: Emitted for items that are defined outside of the current crate.
 +// mutable:: Emitted for mutable locals and statics as well as functions taking `&mut self`.
 +// public:: Emitted for items that are from the current crate and are `pub`.
 +// reference:: Emitted for locals behind a reference and functions taking `self` by reference.
 +// static:: Emitted for "static" functions, also known as functions that do not take a `self` param, as well as statics and consts.
 +// trait:: Emitted for associated trait items.
 +// unsafe:: Emitted for unsafe operations, like unsafe function calls, as well as the `unsafe` token.
 +//
 +//
 +// image::https://user-images.githubusercontent.com/48062697/113164457-06cfb980-9239-11eb-819b-0f93e646acf8.png[]
 +// image::https://user-images.githubusercontent.com/48062697/113187625-f7f50100-9250-11eb-825e-91c58f236071.png[]
 +pub(crate) fn highlight(
 +    db: &RootDatabase,
++    config: HighlightConfig,
 +    file_id: FileId,
 +    range_to_highlight: Option<TextRange>,
-     traverse(
-         &mut hl,
-         &sema,
-         file_id,
-         &root,
-         krate,
-         range_to_highlight,
-         syntactic_name_ref_highlighting,
-     );
 +) -> Vec<HlRange> {
 +    let _p = profile::span("highlight");
 +    let sema = Semantics::new(db);
 +
 +    // Determine the root based on the given range.
 +    let (root, range_to_highlight) = {
 +        let source_file = sema.parse(file_id);
 +        let source_file = source_file.syntax();
 +        match range_to_highlight {
 +            Some(range) => {
 +                let node = match source_file.covering_element(range) {
 +                    NodeOrToken::Node(it) => it,
 +                    NodeOrToken::Token(it) => it.parent().unwrap_or_else(|| source_file.clone()),
 +                };
 +                (node, range)
 +            }
 +            None => (source_file.clone(), source_file.text_range()),
 +        }
 +    };
 +
 +    let mut hl = highlights::Highlights::new(root.text_range());
 +    let krate = match sema.scope(&root) {
 +        Some(it) => it.krate(),
 +        None => return hl.to_vec(),
 +    };
-     syntactic_name_ref_highlighting: bool,
++    traverse(&mut hl, &sema, config, file_id, &root, krate, range_to_highlight);
 +    hl.to_vec()
 +}
 +
 +fn traverse(
 +    hl: &mut Highlights,
 +    sema: &Semantics<'_, RootDatabase>,
++    config: HighlightConfig,
 +    file_id: FileId,
 +    root: &SyntaxNode,
 +    krate: hir::Crate,
 +    range_to_highlight: TextRange,
-                 // Doc comment highlighting injection, we do this when leaving the node
-                 // so that we overwrite the highlighting of the doc comment itself.
-                 inject::doc_comment(hl, sema, file_id, &node);
 +) {
 +    let is_unlinked = sema.to_module_def(file_id).is_none();
 +    let mut bindings_shadow_count: FxHashMap<Name, u32> = FxHashMap::default();
 +
 +    enum AttrOrDerive {
 +        Attr(ast::Item),
 +        Derive(ast::Item),
 +    }
 +
 +    impl AttrOrDerive {
 +        fn item(&self) -> &ast::Item {
 +            match self {
 +                AttrOrDerive::Attr(item) | AttrOrDerive::Derive(item) => item,
 +            }
 +        }
 +    }
 +
 +    let mut tt_level = 0;
 +    let mut attr_or_derive_item = None;
 +    let mut current_macro: Option<ast::Macro> = None;
 +    let mut macro_highlighter = MacroHighlighter::default();
 +    let mut inside_attribute = false;
 +
 +    // Walk all nodes, keeping track of whether we are inside a macro or not.
 +    // If in macro, expand it first and highlight the expanded code.
 +    for event in root.preorder_with_tokens() {
 +        use WalkEvent::{Enter, Leave};
 +
 +        let range = match &event {
 +            Enter(it) | Leave(it) => it.text_range(),
 +        };
 +
 +        // Element outside of the viewport, no need to highlight
 +        if range_to_highlight.intersect(range).is_none() {
 +            continue;
 +        }
 +
 +        // set macro and attribute highlighting states
 +        match event.clone() {
 +            Enter(NodeOrToken::Node(node)) if ast::TokenTree::can_cast(node.kind()) => {
 +                tt_level += 1;
 +            }
 +            Leave(NodeOrToken::Node(node)) if ast::TokenTree::can_cast(node.kind()) => {
 +                tt_level -= 1;
 +            }
 +            Enter(NodeOrToken::Node(node)) if ast::Attr::can_cast(node.kind()) => {
 +                inside_attribute = true
 +            }
 +            Leave(NodeOrToken::Node(node)) if ast::Attr::can_cast(node.kind()) => {
 +                inside_attribute = false
 +            }
 +
 +            Enter(NodeOrToken::Node(node)) if ast::Item::can_cast(node.kind()) => {
 +                match ast::Item::cast(node.clone()) {
 +                    Some(ast::Item::MacroRules(mac)) => {
 +                        macro_highlighter.init();
 +                        current_macro = Some(mac.into());
 +                        continue;
 +                    }
 +                    Some(ast::Item::MacroDef(mac)) => {
 +                        macro_highlighter.init();
 +                        current_macro = Some(mac.into());
 +                        continue;
 +                    }
 +                    Some(item) => {
 +                        if matches!(node.kind(), FN | CONST | STATIC) {
 +                            bindings_shadow_count.clear();
 +                        }
 +
 +                        if attr_or_derive_item.is_none() {
 +                            if sema.is_attr_macro_call(&item) {
 +                                attr_or_derive_item = Some(AttrOrDerive::Attr(item));
 +                            } else {
 +                                let adt = match item {
 +                                    ast::Item::Enum(it) => Some(ast::Adt::Enum(it)),
 +                                    ast::Item::Struct(it) => Some(ast::Adt::Struct(it)),
 +                                    ast::Item::Union(it) => Some(ast::Adt::Union(it)),
 +                                    _ => None,
 +                                };
 +                                match adt {
 +                                    Some(adt) if sema.is_derive_annotated(&adt) => {
 +                                        attr_or_derive_item =
 +                                            Some(AttrOrDerive::Derive(ast::Item::from(adt)));
 +                                    }
 +                                    _ => (),
 +                                }
 +                            }
 +                        }
 +                    }
 +                    _ => (),
 +                }
 +            }
 +            Leave(NodeOrToken::Node(node)) if ast::Item::can_cast(node.kind()) => {
 +                match ast::Item::cast(node.clone()) {
 +                    Some(ast::Item::MacroRules(mac)) => {
 +                        assert_eq!(current_macro, Some(mac.into()));
 +                        current_macro = None;
 +                        macro_highlighter = MacroHighlighter::default();
 +                    }
 +                    Some(ast::Item::MacroDef(mac)) => {
 +                        assert_eq!(current_macro, Some(mac.into()));
 +                        current_macro = None;
 +                        macro_highlighter = MacroHighlighter::default();
 +                    }
 +                    Some(item)
 +                        if attr_or_derive_item.as_ref().map_or(false, |it| *it.item() == item) =>
 +                    {
 +                        attr_or_derive_item = None;
 +                    }
 +                    _ => (),
 +                }
 +            }
 +            _ => (),
 +        }
 +
 +        let element = match event {
 +            Enter(NodeOrToken::Token(tok)) if tok.kind() == WHITESPACE => continue,
 +            Enter(it) => it,
 +            Leave(NodeOrToken::Token(_)) => continue,
 +            Leave(NodeOrToken::Node(node)) => {
-                         if inject::ra_fixture(hl, sema, &string, &expanded_string).is_some() {
++                if config.inject_doc_comment {
++                    // Doc comment highlighting injection, we do this when leaving the node
++                    // so that we overwrite the highlighting of the doc comment itself.
++                    inject::doc_comment(hl, sema, config, file_id, &node);
++                }
 +                continue;
 +            }
 +        };
 +
 +        if current_macro.is_some() {
 +            if let Some(tok) = element.as_token() {
 +                macro_highlighter.advance(tok);
 +            }
 +        }
 +
 +        let element = match element.clone() {
 +            NodeOrToken::Node(n) => match ast::NameLike::cast(n) {
 +                Some(n) => NodeOrToken::Node(n),
 +                None => continue,
 +            },
 +            NodeOrToken::Token(t) => NodeOrToken::Token(t),
 +        };
 +        let token = element.as_token().cloned();
 +
 +        // Descending tokens into macros is expensive even if no descending occurs, so make sure
 +        // that we actually are in a position where descending is possible.
 +        let in_macro = tt_level > 0
 +            || match attr_or_derive_item {
 +                Some(AttrOrDerive::Attr(_)) => true,
 +                Some(AttrOrDerive::Derive(_)) => inside_attribute,
 +                None => false,
 +            };
 +        let descended_element = if in_macro {
 +            // Attempt to descend tokens into macro-calls.
 +            match element {
 +                NodeOrToken::Token(token) if token.kind() != COMMENT => {
 +                    let token = match attr_or_derive_item {
 +                        Some(AttrOrDerive::Attr(_)) => {
 +                            sema.descend_into_macros_with_kind_preference(token)
 +                        }
 +                        Some(AttrOrDerive::Derive(_)) | None => {
 +                            sema.descend_into_macros_single(token)
 +                        }
 +                    };
 +                    match token.parent().and_then(ast::NameLike::cast) {
 +                        // Remap the token into the wrapping single token nodes
 +                        Some(parent) => match (token.kind(), parent.syntax().kind()) {
 +                            (T![self] | T![ident], NAME | NAME_REF) => NodeOrToken::Node(parent),
 +                            (T![self] | T![super] | T![crate] | T![Self], NAME_REF) => {
 +                                NodeOrToken::Node(parent)
 +                            }
 +                            (INT_NUMBER, NAME_REF) => NodeOrToken::Node(parent),
 +                            (LIFETIME_IDENT, LIFETIME) => NodeOrToken::Node(parent),
 +                            _ => NodeOrToken::Token(token),
 +                        },
 +                        None => NodeOrToken::Token(token),
 +                    }
 +                }
 +                e => e,
 +            }
 +        } else {
 +            element
 +        };
 +
 +        // FIXME: do proper macro def highlighting https://github.com/rust-lang/rust-analyzer/issues/6232
 +        // Skip metavariables from being highlighted to prevent keyword highlighting in them
 +        if descended_element.as_token().and_then(|t| macro_highlighter.highlight(t)).is_some() {
 +            continue;
 +        }
 +
 +        // string highlight injections, note this does not use the descended element as proc-macros
 +        // can rewrite string literals which invalidates our indices
 +        if let (Some(token), Some(descended_token)) = (token, descended_element.as_token()) {
 +            if ast::String::can_cast(token.kind()) && ast::String::can_cast(descended_token.kind())
 +            {
 +                let string = ast::String::cast(token);
 +                let string_to_highlight = ast::String::cast(descended_token.clone());
 +                if let Some((string, expanded_string)) = string.zip(string_to_highlight) {
 +                    if string.is_raw() {
-                 syntactic_name_ref_highlighting,
++                        if inject::ra_fixture(hl, sema, config, &string, &expanded_string).is_some()
++                        {
 +                            continue;
 +                        }
 +                    }
 +                    highlight_format_string(hl, &string, &expanded_string, range);
 +                    highlight_escape_string(hl, &string, range.start());
 +                }
 +            } else if ast::ByteString::can_cast(token.kind())
 +                && ast::ByteString::can_cast(descended_token.kind())
 +            {
 +                if let Some(byte_string) = ast::ByteString::cast(token) {
 +                    highlight_escape_string(hl, &byte_string, range.start());
 +                }
 +            }
 +        }
 +
 +        let element = match descended_element {
 +            NodeOrToken::Node(name_like) => highlight::name_like(
 +                sema,
 +                krate,
 +                &mut bindings_shadow_count,
++                config.syntactic_name_ref_highlighting,
 +                name_like,
 +            ),
 +            NodeOrToken::Token(token) => highlight::token(sema, token).zip(Some(None)),
 +        };
 +        if let Some((mut highlight, binding_hash)) = element {
 +            if is_unlinked && highlight.tag == HlTag::UnresolvedReference {
 +                // do not emit unresolved references if the file is unlinked
 +                // let the editor do its highlighting for these tokens instead
 +                continue;
 +            }
 +            if highlight.tag == HlTag::UnresolvedReference
 +                && matches!(attr_or_derive_item, Some(AttrOrDerive::Derive(_)) if inside_attribute)
 +            {
 +                // do not emit unresolved references in derive helpers if the token mapping maps to
 +                // something unresolvable. FIXME: There should be a way to prevent that
 +                continue;
 +            }
++
++            // apply config filtering
++            match &mut highlight.tag {
++                HlTag::StringLiteral if !config.strings => continue,
++                // If punctuation is disabled, make the macro bang part of the macro call again.
++                tag @ HlTag::Punctuation(HlPunct::MacroBang) => {
++                    if !config.macro_bang {
++                        *tag = HlTag::Symbol(SymbolKind::Macro);
++                    } else if !config.specialize_punctuation {
++                        *tag = HlTag::Punctuation(HlPunct::Other);
++                    }
++                }
++                HlTag::Punctuation(_) if !config.punctuation => continue,
++                tag @ HlTag::Punctuation(_) if !config.specialize_punctuation => {
++                    *tag = HlTag::Punctuation(HlPunct::Other);
++                }
++                HlTag::Operator(_) if !config.operator && highlight.mods.is_empty() => continue,
++                tag @ HlTag::Operator(_) if !config.specialize_operator => {
++                    *tag = HlTag::Operator(HlOperator::Other);
++                }
++                _ => (),
++            }
++
 +            if inside_attribute {
 +                highlight |= HlMod::Attribute
 +            }
 +
 +            hl.add(HlRange { range, highlight, binding_hash });
 +        }
 +    }
 +}
index 9777c014c7a1648f933f88ead4341f2b14e1a903,0000000000000000000000000000000000000000..e91fd7f12571649cee96e51e652165cad39bc993
mode 100644,000000..100644
--- /dev/null
@@@ -1,97 -1,0 +1,114 @@@
- use crate::{syntax_highlighting::highlight, FileId, RootDatabase};
 +//! Renders a bit of code as HTML.
 +
 +use ide_db::base_db::SourceDatabase;
 +use oorandom::Rand32;
 +use stdx::format_to;
 +use syntax::AstNode;
 +
-     let hl_ranges = highlight(db, file_id, None, false);
++use crate::{
++    syntax_highlighting::{highlight, HighlightConfig},
++    FileId, RootDatabase,
++};
 +
 +pub(crate) fn highlight_as_html(db: &RootDatabase, file_id: FileId, rainbow: bool) -> String {
 +    let parse = db.parse(file_id);
 +
 +    fn rainbowify(seed: u64) -> String {
 +        let mut rng = Rand32::new(seed);
 +        format!(
 +            "hsl({h},{s}%,{l}%)",
 +            h = rng.rand_range(0..361),
 +            s = rng.rand_range(42..99),
 +            l = rng.rand_range(40..91),
 +        )
 +    }
 +
++    let hl_ranges = highlight(
++        db,
++        HighlightConfig {
++            strings: true,
++            punctuation: true,
++            specialize_punctuation: true,
++            specialize_operator: true,
++            operator: true,
++            inject_doc_comment: true,
++            macro_bang: true,
++            syntactic_name_ref_highlighting: false,
++        },
++        file_id,
++        None,
++    );
 +    let text = parse.tree().syntax().to_string();
 +    let mut buf = String::new();
 +    buf.push_str(STYLE);
 +    buf.push_str("<pre><code>");
 +    for r in &hl_ranges {
 +        let chunk = html_escape(&text[r.range]);
 +        if r.highlight.is_empty() {
 +            format_to!(buf, "{}", chunk);
 +            continue;
 +        }
 +
 +        let class = r.highlight.to_string().replace('.', " ");
 +        let color = match (rainbow, r.binding_hash) {
 +            (true, Some(hash)) => {
 +                format!(" data-binding-hash=\"{}\" style=\"color: {};\"", hash, rainbowify(hash))
 +            }
 +            _ => "".into(),
 +        };
 +        format_to!(buf, "<span class=\"{}\"{}>{}</span>", class, color, chunk);
 +    }
 +    buf.push_str("</code></pre>");
 +    buf
 +}
 +
 +//FIXME: like, real html escaping
 +fn html_escape(text: &str) -> String {
 +    text.replace('<', "&lt;").replace('>', "&gt;")
 +}
 +
 +const STYLE: &str = "
 +<style>
 +body                { margin: 0; }
 +pre                 { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
 +
 +.lifetime           { color: #DFAF8F; font-style: italic; }
 +.label              { color: #DFAF8F; font-style: italic; }
 +.comment            { color: #7F9F7F; }
 +.documentation      { color: #629755; }
 +.intra_doc_link     { font-style: italic; }
 +.injected           { opacity: 0.65 ; }
 +.struct, .enum      { color: #7CB8BB; }
 +.enum_variant       { color: #BDE0F3; }
 +.string_literal     { color: #CC9393; }
 +.field              { color: #94BFF3; }
 +.function           { color: #93E0E3; }
 +.function.unsafe    { color: #BC8383; }
 +.trait.unsafe       { color: #BC8383; }
 +.operator.unsafe    { color: #BC8383; }
 +.mutable.unsafe     { color: #BC8383; text-decoration: underline; }
 +.keyword.unsafe     { color: #BC8383; font-weight: bold; }
 +.macro.unsafe       { color: #BC8383; }
 +.parameter          { color: #94BFF3; }
 +.text               { color: #DCDCCC; }
 +.type               { color: #7CB8BB; }
 +.builtin_type       { color: #8CD0D3; }
 +.type_param         { color: #DFAF8F; }
 +.attribute          { color: #94BFF3; }
 +.numeric_literal    { color: #BFEBBF; }
 +.bool_literal       { color: #BFE6EB; }
 +.macro              { color: #94BFF3; }
 +.derive             { color: #94BFF3; font-style: italic; }
 +.module             { color: #AFD8AF; }
 +.value_param        { color: #DCDCCC; }
 +.variable           { color: #DCDCCC; }
 +.format_specifier   { color: #CC696B; }
 +.mutable            { text-decoration: underline; }
 +.escape_sequence    { color: #94BFF3; }
 +.keyword            { color: #F0DFAF; font-weight: bold; }
 +.control            { font-style: italic; }
 +.reference          { font-style: italic; font-weight: bold; }
 +
 +.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
 +</style>
 +";
index f376f9fda7a57799c0be9b774a81508a065d2893,0000000000000000000000000000000000000000..9139528c7ed96d08685e6bb646e50aa7482bf5e4
mode 100644,000000..100644
--- /dev/null
@@@ -1,279 -1,0 +1,294 @@@
-     syntax_highlighting::{highlights::Highlights, injector::Injector},
 +//! "Recursive" Syntax highlighting for code in doctests and fixtures.
 +
 +use std::mem;
 +
 +use either::Either;
 +use hir::{InFile, Semantics};
 +use ide_db::{
 +    active_parameter::ActiveParameter, base_db::FileId, defs::Definition, rust_doc::is_rust_fence,
 +    SymbolKind,
 +};
 +use syntax::{
 +    ast::{self, AstNode, IsString, QuoteOffsets},
 +    AstToken, NodeOrToken, SyntaxNode, TextRange, TextSize,
 +};
 +
 +use crate::{
 +    doc_links::{doc_attributes, extract_definitions_from_docs, resolve_doc_path_for_def},
-     for mut hl_range in analysis.highlight(tmp_file_id).unwrap() {
++    syntax_highlighting::{highlights::Highlights, injector::Injector, HighlightConfig},
 +    Analysis, HlMod, HlRange, HlTag, RootDatabase,
 +};
 +
 +pub(super) fn ra_fixture(
 +    hl: &mut Highlights,
 +    sema: &Semantics<'_, RootDatabase>,
++    config: HighlightConfig,
 +    literal: &ast::String,
 +    expanded: &ast::String,
 +) -> Option<()> {
 +    let active_parameter = ActiveParameter::at_token(sema, expanded.syntax().clone())?;
 +    if !active_parameter.ident().map_or(false, |name| name.text().starts_with("ra_fixture")) {
 +        return None;
 +    }
 +    let value = literal.value()?;
 +
 +    if let Some(range) = literal.open_quote_text_range() {
 +        hl.add(HlRange { range, highlight: HlTag::StringLiteral.into(), binding_hash: None })
 +    }
 +
 +    let mut inj = Injector::default();
 +
 +    let mut text = &*value;
 +    let mut offset: TextSize = 0.into();
 +
 +    while !text.is_empty() {
 +        let marker = "$0";
 +        let idx = text.find(marker).unwrap_or(text.len());
 +        let (chunk, next) = text.split_at(idx);
 +        inj.add(chunk, TextRange::at(offset, TextSize::of(chunk)));
 +
 +        text = next;
 +        offset += TextSize::of(chunk);
 +
 +        if let Some(next) = text.strip_prefix(marker) {
 +            if let Some(range) = literal.map_range_up(TextRange::at(offset, TextSize::of(marker))) {
 +                hl.add(HlRange { range, highlight: HlTag::Keyword.into(), binding_hash: None });
 +            }
 +
 +            text = next;
 +
 +            let marker_len = TextSize::of(marker);
 +            offset += marker_len;
 +        }
 +    }
 +
 +    let (analysis, tmp_file_id) = Analysis::from_single_file(inj.take_text());
 +
-     if let Ok(ranges) = analysis.with_db(|db| super::highlight(db, tmp_file_id, None, true)) {
++    for mut hl_range in analysis
++        .highlight(
++            HighlightConfig { syntactic_name_ref_highlighting: false, ..config },
++            tmp_file_id,
++        )
++        .unwrap()
++    {
 +        for range in inj.map_range_up(hl_range.range) {
 +            if let Some(range) = literal.map_range_up(range) {
 +                hl_range.range = range;
 +                hl.add(hl_range);
 +            }
 +        }
 +    }
 +
 +    if let Some(range) = literal.close_quote_text_range() {
 +        hl.add(HlRange { range, highlight: HlTag::StringLiteral.into(), binding_hash: None })
 +    }
 +
 +    Some(())
 +}
 +
 +const RUSTDOC_FENCE_LENGTH: usize = 3;
 +const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
 +
 +/// Injection of syntax highlighting of doctests and intra doc links.
 +pub(super) fn doc_comment(
 +    hl: &mut Highlights,
 +    sema: &Semantics<'_, RootDatabase>,
++    config: HighlightConfig,
 +    src_file_id: FileId,
 +    node: &SyntaxNode,
 +) {
 +    let (attributes, def) = match doc_attributes(sema, node) {
 +        Some(it) => it,
 +        None => return,
 +    };
 +    let src_file_id = src_file_id.into();
 +
 +    // Extract intra-doc links and emit highlights for them.
 +    if let Some((docs, doc_mapping)) = attributes.docs_with_rangemap(sema.db) {
 +        extract_definitions_from_docs(&docs)
 +            .into_iter()
 +            .filter_map(|(range, link, ns)| {
 +                doc_mapping.map(range).filter(|mapping| mapping.file_id == src_file_id).and_then(
 +                    |InFile { value: mapped_range, .. }| {
 +                        Some(mapped_range).zip(resolve_doc_path_for_def(sema.db, def, &link, ns))
 +                    },
 +                )
 +            })
 +            .for_each(|(range, def)| {
 +                hl.add(HlRange {
 +                    range,
 +                    highlight: module_def_to_hl_tag(def)
 +                        | HlMod::Documentation
 +                        | HlMod::Injected
 +                        | HlMod::IntraDocLink,
 +                    binding_hash: None,
 +                })
 +            });
 +    }
 +
 +    // Extract doc-test sources from the docs and calculate highlighting for them.
 +
 +    let mut inj = Injector::default();
 +    inj.add_unmapped("fn doctest() {\n");
 +
 +    let attrs_source_map = attributes.source_map(sema.db);
 +
 +    let mut is_codeblock = false;
 +    let mut is_doctest = false;
 +
 +    let mut new_comments = Vec::new();
 +    let mut string;
 +
 +    for attr in attributes.by_key("doc").attrs() {
 +        let InFile { file_id, value: src } = attrs_source_map.source_of(attr);
 +        if file_id != src_file_id {
 +            continue;
 +        }
 +        let (line, range) = match &src {
 +            Either::Left(it) => {
 +                string = match find_doc_string_in_attr(attr, it) {
 +                    Some(it) => it,
 +                    None => continue,
 +                };
 +                let text = string.text();
 +                let text_range = string.syntax().text_range();
 +                match string.quote_offsets() {
 +                    Some(QuoteOffsets { contents, .. }) => {
 +                        (&text[contents - text_range.start()], contents)
 +                    }
 +                    None => (text, text_range),
 +                }
 +            }
 +            Either::Right(comment) => {
 +                let value = comment.prefix().len();
 +                let range = comment.syntax().text_range();
 +                (
 +                    &comment.text()[value..],
 +                    TextRange::new(range.start() + TextSize::try_from(value).unwrap(), range.end()),
 +                )
 +            }
 +        };
 +
 +        let mut range_start = range.start();
 +        for line in line.split('\n') {
 +            let line_len = TextSize::from(line.len() as u32);
 +            let prev_range_start = {
 +                let next_range_start = range_start + line_len + TextSize::from(1);
 +                mem::replace(&mut range_start, next_range_start)
 +            };
 +            let mut pos = TextSize::from(0);
 +
 +            match RUSTDOC_FENCES.into_iter().find_map(|fence| line.find(fence)) {
 +                Some(idx) => {
 +                    is_codeblock = !is_codeblock;
 +                    // Check whether code is rust by inspecting fence guards
 +                    let guards = &line[idx + RUSTDOC_FENCE_LENGTH..];
 +                    let is_rust = is_rust_fence(guards);
 +                    is_doctest = is_codeblock && is_rust;
 +                    continue;
 +                }
 +                None if !is_doctest => continue,
 +                None => (),
 +            }
 +
 +            // whitespace after comment is ignored
 +            if let Some(ws) = line[pos.into()..].chars().next().filter(|c| c.is_whitespace()) {
 +                pos += TextSize::of(ws);
 +            }
 +            // lines marked with `#` should be ignored in output, we skip the `#` char
 +            if line[pos.into()..].starts_with('#') {
 +                pos += TextSize::of('#');
 +            }
 +
 +            new_comments.push(TextRange::at(prev_range_start, pos));
 +            inj.add(&line[pos.into()..], TextRange::new(pos, line_len) + prev_range_start);
 +            inj.add_unmapped("\n");
 +        }
 +    }
 +
 +    if new_comments.is_empty() {
 +        return; // no need to run an analysis on an empty file
 +    }
 +
 +    inj.add_unmapped("\n}");
 +
 +    let (analysis, tmp_file_id) = Analysis::from_single_file(inj.take_text());
 +
++    if let Ok(ranges) = analysis.with_db(|db| {
++        super::highlight(
++            db,
++            HighlightConfig { syntactic_name_ref_highlighting: true, ..config },
++            tmp_file_id,
++            None,
++        )
++    }) {
 +        for HlRange { range, highlight, binding_hash } in ranges {
 +            for range in inj.map_range_up(range) {
 +                hl.add(HlRange { range, highlight: highlight | HlMod::Injected, binding_hash });
 +            }
 +        }
 +    }
 +
 +    for range in new_comments {
 +        hl.add(HlRange {
 +            range,
 +            highlight: HlTag::Comment | HlMod::Documentation,
 +            binding_hash: None,
 +        });
 +    }
 +}
 +
 +fn find_doc_string_in_attr(attr: &hir::Attr, it: &ast::Attr) -> Option<ast::String> {
 +    match it.expr() {
 +        // #[doc = lit]
 +        Some(ast::Expr::Literal(lit)) => match lit.kind() {
 +            ast::LiteralKind::String(it) => Some(it),
 +            _ => None,
 +        },
 +        // #[cfg_attr(..., doc = "", ...)]
 +        None => {
 +            // We gotta hunt the string token manually here
 +            let text = attr.string_value()?;
 +            // FIXME: We just pick the first string literal that has the same text as the doc attribute
 +            // This means technically we might highlight the wrong one
 +            it.syntax()
 +                .descendants_with_tokens()
 +                .filter_map(NodeOrToken::into_token)
 +                .filter_map(ast::String::cast)
 +                .find(|string| {
 +                    string.text().get(1..string.text().len() - 1).map_or(false, |it| it == text)
 +                })
 +        }
 +        _ => None,
 +    }
 +}
 +
 +fn module_def_to_hl_tag(def: Definition) -> HlTag {
 +    let symbol = match def {
 +        Definition::Module(_) => SymbolKind::Module,
 +        Definition::Function(_) => SymbolKind::Function,
 +        Definition::Adt(hir::Adt::Struct(_)) => SymbolKind::Struct,
 +        Definition::Adt(hir::Adt::Enum(_)) => SymbolKind::Enum,
 +        Definition::Adt(hir::Adt::Union(_)) => SymbolKind::Union,
 +        Definition::Variant(_) => SymbolKind::Variant,
 +        Definition::Const(_) => SymbolKind::Const,
 +        Definition::Static(_) => SymbolKind::Static,
 +        Definition::Trait(_) => SymbolKind::Trait,
 +        Definition::TypeAlias(_) => SymbolKind::TypeAlias,
 +        Definition::BuiltinType(_) => return HlTag::BuiltinType,
 +        Definition::Macro(_) => SymbolKind::Macro,
 +        Definition::Field(_) => SymbolKind::Field,
 +        Definition::SelfType(_) => SymbolKind::Impl,
 +        Definition::Local(_) => SymbolKind::Local,
 +        Definition::GenericParam(gp) => match gp {
 +            hir::GenericParam::TypeParam(_) => SymbolKind::TypeParam,
 +            hir::GenericParam::ConstParam(_) => SymbolKind::ConstParam,
 +            hir::GenericParam::LifetimeParam(_) => SymbolKind::LifetimeParam,
 +        },
 +        Definition::Label(_) => SymbolKind::Label,
 +        Definition::BuiltinAttr(_) => SymbolKind::BuiltinAttr,
 +        Definition::ToolModule(_) => SymbolKind::ToolModule,
 +        Definition::DeriveHelper(_) => SymbolKind::DeriveHelper,
 +    };
 +    HlTag::Symbol(symbol)
 +}
index 5262770f30317f9312cc156f5664f1185ceba8f9,0000000000000000000000000000000000000000..3949f1189bd5ed47d946d46dec67cc2102c7bd40
mode 100644,000000..100644
--- /dev/null
@@@ -1,340 -1,0 +1,344 @@@
-     const ALL: &'static [HlMod; HlMod::Unsafe as u8 as usize + 1] = &[
 +//! Defines token tags we use for syntax highlighting.
 +//! A tag is not unlike a CSS class.
 +
 +use std::{
 +    fmt::{self, Write},
 +    ops,
 +};
 +
 +use ide_db::SymbolKind;
 +
 +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
 +pub struct Highlight {
 +    pub tag: HlTag,
 +    pub mods: HlMods,
 +}
 +
 +#[derive(Default, Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
 +pub struct HlMods(u32);
 +
 +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
 +pub enum HlTag {
 +    Symbol(SymbolKind),
 +
 +    AttributeBracket,
 +    BoolLiteral,
 +    BuiltinType,
 +    ByteLiteral,
 +    CharLiteral,
 +    Comment,
 +    EscapeSequence,
 +    FormatSpecifier,
 +    Keyword,
 +    NumericLiteral,
 +    Operator(HlOperator),
 +    Punctuation(HlPunct),
 +    StringLiteral,
 +    UnresolvedReference,
 +
 +    // For things which don't have a specific highlight.
 +    None,
 +}
 +
 +// Don't forget to adjust the feature description in crates/ide/src/syntax_highlighting.rs.
 +// And make sure to use the lsp strings used when converting to the protocol in crates\rust-analyzer\src\semantic_tokens.rs, not the names of the variants here.
 +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
 +#[repr(u8)]
 +pub enum HlMod {
 +    /// Used for items in traits and impls.
 +    Associated = 0,
 +    /// Used with keywords like `async` and `await`.
 +    Async,
 +    /// Used to differentiate individual elements within attributes.
 +    Attribute,
 +    /// Callable item or value.
 +    Callable,
 +    /// Value that is being consumed in a function call
 +    Consuming,
 +    /// Used with keywords like `if` and `break`.
 +    ControlFlow,
 +    /// Used for crate names, like `serde`.
 +    CrateRoot,
 +    /// Used for items from built-in crates (std, core, alloc, test and proc_macro).
 +    DefaultLibrary,
 +    /// `foo` in `fn foo(x: i32)` is a definition, `foo` in `foo(90 + 2)` is
 +    /// not.
 +    Definition,
 +    /// Doc-strings like this one.
 +    Documentation,
 +    /// Highlighting injection like rust code in doc strings or ra_fixture.
 +    Injected,
 +    /// Used for intra doc links in doc injection.
 +    IntraDocLink,
 +    /// Used for items from other crates.
 +    Library,
 +    /// Mutable binding.
 +    Mutable,
 +    /// Used for public items.
 +    Public,
 +    /// Immutable reference.
 +    Reference,
 +    /// Used for associated functions.
 +    Static,
 +    /// Used for items in traits and trait impls.
 +    Trait,
 +    // Keep this last!
 +    /// Used for unsafe functions, unsafe traits, mutable statics, union accesses and unsafe operations.
 +    Unsafe,
 +}
 +
 +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
 +pub enum HlPunct {
 +    /// []
 +    Bracket,
 +    /// {}
 +    Brace,
 +    /// ()
 +    Parenthesis,
 +    /// <>
 +    Angle,
 +    /// ,
 +    Comma,
 +    /// .
 +    Dot,
 +    /// :
 +    Colon,
 +    /// ;
 +    Semi,
 +    /// ! (only for macro calls)
 +    MacroBang,
 +    ///
 +    Other,
 +}
 +
 +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
 +pub enum HlOperator {
 +    /// |, &, !, ^, |=, &=, ^=
 +    Bitwise,
 +    /// +, -, *, /, +=, -=, *=, /=
 +    Arithmetic,
 +    /// &&, ||, !
 +    Logical,
 +    /// >, <, ==, >=, <=, !=
 +    Comparison,
 +    ///
 +    Other,
 +}
 +
 +impl HlTag {
 +    fn as_str(self) -> &'static str {
 +        match self {
 +            HlTag::Symbol(symbol) => match symbol {
 +                SymbolKind::Attribute => "attribute",
 +                SymbolKind::BuiltinAttr => "builtin_attr",
 +                SymbolKind::Const => "constant",
 +                SymbolKind::ConstParam => "const_param",
 +                SymbolKind::Derive => "derive",
 +                SymbolKind::DeriveHelper => "derive_helper",
 +                SymbolKind::Enum => "enum",
 +                SymbolKind::Field => "field",
 +                SymbolKind::Function => "function",
 +                SymbolKind::Impl => "self_type",
 +                SymbolKind::Label => "label",
 +                SymbolKind::LifetimeParam => "lifetime",
 +                SymbolKind::Local => "variable",
 +                SymbolKind::Macro => "macro",
 +                SymbolKind::Module => "module",
 +                SymbolKind::SelfParam => "self_keyword",
 +                SymbolKind::SelfType => "self_type_keyword",
 +                SymbolKind::Static => "static",
 +                SymbolKind::Struct => "struct",
 +                SymbolKind::ToolModule => "tool_module",
 +                SymbolKind::Trait => "trait",
 +                SymbolKind::TypeAlias => "type_alias",
 +                SymbolKind::TypeParam => "type_param",
 +                SymbolKind::Union => "union",
 +                SymbolKind::ValueParam => "value_param",
 +                SymbolKind::Variant => "enum_variant",
 +            },
 +            HlTag::AttributeBracket => "attribute_bracket",
 +            HlTag::BoolLiteral => "bool_literal",
 +            HlTag::BuiltinType => "builtin_type",
 +            HlTag::ByteLiteral => "byte_literal",
 +            HlTag::CharLiteral => "char_literal",
 +            HlTag::Comment => "comment",
 +            HlTag::EscapeSequence => "escape_sequence",
 +            HlTag::FormatSpecifier => "format_specifier",
 +            HlTag::Keyword => "keyword",
 +            HlTag::Punctuation(punct) => match punct {
 +                HlPunct::Bracket => "bracket",
 +                HlPunct::Brace => "brace",
 +                HlPunct::Parenthesis => "parenthesis",
 +                HlPunct::Angle => "angle",
 +                HlPunct::Comma => "comma",
 +                HlPunct::Dot => "dot",
 +                HlPunct::Colon => "colon",
 +                HlPunct::Semi => "semicolon",
 +                HlPunct::MacroBang => "macro_bang",
 +                HlPunct::Other => "punctuation",
 +            },
 +            HlTag::NumericLiteral => "numeric_literal",
 +            HlTag::Operator(op) => match op {
 +                HlOperator::Bitwise => "bitwise",
 +                HlOperator::Arithmetic => "arithmetic",
 +                HlOperator::Logical => "logical",
 +                HlOperator::Comparison => "comparison",
 +                HlOperator::Other => "operator",
 +            },
 +            HlTag::StringLiteral => "string_literal",
 +            HlTag::UnresolvedReference => "unresolved_reference",
 +            HlTag::None => "none",
 +        }
 +    }
 +}
 +
 +impl fmt::Display for HlTag {
 +    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 +        fmt::Display::fmt(self.as_str(), f)
 +    }
 +}
 +
 +impl HlMod {
-         self.tag == HlTag::None && self.mods == HlMods::default()
++    const ALL: &'static [HlMod; 19] = &[
 +        HlMod::Associated,
 +        HlMod::Async,
 +        HlMod::Attribute,
 +        HlMod::Callable,
 +        HlMod::Consuming,
 +        HlMod::ControlFlow,
 +        HlMod::CrateRoot,
 +        HlMod::DefaultLibrary,
 +        HlMod::Definition,
 +        HlMod::Documentation,
 +        HlMod::Injected,
 +        HlMod::IntraDocLink,
 +        HlMod::Library,
 +        HlMod::Mutable,
 +        HlMod::Public,
 +        HlMod::Reference,
 +        HlMod::Static,
 +        HlMod::Trait,
 +        HlMod::Unsafe,
 +    ];
 +
 +    fn as_str(self) -> &'static str {
 +        match self {
 +            HlMod::Associated => "associated",
 +            HlMod::Async => "async",
 +            HlMod::Attribute => "attribute",
 +            HlMod::Callable => "callable",
 +            HlMod::Consuming => "consuming",
 +            HlMod::ControlFlow => "control",
 +            HlMod::CrateRoot => "crate_root",
 +            HlMod::DefaultLibrary => "default_library",
 +            HlMod::Definition => "declaration",
 +            HlMod::Documentation => "documentation",
 +            HlMod::Injected => "injected",
 +            HlMod::IntraDocLink => "intra_doc_link",
 +            HlMod::Library => "library",
 +            HlMod::Mutable => "mutable",
 +            HlMod::Public => "public",
 +            HlMod::Reference => "reference",
 +            HlMod::Static => "static",
 +            HlMod::Trait => "trait",
 +            HlMod::Unsafe => "unsafe",
 +        }
 +    }
 +
 +    fn mask(self) -> u32 {
 +        1 << (self as u32)
 +    }
 +}
 +
 +impl fmt::Display for HlMod {
 +    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 +        fmt::Display::fmt(self.as_str(), f)
 +    }
 +}
 +
 +impl fmt::Display for Highlight {
 +    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 +        self.tag.fmt(f)?;
 +        for modifier in self.mods.iter() {
 +            f.write_char('.')?;
 +            modifier.fmt(f)?;
 +        }
 +        Ok(())
 +    }
 +}
 +
 +impl From<HlTag> for Highlight {
 +    fn from(tag: HlTag) -> Highlight {
 +        Highlight::new(tag)
 +    }
 +}
 +
 +impl From<HlOperator> for Highlight {
 +    fn from(op: HlOperator) -> Highlight {
 +        Highlight::new(HlTag::Operator(op))
 +    }
 +}
 +
 +impl From<HlPunct> for Highlight {
 +    fn from(punct: HlPunct) -> Highlight {
 +        Highlight::new(HlTag::Punctuation(punct))
 +    }
 +}
 +
 +impl From<SymbolKind> for Highlight {
 +    fn from(sym: SymbolKind) -> Highlight {
 +        Highlight::new(HlTag::Symbol(sym))
 +    }
 +}
 +
 +impl Highlight {
 +    pub(crate) fn new(tag: HlTag) -> Highlight {
 +        Highlight { tag, mods: HlMods::default() }
 +    }
 +    pub fn is_empty(&self) -> bool {
++        self.tag == HlTag::None && self.mods.is_empty()
 +    }
 +}
 +
 +impl ops::BitOr<HlMod> for HlTag {
 +    type Output = Highlight;
 +
 +    fn bitor(self, rhs: HlMod) -> Highlight {
 +        Highlight::new(self) | rhs
 +    }
 +}
 +
 +impl ops::BitOrAssign<HlMod> for HlMods {
 +    fn bitor_assign(&mut self, rhs: HlMod) {
 +        self.0 |= rhs.mask();
 +    }
 +}
 +
 +impl ops::BitOrAssign<HlMod> for Highlight {
 +    fn bitor_assign(&mut self, rhs: HlMod) {
 +        self.mods |= rhs;
 +    }
 +}
 +
 +impl ops::BitOr<HlMod> for Highlight {
 +    type Output = Highlight;
 +
 +    fn bitor(mut self, rhs: HlMod) -> Highlight {
 +        self |= rhs;
 +        self
 +    }
 +}
 +
 +impl HlMods {
++    pub fn is_empty(&self) -> bool {
++        self.0 == 0
++    }
++
 +    pub fn contains(self, m: HlMod) -> bool {
 +        self.0 & m.mask() == m.mask()
 +    }
 +
 +    pub fn iter(self) -> impl Iterator<Item = HlMod> {
 +        HlMod::ALL.iter().copied().filter(move |it| self.0 & it.mask() == it.mask())
 +    }
 +}
index a747b4bc1f9c052796ab8b13dac6820c8e556b41,0000000000000000000000000000000000000000..eef5baea9839239e3d1b0e8220f102d24cec7db0
mode 100644,000000..100644
--- /dev/null
@@@ -1,190 -1,0 +1,190 @@@
- <span class="comment documentation">/// This is an impl with a code block.</span>
 +
 +<style>
 +body                { margin: 0; }
 +pre                 { color: #DCDCCC; background: #3F3F3F; font-size: 22px; padding: 0.4em; }
 +
 +.lifetime           { color: #DFAF8F; font-style: italic; }
 +.label              { color: #DFAF8F; font-style: italic; }
 +.comment            { color: #7F9F7F; }
 +.documentation      { color: #629755; }
 +.intra_doc_link     { font-style: italic; }
 +.injected           { opacity: 0.65 ; }
 +.struct, .enum      { color: #7CB8BB; }
 +.enum_variant       { color: #BDE0F3; }
 +.string_literal     { color: #CC9393; }
 +.field              { color: #94BFF3; }
 +.function           { color: #93E0E3; }
 +.function.unsafe    { color: #BC8383; }
 +.trait.unsafe       { color: #BC8383; }
 +.operator.unsafe    { color: #BC8383; }
 +.mutable.unsafe     { color: #BC8383; text-decoration: underline; }
 +.keyword.unsafe     { color: #BC8383; font-weight: bold; }
 +.macro.unsafe       { color: #BC8383; }
 +.parameter          { color: #94BFF3; }
 +.text               { color: #DCDCCC; }
 +.type               { color: #7CB8BB; }
 +.builtin_type       { color: #8CD0D3; }
 +.type_param         { color: #DFAF8F; }
 +.attribute          { color: #94BFF3; }
 +.numeric_literal    { color: #BFEBBF; }
 +.bool_literal       { color: #BFE6EB; }
 +.macro              { color: #94BFF3; }
 +.derive             { color: #94BFF3; font-style: italic; }
 +.module             { color: #AFD8AF; }
 +.value_param        { color: #DCDCCC; }
 +.variable           { color: #DCDCCC; }
 +.format_specifier   { color: #CC696B; }
 +.mutable            { text-decoration: underline; }
 +.escape_sequence    { color: #94BFF3; }
 +.keyword            { color: #F0DFAF; font-weight: bold; }
 +.control            { font-style: italic; }
 +.reference          { font-style: italic; font-weight: bold; }
 +
 +.unresolved_reference { color: #FC5555; text-decoration: wavy underline; }
 +</style>
 +<pre><code><span class="comment documentation">//! This is a module to test doc injection.</span>
 +<span class="comment documentation">//! ```</span>
 +<span class="comment documentation">//!</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">test</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
 +<span class="comment documentation">//! ```</span>
 +
 +<span class="keyword">mod</span> <span class="module declaration">outline_module</span><span class="semicolon">;</span>
 +
 +<span class="comment documentation">/// ```</span>
 +<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"early doctests should not go boom"</span><span class="semicolon injected">;</span>
 +<span class="comment documentation">/// ```</span>
 +<span class="keyword">struct</span> <span class="struct declaration">Foo</span> <span class="brace">{</span>
 +    <span class="field declaration">bar</span><span class="colon">:</span> <span class="builtin_type">bool</span><span class="comma">,</span>
 +<span class="brace">}</span>
 +
++<span class="comment documentation">/// This is an impl of </span><span class="struct documentation injected intra_doc_link">[`Foo`]</span><span class="comment documentation"> with a code block.</span>
 +<span class="comment documentation">///</span>
 +<span class="comment documentation">/// ```</span>
 +<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span>
 +<span class="comment documentation">///</span>
 +<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="brace injected">}</span>
 +<span class="comment documentation">/// ```</span>
 +<span class="keyword">impl</span> <span class="struct">Foo</span> <span class="brace">{</span>
 +    <span class="comment documentation">/// ```</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"Call me</span>
 +    <span class="comment">//    KILLER WHALE</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="string_literal injected">    Ishmael."</span><span class="semicolon injected">;</span>
 +    <span class="comment documentation">/// ```</span>
 +    <span class="keyword">pub</span> <span class="keyword">const</span> <span class="constant associated declaration public">bar</span><span class="colon">:</span> <span class="builtin_type">bool</span> <span class="operator">=</span> <span class="bool_literal">true</span><span class="semicolon">;</span>
 +
 +    <span class="comment documentation">/// Constructs a new `Foo`.</span>
 +    <span class="comment documentation">///</span>
 +    <span class="comment documentation">/// # Examples</span>
 +    <span class="comment documentation">///</span>
 +    <span class="comment documentation">/// ```</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> #</span><span class="none injected"> </span><span class="attribute_bracket attribute injected">#</span><span class="attribute_bracket attribute injected">!</span><span class="attribute_bracket attribute injected">[</span><span class="builtin_attr attribute injected library">allow</span><span class="parenthesis attribute injected">(</span><span class="none attribute injected">unused_mut</span><span class="parenthesis attribute injected">)</span><span class="attribute_bracket attribute injected">]</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="keyword injected">mut</span><span class="none injected"> </span><span class="variable declaration injected mutable">foo</span><span class="colon injected">:</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
 +    <span class="comment documentation">/// ```</span>
 +    <span class="keyword">pub</span> <span class="keyword">const</span> <span class="keyword">fn</span> <span class="function associated declaration public static">new</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="struct">Foo</span> <span class="brace">{</span>
 +        <span class="struct">Foo</span> <span class="brace">{</span> <span class="field">bar</span><span class="colon">:</span> <span class="bool_literal">true</span> <span class="brace">}</span>
 +    <span class="brace">}</span>
 +
 +    <span class="comment documentation">/// `bar` method on `Foo`.</span>
 +    <span class="comment documentation">///</span>
 +    <span class="comment documentation">/// # Examples</span>
 +    <span class="comment documentation">///</span>
 +    <span class="comment documentation">/// ```</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">use</span><span class="none injected"> </span><span class="module injected">x</span><span class="operator injected">::</span><span class="module injected">y</span><span class="semicolon injected">;</span>
 +    <span class="comment documentation">///</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foo</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
 +    <span class="comment documentation">///</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">// calls bar on foo</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="macro injected">assert</span><span class="macro_bang injected">!</span><span class="parenthesis injected">(</span><span class="none injected">foo</span><span class="operator injected">.</span><span class="none injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
 +    <span class="comment documentation">///</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">bar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="variable injected">foo</span><span class="operator injected">.</span><span class="field injected">bar</span><span class="none injected"> </span><span class="logical injected">||</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="constant injected">bar</span><span class="semicolon injected">;</span>
 +    <span class="comment documentation">///</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">/* multi-line</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">       comment */</span>
 +    <span class="comment documentation">///</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected reference">multi_line_string</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="string_literal injected">"Foo</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="string_literal injected">  bar</span><span class="escape_sequence injected">\n</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="string_literal injected">         "</span><span class="semicolon injected">;</span>
 +    <span class="comment documentation">///</span>
 +    <span class="comment documentation">/// ```</span>
 +    <span class="comment documentation">///</span>
 +    <span class="comment documentation">/// ```rust,no_run</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foobar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="operator injected">.</span><span class="function injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
 +    <span class="comment documentation">/// ```</span>
 +    <span class="comment documentation">///</span>
 +    <span class="comment documentation">/// ~~~rust,no_run</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">// code block with tilde.</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">foobar</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="struct injected">Foo</span><span class="operator injected">::</span><span class="function injected">new</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="operator injected">.</span><span class="function injected">bar</span><span class="parenthesis injected">(</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
 +    <span class="comment documentation">/// ~~~</span>
 +    <span class="comment documentation">///</span>
 +    <span class="comment documentation">/// ```</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="comment injected">// functions</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">fn</span><span class="none injected"> </span><span class="function declaration injected">foo</span><span class="angle injected">&lt;</span><span class="type_param declaration injected">T</span><span class="comma injected">,</span><span class="none injected"> </span><span class="keyword injected">const</span><span class="none injected"> </span><span class="const_param declaration injected">X</span><span class="colon injected">:</span><span class="none injected"> </span><span class="builtin_type injected">usize</span><span class="angle injected">&gt;</span><span class="parenthesis injected">(</span><span class="value_param declaration injected">arg</span><span class="colon injected">:</span><span class="none injected"> </span><span class="builtin_type injected">i32</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="brace injected">{</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="none injected">    </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="variable declaration injected">x</span><span class="colon injected">:</span><span class="none injected"> </span><span class="type_param injected">T</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="const_param injected">X</span><span class="semicolon injected">;</span>
 +    <span class="comment documentation">///</span><span class="comment documentation"> </span><span class="brace injected">}</span>
 +    <span class="comment documentation">/// ```</span>
 +    <span class="comment documentation">///</span>
 +    <span class="comment documentation">/// ```sh</span>
 +    <span class="comment documentation">/// echo 1</span>
 +    <span class="comment documentation">/// ```</span>
 +    <span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function associated declaration public reference">foo</span><span class="parenthesis">(</span><span class="operator">&</span><span class="self_keyword declaration reference">self</span><span class="parenthesis">)</span> <span class="operator">-&gt;</span> <span class="builtin_type">bool</span> <span class="brace">{</span>
 +        <span class="bool_literal">true</span>
 +    <span class="brace">}</span>
 +<span class="brace">}</span>
 +
 +<span class="comment documentation">/// </span><span class="struct documentation injected intra_doc_link">[`Foo`](Foo)</span><span class="comment documentation"> is a struct</span>
 +<span class="comment documentation">/// This function is &gt; </span><span class="function documentation injected intra_doc_link">[`all_the_links`](all_the_links)</span><span class="comment documentation"> &lt;</span>
 +<span class="comment documentation">/// </span><span class="macro documentation injected intra_doc_link">[`noop`](noop)</span><span class="comment documentation"> is a macro below</span>
 +<span class="comment documentation">/// </span><span class="struct documentation injected intra_doc_link">[`Item`]</span><span class="comment documentation"> is a struct in the module </span><span class="module documentation injected intra_doc_link">[`module`]</span>
 +<span class="comment documentation">///</span>
 +<span class="comment documentation">/// [`Item`]: module::Item</span>
 +<span class="comment documentation">/// [mix_and_match]: ThisShouldntResolve</span>
 +<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration public">all_the_links</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
 +
 +<span class="keyword">pub</span> <span class="keyword">mod</span> <span class="module declaration public">module</span> <span class="brace">{</span>
 +    <span class="keyword">pub</span> <span class="keyword">struct</span> <span class="struct declaration public">Item</span><span class="semicolon">;</span>
 +<span class="brace">}</span>
 +
 +<span class="comment documentation">/// ```</span>
 +<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">macro_rules</span><span class="macro_bang injected">!</span><span class="none injected"> </span><span class="macro declaration injected">noop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="parenthesis injected">(</span><span class="punctuation injected">$</span><span class="none injected">expr</span><span class="colon injected">:</span><span class="none injected">expr</span><span class="parenthesis injected">)</span><span class="none injected"> </span><span class="operator injected">=</span><span class="angle injected">&gt;</span><span class="none injected"> </span><span class="brace injected">{</span><span class="none injected"> </span><span class="punctuation injected">$</span><span class="none injected">expr </span><span class="brace injected">}</span><span class="brace injected">}</span>
 +<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="macro injected">noop</span><span class="macro_bang injected">!</span><span class="parenthesis injected">(</span><span class="numeric_literal injected">1</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
 +<span class="comment documentation">/// ```</span>
 +<span class="keyword">macro_rules</span><span class="macro_bang">!</span> <span class="macro declaration">noop</span> <span class="brace">{</span>
 +    <span class="parenthesis">(</span><span class="punctuation">$</span>expr<span class="colon">:</span>expr<span class="parenthesis">)</span> <span class="operator">=</span><span class="angle">&gt;</span> <span class="brace">{</span>
 +        <span class="punctuation">$</span>expr
 +    <span class="brace">}</span>
 +<span class="brace">}</span>
 +
 +<span class="comment documentation">/// ```rust</span>
 +<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
 +<span class="comment documentation">/// ```</span>
 +<span class="comment documentation">///</span>
 +<span class="comment documentation">/// ```</span>
 +<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword control injected">loop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span>
 +<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">cfg_attr</span><span class="parenthesis attribute">(</span><span class="none attribute">not</span><span class="parenthesis attribute">(</span><span class="none attribute">feature</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"false"</span><span class="parenthesis attribute">)</span><span class="comma attribute">,</span> <span class="none attribute">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"</span><span class="keyword control injected">loop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span><span class="string_literal attribute">"</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
 +<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"</span><span class="keyword control injected">loop</span><span class="none injected"> </span><span class="brace injected">{</span><span class="brace injected">}</span><span class="string_literal attribute">"</span><span class="attribute_bracket attribute">]</span>
 +<span class="comment documentation">/// ```</span>
 +<span class="comment documentation">///</span>
 +<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">cfg_attr</span><span class="parenthesis attribute">(</span><span class="none attribute">feature</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"alloc"</span><span class="comma attribute">,</span> <span class="none attribute">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"```rust"</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
 +<span class="attribute_bracket attribute">#</span><span class="attribute_bracket attribute">[</span><span class="builtin_attr attribute library">cfg_attr</span><span class="parenthesis attribute">(</span><span class="none attribute">not</span><span class="parenthesis attribute">(</span><span class="none attribute">feature</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"alloc"</span><span class="parenthesis attribute">)</span><span class="comma attribute">,</span> <span class="none attribute">doc</span> <span class="operator attribute">=</span> <span class="string_literal attribute">"```ignore"</span><span class="parenthesis attribute">)</span><span class="attribute_bracket attribute">]</span>
 +<span class="comment documentation">///</span><span class="comment documentation"> </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="module injected">alloc</span><span class="operator injected">::</span><span class="macro injected">vec</span><span class="macro_bang injected">!</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span>
 +<span class="comment documentation">/// ```</span>
 +<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration public">mix_and_match</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
 +
 +<span class="comment documentation">/**
 +It is beyond me why you'd use these when you got ///
 +```rust
 +</span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation">
 +```
 +</span><span class="function documentation injected intra_doc_link">[`block_comments2`]</span><span class="comment documentation"> tests these with indentation
 + */</span>
 +<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration public">block_comments</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
 +
 +<span class="comment documentation">/**
 +    Really, I don't get it
 +    ```rust
 +</span><span class="comment documentation"> </span><span class="none injected">   </span><span class="keyword injected">let</span><span class="none injected"> </span><span class="punctuation injected">_</span><span class="none injected"> </span><span class="operator injected">=</span><span class="none injected"> </span><span class="function injected">example</span><span class="parenthesis injected">(</span><span class="operator injected">&</span><span class="bracket injected">[</span><span class="numeric_literal injected">1</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">2</span><span class="comma injected">,</span><span class="none injected"> </span><span class="numeric_literal injected">3</span><span class="bracket injected">]</span><span class="parenthesis injected">)</span><span class="semicolon injected">;</span><span class="comment documentation">
 +    ```
 +    </span><span class="function documentation injected intra_doc_link">[`block_comments`]</span><span class="comment documentation"> tests these without indentation
 +*/</span>
 +<span class="keyword">pub</span> <span class="keyword">fn</span> <span class="function declaration public">block_comments2</span><span class="parenthesis">(</span><span class="parenthesis">)</span> <span class="brace">{</span><span class="brace">}</span>
 +
 +</code></pre>
index 382735cb368df3cac89ca0bb72559896b944f28c,0000000000000000000000000000000000000000..46cc667fc454f4f08f842017232a1b28e506e2fd
mode 100644,000000..100644
--- /dev/null
@@@ -1,1096 -1,0 +1,1110 @@@
- use crate::{fixture, FileRange, HlTag, TextRange};
 +use std::time::Instant;
 +
 +use expect_test::{expect_file, ExpectFile};
 +use ide_db::SymbolKind;
 +use test_utils::{bench, bench_fixture, skip_slow_tests, AssertLinear};
 +
- /// This is an impl with a code block.
++use crate::{fixture, FileRange, HighlightConfig, HlTag, TextRange};
++
++const HL_CONFIG: HighlightConfig = HighlightConfig {
++    strings: true,
++    punctuation: true,
++    specialize_punctuation: true,
++    specialize_operator: true,
++    operator: true,
++    inject_doc_comment: true,
++    macro_bang: true,
++    syntactic_name_ref_highlighting: false,
++};
 +
 +#[test]
 +fn attributes() {
 +    check_highlighting(
 +        r#"
 +//- proc_macros: identity
 +//- minicore: derive, copy
 +#[allow(dead_code)]
 +#[rustfmt::skip]
 +#[proc_macros::identity]
 +#[derive(Copy)]
 +/// This is a doc comment
 +// This is a normal comment
 +/// This is a doc comment
 +#[derive(Copy)]
 +// This is another normal comment
 +/// This is another doc comment
 +// This is another normal comment
 +#[derive(Copy)]
 +// The reason for these being here is to test AttrIds
 +struct Foo;
 +"#,
 +        expect_file!["./test_data/highlight_attributes.html"],
 +        false,
 +    );
 +}
 +
 +#[test]
 +fn macros() {
 +    check_highlighting(
 +        r#"
 +//- proc_macros: mirror
 +proc_macros::mirror! {
 +    {
 +        ,i32 :x pub
 +        ,i32 :y pub
 +    } Foo struct
 +}
 +macro_rules! def_fn {
 +    ($($tt:tt)*) => {$($tt)*}
 +}
 +
 +def_fn! {
 +    fn bar() -> u32 {
 +        100
 +    }
 +}
 +
 +macro_rules! dont_color_me_braces {
 +    () => {0}
 +}
 +
 +macro_rules! noop {
 +    ($expr:expr) => {
 +        $expr
 +    }
 +}
 +
 +/// textually shadow previous definition
 +macro_rules! noop {
 +    ($expr:expr) => {
 +        $expr
 +    }
 +}
 +
 +macro_rules! keyword_frag {
 +    ($type:ty) => ($type)
 +}
 +
 +macro with_args($i:ident) {
 +    $i
 +}
 +
 +macro without_args {
 +    ($i:ident) => {
 +        $i
 +    }
 +}
 +
 +fn main() {
 +    println!("Hello, {}!", 92);
 +    dont_color_me_braces!();
 +    noop!(noop!(1));
 +}
 +"#,
 +        expect_file!["./test_data/highlight_macros.html"],
 +        false,
 +    );
 +}
 +
 +/// If what you want to test feels like a specific entity consider making a new test instead,
 +/// this test fixture here in fact should shrink instead of grow ideally.
 +#[test]
 +fn test_highlighting() {
 +    check_highlighting(
 +        r#"
 +//- minicore: derive, copy
 +//- /main.rs crate:main deps:foo
 +use inner::{self as inner_mod};
 +mod inner {}
 +
 +pub mod ops {
 +    #[lang = "fn_once"]
 +    pub trait FnOnce<Args> {}
 +
 +    #[lang = "fn_mut"]
 +    pub trait FnMut<Args>: FnOnce<Args> {}
 +
 +    #[lang = "fn"]
 +    pub trait Fn<Args>: FnMut<Args> {}
 +}
 +
 +struct Foo {
 +    x: u32,
 +}
 +
 +trait Bar {
 +    fn bar(&self) -> i32;
 +}
 +
 +impl Bar for Foo {
 +    fn bar(&self) -> i32 {
 +        self.x
 +    }
 +}
 +
 +impl Foo {
 +    fn baz(mut self, f: Foo) -> i32 {
 +        f.baz(self)
 +    }
 +
 +    fn qux(&mut self) {
 +        self.x = 0;
 +    }
 +
 +    fn quop(&self) -> i32 {
 +        self.x
 +    }
 +}
 +
 +use self::FooCopy::{self as BarCopy};
 +
 +#[derive(Copy)]
 +struct FooCopy {
 +    x: u32,
 +}
 +
 +impl FooCopy {
 +    fn baz(self, f: FooCopy) -> u32 {
 +        f.baz(self)
 +    }
 +
 +    fn qux(&mut self) {
 +        self.x = 0;
 +    }
 +
 +    fn quop(&self) -> u32 {
 +        self.x
 +    }
 +}
 +
 +fn str() {
 +    str();
 +}
 +
 +fn foo<'a, T>() -> T {
 +    foo::<'a, i32>()
 +}
 +
 +fn never() -> ! {
 +    loop {}
 +}
 +
 +fn const_param<const FOO: usize>() -> usize {
 +    const_param::<{ FOO }>();
 +    FOO
 +}
 +
 +use ops::Fn;
 +fn baz<F: Fn() -> ()>(f: F) {
 +    f()
 +}
 +
 +fn foobar() -> impl Copy {}
 +
 +fn foo() {
 +    let bar = foobar();
 +}
 +
 +// comment
 +fn main() {
 +    let mut x = 42;
 +    x += 1;
 +    let y = &mut x;
 +    let z = &y;
 +
 +    let Foo { x: z, y } = Foo { x: z, y };
 +
 +    y;
 +
 +    let mut foo = Foo { x, y: x };
 +    let foo2 = Foo { x, y: x };
 +    foo.quop();
 +    foo.qux();
 +    foo.baz(foo2);
 +
 +    let mut copy = FooCopy { x };
 +    copy.quop();
 +    copy.qux();
 +    copy.baz(copy);
 +
 +    let a = |x| x;
 +    let bar = Foo::baz;
 +
 +    let baz = (-42,);
 +    let baz = -baz.0;
 +
 +    let _ = !true;
 +
 +    'foo: loop {
 +        break 'foo;
 +        continue 'foo;
 +    }
 +}
 +
 +enum Option<T> {
 +    Some(T),
 +    None,
 +}
 +use Option::*;
 +
 +impl<T> Option<T> {
 +    fn and<U>(self, other: Option<U>) -> Option<(T, U)> {
 +        match other {
 +            None => unimplemented!(),
 +            Nope => Nope,
 +        }
 +    }
 +}
 +
 +async fn learn_and_sing() {
 +    let song = learn_song().await;
 +    sing_song(song).await;
 +}
 +
 +async fn async_main() {
 +    let f1 = learn_and_sing();
 +    let f2 = dance();
 +    futures::join!(f1, f2);
 +}
 +
 +fn use_foo_items() {
 +    let bob = foo::Person {
 +        name: "Bob",
 +        age: foo::consts::NUMBER,
 +    };
 +
 +    let control_flow = foo::identity(foo::ControlFlow::Continue);
 +
 +    if control_flow.should_die() {
 +        foo::die!();
 +    }
 +}
 +
 +pub enum Bool { True, False }
 +
 +impl Bool {
 +    pub const fn to_primitive(self) -> bool {
 +        true
 +    }
 +}
 +const USAGE_OF_BOOL:bool = Bool::True.to_primitive();
 +
 +trait Baz {
 +    type Qux;
 +}
 +
 +fn baz<T>(t: T)
 +where
 +    T: Baz,
 +    <T as Baz>::Qux: Bar {}
 +
 +fn gp_shadows_trait<Baz: Bar>() {
 +    Baz::bar;
 +}
 +
 +//- /foo.rs crate:foo
 +pub struct Person {
 +    pub name: &'static str,
 +    pub age: u8,
 +}
 +
 +pub enum ControlFlow {
 +    Continue,
 +    Die,
 +}
 +
 +impl ControlFlow {
 +    pub fn should_die(self) -> bool {
 +        matches!(self, ControlFlow::Die)
 +    }
 +}
 +
 +pub fn identity<T>(x: T) -> T { x }
 +
 +pub mod consts {
 +    pub const NUMBER: i64 = 92;
 +}
 +
 +macro_rules! die {
 +    () => {
 +        panic!();
 +    };
 +}
 +"#,
 +        expect_file!["./test_data/highlight_general.html"],
 +        false,
 +    );
 +}
 +
 +#[test]
 +fn test_lifetime_highlighting() {
 +    check_highlighting(
 +        r#"
 +//- minicore: derive
 +
 +#[derive()]
 +struct Foo<'a, 'b, 'c> where 'a: 'a, 'static: 'static {
 +    field: &'a (),
 +    field2: &'static (),
 +}
 +impl<'a> Foo<'_, 'a, 'static>
 +where
 +    'a: 'a,
 +    'static: 'static
 +{}
 +"#,
 +        expect_file!["./test_data/highlight_lifetimes.html"],
 +        false,
 +    );
 +}
 +
 +#[test]
 +fn test_keyword_highlighting() {
 +    check_highlighting(
 +        r#"
 +extern crate self;
 +
 +use crate;
 +use self;
 +mod __ {
 +    use super::*;
 +}
 +
 +macro_rules! void {
 +    ($($tt:tt)*) => {}
 +}
 +void!(Self);
 +struct __ where Self:;
 +fn __(_: Self) {}
 +"#,
 +        expect_file!["./test_data/highlight_keywords.html"],
 +        false,
 +    );
 +}
 +
 +#[test]
 +fn test_string_highlighting() {
 +    // The format string detection is based on macro-expansion,
 +    // thus, we have to copy the macro definition from `std`
 +    check_highlighting(
 +        r#"
 +macro_rules! println {
 +    ($($arg:tt)*) => ({
 +        $crate::io::_print($crate::format_args_nl!($($arg)*));
 +    })
 +}
 +#[rustc_builtin_macro]
 +#[macro_export]
 +macro_rules! format_args {}
 +#[rustc_builtin_macro]
 +#[macro_export]
 +macro_rules! const_format_args {}
 +#[rustc_builtin_macro]
 +#[macro_export]
 +macro_rules! format_args_nl {}
 +
 +mod panic {
 +    pub macro panic_2015 {
 +        () => (
 +            $crate::panicking::panic("explicit panic")
 +        ),
 +        ($msg:literal $(,)?) => (
 +            $crate::panicking::panic($msg)
 +        ),
 +        // Use `panic_str` instead of `panic_display::<&str>` for non_fmt_panic lint.
 +        ($msg:expr $(,)?) => (
 +            $crate::panicking::panic_str($msg)
 +        ),
 +        // Special-case the single-argument case for const_panic.
 +        ("{}", $arg:expr $(,)?) => (
 +            $crate::panicking::panic_display(&$arg)
 +        ),
 +        ($fmt:expr, $($arg:tt)+) => (
 +            $crate::panicking::panic_fmt($crate::const_format_args!($fmt, $($arg)+))
 +        ),
 +    }
 +}
 +
 +#[rustc_builtin_macro(std_panic)]
 +#[macro_export]
 +macro_rules! panic {}
 +#[rustc_builtin_macro]
 +macro_rules! assert {}
 +#[rustc_builtin_macro]
 +macro_rules! asm {}
 +
 +macro_rules! toho {
 +    () => ($crate::panic!("not yet implemented"));
 +    ($($arg:tt)+) => ($crate::panic!("not yet implemented: {}", $crate::format_args!($($arg)+)));
 +}
 +
 +fn main() {
 +    println!("Hello {{Hello}}");
 +    // from https://doc.rust-lang.org/std/fmt/index.html
 +    println!("Hello");                 // => "Hello"
 +    println!("Hello, {}!", "world");   // => "Hello, world!"
 +    println!("The number is {}", 1);   // => "The number is 1"
 +    println!("{:?}", (3, 4));          // => "(3, 4)"
 +    println!("{value}", value=4);      // => "4"
 +    println!("{} {}", 1, 2);           // => "1 2"
 +    println!("{:04}", 42);             // => "0042" with leading zerosV
 +    println!("{1} {} {0} {}", 1, 2);   // => "2 1 1 2"
 +    println!("{argument}", argument = "test");   // => "test"
 +    println!("{name} {}", 1, name = 2);          // => "2 1"
 +    println!("{a} {c} {b}", a="a", b='b', c=3);  // => "a 3 b"
 +    println!("{{{}}}", 2);                       // => "{2}"
 +    println!("Hello {:5}!", "x");
 +    println!("Hello {:1$}!", "x", 5);
 +    println!("Hello {1:0$}!", 5, "x");
 +    println!("Hello {:width$}!", "x", width = 5);
 +    println!("Hello {:<5}!", "x");
 +    println!("Hello {:-<5}!", "x");
 +    println!("Hello {:^5}!", "x");
 +    println!("Hello {:>5}!", "x");
 +    println!("Hello {:+}!", 5);
 +    println!("{:#x}!", 27);
 +    println!("Hello {:05}!", 5);
 +    println!("Hello {:05}!", -5);
 +    println!("{:#010x}!", 27);
 +    println!("Hello {0} is {1:.5}", "x", 0.01);
 +    println!("Hello {1} is {2:.0$}", 5, "x", 0.01);
 +    println!("Hello {0} is {2:.1$}", "x", 5, 0.01);
 +    println!("Hello {} is {:.*}",    "x", 5, 0.01);
 +    println!("Hello {} is {2:.*}",   "x", 5, 0.01);
 +    println!("Hello {} is {number:.prec$}", "x", prec = 5, number = 0.01);
 +    println!("{}, `{name:.*}` has 3 fractional digits", "Hello", 3, name=1234.56);
 +    println!("{}, `{name:.*}` has 3 characters", "Hello", 3, name="1234.56");
 +    println!("{}, `{name:>8.*}` has 3 right-aligned characters", "Hello", 3, name="1234.56");
 +
 +    let _ = "{}"
 +    let _ = "{{}}";
 +
 +    println!("Hello {{}}");
 +    println!("{{ Hello");
 +    println!("Hello }}");
 +    println!("{{Hello}}");
 +    println!("{{ Hello }}");
 +    println!("{{Hello }}");
 +    println!("{{ Hello}}");
 +
 +    println!(r"Hello, {}!", "world");
 +
 +    // escape sequences
 +    println!("Hello\nWorld");
 +    println!("\u{48}\x65\x6C\x6C\x6F World");
 +
 +    let _ = "\x28\x28\x00\x63\n";
 +    let _ = b"\x28\x28\x00\x63\n";
 +
 +    println!("{\x41}", A = 92);
 +    println!("{ничоси}", ничоси = 92);
 +
 +    println!("{:x?} {} ", thingy, n2);
 +    panic!("{}", 0);
 +    panic!("more {}", 1);
 +    assert!(true, "{}", 1);
 +    assert!(true, "{} asdasd", 1);
 +    toho!("{}fmt", 0);
 +    asm!("mov eax, {0}");
 +    format_args!(concat!("{}"), "{}");
 +}"#,
 +        expect_file!["./test_data/highlight_strings.html"],
 +        false,
 +    );
 +}
 +
 +#[test]
 +fn test_unsafe_highlighting() {
 +    check_highlighting(
 +        r#"
 +macro_rules! id {
 +    ($($tt:tt)*) => {
 +        $($tt)*
 +    };
 +}
 +macro_rules! unsafe_deref {
 +    () => {
 +        *(&() as *const ())
 +    };
 +}
 +static mut MUT_GLOBAL: Struct = Struct { field: 0 };
 +static GLOBAL: Struct = Struct { field: 0 };
 +unsafe fn unsafe_fn() {}
 +
 +union Union {
 +    a: u32,
 +    b: f32,
 +}
 +
 +struct Struct { field: i32 }
 +impl Struct {
 +    unsafe fn unsafe_method(&self) {}
 +}
 +
 +#[repr(packed)]
 +struct Packed {
 +    a: u16,
 +}
 +
 +unsafe trait UnsafeTrait {}
 +unsafe impl UnsafeTrait for Packed {}
 +impl !UnsafeTrait for () {}
 +
 +fn unsafe_trait_bound<T: UnsafeTrait>(_: T) {}
 +
 +trait DoTheAutoref {
 +    fn calls_autoref(&self);
 +}
 +
 +impl DoTheAutoref for u16 {
 +    fn calls_autoref(&self) {}
 +}
 +
 +fn main() {
 +    let x = &5 as *const _ as *const usize;
 +    let u = Union { b: 0 };
 +
 +    id! {
 +        unsafe { unsafe_deref!() }
 +    };
 +
 +    unsafe {
 +        unsafe_deref!();
 +        id! { unsafe_deref!() };
 +
 +        // unsafe fn and method calls
 +        unsafe_fn();
 +        let b = u.b;
 +        match u {
 +            Union { b: 0 } => (),
 +            Union { a } => (),
 +        }
 +        Struct { field: 0 }.unsafe_method();
 +
 +        // unsafe deref
 +        *x;
 +
 +        // unsafe access to a static mut
 +        MUT_GLOBAL.field;
 +        GLOBAL.field;
 +
 +        // unsafe ref of packed fields
 +        let packed = Packed { a: 0 };
 +        let a = &packed.a;
 +        let ref a = packed.a;
 +        let Packed { ref a } = packed;
 +        let Packed { a: ref _a } = packed;
 +
 +        // unsafe auto ref of packed field
 +        packed.a.calls_autoref();
 +    }
 +}
 +"#,
 +        expect_file!["./test_data/highlight_unsafe.html"],
 +        false,
 +    );
 +}
 +
 +#[test]
 +fn test_highlight_doc_comment() {
 +    check_highlighting(
 +        r#"
 +//- /main.rs
 +//! This is a module to test doc injection.
 +//! ```
 +//! fn test() {}
 +//! ```
 +
 +mod outline_module;
 +
 +/// ```
 +/// let _ = "early doctests should not go boom";
 +/// ```
 +struct Foo {
 +    bar: bool,
 +}
 +
-         .highlight_range(FileRange { file_id, range: TextRange::at(45.into(), 1.into()) })
++/// This is an impl of [`Foo`] with a code block.
 +///
 +/// ```
 +/// fn foo() {
 +///
 +/// }
 +/// ```
 +impl Foo {
 +    /// ```
 +    /// let _ = "Call me
 +    //    KILLER WHALE
 +    ///     Ishmael.";
 +    /// ```
 +    pub const bar: bool = true;
 +
 +    /// Constructs a new `Foo`.
 +    ///
 +    /// # Examples
 +    ///
 +    /// ```
 +    /// # #![allow(unused_mut)]
 +    /// let mut foo: Foo = Foo::new();
 +    /// ```
 +    pub const fn new() -> Foo {
 +        Foo { bar: true }
 +    }
 +
 +    /// `bar` method on `Foo`.
 +    ///
 +    /// # Examples
 +    ///
 +    /// ```
 +    /// use x::y;
 +    ///
 +    /// let foo = Foo::new();
 +    ///
 +    /// // calls bar on foo
 +    /// assert!(foo.bar());
 +    ///
 +    /// let bar = foo.bar || Foo::bar;
 +    ///
 +    /// /* multi-line
 +    ///        comment */
 +    ///
 +    /// let multi_line_string = "Foo
 +    ///   bar\n
 +    ///          ";
 +    ///
 +    /// ```
 +    ///
 +    /// ```rust,no_run
 +    /// let foobar = Foo::new().bar();
 +    /// ```
 +    ///
 +    /// ~~~rust,no_run
 +    /// // code block with tilde.
 +    /// let foobar = Foo::new().bar();
 +    /// ~~~
 +    ///
 +    /// ```
 +    /// // functions
 +    /// fn foo<T, const X: usize>(arg: i32) {
 +    ///     let x: T = X;
 +    /// }
 +    /// ```
 +    ///
 +    /// ```sh
 +    /// echo 1
 +    /// ```
 +    pub fn foo(&self) -> bool {
 +        true
 +    }
 +}
 +
 +/// [`Foo`](Foo) is a struct
 +/// This function is > [`all_the_links`](all_the_links) <
 +/// [`noop`](noop) is a macro below
 +/// [`Item`] is a struct in the module [`module`]
 +///
 +/// [`Item`]: module::Item
 +/// [mix_and_match]: ThisShouldntResolve
 +pub fn all_the_links() {}
 +
 +pub mod module {
 +    pub struct Item;
 +}
 +
 +/// ```
 +/// macro_rules! noop { ($expr:expr) => { $expr }}
 +/// noop!(1);
 +/// ```
 +macro_rules! noop {
 +    ($expr:expr) => {
 +        $expr
 +    }
 +}
 +
 +/// ```rust
 +/// let _ = example(&[1, 2, 3]);
 +/// ```
 +///
 +/// ```
 +/// loop {}
 +#[cfg_attr(not(feature = "false"), doc = "loop {}")]
 +#[doc = "loop {}"]
 +/// ```
 +///
 +#[cfg_attr(feature = "alloc", doc = "```rust")]
 +#[cfg_attr(not(feature = "alloc"), doc = "```ignore")]
 +/// let _ = example(&alloc::vec![1, 2, 3]);
 +/// ```
 +pub fn mix_and_match() {}
 +
 +/**
 +It is beyond me why you'd use these when you got ///
 +```rust
 +let _ = example(&[1, 2, 3]);
 +```
 +[`block_comments2`] tests these with indentation
 + */
 +pub fn block_comments() {}
 +
 +/**
 +    Really, I don't get it
 +    ```rust
 +    let _ = example(&[1, 2, 3]);
 +    ```
 +    [`block_comments`] tests these without indentation
 +*/
 +pub fn block_comments2() {}
 +
 +//- /outline_module.rs
 +//! This is an outline module whose purpose is to test that its inline attribute injection does not
 +//! spill into its parent.
 +//! ```
 +//! fn test() {}
 +//! ```
 +"#,
 +        expect_file!["./test_data/highlight_doctest.html"],
 +        false,
 +    );
 +}
 +
 +#[test]
 +fn test_extern_crate() {
 +    check_highlighting(
 +        r#"
 +//- /main.rs crate:main deps:std,alloc
 +extern crate std;
 +extern crate alloc as abc;
 +//- /std/lib.rs crate:std
 +pub struct S;
 +//- /alloc/lib.rs crate:alloc
 +pub struct A
 +"#,
 +        expect_file!["./test_data/highlight_extern_crate.html"],
 +        false,
 +    );
 +}
 +
 +#[test]
 +fn test_crate_root() {
 +    check_highlighting(
 +        r#"
 +//- minicore: iterators
 +//- /main.rs crate:main deps:foo
 +extern crate foo;
 +use core::iter;
 +
 +pub const NINETY_TWO: u8 = 92;
 +
 +use foo as foooo;
 +
 +pub(crate) fn main() {
 +    let baz = iter::repeat(92);
 +}
 +
 +mod bar {
 +    pub(in super) const FORTY_TWO: u8 = 42;
 +
 +    mod baz {
 +        use super::super::NINETY_TWO;
 +        use crate::foooo::Point;
 +
 +        pub(in super::super) const TWENTY_NINE: u8 = 29;
 +    }
 +}
 +//- /foo.rs crate:foo
 +struct Point {
 +    x: u8,
 +    y: u8,
 +}
 +
 +mod inner {
 +    pub(super) fn swap(p: crate::Point) -> crate::Point {
 +        crate::Point { x: p.y, y: p.x }
 +    }
 +}
 +"#,
 +        expect_file!["./test_data/highlight_crate_root.html"],
 +        false,
 +    );
 +}
 +
 +#[test]
 +fn test_default_library() {
 +    check_highlighting(
 +        r#"
 +//- minicore: option, iterators
 +use core::iter;
 +
 +fn main() {
 +    let foo = Some(92);
 +    let nums = iter::repeat(foo.unwrap());
 +}
 +"#,
 +        expect_file!["./test_data/highlight_default_library.html"],
 +        false,
 +    );
 +}
 +
 +#[test]
 +fn test_associated_function() {
 +    check_highlighting(
 +        r#"
 +fn not_static() {}
 +
 +struct foo {}
 +
 +impl foo {
 +    pub fn is_static() {}
 +    pub fn is_not_static(&self) {}
 +}
 +
 +trait t {
 +    fn t_is_static() {}
 +    fn t_is_not_static(&self) {}
 +}
 +
 +impl t for foo {
 +    pub fn is_static() {}
 +    pub fn is_not_static(&self) {}
 +}
 +"#,
 +        expect_file!["./test_data/highlight_assoc_functions.html"],
 +        false,
 +    )
 +}
 +
 +#[test]
 +fn test_injection() {
 +    check_highlighting(
 +        r##"
 +fn fixture(ra_fixture: &str) {}
 +
 +fn main() {
 +    fixture(r#"
 +trait Foo {
 +    fn foo() {
 +        println!("2 + 2 = {}", 4);
 +    }
 +}"#
 +    );
 +    fixture(r"
 +fn foo() {
 +    foo(\$0{
 +        92
 +    }\$0)
 +}"
 +    );
 +}
 +"##,
 +        expect_file!["./test_data/highlight_injection.html"],
 +        false,
 +    );
 +}
 +
 +#[test]
 +fn test_operators() {
 +    check_highlighting(
 +        r##"
 +fn main() {
 +    1 + 1 - 1 * 1 / 1 % 1 | 1 & 1 ! 1 ^ 1 >> 1 << 1;
 +    let mut a = 0;
 +    a += 1;
 +    a -= 1;
 +    a *= 1;
 +    a /= 1;
 +    a %= 1;
 +    a |= 1;
 +    a &= 1;
 +    a ^= 1;
 +    a >>= 1;
 +    a <<= 1;
 +}
 +"##,
 +        expect_file!["./test_data/highlight_operators.html"],
 +        false,
 +    );
 +}
 +
 +#[test]
 +fn test_mod_hl_injection() {
 +    check_highlighting(
 +        r##"
 +//- /foo.rs
 +//! [Struct]
 +//! This is an intra doc injection test for modules
 +//! [Struct]
 +//! This is an intra doc injection test for modules
 +
 +pub struct Struct;
 +//- /lib.rs crate:foo
 +/// [crate::foo::Struct]
 +/// This is an intra doc injection test for modules
 +/// [crate::foo::Struct]
 +/// This is an intra doc injection test for modules
 +mod foo;
 +"##,
 +        expect_file!["./test_data/highlight_module_docs_inline.html"],
 +        false,
 +    );
 +    check_highlighting(
 +        r##"
 +//- /lib.rs crate:foo
 +/// [crate::foo::Struct]
 +/// This is an intra doc injection test for modules
 +/// [crate::foo::Struct]
 +/// This is an intra doc injection test for modules
 +mod foo;
 +//- /foo.rs
 +//! [Struct]
 +//! This is an intra doc injection test for modules
 +//! [Struct]
 +//! This is an intra doc injection test for modules
 +
 +pub struct Struct;
 +"##,
 +        expect_file!["./test_data/highlight_module_docs_outline.html"],
 +        false,
 +    );
 +}
 +
 +#[test]
 +#[cfg_attr(
 +    not(all(unix, target_pointer_width = "64")),
 +    ignore = "depends on `DefaultHasher` outputs"
 +)]
 +fn test_rainbow_highlighting() {
 +    check_highlighting(
 +        r#"
 +fn main() {
 +    let hello = "hello";
 +    let x = hello.to_string();
 +    let y = hello.to_string();
 +
 +    let x = "other color please!";
 +    let y = x.to_string();
 +}
 +
 +fn bar() {
 +    let mut hello = "hello";
 +}
 +"#,
 +        expect_file!["./test_data/highlight_rainbow.html"],
 +        true,
 +    );
 +}
 +
 +#[test]
 +fn test_ranges() {
 +    let (analysis, file_id) = fixture::file(
 +        r#"
 +#[derive(Clone, Debug)]
 +struct Foo {
 +    pub x: i32,
 +    pub y: i32,
 +}
 +"#,
 +    );
 +
 +    // The "x"
 +    let highlights = &analysis
-     let _ = analysis.highlight(file_id).unwrap();
++        .highlight_range(
++            HL_CONFIG,
++            FileRange { file_id, range: TextRange::at(45.into(), 1.into()) },
++        )
 +        .unwrap();
 +
 +    assert_eq!(&highlights[0].highlight.to_string(), "field.declaration.public");
 +}
 +
 +#[test]
 +fn ranges_sorted() {
 +    let (analysis, file_id) = fixture::file(
 +        r#"
 +#[foo(bar = "bar")]
 +macro_rules! test {}
 +}"#
 +        .trim(),
 +    );
-             .highlight(file_id)
++    let _ = analysis.highlight(HL_CONFIG, file_id).unwrap();
 +}
 +
 +/// Highlights the code given by the `ra_fixture` argument, renders the
 +/// result as HTML, and compares it with the HTML file given as `snapshot`.
 +/// Note that the `snapshot` file is overwritten by the rendered HTML.
 +fn check_highlighting(ra_fixture: &str, expect: ExpectFile, rainbow: bool) {
 +    let (analysis, file_id) = fixture::file(ra_fixture.trim());
 +    let actual_html = &analysis.highlight_as_html(file_id, rainbow).unwrap();
 +    expect.assert_eq(actual_html)
 +}
 +
 +#[test]
 +fn benchmark_syntax_highlighting_long_struct() {
 +    if skip_slow_tests() {
 +        return;
 +    }
 +
 +    let fixture = bench_fixture::big_struct();
 +    let (analysis, file_id) = fixture::file(&fixture);
 +
 +    let hash = {
 +        let _pt = bench("syntax highlighting long struct");
 +        analysis
-                 .highlight(file_id)
++            .highlight(HL_CONFIG, file_id)
 +            .unwrap()
 +            .iter()
 +            .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct))
 +            .count()
 +    };
 +    assert_eq!(hash, 2001);
 +}
 +
 +#[test]
 +fn syntax_highlighting_not_quadratic() {
 +    if skip_slow_tests() {
 +        return;
 +    }
 +
 +    let mut al = AssertLinear::default();
 +    while al.next_round() {
 +        for i in 6..=10 {
 +            let n = 1 << i;
 +
 +            let fixture = bench_fixture::big_struct_n(n);
 +            let (analysis, file_id) = fixture::file(&fixture);
 +
 +            let time = Instant::now();
 +
 +            let hash = analysis
-             .highlight(file_id)
++                .highlight(HL_CONFIG, file_id)
 +                .unwrap()
 +                .iter()
 +                .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct))
 +                .count();
 +            assert!(hash > n as usize);
 +
 +            let elapsed = time.elapsed();
 +            al.sample(n as f64, elapsed.as_millis() as f64);
 +        }
 +    }
 +}
 +
 +#[test]
 +fn benchmark_syntax_highlighting_parser() {
 +    if skip_slow_tests() {
 +        return;
 +    }
 +
 +    let fixture = bench_fixture::glorious_old_parser();
 +    let (analysis, file_id) = fixture::file(&fixture);
 +
 +    let hash = {
 +        let _pt = bench("syntax highlighting parser");
 +        analysis
++            .highlight(HL_CONFIG, file_id)
 +            .unwrap()
 +            .iter()
 +            .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Function))
 +            .count()
 +    };
 +    assert_eq!(hash, 1609);
 +}
index bf7b7efe28228885712908e998c30b31135573d5,0000000000000000000000000000000000000000..17a1e385b77248b4ed974d271745a3ffeddf5919
mode 100644,000000..100644
--- /dev/null
@@@ -1,93 -1,0 +1,94 @@@
-     FxHashSet, RootDatabase,
 +use std::sync::Arc;
 +
 +use dot::{Id, LabelText};
 +use ide_db::{
 +    base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceDatabaseExt},
-     crates_to_render: FxHashSet<CrateId>,
++    RootDatabase,
 +};
++use stdx::hash::NoHashHashSet;
 +
 +// Feature: View Crate Graph
 +//
 +// Renders the currently loaded crate graph as an SVG graphic. Requires the `dot` tool, which
 +// is part of graphviz, to be installed.
 +//
 +// Only workspace crates are included, no crates.io dependencies or sysroot crates.
 +//
 +// |===
 +// | Editor  | Action Name
 +//
 +// | VS Code | **rust-analyzer: View Crate Graph**
 +// |===
 +pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result<String, String> {
 +    let crate_graph = db.crate_graph();
 +    let crates_to_render = crate_graph
 +        .iter()
 +        .filter(|krate| {
 +            if full {
 +                true
 +            } else {
 +                // Only render workspace crates
 +                let root_id = db.file_source_root(crate_graph[*krate].root_file_id);
 +                !db.source_root(root_id).is_library
 +            }
 +        })
 +        .collect();
 +    let graph = DotCrateGraph { graph: crate_graph, crates_to_render };
 +
 +    let mut dot = Vec::new();
 +    dot::render(&graph, &mut dot).unwrap();
 +    Ok(String::from_utf8(dot).unwrap())
 +}
 +
 +struct DotCrateGraph {
 +    graph: Arc<CrateGraph>,
++    crates_to_render: NoHashHashSet<CrateId>,
 +}
 +
 +type Edge<'a> = (CrateId, &'a Dependency);
 +
 +impl<'a> dot::GraphWalk<'a, CrateId, Edge<'a>> for DotCrateGraph {
 +    fn nodes(&'a self) -> dot::Nodes<'a, CrateId> {
 +        self.crates_to_render.iter().copied().collect()
 +    }
 +
 +    fn edges(&'a self) -> dot::Edges<'a, Edge<'a>> {
 +        self.crates_to_render
 +            .iter()
 +            .flat_map(|krate| {
 +                self.graph[*krate]
 +                    .dependencies
 +                    .iter()
 +                    .filter(|dep| self.crates_to_render.contains(&dep.crate_id))
 +                    .map(move |dep| (*krate, dep))
 +            })
 +            .collect()
 +    }
 +
 +    fn source(&'a self, edge: &Edge<'a>) -> CrateId {
 +        edge.0
 +    }
 +
 +    fn target(&'a self, edge: &Edge<'a>) -> CrateId {
 +        edge.1.crate_id
 +    }
 +}
 +
 +impl<'a> dot::Labeller<'a, CrateId, Edge<'a>> for DotCrateGraph {
 +    fn graph_id(&'a self) -> Id<'a> {
 +        Id::new("rust_analyzer_crate_graph").unwrap()
 +    }
 +
 +    fn node_id(&'a self, n: &CrateId) -> Id<'a> {
 +        Id::new(format!("_{}", n.0)).unwrap()
 +    }
 +
 +    fn node_shape(&'a self, _node: &CrateId) -> Option<LabelText<'a>> {
 +        Some(LabelText::LabelStr("box".into()))
 +    }
 +
 +    fn node_label(&'a self, n: &CrateId) -> LabelText<'a> {
 +        let name = self.graph[*n].display_name.as_ref().map_or("(unnamed crate)", |name| &*name);
 +        LabelText::LabelStr(name.into())
 +    }
 +}
index 7e21a808da0a481af06c98aa2829ccdc7f27a17d,0000000000000000000000000000000000000000..bc1224af9b212c639bcf80026661554887f19d07
mode 100644,000000..100644
--- /dev/null
@@@ -1,466 -1,0 +1,471 @@@
-         if !p.at_ts(PATTERN_FIRST) {
 +use super::*;
 +
 +pub(super) const PATTERN_FIRST: TokenSet =
 +    expressions::LITERAL_FIRST.union(paths::PATH_FIRST).union(TokenSet::new(&[
 +        T![box],
 +        T![ref],
 +        T![mut],
 +        T!['('],
 +        T!['['],
 +        T![&],
 +        T![_],
 +        T![-],
 +        T![.],
 +    ]));
 +
++const PAT_TOP_FIRST: TokenSet = PATTERN_FIRST.union(TokenSet::new(&[T![|]]));
++
 +pub(crate) fn pattern(p: &mut Parser<'_>) {
 +    pattern_r(p, PAT_RECOVERY_SET);
 +}
 +
 +/// Parses a pattern list separated by pipes `|`.
 +pub(super) fn pattern_top(p: &mut Parser<'_>) {
 +    pattern_top_r(p, PAT_RECOVERY_SET);
 +}
 +
 +pub(crate) fn pattern_single(p: &mut Parser<'_>) {
 +    pattern_single_r(p, PAT_RECOVERY_SET);
 +}
 +
 +/// Parses a pattern list separated by pipes `|`
 +/// using the given `recovery_set`.
 +pub(super) fn pattern_top_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
 +    p.eat(T![|]);
 +    pattern_r(p, recovery_set);
 +}
 +
 +/// Parses a pattern list separated by pipes `|`, with no leading `|`,using the
 +/// given `recovery_set`.
 +
 +// test or_pattern
 +// fn main() {
 +//     match () {
 +//         (_ | _) => (),
 +//         &(_ | _) => (),
 +//         (_ | _,) => (),
 +//         [_ | _,] => (),
 +//     }
 +// }
 +fn pattern_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
 +    let m = p.start();
 +    pattern_single_r(p, recovery_set);
 +
 +    if !p.at(T![|]) {
 +        m.abandon(p);
 +        return;
 +    }
 +    while p.eat(T![|]) {
 +        pattern_single_r(p, recovery_set);
 +    }
 +    m.complete(p, OR_PAT);
 +}
 +
 +fn pattern_single_r(p: &mut Parser<'_>, recovery_set: TokenSet) {
 +    if let Some(lhs) = atom_pat(p, recovery_set) {
 +        // test range_pat
 +        // fn main() {
 +        //     match 92 {
 +        //         0 ... 100 => (),
 +        //         101 ..= 200 => (),
 +        //         200 .. 301 => (),
 +        //         302 .. => (),
 +        //     }
 +        //
 +        //     match Some(10 as u8) {
 +        //         Some(0) | None => (),
 +        //         Some(1..) => ()
 +        //     }
 +        //
 +        //     match () {
 +        //         S { a: 0 } => (),
 +        //         S { a: 1.. } => (),
 +        //     }
 +        //
 +        //     match () {
 +        //         [0] => (),
 +        //         [1..] => (),
 +        //     }
 +        //
 +        //     match (10 as u8, 5 as u8) {
 +        //         (0, _) => (),
 +        //         (1.., _) => ()
 +        //     }
 +        // }
 +
 +        // FIXME: support half_open_range_patterns (`..=2`),
 +        // exclusive_range_pattern (`..5`) with missing lhs
 +        for range_op in [T![...], T![..=], T![..]] {
 +            if p.at(range_op) {
 +                let m = lhs.precede(p);
 +                p.bump(range_op);
 +
 +                // testing if we're at one of the following positions:
 +                // `0 .. =>`
 +                //       ^
 +                // `let 0 .. =`
 +                //           ^
 +                // `let 0..: _ =`
 +                //         ^
 +                // (1.., _)
 +                //     ^
 +                // `Some(0 .. )`
 +                //            ^
 +                // `S { t: 0.. }`
 +                //             ^
 +                // `[0..]`
 +                //      ^
 +                if matches!(p.current(), T![=] | T![,] | T![:] | T![')'] | T!['}'] | T![']']) {
 +                    // test half_open_range_pat
 +                    // fn f() {
 +                    //     let 0 .. = 1u32;
 +                    //     let 0..: _ = 1u32;
 +                    // }
 +                } else {
 +                    atom_pat(p, recovery_set);
 +                }
 +                m.complete(p, RANGE_PAT);
 +                return;
 +            }
 +        }
 +    }
 +}
 +
 +const PAT_RECOVERY_SET: TokenSet =
 +    TokenSet::new(&[T![let], T![if], T![while], T![loop], T![match], T![')'], T![,], T![=]]);
 +
 +fn atom_pat(p: &mut Parser<'_>, recovery_set: TokenSet) -> Option<CompletedMarker> {
 +    let m = match p.current() {
 +        T![box] => box_pat(p),
 +        T![ref] | T![mut] => ident_pat(p, true),
 +        T![const] => const_block_pat(p),
 +        IDENT => match p.nth(1) {
 +            // Checks the token after an IDENT to see if a pattern is a path (Struct { .. }) or macro
 +            // (T![x]).
 +            T!['('] | T!['{'] | T![!] => path_or_macro_pat(p),
 +            T![:] if p.nth_at(1, T![::]) => path_or_macro_pat(p),
 +            _ => ident_pat(p, true),
 +        },
 +
 +        // test type_path_in_pattern
 +        // fn main() { let <_>::Foo = (); }
 +        _ if paths::is_path_start(p) => path_or_macro_pat(p),
 +        _ if is_literal_pat_start(p) => literal_pat(p),
 +
 +        T![.] if p.at(T![..]) => rest_pat(p),
 +        T![_] => wildcard_pat(p),
 +        T![&] => ref_pat(p),
 +        T!['('] => tuple_pat(p),
 +        T!['['] => slice_pat(p),
 +
 +        _ => {
 +            p.err_recover("expected pattern", recovery_set);
 +            return None;
 +        }
 +    };
 +
 +    Some(m)
 +}
 +
 +fn is_literal_pat_start(p: &Parser<'_>) -> bool {
 +    p.at(T![-]) && (p.nth(1) == INT_NUMBER || p.nth(1) == FLOAT_NUMBER)
 +        || p.at_ts(expressions::LITERAL_FIRST)
 +}
 +
 +// test literal_pattern
 +// fn main() {
 +//     match () {
 +//         -1 => (),
 +//         92 => (),
 +//         'c' => (),
 +//         "hello" => (),
 +//     }
 +// }
 +fn literal_pat(p: &mut Parser<'_>) -> CompletedMarker {
 +    assert!(is_literal_pat_start(p));
 +    let m = p.start();
 +    if p.at(T![-]) {
 +        p.bump(T![-]);
 +    }
 +    expressions::literal(p);
 +    m.complete(p, LITERAL_PAT)
 +}
 +
 +// test path_part
 +// fn foo() {
 +//     let foo::Bar = ();
 +//     let ::Bar = ();
 +//     let Bar { .. } = ();
 +//     let Bar(..) = ();
 +// }
 +fn path_or_macro_pat(p: &mut Parser<'_>) -> CompletedMarker {
 +    assert!(paths::is_path_start(p));
 +    let m = p.start();
 +    paths::expr_path(p);
 +    let kind = match p.current() {
 +        T!['('] => {
 +            tuple_pat_fields(p);
 +            TUPLE_STRUCT_PAT
 +        }
 +        T!['{'] => {
 +            record_pat_field_list(p);
 +            RECORD_PAT
 +        }
 +        // test marco_pat
 +        // fn main() {
 +        //     let m!(x) = 0;
 +        // }
 +        T![!] => {
 +            items::macro_call_after_excl(p);
 +            return m.complete(p, MACRO_CALL).precede(p).complete(p, MACRO_PAT);
 +        }
 +        _ => PATH_PAT,
 +    };
 +    m.complete(p, kind)
 +}
 +
 +// test tuple_pat_fields
 +// fn foo() {
 +//     let S() = ();
 +//     let S(_) = ();
 +//     let S(_,) = ();
 +//     let S(_, .. , x) = ();
++//     let S(| a) = ();
 +// }
 +fn tuple_pat_fields(p: &mut Parser<'_>) {
 +    assert!(p.at(T!['(']));
 +    p.bump(T!['(']);
 +    pat_list(p, T![')']);
 +    p.expect(T![')']);
 +}
 +
 +// test record_pat_field
 +// fn foo() {
 +//     let S { 0: 1 } = ();
 +//     let S { x: 1 } = ();
 +//     let S { #[cfg(any())] x: 1 } = ();
 +// }
 +fn record_pat_field(p: &mut Parser<'_>) {
 +    match p.current() {
 +        IDENT | INT_NUMBER if p.nth(1) == T![:] => {
 +            name_ref_or_index(p);
 +            p.bump(T![:]);
 +            pattern(p);
 +        }
 +        T![box] => {
 +            // FIXME: not all box patterns should be allowed
 +            box_pat(p);
 +        }
 +        T![ref] | T![mut] | IDENT => {
 +            ident_pat(p, false);
 +        }
 +        _ => {
 +            p.err_and_bump("expected identifier");
 +        }
 +    }
 +}
 +
 +// test record_pat_field_list
 +// fn foo() {
 +//     let S {} = ();
 +//     let S { f, ref mut g } = ();
 +//     let S { h: _, ..} = ();
 +//     let S { h: _, } = ();
 +//     let S { #[cfg(any())] .. } = ();
 +// }
 +fn record_pat_field_list(p: &mut Parser<'_>) {
 +    assert!(p.at(T!['{']));
 +    let m = p.start();
 +    p.bump(T!['{']);
 +    while !p.at(EOF) && !p.at(T!['}']) {
 +        let m = p.start();
 +        attributes::outer_attrs(p);
 +
 +        match p.current() {
 +            // A trailing `..` is *not* treated as a REST_PAT.
 +            T![.] if p.at(T![..]) => {
 +                p.bump(T![..]);
 +                m.complete(p, REST_PAT);
 +            }
 +            T!['{'] => {
 +                error_block(p, "expected ident");
 +                m.abandon(p);
 +            }
 +            _ => {
 +                record_pat_field(p);
 +                m.complete(p, RECORD_PAT_FIELD);
 +            }
 +        }
 +        if !p.at(T!['}']) {
 +            p.expect(T![,]);
 +        }
 +    }
 +    p.expect(T!['}']);
 +    m.complete(p, RECORD_PAT_FIELD_LIST);
 +}
 +
 +// test placeholder_pat
 +// fn main() { let _ = (); }
 +fn wildcard_pat(p: &mut Parser<'_>) -> CompletedMarker {
 +    assert!(p.at(T![_]));
 +    let m = p.start();
 +    p.bump(T![_]);
 +    m.complete(p, WILDCARD_PAT)
 +}
 +
 +// test dot_dot_pat
 +// fn main() {
 +//     let .. = ();
 +//     //
 +//     // Tuples
 +//     //
 +//     let (a, ..) = ();
 +//     let (a, ..,) = ();
 +//     let Tuple(a, ..) = ();
 +//     let Tuple(a, ..,) = ();
 +//     let (.., ..) = ();
 +//     let Tuple(.., ..) = ();
 +//     let (.., a, ..) = ();
 +//     let Tuple(.., a, ..) = ();
 +//     //
 +//     // Slices
 +//     //
 +//     let [..] = ();
 +//     let [head, ..] = ();
 +//     let [head, tail @ ..] = ();
 +//     let [head, .., cons] = ();
 +//     let [head, mid @ .., cons] = ();
 +//     let [head, .., .., cons] = ();
 +//     let [head, .., mid, tail @ ..] = ();
 +//     let [head, .., mid, .., cons] = ();
 +// }
 +fn rest_pat(p: &mut Parser<'_>) -> CompletedMarker {
 +    assert!(p.at(T![..]));
 +    let m = p.start();
 +    p.bump(T![..]);
 +    m.complete(p, REST_PAT)
 +}
 +
 +// test ref_pat
 +// fn main() {
 +//     let &a = ();
 +//     let &mut b = ();
 +// }
 +fn ref_pat(p: &mut Parser<'_>) -> CompletedMarker {
 +    assert!(p.at(T![&]));
 +    let m = p.start();
 +    p.bump(T![&]);
 +    p.eat(T![mut]);
 +    pattern_single(p);
 +    m.complete(p, REF_PAT)
 +}
 +
 +// test tuple_pat
 +// fn main() {
 +//     let (a, b, ..) = ();
 +//     let (a,) = ();
 +//     let (..) = ();
 +//     let () = ();
++//     let (| a | a, | b) = ((),());
 +// }
 +fn tuple_pat(p: &mut Parser<'_>) -> CompletedMarker {
 +    assert!(p.at(T!['(']));
 +    let m = p.start();
 +    p.bump(T!['(']);
 +    let mut has_comma = false;
 +    let mut has_pat = false;
 +    let mut has_rest = false;
 +    while !p.at(EOF) && !p.at(T![')']) {
 +        has_pat = true;
-         pattern(p);
++        if !p.at_ts(PAT_TOP_FIRST) {
 +            p.error("expected a pattern");
 +            break;
 +        }
 +        has_rest |= p.at(T![..]);
 +
-         if !p.at_ts(PATTERN_FIRST) {
++        pattern_top(p);
 +        if !p.at(T![')']) {
 +            has_comma = true;
 +            p.expect(T![,]);
 +        }
 +    }
 +    p.expect(T![')']);
 +
 +    m.complete(p, if !has_comma && !has_rest && has_pat { PAREN_PAT } else { TUPLE_PAT })
 +}
 +
 +// test slice_pat
 +// fn main() {
 +//     let [a, b, ..] = [];
++//     let [| a, ..] = [];
 +// }
 +fn slice_pat(p: &mut Parser<'_>) -> CompletedMarker {
 +    assert!(p.at(T!['[']));
 +    let m = p.start();
 +    p.bump(T!['[']);
 +    pat_list(p, T![']']);
 +    p.expect(T![']']);
 +    m.complete(p, SLICE_PAT)
 +}
 +
 +fn pat_list(p: &mut Parser<'_>, ket: SyntaxKind) {
 +    while !p.at(EOF) && !p.at(ket) {
-         pattern(p);
++        if !p.at_ts(PAT_TOP_FIRST) {
 +            p.error("expected a pattern");
 +            break;
 +        }
 +
++        pattern_top(p);
 +        if !p.at(ket) {
 +            p.expect(T![,]);
 +        }
 +    }
 +}
 +
 +// test bind_pat
 +// fn main() {
 +//     let a = ();
 +//     let mut b = ();
 +//     let ref c = ();
 +//     let ref mut d = ();
 +//     let e @ _ = ();
 +//     let ref mut f @ g @ _ = ();
 +// }
 +fn ident_pat(p: &mut Parser<'_>, with_at: bool) -> CompletedMarker {
 +    assert!(matches!(p.current(), T![ref] | T![mut] | IDENT));
 +    let m = p.start();
 +    p.eat(T![ref]);
 +    p.eat(T![mut]);
 +    name_r(p, PAT_RECOVERY_SET);
 +    if with_at && p.eat(T![@]) {
 +        pattern_single(p);
 +    }
 +    m.complete(p, IDENT_PAT)
 +}
 +
 +// test box_pat
 +// fn main() {
 +//     let box i = ();
 +//     let box Outer { box i, j: box Inner(box &x) } = ();
 +//     let box ref mut i = ();
 +// }
 +fn box_pat(p: &mut Parser<'_>) -> CompletedMarker {
 +    assert!(p.at(T![box]));
 +    let m = p.start();
 +    p.bump(T![box]);
 +    pattern_single(p);
 +    m.complete(p, BOX_PAT)
 +}
 +
 +// test const_block_pat
 +// fn main() {
 +//     let const { 15 } = ();
 +//     let const { foo(); bar() } = ();
 +// }
 +fn const_block_pat(p: &mut Parser<'_>) -> CompletedMarker {
 +    assert!(p.at(T![const]));
 +    let m = p.start();
 +    p.bump(T![const]);
 +    expressions::block_expr(p);
 +    m.complete(p, CONST_BLOCK_PAT)
 +}
index 235a9d7f404cf43602ac36d809c0fb4c44ddbbdc,0000000000000000000000000000000000000000..dff72ba886fe82db58644325c4f72f57e8f543a5
mode 100644,000000..100644
--- /dev/null
@@@ -1,42 -1,0 +1,65 @@@
 +SOURCE_FILE
 +  FN
 +    FN_KW "fn"
 +    WHITESPACE " "
 +    NAME
 +      IDENT "main"
 +    PARAM_LIST
 +      L_PAREN "("
 +      R_PAREN ")"
 +    WHITESPACE " "
 +    BLOCK_EXPR
 +      STMT_LIST
 +        L_CURLY "{"
 +        WHITESPACE "\n    "
 +        LET_STMT
 +          LET_KW "let"
 +          WHITESPACE " "
 +          SLICE_PAT
 +            L_BRACK "["
 +            IDENT_PAT
 +              NAME
 +                IDENT "a"
 +            COMMA ","
 +            WHITESPACE " "
 +            IDENT_PAT
 +              NAME
 +                IDENT "b"
 +            COMMA ","
 +            WHITESPACE " "
 +            REST_PAT
 +              DOT2 ".."
 +            R_BRACK "]"
 +          WHITESPACE " "
 +          EQ "="
 +          WHITESPACE " "
 +          ARRAY_EXPR
 +            L_BRACK "["
 +            R_BRACK "]"
 +          SEMICOLON ";"
++        WHITESPACE "\n    "
++        LET_STMT
++          LET_KW "let"
++          WHITESPACE " "
++          SLICE_PAT
++            L_BRACK "["
++            PIPE "|"
++            WHITESPACE " "
++            IDENT_PAT
++              NAME
++                IDENT "a"
++            COMMA ","
++            WHITESPACE " "
++            REST_PAT
++              DOT2 ".."
++            R_BRACK "]"
++          WHITESPACE " "
++          EQ "="
++          WHITESPACE " "
++          ARRAY_EXPR
++            L_BRACK "["
++            R_BRACK "]"
++          SEMICOLON ";"
 +        WHITESPACE "\n"
 +        R_CURLY "}"
 +  WHITESPACE "\n"
index 7955973b952a770d71d5fe786f83fde238e8e8e0,0000000000000000000000000000000000000000..855ba89b1e9d9ec911ba50b82cce76ac9f2a21c6
mode 100644,000000..100644
--- /dev/null
@@@ -1,3 -1,0 +1,4 @@@
 +fn main() {
 +    let [a, b, ..] = [];
++    let [| a, ..] = [];
 +}
index 3cdaf32b5721c24103d18014a6aebda85ec6ab0a,0000000000000000000000000000000000000000..55baf2fdcb4f6a525a166c6c1f40fc049ee7d519
mode 100644,000000..100644
--- /dev/null
@@@ -1,105 -1,0 +1,128 @@@
 +SOURCE_FILE
 +  FN
 +    FN_KW "fn"
 +    WHITESPACE " "
 +    NAME
 +      IDENT "foo"
 +    PARAM_LIST
 +      L_PAREN "("
 +      R_PAREN ")"
 +    WHITESPACE " "
 +    BLOCK_EXPR
 +      STMT_LIST
 +        L_CURLY "{"
 +        WHITESPACE "\n    "
 +        LET_STMT
 +          LET_KW "let"
 +          WHITESPACE " "
 +          TUPLE_STRUCT_PAT
 +            PATH
 +              PATH_SEGMENT
 +                NAME_REF
 +                  IDENT "S"
 +            L_PAREN "("
 +            R_PAREN ")"
 +          WHITESPACE " "
 +          EQ "="
 +          WHITESPACE " "
 +          TUPLE_EXPR
 +            L_PAREN "("
 +            R_PAREN ")"
 +          SEMICOLON ";"
 +        WHITESPACE "\n    "
 +        LET_STMT
 +          LET_KW "let"
 +          WHITESPACE " "
 +          TUPLE_STRUCT_PAT
 +            PATH
 +              PATH_SEGMENT
 +                NAME_REF
 +                  IDENT "S"
 +            L_PAREN "("
 +            WILDCARD_PAT
 +              UNDERSCORE "_"
 +            R_PAREN ")"
 +          WHITESPACE " "
 +          EQ "="
 +          WHITESPACE " "
 +          TUPLE_EXPR
 +            L_PAREN "("
 +            R_PAREN ")"
 +          SEMICOLON ";"
 +        WHITESPACE "\n    "
 +        LET_STMT
 +          LET_KW "let"
 +          WHITESPACE " "
 +          TUPLE_STRUCT_PAT
 +            PATH
 +              PATH_SEGMENT
 +                NAME_REF
 +                  IDENT "S"
 +            L_PAREN "("
 +            WILDCARD_PAT
 +              UNDERSCORE "_"
 +            COMMA ","
 +            R_PAREN ")"
 +          WHITESPACE " "
 +          EQ "="
 +          WHITESPACE " "
 +          TUPLE_EXPR
 +            L_PAREN "("
 +            R_PAREN ")"
 +          SEMICOLON ";"
 +        WHITESPACE "\n    "
 +        LET_STMT
 +          LET_KW "let"
 +          WHITESPACE " "
 +          TUPLE_STRUCT_PAT
 +            PATH
 +              PATH_SEGMENT
 +                NAME_REF
 +                  IDENT "S"
 +            L_PAREN "("
 +            WILDCARD_PAT
 +              UNDERSCORE "_"
 +            COMMA ","
 +            WHITESPACE " "
 +            REST_PAT
 +              DOT2 ".."
 +            WHITESPACE " "
 +            COMMA ","
 +            WHITESPACE " "
 +            IDENT_PAT
 +              NAME
 +                IDENT "x"
 +            R_PAREN ")"
 +          WHITESPACE " "
 +          EQ "="
 +          WHITESPACE " "
 +          TUPLE_EXPR
 +            L_PAREN "("
 +            R_PAREN ")"
 +          SEMICOLON ";"
++        WHITESPACE "\n    "
++        LET_STMT
++          LET_KW "let"
++          WHITESPACE " "
++          TUPLE_STRUCT_PAT
++            PATH
++              PATH_SEGMENT
++                NAME_REF
++                  IDENT "S"
++            L_PAREN "("
++            PIPE "|"
++            WHITESPACE " "
++            IDENT_PAT
++              NAME
++                IDENT "a"
++            R_PAREN ")"
++          WHITESPACE " "
++          EQ "="
++          WHITESPACE " "
++          TUPLE_EXPR
++            L_PAREN "("
++            R_PAREN ")"
++          SEMICOLON ";"
 +        WHITESPACE "\n"
 +        R_CURLY "}"
 +  WHITESPACE "\n"
index 0dfe63629679baa0eef52b815e49ffa5dd6cfac8,0000000000000000000000000000000000000000..8ec6f4ca93e031ed6f3365e859f0b42ab959e328
mode 100644,000000..100644
--- /dev/null
@@@ -1,6 -1,0 +1,7 @@@
 +fn foo() {
 +    let S() = ();
 +    let S(_) = ();
 +    let S(_,) = ();
 +    let S(_, .. , x) = ();
++    let S(| a) = ();
 +}
index cebe98c43aa62ed8ac6f1e43de72d73d098361fa,0000000000000000000000000000000000000000..1a01e0f69381feff531618d16b6f6cce6b85901a
mode 100644,000000..100644
--- /dev/null
@@@ -1,90 -1,0 +1,130 @@@
 +SOURCE_FILE
 +  FN
 +    FN_KW "fn"
 +    WHITESPACE " "
 +    NAME
 +      IDENT "main"
 +    PARAM_LIST
 +      L_PAREN "("
 +      R_PAREN ")"
 +    WHITESPACE " "
 +    BLOCK_EXPR
 +      STMT_LIST
 +        L_CURLY "{"
 +        WHITESPACE "\n    "
 +        LET_STMT
 +          LET_KW "let"
 +          WHITESPACE " "
 +          TUPLE_PAT
 +            L_PAREN "("
 +            IDENT_PAT
 +              NAME
 +                IDENT "a"
 +            COMMA ","
 +            WHITESPACE " "
 +            IDENT_PAT
 +              NAME
 +                IDENT "b"
 +            COMMA ","
 +            WHITESPACE " "
 +            REST_PAT
 +              DOT2 ".."
 +            R_PAREN ")"
 +          WHITESPACE " "
 +          EQ "="
 +          WHITESPACE " "
 +          TUPLE_EXPR
 +            L_PAREN "("
 +            R_PAREN ")"
 +          SEMICOLON ";"
 +        WHITESPACE "\n    "
 +        LET_STMT
 +          LET_KW "let"
 +          WHITESPACE " "
 +          TUPLE_PAT
 +            L_PAREN "("
 +            IDENT_PAT
 +              NAME
 +                IDENT "a"
 +            COMMA ","
 +            R_PAREN ")"
 +          WHITESPACE " "
 +          EQ "="
 +          WHITESPACE " "
 +          TUPLE_EXPR
 +            L_PAREN "("
 +            R_PAREN ")"
 +          SEMICOLON ";"
 +        WHITESPACE "\n    "
 +        LET_STMT
 +          LET_KW "let"
 +          WHITESPACE " "
 +          TUPLE_PAT
 +            L_PAREN "("
 +            REST_PAT
 +              DOT2 ".."
 +            R_PAREN ")"
 +          WHITESPACE " "
 +          EQ "="
 +          WHITESPACE " "
 +          TUPLE_EXPR
 +            L_PAREN "("
 +            R_PAREN ")"
 +          SEMICOLON ";"
 +        WHITESPACE "\n    "
 +        LET_STMT
 +          LET_KW "let"
 +          WHITESPACE " "
 +          TUPLE_PAT
 +            L_PAREN "("
 +            R_PAREN ")"
 +          WHITESPACE " "
 +          EQ "="
 +          WHITESPACE " "
 +          TUPLE_EXPR
 +            L_PAREN "("
 +            R_PAREN ")"
 +          SEMICOLON ";"
++        WHITESPACE "\n    "
++        LET_STMT
++          LET_KW "let"
++          WHITESPACE " "
++          TUPLE_PAT
++            L_PAREN "("
++            PIPE "|"
++            WHITESPACE " "
++            OR_PAT
++              IDENT_PAT
++                NAME
++                  IDENT "a"
++              WHITESPACE " "
++              PIPE "|"
++              WHITESPACE " "
++              IDENT_PAT
++                NAME
++                  IDENT "a"
++            COMMA ","
++            WHITESPACE " "
++            PIPE "|"
++            WHITESPACE " "
++            IDENT_PAT
++              NAME
++                IDENT "b"
++            R_PAREN ")"
++          WHITESPACE " "
++          EQ "="
++          WHITESPACE " "
++          TUPLE_EXPR
++            L_PAREN "("
++            TUPLE_EXPR
++              L_PAREN "("
++              R_PAREN ")"
++            COMMA ","
++            TUPLE_EXPR
++              L_PAREN "("
++              R_PAREN ")"
++            R_PAREN ")"
++          SEMICOLON ";"
 +        WHITESPACE "\n"
 +        R_CURLY "}"
 +  WHITESPACE "\n"
index ba719879d4c1acad2e13ddef6392072e1532e35a,0000000000000000000000000000000000000000..fbd7f48f66bd93da02e9d44cd51d47feae0e2015
mode 100644,000000..100644
--- /dev/null
@@@ -1,6 -1,0 +1,7 @@@
 +fn main() {
 +    let (a, b, ..) = ();
 +    let (a,) = ();
 +    let (..) = ();
 +    let () = ();
++    let (| a | a, | b) = ((),());
 +}
index 5746eac0b3790923886a9d90f020743c9cc44a38,0000000000000000000000000000000000000000..e39026ac70bfd52e4d78b7b6a08a85d6938306ac
mode 100644,000000..100644
--- /dev/null
@@@ -1,36 -1,0 +1,35 @@@
- crossbeam = "0.8.1"
 +[package]
 +name = "proc-macro-srv"
 +version = "0.0.0"
 +description = "TBD"
 +license = "MIT OR Apache-2.0"
 +edition = "2021"
 +rust-version = "1.57"
 +
 +[lib]
 +doctest = false
 +
 +[dependencies]
 +object = { version = "0.29.0", default-features = false, features = [
 +    "std",
 +    "read_core",
 +    "elf",
 +    "macho",
 +    "pe",
 +] }
 +libloading = "0.7.3"
 +memmap2 = "0.5.4"
 +
 +tt = { path = "../tt", version = "0.0.0" }
 +mbe = { path = "../mbe", version = "0.0.0" }
 +paths = { path = "../paths", version = "0.0.0" }
 +proc-macro-api = { path = "../proc-macro-api", version = "0.0.0" }
 +
 +[dev-dependencies]
 +expect-test = "1.4.0"
 +
 +# used as proc macro test targets
 +proc-macro-test = { path = "../proc-macro-test" }
 +
 +[features]
 +sysroot-abi = []
index 4c205b9cadac3abd8311f9a7c1ae4d4f113f1b63,0000000000000000000000000000000000000000..3679bfc43c980a85a40909698dbe9fc7f27f9715
mode 100644,000000..100644
--- /dev/null
@@@ -1,160 -1,0 +1,155 @@@
-         // FIXME: replace this with std's scoped threads once they stabilize
-         // (then remove dependency on crossbeam)
-         let result = crossbeam::scope(|s| {
-             let res = match s
-                 .builder()
 +//! RA Proc Macro Server
 +//!
 +//! This library is able to call compiled Rust custom derive dynamic libraries on arbitrary code.
 +//! The general idea here is based on <https://github.com/fedochet/rust-proc-macro-expander>.
 +//!
 +//! But we adapt it to better fit RA needs:
 +//!
 +//! * We use `tt` for proc-macro `TokenStream` server, it is easier to manipulate and interact with
 +//!   RA than `proc-macro2` token stream.
 +//! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable`
 +//!   rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)…
 +
 +#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
 +#![cfg_attr(
 +    feature = "sysroot-abi",
 +    feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)
 +)]
 +#![allow(unreachable_pub)]
 +
 +mod dylib;
 +mod abis;
 +
 +use std::{
 +    collections::{hash_map::Entry, HashMap},
 +    env,
 +    ffi::OsString,
 +    fs,
 +    path::{Path, PathBuf},
++    thread,
 +    time::SystemTime,
 +};
 +
 +use proc_macro_api::{
 +    msg::{ExpandMacro, FlatTree, PanicMessage},
 +    ProcMacroKind,
 +};
 +
 +#[derive(Default)]
 +pub(crate) struct ProcMacroSrv {
 +    expanders: HashMap<(PathBuf, SystemTime), dylib::Expander>,
 +}
 +
 +const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024;
 +
 +impl ProcMacroSrv {
 +    pub fn expand(&mut self, task: ExpandMacro) -> Result<FlatTree, PanicMessage> {
 +        let expander = self.expander(task.lib.as_ref()).map_err(|err| {
 +            debug_assert!(false, "should list macros before asking to expand");
 +            PanicMessage(format!("failed to load macro: {}", err))
 +        })?;
 +
 +        let prev_env = EnvSnapshot::new();
 +        for (k, v) in &task.env {
 +            env::set_var(k, v);
 +        }
 +        let prev_working_dir = match task.current_dir {
 +            Some(dir) => {
 +                let prev_working_dir = std::env::current_dir().ok();
 +                if let Err(err) = std::env::set_current_dir(&dir) {
 +                    eprintln!("Failed to set the current working dir to {}. Error: {:?}", dir, err)
 +                }
 +                prev_working_dir
 +            }
 +            None => None,
 +        };
 +
 +        let macro_body = task.macro_body.to_subtree();
 +        let attributes = task.attributes.map(|it| it.to_subtree());
-                 .spawn(|_| {
++        let result = thread::scope(|s| {
++            let thread = thread::Builder::new()
 +                .stack_size(EXPANDER_STACK_SIZE)
 +                .name(task.macro_name.clone())
-                 }) {
++                .spawn_scoped(s, || {
 +                    expander
 +                        .expand(&task.macro_name, &macro_body, attributes.as_ref())
 +                        .map(|it| FlatTree::new(&it))
-         let result = match result {
-             Ok(result) => result,
-             Err(e) => std::panic::resume_unwind(e),
-         };
++                });
++            let res = match thread {
 +                Ok(handle) => handle.join(),
 +                Err(e) => std::panic::resume_unwind(Box::new(e)),
 +            };
 +
 +            match res {
 +                Ok(res) => res,
 +                Err(e) => std::panic::resume_unwind(e),
 +            }
 +        });
 +
 +        prev_env.rollback();
 +
 +        if let Some(dir) = prev_working_dir {
 +            if let Err(err) = std::env::set_current_dir(&dir) {
 +                eprintln!(
 +                    "Failed to set the current working dir to {}. Error: {:?}",
 +                    dir.display(),
 +                    err
 +                )
 +            }
 +        }
 +
 +        result.map_err(PanicMessage)
 +    }
 +
 +    pub(crate) fn list_macros(
 +        &mut self,
 +        dylib_path: &Path,
 +    ) -> Result<Vec<(String, ProcMacroKind)>, String> {
 +        let expander = self.expander(dylib_path)?;
 +        Ok(expander.list_macros())
 +    }
 +
 +    fn expander(&mut self, path: &Path) -> Result<&dylib::Expander, String> {
 +        let time = fs::metadata(path).and_then(|it| it.modified()).map_err(|err| {
 +            format!("Failed to get file metadata for {}: {:?}", path.display(), err)
 +        })?;
 +
 +        Ok(match self.expanders.entry((path.to_path_buf(), time)) {
 +            Entry::Vacant(v) => v.insert(dylib::Expander::new(path).map_err(|err| {
 +                format!("Cannot create expander for {}: {:?}", path.display(), err)
 +            })?),
 +            Entry::Occupied(e) => e.into_mut(),
 +        })
 +    }
 +}
 +
 +struct EnvSnapshot {
 +    vars: HashMap<OsString, OsString>,
 +}
 +
 +impl EnvSnapshot {
 +    fn new() -> EnvSnapshot {
 +        EnvSnapshot { vars: env::vars_os().collect() }
 +    }
 +
 +    fn rollback(self) {
 +        let mut old_vars = self.vars;
 +        for (name, value) in env::vars_os() {
 +            let old_value = old_vars.remove(&name);
 +            if old_value != Some(value) {
 +                match old_value {
 +                    None => env::remove_var(name),
 +                    Some(old_value) => env::set_var(name, old_value),
 +                }
 +            }
 +        }
 +        for (name, old_value) in old_vars {
 +            env::set_var(name, old_value)
 +        }
 +    }
 +}
 +
 +pub mod cli;
 +
 +#[cfg(test)]
 +mod tests;
index 8d0fa757c2e176baf6c0904927bfe252a9e4772d,0000000000000000000000000000000000000000..9ccb6e9101ef440c4604350b1901060375df66ae
mode 100644,000000..100644
--- /dev/null
@@@ -1,1821 -1,0 +1,1821 @@@
-                         2,
 +use std::{
 +    ops::Deref,
 +    path::{Path, PathBuf},
 +};
 +
 +use base_db::{CrateGraph, FileId};
 +use cfg::{CfgAtom, CfgDiff};
 +use expect_test::{expect, Expect};
 +use paths::{AbsPath, AbsPathBuf};
 +use serde::de::DeserializeOwned;
 +
 +use crate::{
 +    CargoWorkspace, CfgOverrides, ProjectJson, ProjectJsonData, ProjectWorkspace, Sysroot,
 +    WorkspaceBuildScripts,
 +};
 +
 +fn load_cargo(file: &str) -> CrateGraph {
 +    load_cargo_with_overrides(file, CfgOverrides::default())
 +}
 +
 +fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> CrateGraph {
 +    let meta = get_test_json_file(file);
 +    let cargo_workspace = CargoWorkspace::new(meta);
 +    let project_workspace = ProjectWorkspace::Cargo {
 +        cargo: cargo_workspace,
 +        build_scripts: WorkspaceBuildScripts::default(),
 +        sysroot: None,
 +        rustc: None,
 +        rustc_cfg: Vec::new(),
 +        cfg_overrides,
 +        toolchain: None,
 +    };
 +    to_crate_graph(project_workspace)
 +}
 +
 +fn load_rust_project(file: &str) -> CrateGraph {
 +    let data = get_test_json_file(file);
 +    let project = rooted_project_json(data);
 +    let sysroot = Some(get_fake_sysroot());
 +    let project_workspace = ProjectWorkspace::Json { project, sysroot, rustc_cfg: Vec::new() };
 +    to_crate_graph(project_workspace)
 +}
 +
 +fn get_test_json_file<T: DeserializeOwned>(file: &str) -> T {
 +    let file = get_test_path(file);
 +    let data = std::fs::read_to_string(file).unwrap();
 +    let mut json = data.parse::<serde_json::Value>().unwrap();
 +    fixup_paths(&mut json);
 +    return serde_json::from_value(json).unwrap();
 +
 +    fn fixup_paths(val: &mut serde_json::Value) {
 +        match val {
 +            serde_json::Value::String(s) => replace_root(s, true),
 +            serde_json::Value::Array(vals) => vals.iter_mut().for_each(fixup_paths),
 +            serde_json::Value::Object(kvals) => kvals.values_mut().for_each(fixup_paths),
 +            serde_json::Value::Null | serde_json::Value::Bool(_) | serde_json::Value::Number(_) => {
 +            }
 +        }
 +    }
 +}
 +
 +fn replace_root(s: &mut String, direction: bool) {
 +    if direction {
 +        let root = if cfg!(windows) { r#"C:\\ROOT\"# } else { "/ROOT/" };
 +        *s = s.replace("$ROOT$", root)
 +    } else {
 +        let root = if cfg!(windows) { r#"C:\\\\ROOT\\"# } else { "/ROOT/" };
 +        *s = s.replace(root, "$ROOT$")
 +    }
 +}
 +
 +fn get_test_path(file: &str) -> PathBuf {
 +    let base = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
 +    base.join("test_data").join(file)
 +}
 +
 +fn get_fake_sysroot() -> Sysroot {
 +    let sysroot_path = get_test_path("fake-sysroot");
 +    // there's no `libexec/` directory with a `proc-macro-srv` binary in that
 +    // fake sysroot, so we give them both the same path:
 +    let sysroot_dir = AbsPathBuf::assert(sysroot_path);
 +    let sysroot_src_dir = sysroot_dir.clone();
 +    Sysroot::load(sysroot_dir, sysroot_src_dir).unwrap()
 +}
 +
 +fn rooted_project_json(data: ProjectJsonData) -> ProjectJson {
 +    let mut root = "$ROOT$".to_string();
 +    replace_root(&mut root, true);
 +    let path = Path::new(&root);
 +    let base = AbsPath::assert(path);
 +    ProjectJson::new(base, data)
 +}
 +
 +fn to_crate_graph(project_workspace: ProjectWorkspace) -> CrateGraph {
 +    project_workspace.to_crate_graph(&mut |_, _| Ok(Vec::new()), &mut {
 +        let mut counter = 0;
 +        move |_path| {
 +            counter += 1;
 +            Some(FileId(counter))
 +        }
 +    })
 +}
 +
 +fn check_crate_graph(crate_graph: CrateGraph, expect: Expect) {
 +    let mut crate_graph = format!("{:#?}", crate_graph);
 +    replace_root(&mut crate_graph, false);
 +    expect.assert_eq(&crate_graph);
 +}
 +
 +#[test]
 +fn cargo_hello_world_project_model_with_wildcard_overrides() {
 +    let cfg_overrides = CfgOverrides::Wildcard(
 +        CfgDiff::new(Vec::new(), vec![CfgAtom::Flag("test".into())]).unwrap(),
 +    );
 +    let crate_graph = load_cargo_with_overrides("hello-world-metadata.json", cfg_overrides);
 +    check_crate_graph(
 +        crate_graph,
 +        expect![[r#"
 +            CrateGraph {
 +                arena: {
 +                    CrateId(
 +                        0,
 +                    ): CrateData {
 +                        root_file_id: FileId(
 +                            1,
 +                        ),
 +                        edition: Edition2018,
 +                        version: Some(
 +                            "0.1.0",
 +                        ),
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
 +                                    "hello_world",
 +                                ),
 +                                canonical_name: "hello-world",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                            ],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                            ],
 +                        ),
 +                        env: Env {
 +                            entries: {
 +                                "CARGO_PKG_LICENSE": "",
 +                                "CARGO_PKG_VERSION_MAJOR": "0",
 +                                "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
 +                                "CARGO_PKG_VERSION": "0.1.0",
 +                                "CARGO_PKG_AUTHORS": "",
 +                                "CARGO_CRATE_NAME": "hello_world",
 +                                "CARGO_PKG_LICENSE_FILE": "",
 +                                "CARGO_PKG_HOMEPAGE": "",
 +                                "CARGO_PKG_DESCRIPTION": "",
 +                                "CARGO_PKG_NAME": "hello-world",
 +                                "CARGO_PKG_VERSION_PATCH": "0",
 +                                "CARGO": "cargo",
 +                                "CARGO_PKG_REPOSITORY": "",
 +                                "CARGO_PKG_VERSION_MINOR": "1",
 +                                "CARGO_PKG_VERSION_PRE": "",
 +                            },
 +                        },
 +                        dependencies: [
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    4,
 +                                ),
 +                                name: CrateName(
 +                                    "libc",
 +                                ),
 +                                prelude: true,
 +                            },
 +                        ],
 +                        proc_macro: Err(
 +                            "crate has not (yet) been built",
 +                        ),
 +                        origin: CratesIo {
 +                            repo: None,
 +                        },
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
-                             3,
++                        1,
 +                    ): CrateData {
 +                        root_file_id: FileId(
-                                     "an_example",
++                            2,
 +                        ),
 +                        edition: Edition2018,
 +                        version: Some(
 +                            "0.1.0",
 +                        ),
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
-                                 canonical_name: "an-example",
++                                    "hello_world",
 +                                ),
-                         4,
++                                canonical_name: "hello-world",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                            ],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                            ],
 +                        ),
 +                        env: Env {
 +                            entries: {
 +                                "CARGO_PKG_LICENSE": "",
 +                                "CARGO_PKG_VERSION_MAJOR": "0",
 +                                "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
 +                                "CARGO_PKG_VERSION": "0.1.0",
 +                                "CARGO_PKG_AUTHORS": "",
 +                                "CARGO_CRATE_NAME": "hello_world",
 +                                "CARGO_PKG_LICENSE_FILE": "",
 +                                "CARGO_PKG_HOMEPAGE": "",
 +                                "CARGO_PKG_DESCRIPTION": "",
 +                                "CARGO_PKG_NAME": "hello-world",
 +                                "CARGO_PKG_VERSION_PATCH": "0",
 +                                "CARGO": "cargo",
 +                                "CARGO_PKG_REPOSITORY": "",
 +                                "CARGO_PKG_VERSION_MINOR": "1",
 +                                "CARGO_PKG_VERSION_PRE": "",
 +                            },
 +                        },
 +                        dependencies: [
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    0,
 +                                ),
 +                                name: CrateName(
 +                                    "hello_world",
 +                                ),
 +                                prelude: true,
 +                            },
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    4,
 +                                ),
 +                                name: CrateName(
 +                                    "libc",
 +                                ),
 +                                prelude: true,
 +                            },
 +                        ],
 +                        proc_macro: Err(
 +                            "crate has not (yet) been built",
 +                        ),
 +                        origin: CratesIo {
 +                            repo: None,
 +                        },
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
-                             5,
++                        2,
 +                    ): CrateData {
 +                        root_file_id: FileId(
-                         edition: Edition2015,
++                            3,
 +                        ),
-                             "0.2.98",
++                        edition: Edition2018,
 +                        version: Some(
-                                     "libc",
++                            "0.1.0",
 +                        ),
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
-                                 canonical_name: "libc",
++                                    "an_example",
 +                                ),
-                                 "feature=default",
-                                 "feature=std",
++                                canonical_name: "an-example",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
-                                 "feature=align",
-                                 "feature=const-extern-fn",
-                                 "feature=default",
-                                 "feature=extra_traits",
-                                 "feature=rustc-dep-of-std",
-                                 "feature=std",
-                                 "feature=use_std",
 +                            ],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
-                                 "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
-                                 "CARGO_PKG_VERSION": "0.2.98",
 +                            ],
 +                        ),
 +                        env: Env {
 +                            entries: {
 +                                "CARGO_PKG_LICENSE": "",
 +                                "CARGO_PKG_VERSION_MAJOR": "0",
-                                 "CARGO_CRATE_NAME": "libc",
++                                "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
++                                "CARGO_PKG_VERSION": "0.1.0",
 +                                "CARGO_PKG_AUTHORS": "",
-                                 "CARGO_PKG_NAME": "libc",
-                                 "CARGO_PKG_VERSION_PATCH": "98",
++                                "CARGO_CRATE_NAME": "hello_world",
 +                                "CARGO_PKG_LICENSE_FILE": "",
 +                                "CARGO_PKG_HOMEPAGE": "",
 +                                "CARGO_PKG_DESCRIPTION": "",
-                                 "CARGO_PKG_VERSION_MINOR": "2",
++                                "CARGO_PKG_NAME": "hello-world",
++                                "CARGO_PKG_VERSION_PATCH": "0",
 +                                "CARGO": "cargo",
 +                                "CARGO_PKG_REPOSITORY": "",
-                         dependencies: [],
++                                "CARGO_PKG_VERSION_MINOR": "1",
 +                                "CARGO_PKG_VERSION_PRE": "",
 +                            },
 +                        },
-                             repo: Some(
-                                 "https://github.com/rust-lang/libc",
-                             ),
++                        dependencies: [
++                            Dependency {
++                                crate_id: CrateId(
++                                    0,
++                                ),
++                                name: CrateName(
++                                    "hello_world",
++                                ),
++                                prelude: true,
++                            },
++                            Dependency {
++                                crate_id: CrateId(
++                                    4,
++                                ),
++                                name: CrateName(
++                                    "libc",
++                                ),
++                                prelude: true,
++                            },
++                        ],
 +                        proc_macro: Err(
 +                            "crate has not (yet) been built",
 +                        ),
 +                        origin: CratesIo {
-                         1,
++                            repo: None,
 +                        },
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
-                             2,
++                        3,
 +                    ): CrateData {
 +                        root_file_id: FileId(
-                                     "hello_world",
++                            4,
 +                        ),
 +                        edition: Edition2018,
 +                        version: Some(
 +                            "0.1.0",
 +                        ),
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
-                                 canonical_name: "hello-world",
++                                    "it",
 +                                ),
-                         3,
++                                canonical_name: "it",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                            ],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                            ],
 +                        ),
 +                        env: Env {
 +                            entries: {
 +                                "CARGO_PKG_LICENSE": "",
 +                                "CARGO_PKG_VERSION_MAJOR": "0",
 +                                "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
 +                                "CARGO_PKG_VERSION": "0.1.0",
 +                                "CARGO_PKG_AUTHORS": "",
 +                                "CARGO_CRATE_NAME": "hello_world",
 +                                "CARGO_PKG_LICENSE_FILE": "",
 +                                "CARGO_PKG_HOMEPAGE": "",
 +                                "CARGO_PKG_DESCRIPTION": "",
 +                                "CARGO_PKG_NAME": "hello-world",
 +                                "CARGO_PKG_VERSION_PATCH": "0",
 +                                "CARGO": "cargo",
 +                                "CARGO_PKG_REPOSITORY": "",
 +                                "CARGO_PKG_VERSION_MINOR": "1",
 +                                "CARGO_PKG_VERSION_PRE": "",
 +                            },
 +                        },
 +                        dependencies: [
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    0,
 +                                ),
 +                                name: CrateName(
 +                                    "hello_world",
 +                                ),
 +                                prelude: true,
 +                            },
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    4,
 +                                ),
 +                                name: CrateName(
 +                                    "libc",
 +                                ),
 +                                prelude: true,
 +                            },
 +                        ],
 +                        proc_macro: Err(
 +                            "crate has not (yet) been built",
 +                        ),
 +                        origin: CratesIo {
 +                            repo: None,
 +                        },
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
-                             4,
++                        4,
 +                    ): CrateData {
 +                        root_file_id: FileId(
-                         edition: Edition2018,
++                            5,
 +                        ),
-                             "0.1.0",
++                        edition: Edition2015,
 +                        version: Some(
-                                     "it",
++                            "0.2.98",
 +                        ),
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
-                                 canonical_name: "it",
++                                    "libc",
 +                                ),
-                                 "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
-                                 "CARGO_PKG_VERSION": "0.1.0",
++                                canonical_name: "libc",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
++                                "feature=default",
++                                "feature=std",
 +                            ],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
++                                "feature=align",
++                                "feature=const-extern-fn",
++                                "feature=default",
++                                "feature=extra_traits",
++                                "feature=rustc-dep-of-std",
++                                "feature=std",
++                                "feature=use_std",
 +                            ],
 +                        ),
 +                        env: Env {
 +                            entries: {
 +                                "CARGO_PKG_LICENSE": "",
 +                                "CARGO_PKG_VERSION_MAJOR": "0",
-                                 "CARGO_CRATE_NAME": "hello_world",
++                                "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
++                                "CARGO_PKG_VERSION": "0.2.98",
 +                                "CARGO_PKG_AUTHORS": "",
-                                 "CARGO_PKG_NAME": "hello-world",
-                                 "CARGO_PKG_VERSION_PATCH": "0",
++                                "CARGO_CRATE_NAME": "libc",
 +                                "CARGO_PKG_LICENSE_FILE": "",
 +                                "CARGO_PKG_HOMEPAGE": "",
 +                                "CARGO_PKG_DESCRIPTION": "",
-                                 "CARGO_PKG_VERSION_MINOR": "1",
++                                "CARGO_PKG_NAME": "libc",
++                                "CARGO_PKG_VERSION_PATCH": "98",
 +                                "CARGO": "cargo",
 +                                "CARGO_PKG_REPOSITORY": "",
-                         dependencies: [
-                             Dependency {
-                                 crate_id: CrateId(
-                                     0,
-                                 ),
-                                 name: CrateName(
-                                     "hello_world",
-                                 ),
-                                 prelude: true,
-                             },
-                             Dependency {
-                                 crate_id: CrateId(
-                                     4,
-                                 ),
-                                 name: CrateName(
-                                     "libc",
-                                 ),
-                                 prelude: true,
-                             },
-                         ],
++                                "CARGO_PKG_VERSION_MINOR": "2",
 +                                "CARGO_PKG_VERSION_PRE": "",
 +                            },
 +                        },
-                             repo: None,
++                        dependencies: [],
 +                        proc_macro: Err(
 +                            "crate has not (yet) been built",
 +                        ),
 +                        origin: CratesIo {
-                         2,
++                            repo: Some(
++                                "https://github.com/rust-lang/libc",
++                            ),
 +                        },
 +                        is_proc_macro: false,
 +                    },
 +                },
 +            }"#]],
 +    )
 +}
 +
 +#[test]
 +fn cargo_hello_world_project_model_with_selective_overrides() {
 +    let cfg_overrides = {
 +        CfgOverrides::Selective(
 +            std::iter::once((
 +                "libc".to_owned(),
 +                CfgDiff::new(Vec::new(), vec![CfgAtom::Flag("test".into())]).unwrap(),
 +            ))
 +            .collect(),
 +        )
 +    };
 +    let crate_graph = load_cargo_with_overrides("hello-world-metadata.json", cfg_overrides);
 +    check_crate_graph(
 +        crate_graph,
 +        expect![[r#"
 +            CrateGraph {
 +                arena: {
 +                    CrateId(
 +                        0,
 +                    ): CrateData {
 +                        root_file_id: FileId(
 +                            1,
 +                        ),
 +                        edition: Edition2018,
 +                        version: Some(
 +                            "0.1.0",
 +                        ),
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
 +                                    "hello_world",
 +                                ),
 +                                canonical_name: "hello-world",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                                "test",
 +                            ],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                                "test",
 +                            ],
 +                        ),
 +                        env: Env {
 +                            entries: {
 +                                "CARGO_PKG_LICENSE": "",
 +                                "CARGO_PKG_VERSION_MAJOR": "0",
 +                                "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
 +                                "CARGO_PKG_VERSION": "0.1.0",
 +                                "CARGO_PKG_AUTHORS": "",
 +                                "CARGO_CRATE_NAME": "hello_world",
 +                                "CARGO_PKG_LICENSE_FILE": "",
 +                                "CARGO_PKG_HOMEPAGE": "",
 +                                "CARGO_PKG_DESCRIPTION": "",
 +                                "CARGO_PKG_NAME": "hello-world",
 +                                "CARGO_PKG_VERSION_PATCH": "0",
 +                                "CARGO": "cargo",
 +                                "CARGO_PKG_REPOSITORY": "",
 +                                "CARGO_PKG_VERSION_MINOR": "1",
 +                                "CARGO_PKG_VERSION_PRE": "",
 +                            },
 +                        },
 +                        dependencies: [
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    4,
 +                                ),
 +                                name: CrateName(
 +                                    "libc",
 +                                ),
 +                                prelude: true,
 +                            },
 +                        ],
 +                        proc_macro: Err(
 +                            "crate has not (yet) been built",
 +                        ),
 +                        origin: CratesIo {
 +                            repo: None,
 +                        },
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
-                             3,
++                        1,
 +                    ): CrateData {
 +                        root_file_id: FileId(
-                                     "an_example",
++                            2,
 +                        ),
 +                        edition: Edition2018,
 +                        version: Some(
 +                            "0.1.0",
 +                        ),
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
-                                 canonical_name: "an-example",
++                                    "hello_world",
 +                                ),
-                         4,
-                     ): CrateData {
-                         root_file_id: FileId(
-                             5,
-                         ),
-                         edition: Edition2015,
-                         version: Some(
-                             "0.2.98",
-                         ),
-                         display_name: Some(
-                             CrateDisplayName {
-                                 crate_name: CrateName(
-                                     "libc",
-                                 ),
-                                 canonical_name: "libc",
-                             },
-                         ),
-                         cfg_options: CfgOptions(
-                             [
-                                 "debug_assertions",
-                                 "feature=default",
-                                 "feature=std",
-                             ],
-                         ),
-                         potential_cfg_options: CfgOptions(
-                             [
-                                 "debug_assertions",
-                                 "feature=align",
-                                 "feature=const-extern-fn",
-                                 "feature=default",
-                                 "feature=extra_traits",
-                                 "feature=rustc-dep-of-std",
-                                 "feature=std",
-                                 "feature=use_std",
-                             ],
-                         ),
-                         env: Env {
-                             entries: {
-                                 "CARGO_PKG_LICENSE": "",
-                                 "CARGO_PKG_VERSION_MAJOR": "0",
-                                 "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
-                                 "CARGO_PKG_VERSION": "0.2.98",
-                                 "CARGO_PKG_AUTHORS": "",
-                                 "CARGO_CRATE_NAME": "libc",
-                                 "CARGO_PKG_LICENSE_FILE": "",
-                                 "CARGO_PKG_HOMEPAGE": "",
-                                 "CARGO_PKG_DESCRIPTION": "",
-                                 "CARGO_PKG_NAME": "libc",
-                                 "CARGO_PKG_VERSION_PATCH": "98",
-                                 "CARGO": "cargo",
-                                 "CARGO_PKG_REPOSITORY": "",
-                                 "CARGO_PKG_VERSION_MINOR": "2",
-                                 "CARGO_PKG_VERSION_PRE": "",
-                             },
-                         },
-                         dependencies: [],
-                         proc_macro: Err(
-                             "crate has not (yet) been built",
-                         ),
-                         origin: CratesIo {
-                             repo: Some(
-                                 "https://github.com/rust-lang/libc",
-                             ),
-                         },
-                         is_proc_macro: false,
-                     },
-                     CrateId(
-                         1,
++                                canonical_name: "hello-world",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                                "test",
 +                            ],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                                "test",
 +                            ],
 +                        ),
 +                        env: Env {
 +                            entries: {
 +                                "CARGO_PKG_LICENSE": "",
 +                                "CARGO_PKG_VERSION_MAJOR": "0",
 +                                "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
 +                                "CARGO_PKG_VERSION": "0.1.0",
 +                                "CARGO_PKG_AUTHORS": "",
 +                                "CARGO_CRATE_NAME": "hello_world",
 +                                "CARGO_PKG_LICENSE_FILE": "",
 +                                "CARGO_PKG_HOMEPAGE": "",
 +                                "CARGO_PKG_DESCRIPTION": "",
 +                                "CARGO_PKG_NAME": "hello-world",
 +                                "CARGO_PKG_VERSION_PATCH": "0",
 +                                "CARGO": "cargo",
 +                                "CARGO_PKG_REPOSITORY": "",
 +                                "CARGO_PKG_VERSION_MINOR": "1",
 +                                "CARGO_PKG_VERSION_PRE": "",
 +                            },
 +                        },
 +                        dependencies: [
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    0,
 +                                ),
 +                                name: CrateName(
 +                                    "hello_world",
 +                                ),
 +                                prelude: true,
 +                            },
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    4,
 +                                ),
 +                                name: CrateName(
 +                                    "libc",
 +                                ),
 +                                prelude: true,
 +                            },
 +                        ],
 +                        proc_macro: Err(
 +                            "crate has not (yet) been built",
 +                        ),
 +                        origin: CratesIo {
 +                            repo: None,
 +                        },
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
-                             2,
++                        2,
 +                    ): CrateData {
 +                        root_file_id: FileId(
-                                     "hello_world",
++                            3,
 +                        ),
 +                        edition: Edition2018,
 +                        version: Some(
 +                            "0.1.0",
 +                        ),
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
-                                 canonical_name: "hello-world",
++                                    "an_example",
 +                                ),
-                 },
-             }"#]],
-     )
- }
- #[test]
- fn cargo_hello_world_project_model() {
-     let crate_graph = load_cargo("hello-world-metadata.json");
-     check_crate_graph(
-         crate_graph,
-         expect![[r#"
-             CrateGraph {
-                 arena: {
++                                canonical_name: "an-example",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                                "test",
 +                            ],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                                "test",
 +                            ],
 +                        ),
 +                        env: Env {
 +                            entries: {
 +                                "CARGO_PKG_LICENSE": "",
 +                                "CARGO_PKG_VERSION_MAJOR": "0",
 +                                "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
 +                                "CARGO_PKG_VERSION": "0.1.0",
 +                                "CARGO_PKG_AUTHORS": "",
 +                                "CARGO_CRATE_NAME": "hello_world",
 +                                "CARGO_PKG_LICENSE_FILE": "",
 +                                "CARGO_PKG_HOMEPAGE": "",
 +                                "CARGO_PKG_DESCRIPTION": "",
 +                                "CARGO_PKG_NAME": "hello-world",
 +                                "CARGO_PKG_VERSION_PATCH": "0",
 +                                "CARGO": "cargo",
 +                                "CARGO_PKG_REPOSITORY": "",
 +                                "CARGO_PKG_VERSION_MINOR": "1",
 +                                "CARGO_PKG_VERSION_PRE": "",
 +                            },
 +                        },
 +                        dependencies: [
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    0,
 +                                ),
 +                                name: CrateName(
 +                                    "hello_world",
 +                                ),
 +                                prelude: true,
 +                            },
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    4,
 +                                ),
 +                                name: CrateName(
 +                                    "libc",
 +                                ),
 +                                prelude: true,
 +                            },
 +                        ],
 +                        proc_macro: Err(
 +                            "crate has not (yet) been built",
 +                        ),
 +                        origin: CratesIo {
 +                            repo: None,
 +                        },
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
 +                        3,
 +                    ): CrateData {
 +                        root_file_id: FileId(
 +                            4,
 +                        ),
 +                        edition: Edition2018,
 +                        version: Some(
 +                            "0.1.0",
 +                        ),
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
 +                                    "it",
 +                                ),
 +                                canonical_name: "it",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                                "test",
 +                            ],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                                "test",
 +                            ],
 +                        ),
 +                        env: Env {
 +                            entries: {
 +                                "CARGO_PKG_LICENSE": "",
 +                                "CARGO_PKG_VERSION_MAJOR": "0",
 +                                "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
 +                                "CARGO_PKG_VERSION": "0.1.0",
 +                                "CARGO_PKG_AUTHORS": "",
 +                                "CARGO_CRATE_NAME": "hello_world",
 +                                "CARGO_PKG_LICENSE_FILE": "",
 +                                "CARGO_PKG_HOMEPAGE": "",
 +                                "CARGO_PKG_DESCRIPTION": "",
 +                                "CARGO_PKG_NAME": "hello-world",
 +                                "CARGO_PKG_VERSION_PATCH": "0",
 +                                "CARGO": "cargo",
 +                                "CARGO_PKG_REPOSITORY": "",
 +                                "CARGO_PKG_VERSION_MINOR": "1",
 +                                "CARGO_PKG_VERSION_PRE": "",
 +                            },
 +                        },
 +                        dependencies: [
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    0,
 +                                ),
 +                                name: CrateName(
 +                                    "hello_world",
 +                                ),
 +                                prelude: true,
 +                            },
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    4,
 +                                ),
 +                                name: CrateName(
 +                                    "libc",
 +                                ),
 +                                prelude: true,
 +                            },
 +                        ],
 +                        proc_macro: Err(
 +                            "crate has not (yet) been built",
 +                        ),
 +                        origin: CratesIo {
 +                            repo: None,
 +                        },
 +                        is_proc_macro: false,
 +                    },
-                         0,
 +                    CrateId(
-                             1,
++                        4,
 +                    ): CrateData {
 +                        root_file_id: FileId(
-                         edition: Edition2018,
++                            5,
 +                        ),
-                             "0.1.0",
++                        edition: Edition2015,
 +                        version: Some(
-                                     "hello_world",
++                            "0.2.98",
 +                        ),
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
-                                 canonical_name: "hello-world",
++                                    "libc",
 +                                ),
-                                 "test",
++                                canonical_name: "libc",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
-                                 "test",
++                                "feature=default",
++                                "feature=std",
 +                            ],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
-                                 "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
-                                 "CARGO_PKG_VERSION": "0.1.0",
++                                "feature=align",
++                                "feature=const-extern-fn",
++                                "feature=default",
++                                "feature=extra_traits",
++                                "feature=rustc-dep-of-std",
++                                "feature=std",
++                                "feature=use_std",
 +                            ],
 +                        ),
 +                        env: Env {
 +                            entries: {
 +                                "CARGO_PKG_LICENSE": "",
 +                                "CARGO_PKG_VERSION_MAJOR": "0",
-                                 "CARGO_CRATE_NAME": "hello_world",
++                                "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
++                                "CARGO_PKG_VERSION": "0.2.98",
 +                                "CARGO_PKG_AUTHORS": "",
-                                 "CARGO_PKG_NAME": "hello-world",
-                                 "CARGO_PKG_VERSION_PATCH": "0",
++                                "CARGO_CRATE_NAME": "libc",
 +                                "CARGO_PKG_LICENSE_FILE": "",
 +                                "CARGO_PKG_HOMEPAGE": "",
 +                                "CARGO_PKG_DESCRIPTION": "",
-                                 "CARGO_PKG_VERSION_MINOR": "1",
++                                "CARGO_PKG_NAME": "libc",
++                                "CARGO_PKG_VERSION_PATCH": "98",
 +                                "CARGO": "cargo",
 +                                "CARGO_PKG_REPOSITORY": "",
-                         dependencies: [
-                             Dependency {
-                                 crate_id: CrateId(
-                                     4,
-                                 ),
-                                 name: CrateName(
-                                     "libc",
-                                 ),
-                                 prelude: true,
-                             },
-                         ],
++                                "CARGO_PKG_VERSION_MINOR": "2",
 +                                "CARGO_PKG_VERSION_PRE": "",
 +                            },
 +                        },
-                             repo: None,
++                        dependencies: [],
 +                        proc_macro: Err(
 +                            "crate has not (yet) been built",
 +                        ),
 +                        origin: CratesIo {
-                         2,
++                            repo: Some(
++                                "https://github.com/rust-lang/libc",
++                            ),
 +                        },
 +                        is_proc_macro: false,
 +                    },
++                },
++            }"#]],
++    )
++}
++
++#[test]
++fn cargo_hello_world_project_model() {
++    let crate_graph = load_cargo("hello-world-metadata.json");
++    check_crate_graph(
++        crate_graph,
++        expect![[r#"
++            CrateGraph {
++                arena: {
 +                    CrateId(
-                             3,
++                        0,
 +                    ): CrateData {
 +                        root_file_id: FileId(
-                                     "an_example",
++                            1,
 +                        ),
 +                        edition: Edition2018,
 +                        version: Some(
 +                            "0.1.0",
 +                        ),
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
-                                 canonical_name: "an-example",
++                                    "hello_world",
 +                                ),
-                             Dependency {
-                                 crate_id: CrateId(
-                                     0,
-                                 ),
-                                 name: CrateName(
-                                     "hello_world",
-                                 ),
-                                 prelude: true,
-                             },
++                                canonical_name: "hello-world",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                                "test",
 +                            ],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                                "test",
 +                            ],
 +                        ),
 +                        env: Env {
 +                            entries: {
 +                                "CARGO_PKG_LICENSE": "",
 +                                "CARGO_PKG_VERSION_MAJOR": "0",
 +                                "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
 +                                "CARGO_PKG_VERSION": "0.1.0",
 +                                "CARGO_PKG_AUTHORS": "",
 +                                "CARGO_CRATE_NAME": "hello_world",
 +                                "CARGO_PKG_LICENSE_FILE": "",
 +                                "CARGO_PKG_HOMEPAGE": "",
 +                                "CARGO_PKG_DESCRIPTION": "",
 +                                "CARGO_PKG_NAME": "hello-world",
 +                                "CARGO_PKG_VERSION_PATCH": "0",
 +                                "CARGO": "cargo",
 +                                "CARGO_PKG_REPOSITORY": "",
 +                                "CARGO_PKG_VERSION_MINOR": "1",
 +                                "CARGO_PKG_VERSION_PRE": "",
 +                            },
 +                        },
 +                        dependencies: [
-                         4,
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    4,
 +                                ),
 +                                name: CrateName(
 +                                    "libc",
 +                                ),
 +                                prelude: true,
 +                            },
 +                        ],
 +                        proc_macro: Err(
 +                            "crate has not (yet) been built",
 +                        ),
 +                        origin: CratesIo {
 +                            repo: None,
 +                        },
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
-                             5,
++                        1,
 +                    ): CrateData {
 +                        root_file_id: FileId(
-                         edition: Edition2015,
++                            2,
 +                        ),
-                             "0.2.98",
++                        edition: Edition2018,
 +                        version: Some(
-                                     "libc",
++                            "0.1.0",
 +                        ),
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
-                                 canonical_name: "libc",
++                                    "hello_world",
 +                                ),
-                                 "feature=default",
-                                 "feature=std",
++                                canonical_name: "hello-world",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
-                                 "feature=align",
-                                 "feature=const-extern-fn",
-                                 "feature=default",
-                                 "feature=extra_traits",
-                                 "feature=rustc-dep-of-std",
-                                 "feature=std",
-                                 "feature=use_std",
++                                "test",
 +                            ],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
-                                 "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
-                                 "CARGO_PKG_VERSION": "0.2.98",
++                                "test",
 +                            ],
 +                        ),
 +                        env: Env {
 +                            entries: {
 +                                "CARGO_PKG_LICENSE": "",
 +                                "CARGO_PKG_VERSION_MAJOR": "0",
-                                 "CARGO_CRATE_NAME": "libc",
++                                "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
++                                "CARGO_PKG_VERSION": "0.1.0",
 +                                "CARGO_PKG_AUTHORS": "",
-                                 "CARGO_PKG_NAME": "libc",
-                                 "CARGO_PKG_VERSION_PATCH": "98",
++                                "CARGO_CRATE_NAME": "hello_world",
 +                                "CARGO_PKG_LICENSE_FILE": "",
 +                                "CARGO_PKG_HOMEPAGE": "",
 +                                "CARGO_PKG_DESCRIPTION": "",
-                                 "CARGO_PKG_VERSION_MINOR": "2",
++                                "CARGO_PKG_NAME": "hello-world",
++                                "CARGO_PKG_VERSION_PATCH": "0",
 +                                "CARGO": "cargo",
 +                                "CARGO_PKG_REPOSITORY": "",
-                         dependencies: [],
++                                "CARGO_PKG_VERSION_MINOR": "1",
 +                                "CARGO_PKG_VERSION_PRE": "",
 +                            },
 +                        },
-                             repo: Some(
-                                 "https://github.com/rust-lang/libc",
-                             ),
++                        dependencies: [
++                            Dependency {
++                                crate_id: CrateId(
++                                    0,
++                                ),
++                                name: CrateName(
++                                    "hello_world",
++                                ),
++                                prelude: true,
++                            },
++                            Dependency {
++                                crate_id: CrateId(
++                                    4,
++                                ),
++                                name: CrateName(
++                                    "libc",
++                                ),
++                                prelude: true,
++                            },
++                        ],
 +                        proc_macro: Err(
 +                            "crate has not (yet) been built",
 +                        ),
 +                        origin: CratesIo {
-                         1,
++                            repo: None,
 +                        },
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
-                             2,
++                        2,
 +                    ): CrateData {
 +                        root_file_id: FileId(
-                                     "hello_world",
++                            3,
 +                        ),
 +                        edition: Edition2018,
 +                        version: Some(
 +                            "0.1.0",
 +                        ),
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
-                                 canonical_name: "hello-world",
++                                    "an_example",
 +                                ),
-                         10,
++                                canonical_name: "an-example",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                                "test",
 +                            ],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                                "test",
 +                            ],
 +                        ),
 +                        env: Env {
 +                            entries: {
 +                                "CARGO_PKG_LICENSE": "",
 +                                "CARGO_PKG_VERSION_MAJOR": "0",
 +                                "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
 +                                "CARGO_PKG_VERSION": "0.1.0",
 +                                "CARGO_PKG_AUTHORS": "",
 +                                "CARGO_CRATE_NAME": "hello_world",
 +                                "CARGO_PKG_LICENSE_FILE": "",
 +                                "CARGO_PKG_HOMEPAGE": "",
 +                                "CARGO_PKG_DESCRIPTION": "",
 +                                "CARGO_PKG_NAME": "hello-world",
 +                                "CARGO_PKG_VERSION_PATCH": "0",
 +                                "CARGO": "cargo",
 +                                "CARGO_PKG_REPOSITORY": "",
 +                                "CARGO_PKG_VERSION_MINOR": "1",
 +                                "CARGO_PKG_VERSION_PRE": "",
 +                            },
 +                        },
 +                        dependencies: [
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    0,
 +                                ),
 +                                name: CrateName(
 +                                    "hello_world",
 +                                ),
 +                                prelude: true,
 +                            },
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    4,
 +                                ),
 +                                name: CrateName(
 +                                    "libc",
 +                                ),
 +                                prelude: true,
 +                            },
 +                        ],
 +                        proc_macro: Err(
 +                            "crate has not (yet) been built",
 +                        ),
 +                        origin: CratesIo {
 +                            repo: None,
 +                        },
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
 +                        3,
 +                    ): CrateData {
 +                        root_file_id: FileId(
 +                            4,
 +                        ),
 +                        edition: Edition2018,
 +                        version: Some(
 +                            "0.1.0",
 +                        ),
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
 +                                    "it",
 +                                ),
 +                                canonical_name: "it",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                                "test",
 +                            ],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [
 +                                "debug_assertions",
 +                                "test",
 +                            ],
 +                        ),
 +                        env: Env {
 +                            entries: {
 +                                "CARGO_PKG_LICENSE": "",
 +                                "CARGO_PKG_VERSION_MAJOR": "0",
 +                                "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
 +                                "CARGO_PKG_VERSION": "0.1.0",
 +                                "CARGO_PKG_AUTHORS": "",
 +                                "CARGO_CRATE_NAME": "hello_world",
 +                                "CARGO_PKG_LICENSE_FILE": "",
 +                                "CARGO_PKG_HOMEPAGE": "",
 +                                "CARGO_PKG_DESCRIPTION": "",
 +                                "CARGO_PKG_NAME": "hello-world",
 +                                "CARGO_PKG_VERSION_PATCH": "0",
 +                                "CARGO": "cargo",
 +                                "CARGO_PKG_REPOSITORY": "",
 +                                "CARGO_PKG_VERSION_MINOR": "1",
 +                                "CARGO_PKG_VERSION_PRE": "",
 +                            },
 +                        },
 +                        dependencies: [
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    0,
 +                                ),
 +                                name: CrateName(
 +                                    "hello_world",
 +                                ),
 +                                prelude: true,
 +                            },
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    4,
 +                                ),
 +                                name: CrateName(
 +                                    "libc",
 +                                ),
 +                                prelude: true,
 +                            },
 +                        ],
 +                        proc_macro: Err(
 +                            "crate has not (yet) been built",
 +                        ),
 +                        origin: CratesIo {
 +                            repo: None,
 +                        },
 +                        is_proc_macro: false,
 +                    },
++                    CrateId(
++                        4,
++                    ): CrateData {
++                        root_file_id: FileId(
++                            5,
++                        ),
++                        edition: Edition2015,
++                        version: Some(
++                            "0.2.98",
++                        ),
++                        display_name: Some(
++                            CrateDisplayName {
++                                crate_name: CrateName(
++                                    "libc",
++                                ),
++                                canonical_name: "libc",
++                            },
++                        ),
++                        cfg_options: CfgOptions(
++                            [
++                                "debug_assertions",
++                                "feature=default",
++                                "feature=std",
++                            ],
++                        ),
++                        potential_cfg_options: CfgOptions(
++                            [
++                                "debug_assertions",
++                                "feature=align",
++                                "feature=const-extern-fn",
++                                "feature=default",
++                                "feature=extra_traits",
++                                "feature=rustc-dep-of-std",
++                                "feature=std",
++                                "feature=use_std",
++                            ],
++                        ),
++                        env: Env {
++                            entries: {
++                                "CARGO_PKG_LICENSE": "",
++                                "CARGO_PKG_VERSION_MAJOR": "0",
++                                "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
++                                "CARGO_PKG_VERSION": "0.2.98",
++                                "CARGO_PKG_AUTHORS": "",
++                                "CARGO_CRATE_NAME": "libc",
++                                "CARGO_PKG_LICENSE_FILE": "",
++                                "CARGO_PKG_HOMEPAGE": "",
++                                "CARGO_PKG_DESCRIPTION": "",
++                                "CARGO_PKG_NAME": "libc",
++                                "CARGO_PKG_VERSION_PATCH": "98",
++                                "CARGO": "cargo",
++                                "CARGO_PKG_REPOSITORY": "",
++                                "CARGO_PKG_VERSION_MINOR": "2",
++                                "CARGO_PKG_VERSION_PRE": "",
++                            },
++                        },
++                        dependencies: [],
++                        proc_macro: Err(
++                            "crate has not (yet) been built",
++                        ),
++                        origin: CratesIo {
++                            repo: Some(
++                                "https://github.com/rust-lang/libc",
++                            ),
++                        },
++                        is_proc_macro: false,
++                    },
 +                },
 +            }"#]],
 +    )
 +}
 +
 +#[test]
 +fn rust_project_hello_world_project_model() {
 +    let crate_graph = load_rust_project("hello-world-project.json");
 +    check_crate_graph(
 +        crate_graph,
 +        expect![[r#"
 +            CrateGraph {
 +                arena: {
 +                    CrateId(
 +                        0,
 +                    ): CrateData {
 +                        root_file_id: FileId(
 +                            1,
 +                        ),
 +                        edition: Edition2018,
 +                        version: None,
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
 +                                    "alloc",
 +                                ),
 +                                canonical_name: "alloc",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        env: Env {
 +                            entries: {},
 +                        },
 +                        dependencies: [
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    1,
 +                                ),
 +                                name: CrateName(
 +                                    "core",
 +                                ),
 +                                prelude: true,
 +                            },
 +                        ],
 +                        proc_macro: Err(
 +                            "no proc macro loaded for sysroot crate",
 +                        ),
 +                        origin: Lang(
 +                            Alloc,
 +                        ),
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
-                             11,
++                        1,
 +                    ): CrateData {
 +                        root_file_id: FileId(
-                                     "unwind",
++                            2,
 +                        ),
 +                        edition: Edition2018,
 +                        version: None,
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
-                                 canonical_name: "unwind",
++                                    "core",
 +                                ),
-                         7,
++                                canonical_name: "core",
++                            },
++                        ),
++                        cfg_options: CfgOptions(
++                            [],
++                        ),
++                        potential_cfg_options: CfgOptions(
++                            [],
++                        ),
++                        env: Env {
++                            entries: {},
++                        },
++                        dependencies: [],
++                        proc_macro: Err(
++                            "no proc macro loaded for sysroot crate",
++                        ),
++                        origin: Lang(
++                            Core,
++                        ),
++                        is_proc_macro: false,
++                    },
++                    CrateId(
++                        2,
++                    ): CrateData {
++                        root_file_id: FileId(
++                            3,
++                        ),
++                        edition: Edition2018,
++                        version: None,
++                        display_name: Some(
++                            CrateDisplayName {
++                                crate_name: CrateName(
++                                    "panic_abort",
++                                ),
++                                canonical_name: "panic_abort",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        env: Env {
 +                            entries: {},
 +                        },
 +                        dependencies: [],
 +                        proc_macro: Err(
 +                            "no proc macro loaded for sysroot crate",
 +                        ),
 +                        origin: Lang(
 +                            Other,
 +                        ),
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
-                             8,
++                        3,
 +                    ): CrateData {
 +                        root_file_id: FileId(
-                                     "std_detect",
++                            4,
 +                        ),
 +                        edition: Edition2018,
 +                        version: None,
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
-                                 canonical_name: "std_detect",
++                                    "panic_unwind",
 +                                ),
-                         1,
++                                canonical_name: "panic_unwind",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        env: Env {
 +                            entries: {},
 +                        },
 +                        dependencies: [],
 +                        proc_macro: Err(
 +                            "no proc macro loaded for sysroot crate",
 +                        ),
 +                        origin: Lang(
 +                            Other,
 +                        ),
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
 +                        4,
 +                    ): CrateData {
 +                        root_file_id: FileId(
 +                            5,
 +                        ),
 +                        edition: Edition2018,
 +                        version: None,
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
 +                                    "proc_macro",
 +                                ),
 +                                canonical_name: "proc_macro",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        env: Env {
 +                            entries: {},
 +                        },
 +                        dependencies: [
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    6,
 +                                ),
 +                                name: CrateName(
 +                                    "std",
 +                                ),
 +                                prelude: true,
 +                            },
 +                        ],
 +                        proc_macro: Err(
 +                            "no proc macro loaded for sysroot crate",
 +                        ),
 +                        origin: Lang(
 +                            Other,
 +                        ),
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
-                             2,
++                        5,
 +                    ): CrateData {
 +                        root_file_id: FileId(
-                                     "core",
++                            6,
 +                        ),
 +                        edition: Edition2018,
 +                        version: None,
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
-                                 canonical_name: "core",
++                                    "profiler_builtins",
 +                                ),
-                             Core,
++                                canonical_name: "profiler_builtins",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        env: Env {
 +                            entries: {},
 +                        },
 +                        dependencies: [],
 +                        proc_macro: Err(
 +                            "no proc macro loaded for sysroot crate",
 +                        ),
 +                        origin: Lang(
-                         11,
++                            Other,
 +                        ),
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
-                             12,
++                        6,
 +                    ): CrateData {
 +                        root_file_id: FileId(
-                                     "hello_world",
++                            7,
 +                        ),
 +                        edition: Edition2018,
 +                        version: None,
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
-                                 canonical_name: "hello_world",
++                                    "std",
 +                                ),
-                                     0,
++                                canonical_name: "std",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        env: Env {
 +                            entries: {},
 +                        },
 +                        dependencies: [
++                            Dependency {
++                                crate_id: CrateId(
++                                    0,
++                                ),
++                                name: CrateName(
++                                    "alloc",
++                                ),
++                                prelude: true,
++                            },
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    1,
 +                                ),
 +                                name: CrateName(
 +                                    "core",
 +                                ),
 +                                prelude: true,
 +                            },
 +                            Dependency {
 +                                crate_id: CrateId(
-                                     "alloc",
++                                    2,
 +                                ),
 +                                name: CrateName(
-                                     6,
++                                    "panic_abort",
 +                                ),
 +                                prelude: true,
 +                            },
 +                            Dependency {
 +                                crate_id: CrateId(
-                                     "std",
++                                    3,
 +                                ),
 +                                name: CrateName(
-                                 prelude: false,
++                                    "panic_unwind",
++                                ),
++                                prelude: true,
++                            },
++                            Dependency {
++                                crate_id: CrateId(
++                                    5,
++                                ),
++                                name: CrateName(
++                                    "profiler_builtins",
++                                ),
++                                prelude: true,
++                            },
++                            Dependency {
++                                crate_id: CrateId(
++                                    7,
++                                ),
++                                name: CrateName(
++                                    "std_detect",
++                                ),
++                                prelude: true,
++                            },
++                            Dependency {
++                                crate_id: CrateId(
++                                    8,
++                                ),
++                                name: CrateName(
++                                    "term",
 +                                ),
 +                                prelude: true,
 +                            },
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    9,
 +                                ),
 +                                name: CrateName(
 +                                    "test",
 +                                ),
-                             "no proc macro dylib present",
++                                prelude: true,
++                            },
++                            Dependency {
++                                crate_id: CrateId(
++                                    10,
++                                ),
++                                name: CrateName(
++                                    "unwind",
++                                ),
++                                prelude: true,
 +                            },
 +                        ],
 +                        proc_macro: Err(
-                         origin: CratesIo {
-                             repo: None,
-                         },
++                            "no proc macro loaded for sysroot crate",
++                        ),
++                        origin: Lang(
++                            Std,
 +                        ),
-                         8,
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
-                             9,
++                        7,
 +                    ): CrateData {
 +                        root_file_id: FileId(
-                                     "term",
++                            8,
 +                        ),
 +                        edition: Edition2018,
 +                        version: None,
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
-                                 canonical_name: "term",
++                                    "std_detect",
 +                                ),
-                         5,
++                                canonical_name: "std_detect",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        env: Env {
 +                            entries: {},
 +                        },
 +                        dependencies: [],
 +                        proc_macro: Err(
 +                            "no proc macro loaded for sysroot crate",
 +                        ),
 +                        origin: Lang(
 +                            Other,
 +                        ),
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
-                             6,
++                        8,
 +                    ): CrateData {
 +                        root_file_id: FileId(
-                                     "profiler_builtins",
++                            9,
 +                        ),
 +                        edition: Edition2018,
 +                        version: None,
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
-                                 canonical_name: "profiler_builtins",
++                                    "term",
 +                                ),
-                         2,
++                                canonical_name: "term",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        env: Env {
 +                            entries: {},
 +                        },
 +                        dependencies: [],
 +                        proc_macro: Err(
 +                            "no proc macro loaded for sysroot crate",
 +                        ),
 +                        origin: Lang(
 +                            Other,
 +                        ),
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
-                             3,
++                        9,
 +                    ): CrateData {
 +                        root_file_id: FileId(
-                                     "panic_abort",
++                            10,
 +                        ),
 +                        edition: Edition2018,
 +                        version: None,
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
-                                 canonical_name: "panic_abort",
++                                    "test",
 +                                ),
-                             Other,
++                                canonical_name: "test",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        env: Env {
 +                            entries: {},
 +                        },
 +                        dependencies: [],
 +                        proc_macro: Err(
 +                            "no proc macro loaded for sysroot crate",
 +                        ),
 +                        origin: Lang(
-                         9,
++                            Test,
 +                        ),
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
-                             10,
++                        10,
 +                    ): CrateData {
 +                        root_file_id: FileId(
-                                     "test",
++                            11,
 +                        ),
 +                        edition: Edition2018,
 +                        version: None,
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
-                                 canonical_name: "test",
++                                    "unwind",
 +                                ),
-                             Test,
++                                canonical_name: "unwind",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        env: Env {
 +                            entries: {},
 +                        },
 +                        dependencies: [],
 +                        proc_macro: Err(
 +                            "no proc macro loaded for sysroot crate",
 +                        ),
 +                        origin: Lang(
-                         6,
++                            Other,
 +                        ),
 +                        is_proc_macro: false,
 +                    },
 +                    CrateId(
-                             7,
++                        11,
 +                    ): CrateData {
 +                        root_file_id: FileId(
-                                     "std",
++                            12,
 +                        ),
 +                        edition: Edition2018,
 +                        version: None,
 +                        display_name: Some(
 +                            CrateDisplayName {
 +                                crate_name: CrateName(
-                                 canonical_name: "std",
++                                    "hello_world",
 +                                ),
-                             Dependency {
-                                 crate_id: CrateId(
-                                     0,
-                                 ),
-                                 name: CrateName(
-                                     "alloc",
-                                 ),
-                                 prelude: true,
-                             },
++                                canonical_name: "hello_world",
 +                            },
 +                        ),
 +                        cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        potential_cfg_options: CfgOptions(
 +                            [],
 +                        ),
 +                        env: Env {
 +                            entries: {},
 +                        },
 +                        dependencies: [
-                                     2,
-                                 ),
-                                 name: CrateName(
-                                     "panic_abort",
-                                 ),
-                                 prelude: true,
-                             },
-                             Dependency {
-                                 crate_id: CrateId(
-                                     3,
-                                 ),
-                                 name: CrateName(
-                                     "panic_unwind",
-                                 ),
-                                 prelude: true,
-                             },
-                             Dependency {
-                                 crate_id: CrateId(
-                                     5,
-                                 ),
-                                 name: CrateName(
-                                     "profiler_builtins",
-                                 ),
-                                 prelude: true,
-                             },
-                             Dependency {
-                                 crate_id: CrateId(
-                                     7,
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    1,
 +                                ),
 +                                name: CrateName(
 +                                    "core",
 +                                ),
 +                                prelude: true,
 +                            },
 +                            Dependency {
 +                                crate_id: CrateId(
-                                     "std_detect",
++                                    0,
 +                                ),
 +                                name: CrateName(
-                                     8,
++                                    "alloc",
 +                                ),
 +                                prelude: true,
 +                            },
 +                            Dependency {
 +                                crate_id: CrateId(
-                                     "term",
++                                    6,
 +                                ),
 +                                name: CrateName(
-                                 prelude: true,
-                             },
-                             Dependency {
-                                 crate_id: CrateId(
-                                     10,
-                                 ),
-                                 name: CrateName(
-                                     "unwind",
-                                 ),
-                                 prelude: true,
++                                    "std",
 +                                ),
 +                                prelude: true,
 +                            },
 +                            Dependency {
 +                                crate_id: CrateId(
 +                                    9,
 +                                ),
 +                                name: CrateName(
 +                                    "test",
 +                                ),
-                             "no proc macro loaded for sysroot crate",
-                         ),
-                         origin: Lang(
-                             Std,
-                         ),
-                         is_proc_macro: false,
-                     },
-                     CrateId(
-                         3,
-                     ): CrateData {
-                         root_file_id: FileId(
-                             4,
-                         ),
-                         edition: Edition2018,
-                         version: None,
-                         display_name: Some(
-                             CrateDisplayName {
-                                 crate_name: CrateName(
-                                     "panic_unwind",
-                                 ),
-                                 canonical_name: "panic_unwind",
-                             },
-                         ),
-                         cfg_options: CfgOptions(
-                             [],
-                         ),
-                         potential_cfg_options: CfgOptions(
-                             [],
++                                prelude: false,
 +                            },
 +                        ],
 +                        proc_macro: Err(
-                         env: Env {
-                             entries: {},
++                            "no proc macro dylib present",
 +                        ),
-                         dependencies: [],
-                         proc_macro: Err(
-                             "no proc macro loaded for sysroot crate",
-                         ),
-                         origin: Lang(
-                             Other,
-                         ),
++                        origin: CratesIo {
++                            repo: None,
 +                        },
 +                        is_proc_macro: false,
 +                    },
 +                },
 +            }"#]],
 +    );
 +}
 +
 +#[test]
 +fn rust_project_is_proc_macro_has_proc_macro_dep() {
 +    let crate_graph = load_rust_project("is-proc-macro-project.json");
 +    // Since the project only defines one crate (outside the sysroot crates),
 +    // it should be the one with the biggest Id.
 +    let crate_id = crate_graph.iter().max().unwrap();
 +    let crate_data = &crate_graph[crate_id];
 +    // Assert that the project crate with `is_proc_macro` has a dependency
 +    // on the proc_macro sysroot crate.
 +    crate_data.dependencies.iter().find(|&dep| dep.name.deref() == "proc_macro").unwrap();
 +}
index 8d6f50f5587c5ad0e8248702e66ba69c9187eced,0000000000000000000000000000000000000000..818bbed6af2ec73600d2bde661d2f420d48969f7
mode 100644,000000..100644
--- /dev/null
@@@ -1,1042 -1,0 +1,1042 @@@
- use stdx::always;
 +//! Handles lowering of build-system specific workspace information (`cargo
 +//! metadata` or `rust-project.json`) into representation stored in the salsa
 +//! database -- `CrateGraph`.
 +
 +use std::{collections::VecDeque, fmt, fs, process::Command};
 +
 +use anyhow::{format_err, Context, Result};
 +use base_db::{
 +    CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env,
 +    FileId, LangCrateOrigin, ProcMacroLoadResult,
 +};
 +use cfg::{CfgDiff, CfgOptions};
 +use paths::{AbsPath, AbsPathBuf};
 +use rustc_hash::{FxHashMap, FxHashSet};
 +use semver::Version;
-     let crates: FxHashMap<CrateId, CrateId> = project
++use stdx::{always, hash::NoHashHashMap};
 +
 +use crate::{
 +    build_scripts::BuildScriptOutput,
 +    cargo_workspace::{DepKind, PackageData, RustcSource},
 +    cfg_flag::CfgFlag,
 +    rustc_cfg,
 +    sysroot::SysrootCrate,
 +    utf8_stdout, CargoConfig, CargoWorkspace, ManifestPath, ProjectJson, ProjectManifest, Sysroot,
 +    TargetKind, WorkspaceBuildScripts,
 +};
 +
 +/// A set of cfg-overrides per crate.
 +///
 +/// `Wildcard(..)` is useful e.g. disabling `#[cfg(test)]` on all crates,
 +/// without having to first obtain a list of all crates.
 +#[derive(Debug, Clone, Eq, PartialEq)]
 +pub enum CfgOverrides {
 +    /// A single global set of overrides matching all crates.
 +    Wildcard(CfgDiff),
 +    /// A set of overrides matching specific crates.
 +    Selective(FxHashMap<String, CfgDiff>),
 +}
 +
 +impl Default for CfgOverrides {
 +    fn default() -> Self {
 +        Self::Selective(FxHashMap::default())
 +    }
 +}
 +
 +impl CfgOverrides {
 +    pub fn len(&self) -> usize {
 +        match self {
 +            CfgOverrides::Wildcard(_) => 1,
 +            CfgOverrides::Selective(hash_map) => hash_map.len(),
 +        }
 +    }
 +}
 +
 +/// `PackageRoot` describes a package root folder.
 +/// Which may be an external dependency, or a member of
 +/// the current workspace.
 +#[derive(Debug, Clone, Eq, PartialEq, Hash)]
 +pub struct PackageRoot {
 +    /// Is from the local filesystem and may be edited
 +    pub is_local: bool,
 +    pub include: Vec<AbsPathBuf>,
 +    pub exclude: Vec<AbsPathBuf>,
 +}
 +
 +#[derive(Clone, Eq, PartialEq)]
 +pub enum ProjectWorkspace {
 +    /// Project workspace was discovered by running `cargo metadata` and `rustc --print sysroot`.
 +    Cargo {
 +        cargo: CargoWorkspace,
 +        build_scripts: WorkspaceBuildScripts,
 +        sysroot: Option<Sysroot>,
 +        rustc: Option<CargoWorkspace>,
 +        /// Holds cfg flags for the current target. We get those by running
 +        /// `rustc --print cfg`.
 +        ///
 +        /// FIXME: make this a per-crate map, as, eg, build.rs might have a
 +        /// different target.
 +        rustc_cfg: Vec<CfgFlag>,
 +        cfg_overrides: CfgOverrides,
 +        toolchain: Option<Version>,
 +    },
 +    /// Project workspace was manually specified using a `rust-project.json` file.
 +    Json { project: ProjectJson, sysroot: Option<Sysroot>, rustc_cfg: Vec<CfgFlag> },
 +
 +    // FIXME: The primary limitation of this approach is that the set of detached files needs to be fixed at the beginning.
 +    // That's not the end user experience we should strive for.
 +    // Ideally, you should be able to just open a random detached file in existing cargo projects, and get the basic features working.
 +    // That needs some changes on the salsa-level though.
 +    // In particular, we should split the unified CrateGraph (which currently has maximal durability) into proper crate graph, and a set of ad hoc roots (with minimal durability).
 +    // Then, we need to hide the graph behind the queries such that most queries look only at the proper crate graph, and fall back to ad hoc roots only if there's no results.
 +    // After this, we should be able to tweak the logic in reload.rs to add newly opened files, which don't belong to any existing crates, to the set of the detached files.
 +    // //
 +    /// Project with a set of disjoint files, not belonging to any particular workspace.
 +    /// Backed by basic sysroot crates for basic completion and highlighting.
 +    DetachedFiles { files: Vec<AbsPathBuf>, sysroot: Sysroot, rustc_cfg: Vec<CfgFlag> },
 +}
 +
 +impl fmt::Debug for ProjectWorkspace {
 +    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 +        // Make sure this isn't too verbose.
 +        match self {
 +            ProjectWorkspace::Cargo {
 +                cargo,
 +                build_scripts: _,
 +                sysroot,
 +                rustc,
 +                rustc_cfg,
 +                cfg_overrides,
 +                toolchain,
 +            } => f
 +                .debug_struct("Cargo")
 +                .field("root", &cargo.workspace_root().file_name())
 +                .field("n_packages", &cargo.packages().len())
 +                .field("sysroot", &sysroot.is_some())
 +                .field(
 +                    "n_rustc_compiler_crates",
 +                    &rustc.as_ref().map_or(0, |rc| rc.packages().len()),
 +                )
 +                .field("n_rustc_cfg", &rustc_cfg.len())
 +                .field("n_cfg_overrides", &cfg_overrides.len())
 +                .field("toolchain", &toolchain)
 +                .finish(),
 +            ProjectWorkspace::Json { project, sysroot, rustc_cfg } => {
 +                let mut debug_struct = f.debug_struct("Json");
 +                debug_struct.field("n_crates", &project.n_crates());
 +                if let Some(sysroot) = sysroot {
 +                    debug_struct.field("n_sysroot_crates", &sysroot.crates().len());
 +                }
 +                debug_struct.field("n_rustc_cfg", &rustc_cfg.len());
 +                debug_struct.finish()
 +            }
 +            ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => f
 +                .debug_struct("DetachedFiles")
 +                .field("n_files", &files.len())
 +                .field("n_sysroot_crates", &sysroot.crates().len())
 +                .field("n_rustc_cfg", &rustc_cfg.len())
 +                .finish(),
 +        }
 +    }
 +}
 +
 +impl ProjectWorkspace {
 +    pub fn load(
 +        manifest: ProjectManifest,
 +        config: &CargoConfig,
 +        progress: &dyn Fn(String),
 +    ) -> Result<ProjectWorkspace> {
 +        let res = match manifest {
 +            ProjectManifest::ProjectJson(project_json) => {
 +                let file = fs::read_to_string(&project_json).with_context(|| {
 +                    format!("Failed to read json file {}", project_json.display())
 +                })?;
 +                let data = serde_json::from_str(&file).with_context(|| {
 +                    format!("Failed to deserialize json file {}", project_json.display())
 +                })?;
 +                let project_location = project_json.parent().to_path_buf();
 +                let project_json = ProjectJson::new(&project_location, data);
 +                ProjectWorkspace::load_inline(project_json, config.target.as_deref())?
 +            }
 +            ProjectManifest::CargoToml(cargo_toml) => {
 +                let cargo_version = utf8_stdout({
 +                    let mut cmd = Command::new(toolchain::cargo());
 +                    cmd.arg("--version");
 +                    cmd
 +                })?;
 +                let toolchain = cargo_version
 +                    .get("cargo ".len()..)
 +                    .and_then(|it| Version::parse(it.split_whitespace().next()?).ok());
 +
 +                let meta = CargoWorkspace::fetch_metadata(
 +                    &cargo_toml,
 +                    cargo_toml.parent(),
 +                    config,
 +                    progress,
 +                )
 +                .with_context(|| {
 +                    format!(
 +                        "Failed to read Cargo metadata from Cargo.toml file {}, {:?}",
 +                        cargo_toml.display(),
 +                        toolchain
 +                    )
 +                })?;
 +                let cargo = CargoWorkspace::new(meta);
 +
 +                let sysroot = if config.no_sysroot {
 +                    None
 +                } else {
 +                    Some(Sysroot::discover(cargo_toml.parent()).with_context(|| {
 +                        format!(
 +                            "Failed to find sysroot for Cargo.toml file {}. Is rust-src installed?",
 +                            cargo_toml.display()
 +                        )
 +                    })?)
 +                };
 +
 +                let rustc_dir = match &config.rustc_source {
 +                    Some(RustcSource::Path(path)) => ManifestPath::try_from(path.clone()).ok(),
 +                    Some(RustcSource::Discover) => Sysroot::discover_rustc(&cargo_toml),
 +                    None => None,
 +                };
 +
 +                let rustc = match rustc_dir {
 +                    Some(rustc_dir) => Some({
 +                        let meta = CargoWorkspace::fetch_metadata(
 +                            &rustc_dir,
 +                            cargo_toml.parent(),
 +                            config,
 +                            progress,
 +                        )
 +                        .with_context(|| {
 +                            "Failed to read Cargo metadata for Rust sources".to_string()
 +                        })?;
 +                        CargoWorkspace::new(meta)
 +                    }),
 +                    None => None,
 +                };
 +
 +                let rustc_cfg = rustc_cfg::get(Some(&cargo_toml), config.target.as_deref());
 +
 +                let cfg_overrides = config.cfg_overrides();
 +                ProjectWorkspace::Cargo {
 +                    cargo,
 +                    build_scripts: WorkspaceBuildScripts::default(),
 +                    sysroot,
 +                    rustc,
 +                    rustc_cfg,
 +                    cfg_overrides,
 +                    toolchain,
 +                }
 +            }
 +        };
 +
 +        Ok(res)
 +    }
 +
 +    pub fn load_inline(
 +        project_json: ProjectJson,
 +        target: Option<&str>,
 +    ) -> Result<ProjectWorkspace> {
 +        let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) {
 +            (Some(sysroot), Some(sysroot_src)) => Some(Sysroot::load(sysroot, sysroot_src)?),
 +            (Some(sysroot), None) => {
 +                // assume sysroot is structured like rustup's and guess `sysroot_src`
 +                let sysroot_src =
 +                    sysroot.join("lib").join("rustlib").join("src").join("rust").join("library");
 +
 +                Some(Sysroot::load(sysroot, sysroot_src)?)
 +            }
 +            (None, Some(sysroot_src)) => {
 +                // assume sysroot is structured like rustup's and guess `sysroot`
 +                let mut sysroot = sysroot_src.clone();
 +                for _ in 0..5 {
 +                    sysroot.pop();
 +                }
 +                Some(Sysroot::load(sysroot, sysroot_src)?)
 +            }
 +            (None, None) => None,
 +        };
 +
 +        let rustc_cfg = rustc_cfg::get(None, target);
 +        Ok(ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg })
 +    }
 +
 +    pub fn load_detached_files(detached_files: Vec<AbsPathBuf>) -> Result<ProjectWorkspace> {
 +        let sysroot = Sysroot::discover(
 +            detached_files
 +                .first()
 +                .and_then(|it| it.parent())
 +                .ok_or_else(|| format_err!("No detached files to load"))?,
 +        )?;
 +        let rustc_cfg = rustc_cfg::get(None, None);
 +        Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg })
 +    }
 +
 +    pub fn run_build_scripts(
 +        &self,
 +        config: &CargoConfig,
 +        progress: &dyn Fn(String),
 +    ) -> Result<WorkspaceBuildScripts> {
 +        match self {
 +            ProjectWorkspace::Cargo { cargo, toolchain, .. } => {
 +                WorkspaceBuildScripts::run(config, cargo, progress, toolchain).with_context(|| {
 +                    format!("Failed to run build scripts for {}", &cargo.workspace_root().display())
 +                })
 +            }
 +            ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFiles { .. } => {
 +                Ok(WorkspaceBuildScripts::default())
 +            }
 +        }
 +    }
 +
 +    pub fn set_build_scripts(&mut self, bs: WorkspaceBuildScripts) {
 +        match self {
 +            ProjectWorkspace::Cargo { build_scripts, .. } => *build_scripts = bs,
 +            _ => {
 +                always!(bs == WorkspaceBuildScripts::default());
 +            }
 +        }
 +    }
 +
 +    /// Returns the roots for the current `ProjectWorkspace`
 +    /// The return type contains the path and whether or not
 +    /// the root is a member of the current workspace
 +    pub fn to_roots(&self) -> Vec<PackageRoot> {
 +        match self {
 +            ProjectWorkspace::Json { project, sysroot, rustc_cfg: _ } => project
 +                .crates()
 +                .map(|(_, krate)| PackageRoot {
 +                    is_local: krate.is_workspace_member,
 +                    include: krate.include.clone(),
 +                    exclude: krate.exclude.clone(),
 +                })
 +                .collect::<FxHashSet<_>>()
 +                .into_iter()
 +                .chain(sysroot.as_ref().into_iter().flat_map(|sysroot| {
 +                    sysroot.crates().map(move |krate| PackageRoot {
 +                        is_local: false,
 +                        include: vec![sysroot[krate].root.parent().to_path_buf()],
 +                        exclude: Vec::new(),
 +                    })
 +                }))
 +                .collect::<Vec<_>>(),
 +            ProjectWorkspace::Cargo {
 +                cargo,
 +                sysroot,
 +                rustc,
 +                rustc_cfg: _,
 +                cfg_overrides: _,
 +                build_scripts,
 +                toolchain: _,
 +            } => {
 +                cargo
 +                    .packages()
 +                    .map(|pkg| {
 +                        let is_local = cargo[pkg].is_local;
 +                        let pkg_root = cargo[pkg].manifest.parent().to_path_buf();
 +
 +                        let mut include = vec![pkg_root.clone()];
 +                        let out_dir =
 +                            build_scripts.get_output(pkg).and_then(|it| it.out_dir.clone());
 +                        include.extend(out_dir);
 +
 +                        // In case target's path is manually set in Cargo.toml to be
 +                        // outside the package root, add its parent as an extra include.
 +                        // An example of this situation would look like this:
 +                        //
 +                        // ```toml
 +                        // [lib]
 +                        // path = "../../src/lib.rs"
 +                        // ```
 +                        let extra_targets = cargo[pkg]
 +                            .targets
 +                            .iter()
 +                            .filter(|&&tgt| cargo[tgt].kind == TargetKind::Lib)
 +                            .filter_map(|&tgt| cargo[tgt].root.parent())
 +                            .map(|tgt| tgt.normalize().to_path_buf())
 +                            .filter(|path| !path.starts_with(&pkg_root));
 +                        include.extend(extra_targets);
 +
 +                        let mut exclude = vec![pkg_root.join(".git")];
 +                        if is_local {
 +                            exclude.push(pkg_root.join("target"));
 +                        } else {
 +                            exclude.push(pkg_root.join("tests"));
 +                            exclude.push(pkg_root.join("examples"));
 +                            exclude.push(pkg_root.join("benches"));
 +                        }
 +                        PackageRoot { is_local, include, exclude }
 +                    })
 +                    .chain(sysroot.iter().map(|sysroot| PackageRoot {
 +                        is_local: false,
 +                        include: vec![sysroot.src_root().to_path_buf()],
 +                        exclude: Vec::new(),
 +                    }))
 +                    .chain(rustc.iter().flat_map(|rustc| {
 +                        rustc.packages().map(move |krate| PackageRoot {
 +                            is_local: false,
 +                            include: vec![rustc[krate].manifest.parent().to_path_buf()],
 +                            exclude: Vec::new(),
 +                        })
 +                    }))
 +                    .collect()
 +            }
 +            ProjectWorkspace::DetachedFiles { files, sysroot, .. } => files
 +                .iter()
 +                .map(|detached_file| PackageRoot {
 +                    is_local: true,
 +                    include: vec![detached_file.clone()],
 +                    exclude: Vec::new(),
 +                })
 +                .chain(sysroot.crates().map(|krate| PackageRoot {
 +                    is_local: false,
 +                    include: vec![sysroot[krate].root.parent().to_path_buf()],
 +                    exclude: Vec::new(),
 +                }))
 +                .collect(),
 +        }
 +    }
 +
 +    pub fn n_packages(&self) -> usize {
 +        match self {
 +            ProjectWorkspace::Json { project, .. } => project.n_crates(),
 +            ProjectWorkspace::Cargo { cargo, sysroot, rustc, .. } => {
 +                let rustc_package_len = rustc.as_ref().map_or(0, |it| it.packages().len());
 +                let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.crates().len());
 +                cargo.packages().len() + sysroot_package_len + rustc_package_len
 +            }
 +            ProjectWorkspace::DetachedFiles { sysroot, files, .. } => {
 +                sysroot.crates().len() + files.len()
 +            }
 +        }
 +    }
 +
 +    pub fn to_crate_graph(
 +        &self,
 +        load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
 +        load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
 +    ) -> CrateGraph {
 +        let _p = profile::span("ProjectWorkspace::to_crate_graph");
 +
 +        let mut crate_graph = match self {
 +            ProjectWorkspace::Json { project, sysroot, rustc_cfg } => project_json_to_crate_graph(
 +                rustc_cfg.clone(),
 +                load_proc_macro,
 +                load,
 +                project,
 +                sysroot,
 +            ),
 +            ProjectWorkspace::Cargo {
 +                cargo,
 +                sysroot,
 +                rustc,
 +                rustc_cfg,
 +                cfg_overrides,
 +                build_scripts,
 +                toolchain: _,
 +            } => cargo_to_crate_graph(
 +                rustc_cfg.clone(),
 +                cfg_overrides,
 +                load_proc_macro,
 +                load,
 +                cargo,
 +                build_scripts,
 +                sysroot.as_ref(),
 +                rustc,
 +            ),
 +            ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => {
 +                detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot)
 +            }
 +        };
 +        if crate_graph.patch_cfg_if() {
 +            tracing::debug!("Patched std to depend on cfg-if")
 +        } else {
 +            tracing::debug!("Did not patch std to depend on cfg-if")
 +        }
 +        crate_graph
 +    }
 +}
 +
 +fn project_json_to_crate_graph(
 +    rustc_cfg: Vec<CfgFlag>,
 +    load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
 +    load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
 +    project: &ProjectJson,
 +    sysroot: &Option<Sysroot>,
 +) -> CrateGraph {
 +    let mut crate_graph = CrateGraph::default();
 +    let sysroot_deps = sysroot
 +        .as_ref()
 +        .map(|sysroot| sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load));
 +
 +    let mut cfg_cache: FxHashMap<&str, Vec<CfgFlag>> = FxHashMap::default();
++    let crates: NoHashHashMap<CrateId, CrateId> = project
 +        .crates()
 +        .filter_map(|(crate_id, krate)| {
 +            let file_path = &krate.root_module;
 +            let file_id = load(file_path)?;
 +            Some((crate_id, krate, file_id))
 +        })
 +        .map(|(crate_id, krate, file_id)| {
 +            let env = krate.env.clone().into_iter().collect();
 +            let proc_macro = match krate.proc_macro_dylib_path.clone() {
 +                Some(it) => load_proc_macro(
 +                    krate.display_name.as_ref().map(|it| it.canonical_name()).unwrap_or(""),
 +                    &it,
 +                ),
 +                None => Err("no proc macro dylib present".into()),
 +            };
 +
 +            let target_cfgs = match krate.target.as_deref() {
 +                Some(target) => {
 +                    cfg_cache.entry(target).or_insert_with(|| rustc_cfg::get(None, Some(target)))
 +                }
 +                None => &rustc_cfg,
 +            };
 +
 +            let mut cfg_options = CfgOptions::default();
 +            cfg_options.extend(target_cfgs.iter().chain(krate.cfg.iter()).cloned());
 +            (
 +                crate_id,
 +                crate_graph.add_crate_root(
 +                    file_id,
 +                    krate.edition,
 +                    krate.display_name.clone(),
 +                    krate.version.clone(),
 +                    cfg_options.clone(),
 +                    cfg_options,
 +                    env,
 +                    proc_macro,
 +                    krate.is_proc_macro,
 +                    if krate.display_name.is_some() {
 +                        CrateOrigin::CratesIo { repo: krate.repository.clone() }
 +                    } else {
 +                        CrateOrigin::CratesIo { repo: None }
 +                    },
 +                ),
 +            )
 +        })
 +        .collect();
 +
 +    for (from, krate) in project.crates() {
 +        if let Some(&from) = crates.get(&from) {
 +            if let Some((public_deps, libproc_macro)) = &sysroot_deps {
 +                public_deps.add(from, &mut crate_graph);
 +                if krate.is_proc_macro {
 +                    if let Some(proc_macro) = libproc_macro {
 +                        add_dep(
 +                            &mut crate_graph,
 +                            from,
 +                            CrateName::new("proc_macro").unwrap(),
 +                            *proc_macro,
 +                        );
 +                    }
 +                }
 +            }
 +
 +            for dep in &krate.deps {
 +                if let Some(&to) = crates.get(&dep.crate_id) {
 +                    add_dep(&mut crate_graph, from, dep.name.clone(), to)
 +                }
 +            }
 +        }
 +    }
 +    crate_graph
 +}
 +
 +fn cargo_to_crate_graph(
 +    rustc_cfg: Vec<CfgFlag>,
 +    override_cfg: &CfgOverrides,
 +    load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
 +    load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
 +    cargo: &CargoWorkspace,
 +    build_scripts: &WorkspaceBuildScripts,
 +    sysroot: Option<&Sysroot>,
 +    rustc: &Option<CargoWorkspace>,
 +) -> CrateGraph {
 +    let _p = profile::span("cargo_to_crate_graph");
 +    let mut crate_graph = CrateGraph::default();
 +    let (public_deps, libproc_macro) = match sysroot {
 +        Some(sysroot) => sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load),
 +        None => (SysrootPublicDeps::default(), None),
 +    };
 +
 +    let mut cfg_options = CfgOptions::default();
 +    cfg_options.extend(rustc_cfg);
 +
 +    let mut pkg_to_lib_crate = FxHashMap::default();
 +
 +    cfg_options.insert_atom("debug_assertions".into());
 +
 +    let mut pkg_crates = FxHashMap::default();
 +    // Does any crate signal to rust-analyzer that they need the rustc_private crates?
 +    let mut has_private = false;
 +    // Next, create crates for each package, target pair
 +    for pkg in cargo.packages() {
 +        let mut cfg_options = cfg_options.clone();
 +
 +        let overrides = match override_cfg {
 +            CfgOverrides::Wildcard(cfg_diff) => Some(cfg_diff),
 +            CfgOverrides::Selective(cfg_overrides) => cfg_overrides.get(&cargo[pkg].name),
 +        };
 +
 +        // Add test cfg for local crates
 +        if cargo[pkg].is_local {
 +            cfg_options.insert_atom("test".into());
 +        }
 +
 +        if let Some(overrides) = overrides {
 +            // FIXME: this is sort of a hack to deal with #![cfg(not(test))] vanishing such as seen
 +            // in ed25519_dalek (#7243), and libcore (#9203) (although you only hit that one while
 +            // working on rust-lang/rust as that's the only time it appears outside sysroot).
 +            //
 +            // A more ideal solution might be to reanalyze crates based on where the cursor is and
 +            // figure out the set of cfgs that would have to apply to make it active.
 +
 +            cfg_options.apply_diff(overrides.clone());
 +        };
 +
 +        has_private |= cargo[pkg].metadata.rustc_private;
 +        let mut lib_tgt = None;
 +        for &tgt in cargo[pkg].targets.iter() {
 +            if cargo[tgt].kind != TargetKind::Lib && !cargo[pkg].is_member {
 +                // For non-workspace-members, Cargo does not resolve dev-dependencies, so we don't
 +                // add any targets except the library target, since those will not work correctly if
 +                // they use dev-dependencies.
 +                // In fact, they can break quite badly if multiple client workspaces get merged:
 +                // https://github.com/rust-lang/rust-analyzer/issues/11300
 +                continue;
 +            }
 +
 +            if let Some(file_id) = load(&cargo[tgt].root) {
 +                let crate_id = add_target_crate_root(
 +                    &mut crate_graph,
 +                    &cargo[pkg],
 +                    build_scripts.get_output(pkg),
 +                    cfg_options.clone(),
 +                    &mut |path| load_proc_macro(&cargo[tgt].name, path),
 +                    file_id,
 +                    &cargo[tgt].name,
 +                    cargo[tgt].is_proc_macro,
 +                );
 +                if cargo[tgt].kind == TargetKind::Lib {
 +                    lib_tgt = Some((crate_id, cargo[tgt].name.clone()));
 +                    pkg_to_lib_crate.insert(pkg, crate_id);
 +                }
 +                if let Some(proc_macro) = libproc_macro {
 +                    add_dep_with_prelude(
 +                        &mut crate_graph,
 +                        crate_id,
 +                        CrateName::new("proc_macro").unwrap(),
 +                        proc_macro,
 +                        cargo[tgt].is_proc_macro,
 +                    );
 +                }
 +
 +                pkg_crates.entry(pkg).or_insert_with(Vec::new).push((crate_id, cargo[tgt].kind));
 +            }
 +        }
 +
 +        // Set deps to the core, std and to the lib target of the current package
 +        for (from, kind) in pkg_crates.get(&pkg).into_iter().flatten() {
 +            // Add sysroot deps first so that a lib target named `core` etc. can overwrite them.
 +            public_deps.add(*from, &mut crate_graph);
 +
 +            if let Some((to, name)) = lib_tgt.clone() {
 +                if to != *from && *kind != TargetKind::BuildScript {
 +                    // (build script can not depend on its library target)
 +
 +                    // For root projects with dashes in their name,
 +                    // cargo metadata does not do any normalization,
 +                    // so we do it ourselves currently
 +                    let name = CrateName::normalize_dashes(&name);
 +                    add_dep(&mut crate_graph, *from, name, to);
 +                }
 +            }
 +        }
 +    }
 +
 +    // Now add a dep edge from all targets of upstream to the lib
 +    // target of downstream.
 +    for pkg in cargo.packages() {
 +        for dep in cargo[pkg].dependencies.iter() {
 +            let name = CrateName::new(&dep.name).unwrap();
 +            if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) {
 +                for (from, kind) in pkg_crates.get(&pkg).into_iter().flatten() {
 +                    if dep.kind == DepKind::Build && *kind != TargetKind::BuildScript {
 +                        // Only build scripts may depend on build dependencies.
 +                        continue;
 +                    }
 +                    if dep.kind != DepKind::Build && *kind == TargetKind::BuildScript {
 +                        // Build scripts may only depend on build dependencies.
 +                        continue;
 +                    }
 +
 +                    add_dep(&mut crate_graph, *from, name.clone(), to)
 +                }
 +            }
 +        }
 +    }
 +
 +    if has_private {
 +        // If the user provided a path to rustc sources, we add all the rustc_private crates
 +        // and create dependencies on them for the crates which opt-in to that
 +        if let Some(rustc_workspace) = rustc {
 +            handle_rustc_crates(
 +                rustc_workspace,
 +                load,
 +                &mut crate_graph,
 +                &cfg_options,
 +                override_cfg,
 +                load_proc_macro,
 +                &mut pkg_to_lib_crate,
 +                &public_deps,
 +                cargo,
 +                &pkg_crates,
 +                build_scripts,
 +            );
 +        }
 +    }
 +    crate_graph
 +}
 +
 +fn detached_files_to_crate_graph(
 +    rustc_cfg: Vec<CfgFlag>,
 +    load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
 +    detached_files: &[AbsPathBuf],
 +    sysroot: &Sysroot,
 +) -> CrateGraph {
 +    let _p = profile::span("detached_files_to_crate_graph");
 +    let mut crate_graph = CrateGraph::default();
 +    let (public_deps, _libproc_macro) =
 +        sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load);
 +
 +    let mut cfg_options = CfgOptions::default();
 +    cfg_options.extend(rustc_cfg);
 +
 +    for detached_file in detached_files {
 +        let file_id = match load(detached_file) {
 +            Some(file_id) => file_id,
 +            None => {
 +                tracing::error!("Failed to load detached file {:?}", detached_file);
 +                continue;
 +            }
 +        };
 +        let display_name = detached_file
 +            .file_stem()
 +            .and_then(|os_str| os_str.to_str())
 +            .map(|file_stem| CrateDisplayName::from_canonical_name(file_stem.to_string()));
 +        let detached_file_crate = crate_graph.add_crate_root(
 +            file_id,
 +            Edition::CURRENT,
 +            display_name,
 +            None,
 +            cfg_options.clone(),
 +            cfg_options.clone(),
 +            Env::default(),
 +            Ok(Vec::new()),
 +            false,
 +            CrateOrigin::CratesIo { repo: None },
 +        );
 +
 +        public_deps.add(detached_file_crate, &mut crate_graph);
 +    }
 +    crate_graph
 +}
 +
 +fn handle_rustc_crates(
 +    rustc_workspace: &CargoWorkspace,
 +    load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
 +    crate_graph: &mut CrateGraph,
 +    cfg_options: &CfgOptions,
 +    override_cfg: &CfgOverrides,
 +    load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
 +    pkg_to_lib_crate: &mut FxHashMap<la_arena::Idx<crate::PackageData>, CrateId>,
 +    public_deps: &SysrootPublicDeps,
 +    cargo: &CargoWorkspace,
 +    pkg_crates: &FxHashMap<la_arena::Idx<crate::PackageData>, Vec<(CrateId, TargetKind)>>,
 +    build_scripts: &WorkspaceBuildScripts,
 +) {
 +    let mut rustc_pkg_crates = FxHashMap::default();
 +    // The root package of the rustc-dev component is rustc_driver, so we match that
 +    let root_pkg =
 +        rustc_workspace.packages().find(|package| rustc_workspace[*package].name == "rustc_driver");
 +    // The rustc workspace might be incomplete (such as if rustc-dev is not
 +    // installed for the current toolchain) and `rustc_source` is set to discover.
 +    if let Some(root_pkg) = root_pkg {
 +        // Iterate through every crate in the dependency subtree of rustc_driver using BFS
 +        let mut queue = VecDeque::new();
 +        queue.push_back(root_pkg);
 +        while let Some(pkg) = queue.pop_front() {
 +            // Don't duplicate packages if they are dependended on a diamond pattern
 +            // N.B. if this line is omitted, we try to analyse over 4_800_000 crates
 +            // which is not ideal
 +            if rustc_pkg_crates.contains_key(&pkg) {
 +                continue;
 +            }
 +            for dep in &rustc_workspace[pkg].dependencies {
 +                queue.push_back(dep.pkg);
 +            }
 +
 +            let mut cfg_options = cfg_options.clone();
 +
 +            let overrides = match override_cfg {
 +                CfgOverrides::Wildcard(cfg_diff) => Some(cfg_diff),
 +                CfgOverrides::Selective(cfg_overrides) => {
 +                    cfg_overrides.get(&rustc_workspace[pkg].name)
 +                }
 +            };
 +
 +            if let Some(overrides) = overrides {
 +                // FIXME: this is sort of a hack to deal with #![cfg(not(test))] vanishing such as seen
 +                // in ed25519_dalek (#7243), and libcore (#9203) (although you only hit that one while
 +                // working on rust-lang/rust as that's the only time it appears outside sysroot).
 +                //
 +                // A more ideal solution might be to reanalyze crates based on where the cursor is and
 +                // figure out the set of cfgs that would have to apply to make it active.
 +
 +                cfg_options.apply_diff(overrides.clone());
 +            };
 +
 +            for &tgt in rustc_workspace[pkg].targets.iter() {
 +                if rustc_workspace[tgt].kind != TargetKind::Lib {
 +                    continue;
 +                }
 +                if let Some(file_id) = load(&rustc_workspace[tgt].root) {
 +                    let crate_id = add_target_crate_root(
 +                        crate_graph,
 +                        &rustc_workspace[pkg],
 +                        build_scripts.get_output(pkg),
 +                        cfg_options.clone(),
 +                        &mut |path| load_proc_macro(&rustc_workspace[tgt].name, path),
 +                        file_id,
 +                        &rustc_workspace[tgt].name,
 +                        rustc_workspace[tgt].is_proc_macro,
 +                    );
 +                    pkg_to_lib_crate.insert(pkg, crate_id);
 +                    // Add dependencies on core / std / alloc for this crate
 +                    public_deps.add(crate_id, crate_graph);
 +                    rustc_pkg_crates.entry(pkg).or_insert_with(Vec::new).push(crate_id);
 +                }
 +            }
 +        }
 +    }
 +    // Now add a dep edge from all targets of upstream to the lib
 +    // target of downstream.
 +    for pkg in rustc_pkg_crates.keys().copied() {
 +        for dep in rustc_workspace[pkg].dependencies.iter() {
 +            let name = CrateName::new(&dep.name).unwrap();
 +            if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) {
 +                for &from in rustc_pkg_crates.get(&pkg).into_iter().flatten() {
 +                    add_dep(crate_graph, from, name.clone(), to);
 +                }
 +            }
 +        }
 +    }
 +    // Add a dependency on the rustc_private crates for all targets of each package
 +    // which opts in
 +    for dep in rustc_workspace.packages() {
 +        let name = CrateName::normalize_dashes(&rustc_workspace[dep].name);
 +
 +        if let Some(&to) = pkg_to_lib_crate.get(&dep) {
 +            for pkg in cargo.packages() {
 +                let package = &cargo[pkg];
 +                if !package.metadata.rustc_private {
 +                    continue;
 +                }
 +                for (from, _) in pkg_crates.get(&pkg).into_iter().flatten() {
 +                    // Avoid creating duplicate dependencies
 +                    // This avoids the situation where `from` depends on e.g. `arrayvec`, but
 +                    // `rust_analyzer` thinks that it should use the one from the `rustc_source`
 +                    // instead of the one from `crates.io`
 +                    if !crate_graph[*from].dependencies.iter().any(|d| d.name == name) {
 +                        add_dep(crate_graph, *from, name.clone(), to);
 +                    }
 +                }
 +            }
 +        }
 +    }
 +}
 +
 +fn add_target_crate_root(
 +    crate_graph: &mut CrateGraph,
 +    pkg: &PackageData,
 +    build_data: Option<&BuildScriptOutput>,
 +    cfg_options: CfgOptions,
 +    load_proc_macro: &mut dyn FnMut(&AbsPath) -> ProcMacroLoadResult,
 +    file_id: FileId,
 +    cargo_name: &str,
 +    is_proc_macro: bool,
 +) -> CrateId {
 +    let edition = pkg.edition;
 +    let mut potential_cfg_options = cfg_options.clone();
 +    potential_cfg_options.extend(
 +        pkg.features
 +            .iter()
 +            .map(|feat| CfgFlag::KeyValue { key: "feature".into(), value: feat.0.into() }),
 +    );
 +    let cfg_options = {
 +        let mut opts = cfg_options;
 +        for feature in pkg.active_features.iter() {
 +            opts.insert_key_value("feature".into(), feature.into());
 +        }
 +        if let Some(cfgs) = build_data.as_ref().map(|it| &it.cfgs) {
 +            opts.extend(cfgs.iter().cloned());
 +        }
 +        opts
 +    };
 +
 +    let mut env = Env::default();
 +    inject_cargo_env(pkg, &mut env);
 +
 +    if let Some(envs) = build_data.map(|it| &it.envs) {
 +        for (k, v) in envs {
 +            env.set(k, v.clone());
 +        }
 +    }
 +
 +    let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) {
 +        Some(Some(it)) => load_proc_macro(it),
 +        Some(None) => Err("no proc macro dylib present".into()),
 +        None => Err("crate has not (yet) been built".into()),
 +    };
 +
 +    let display_name = CrateDisplayName::from_canonical_name(cargo_name.to_string());
 +    crate_graph.add_crate_root(
 +        file_id,
 +        edition,
 +        Some(display_name),
 +        Some(pkg.version.to_string()),
 +        cfg_options,
 +        potential_cfg_options,
 +        env,
 +        proc_macro,
 +        is_proc_macro,
 +        CrateOrigin::CratesIo { repo: pkg.repository.clone() },
 +    )
 +}
 +
 +#[derive(Default)]
 +struct SysrootPublicDeps {
 +    deps: Vec<(CrateName, CrateId, bool)>,
 +}
 +
 +impl SysrootPublicDeps {
 +    /// Makes `from` depend on the public sysroot crates.
 +    fn add(&self, from: CrateId, crate_graph: &mut CrateGraph) {
 +        for (name, krate, prelude) in &self.deps {
 +            add_dep_with_prelude(crate_graph, from, name.clone(), *krate, *prelude);
 +        }
 +    }
 +}
 +
 +fn sysroot_to_crate_graph(
 +    crate_graph: &mut CrateGraph,
 +    sysroot: &Sysroot,
 +    rustc_cfg: Vec<CfgFlag>,
 +    load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
 +) -> (SysrootPublicDeps, Option<CrateId>) {
 +    let _p = profile::span("sysroot_to_crate_graph");
 +    let mut cfg_options = CfgOptions::default();
 +    cfg_options.extend(rustc_cfg);
 +    let sysroot_crates: FxHashMap<SysrootCrate, CrateId> = sysroot
 +        .crates()
 +        .filter_map(|krate| {
 +            let file_id = load(&sysroot[krate].root)?;
 +
 +            let env = Env::default();
 +            let display_name = CrateDisplayName::from_canonical_name(sysroot[krate].name.clone());
 +            let crate_id = crate_graph.add_crate_root(
 +                file_id,
 +                Edition::CURRENT,
 +                Some(display_name),
 +                None,
 +                cfg_options.clone(),
 +                cfg_options.clone(),
 +                env,
 +                Err("no proc macro loaded for sysroot crate".into()),
 +                false,
 +                CrateOrigin::Lang(LangCrateOrigin::from(&*sysroot[krate].name)),
 +            );
 +            Some((krate, crate_id))
 +        })
 +        .collect();
 +
 +    for from in sysroot.crates() {
 +        for &to in sysroot[from].deps.iter() {
 +            let name = CrateName::new(&sysroot[to].name).unwrap();
 +            if let (Some(&from), Some(&to)) = (sysroot_crates.get(&from), sysroot_crates.get(&to)) {
 +                add_dep(crate_graph, from, name, to);
 +            }
 +        }
 +    }
 +
 +    let public_deps = SysrootPublicDeps {
 +        deps: sysroot
 +            .public_deps()
 +            .map(|(name, idx, prelude)| {
 +                (CrateName::new(name).unwrap(), sysroot_crates[&idx], prelude)
 +            })
 +            .collect::<Vec<_>>(),
 +    };
 +
 +    let libproc_macro = sysroot.proc_macro().and_then(|it| sysroot_crates.get(&it).copied());
 +    (public_deps, libproc_macro)
 +}
 +
 +fn add_dep(graph: &mut CrateGraph, from: CrateId, name: CrateName, to: CrateId) {
 +    add_dep_inner(graph, from, Dependency::new(name, to))
 +}
 +
 +fn add_dep_with_prelude(
 +    graph: &mut CrateGraph,
 +    from: CrateId,
 +    name: CrateName,
 +    to: CrateId,
 +    prelude: bool,
 +) {
 +    add_dep_inner(graph, from, Dependency::with_prelude(name, to, prelude))
 +}
 +
 +fn add_dep_inner(graph: &mut CrateGraph, from: CrateId, dep: Dependency) {
 +    if let Err(err) = graph.add_dep(from, dep) {
 +        tracing::error!("{}", err)
 +    }
 +}
 +
 +/// Recreates the compile-time environment variables that Cargo sets.
 +///
 +/// Should be synced with
 +/// <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates>
 +///
 +/// FIXME: ask Cargo to provide this data instead of re-deriving.
 +fn inject_cargo_env(package: &PackageData, env: &mut Env) {
 +    // FIXME: Missing variables:
 +    // CARGO_BIN_NAME, CARGO_BIN_EXE_<name>
 +
 +    let manifest_dir = package.manifest.parent();
 +    env.set("CARGO_MANIFEST_DIR", manifest_dir.as_os_str().to_string_lossy().into_owned());
 +
 +    // Not always right, but works for common cases.
 +    env.set("CARGO", "cargo".into());
 +
 +    env.set("CARGO_PKG_VERSION", package.version.to_string());
 +    env.set("CARGO_PKG_VERSION_MAJOR", package.version.major.to_string());
 +    env.set("CARGO_PKG_VERSION_MINOR", package.version.minor.to_string());
 +    env.set("CARGO_PKG_VERSION_PATCH", package.version.patch.to_string());
 +    env.set("CARGO_PKG_VERSION_PRE", package.version.pre.to_string());
 +
 +    env.set("CARGO_PKG_AUTHORS", String::new());
 +
 +    env.set("CARGO_PKG_NAME", package.name.clone());
 +    // FIXME: This isn't really correct (a package can have many crates with different names), but
 +    // it's better than leaving the variable unset.
 +    env.set("CARGO_CRATE_NAME", CrateName::normalize_dashes(&package.name).to_string());
 +    env.set("CARGO_PKG_DESCRIPTION", String::new());
 +    env.set("CARGO_PKG_HOMEPAGE", String::new());
 +    env.set("CARGO_PKG_REPOSITORY", String::new());
 +    env.set("CARGO_PKG_LICENSE", String::new());
 +
 +    env.set("CARGO_PKG_LICENSE_FILE", String::new());
 +}
index 07771d1b392ce1a2481d08f2c156078ca20f07f6,0000000000000000000000000000000000000000..5392589186d1db8bcb563f90808b16f29ebd79d9
mode 100644,000000..100644
--- /dev/null
@@@ -1,92 -1,0 +1,93 @@@
- lsp-types = { version = "0.93.0", features = ["proposed"] }
 +[package]
 +name = "rust-analyzer"
 +version = "0.0.0"
 +authors = ["rust-analyzer Team"]
 +homepage = "https://github.com/rust-analyzer/rust-analyzer"
 +description = "A language server for the Rust programming language"
 +documentation = "https://rust-analyzer.github.io/manual.html"
 +license = "MIT OR Apache-2.0"
 +autobins = false
 +edition = "2021"
 +rust-version = "1.57"
 +
 +[lib]
 +doctest = false
 +
 +[[bin]]
 +name = "rust-analyzer"
 +path = "src/bin/main.rs"
 +
 +[dependencies]
 +anyhow = "1.0.57"
 +crossbeam-channel = "0.5.5"
 +dissimilar = "1.0.4"
 +itertools = "0.10.3"
-     "syntax/in-rust-tree"
++scip = "0.1.1"
++lsp-types = { version = "0.93.1", features = ["proposed"] }
 +parking_lot = "0.12.1"
 +xflags = "0.2.4"
 +oorandom = "11.1.3"
 +rustc-hash = "1.1.0"
 +serde = { version = "1.0.137", features = ["derive"] }
 +serde_json = { version = "1.0.81", features = ["preserve_order"] }
 +threadpool = "1.8.1"
 +rayon = "1.5.3"
 +num_cpus = "1.13.1"
 +mimalloc = { version = "0.1.29", default-features = false, optional = true }
 +lsp-server = { version = "0.6.0", path = "../../lib/lsp-server" }
 +tracing = "0.1.35"
 +tracing-subscriber = { version = "0.3.14", default-features = false, features = [
 +    "env-filter",
 +    "registry",
 +    "fmt",
 +    "tracing-log",
 +] }
 +tracing-log = "0.1.3"
 +tracing-tree = "0.2.1"
 +always-assert = "0.1.2"
 +
 +stdx = { path = "../stdx", version = "0.0.0" }
 +flycheck = { path = "../flycheck", version = "0.0.0" }
 +ide = { path = "../ide", version = "0.0.0" }
 +ide-db = { path = "../ide-db", version = "0.0.0" }
 +profile = { path = "../profile", version = "0.0.0" }
 +project-model = { path = "../project-model", version = "0.0.0" }
 +syntax = { path = "../syntax", version = "0.0.0" }
 +vfs = { path = "../vfs", version = "0.0.0" }
 +vfs-notify = { path = "../vfs-notify", version = "0.0.0" }
 +cfg = { path = "../cfg", version = "0.0.0" }
 +toolchain = { path = "../toolchain", version = "0.0.0" }
 +tt = { path = "../tt", version = "0.0.0" }
 +proc-macro-api = { path = "../proc-macro-api", version = "0.0.0" }
 +
 +# This should only be used in CLI
 +ide-ssr = { path = "../ide-ssr", version = "0.0.0" }
 +hir = { path = "../hir", version = "0.0.0" }
 +hir-def = { path = "../hir-def", version = "0.0.0" }
 +hir-ty = { path = "../hir-ty", version = "0.0.0" }
 +proc-macro-srv = { path = "../proc-macro-srv", version = "0.0.0" }
 +
 +[target.'cfg(windows)'.dependencies]
 +winapi = "0.3.9"
 +
 +[target.'cfg(not(target_env = "msvc"))'.dependencies]
 +jemallocator = { version = "0.5.0", package = "tikv-jemallocator", optional = true }
 +
 +[dev-dependencies]
 +expect-test = "1.4.0"
 +jod-thread = "0.1.2"
 +xshell = "0.2.2"
 +
 +test-utils = { path = "../test-utils" }
 +sourcegen = { path = "../sourcegen" }
 +mbe = { path = "../mbe" }
 +
 +[features]
 +jemalloc = ["jemallocator", "profile/jemalloc"]
 +force-always-assert = ["always-assert/force"]
 +in-rust-tree = [
 +    "proc-macro-srv/sysroot-abi",
 +    "sourcegen/in-rust-tree",
 +    "ide/in-rust-tree",
++    "syntax/in-rust-tree",
 +]
index e9de23cb395d1fbec1b2ceab4aa942618fedcd5f,0000000000000000000000000000000000000000..f6a6802972525c09002f18bdc81bacb60a39d3dd
mode 100644,000000..100644
--- /dev/null
@@@ -1,239 -1,0 +1,240 @@@
 +//! Driver for rust-analyzer.
 +//!
 +//! Based on cli flags, either spawns an LSP server, or runs a batch analysis
 +
 +#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
 +
 +mod logger;
 +mod rustc_wrapper;
 +
 +use std::{env, fs, path::Path, process};
 +
 +use lsp_server::Connection;
 +use project_model::ProjectManifest;
 +use rust_analyzer::{cli::flags, config::Config, from_json, lsp_ext::supports_utf8, Result};
 +use vfs::AbsPathBuf;
 +
 +#[cfg(all(feature = "mimalloc"))]
 +#[global_allocator]
 +static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc;
 +
 +#[cfg(all(feature = "jemalloc", not(target_env = "msvc")))]
 +#[global_allocator]
 +static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc;
 +
 +fn main() {
 +    if std::env::var("RA_RUSTC_WRAPPER").is_ok() {
 +        let mut args = std::env::args_os();
 +        let _me = args.next().unwrap();
 +        let rustc = args.next().unwrap();
 +        let code = match rustc_wrapper::run_rustc_skipping_cargo_checking(rustc, args.collect()) {
 +            Ok(rustc_wrapper::ExitCode(code)) => code.unwrap_or(102),
 +            Err(err) => {
 +                eprintln!("{}", err);
 +                101
 +            }
 +        };
 +        process::exit(code);
 +    }
 +
 +    if let Err(err) = try_main() {
 +        tracing::error!("Unexpected error: {}", err);
 +        eprintln!("{}", err);
 +        process::exit(101);
 +    }
 +}
 +
 +fn try_main() -> Result<()> {
 +    let flags = flags::RustAnalyzer::from_env()?;
 +
 +    #[cfg(debug_assertions)]
 +    if flags.wait_dbg || env::var("RA_WAIT_DBG").is_ok() {
 +        #[allow(unused_mut)]
 +        let mut d = 4;
 +        while d == 4 {
 +            d = 4;
 +        }
 +    }
 +
 +    let mut log_file = flags.log_file.as_deref();
 +
 +    let env_log_file = env::var("RA_LOG_FILE").ok();
 +    if let Some(env_log_file) = env_log_file.as_deref() {
 +        log_file = Some(Path::new(env_log_file));
 +    }
 +
 +    setup_logging(log_file)?;
 +    let verbosity = flags.verbosity();
 +
 +    match flags.subcommand {
 +        flags::RustAnalyzerCmd::LspServer(cmd) => {
 +            if cmd.print_config_schema {
 +                println!("{:#}", Config::json_schema());
 +                return Ok(());
 +            }
 +            if cmd.version {
 +                println!("rust-analyzer {}", rust_analyzer::version());
 +                return Ok(());
 +            }
 +            if cmd.help {
 +                println!("{}", flags::RustAnalyzer::HELP);
 +                return Ok(());
 +            }
 +            with_extra_thread("LspServer", run_server)?;
 +        }
 +        flags::RustAnalyzerCmd::ProcMacro(flags::ProcMacro) => {
 +            with_extra_thread("MacroExpander", || proc_macro_srv::cli::run().map_err(Into::into))?;
 +        }
 +        flags::RustAnalyzerCmd::Parse(cmd) => cmd.run()?,
 +        flags::RustAnalyzerCmd::Symbols(cmd) => cmd.run()?,
 +        flags::RustAnalyzerCmd::Highlight(cmd) => cmd.run()?,
 +        flags::RustAnalyzerCmd::AnalysisStats(cmd) => cmd.run(verbosity)?,
 +        flags::RustAnalyzerCmd::Diagnostics(cmd) => cmd.run()?,
 +        flags::RustAnalyzerCmd::Ssr(cmd) => cmd.run()?,
 +        flags::RustAnalyzerCmd::Search(cmd) => cmd.run()?,
 +        flags::RustAnalyzerCmd::Lsif(cmd) => cmd.run()?,
++        flags::RustAnalyzerCmd::Scip(cmd) => cmd.run()?,
 +    }
 +    Ok(())
 +}
 +
 +fn setup_logging(log_file: Option<&Path>) -> Result<()> {
 +    if cfg!(windows) {
 +        // This is required so that windows finds our pdb that is placed right beside the exe.
 +        // By default it doesn't look at the folder the exe resides in, only in the current working
 +        // directory which we set to the project workspace.
 +        // https://docs.microsoft.com/en-us/windows-hardware/drivers/debugger/general-environment-variables
 +        // https://docs.microsoft.com/en-us/windows/win32/api/dbghelp/nf-dbghelp-syminitialize
 +        if let Ok(path) = env::current_exe() {
 +            if let Some(path) = path.parent() {
 +                env::set_var("_NT_SYMBOL_PATH", path);
 +            }
 +        }
 +    }
 +    if env::var("RUST_BACKTRACE").is_err() {
 +        env::set_var("RUST_BACKTRACE", "short");
 +    }
 +
 +    let log_file = match log_file {
 +        Some(path) => {
 +            if let Some(parent) = path.parent() {
 +                let _ = fs::create_dir_all(parent);
 +            }
 +            Some(fs::File::create(path)?)
 +        }
 +        None => None,
 +    };
 +    let filter = env::var("RA_LOG").ok();
 +    // deliberately enable all `error` logs if the user has not set RA_LOG, as there is usually useful
 +    // information in there for debugging
 +    logger::Logger::new(log_file, filter.as_deref().or(Some("error"))).install()?;
 +
 +    profile::init();
 +
 +    Ok(())
 +}
 +
 +const STACK_SIZE: usize = 1024 * 1024 * 8;
 +
 +/// Parts of rust-analyzer can use a lot of stack space, and some operating systems only give us
 +/// 1 MB by default (eg. Windows), so this spawns a new thread with hopefully sufficient stack
 +/// space.
 +fn with_extra_thread(
 +    thread_name: impl Into<String>,
 +    f: impl FnOnce() -> Result<()> + Send + 'static,
 +) -> Result<()> {
 +    let handle =
 +        std::thread::Builder::new().name(thread_name.into()).stack_size(STACK_SIZE).spawn(f)?;
 +    match handle.join() {
 +        Ok(res) => res,
 +        Err(panic) => std::panic::resume_unwind(panic),
 +    }
 +}
 +
 +fn run_server() -> Result<()> {
 +    tracing::info!("server version {} will start", rust_analyzer::version());
 +
 +    let (connection, io_threads) = Connection::stdio();
 +
 +    let (initialize_id, initialize_params) = connection.initialize_start()?;
 +    tracing::info!("InitializeParams: {}", initialize_params);
 +    let initialize_params =
 +        from_json::<lsp_types::InitializeParams>("InitializeParams", &initialize_params)?;
 +
 +    let root_path = match initialize_params
 +        .root_uri
 +        .and_then(|it| it.to_file_path().ok())
 +        .and_then(|it| AbsPathBuf::try_from(it).ok())
 +    {
 +        Some(it) => it,
 +        None => {
 +            let cwd = env::current_dir()?;
 +            AbsPathBuf::assert(cwd)
 +        }
 +    };
 +
 +    let mut config = Config::new(root_path, initialize_params.capabilities);
 +    if let Some(json) = initialize_params.initialization_options {
 +        if let Err(e) = config.update(json) {
 +            use lsp_types::{
 +                notification::{Notification, ShowMessage},
 +                MessageType, ShowMessageParams,
 +            };
 +            let not = lsp_server::Notification::new(
 +                ShowMessage::METHOD.to_string(),
 +                ShowMessageParams { typ: MessageType::WARNING, message: e.to_string() },
 +            );
 +            connection.sender.send(lsp_server::Message::Notification(not)).unwrap();
 +        }
 +    }
 +
 +    let server_capabilities = rust_analyzer::server_capabilities(&config);
 +
 +    let initialize_result = lsp_types::InitializeResult {
 +        capabilities: server_capabilities,
 +        server_info: Some(lsp_types::ServerInfo {
 +            name: String::from("rust-analyzer"),
 +            version: Some(rust_analyzer::version().to_string()),
 +        }),
 +        offset_encoding: if supports_utf8(config.caps()) {
 +            Some("utf-8".to_string())
 +        } else {
 +            None
 +        },
 +    };
 +
 +    let initialize_result = serde_json::to_value(initialize_result).unwrap();
 +
 +    connection.initialize_finish(initialize_id, initialize_result)?;
 +
 +    if let Some(client_info) = initialize_params.client_info {
 +        tracing::info!("Client '{}' {}", client_info.name, client_info.version.unwrap_or_default());
 +    }
 +
 +    if config.linked_projects().is_empty() && config.detached_files().is_empty() {
 +        let workspace_roots = initialize_params
 +            .workspace_folders
 +            .map(|workspaces| {
 +                workspaces
 +                    .into_iter()
 +                    .filter_map(|it| it.uri.to_file_path().ok())
 +                    .filter_map(|it| AbsPathBuf::try_from(it).ok())
 +                    .collect::<Vec<_>>()
 +            })
 +            .filter(|workspaces| !workspaces.is_empty())
 +            .unwrap_or_else(|| vec![config.root_path().clone()]);
 +
 +        let discovered = ProjectManifest::discover_all(&workspace_roots);
 +        tracing::info!("discovered projects: {:?}", discovered);
 +        if discovered.is_empty() {
 +            tracing::error!("failed to find any projects in {:?}", workspace_roots);
 +        }
 +        config.discovered_projects = Some(discovered);
 +    }
 +
 +    rust_analyzer::main_loop(config, connection)?;
 +
 +    io_threads.join()?;
 +    tracing::info!("server did shut down");
 +    Ok(())
 +}
index 6ccdaa86dd628786d33a9142e3b199f2dbd5bfa2,0000000000000000000000000000000000000000..60ba67e25f93bc08679072f4a25de7bf10bdaf7c
mode 100644,000000..100644
--- /dev/null
@@@ -1,69 -1,0 +1,70 @@@
 +//! Various batch processing tasks, intended primarily for debugging.
 +
 +pub mod flags;
 +pub mod load_cargo;
 +mod parse;
 +mod symbols;
 +mod highlight;
 +mod analysis_stats;
 +mod diagnostics;
 +mod ssr;
 +mod lsif;
++mod scip;
 +
 +mod progress_report;
 +
 +use std::io::Read;
 +
 +use anyhow::Result;
 +use ide::AnalysisHost;
 +use vfs::Vfs;
 +
 +#[derive(Clone, Copy)]
 +pub enum Verbosity {
 +    Spammy,
 +    Verbose,
 +    Normal,
 +    Quiet,
 +}
 +
 +impl Verbosity {
 +    pub fn is_verbose(self) -> bool {
 +        matches!(self, Verbosity::Verbose | Verbosity::Spammy)
 +    }
 +    pub fn is_spammy(self) -> bool {
 +        matches!(self, Verbosity::Spammy)
 +    }
 +}
 +
 +fn read_stdin() -> Result<String> {
 +    let mut buff = String::new();
 +    std::io::stdin().read_to_string(&mut buff)?;
 +    Ok(buff)
 +}
 +
 +fn report_metric(metric: &str, value: u64, unit: &str) {
 +    if std::env::var("RA_METRICS").is_err() {
 +        return;
 +    }
 +    println!("METRIC:{}:{}:{}", metric, value, unit)
 +}
 +
 +fn print_memory_usage(mut host: AnalysisHost, vfs: Vfs) {
 +    let mut mem = host.per_query_memory_usage();
 +
 +    let before = profile::memory_usage();
 +    drop(vfs);
 +    let vfs = before.allocated - profile::memory_usage().allocated;
 +    mem.push(("VFS".into(), vfs));
 +
 +    let before = profile::memory_usage();
 +    drop(host);
 +    mem.push(("Unaccounted".into(), before.allocated - profile::memory_usage().allocated));
 +
 +    mem.push(("Remaining".into(), profile::memory_usage().allocated));
 +
 +    for (name, bytes) in mem {
 +        // NOTE: Not a debug print, so avoid going through the `eprintln` defined above.
 +        eprintln!("{:>8} {}", bytes, name);
 +    }
 +}
index 080e2fb4438816717f38985da19aa681cf0817ad,0000000000000000000000000000000000000000..aa32654fbdca1efc3afc748e8ac59fb5b28e4c85
mode 100644,000000..100644
--- /dev/null
@@@ -1,252 -1,0 +1,262 @@@
 +//! Grammar for the command-line arguments.
 +#![allow(unreachable_pub)]
 +use std::{path::PathBuf, str::FromStr};
 +
 +use ide_ssr::{SsrPattern, SsrRule};
 +
 +use crate::cli::Verbosity;
 +
 +xflags::xflags! {
 +    src "./src/cli/flags.rs"
 +
 +    /// LSP server for the Rust programming language.
 +    ///
 +    /// Subcommands and their flags do not provide any stability guarantees and may be removed or
 +    /// changed without notice. Top-level flags that are not are marked as [Unstable] provide
 +    /// backwards-compatibility and may be relied on.
 +    cmd rust-analyzer {
 +        /// Verbosity level, can be repeated multiple times.
 +        repeated -v, --verbose
 +        /// Verbosity level.
 +        optional -q, --quiet
 +
 +        /// Log to the specified file instead of stderr.
 +        optional --log-file path: PathBuf
 +        /// Flush log records to the file immediately.
 +        optional --no-log-buffering
 +
 +        /// [Unstable] Wait until a debugger is attached to (requires debug build).
 +        optional --wait-dbg
 +
 +        default cmd lsp-server {
 +            /// Print version.
 +            optional --version
 +            /// Print help.
 +            optional -h, --help
 +
 +            /// Dump a LSP config JSON schema.
 +            optional --print-config-schema
 +        }
 +
 +        /// Parse stdin.
 +        cmd parse {
 +            /// Suppress printing.
 +            optional --no-dump
 +        }
 +
 +        /// Parse stdin and print the list of symbols.
 +        cmd symbols {}
 +
 +        /// Highlight stdin as html.
 +        cmd highlight {
 +            /// Enable rainbow highlighting of identifiers.
 +            optional --rainbow
 +        }
 +
 +        /// Batch typecheck project and print summary statistics
 +        cmd analysis-stats
 +            /// Directory with Cargo.toml.
 +            required path: PathBuf
 +        {
 +            optional --output format: OutputFormat
 +
 +            /// Randomize order in which crates, modules, and items are processed.
 +            optional --randomize
 +            /// Run type inference in parallel.
 +            optional --parallel
 +            /// Collect memory usage statistics.
 +            optional --memory-usage
 +            /// Print the total length of all source and macro files (whitespace is not counted).
 +            optional --source-stats
 +
 +            /// Only analyze items matching this path.
 +            optional -o, --only path: String
 +            /// Also analyze all dependencies.
 +            optional --with-deps
 +            /// Don't load sysroot crates (`std`, `core` & friends).
 +            optional --no-sysroot
 +
 +            /// Don't run build scripts or load `OUT_DIR` values by running `cargo check` before analysis.
 +            optional --disable-build-scripts
 +            /// Don't use expand proc macros.
 +            optional --disable-proc-macros
 +            /// Only resolve names, don't run type inference.
 +            optional --skip-inference
 +        }
 +
 +        cmd diagnostics
 +            /// Directory with Cargo.toml.
 +            required path: PathBuf
 +        {
 +            /// Don't run build scripts or load `OUT_DIR` values by running `cargo check` before analysis.
 +            optional --disable-build-scripts
 +            /// Don't use expand proc macros.
 +            optional --disable-proc-macros
 +        }
 +
 +        cmd ssr
 +            /// A structured search replace rule (`$a.foo($b) ==> bar($a, $b)`)
 +            repeated rule: SsrRule
 +        {}
 +
 +        cmd search
 +            /// A structured search replace pattern (`$a.foo($b)`)
 +            repeated pattern: SsrPattern
 +        {
 +            /// Prints debug information for any nodes with source exactly equal to snippet.
 +            optional --debug snippet: String
 +        }
 +
 +        cmd proc-macro {}
 +
 +        cmd lsif
 +            required path: PathBuf
 +        {}
++
++        cmd scip
++            required path: PathBuf
++        {}
 +    }
 +}
 +
// NOTE(review): everything between the `generated start` / `generated end`
// markers is emitted by the `xflags` invocation above. Do not edit it by
// hand — run `env UPDATE_XFLAGS=1 cargo build` to regenerate it instead.
// generated start
// The following code is generated by `xflags` macro.
// Run `env UPDATE_XFLAGS=1 cargo build` to regenerate.
#[derive(Debug)]
pub struct RustAnalyzer {
    pub verbose: u32,
    pub quiet: bool,
    pub log_file: Option<PathBuf>,
    pub no_log_buffering: bool,
    pub wait_dbg: bool,
    pub subcommand: RustAnalyzerCmd,
}

#[derive(Debug)]
pub enum RustAnalyzerCmd {
    LspServer(LspServer),
    Parse(Parse),
    Symbols(Symbols),
    Highlight(Highlight),
    AnalysisStats(AnalysisStats),
    Diagnostics(Diagnostics),
    Ssr(Ssr),
    Search(Search),
    ProcMacro(ProcMacro),
    Lsif(Lsif),
    Scip(Scip),
}

#[derive(Debug)]
pub struct LspServer {
    pub version: bool,
    pub help: bool,
    pub print_config_schema: bool,
}

#[derive(Debug)]
pub struct Parse {
    pub no_dump: bool,
}

#[derive(Debug)]
pub struct Symbols;

#[derive(Debug)]
pub struct Highlight {
    pub rainbow: bool,
}

#[derive(Debug)]
pub struct AnalysisStats {
    pub path: PathBuf,

    pub output: Option<OutputFormat>,
    pub randomize: bool,
    pub parallel: bool,
    pub memory_usage: bool,
    pub source_stats: bool,
    pub only: Option<String>,
    pub with_deps: bool,
    pub no_sysroot: bool,
    pub disable_build_scripts: bool,
    pub disable_proc_macros: bool,
    pub skip_inference: bool,
}

#[derive(Debug)]
pub struct Diagnostics {
    pub path: PathBuf,

    pub disable_build_scripts: bool,
    pub disable_proc_macros: bool,
}

#[derive(Debug)]
pub struct Ssr {
    pub rule: Vec<SsrRule>,
}

#[derive(Debug)]
pub struct Search {
    pub pattern: Vec<SsrPattern>,

    pub debug: Option<String>,
}

#[derive(Debug)]
pub struct ProcMacro;

#[derive(Debug)]
pub struct Lsif {
    pub path: PathBuf,
}

#[derive(Debug)]
pub struct Scip {
    pub path: PathBuf,
}

impl RustAnalyzer {
    pub const HELP: &'static str = Self::HELP_;

    #[allow(dead_code)]
    pub fn from_env() -> xflags::Result<Self> {
        Self::from_env_()
    }

    #[allow(dead_code)]
    pub fn from_vec(args: Vec<std::ffi::OsString>) -> xflags::Result<Self> {
        Self::from_vec_(args)
    }
}
// generated end
 +
/// Output format accepted by `analysis-stats --output`.
#[derive(Debug, PartialEq, Eq)]
pub enum OutputFormat {
    Csv,
}
 +
 +impl RustAnalyzer {
 +    pub fn verbosity(&self) -> Verbosity {
 +        if self.quiet {
 +            return Verbosity::Quiet;
 +        }
 +        match self.verbose {
 +            0 => Verbosity::Normal,
 +            1 => Verbosity::Verbose,
 +            _ => Verbosity::Spammy,
 +        }
 +    }
 +}
 +
 +impl FromStr for OutputFormat {
 +    type Err = String;
 +
 +    fn from_str(s: &str) -> Result<Self, Self::Err> {
 +        match s {
 +            "csv" => Ok(Self::Csv),
 +            _ => Err(format!("unknown output format `{}`", s)),
 +        }
 +    }
 +}
index 0000000000000000000000000000000000000000,0000000000000000000000000000000000000000..65cc993c45e7145f8936532d002ba8133b7d6105
new file mode 100644 (file)
--- /dev/null
--- /dev/null
@@@ -1,0 -1,0 +1,448 @@@
++//! SCIP generator
++
++use std::{
++    collections::{HashMap, HashSet},
++    time::Instant,
++};
++
++use crate::line_index::{LineEndings, LineIndex, OffsetEncoding};
++use hir::Name;
++use ide::{
++    LineCol, MonikerDescriptorKind, MonikerResult, StaticIndex, StaticIndexedFile, TextRange,
++    TokenId,
++};
++use ide_db::LineIndexDatabase;
++use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
++use scip::types as scip_types;
++use std::env;
++
++use crate::cli::{
++    flags,
++    load_cargo::{load_workspace, LoadCargoConfig},
++    Result,
++};
++
impl flags::Scip {
    /// Loads the Cargo workspace at `self.path`, runs full static analysis,
    /// and writes a SCIP index to `index.scip` in the current directory.
    pub fn run(self) -> Result<()> {
        eprintln!("Generating SCIP start...");
        let now = Instant::now();
        let cargo_config = CargoConfig::default();

        let no_progress = &|s| (eprintln!("rust-analyzer: Loading {}", s));
        let load_cargo_config = LoadCargoConfig {
            load_out_dirs_from_check: true,
            with_proc_macro: true,
            prefill_caches: true,
        };
        // Resolve the user-supplied path against the current directory so
        // relative invocations work.
        let path = vfs::AbsPathBuf::assert(env::current_dir()?.join(&self.path));
        let rootpath = path.normalize();
        let manifest = ProjectManifest::discover_single(&path)?;

        let workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;

        let (host, vfs, _) = load_workspace(workspace, &load_cargo_config)?;
        let db = host.raw_database();
        let analysis = host.analysis();

        let si = StaticIndex::compute(&analysis);

        let mut index = scip_types::Index {
            metadata: Some(scip_types::Metadata {
                version: scip_types::ProtocolVersion::UnspecifiedProtocolVersion.into(),
                tool_info: Some(scip_types::ToolInfo {
                    name: "rust-analyzer".to_owned(),
                    version: "0.1".to_owned(),
                    arguments: vec![],
                    ..Default::default()
                })
                .into(),
                project_root: format!(
                    "file://{}",
                    path.normalize()
                        .as_os_str()
                        .to_str()
                        .ok_or(anyhow::anyhow!("Unable to normalize project_root path"))?
                        .to_string()
                ),
                text_document_encoding: scip_types::TextEncoding::UTF8.into(),
                ..Default::default()
            })
            .into(),
            ..Default::default()
        };

        // Tokens for which a `SymbolInformation` has already been pushed, and
        // a cache of token id -> formatted symbol string.
        let mut symbols_emitted: HashSet<TokenId> = HashSet::default();
        let mut tokens_to_symbol: HashMap<TokenId, String> = HashMap::new();

        for file in si.files {
            // File-local symbols get sequential `local N` ids within a file.
            let mut local_count = 0;
            let mut new_local_symbol = || {
                let new_symbol = scip::types::Symbol::new_local(local_count);
                local_count += 1;

                new_symbol
            };

            let StaticIndexedFile { file_id, tokens, .. } = file;
            // Files outside the project root (e.g. sysroot) are skipped.
            let relative_path = match get_relative_filepath(&vfs, &rootpath, file_id) {
                Some(relative_path) => relative_path,
                None => continue,
            };

            let line_index = LineIndex {
                index: db.line_index(file_id),
                encoding: OffsetEncoding::Utf8,
                endings: LineEndings::Unix,
            };

            let mut doc = scip_types::Document {
                relative_path,
                language: "rust".to_string(),
                ..Default::default()
            };

            tokens.into_iter().for_each(|(range, id)| {
                let token = si.tokens.get(id).unwrap();

                let mut occurrence = scip_types::Occurrence::default();
                occurrence.range = text_range_to_scip_range(&line_index, range);
                occurrence.symbol = match tokens_to_symbol.get(&id) {
                    Some(symbol) => symbol.clone(),
                    None => {
                        // Tokens without a moniker are local to this file.
                        let symbol = match &token.moniker {
                            Some(moniker) => moniker_to_symbol(&moniker),
                            None => new_local_symbol(),
                        };

                        let symbol = scip::symbol::format_symbol(symbol);
                        tokens_to_symbol.insert(id, symbol.clone());
                        symbol
                    }
                };

                if let Some(def) = token.definition {
                    if def.range == range {
                        occurrence.symbol_roles |= scip_types::SymbolRole::Definition as i32;
                    }

                    // Emit documentation (hover text) at most once per token.
                    if !symbols_emitted.contains(&id) {
                        symbols_emitted.insert(id);

                        let mut symbol_info = scip_types::SymbolInformation::default();
                        symbol_info.symbol = occurrence.symbol.clone();
                        if let Some(hover) = &token.hover {
                            if !hover.markup.as_str().is_empty() {
                                symbol_info.documentation = vec![hover.markup.as_str().to_string()];
                            }
                        }

                        doc.symbols.push(symbol_info)
                    }
                }

                doc.occurrences.push(occurrence);
            });

            if doc.occurrences.is_empty() {
                continue;
            }

            index.documents.push(doc);
        }

        scip::write_message_to_file("index.scip", index)
            .map_err(|err| anyhow::anyhow!("Failed to write scip to file: {}", err))?;

        eprintln!("Generating SCIP finished {:?}", now.elapsed());
        Ok(())
    }
}
++
++fn get_relative_filepath(
++    vfs: &vfs::Vfs,
++    rootpath: &vfs::AbsPathBuf,
++    file_id: ide::FileId,
++) -> Option<String> {
++    Some(vfs.file_path(file_id).as_path()?.strip_prefix(&rootpath)?.as_ref().to_str()?.to_string())
++}
++
++// SCIP Ranges have a (very large) optimization that ranges if they are on the same line
++// only encode as a vector of [start_line, start_col, end_col].
++//
++// This transforms a line index into the optimized SCIP Range.
++fn text_range_to_scip_range(line_index: &LineIndex, range: TextRange) -> Vec<i32> {
++    let LineCol { line: start_line, col: start_col } = line_index.index.line_col(range.start());
++    let LineCol { line: end_line, col: end_col } = line_index.index.line_col(range.end());
++
++    if start_line == end_line {
++        vec![start_line as i32, start_col as i32, end_col as i32]
++    } else {
++        vec![start_line as i32, start_col as i32, end_line as i32, end_col as i32]
++    }
++}
++
++fn new_descriptor_str(
++    name: &str,
++    suffix: scip_types::descriptor::Suffix,
++) -> scip_types::Descriptor {
++    scip_types::Descriptor {
++        name: name.to_string(),
++        disambiguator: "".to_string(),
++        suffix: suffix.into(),
++        ..Default::default()
++    }
++}
++
++fn new_descriptor(name: Name, suffix: scip_types::descriptor::Suffix) -> scip_types::Descriptor {
++    let mut name = name.to_string();
++    if name.contains("'") {
++        name = format!("`{}`", name);
++    }
++
++    new_descriptor_str(name.as_str(), suffix)
++}
++
/// Loosely based on `def_to_moniker`.
///
/// Converts a resolved moniker into a SCIP `Symbol` under the
/// `rust-analyzer` scheme with a `cargo` package, emitting one descriptor
/// per segment of the moniker's identifier path.
///
/// NOTE(review): unlike the doc that previously claimed "returns None" —
/// this function is infallible; local tokens never reach it because callers
/// only invoke it when `token.moniker` is `Some`.
fn moniker_to_symbol(moniker: &MonikerResult) -> scip_types::Symbol {
    use scip_types::descriptor::Suffix::*;

    let package_name = moniker.package_information.name.clone();
    let version = moniker.package_information.version.clone();
    // Map rust-analyzer's moniker descriptor kinds onto SCIP suffixes 1:1.
    let descriptors = moniker
        .identifier
        .description
        .iter()
        .map(|desc| {
            new_descriptor(
                desc.name.clone(),
                match desc.desc {
                    MonikerDescriptorKind::Namespace => Namespace,
                    MonikerDescriptorKind::Type => Type,
                    MonikerDescriptorKind::Term => Term,
                    MonikerDescriptorKind::Method => Method,
                    MonikerDescriptorKind::TypeParameter => TypeParameter,
                    MonikerDescriptorKind::Parameter => Parameter,
                    MonikerDescriptorKind::Macro => Macro,
                    MonikerDescriptorKind::Meta => Meta,
                },
            )
        })
        .collect();

    scip_types::Symbol {
        scheme: "rust-analyzer".into(),
        package: Some(scip_types::Package {
            manager: "cargo".to_string(),
            name: package_name,
            version,
            ..Default::default()
        })
        .into(),
        descriptors,
        ..Default::default()
    }
}
++
#[cfg(test)]
mod test {
    use super::*;
    use hir::Semantics;
    use ide::{AnalysisHost, FilePosition};
    use ide_db::defs::IdentClass;
    use ide_db::{base_db::fixture::ChangeFixture, helpers::pick_best_token};
    use scip::symbol::format_symbol;
    use syntax::SyntaxKind::*;
    use syntax::{AstNode, T};

    /// Parses a test fixture and returns the analysis host plus the position
    /// of the `$0` cursor marker inside it.
    fn position(ra_fixture: &str) -> (AnalysisHost, FilePosition) {
        let mut host = AnalysisHost::default();
        let change_fixture = ChangeFixture::parse(ra_fixture);
        host.raw_database_mut().apply_change(change_fixture.change);
        let (file_id, range_or_offset) =
            change_fixture.file_position.expect("expected a marker ($0)");
        let offset = range_or_offset.expect_offset();
        (host, FilePosition { file_id, offset })
    }

    /// If expected == "", then assert that there are no symbols (this is basically local symbol)
    #[track_caller]
    fn check_symbol(ra_fixture: &str, expected: &str) {
        let (host, position) = position(ra_fixture);

        let FilePosition { file_id, offset } = position;

        let db = host.raw_database();
        let sema = &Semantics::new(db);
        let file = sema.parse(file_id).syntax().clone();
        // Prefer identifier-like tokens at the cursor over punctuation,
        // and punctuation over trivia.
        let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
            IDENT
            | INT_NUMBER
            | LIFETIME_IDENT
            | T![self]
            | T![super]
            | T![crate]
            | T![Self]
            | COMMENT => 2,
            kind if kind.is_trivia() => 0,
            _ => 1,
        })
        .expect("OK OK");

        // Resolve the token (through macros) to definitions, and turn each
        // definition's moniker into a SCIP symbol.
        let navs = sema
            .descend_into_macros(original_token.clone())
            .into_iter()
            .filter_map(|token| {
                IdentClass::classify_token(sema, &token).map(IdentClass::definitions).map(|it| {
                    it.into_iter().flat_map(|def| {
                        let module = def.module(db).unwrap();
                        let current_crate = module.krate();

                        match MonikerResult::from_def(sema.db, def, current_crate) {
                            Some(moniker_result) => Some(moniker_to_symbol(&moniker_result)),
                            None => None,
                        }
                    })
                })
            })
            .flatten()
            .collect::<Vec<_>>();

        if expected == "" {
            assert_eq!(0, navs.len(), "must have no symbols {:?}", navs);
            return;
        }

        assert_eq!(1, navs.len(), "must have one symbol {:?}", navs);

        let res = navs.get(0).unwrap();
        let formatted = format_symbol(res.clone());
        assert_eq!(formatted, expected);
    }

    #[test]
    fn basic() {
        check_symbol(
            r#"
//- /lib.rs crate:main deps:foo
use foo::example_mod::func;
fn main() {
    func$0();
}
//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
pub mod example_mod {
    pub fn func() {}
}
"#,
            "rust-analyzer cargo foo 0.1.0 example_mod/func().",
        );
    }

    #[test]
    fn symbol_for_trait() {
        check_symbol(
            r#"
//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
pub mod module {
    pub trait MyTrait {
        pub fn func$0() {}
    }
}
"#,
            "rust-analyzer cargo foo 0.1.0 module/MyTrait#func().",
        );
    }

    #[test]
    fn symbol_for_trait_constant() {
        check_symbol(
            r#"
    //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
    pub mod module {
        pub trait MyTrait {
            const MY_CONST$0: u8;
        }
    }
    "#,
            "rust-analyzer cargo foo 0.1.0 module/MyTrait#MY_CONST.",
        );
    }

    #[test]
    fn symbol_for_trait_type() {
        check_symbol(
            r#"
    //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
    pub mod module {
        pub trait MyTrait {
            type MyType$0;
        }
    }
    "#,
            // "foo::module::MyTrait::MyType",
            "rust-analyzer cargo foo 0.1.0 module/MyTrait#[MyType]",
        );
    }

    #[test]
    fn symbol_for_trait_impl_function() {
        check_symbol(
            r#"
    //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
    pub mod module {
        pub trait MyTrait {
            pub fn func() {}
        }

        struct MyStruct {}

        impl MyTrait for MyStruct {
            pub fn func$0() {}
        }
    }
    "#,
            // "foo::module::MyStruct::MyTrait::func",
            "rust-analyzer cargo foo 0.1.0 module/MyStruct#MyTrait#func().",
        );
    }

    #[test]
    fn symbol_for_field() {
        check_symbol(
            r#"
    //- /lib.rs crate:main deps:foo
    use foo::St;
    fn main() {
        let x = St { a$0: 2 };
    }
    //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
    pub struct St {
        pub a: i32,
    }
    "#,
            "rust-analyzer cargo foo 0.1.0 St#a.",
        );
    }

    #[test]
    fn local_symbol_for_local() {
        check_symbol(
            r#"
    //- /lib.rs crate:main deps:foo
    use foo::module::func;
    fn main() {
        func();
    }
    //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
    pub mod module {
        pub fn func() {
            let x$0 = 2;
        }
    }
    "#,
            "",
        );
    }
}
index 6649f42b4ef9e6cade85b71105ecda9b535dda32,0000000000000000000000000000000000000000..54dcb42d99c789ea142e8873dee91d246114692e
mode 100644,000000..100644
--- /dev/null
@@@ -1,1995 -1,0 +1,2034 @@@
-     HighlightRelatedConfig, HoverConfig, HoverDocFormat, InlayHintsConfig, JoinLinesConfig,
-     Snippet, SnippetScope,
 +//! Config used by the language server.
 +//!
 +//! We currently get this config from `initialize` LSP request, which is not the
 +//! best way to do it, but was the simplest thing we could implement.
 +//!
 +//! Of particular interest is the `feature_flags` hash map: while other fields
 +//! configure the server itself, feature flags are passed into analysis, and
 +//! tweak things like automatic insertion of `()` in completions.
 +
 +use std::{ffi::OsString, fmt, iter, path::PathBuf};
 +
 +use flycheck::FlycheckConfig;
 +use ide::{
 +    AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode,
-     pub fn highlighting_strings(&self) -> bool {
-         self.data.semanticHighlighting_strings_enable
++    HighlightConfig, HighlightRelatedConfig, HoverConfig, HoverDocFormat, InlayHintsConfig,
++    JoinLinesConfig, Snippet, SnippetScope,
 +};
 +use ide_db::{
 +    imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind},
 +    SnippetCap,
 +};
 +use itertools::Itertools;
 +use lsp_types::{ClientCapabilities, MarkupKind};
 +use project_model::{
 +    CargoConfig, ProjectJson, ProjectJsonData, ProjectManifest, RustcSource, UnsetTestCrates,
 +};
 +use rustc_hash::{FxHashMap, FxHashSet};
 +use serde::{de::DeserializeOwned, Deserialize};
 +use vfs::AbsPathBuf;
 +
 +use crate::{
 +    caps::completion_item_edit_resolve,
 +    diagnostics::DiagnosticsMapConfig,
 +    line_index::OffsetEncoding,
 +    lsp_ext::{self, supports_utf8, WorkspaceSymbolSearchKind, WorkspaceSymbolSearchScope},
 +};
 +
 +mod patch_old_style;
 +
 +// Conventions for configuration keys to preserve maximal extendability without breakage:
 +//  - Toggles (be it binary true/false or with more options in-between) should almost always suffix as `_enable`
 +//    This has the benefit of namespaces being extensible, and if the suffix doesn't fit later it can be changed without breakage.
 +//  - In general be wary of using the namespace of something verbatim, it prevents us from adding subkeys in the future
 +//  - Don't use abbreviations unless really necessary
 +//  - foo_command = overrides the subcommand, foo_overrideCommand allows full overwriting, extra args only applies for foo_command
 +
 +// Defines the server-side configuration of the rust-analyzer. We generate
 +// *parts* of VS Code's `package.json` config from this. Run `cargo test` to
 +// re-generate that file.
 +//
 +// However, editor specific config, which the server doesn't know about, should
 +// be specified directly in `package.json`.
 +//
 +// To deprecate an option by replacing it with another name use `new_name | old_name` so that we
 +// keep parsing the old name.
 +config_data! {
 +    struct ConfigData {
 +        /// Placeholder expression to use for missing expressions in assists.
 +        assist_expressionFillDefault: ExprFillDefaultDef              = "\"todo\"",
 +
 +        /// Warm up caches on project load.
 +        cachePriming_enable: bool = "true",
 +        /// How many worker threads to handle priming caches. The default `0` means to pick automatically.
 +        cachePriming_numThreads: ParallelCachePrimingNumThreads = "0",
 +
 +        /// Automatically refresh project info via `cargo metadata` on
 +        /// `Cargo.toml` or `.cargo/config.toml` changes.
 +        cargo_autoreload: bool           = "true",
 +        /// Run build scripts (`build.rs`) for more precise code analysis.
 +        cargo_buildScripts_enable: bool  = "true",
 +        /// Override the command rust-analyzer uses to run build scripts and
 +        /// build procedural macros. The command is required to output json
 +        /// and should therefore include `--message-format=json` or a similar
 +        /// option.
 +        ///
 +        /// By default, a cargo invocation will be constructed for the configured
 +        /// targets and features, with the following base command line:
 +        ///
 +        /// ```bash
 +        /// cargo check --quiet --workspace --message-format=json --all-targets
 +        /// ```
 +        /// .
 +        cargo_buildScripts_overrideCommand: Option<Vec<String>> = "null",
 +        /// Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to
 +        /// avoid checking unnecessary things.
 +        cargo_buildScripts_useRustcWrapper: bool = "true",
 +        /// List of features to activate.
 +        ///
 +        /// Set this to `"all"` to pass `--all-features` to cargo.
 +        cargo_features: CargoFeatures      = "[]",
 +        /// Whether to pass `--no-default-features` to cargo.
 +        cargo_noDefaultFeatures: bool    = "false",
 +        /// Internal config for debugging, disables loading of sysroot crates.
 +        cargo_noSysroot: bool            = "false",
 +        /// Compilation target override (target triple).
 +        cargo_target: Option<String>     = "null",
 +        /// Unsets `#[cfg(test)]` for the specified crates.
 +        cargo_unsetTest: Vec<String>   = "[\"core\"]",
 +
 +        /// Check all targets and tests (`--all-targets`).
 +        checkOnSave_allTargets: bool                     = "true",
 +        /// Cargo command to use for `cargo check`.
 +        checkOnSave_command: String                      = "\"check\"",
 +        /// Run specified `cargo check` command for diagnostics on save.
 +        checkOnSave_enable: bool                         = "true",
 +        /// Extra arguments for `cargo check`.
 +        checkOnSave_extraArgs: Vec<String>               = "[]",
 +        /// List of features to activate. Defaults to
 +        /// `#rust-analyzer.cargo.features#`.
 +        ///
 +        /// Set to `"all"` to pass `--all-features` to Cargo.
 +        checkOnSave_features: Option<CargoFeatures>      = "null",
 +        /// Whether to pass `--no-default-features` to Cargo. Defaults to
 +        /// `#rust-analyzer.cargo.noDefaultFeatures#`.
 +        checkOnSave_noDefaultFeatures: Option<bool>      = "null",
 +        /// Override the command rust-analyzer uses instead of `cargo check` for
 +        /// diagnostics on save. The command is required to output json and
 +        /// should therefore include `--message-format=json` or a similar option.
 +        ///
 +        /// If you're changing this because you're using some tool wrapping
 +        /// Cargo, you might also want to change
 +        /// `#rust-analyzer.cargo.buildScripts.overrideCommand#`.
 +        ///
 +        /// If there are multiple linked projects, this command is invoked for
 +        /// each of them, with the working directory being the project root
 +        /// (i.e., the folder containing the `Cargo.toml`).
 +        ///
 +        /// An example command would be:
 +        ///
 +        /// ```bash
 +        /// cargo check --workspace --message-format=json --all-targets
 +        /// ```
 +        /// .
 +        checkOnSave_overrideCommand: Option<Vec<String>> = "null",
 +        /// Check for a specific target. Defaults to
 +        /// `#rust-analyzer.cargo.target#`.
 +        checkOnSave_target: Option<String>               = "null",
 +
 +        /// Toggles the additional completions that automatically add imports when completed.
 +        /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
 +        completion_autoimport_enable: bool       = "true",
 +        /// Toggles the additional completions that automatically show method calls and field accesses
 +        /// with `self` prefixed to them when inside a method.
 +        completion_autoself_enable: bool        = "true",
 +        /// Whether to add parenthesis and argument snippets when completing function.
 +        completion_callable_snippets: CallableCompletionDef  = "\"fill_arguments\"",
 +        /// Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
 +        completion_postfix_enable: bool         = "true",
 +        /// Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.
 +        completion_privateEditable_enable: bool = "false",
 +        /// Custom completion snippets.
 +        // NOTE: Keep this list in sync with the feature docs of user snippets.
 +        completion_snippets_custom: FxHashMap<String, SnippetDef> = r#"{
 +            "Arc::new": {
 +                "postfix": "arc",
 +                "body": "Arc::new(${receiver})",
 +                "requires": "std::sync::Arc",
 +                "description": "Put the expression into an `Arc`",
 +                "scope": "expr"
 +            },
 +            "Rc::new": {
 +                "postfix": "rc",
 +                "body": "Rc::new(${receiver})",
 +                "requires": "std::rc::Rc",
 +                "description": "Put the expression into an `Rc`",
 +                "scope": "expr"
 +            },
 +            "Box::pin": {
 +                "postfix": "pinbox",
 +                "body": "Box::pin(${receiver})",
 +                "requires": "std::boxed::Box",
 +                "description": "Put the expression into a pinned `Box`",
 +                "scope": "expr"
 +            },
 +            "Ok": {
 +                "postfix": "ok",
 +                "body": "Ok(${receiver})",
 +                "description": "Wrap the expression in a `Result::Ok`",
 +                "scope": "expr"
 +            },
 +            "Err": {
 +                "postfix": "err",
 +                "body": "Err(${receiver})",
 +                "description": "Wrap the expression in a `Result::Err`",
 +                "scope": "expr"
 +            },
 +            "Some": {
 +                "postfix": "some",
 +                "body": "Some(${receiver})",
 +                "description": "Wrap the expression in an `Option::Some`",
 +                "scope": "expr"
 +            }
 +        }"#,
 +
 +        /// List of rust-analyzer diagnostics to disable.
 +        diagnostics_disabled: FxHashSet<String> = "[]",
 +        /// Whether to show native rust-analyzer diagnostics.
 +        diagnostics_enable: bool                = "true",
 +        /// Whether to show experimental rust-analyzer diagnostics that might
 +        /// have more false positives than usual.
 +        diagnostics_experimental_enable: bool    = "false",
 +        /// Map of prefixes to be substituted when parsing diagnostic file paths.
 +        /// This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.
 +        diagnostics_remapPrefix: FxHashMap<String, String> = "{}",
 +        /// List of warnings that should be displayed with hint severity.
 +        ///
 +        /// The warnings will be indicated by faded text or three dots in code
 +        /// and will not show up in the `Problems Panel`.
 +        diagnostics_warningsAsHint: Vec<String> = "[]",
 +        /// List of warnings that should be displayed with info severity.
 +        ///
 +        /// The warnings will be indicated by a blue squiggly underline in code
 +        /// and a blue icon in the `Problems Panel`.
 +        diagnostics_warningsAsInfo: Vec<String> = "[]",
 +
 +        /// These directories will be ignored by rust-analyzer. They are
 +        /// relative to the workspace root, and globs are not supported. You may
 +        /// also need to add the folders to Code's `files.watcherExclude`.
 +        files_excludeDirs: Vec<PathBuf> = "[]",
 +        /// Controls file watching implementation.
 +        files_watcher: FilesWatcherDef = "\"client\"",
 +
 +        /// Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
 +        highlightRelated_breakPoints_enable: bool = "true",
 +        /// Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).
 +        highlightRelated_exitPoints_enable: bool = "true",
 +        /// Enables highlighting of related references while the cursor is on any identifier.
 +        highlightRelated_references_enable: bool = "true",
 +        /// Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords.
 +        highlightRelated_yieldPoints_enable: bool = "true",
 +
 +        /// Whether to show `Debug` action. Only applies when
 +        /// `#rust-analyzer.hover.actions.enable#` is set.
 +        hover_actions_debug_enable: bool           = "true",
 +        /// Whether to show HoverActions in Rust files.
 +        hover_actions_enable: bool          = "true",
 +        /// Whether to show `Go to Type Definition` action. Only applies when
 +        /// `#rust-analyzer.hover.actions.enable#` is set.
 +        hover_actions_gotoTypeDef_enable: bool     = "true",
 +        /// Whether to show `Implementations` action. Only applies when
 +        /// `#rust-analyzer.hover.actions.enable#` is set.
 +        hover_actions_implementations_enable: bool = "true",
 +        /// Whether to show `References` action. Only applies when
 +        /// `#rust-analyzer.hover.actions.enable#` is set.
 +        hover_actions_references_enable: bool      = "false",
 +        /// Whether to show `Run` action. Only applies when
 +        /// `#rust-analyzer.hover.actions.enable#` is set.
 +        hover_actions_run_enable: bool             = "true",
 +
 +        /// Whether to show documentation on hover.
 +        hover_documentation_enable: bool           = "true",
 +        /// Whether to show keyword hover popups. Only applies when
 +        /// `#rust-analyzer.hover.documentation.enable#` is set.
 +        hover_documentation_keywords_enable: bool  = "true",
 +        /// Use markdown syntax for links in hover.
 +        hover_links_enable: bool = "true",
 +
 +        /// Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.
 +        imports_granularity_enforce: bool              = "false",
 +        /// How imports should be grouped into use statements.
 +        imports_granularity_group: ImportGranularityDef  = "\"crate\"",
 +        /// Group inserted imports by the https://rust-analyzer.github.io/manual.html#auto-import[following order]. Groups are separated by newlines.
 +        imports_group_enable: bool                           = "true",
 +        /// Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
 +        imports_merge_glob: bool           = "true",
 +        /// The path structure for newly inserted paths to use.
 +        imports_prefix: ImportPrefixDef               = "\"plain\"",
 +
 +        /// Whether to show inlay type hints for binding modes.
 +        inlayHints_bindingModeHints_enable: bool                   = "false",
 +        /// Whether to show inlay type hints for method chains.
 +        inlayHints_chainingHints_enable: bool                      = "true",
 +        /// Whether to show inlay hints after a closing `}` to indicate what item it belongs to.
 +        inlayHints_closingBraceHints_enable: bool                  = "true",
 +        /// Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1
 +        /// to always show them).
 +        inlayHints_closingBraceHints_minLines: usize               = "25",
 +        /// Whether to show inlay type hints for return types of closures.
 +        inlayHints_closureReturnTypeHints_enable: ClosureReturnTypeHintsDef  = "\"never\"",
 +        /// Whether to show inlay type hints for elided lifetimes in function signatures.
 +        inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = "\"never\"",
 +        /// Whether to prefer using parameter names as the name for elided lifetime hints if possible.
 +        inlayHints_lifetimeElisionHints_useParameterNames: bool    = "false",
 +        /// Maximum length for inlay hints. Set to null to have an unlimited length.
 +        inlayHints_maxLength: Option<usize>                        = "25",
 +        /// Whether to show function parameter name inlay hints at the call
 +        /// site.
 +        inlayHints_parameterHints_enable: bool                     = "true",
 +        /// Whether to show inlay type hints for compiler inserted reborrows.
 +        inlayHints_reborrowHints_enable: ReborrowHintsDef          = "\"never\"",
 +        /// Whether to render leading colons for type hints, and trailing colons for parameter hints.
 +        inlayHints_renderColons: bool                              = "true",
 +        /// Whether to show inlay type hints for variables.
 +        inlayHints_typeHints_enable: bool                          = "true",
 +        /// Whether to hide inlay type hints for `let` statements that initialize to a closure.
 +        /// Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.
 +        inlayHints_typeHints_hideClosureInitialization: bool       = "false",
 +        /// Whether to hide inlay type hints for constructors.
 +        inlayHints_typeHints_hideNamedConstructor: bool            = "false",
 +
 +        /// Join lines merges consecutive declaration and initialization of an assignment.
 +        joinLines_joinAssignments: bool = "true",
 +        /// Join lines inserts else between consecutive ifs.
 +        joinLines_joinElseIf: bool = "true",
 +        /// Join lines removes trailing commas.
 +        joinLines_removeTrailingComma: bool = "true",
 +        /// Join lines unwraps trivial blocks.
 +        joinLines_unwrapTrivialBlock: bool = "true",
 +
 +        /// Whether to show `Debug` lens. Only applies when
 +        /// `#rust-analyzer.lens.enable#` is set.
 +        lens_debug_enable: bool            = "true",
 +        /// Whether to show CodeLens in Rust files.
 +        lens_enable: bool           = "true",
 +        /// Internal config: use custom client-side commands even when the
 +        /// client doesn't set the corresponding capability.
 +        lens_forceCustomCommands: bool = "true",
 +        /// Whether to show `Implementations` lens. Only applies when
 +        /// `#rust-analyzer.lens.enable#` is set.
 +        lens_implementations_enable: bool  = "true",
 +        /// Whether to show `References` lens for Struct, Enum, and Union.
 +        /// Only applies when `#rust-analyzer.lens.enable#` is set.
 +        lens_references_adt_enable: bool = "false",
 +        /// Whether to show `References` lens for Enum Variants.
 +        /// Only applies when `#rust-analyzer.lens.enable#` is set.
 +        lens_references_enumVariant_enable: bool = "false",
 +        /// Whether to show `Method References` lens. Only applies when
 +        /// `#rust-analyzer.lens.enable#` is set.
 +        lens_references_method_enable: bool = "false",
 +        /// Whether to show `References` lens for Trait.
 +        /// Only applies when `#rust-analyzer.lens.enable#` is set.
 +        lens_references_trait_enable: bool = "false",
 +        /// Whether to show `Run` lens. Only applies when
 +        /// `#rust-analyzer.lens.enable#` is set.
 +        lens_run_enable: bool              = "true",
 +
 +        /// Disable project auto-discovery in favor of explicitly specified set
 +        /// of projects.
 +        ///
 +        /// Elements must be paths pointing to `Cargo.toml`,
 +        /// `rust-project.json`, or JSON objects in `rust-project.json` format.
 +        linkedProjects: Vec<ManifestOrProjectJson> = "[]",
 +
 +        /// Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.
 +        lru_capacity: Option<usize>                 = "null",
 +
 +        /// Whether to show `can't find Cargo.toml` error message.
 +        notifications_cargoTomlNotFound: bool      = "true",
 +
 +        /// Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.
 +        procMacro_attributes_enable: bool = "true",
 +        /// Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.
 +        procMacro_enable: bool                     = "true",
 +        /// These proc-macros will be ignored when trying to expand them.
 +        ///
 +        /// This config takes a map of crate names with the exported proc-macro names to ignore as values.
 +        procMacro_ignored: FxHashMap<Box<str>, Box<[Box<str>]>>          = "{}",
 +        /// Internal config, path to proc-macro server executable (typically,
 +        /// this is rust-analyzer itself, but we override this in tests).
 +        procMacro_server: Option<PathBuf>          = "null",
 +
 +        /// Command to be executed instead of 'cargo' for runnables.
 +        runnables_command: Option<String> = "null",
 +        /// Additional arguments to be passed to cargo for runnables such as
 +        /// tests or binaries. For example, it may be `--release`.
 +        runnables_extraArgs: Vec<String>   = "[]",
 +
 +        /// Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private
 +        /// projects, or "discover" to try to automatically find it if the `rustc-dev` component
 +        /// is installed.
 +        ///
 +        /// Any project which uses rust-analyzer with the rustcPrivate
 +        /// crates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.
 +        ///
 +        /// This option does not take effect until rust-analyzer is restarted.
 +        rustc_source: Option<String> = "null",
 +
 +        /// Additional arguments to `rustfmt`.
 +        rustfmt_extraArgs: Vec<String>               = "[]",
 +        /// Advanced option, fully override the command rust-analyzer uses for
 +        /// formatting.
 +        rustfmt_overrideCommand: Option<Vec<String>> = "null",
 +        /// Enables the use of rustfmt's unstable range formatting command for the
 +        /// `textDocument/rangeFormatting` request. The rustfmt option is unstable and only
 +        /// available on a nightly build.
 +        rustfmt_rangeFormatting_enable: bool = "false",
 +
++        /// Inject additional highlighting into doc comments.
++        ///
++        /// When enabled, rust-analyzer will highlight rust source in doc comments as well as intra
++        /// doc links.
++        semanticHighlighting_doc_comment_inject_enable: bool = "true",
++        /// Use semantic tokens for operators.
++        ///
++        /// When disabled, rust-analyzer will emit semantic tokens only for operator tokens when
++        /// they are tagged with modifiers.
++        semanticHighlighting_operator_enable: bool = "true",
++        /// Use specialized semantic tokens for operators.
++        ///
++        /// When enabled, rust-analyzer will emit special token types for operator tokens instead
++        /// of the generic `operator` token type.
++        semanticHighlighting_operator_specialization_enable: bool = "false",
++        /// Use semantic tokens for punctuations.
++        ///
++        /// When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when
++        /// they are tagged with modifiers or have a special role.
++        semanticHighlighting_punctuation_enable: bool = "false",
++        /// When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro
++        /// calls.
++        semanticHighlighting_punctuation_separate_macro_bang: bool = "false",
++        /// Use specialized semantic tokens for punctuations.
++        ///
++        /// When enabled, rust-analyzer will emit special token types for punctuation tokens instead
++        /// of the generic `punctuation` token type.
++        semanticHighlighting_punctuation_specialization_enable: bool = "false",
 +        /// Use semantic tokens for strings.
 +        ///
 +        /// In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
 +        /// By disabling semantic tokens for strings, other grammars can be used to highlight
 +        /// their contents.
 +        semanticHighlighting_strings_enable: bool = "true",
 +
 +        /// Show full signature of the callable. Only shows parameters if disabled.
 +        signatureInfo_detail: SignatureDetail                           = "\"full\"",
 +        /// Show documentation.
 +        signatureInfo_documentation_enable: bool                       = "true",
 +
 +        /// Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list.
 +        typing_autoClosingAngleBrackets_enable: bool = "false",
 +
 +        /// Workspace symbol search kind.
 +        workspace_symbol_search_kind: WorkspaceSymbolSearchKindDef = "\"only_types\"",
 +        /// Limits the number of items returned from a workspace symbol search (Defaults to 128).
 +        /// Some clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.
 +        /// Other clients requires all results upfront and might require a higher limit.
 +        workspace_symbol_search_limit: usize = "128",
 +        /// Workspace symbol search scope.
 +        workspace_symbol_search_scope: WorkspaceSymbolSearchScopeDef = "\"workspace\"",
 +    }
 +}
 +
 +impl Default for ConfigData {
 +    fn default() -> Self {
 +        ConfigData::from_json(serde_json::Value::Null, &mut Vec::new())
 +    }
 +}
 +
/// Fully resolved runtime configuration of the server: client capabilities,
/// the workspace root, and the user-supplied settings (`ConfigData`).
#[derive(Debug, Clone)]
pub struct Config {
    /// Project manifests discovered under the workspace root, if discovery ran.
    pub discovered_projects: Option<Vec<ProjectManifest>>,
    /// Capabilities advertised by the connected LSP client.
    caps: lsp_types::ClientCapabilities,
    /// Absolute root directory of the workspace.
    root_path: AbsPathBuf,
    /// User-provided settings, parsed by `Config::update`.
    data: ConfigData,
    /// Files tracked outside of any workspace (`detachedFiles` setting).
    detached_files: Vec<AbsPathBuf>,
    /// Completion snippets compiled from `completion_snippets_custom`.
    snippets: Vec<Snippet>,
}
 +
/// Thread-count type for parallel cache priming; `u8` bounds the
/// user-configurable value to 0-255 (presumably the `cachePriming.numThreads`
/// setting — defined outside this chunk, TODO confirm).
type ParallelCachePrimingNumThreads = u8;
 +
/// A project the server should load: either located via a manifest file on
/// disk, or described inline in JSON inside the settings.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum LinkedProject {
    /// A `Cargo.toml` / `rust-project.json` found on disk.
    ProjectManifest(ProjectManifest),
    /// A `rust-project.json`-format object embedded in `linkedProjects`.
    InlineJsonProject(ProjectJson),
}
 +
 +impl From<ProjectManifest> for LinkedProject {
 +    fn from(v: ProjectManifest) -> Self {
 +        LinkedProject::ProjectManifest(v)
 +    }
 +}
 +
 +impl From<ProjectJson> for LinkedProject {
 +    fn from(v: ProjectJson) -> Self {
 +        LinkedProject::InlineJsonProject(v)
 +    }
 +}
 +
/// Options for signature-help (call-info) rendering.
pub struct CallInfoConfig {
    /// Show only the parameters rather than the full signature
    /// (`signatureInfo.detail` setting).
    pub params_only: bool,
    /// Include documentation in the signature help
    /// (`signatureInfo.documentation.enable` setting).
    pub docs: bool,
}
 +
/// Which CodeLens annotations to render, resolved from the `lens.*` settings.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct LensConfig {
    // runnables
    pub run: bool,
    pub debug: bool,

    // implementations
    pub implementations: bool,

    // references
    pub method_refs: bool,
    pub refs_adt: bool,   // for Struct, Enum, Union
    pub refs_trait: bool, // for Trait
    pub enum_variant_refs: bool,
}
 +
 +impl LensConfig {
 +    pub fn any(&self) -> bool {
 +        self.run
 +            || self.debug
 +            || self.implementations
 +            || self.method_refs
 +            || self.refs_adt
 +            || self.refs_trait
 +            || self.enum_variant_refs
 +    }
 +
 +    pub fn none(&self) -> bool {
 +        !self.any()
 +    }
 +
 +    pub fn runnable(&self) -> bool {
 +        self.run || self.debug
 +    }
 +
 +    pub fn references(&self) -> bool {
 +        self.method_refs || self.refs_adt || self.refs_trait || self.enum_variant_refs
 +    }
 +}
 +
/// Which clickable actions to attach to hover popups
/// (from the `hover.actions.*` settings).
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct HoverActionsConfig {
    pub implementations: bool,
    pub references: bool,
    pub run: bool,
    pub debug: bool,
    pub goto_type_def: bool,
}
 +
 +impl HoverActionsConfig {
 +    pub const NO_ACTIONS: Self = Self {
 +        implementations: false,
 +        references: false,
 +        run: false,
 +        debug: false,
 +        goto_type_def: false,
 +    };
 +
 +    pub fn any(&self) -> bool {
 +        self.implementations || self.references || self.runnable() || self.goto_type_def
 +    }
 +
 +    pub fn none(&self) -> bool {
 +        !self.any()
 +    }
 +
 +    pub fn runnable(&self) -> bool {
 +        self.run || self.debug
 +    }
 +}
 +
/// File-watching and file-exclusion settings.
#[derive(Debug, Clone)]
pub struct FilesConfig {
    /// Who watches the file system for changes.
    pub watcher: FilesWatcher,
    /// Absolute directories excluded from discovery (`files.excludeDirs`).
    pub exclude: Vec<AbsPathBuf>,
}
 +
/// Which side is responsible for watching the file system.
#[derive(Debug, Clone)]
pub enum FilesWatcher {
    /// The LSP client watches and sends `workspace/didChangeWatchedFiles`.
    Client,
    /// rust-analyzer watches the file system itself.
    Server,
}
 +
/// Which popup notifications the server may show to the user.
#[derive(Debug, Clone)]
pub struct NotificationsConfig {
    /// Show the `can't find Cargo.toml` error message.
    pub cargo_toml_not_found: bool,
}
 +
/// How to invoke `rustfmt` for formatting requests.
#[derive(Debug, Clone)]
pub enum RustfmtConfig {
    /// Run the regular `rustfmt` binary; `enable_range_formatting` opts into
    /// its unstable range-formatting support.
    Rustfmt { extra_args: Vec<String>, enable_range_formatting: bool },
    /// Run a user-specified command instead (`rustfmt.overrideCommand`).
    CustomCommand { command: String, args: Vec<String> },
}
 +
/// Configuration for runnable items, such as `main` function or tests.
#[derive(Debug, Clone)]
pub struct RunnablesConfig {
    /// Custom command to be executed instead of `cargo` for runnables
    /// (`runnables.command` setting).
    pub override_cargo: Option<String>,
    /// Additional arguments for the `cargo`, e.g. `--release`
    /// (`runnables.extraArgs` setting).
    pub cargo_extra_args: Vec<String>,
}
 +
/// Configuration for workspace symbol search requests.
#[derive(Debug, Clone)]
pub struct WorkspaceSymbolConfig {
    /// In what scope should the symbol be searched in.
    pub search_scope: WorkspaceSymbolSearchScope,
    /// What kind of symbol is being searched for.
    pub search_kind: WorkspaceSymbolSearchKind,
    /// How many items are returned at most (`workspace.symbol.search.limit`).
    pub search_limit: usize,
}
 +
/// Client-side commands the editor can execute on behalf of the server
/// (derived from the client's advertised capabilities).
pub struct ClientCommandsConfig {
    pub run_single: bool,
    pub debug_single: bool,
    pub show_reference: bool,
    pub goto_location: bool,
    pub trigger_parameter_hints: bool,
}
 +
/// Error returned by `Config::update`: the list of config keys that failed to
/// deserialize, each paired with the underlying serde error.
#[derive(Debug)]
pub struct ConfigUpdateError {
    errors: Vec<(String, serde_json::Error)>,
}
 +
 +impl fmt::Display for ConfigUpdateError {
 +    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 +        let errors = self.errors.iter().format_with("\n", |(key, e), f| {
 +            f(key)?;
 +            f(&": ")?;
 +            f(e)
 +        });
 +        write!(
 +            f,
 +            "rust-analyzer found {} invalid config value{}:\n{}",
 +            self.errors.len(),
 +            if self.errors.len() == 1 { "" } else { "s" },
 +            errors
 +        )
 +    }
 +}
 +
impl Config {
    /// Creates a config with all-default settings for the given workspace root
    /// and client capabilities.
    pub fn new(root_path: AbsPathBuf, caps: ClientCapabilities) -> Self {
        Config {
            caps,
            data: ConfigData::default(),
            detached_files: Vec::new(),
            discovered_projects: None,
            root_path,
            snippets: Default::default(),
        }
    }

    /// Applies a JSON settings blob sent by the client.
    ///
    /// Invalid values are collected and returned in `ConfigUpdateError`;
    /// the remaining valid values are still applied.
    pub fn update(&mut self, mut json: serde_json::Value) -> Result<(), ConfigUpdateError> {
        tracing::info!("updating config from JSON: {:#}", json);
        // `null` or an empty object means "no changes": keep the current config.
        if json.is_null() || json.as_object().map_or(false, |it| it.is_empty()) {
            return Ok(());
        }
        let mut errors = Vec::new();
        // `detachedFiles` is extracted manually (and removed from `json`)
        // before the bulk deserialization below.
        self.detached_files =
            get_field::<Vec<PathBuf>>(&mut json, &mut errors, "detachedFiles", None, "[]")
                .into_iter()
                .map(AbsPathBuf::assert)
                .collect();
        // Rewrite settings that use deprecated names/shapes into their current form.
        patch_old_style::patch_json_for_outdated_configs(&mut json);
        self.data = ConfigData::from_json(json, &mut errors);
        tracing::debug!("deserialized config data: {:#?}", self.data);
        // Rebuild the snippet list from the (possibly changed) custom snippets.
        self.snippets.clear();
        for (name, def) in self.data.completion_snippets_custom.iter() {
            // A snippet with neither a prefix nor a postfix trigger can never
            // fire; skip it silently.
            if def.prefix.is_empty() && def.postfix.is_empty() {
                continue;
            }
            let scope = match def.scope {
                SnippetScopeDef::Expr => SnippetScope::Expr,
                SnippetScopeDef::Type => SnippetScope::Type,
                SnippetScopeDef::Item => SnippetScope::Item,
            };
            match Snippet::new(
                &def.prefix,
                &def.postfix,
                &def.body,
                def.description.as_ref().unwrap_or(name),
                &def.requires,
                scope,
            ) {
                Some(snippet) => self.snippets.push(snippet),
                // A malformed snippet is reported as a config error rather
                // than dropped silently.
                None => errors.push((
                    format!("snippet {name} is invalid"),
                    <serde_json::Error as serde::de::Error>::custom(
                        "snippet path is invalid or triggers are missing",
                    ),
                )),
            }
        }

        self.validate(&mut errors);

        if errors.is_empty() {
            Ok(())
        } else {
            Err(ConfigUpdateError { errors })
        }
    }

    /// Checks cross-field invariants, pushing violations into `error_sink`.
    fn validate(&self, error_sink: &mut Vec<(String, serde_json::Error)>) {
        use serde::de::Error;
        if self.data.checkOnSave_command.is_empty() {
            error_sink.push((
                "/checkOnSave/command".to_string(),
                serde_json::Error::custom("expected a non-empty string"),
            ));
        }
    }

    /// JSON schema describing every supported setting, generated from
    /// `config_data!`.
    pub fn json_schema() -> serde_json::Value {
        ConfigData::json_schema()
    }

    /// The workspace root the server was started in.
    pub fn root_path(&self) -> &AbsPathBuf {
        &self.root_path
    }

    /// Capabilities advertised by the connected client.
    pub fn caps(&self) -> &lsp_types::ClientCapabilities {
        &self.caps
    }

    /// Files tracked outside of any workspace (`detachedFiles` setting).
    pub fn detached_files(&self) -> &[AbsPathBuf] {
        &self.detached_files
    }
}
 +
/// Evaluates `$expr` inside an immediately-invoked closure so that `?` can be
/// used on `Option`s, producing an `Option<_>` instead of early-returning
/// from the enclosing function.
macro_rules! try_ {
    ($expr:expr) => {
        || -> _ { Some($expr) }()
    };
}
/// Like `try_!`, but falls back to `$or` when any `?` inside `$expr` hits `None`.
macro_rules! try_or {
    ($expr:expr, $or:expr) => {
        try_!($expr).unwrap_or($or)
    };
}
 +
/// Like `try_!`, but falls back to the result type's `Default` when any `?`
/// inside `$expr` hits `None`.
macro_rules! try_or_def {
    ($expr:expr) => {
        try_!($expr).unwrap_or_default()
    };
}
 +
 +impl Config {
    /// Resolves the set of projects to load.
    ///
    /// Explicitly configured `linkedProjects` take priority; otherwise falls
    /// back to the manifests discovered under the workspace root, minus
    /// anything under `files.excludeDirs`.
    pub fn linked_projects(&self) -> Vec<LinkedProject> {
        match self.data.linkedProjects.as_slice() {
            [] => match self.discovered_projects.as_ref() {
                Some(discovered_projects) => {
                    // Paths from `files.excludeDirs`, made absolute against the root.
                    let exclude_dirs: Vec<_> = self
                        .data
                        .files_excludeDirs
                        .iter()
                        .map(|p| self.root_path.join(p))
                        .collect();
                    discovered_projects
                        .iter()
                        .filter(|p| {
                            let (ProjectManifest::ProjectJson(path)
                            | ProjectManifest::CargoToml(path)) = p;
                            !exclude_dirs.iter().any(|p| path.starts_with(p))
                        })
                        .cloned()
                        .map(LinkedProject::from)
                        .collect()
                }
                None => Vec::new(),
            },
            linked_projects => linked_projects
                .iter()
                .filter_map(|linked_project| match linked_project {
                    ManifestOrProjectJson::Manifest(it) => {
                        let path = self.root_path.join(it);
                        // A manifest that fails to load is logged and skipped
                        // rather than failing the whole workspace.
                        ProjectManifest::from_manifest_file(path)
                            .map_err(|e| tracing::error!("failed to load linked project: {}", e))
                            .ok()
                            .map(Into::into)
                    }
                    ManifestOrProjectJson::ProjectJson(it) => {
                        Some(ProjectJson::new(&self.root_path, it.clone()).into())
                    }
                })
                .collect(),
        }
    }
 +
 +    pub fn did_save_text_document_dynamic_registration(&self) -> bool {
 +        let caps = try_or_def!(self.caps.text_document.as_ref()?.synchronization.clone()?);
 +        caps.did_save == Some(true) && caps.dynamic_registration == Some(true)
 +    }
 +
 +    pub fn did_change_watched_files_dynamic_registration(&self) -> bool {
 +        try_or_def!(
 +            self.caps.workspace.as_ref()?.did_change_watched_files.as_ref()?.dynamic_registration?
 +        )
 +    }
 +
    /// Whether to warm up caches after workspace load (`cachePriming.enable`).
    pub fn prefill_caches(&self) -> bool {
        self.data.cachePriming_enable
    }
 +
 +    pub fn location_link(&self) -> bool {
 +        try_or_def!(self.caps.text_document.as_ref()?.definition?.link_support?)
 +    }
 +
 +    pub fn line_folding_only(&self) -> bool {
 +        try_or_def!(self.caps.text_document.as_ref()?.folding_range.as_ref()?.line_folding_only?)
 +    }
 +
 +    pub fn hierarchical_symbols(&self) -> bool {
 +        try_or_def!(
 +            self.caps
 +                .text_document
 +                .as_ref()?
 +                .document_symbol
 +                .as_ref()?
 +                .hierarchical_document_symbol_support?
 +        )
 +    }
 +
 +    pub fn code_action_literals(&self) -> bool {
 +        try_!(self
 +            .caps
 +            .text_document
 +            .as_ref()?
 +            .code_action
 +            .as_ref()?
 +            .code_action_literal_support
 +            .as_ref()?)
 +        .is_some()
 +    }
 +
 +    pub fn work_done_progress(&self) -> bool {
 +        try_or_def!(self.caps.window.as_ref()?.work_done_progress?)
 +    }
 +
 +    pub fn will_rename(&self) -> bool {
 +        try_or_def!(self.caps.workspace.as_ref()?.file_operations.as_ref()?.will_rename?)
 +    }
 +
 +    pub fn change_annotation_support(&self) -> bool {
 +        try_!(self
 +            .caps
 +            .workspace
 +            .as_ref()?
 +            .workspace_edit
 +            .as_ref()?
 +            .change_annotation_support
 +            .as_ref()?)
 +        .is_some()
 +    }
 +
 +    pub fn code_action_resolve(&self) -> bool {
 +        try_or_def!(self
 +            .caps
 +            .text_document
 +            .as_ref()?
 +            .code_action
 +            .as_ref()?
 +            .resolve_support
 +            .as_ref()?
 +            .properties
 +            .as_slice())
 +        .iter()
 +        .any(|it| it == "edit")
 +    }
 +
 +    pub fn signature_help_label_offsets(&self) -> bool {
 +        try_or_def!(
 +            self.caps
 +                .text_document
 +                .as_ref()?
 +                .signature_help
 +                .as_ref()?
 +                .signature_information
 +                .as_ref()?
 +                .parameter_information
 +                .as_ref()?
 +                .label_offset_support?
 +        )
 +    }
 +
 +    pub fn completion_label_details_support(&self) -> bool {
 +        try_!(self
 +            .caps
 +            .text_document
 +            .as_ref()?
 +            .completion
 +            .as_ref()?
 +            .completion_item
 +            .as_ref()?
 +            .label_details_support
 +            .as_ref()?)
 +        .is_some()
 +    }
 +
 +    pub fn offset_encoding(&self) -> OffsetEncoding {
 +        if supports_utf8(&self.caps) {
 +            OffsetEncoding::Utf8
 +        } else {
 +            OffsetEncoding::Utf16
 +        }
 +    }
 +
 +    fn experimental(&self, index: &'static str) -> bool {
 +        try_or_def!(self.caps.experimental.as_ref()?.get(index)?.as_bool()?)
 +    }
 +
    /// Whether the client supports the experimental `codeActionGroup` extension.
    pub fn code_action_group(&self) -> bool {
        self.experimental("codeActionGroup")
    }

    /// Whether the client supports the experimental `serverStatusNotification`
    /// extension.
    pub fn server_status_notification(&self) -> bool {
        self.experimental("serverStatusNotification")
    }

    /// Whether native rust-analyzer diagnostics are enabled
    /// (`diagnostics.enable`).
    pub fn publish_diagnostics(&self) -> bool {
        self.data.diagnostics_enable
    }
 +
    /// Assembles the diagnostics configuration from the individual settings.
    pub fn diagnostics(&self) -> DiagnosticsConfig {
        DiagnosticsConfig {
            proc_attr_macros_enabled: self.expand_proc_attr_macros(),
            proc_macros_enabled: self.data.procMacro_enable,
            disable_experimental: !self.data.diagnostics_experimental_enable,
            disabled: self.data.diagnostics_disabled.clone(),
            expr_fill_default: match self.data.assist_expressionFillDefault {
                ExprFillDefaultDef::Todo => ExprFillDefaultMode::Todo,
                ExprFillDefaultDef::Default => ExprFillDefaultMode::Default,
            },
            insert_use: self.insert_use_config(),
        }
    }

    /// Settings controlling how external (cargo/rustc) diagnostic paths and
    /// severities are mapped (`diagnostics.remapPrefix`, `warningsAs*`).
    pub fn diagnostics_map(&self) -> DiagnosticsMapConfig {
        DiagnosticsMapConfig {
            remap_prefix: self.data.diagnostics_remapPrefix.clone(),
            warnings_as_info: self.data.diagnostics_warningsAsInfo.clone(),
            warnings_as_hint: self.data.diagnostics_warningsAsHint.clone(),
        }
    }

    /// LRU cache capacity (`lru.capacity`); `None` means use the default.
    pub fn lru_capacity(&self) -> Option<usize> {
        self.data.lru_capacity
    }
 +
 +    pub fn proc_macro_srv(&self) -> Option<(AbsPathBuf, Vec<OsString>)> {
 +        if !self.data.procMacro_enable {
 +            return None;
 +        }
 +        let path = match &self.data.procMacro_server {
 +            Some(it) => self.root_path.join(it),
 +            None => AbsPathBuf::assert(std::env::current_exe().ok()?),
 +        };
 +        Some((path, vec!["proc-macro".into()]))
 +    }
 +
    /// Proc-macros to skip when expanding, keyed by crate name
    /// (`procMacro.ignored` setting).
    pub fn dummy_replacements(&self) -> &FxHashMap<Box<str>, Box<[Box<str>]>> {
        &self.data.procMacro_ignored
    }

    /// Attribute-macro expansion requires general proc-macro support to be
    /// enabled as well.
    pub fn expand_proc_attr_macros(&self) -> bool {
        self.data.procMacro_enable && self.data.procMacro_attributes_enable
    }
 +
 +    /// File-watching configuration. Client-side watching is only honored when the
 +    /// client actually supports dynamic registration of file watchers; otherwise
 +    /// the server watches files itself.
 +    pub fn files(&self) -> FilesConfig {
 +        FilesConfig {
 +            watcher: match self.data.files_watcher {
 +                FilesWatcherDef::Client if self.did_change_watched_files_dynamic_registration() => {
 +                    FilesWatcher::Client
 +                }
 +                _ => FilesWatcher::Server,
 +            },
 +            // Excluded directories are resolved relative to the workspace root.
 +            exclude: self.data.files_excludeDirs.iter().map(|it| self.root_path.join(it)).collect(),
 +        }
 +    }
 +
 +    /// Which user-facing notifications the server may send.
 +    pub fn notifications(&self) -> NotificationsConfig {
 +        NotificationsConfig { cargo_toml_not_found: self.data.notifications_cargoTomlNotFound }
 +    }
 +
 +    /// Whether the workspace is reloaded automatically when cargo metadata changes.
 +    pub fn cargo_autoreload(&self) -> bool {
 +        self.data.cargo_autoreload
 +    }
 +
 +    /// Build scripts also run whenever proc macros are enabled — presumably because
 +    /// proc-macro artifacts are produced by the build (NOTE(review): confirm).
 +    pub fn run_build_scripts(&self) -> bool {
 +        self.data.cargo_buildScripts_enable || self.data.procMacro_enable
 +    }
 +
 +    /// Builds the cargo workspace-loading configuration.
 +    pub fn cargo(&self) -> CargoConfig {
 +        // The literal string "discover" is a sentinel asking rust-analyzer to find
 +        // the rustc sources itself; any other value is treated as a path relative
 +        // to the workspace root.
 +        let rustc_source = self.data.rustc_source.as_ref().map(|rustc_src| {
 +            if rustc_src == "discover" {
 +                RustcSource::Discover
 +            } else {
 +                RustcSource::Path(self.root_path.join(rustc_src))
 +            }
 +        });
 +
 +        CargoConfig {
 +            no_default_features: self.data.cargo_noDefaultFeatures,
 +            all_features: matches!(self.data.cargo_features, CargoFeatures::All),
 +            // `All` is expressed via the `all_features` flag above, so the explicit
 +            // feature list is empty in that case.
 +            features: match &self.data.cargo_features {
 +                CargoFeatures::All => vec![],
 +                CargoFeatures::Listed(it) => it.clone(),
 +            },
 +            target: self.data.cargo_target.clone(),
 +            no_sysroot: self.data.cargo_noSysroot,
 +            rustc_source,
 +            unset_test_crates: UnsetTestCrates::Only(self.data.cargo_unsetTest.clone()),
 +            wrap_rustc_in_build_scripts: self.data.cargo_buildScripts_useRustcWrapper,
 +            run_build_script_command: self.data.cargo_buildScripts_overrideCommand.clone(),
 +        }
 +    }
 +
 +    /// Rustfmt invocation: a non-empty override command takes precedence over the
 +    /// stock `rustfmt` invocation.
 +    pub fn rustfmt(&self) -> RustfmtConfig {
 +        match &self.data.rustfmt_overrideCommand {
 +            Some(args) if !args.is_empty() => {
 +                // First element is the binary, the remainder are its arguments.
 +                let mut args = args.clone();
 +                let command = args.remove(0);
 +                RustfmtConfig::CustomCommand { command, args }
 +            }
 +            Some(_) | None => RustfmtConfig::Rustfmt {
 +                extra_args: self.data.rustfmt_extraArgs.clone(),
 +                enable_range_formatting: self.data.rustfmt_rangeFormatting_enable,
 +            },
 +        }
 +    }
 +
 +    /// Check-on-save (flycheck) configuration; `None` disables flycheck entirely.
 +    pub fn flycheck(&self) -> Option<FlycheckConfig> {
 +        if !self.data.checkOnSave_enable {
 +            return None;
 +        }
 +        let flycheck_config = match &self.data.checkOnSave_overrideCommand {
 +            // A non-empty override command replaces the cargo invocation; the first
 +            // element is the binary, the rest are its arguments.
 +            Some(args) if !args.is_empty() => {
 +                let mut args = args.clone();
 +                let command = args.remove(0);
 +                FlycheckConfig::CustomCommand { command, args }
 +            }
 +            // Otherwise run a cargo command; the checkOnSave_* settings fall back to
 +            // the corresponding cargo_* settings when unset.
 +            Some(_) | None => FlycheckConfig::CargoCommand {
 +                command: self.data.checkOnSave_command.clone(),
 +                target_triple: self
 +                    .data
 +                    .checkOnSave_target
 +                    .clone()
 +                    .or_else(|| self.data.cargo_target.clone()),
 +                all_targets: self.data.checkOnSave_allTargets,
 +                no_default_features: self
 +                    .data
 +                    .checkOnSave_noDefaultFeatures
 +                    .unwrap_or(self.data.cargo_noDefaultFeatures),
 +                all_features: matches!(
 +                    self.data.checkOnSave_features.as_ref().unwrap_or(&self.data.cargo_features),
 +                    CargoFeatures::All
 +                ),
 +                // As in `cargo()`: `All` is carried by the flag above, so the list is
 +                // empty in that case.
 +                features: match self
 +                    .data
 +                    .checkOnSave_features
 +                    .clone()
 +                    .unwrap_or_else(|| self.data.cargo_features.clone())
 +                {
 +                    CargoFeatures::All => vec![],
 +                    CargoFeatures::Listed(it) => it,
 +                },
 +                extra_args: self.data.checkOnSave_extraArgs.clone(),
 +            },
 +        };
 +        Some(flycheck_config)
 +    }
 +
 +    /// Overrides applied when running runnables (cargo replacement and extra args).
 +    pub fn runnables(&self) -> RunnablesConfig {
 +        RunnablesConfig {
 +            override_cargo: self.data.runnables_command.clone(),
 +            cargo_extra_args: self.data.runnables_extraArgs.clone(),
 +        }
 +    }
 +
 +    /// Builds the inlay-hints configuration, translating the config-level enums
 +    /// (which also accept booleans during deserialization) into the ide-level ones.
 +    pub fn inlay_hints(&self) -> InlayHintsConfig {
 +        InlayHintsConfig {
 +            render_colons: self.data.inlayHints_renderColons,
 +            type_hints: self.data.inlayHints_typeHints_enable,
 +            parameter_hints: self.data.inlayHints_parameterHints_enable,
 +            chaining_hints: self.data.inlayHints_chainingHints_enable,
 +            closure_return_type_hints: match self.data.inlayHints_closureReturnTypeHints_enable {
 +                ClosureReturnTypeHintsDef::Always => ide::ClosureReturnTypeHints::Always,
 +                ClosureReturnTypeHintsDef::Never => ide::ClosureReturnTypeHints::Never,
 +                ClosureReturnTypeHintsDef::WithBlock => ide::ClosureReturnTypeHints::WithBlock,
 +            },
 +            lifetime_elision_hints: match self.data.inlayHints_lifetimeElisionHints_enable {
 +                LifetimeElisionDef::Always => ide::LifetimeElisionHints::Always,
 +                LifetimeElisionDef::Never => ide::LifetimeElisionHints::Never,
 +                LifetimeElisionDef::SkipTrivial => ide::LifetimeElisionHints::SkipTrivial,
 +            },
 +            hide_named_constructor_hints: self.data.inlayHints_typeHints_hideNamedConstructor,
 +            hide_closure_initialization_hints: self
 +                .data
 +                .inlayHints_typeHints_hideClosureInitialization,
 +            reborrow_hints: match self.data.inlayHints_reborrowHints_enable {
 +                ReborrowHintsDef::Always => ide::ReborrowHints::Always,
 +                ReborrowHintsDef::Never => ide::ReborrowHints::Never,
 +                ReborrowHintsDef::Mutable => ide::ReborrowHints::MutableOnly,
 +            },
 +            binding_mode_hints: self.data.inlayHints_bindingModeHints_enable,
 +            param_names_for_lifetime_elision_hints: self
 +                .data
 +                .inlayHints_lifetimeElisionHints_useParameterNames,
 +            max_length: self.data.inlayHints_maxLength,
 +            // The min-lines threshold is only meaningful while the feature is on.
 +            closing_brace_hints_min_lines: if self.data.inlayHints_closingBraceHints_enable {
 +                Some(self.data.inlayHints_closingBraceHints_minLines)
 +            } else {
 +                None
 +            },
 +        }
 +    }
 +
 +    /// Import-insertion settings shared by completion, assists and diagnostics.
 +    fn insert_use_config(&self) -> InsertUseConfig {
 +        InsertUseConfig {
 +            granularity: match self.data.imports_granularity_group {
 +                ImportGranularityDef::Preserve => ImportGranularity::Preserve,
 +                ImportGranularityDef::Item => ImportGranularity::Item,
 +                ImportGranularityDef::Crate => ImportGranularity::Crate,
 +                ImportGranularityDef::Module => ImportGranularity::Module,
 +            },
 +            enforce_granularity: self.data.imports_granularity_enforce,
 +            prefix_kind: match self.data.imports_prefix {
 +                ImportPrefixDef::Plain => PrefixKind::Plain,
 +                ImportPrefixDef::ByCrate => PrefixKind::ByCrate,
 +                ImportPrefixDef::BySelf => PrefixKind::BySelf,
 +            },
 +            group: self.data.imports_group_enable,
 +            skip_glob_imports: !self.data.imports_merge_glob,
 +        }
 +    }
 +
 +    /// Completion configuration, combining user settings with client capabilities.
 +    pub fn completion(&self) -> CompletionConfig {
 +        CompletionConfig {
 +            enable_postfix_completions: self.data.completion_postfix_enable,
 +            // Auto-import completions additionally require the client to support
 +            // resolving completion-item edits lazily.
 +            enable_imports_on_the_fly: self.data.completion_autoimport_enable
 +                && completion_item_edit_resolve(&self.caps),
 +            enable_self_on_the_fly: self.data.completion_autoself_enable,
 +            enable_private_editable: self.data.completion_privateEditable_enable,
 +            callable: match self.data.completion_callable_snippets {
 +                CallableCompletionDef::FillArguments => Some(CallableSnippets::FillArguments),
 +                CallableCompletionDef::AddParentheses => Some(CallableSnippets::AddParentheses),
 +                CallableCompletionDef::None => None,
 +            },
 +            insert_use: self.insert_use_config(),
 +            // Snippets are only produced when the client advertises snippet support.
 +            snippet_cap: SnippetCap::new(try_or_def!(
 +                self.caps
 +                    .text_document
 +                    .as_ref()?
 +                    .completion
 +                    .as_ref()?
 +                    .completion_item
 +                    .as_ref()?
 +                    .snippet_support?
 +            )),
 +            snippets: self.snippets.clone(),
 +        }
 +    }
 +
 +    /// NOTE: despite the name this returns a `bool` — whether the client supports
 +    /// the experimental `snippetTextEdit` capability.
 +    pub fn snippet_cap(&self) -> bool {
 +        self.experimental("snippetTextEdit")
 +    }
 +
 +    /// Assist configuration; `allowed: None` means no assist-kind filtering here
 +    /// (filtering happens per-request).
 +    pub fn assist(&self) -> AssistConfig {
 +        AssistConfig {
 +            snippet_cap: SnippetCap::new(self.experimental("snippetTextEdit")),
 +            allowed: None,
 +            insert_use: self.insert_use_config(),
 +        }
 +    }
 +
 +    /// Join-lines behavior toggles.
 +    pub fn join_lines(&self) -> JoinLinesConfig {
 +        JoinLinesConfig {
 +            join_else_if: self.data.joinLines_joinElseIf,
 +            remove_trailing_comma: self.data.joinLines_removeTrailingComma,
 +            unwrap_trivial_blocks: self.data.joinLines_unwrapTrivialBlock,
 +            join_assignments: self.data.joinLines_joinAssignments,
 +        }
 +    }
 +
 +    /// Signature-help configuration (detail level and docs).
 +    pub fn call_info(&self) -> CallInfoConfig {
 +        CallInfoConfig {
 +            params_only: matches!(self.data.signatureInfo_detail, SignatureDetail::Parameters),
 +            docs: self.data.signatureInfo_documentation_enable,
 +        }
 +    }
 +
 +    /// Code-lens configuration: every individual lens is additionally gated on the
 +    /// master `lens_enable` switch.
 +    pub fn lens(&self) -> LensConfig {
 +        LensConfig {
 +            run: self.data.lens_enable && self.data.lens_run_enable,
 +            debug: self.data.lens_enable && self.data.lens_debug_enable,
 +            implementations: self.data.lens_enable && self.data.lens_implementations_enable,
 +            method_refs: self.data.lens_enable && self.data.lens_references_method_enable,
 +            refs_adt: self.data.lens_enable && self.data.lens_references_adt_enable,
 +            refs_trait: self.data.lens_enable && self.data.lens_references_trait_enable,
 +            enum_variant_refs: self.data.lens_enable
 +                && self.data.lens_references_enumVariant_enable,
 +        }
 +    }
 +
 +    /// Hover actions require both the experimental `hoverActions` client capability
 +    /// and the user setting; each action is further gated individually.
 +    pub fn hover_actions(&self) -> HoverActionsConfig {
 +        let enable = self.experimental("hoverActions") && self.data.hover_actions_enable;
 +        HoverActionsConfig {
 +            implementations: enable && self.data.hover_actions_implementations_enable,
 +            references: enable && self.data.hover_actions_references_enable,
 +            run: enable && self.data.hover_actions_run_enable,
 +            debug: enable && self.data.hover_actions_debug_enable,
 +            goto_type_def: enable && self.data.hover_actions_gotoTypeDef_enable,
 +        }
 +    }
 +
++    /// Semantic-highlighting configuration derived from user settings.
++    pub fn highlighting_config(&self) -> HighlightConfig {
++        HighlightConfig {
++            strings: self.data.semanticHighlighting_strings_enable,
++            punctuation: self.data.semanticHighlighting_punctuation_enable,
++            specialize_punctuation: self
++                .data
++                .semanticHighlighting_punctuation_specialization_enable,
++            macro_bang: self.data.semanticHighlighting_punctuation_separate_macro_bang,
++            operator: self.data.semanticHighlighting_operator_enable,
++            specialize_operator: self.data.semanticHighlighting_operator_specialization_enable,
++            inject_doc_comment: self.data.semanticHighlighting_doc_comment_inject_enable,
++            // NOTE(review): hard-coded off here — syntactic-only fallback highlighting
++            // is presumably enabled at other call sites; confirm.
++            syntactic_name_ref_highlighting: false,
++        }
 +    }
 +
 +    /// Hover configuration; the documentation format follows what the client
 +    /// advertises in `textDocument.hover.contentFormat`.
 +    pub fn hover(&self) -> HoverConfig {
 +        HoverConfig {
 +            links_in_hover: self.data.hover_links_enable,
 +            documentation: self.data.hover_documentation_enable.then(|| {
 +                let is_markdown = try_or_def!(self
 +                    .caps
 +                    .text_document
 +                    .as_ref()?
 +                    .hover
 +                    .as_ref()?
 +                    .content_format
 +                    .as_ref()?
 +                    .as_slice())
 +                .contains(&MarkupKind::Markdown);
 +                if is_markdown {
 +                    HoverDocFormat::Markdown
 +                } else {
 +                    HoverDocFormat::PlainText
 +                }
 +            }),
 +            keywords: self.data.hover_documentation_keywords_enable,
 +        }
 +    }
 +
 +    /// Workspace-symbol search configuration (scope, kind filter and result limit).
 +    pub fn workspace_symbol(&self) -> WorkspaceSymbolConfig {
 +        WorkspaceSymbolConfig {
 +            search_scope: match self.data.workspace_symbol_search_scope {
 +                WorkspaceSymbolSearchScopeDef::Workspace => WorkspaceSymbolSearchScope::Workspace,
 +                WorkspaceSymbolSearchScopeDef::WorkspaceAndDependencies => {
 +                    WorkspaceSymbolSearchScope::WorkspaceAndDependencies
 +                }
 +            },
 +            search_kind: match self.data.workspace_symbol_search_kind {
 +                WorkspaceSymbolSearchKindDef::OnlyTypes => WorkspaceSymbolSearchKind::OnlyTypes,
 +                WorkspaceSymbolSearchKindDef::AllSymbols => WorkspaceSymbolSearchKind::AllSymbols,
 +            },
 +            search_limit: self.data.workspace_symbol_search_limit,
 +        }
 +    }
 +
 +    /// Whether the client supports `workspace/semanticTokens/refresh`.
 +    pub fn semantic_tokens_refresh(&self) -> bool {
 +        try_or_def!(self.caps.workspace.as_ref()?.semantic_tokens.as_ref()?.refresh_support?)
 +    }
 +
 +    /// Whether the client supports `workspace/codeLens/refresh`.
 +    pub fn code_lens_refresh(&self) -> bool {
 +        try_or_def!(self.caps.workspace.as_ref()?.code_lens.as_ref()?.refresh_support?)
 +    }
 +
 +    /// Whether completion items may use insert/replace edits.
 +    pub fn insert_replace_support(&self) -> bool {
 +        try_or_def!(
 +            self.caps
 +                .text_document
 +                .as_ref()?
 +                .completion
 +                .as_ref()?
 +                .completion_item
 +                .as_ref()?
 +                .insert_replace_support?
 +        )
 +    }
 +
 +    /// Which client-side commands rust-analyzer may reference in responses, based
 +    /// on the experimental `commands` capability.
 +    pub fn client_commands(&self) -> ClientCommandsConfig {
 +        let commands =
 +            try_or!(self.caps.experimental.as_ref()?.get("commands")?, &serde_json::Value::Null);
 +        let commands: Option<lsp_ext::ClientCommandOptions> =
 +            serde_json::from_value(commands.clone()).ok();
 +        // `lens_forceCustomCommands` opts into all commands when the client did not
 +        // declare any.
 +        let force = commands.is_none() && self.data.lens_forceCustomCommands;
 +        let commands = commands.map(|it| it.commands).unwrap_or_default();
 +
 +        let get = |name: &str| commands.iter().any(|it| it == name) || force;
 +
 +        ClientCommandsConfig {
 +            run_single: get("rust-analyzer.runSingle"),
 +            debug_single: get("rust-analyzer.debugSingle"),
 +            show_reference: get("rust-analyzer.showReferences"),
 +            goto_location: get("rust-analyzer.gotoLocation"),
 +            trigger_parameter_hints: get("editor.action.triggerParameterHints"),
 +        }
 +    }
 +
 +    /// Per-kind toggles for highlight-related-ranges.
 +    pub fn highlight_related(&self) -> HighlightRelatedConfig {
 +        HighlightRelatedConfig {
 +            references: self.data.highlightRelated_references_enable,
 +            break_points: self.data.highlightRelated_breakPoints_enable,
 +            exit_points: self.data.highlightRelated_exitPoints_enable,
 +            yield_points: self.data.highlightRelated_yieldPoints_enable,
 +        }
 +    }
 +
 +    /// Worker threads used for cache priming; `0` means "one per physical core"
 +    /// (clamped to `u8::MAX`).
 +    pub fn prime_caches_num_threads(&self) -> u8 {
 +        match self.data.cachePriming_numThreads {
 +            0 => num_cpus::get_physical().try_into().unwrap_or(u8::MAX),
 +            n => n,
 +        }
 +    }
 +
 +    /// Whether typing `<` should auto-insert the closing `>`.
 +    pub fn typing_autoclose_angle(&self) -> bool {
 +        self.data.typing_autoClosingAngleBrackets_enable
 +    }
 +}
 +// Deserialization definitions
 +
 +// Generates a serde deserializer function that accepts exactly one boolean literal
 +// OR one specific string (e.g. `true` / "always"). Used so enum variants below can
 +// be configured either as a bool or as a named string.
 +macro_rules! create_bool_or_string_de {
 +    ($ident:ident<$bool:literal, $string:literal>) => {
 +        fn $ident<'de, D>(d: D) -> Result<(), D::Error>
 +        where
 +            D: serde::Deserializer<'de>,
 +        {
 +            struct V;
 +            impl<'de> serde::de::Visitor<'de> for V {
 +                type Value = ();
 +
 +                fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
 +                    formatter.write_str(concat!(
 +                        stringify!($bool),
 +                        " or \"",
 +                        stringify!($string),
 +                        "\""
 +                    ))
 +                }
 +
 +                // Accept only the configured boolean value.
 +                fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E>
 +                where
 +                    E: serde::de::Error,
 +                {
 +                    match v {
 +                        $bool => Ok(()),
 +                        _ => Err(serde::de::Error::invalid_value(
 +                            serde::de::Unexpected::Bool(v),
 +                            &self,
 +                        )),
 +                    }
 +                }
 +
 +                // Accept only the configured string value.
 +                fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
 +                where
 +                    E: serde::de::Error,
 +                {
 +                    match v {
 +                        $string => Ok(()),
 +                        _ => Err(serde::de::Error::invalid_value(
 +                            serde::de::Unexpected::Str(v),
 +                            &self,
 +                        )),
 +                    }
 +                }
 +
 +                // Some formats drive enum deserialization through `visit_enum`;
 +                // accept the configured name as a unit variant there as well.
 +                fn visit_enum<A>(self, a: A) -> Result<Self::Value, A::Error>
 +                where
 +                    A: serde::de::EnumAccess<'de>,
 +                {
 +                    use serde::de::VariantAccess;
 +                    let (variant, va) = a.variant::<&'de str>()?;
 +                    va.unit_variant()?;
 +                    match variant {
 +                        $string => Ok(()),
 +                        _ => Err(serde::de::Error::invalid_value(
 +                            serde::de::Unexpected::Str(variant),
 +                            &self,
 +                        )),
 +                    }
 +                }
 +            }
 +            d.deserialize_any(V)
 +        }
 +    };
 +}
 +create_bool_or_string_de!(true_or_always<true, "always">);
 +create_bool_or_string_de!(false_or_never<false, "never">);
 +
 +// Generates a deserializer function that accepts exactly one fixed string — the
 +// macro argument's name. Used for the string-only variants of the untagged enums
 +// below.
 +macro_rules! named_unit_variant {
 +    ($variant:ident) => {
 +        pub(super) fn $variant<'de, D>(deserializer: D) -> Result<(), D::Error>
 +        where
 +            D: serde::Deserializer<'de>,
 +        {
 +            struct V;
 +            impl<'de> serde::de::Visitor<'de> for V {
 +                type Value = ();
 +                fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 +                    f.write_str(concat!("\"", stringify!($variant), "\""))
 +                }
 +                fn visit_str<E: serde::de::Error>(self, value: &str) -> Result<Self::Value, E> {
 +                    if value == stringify!($variant) {
 +                        Ok(())
 +                    } else {
 +                        Err(E::invalid_value(serde::de::Unexpected::Str(value), &self))
 +                    }
 +                }
 +            }
 +            deserializer.deserialize_str(V)
 +        }
 +    };
 +}
 +
 +// String-only unit-variant deserializers used by the enums below.
 +mod de_unit_v {
 +    named_unit_variant!(all);
 +    named_unit_variant!(skip_trivial);
 +    named_unit_variant!(mutable);
 +    named_unit_variant!(with_block);
 +}
 +
 +// Where a user-defined snippet may be offered.
 +#[derive(Deserialize, Debug, Clone, Copy)]
 +#[serde(rename_all = "snake_case")]
 +enum SnippetScopeDef {
 +    Expr,
 +    Item,
 +    Type,
 +}
 +
 +impl Default for SnippetScopeDef {
 +    fn default() -> Self {
 +        SnippetScopeDef::Expr
 +    }
 +}
 +
 +// Raw deserialized form of a user-defined snippet; string fields accept either a
 +// single string or an array of strings (see `single_or_array`).
 +#[derive(Deserialize, Debug, Clone, Default)]
 +#[serde(default)]
 +struct SnippetDef {
 +    #[serde(deserialize_with = "single_or_array")]
 +    prefix: Vec<String>,
 +    #[serde(deserialize_with = "single_or_array")]
 +    postfix: Vec<String>,
 +    description: Option<String>,
 +    #[serde(deserialize_with = "single_or_array")]
 +    body: Vec<String>,
 +    #[serde(deserialize_with = "single_or_array")]
 +    requires: Vec<String>,
 +    scope: SnippetScopeDef,
 +}
 +
 +/// Deserializes either a bare string or an array of strings into a `Vec<String>`,
 +/// so users can write `"x"` instead of `["x"]`.
 +fn single_or_array<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
 +where
 +    D: serde::Deserializer<'de>,
 +{
 +    struct SingleOrVec;
 +
 +    impl<'de> serde::de::Visitor<'de> for SingleOrVec {
 +        type Value = Vec<String>;
 +
 +        fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 +            formatter.write_str("string or array of strings")
 +        }
 +
 +        // A lone string becomes a one-element vector.
 +        fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
 +        where
 +            E: serde::de::Error,
 +        {
 +            Ok(vec![value.to_owned()])
 +        }
 +
 +        // An array is deserialized element-wise via the standard Vec impl.
 +        fn visit_seq<A>(self, seq: A) -> Result<Self::Value, A::Error>
 +        where
 +            A: serde::de::SeqAccess<'de>,
 +        {
 +            Deserialize::deserialize(serde::de::value::SeqAccessDeserializer::new(seq))
 +        }
 +    }
 +
 +    deserializer.deserialize_any(SingleOrVec)
 +}
 +
 +// A linked project: either a path to a Cargo.toml or an inline project-json blob.
 +#[derive(Deserialize, Debug, Clone)]
 +#[serde(untagged)]
 +enum ManifestOrProjectJson {
 +    Manifest(PathBuf),
 +    ProjectJson(ProjectJsonData),
 +}
 +
 +// Config-level mirror of `ExprFillDefaultMode` (see `Config::diagnostics`).
 +#[derive(Deserialize, Debug, Clone)]
 +#[serde(rename_all = "snake_case")]
 +enum ExprFillDefaultDef {
 +    Todo,
 +    Default,
 +}
 +
 +// Config-level mirror of `ImportGranularity`.
 +#[derive(Deserialize, Debug, Clone)]
 +#[serde(rename_all = "snake_case")]
 +enum ImportGranularityDef {
 +    Preserve,
 +    Item,
 +    Crate,
 +    Module,
 +}
 +
 +// Config-level mirror of `CallableSnippets` plus an explicit `None`.
 +#[derive(Deserialize, Debug, Copy, Clone)]
 +#[serde(rename_all = "snake_case")]
 +enum CallableCompletionDef {
 +    FillArguments,
 +    AddParentheses,
 +    None,
 +}
 +
 +// Either the literal string "all" or an explicit feature list.
 +#[derive(Deserialize, Debug, Clone)]
 +#[serde(untagged)]
 +enum CargoFeatures {
 +    #[serde(deserialize_with = "de_unit_v::all")]
 +    All,
 +    Listed(Vec<String>),
 +}
 +
 +// Accepts `true`/"always", `false`/"never", or "skip_trivial" via the custom
 +// deserializers generated above.
 +#[derive(Deserialize, Debug, Clone)]
 +#[serde(untagged)]
 +enum LifetimeElisionDef {
 +    #[serde(deserialize_with = "true_or_always")]
 +    Always,
 +    #[serde(deserialize_with = "false_or_never")]
 +    Never,
 +    #[serde(deserialize_with = "de_unit_v::skip_trivial")]
 +    SkipTrivial,
 +}
 +
 +// Accepts `true`/"always", `false`/"never", or "with_block".
 +#[derive(Deserialize, Debug, Clone)]
 +#[serde(untagged)]
 +enum ClosureReturnTypeHintsDef {
 +    #[serde(deserialize_with = "true_or_always")]
 +    Always,
 +    #[serde(deserialize_with = "false_or_never")]
 +    Never,
 +    #[serde(deserialize_with = "de_unit_v::with_block")]
 +    WithBlock,
 +}
 +
 +// Accepts `true`/"always", `false`/"never", or "mutable".
 +#[derive(Deserialize, Debug, Clone)]
 +#[serde(untagged)]
 +enum ReborrowHintsDef {
 +    #[serde(deserialize_with = "true_or_always")]
 +    Always,
 +    #[serde(deserialize_with = "false_or_never")]
 +    Never,
 +    #[serde(deserialize_with = "de_unit_v::mutable")]
 +    Mutable,
 +}
 +
 +// Who watches files for changes (see `Config::files`).
 +#[derive(Deserialize, Debug, Clone)]
 +#[serde(rename_all = "snake_case")]
 +enum FilesWatcherDef {
 +    Client,
 +    Notify,
 +    Server,
 +}
 +
 +// Import path prefix style; "self"/"crate" are accepted as aliases.
 +#[derive(Deserialize, Debug, Clone)]
 +#[serde(rename_all = "snake_case")]
 +enum ImportPrefixDef {
 +    Plain,
 +    #[serde(alias = "self")]
 +    BySelf,
 +    #[serde(alias = "crate")]
 +    ByCrate,
 +}
 +
 +#[derive(Deserialize, Debug, Clone)]
 +#[serde(rename_all = "snake_case")]
 +enum WorkspaceSymbolSearchScopeDef {
 +    Workspace,
 +    WorkspaceAndDependencies,
 +}
 +
 +// How much detail signature help shows.
 +#[derive(Deserialize, Debug, Clone)]
 +#[serde(rename_all = "snake_case")]
 +enum SignatureDetail {
 +    Full,
 +    Parameters,
 +}
 +
 +#[derive(Deserialize, Debug, Clone)]
 +#[serde(rename_all = "snake_case")]
 +enum WorkspaceSymbolSearchKindDef {
 +    OnlyTypes,
 +    AllSymbols,
 +}
 +
 +// Declares the raw config struct: each field carries doc comments, optional
 +// aliases and a JSON-encoded default. The macro also generates `from_json` (with
 +// per-field error collection), a JSON-schema emitter, a docs generator for tests,
 +// and a test enforcing alphabetical field order.
 +macro_rules! _config_data {
 +    (struct $name:ident {
 +        $(
 +            $(#[doc=$doc:literal])*
 +            $field:ident $(| $alias:ident)*: $ty:ty = $default:expr,
 +        )*
 +    }) => {
 +        #[allow(non_snake_case)]
 +        #[derive(Debug, Clone)]
 +        struct $name { $($field: $ty,)* }
 +        impl $name {
 +            // Pulls every field out of `json`, recording deserialization errors in
 +            // `error_sink` and falling back to the field's default.
 +            fn from_json(mut json: serde_json::Value, error_sink: &mut Vec<(String, serde_json::Error)>) -> $name {
 +                $name {$(
 +                    $field: get_field(
 +                        &mut json,
 +                        error_sink,
 +                        stringify!($field),
 +                        None$(.or(Some(stringify!($alias))))*,
 +                        $default,
 +                    ),
 +                )*}
 +            }
 +
 +            // Emits the JSON schema consumed by the VS Code extension's settings UI.
 +            fn json_schema() -> serde_json::Value {
 +                schema(&[
 +                    $({
 +                        let field = stringify!($field);
 +                        let ty = stringify!($ty);
 +
 +                        (field, ty, &[$($doc),*], $default)
 +                    },)*
 +                ])
 +            }
 +
 +            // Renders the user manual section for these settings (test-only).
 +            #[cfg(test)]
 +            fn manual() -> String {
 +                manual(&[
 +                    $({
 +                        let field = stringify!($field);
 +                        let ty = stringify!($ty);
 +
 +                        (field, ty, &[$($doc),*], $default)
 +                    },)*
 +                ])
 +            }
 +        }
 +
 +        #[test]
 +        fn fields_are_sorted() {
 +            [$(stringify!($field)),*].windows(2).for_each(|w| assert!(w[0] <= w[1], "{} <= {} does not hold", w[0], w[1]));
 +        }
 +    };
 +}
 +use _config_data as config_data;
 +
 +/// Extracts one config field from `json` by JSON pointer, falling back to the
 +/// JSON-encoded `default`. Deserialization failures are pushed to `error_sink`
 +/// rather than aborting, so one bad setting does not break the whole config.
 +fn get_field<T: DeserializeOwned>(
 +    json: &mut serde_json::Value,
 +    error_sink: &mut Vec<(String, serde_json::Error)>,
 +    field: &'static str,
 +    alias: Option<&'static str>,
 +    default: &str,
 +) -> T {
 +    // `default` is a JSON string baked in by the config_data! macro; it must parse.
 +    let default = serde_json::from_str(default).unwrap();
 +    // XXX: check alias first, to work-around the VS Code where it pre-fills the
 +    // defaults instead of sending an empty object.
 +    alias
 +        .into_iter()
 +        .chain(iter::once(field))
 +        .find_map(move |field| {
 +            // `snake_case_field` maps to the JSON pointer "/snake/case/field".
 +            let mut pointer = field.replace('_', "/");
 +            pointer.insert(0, '/');
 +            // `take()` removes the value so later lookups can't consume it twice.
 +            json.pointer_mut(&pointer).and_then(|it| match serde_json::from_value(it.take()) {
 +                Ok(it) => Some(it),
 +                Err(e) => {
 +                    tracing::warn!("Failed to deserialize config field at {}: {:?}", pointer, e);
 +                    error_sink.push((pointer, e));
 +                    None
 +                }
 +            })
 +        })
 +        .unwrap_or(default)
 +}
 +
 +/// Builds the `rust-analyzer.*` settings JSON-schema object from the field table
 +/// produced by `config_data!`.
 +fn schema(fields: &[(&'static str, &'static str, &[&str], &str)]) -> serde_json::Value {
 +    // Enforce that fields are grouped by their first `_`-separated segment so the
 +    // generated schema (and docs) stay ordered.
 +    for ((f1, ..), (f2, ..)) in fields.iter().zip(&fields[1..]) {
 +        fn key(f: &str) -> &str {
 +            f.splitn(2, '_').next().unwrap()
 +        }
 +        assert!(key(f1) <= key(f2), "wrong field order: {:?} {:?}", f1, f2);
 +    }
 +
 +    let map = fields
 +        .iter()
 +        .map(|(field, ty, doc, default)| {
 +            // `foo_bar` becomes the setting key "rust-analyzer.foo.bar".
 +            let name = field.replace('_', ".");
 +            let name = format!("rust-analyzer.{}", name);
 +            let props = field_props(field, ty, doc, default);
 +            (name, props)
 +        })
 +        .collect::<serde_json::Map<_, _>>();
 +    map.into()
 +}
 +
 +fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json::Value {
 +    let doc = doc_comment_to_string(doc);
 +    let doc = doc.trim_end_matches('\n');
 +    assert!(
 +        doc.ends_with('.') && doc.starts_with(char::is_uppercase),
 +        "bad docs for {}: {:?}",
 +        field,
 +        doc
 +    );
 +    let default = default.parse::<serde_json::Value>().unwrap();
 +
 +    let mut map = serde_json::Map::default();
 +    macro_rules! set {
 +        ($($key:literal: $value:tt),*$(,)?) => {{$(
 +            map.insert($key.into(), serde_json::json!($value));
 +        )*}};
 +    }
 +    set!("markdownDescription": doc);
 +    set!("default": default);
 +
 +    match ty {
 +        "bool" => set!("type": "boolean"),
 +        "usize" => set!("type": "integer", "minimum": 0),
 +        "String" => set!("type": "string"),
 +        "Vec<String>" => set! {
 +            "type": "array",
 +            "items": { "type": "string" },
 +        },
 +        "Vec<PathBuf>" => set! {
 +            "type": "array",
 +            "items": { "type": "string" },
 +        },
 +        "FxHashSet<String>" => set! {
 +            "type": "array",
 +            "items": { "type": "string" },
 +            "uniqueItems": true,
 +        },
 +        "FxHashMap<Box<str>, Box<[Box<str>]>>" => set! {
 +            "type": "object",
 +        },
 +        "FxHashMap<String, SnippetDef>" => set! {
 +            "type": "object",
 +        },
 +        "FxHashMap<String, String>" => set! {
 +            "type": "object",
 +        },
 +        "Option<usize>" => set! {
 +            "type": ["null", "integer"],
 +            "minimum": 0,
 +        },
 +        "Option<String>" => set! {
 +            "type": ["null", "string"],
 +        },
 +        "Option<PathBuf>" => set! {
 +            "type": ["null", "string"],
 +        },
 +        "Option<bool>" => set! {
 +            "type": ["null", "boolean"],
 +        },
 +        "Option<Vec<String>>" => set! {
 +            "type": ["null", "array"],
 +            "items": { "type": "string" },
 +        },
 +        "MergeBehaviorDef" => set! {
 +            "type": "string",
 +            "enum": ["none", "crate", "module"],
 +            "enumDescriptions": [
 +                "Do not merge imports at all.",
 +                "Merge imports from the same crate into a single `use` statement.",
 +                "Merge imports from the same module into a single `use` statement."
 +            ],
 +        },
 +        "ExprFillDefaultDef" => set! {
 +            "type": "string",
 +            "enum": ["todo", "default"],
 +            "enumDescriptions": [
 +                "Fill missing expressions with the `todo` macro",
 +                "Fill missing expressions with reasonable defaults, `new` or `default` constructors."
 +            ],
 +        },
 +        "ImportGranularityDef" => set! {
 +            "type": "string",
 +            "enum": ["preserve", "crate", "module", "item"],
 +            "enumDescriptions": [
 +                "Do not change the granularity of any imports and preserve the original structure written by the developer.",
 +                "Merge imports from the same crate into a single use statement. Conversely, imports from different crates are split into separate statements.",
 +                "Merge imports from the same module into a single use statement. Conversely, imports from different modules are split into separate statements.",
 +                "Flatten imports so that each has its own use statement."
 +            ],
 +        },
 +        "ImportPrefixDef" => set! {
 +            "type": "string",
 +            "enum": [
 +                "plain",
 +                "self",
 +                "crate"
 +            ],
 +            "enumDescriptions": [
 +                "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item.",
 +                "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item. Prefixes `self` in front of the path if it starts with a module.",
 +                "Force import paths to be absolute by always starting them with `crate` or the extern crate name they come from."
 +            ],
 +        },
 +        "Vec<ManifestOrProjectJson>" => set! {
 +            "type": "array",
 +            "items": { "type": ["string", "object"] },
 +        },
 +        "WorkspaceSymbolSearchScopeDef" => set! {
 +            "type": "string",
 +            "enum": ["workspace", "workspace_and_dependencies"],
 +            "enumDescriptions": [
 +                "Search in current workspace only.",
 +                "Search in current workspace and dependencies."
 +            ],
 +        },
 +        "WorkspaceSymbolSearchKindDef" => set! {
 +            "type": "string",
 +            "enum": ["only_types", "all_symbols"],
 +            "enumDescriptions": [
 +                "Search for types only.",
 +                "Search for all symbols kinds."
 +            ],
 +        },
 +        "ParallelCachePrimingNumThreads" => set! {
 +            "type": "number",
 +            "minimum": 0,
 +            "maximum": 255
 +        },
 +        "LifetimeElisionDef" => set! {
 +            "type": "string",
 +            "enum": [
 +                "always",
 +                "never",
 +                "skip_trivial"
 +            ],
 +            "enumDescriptions": [
 +                "Always show lifetime elision hints.",
 +                "Never show lifetime elision hints.",
 +                "Only show lifetime elision hints if a return type is involved."
 +            ]
 +        },
 +        "ClosureReturnTypeHintsDef" => set! {
 +            "type": "string",
 +            "enum": [
 +                "always",
 +                "never",
 +                "with_block"
 +            ],
 +            "enumDescriptions": [
 +                "Always show type hints for return types of closures.",
 +                "Never show type hints for return types of closures.",
 +                "Only show type hints for return types of closures with blocks."
 +            ]
 +        },
 +        "ReborrowHintsDef" => set! {
 +            "type": "string",
 +            "enum": [
 +                "always",
 +                "never",
 +                "mutable"
 +            ],
 +            "enumDescriptions": [
 +                "Always show reborrow hints.",
 +                "Never show reborrow hints.",
 +                "Only show mutable reborrow hints."
 +            ]
 +        },
 +        "CargoFeatures" => set! {
 +            "anyOf": [
 +                {
 +                    "type": "string",
 +                    "enum": [
 +                        "all"
 +                    ],
 +                    "enumDescriptions": [
 +                        "Pass `--all-features` to cargo",
 +                    ]
 +                },
 +                {
 +                    "type": "array",
 +                    "items": { "type": "string" }
 +                }
 +            ],
 +        },
 +        "Option<CargoFeatures>" => set! {
 +            "anyOf": [
 +                {
 +                    "type": "string",
 +                    "enum": [
 +                        "all"
 +                    ],
 +                    "enumDescriptions": [
 +                        "Pass `--all-features` to cargo",
 +                    ]
 +                },
 +                {
 +                    "type": "array",
 +                    "items": { "type": "string" }
 +                },
 +                { "type": "null" }
 +            ],
 +        },
 +        "CallableCompletionDef" => set! {
 +            "type": "string",
 +            "enum": [
 +                "fill_arguments",
 +                "add_parentheses",
 +                "none",
 +            ],
 +            "enumDescriptions": [
 +                "Add call parentheses and pre-fill arguments.",
 +                "Add call parentheses.",
 +                "Do no snippet completions for callables."
 +            ]
 +        },
 +        "SignatureDetail" => set! {
 +            "type": "string",
 +            "enum": ["full", "parameters"],
 +            "enumDescriptions": [
 +                "Show the entire signature.",
 +                "Show only the parameters."
 +            ],
 +        },
 +        "FilesWatcherDef" => set! {
 +            "type": "string",
 +            "enum": ["client", "server"],
 +            "enumDescriptions": [
 +                "Use the client (editor) to watch files for changes",
 +                "Use server-side file watching",
 +            ],
 +        },
 +        _ => panic!("missing entry for {}: {}", ty, default),
 +    }
 +
 +    map.into()
 +}
 +
 +#[cfg(test)]
 +fn manual(fields: &[(&'static str, &'static str, &[&str], &str)]) -> String {
 +    fields
 +        .iter()
 +        .map(|(field, _ty, doc, default)| {
 +            let name = format!("rust-analyzer.{}", field.replace('_', "."));
 +            let doc = doc_comment_to_string(*doc);
 +            if default.contains('\n') {
 +                format!(
 +                    r#"[[{}]]{}::
 ++
 +--
 +Default:
 +----
 +{}
 +----
 +{}
 +--
 +"#,
 +                    name, name, default, doc
 +                )
 +            } else {
 +                format!("[[{}]]{} (default: `{}`)::\n+\n--\n{}--\n", name, name, default, doc)
 +            }
 +        })
 +        .collect::<String>()
 +}
 +
 +fn doc_comment_to_string(doc: &[&str]) -> String {
 +    doc.iter().map(|it| it.strip_prefix(' ').unwrap_or(it)).map(|it| format!("{}\n", it)).collect()
 +}
 +
 +#[cfg(test)]
 +mod tests {
 +    use std::fs;
 +
 +    use test_utils::{ensure_file_contents, project_root};
 +
 +    use super::*;
 +
 +    #[test]
 +    fn generate_package_json_config() {
 +        let s = Config::json_schema();
 +        let schema = format!("{:#}", s);
 +        let mut schema = schema
 +            .trim_start_matches('{')
 +            .trim_end_matches('}')
 +            .replace("  ", "    ")
 +            .replace('\n', "\n            ")
 +            .trim_start_matches('\n')
 +            .trim_end()
 +            .to_string();
 +        schema.push_str(",\n");
 +
 +        // Transform the asciidoc form link to markdown style.
 +        //
 +        // https://link[text] => [text](https://link)
 +        let url_matches = schema.match_indices("https://");
 +        let mut url_offsets = url_matches.map(|(idx, _)| idx).collect::<Vec<usize>>();
 +        url_offsets.reverse();
 +        for idx in url_offsets {
 +            let link = &schema[idx..];
 +            // matching on whitespace to ignore normal links
 +            if let Some(link_end) = link.find(|c| c == ' ' || c == '[') {
 +                if link.chars().nth(link_end) == Some('[') {
 +                    if let Some(link_text_end) = link.find(']') {
 +                        let link_text = link[link_end..(link_text_end + 1)].to_string();
 +
 +                        schema.replace_range((idx + link_end)..(idx + link_text_end + 1), "");
 +                        schema.insert(idx, '(');
 +                        schema.insert(idx + link_end + 1, ')');
 +                        schema.insert_str(idx, &link_text);
 +                    }
 +                }
 +            }
 +        }
 +
 +        let package_json_path = project_root().join("editors/code/package.json");
 +        let mut package_json = fs::read_to_string(&package_json_path).unwrap();
 +
 +        let start_marker = "                \"$generated-start\": {},\n";
 +        let end_marker = "                \"$generated-end\": {}\n";
 +
 +        let start = package_json.find(start_marker).unwrap() + start_marker.len();
 +        let end = package_json.find(end_marker).unwrap();
 +
 +        let p = remove_ws(&package_json[start..end]);
 +        let s = remove_ws(&schema);
 +        if !p.contains(&s) {
 +            package_json.replace_range(start..end, &schema);
 +            ensure_file_contents(&package_json_path, &package_json)
 +        }
 +    }
 +
 +    #[test]
 +    fn generate_config_documentation() {
 +        let docs_path = project_root().join("docs/user/generated_config.adoc");
 +        let expected = ConfigData::manual();
 +        ensure_file_contents(&docs_path, &expected);
 +    }
 +
 +    fn remove_ws(text: &str) -> String {
 +        text.replace(char::is_whitespace, "")
 +    }
 +}
index 09150c77d7dd1d9de53adaff2a67382132e32711,0000000000000000000000000000000000000000..f516c194da467f7afee60a4f123aeff25c06b26d
mode 100644,000000..100644
--- /dev/null
@@@ -1,121 -1,0 +1,122 @@@
- use rustc_hash::{FxHashMap, FxHashSet};
 +//! Book keeping for keeping diagnostics easily in sync with the client.
 +pub(crate) mod to_proto;
 +
 +use std::{mem, sync::Arc};
 +
 +use ide::FileId;
- pub(crate) type CheckFixes = Arc<FxHashMap<usize, FxHashMap<FileId, Vec<Fix>>>>;
++use ide_db::FxHashMap;
++use stdx::hash::{NoHashHashMap, NoHashHashSet};
 +
 +use crate::lsp_ext;
 +
-     // FIXME: should be FxHashMap<FileId, Vec<ra_id::Diagnostic>>
-     pub(crate) native: FxHashMap<FileId, Vec<lsp_types::Diagnostic>>,
++pub(crate) type CheckFixes = Arc<NoHashHashMap<usize, NoHashHashMap<FileId, Vec<Fix>>>>;
 +
 +#[derive(Debug, Default, Clone)]
 +pub struct DiagnosticsMapConfig {
 +    pub remap_prefix: FxHashMap<String, String>,
 +    pub warnings_as_info: Vec<String>,
 +    pub warnings_as_hint: Vec<String>,
 +}
 +
 +#[derive(Debug, Default, Clone)]
 +pub(crate) struct DiagnosticCollection {
-     pub(crate) check: FxHashMap<usize, FxHashMap<FileId, Vec<lsp_types::Diagnostic>>>,
++    // FIXME: should be NoHashHashMap<FileId, Vec<ra_id::Diagnostic>>
++    pub(crate) native: NoHashHashMap<FileId, Vec<lsp_types::Diagnostic>>,
 +    // FIXME: should be Vec<flycheck::Diagnostic>
-     changes: FxHashSet<FileId>,
++    pub(crate) check: NoHashHashMap<usize, NoHashHashMap<FileId, Vec<lsp_types::Diagnostic>>>,
 +    pub(crate) check_fixes: CheckFixes,
-     pub(crate) fn take_changes(&mut self) -> Option<FxHashSet<FileId>> {
++    changes: NoHashHashSet<FileId>,
 +}
 +
 +#[derive(Debug, Clone)]
 +pub(crate) struct Fix {
 +    // Fixes may be triggerable from multiple ranges.
 +    pub(crate) ranges: Vec<lsp_types::Range>,
 +    pub(crate) action: lsp_ext::CodeAction,
 +}
 +
 +impl DiagnosticCollection {
 +    pub(crate) fn clear_check(&mut self, flycheck_id: usize) {
 +        if let Some(it) = Arc::make_mut(&mut self.check_fixes).get_mut(&flycheck_id) {
 +            it.clear();
 +        }
 +        if let Some(it) = self.check.get_mut(&flycheck_id) {
 +            self.changes.extend(it.drain().map(|(key, _value)| key));
 +        }
 +    }
 +
 +    pub(crate) fn clear_check_all(&mut self) {
 +        Arc::make_mut(&mut self.check_fixes).clear();
 +        self.changes
 +            .extend(self.check.values_mut().flat_map(|it| it.drain().map(|(key, _value)| key)))
 +    }
 +
 +    pub(crate) fn clear_native_for(&mut self, file_id: FileId) {
 +        self.native.remove(&file_id);
 +        self.changes.insert(file_id);
 +    }
 +
 +    pub(crate) fn add_check_diagnostic(
 +        &mut self,
 +        flycheck_id: usize,
 +        file_id: FileId,
 +        diagnostic: lsp_types::Diagnostic,
 +        fix: Option<Fix>,
 +    ) {
 +        let diagnostics = self.check.entry(flycheck_id).or_default().entry(file_id).or_default();
 +        for existing_diagnostic in diagnostics.iter() {
 +            if are_diagnostics_equal(existing_diagnostic, &diagnostic) {
 +                return;
 +            }
 +        }
 +
 +        let check_fixes = Arc::make_mut(&mut self.check_fixes);
 +        check_fixes.entry(flycheck_id).or_default().entry(file_id).or_default().extend(fix);
 +        diagnostics.push(diagnostic);
 +        self.changes.insert(file_id);
 +    }
 +
 +    pub(crate) fn set_native_diagnostics(
 +        &mut self,
 +        file_id: FileId,
 +        diagnostics: Vec<lsp_types::Diagnostic>,
 +    ) {
 +        if let Some(existing_diagnostics) = self.native.get(&file_id) {
 +            if existing_diagnostics.len() == diagnostics.len()
 +                && diagnostics
 +                    .iter()
 +                    .zip(existing_diagnostics)
 +                    .all(|(new, existing)| are_diagnostics_equal(new, existing))
 +            {
 +                return;
 +            }
 +        }
 +
 +        self.native.insert(file_id, diagnostics);
 +        self.changes.insert(file_id);
 +    }
 +
 +    pub(crate) fn diagnostics_for(
 +        &self,
 +        file_id: FileId,
 +    ) -> impl Iterator<Item = &lsp_types::Diagnostic> {
 +        let native = self.native.get(&file_id).into_iter().flatten();
 +        let check =
 +            self.check.values().filter_map(move |it| it.get(&file_id)).into_iter().flatten();
 +        native.chain(check)
 +    }
 +
++    pub(crate) fn take_changes(&mut self) -> Option<NoHashHashSet<FileId>> {
 +        if self.changes.is_empty() {
 +            return None;
 +        }
 +        Some(mem::take(&mut self.changes))
 +    }
 +}
 +
 +fn are_diagnostics_equal(left: &lsp_types::Diagnostic, right: &lsp_types::Diagnostic) -> bool {
 +    left.source == right.source
 +        && left.severity == right.severity
 +        && left.range == right.range
 +        && left.message == right.message
 +}
index c55bbbbe6ef7c2c442e8593179daebe778e70303,0000000000000000000000000000000000000000..706e1742dffde38a695d9119413849ad8fb497ac
mode 100644,000000..100644
--- /dev/null
@@@ -1,384 -1,0 +1,385 @@@
-     pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
 +//! The context or environment in which the language server functions. In our
 +//! server implementation this is known as the `WorldState`.
 +//!
 +//! Each tick provides an immutable snapshot of the state as `WorldSnapshot`.
 +
 +use std::{sync::Arc, time::Instant};
 +
 +use crossbeam_channel::{unbounded, Receiver, Sender};
 +use flycheck::FlycheckHandle;
 +use ide::{Analysis, AnalysisHost, Cancellable, Change, FileId};
 +use ide_db::base_db::{CrateId, FileLoader, SourceDatabase};
 +use lsp_types::{SemanticTokens, Url};
 +use parking_lot::{Mutex, RwLock};
 +use proc_macro_api::ProcMacroServer;
 +use project_model::{CargoWorkspace, ProjectWorkspace, Target, WorkspaceBuildScripts};
 +use rustc_hash::FxHashMap;
++use stdx::hash::NoHashHashMap;
 +use vfs::AnchoredPathBuf;
 +
 +use crate::{
 +    config::Config,
 +    diagnostics::{CheckFixes, DiagnosticCollection},
 +    from_proto,
 +    line_index::{LineEndings, LineIndex},
 +    lsp_ext,
 +    main_loop::Task,
 +    mem_docs::MemDocs,
 +    op_queue::OpQueue,
 +    reload::{self, SourceRootConfig},
 +    task_pool::TaskPool,
 +    to_proto::url_from_abs_path,
 +    Result,
 +};
 +
 +// Enforces drop order
 +pub(crate) struct Handle<H, C> {
 +    pub(crate) handle: H,
 +    pub(crate) receiver: C,
 +}
 +
 +pub(crate) type ReqHandler = fn(&mut GlobalState, lsp_server::Response);
 +pub(crate) type ReqQueue = lsp_server::ReqQueue<(String, Instant), ReqHandler>;
 +
 +/// `GlobalState` is the primary mutable state of the language server
 +///
 +/// The most interesting components are `vfs`, which stores a consistent
 +/// snapshot of the file systems, and `analysis_host`, which stores our
 +/// incremental salsa database.
 +///
 +/// Note that this struct has more than one impl in various modules!
 +pub(crate) struct GlobalState {
 +    sender: Sender<lsp_server::Message>,
 +    req_queue: ReqQueue,
 +    pub(crate) task_pool: Handle<TaskPool<Task>, Receiver<Task>>,
 +    pub(crate) loader: Handle<Box<dyn vfs::loader::Handle>, Receiver<vfs::loader::Message>>,
 +    pub(crate) config: Arc<Config>,
 +    pub(crate) analysis_host: AnalysisHost,
 +    pub(crate) diagnostics: DiagnosticCollection,
 +    pub(crate) mem_docs: MemDocs,
 +    pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
 +    pub(crate) shutdown_requested: bool,
 +    pub(crate) proc_macro_changed: bool,
 +    pub(crate) last_reported_status: Option<lsp_ext::ServerStatusParams>,
 +    pub(crate) source_root_config: SourceRootConfig,
 +    pub(crate) proc_macro_clients: Vec<Result<ProcMacroServer, String>>,
 +
 +    pub(crate) flycheck: Vec<FlycheckHandle>,
 +    pub(crate) flycheck_sender: Sender<flycheck::Message>,
 +    pub(crate) flycheck_receiver: Receiver<flycheck::Message>,
 +
-     vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
++    pub(crate) vfs: Arc<RwLock<(vfs::Vfs, NoHashHashMap<FileId, LineEndings>)>>,
 +    pub(crate) vfs_config_version: u32,
 +    pub(crate) vfs_progress_config_version: u32,
 +    pub(crate) vfs_progress_n_total: usize,
 +    pub(crate) vfs_progress_n_done: usize,
 +
 +    /// `workspaces` field stores the data we actually use, while the `OpQueue`
 +    /// stores the result of the last fetch.
 +    ///
 +    /// If the fetch (partially) fails, we do not update the current value.
 +    ///
 +    /// The handling of build data is subtle. We fetch workspace in two phases:
 +    ///
 +    /// *First*, we run `cargo metadata`, which gives us fast results for
 +    /// initial analysis.
 +    ///
 +    /// *Second*, we run `cargo check` which runs build scripts and compiles
 +    /// proc macros.
 +    ///
 +    /// We need both for the precise analysis, but we want rust-analyzer to be
 +    /// at least partially available just after the first phase. That's because
 +    /// first phase is much faster, and is much less likely to fail.
 +    ///
 +    /// This creates a complication -- by the time the second phase completes,
 +    /// the results of the first phase could be invalid. That is, while we run
 +    /// `cargo check`, the user edits `Cargo.toml`, we notice this, and the new
 +    /// `cargo metadata` completes before `cargo check`.
 +    ///
 +    /// An additional complication is that we want to avoid needless work. When
 +    /// the user just adds comments or whitespace to Cargo.toml, we do not want
 +    /// to invalidate any salsa caches.
 +    pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
 +    pub(crate) fetch_workspaces_queue: OpQueue<Vec<anyhow::Result<ProjectWorkspace>>>,
 +    pub(crate) fetch_build_data_queue:
 +        OpQueue<(Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)>,
 +
 +    pub(crate) prime_caches_queue: OpQueue<()>,
 +}
 +
 +/// An immutable snapshot of the world's state at a point in time.
 +pub(crate) struct GlobalStateSnapshot {
 +    pub(crate) config: Arc<Config>,
 +    pub(crate) analysis: Analysis,
 +    pub(crate) check_fixes: CheckFixes,
 +    mem_docs: MemDocs,
 +    pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
-             vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))),
++    vfs: Arc<RwLock<(vfs::Vfs, NoHashHashMap<FileId, LineEndings>)>>,
 +    pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
 +}
 +
 +impl std::panic::UnwindSafe for GlobalStateSnapshot {}
 +
 +impl GlobalState {
 +    pub(crate) fn new(sender: Sender<lsp_server::Message>, config: Config) -> GlobalState {
 +        let loader = {
 +            let (sender, receiver) = unbounded::<vfs::loader::Message>();
 +            let handle: vfs_notify::NotifyHandle =
 +                vfs::loader::Handle::spawn(Box::new(move |msg| sender.send(msg).unwrap()));
 +            let handle = Box::new(handle) as Box<dyn vfs::loader::Handle>;
 +            Handle { handle, receiver }
 +        };
 +
 +        let task_pool = {
 +            let (sender, receiver) = unbounded();
 +            let handle = TaskPool::new(sender);
 +            Handle { handle, receiver }
 +        };
 +
 +        let analysis_host = AnalysisHost::new(config.lru_capacity());
 +        let (flycheck_sender, flycheck_receiver) = unbounded();
 +        let mut this = GlobalState {
 +            sender,
 +            req_queue: ReqQueue::default(),
 +            task_pool,
 +            loader,
 +            config: Arc::new(config.clone()),
 +            analysis_host,
 +            diagnostics: Default::default(),
 +            mem_docs: MemDocs::default(),
 +            semantic_tokens_cache: Arc::new(Default::default()),
 +            shutdown_requested: false,
 +            proc_macro_changed: false,
 +            last_reported_status: None,
 +            source_root_config: SourceRootConfig::default(),
 +            proc_macro_clients: vec![],
 +
 +            flycheck: Vec::new(),
 +            flycheck_sender,
 +            flycheck_receiver,
 +
++            vfs: Arc::new(RwLock::new((vfs::Vfs::default(), NoHashHashMap::default()))),
 +            vfs_config_version: 0,
 +            vfs_progress_config_version: 0,
 +            vfs_progress_n_total: 0,
 +            vfs_progress_n_done: 0,
 +
 +            workspaces: Arc::new(Vec::new()),
 +            fetch_workspaces_queue: OpQueue::default(),
 +            prime_caches_queue: OpQueue::default(),
 +
 +            fetch_build_data_queue: OpQueue::default(),
 +        };
 +        // Apply any required database inputs from the config.
 +        this.update_configuration(config);
 +        this
 +    }
 +
 +    pub(crate) fn process_changes(&mut self) -> bool {
 +        let _p = profile::span("GlobalState::process_changes");
 +        // A file was added or deleted
 +        let mut has_structure_changes = false;
 +        let mut workspace_structure_change = None;
 +
 +        let (change, changed_files) = {
 +            let mut change = Change::new();
 +            let (vfs, line_endings_map) = &mut *self.vfs.write();
 +            let changed_files = vfs.take_changes();
 +            if changed_files.is_empty() {
 +                return false;
 +            }
 +
 +            for file in &changed_files {
 +                if let Some(path) = vfs.file_path(file.file_id).as_path() {
 +                    let path = path.to_path_buf();
 +                    if reload::should_refresh_for_change(&path, file.change_kind) {
 +                        workspace_structure_change = Some(path);
 +                    }
 +                    if file.is_created_or_deleted() {
 +                        has_structure_changes = true;
 +                    }
 +                }
 +
 +                // Clear native diagnostics when their file gets deleted
 +                if !file.exists() {
 +                    self.diagnostics.clear_native_for(file.file_id);
 +                }
 +
 +                let text = if file.exists() {
 +                    let bytes = vfs.file_contents(file.file_id).to_vec();
 +                    String::from_utf8(bytes).ok().and_then(|text| {
 +                        let (text, line_endings) = LineEndings::normalize(text);
 +                        line_endings_map.insert(file.file_id, line_endings);
 +                        Some(Arc::new(text))
 +                    })
 +                } else {
 +                    None
 +                };
 +                change.change_file(file.file_id, text);
 +            }
 +            if has_structure_changes {
 +                let roots = self.source_root_config.partition(vfs);
 +                change.set_roots(roots);
 +            }
 +            (change, changed_files)
 +        };
 +
 +        self.analysis_host.apply_change(change);
 +
 +        {
 +            let raw_database = self.analysis_host.raw_database();
 +            // FIXME: ideally we should only trigger a workspace fetch for non-library changes
 +            // but something's going wrong with the source root business when we add a new local
 +            // crate see https://github.com/rust-lang/rust-analyzer/issues/13029
 +            if let Some(path) = workspace_structure_change {
 +                self.fetch_workspaces_queue
 +                    .request_op(format!("workspace vfs file change: {}", path.display()));
 +            }
 +            self.proc_macro_changed =
 +                changed_files.iter().filter(|file| !file.is_created_or_deleted()).any(|file| {
 +                    let crates = raw_database.relevant_crates(file.file_id);
 +                    let crate_graph = raw_database.crate_graph();
 +
 +                    crates.iter().any(|&krate| crate_graph[krate].is_proc_macro)
 +                });
 +        }
 +
 +        true
 +    }
 +
 +    pub(crate) fn snapshot(&self) -> GlobalStateSnapshot {
 +        GlobalStateSnapshot {
 +            config: Arc::clone(&self.config),
 +            workspaces: Arc::clone(&self.workspaces),
 +            analysis: self.analysis_host.analysis(),
 +            vfs: Arc::clone(&self.vfs),
 +            check_fixes: Arc::clone(&self.diagnostics.check_fixes),
 +            mem_docs: self.mem_docs.clone(),
 +            semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
 +        }
 +    }
 +
 +    pub(crate) fn send_request<R: lsp_types::request::Request>(
 +        &mut self,
 +        params: R::Params,
 +        handler: ReqHandler,
 +    ) {
 +        let request = self.req_queue.outgoing.register(R::METHOD.to_string(), params, handler);
 +        self.send(request.into());
 +    }
 +
 +    pub(crate) fn complete_request(&mut self, response: lsp_server::Response) {
 +        let handler = self
 +            .req_queue
 +            .outgoing
 +            .complete(response.id.clone())
 +            .expect("received response for unknown request");
 +        handler(self, response)
 +    }
 +
 +    pub(crate) fn send_notification<N: lsp_types::notification::Notification>(
 +        &mut self,
 +        params: N::Params,
 +    ) {
 +        let not = lsp_server::Notification::new(N::METHOD.to_string(), params);
 +        self.send(not.into());
 +    }
 +
 +    pub(crate) fn register_request(
 +        &mut self,
 +        request: &lsp_server::Request,
 +        request_received: Instant,
 +    ) {
 +        self.req_queue
 +            .incoming
 +            .register(request.id.clone(), (request.method.clone(), request_received));
 +    }
 +
 +    pub(crate) fn respond(&mut self, response: lsp_server::Response) {
 +        if let Some((method, start)) = self.req_queue.incoming.complete(response.id.clone()) {
 +            if let Some(err) = &response.error {
 +                if err.message.starts_with("server panicked") {
 +                    self.poke_rust_analyzer_developer(format!("{}, check the log", err.message))
 +                }
 +            }
 +
 +            let duration = start.elapsed();
 +            tracing::debug!("handled {} - ({}) in {:0.2?}", method, response.id, duration);
 +            self.send(response.into());
 +        }
 +    }
 +
 +    pub(crate) fn cancel(&mut self, request_id: lsp_server::RequestId) {
 +        if let Some(response) = self.req_queue.incoming.cancel(request_id) {
 +            self.send(response.into());
 +        }
 +    }
 +
 +    fn send(&mut self, message: lsp_server::Message) {
 +        self.sender.send(message).unwrap()
 +    }
 +}
 +
 +impl Drop for GlobalState {
 +    fn drop(&mut self) {
 +        self.analysis_host.request_cancellation();
 +    }
 +}
 +
 +impl GlobalStateSnapshot {
 +    pub(crate) fn url_to_file_id(&self, url: &Url) -> Result<FileId> {
 +        url_to_file_id(&self.vfs.read().0, url)
 +    }
 +
 +    pub(crate) fn file_id_to_url(&self, id: FileId) -> Url {
 +        file_id_to_url(&self.vfs.read().0, id)
 +    }
 +
 +    pub(crate) fn file_line_index(&self, file_id: FileId) -> Cancellable<LineIndex> {
 +        let endings = self.vfs.read().1[&file_id];
 +        let index = self.analysis.file_line_index(file_id)?;
 +        let res = LineIndex { index, endings, encoding: self.config.offset_encoding() };
 +        Ok(res)
 +    }
 +
 +    pub(crate) fn url_file_version(&self, url: &Url) -> Option<i32> {
 +        let path = from_proto::vfs_path(url).ok()?;
 +        Some(self.mem_docs.get(&path)?.version)
 +    }
 +
 +    pub(crate) fn anchored_path(&self, path: &AnchoredPathBuf) -> Url {
 +        let mut base = self.vfs.read().0.file_path(path.anchor);
 +        base.pop();
 +        let path = base.join(&path.path).unwrap();
 +        let path = path.as_path().unwrap();
 +        url_from_abs_path(path)
 +    }
 +
 +    pub(crate) fn cargo_target_for_crate_root(
 +        &self,
 +        crate_id: CrateId,
 +    ) -> Option<(&CargoWorkspace, Target)> {
 +        let file_id = self.analysis.crate_root(crate_id).ok()?;
 +        let path = self.vfs.read().0.file_path(file_id);
 +        let path = path.as_path()?;
 +        self.workspaces.iter().find_map(|ws| match ws {
 +            ProjectWorkspace::Cargo { cargo, .. } => {
 +                cargo.target_by_root(path).map(|it| (cargo, it))
 +            }
 +            ProjectWorkspace::Json { .. } => None,
 +            ProjectWorkspace::DetachedFiles { .. } => None,
 +        })
 +    }
 +}
 +
 +pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url {
 +    let path = vfs.file_path(id);
 +    let path = path.as_path().unwrap();
 +    url_from_abs_path(path)
 +}
 +
 +pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> Result<FileId> {
 +    let path = from_proto::vfs_path(url)?;
 +    let res = vfs.file_id(&path).ok_or_else(|| format!("file not found: {}", path))?;
 +    Ok(res)
 +}
index e0bcc80b31cb9893192d53408f31b08f3fb34c8e,0000000000000000000000000000000000000000..d89f0f5a3cf456d24c7889deec9355b3098e7a34
mode 100644,000000..100644
--- /dev/null
@@@ -1,1909 -1,0 +1,1903 @@@
-     let highlights = snap.analysis.highlight(file_id)?;
-     let highlight_strings = snap.config.highlighting_strings();
-     let semantic_tokens =
-         to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
 +//! This module is responsible for implementing handlers for Language Server
 +//! Protocol. The majority of requests are fulfilled by calling into the
 +//! `ide` crate.
 +
 +use std::{
 +    io::Write as _,
 +    process::{self, Stdio},
 +};
 +
 +use anyhow::Context;
 +use ide::{
 +    AnnotationConfig, AssistKind, AssistResolveStrategy, FileId, FilePosition, FileRange,
 +    HoverAction, HoverGotoTypeData, Query, RangeInfo, Runnable, RunnableKind, SingleResolve,
 +    SourceChange, TextEdit,
 +};
 +use ide_db::SymbolKind;
 +use lsp_server::ErrorCode;
 +use lsp_types::{
 +    CallHierarchyIncomingCall, CallHierarchyIncomingCallsParams, CallHierarchyItem,
 +    CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams,
 +    CodeLens, CompletionItem, Diagnostic, DiagnosticTag, DocumentFormattingParams, FoldingRange,
 +    FoldingRangeParams, HoverContents, InlayHint, InlayHintParams, Location, LocationLink,
 +    NumberOrString, Position, PrepareRenameResponse, Range, RenameParams,
 +    SemanticTokensDeltaParams, SemanticTokensFullDeltaResult, SemanticTokensParams,
 +    SemanticTokensRangeParams, SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation,
 +    SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
 +};
 +use project_model::{ManifestPath, ProjectWorkspace, TargetKind};
 +use serde_json::json;
 +use stdx::{format_to, never};
 +use syntax::{algo, ast, AstNode, TextRange, TextSize, T};
 +use vfs::AbsPathBuf;
 +
 +use crate::{
 +    cargo_target_spec::CargoTargetSpec,
 +    config::{RustfmtConfig, WorkspaceSymbolConfig},
 +    diff::diff,
 +    from_proto,
 +    global_state::{GlobalState, GlobalStateSnapshot},
 +    line_index::LineEndings,
 +    lsp_ext::{self, PositionOrRange, ViewCrateGraphParams, WorkspaceSymbolParams},
 +    lsp_utils::{all_edits_are_disjoint, invalid_params_error},
 +    to_proto, LspError, Result,
 +};
 +
 +pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<()> {
 +    state.proc_macro_clients.clear();
 +    state.proc_macro_changed = false;
 +    state.fetch_workspaces_queue.request_op("reload workspace request".to_string());
 +    state.fetch_build_data_queue.request_op("reload workspace request".to_string());
 +    Ok(())
 +}
 +
 +pub(crate) fn handle_cancel_flycheck(state: &mut GlobalState, _: ()) -> Result<()> {
 +    let _p = profile::span("handle_stop_flycheck");
 +    state.flycheck.iter().for_each(|flycheck| flycheck.cancel());
 +    Ok(())
 +}
 +
 +/// Handles `rust-analyzer/analyzerStatus`: builds a human-readable report of
 +/// the loaded workspaces and the analysis status, optionally scoped to the
 +/// file named in the request.
 +pub(crate) fn handle_analyzer_status(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_ext::AnalyzerStatusParams,
 +) -> Result<String> {
 +    let _p = profile::span("handle_analyzer_status");
 +
 +    let mut buf = String::new();
 +
 +    // If a document was named, try to resolve it; on failure we still produce
 +    // a report, just with a note and without file-specific status.
 +    let mut file_id = None;
 +    if let Some(tdi) = params.text_document {
 +        match from_proto::file_id(&snap, &tdi.uri) {
 +            Ok(it) => file_id = Some(it),
 +            Err(_) => format_to!(buf, "file {} not found in vfs", tdi.uri),
 +        }
 +    }
 +
 +    if snap.workspaces.is_empty() {
 +        buf.push_str("No workspaces\n")
 +    } else {
 +        buf.push_str("Workspaces:\n");
 +        format_to!(
 +            buf,
 +            "Loaded {:?} packages across {} workspace{}.\n",
 +            snap.workspaces.iter().map(|w| w.n_packages()).sum::<usize>(),
 +            snap.workspaces.len(),
 +            if snap.workspaces.len() == 1 { "" } else { "s" }
 +        );
 +    }
 +    buf.push_str("\nAnalysis:\n");
 +    buf.push_str(
 +        &snap
 +            .analysis
 +            .status(file_id)
 +            .unwrap_or_else(|_| "Analysis retrieval was cancelled".to_owned()),
 +    );
 +    Ok(buf)
 +}
 +
 +pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> Result<String> {
 +    let _p = profile::span("handle_memory_usage");
 +    let mut mem = state.analysis_host.per_query_memory_usage();
 +    mem.push(("Remaining".into(), profile::memory_usage().allocated));
 +
 +    let mut out = String::new();
 +    for (name, bytes) in mem {
 +        format_to!(out, "{:>8} {}\n", bytes, name);
 +    }
 +    Ok(out)
 +}
 +
 +/// Handles `rust-analyzer/shuffleCrateGraph`: re-shuffles the crate graph in
 +/// the analysis host (a debugging endpoint).
 +pub(crate) fn handle_shuffle_crate_graph(state: &mut GlobalState, _: ()) -> Result<()> {
 +    state.analysis_host.shuffle_crate_graph();
 +    Ok(())
 +}
 +
 +pub(crate) fn handle_syntax_tree(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_ext::SyntaxTreeParams,
 +) -> Result<String> {
 +    let _p = profile::span("handle_syntax_tree");
 +    let id = from_proto::file_id(&snap, &params.text_document.uri)?;
 +    let line_index = snap.file_line_index(id)?;
 +    let text_range = params.range.and_then(|r| from_proto::text_range(&line_index, r).ok());
 +    let res = snap.analysis.syntax_tree(id, text_range)?;
 +    Ok(res)
 +}
 +
 +pub(crate) fn handle_view_hir(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::TextDocumentPositionParams,
 +) -> Result<String> {
 +    let _p = profile::span("handle_view_hir");
 +    let position = from_proto::file_position(&snap, params)?;
 +    let res = snap.analysis.view_hir(position)?;
 +    Ok(res)
 +}
 +
 +pub(crate) fn handle_view_file_text(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::TextDocumentIdentifier,
 +) -> Result<String> {
 +    let file_id = from_proto::file_id(&snap, &params.uri)?;
 +    Ok(snap.analysis.file_text(file_id)?.to_string())
 +}
 +
 +pub(crate) fn handle_view_item_tree(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_ext::ViewItemTreeParams,
 +) -> Result<String> {
 +    let _p = profile::span("handle_view_item_tree");
 +    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
 +    let res = snap.analysis.view_item_tree(file_id)?;
 +    Ok(res)
 +}
 +
 +/// Handles `rust-analyzer/viewCrateGraph`: renders the crate graph (the `dot`
 +/// variable name suggests Graphviz DOT output — confirm in `view_crate_graph`).
 +/// `params.full` controls how much of the graph is included.
 +pub(crate) fn handle_view_crate_graph(
 +    snap: GlobalStateSnapshot,
 +    params: ViewCrateGraphParams,
 +) -> Result<String> {
 +    let _p = profile::span("handle_view_crate_graph");
 +    // Double `?`: the analysis call returns a nested result, so both layers
 +    // must be unwrapped.
 +    let dot = snap.analysis.view_crate_graph(params.full)??;
 +    Ok(dot)
 +}
 +
 +pub(crate) fn handle_expand_macro(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_ext::ExpandMacroParams,
 +) -> Result<Option<lsp_ext::ExpandedMacro>> {
 +    let _p = profile::span("handle_expand_macro");
 +    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
 +    let line_index = snap.file_line_index(file_id)?;
 +    let offset = from_proto::offset(&line_index, params.position)?;
 +
 +    let res = snap.analysis.expand_macro(FilePosition { file_id, offset })?;
 +    Ok(res.map(|it| lsp_ext::ExpandedMacro { name: it.name, expansion: it.expansion }))
 +}
 +
 +/// Handles `textDocument/selectionRange`: for each requested position,
 +/// computes the chain of progressively wider enclosing ranges, returned as a
 +/// `SelectionRange` whose `parent` links point outward.
 +pub(crate) fn handle_selection_range(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::SelectionRangeParams,
 +) -> Result<Option<Vec<lsp_types::SelectionRange>>> {
 +    let _p = profile::span("handle_selection_range");
 +    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
 +    let line_index = snap.file_line_index(file_id)?;
 +    let res: Result<Vec<lsp_types::SelectionRange>> = params
 +        .positions
 +        .into_iter()
 +        .map(|position| {
 +            let offset = from_proto::offset(&line_index, position)?;
 +            let mut ranges = Vec::new();
 +            {
 +                // Grow the selection from the empty range at `offset` until
 +                // `extend_selection` stops producing a wider range.
 +                let mut range = TextRange::new(offset, offset);
 +                loop {
 +                    ranges.push(range);
 +                    let frange = FileRange { file_id, range };
 +                    let next = snap.analysis.extend_selection(frange)?;
 +                    if next == range {
 +                        break;
 +                    } else {
 +                        range = next
 +                    }
 +                }
 +            }
 +            // Link the ranges together starting from the outermost, so the
 +            // innermost range ends up at the head of the parent chain.
 +            let mut range = lsp_types::SelectionRange {
 +                range: to_proto::range(&line_index, *ranges.last().unwrap()),
 +                parent: None,
 +            };
 +            for &r in ranges.iter().rev().skip(1) {
 +                range = lsp_types::SelectionRange {
 +                    range: to_proto::range(&line_index, r),
 +                    parent: Some(Box::new(range)),
 +                }
 +            }
 +            Ok(range)
 +        })
 +        .collect();
 +
 +    Ok(Some(res?))
 +}
 +
 +pub(crate) fn handle_matching_brace(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_ext::MatchingBraceParams,
 +) -> Result<Vec<Position>> {
 +    let _p = profile::span("handle_matching_brace");
 +    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
 +    let line_index = snap.file_line_index(file_id)?;
 +    params
 +        .positions
 +        .into_iter()
 +        .map(|position| {
 +            let offset = from_proto::offset(&line_index, position);
 +            offset.map(|offset| {
 +                let offset = match snap.analysis.matching_brace(FilePosition { file_id, offset }) {
 +                    Ok(Some(matching_brace_offset)) => matching_brace_offset,
 +                    Err(_) | Ok(None) => offset,
 +                };
 +                to_proto::position(&line_index, offset)
 +            })
 +        })
 +        .collect()
 +}
 +
 +/// Handles `experimental/joinLines`: merges the lines covered by each request
 +/// range, accumulating all per-range edits into one combined `TextEdit`.
 +pub(crate) fn handle_join_lines(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_ext::JoinLinesParams,
 +) -> Result<Vec<lsp_types::TextEdit>> {
 +    let _p = profile::span("handle_join_lines");
 +
 +    let config = snap.config.join_lines();
 +    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
 +    let line_index = snap.file_line_index(file_id)?;
 +
 +    let mut res = TextEdit::default();
 +    for range in params.ranges {
 +        let range = from_proto::text_range(&line_index, range)?;
 +        let edit = snap.analysis.join_lines(&config, FileRange { file_id, range })?;
 +        // Merge this range's edit into the accumulated edit.
 +        match res.union(edit) {
 +            Ok(()) => (),
 +            Err(_edit) => {
 +                // just ignore overlapping edits
 +            }
 +        }
 +    }
 +
 +    Ok(to_proto::text_edit_vec(&line_index, res))
 +}
 +
 +pub(crate) fn handle_on_enter(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::TextDocumentPositionParams,
 +) -> Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
 +    let _p = profile::span("handle_on_enter");
 +    let position = from_proto::file_position(&snap, params)?;
 +    let edit = match snap.analysis.on_enter(position)? {
 +        None => return Ok(None),
 +        Some(it) => it,
 +    };
 +    let line_index = snap.file_line_index(position.file_id)?;
 +    let edit = to_proto::snippet_text_edit_vec(&line_index, true, edit);
 +    Ok(Some(edit))
 +}
 +
 +/// Handles `textDocument/onTypeFormatting`: computes edits triggered by the
 +/// character the user just typed, or `None` when nothing should change.
 +pub(crate) fn handle_on_type_formatting(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::DocumentOnTypeFormattingParams,
 +) -> Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
 +    let _p = profile::span("handle_on_type_formatting");
 +    let mut position = from_proto::file_position(&snap, params.text_document_position)?;
 +    let line_index = snap.file_line_index(position.file_id)?;
 +
 +    // in `ide`, the `on_type` invariant is that
 +    // `text.char_at(position) == typed_char`.
 +    // (`'.'` here is just a convenient one-byte character: this steps the
 +    // cursor back over the character the client already inserted.)
 +    position.offset -= TextSize::of('.');
 +    let char_typed = params.ch.chars().next().unwrap_or('\0');
 +
 +    // Sanity check: the text at the adjusted offset must actually start with
 +    // the reported character; bail out (and log via `never!`) otherwise.
 +    let text = snap.analysis.file_text(position.file_id)?;
 +    if stdx::never!(!text[usize::from(position.offset)..].starts_with(char_typed)) {
 +        return Ok(None);
 +    }
 +
 +    // We have an assist that inserts ` ` after typing `->` in `fn foo() ->{`,
 +    // but it requires precise cursor positioning to work, and one can't
 +    // position the cursor with on_type formatting. So, let's just toggle this
 +    // feature off here, hoping that we'll enable it one day, 😿.
 +    if char_typed == '>' {
 +        return Ok(None);
 +    }
 +
 +    let edit =
 +        snap.analysis.on_char_typed(position, char_typed, snap.config.typing_autoclose_angle())?;
 +    let edit = match edit {
 +        Some(it) => it,
 +        None => return Ok(None),
 +    };
 +
 +    // This should be a single-file edit
 +    let (_, text_edit) = edit.source_file_edits.into_iter().next().unwrap();
 +
 +    let change = to_proto::snippet_text_edit_vec(&line_index, edit.is_snippet, text_edit);
 +    Ok(Some(change))
 +}
 +
 +/// Handles `textDocument/documentSymbol`: computes the file's structure and
 +/// returns it either as a hierarchical `DocumentSymbol` tree or as a flat
 +/// `SymbolInformation` list, depending on `snap.config.hierarchical_symbols()`.
 +pub(crate) fn handle_document_symbol(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::DocumentSymbolParams,
 +) -> Result<Option<lsp_types::DocumentSymbolResponse>> {
 +    let _p = profile::span("handle_document_symbol");
 +    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
 +    let line_index = snap.file_line_index(file_id)?;
 +
 +    // Flat list of (symbol, index-of-parent) pairs; indices refer into this
 +    // same vector and are resolved by the hierarchy builder below.
 +    let mut parents: Vec<(lsp_types::DocumentSymbol, Option<usize>)> = Vec::new();
 +
 +    for symbol in snap.analysis.file_structure(file_id)? {
 +        let mut tags = Vec::new();
 +        if symbol.deprecated {
 +            tags.push(SymbolTag::DEPRECATED)
 +        };
 +
 +        #[allow(deprecated)]
 +        let doc_symbol = lsp_types::DocumentSymbol {
 +            name: symbol.label,
 +            detail: symbol.detail,
 +            kind: to_proto::structure_node_kind(symbol.kind),
 +            tags: Some(tags),
 +            deprecated: Some(symbol.deprecated),
 +            range: to_proto::range(&line_index, symbol.node_range),
 +            selection_range: to_proto::range(&line_index, symbol.navigation_range),
 +            children: None,
 +        };
 +        parents.push((doc_symbol, symbol.parent));
 +    }
 +
 +    // Builds hierarchy from a flat list, in reverse order (so that indices
 +    // makes sense)
 +    let document_symbols = {
 +        let mut acc = Vec::new();
 +        while let Some((mut node, parent_idx)) = parents.pop() {
 +            // Children were accumulated back-to-front; restore source order.
 +            if let Some(children) = &mut node.children {
 +                children.reverse();
 +            }
 +            let parent = match parent_idx {
 +                None => &mut acc,
 +                Some(i) => parents[i].0.children.get_or_insert_with(Vec::new),
 +            };
 +            parent.push(node);
 +        }
 +        acc.reverse();
 +        acc
 +    };
 +
 +    let res = if snap.config.hierarchical_symbols() {
 +        document_symbols.into()
 +    } else {
 +        // Flatten the tree into `SymbolInformation` entries instead.
 +        let url = to_proto::url(&snap, file_id);
 +        let mut symbol_information = Vec::<SymbolInformation>::new();
 +        for symbol in document_symbols {
 +            flatten_document_symbol(&symbol, None, &url, &mut symbol_information);
 +        }
 +        symbol_information.into()
 +    };
 +    return Ok(Some(res));
 +
 +    // Depth-first flattening of one `DocumentSymbol` subtree; children record
 +    // their parent's name as `container_name`.
 +    fn flatten_document_symbol(
 +        symbol: &lsp_types::DocumentSymbol,
 +        container_name: Option<String>,
 +        url: &Url,
 +        res: &mut Vec<SymbolInformation>,
 +    ) {
 +        let mut tags = Vec::new();
 +
 +        #[allow(deprecated)]
 +        if let Some(true) = symbol.deprecated {
 +            tags.push(SymbolTag::DEPRECATED)
 +        }
 +
 +        #[allow(deprecated)]
 +        res.push(SymbolInformation {
 +            name: symbol.name.clone(),
 +            kind: symbol.kind,
 +            tags: Some(tags),
 +            deprecated: symbol.deprecated,
 +            location: Location::new(url.clone(), symbol.range),
 +            container_name,
 +        });
 +
 +        for child in symbol.children.iter().flatten() {
 +            flatten_document_symbol(child, Some(symbol.name.clone()), url, res);
 +        }
 +    }
 +}
 +
 +/// Handles `workspace/symbol`. Legacy in-query markers are honoured: `#`
 +/// widens the search from types to all symbols and `*` includes library
 +/// dependencies; absent a marker, the request params and then the global
 +/// config decide. An empty types-only result triggers a retry over all
 +/// symbols.
 +pub(crate) fn handle_workspace_symbol(
 +    snap: GlobalStateSnapshot,
 +    params: WorkspaceSymbolParams,
 +) -> Result<Option<Vec<SymbolInformation>>> {
 +    let _p = profile::span("handle_workspace_symbol");
 +
 +    let config = snap.config.workspace_symbol();
 +    let (all_symbols, libs) = decide_search_scope_and_kind(&params, &config);
 +    let limit = config.search_limit;
 +
 +    // Strip the marker characters out of the query before searching.
 +    let query = {
 +        let query: String = params.query.chars().filter(|&c| c != '#' && c != '*').collect();
 +        let mut q = Query::new(query);
 +        if !all_symbols {
 +            q.only_types();
 +        }
 +        if libs {
 +            q.libs();
 +        }
 +        q.limit(limit);
 +        q
 +    };
 +    let mut res = exec_query(&snap, query)?;
 +    if res.is_empty() && !all_symbols {
 +        // A types-only search found nothing; retry matching all symbols.
 +        let mut query = Query::new(params.query);
 +        query.limit(limit);
 +        res = exec_query(&snap, query)?;
 +    }
 +
 +    return Ok(Some(res));
 +
 +    // Interprets markers / request params / config into the
 +    // (all_symbols, libs) flag pair.
 +    fn decide_search_scope_and_kind(
 +        params: &WorkspaceSymbolParams,
 +        config: &WorkspaceSymbolConfig,
 +    ) -> (bool, bool) {
 +        // Support old-style parsing of markers in the query.
 +        let mut all_symbols = params.query.contains('#');
 +        let mut libs = params.query.contains('*');
 +
 +        // If no explicit marker was set, check request params. If that's also empty
 +        // use global config.
 +        if !all_symbols {
 +            let search_kind = match params.search_kind {
 +                Some(ref search_kind) => search_kind,
 +                None => &config.search_kind,
 +            };
 +            all_symbols = match search_kind {
 +                lsp_ext::WorkspaceSymbolSearchKind::OnlyTypes => false,
 +                lsp_ext::WorkspaceSymbolSearchKind::AllSymbols => true,
 +            }
 +        }
 +
 +        if !libs {
 +            let search_scope = match params.search_scope {
 +                Some(ref search_scope) => search_scope,
 +                None => &config.search_scope,
 +            };
 +            libs = match search_scope {
 +                lsp_ext::WorkspaceSymbolSearchScope::Workspace => false,
 +                lsp_ext::WorkspaceSymbolSearchScope::WorkspaceAndDependencies => true,
 +            }
 +        }
 +
 +        (all_symbols, libs)
 +    }
 +
 +    // Runs `query` against the symbol index and converts each hit to LSP
 +    // `SymbolInformation`.
 +    fn exec_query(snap: &GlobalStateSnapshot, query: Query) -> Result<Vec<SymbolInformation>> {
 +        let mut res = Vec::new();
 +        for nav in snap.analysis.symbol_search(query)? {
 +            let container_name = nav.container_name.as_ref().map(|v| v.to_string());
 +
 +            #[allow(deprecated)]
 +            let info = SymbolInformation {
 +                name: nav.name.to_string(),
 +                kind: nav
 +                    .kind
 +                    .map(to_proto::symbol_kind)
 +                    .unwrap_or(lsp_types::SymbolKind::VARIABLE),
 +                tags: None,
 +                location: to_proto::location_from_nav(snap, nav)?,
 +                container_name,
 +                deprecated: None,
 +            };
 +            res.push(info);
 +        }
 +        Ok(res)
 +    }
 +}
 +
 +/// Handles `workspace/willRenameFiles`: computes the source edits that should
 +/// accompany a file or directory rename (per-file work is delegated to
 +/// `Analysis::will_rename_file`). Only single-level renames within the same
 +/// parent directory are handled; renames to or from `mod` are skipped, and a
 +/// directory rename is treated as renaming its `mod.rs`.
 +pub(crate) fn handle_will_rename_files(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::RenameFilesParams,
 +) -> Result<Option<lsp_types::WorkspaceEdit>> {
 +    let _p = profile::span("handle_will_rename_files");
 +
 +    let source_changes: Vec<SourceChange> = params
 +        .files
 +        .into_iter()
 +        .filter_map(|file_rename| {
 +            let from = Url::parse(&file_rename.old_uri).ok()?;
 +            let to = Url::parse(&file_rename.new_uri).ok()?;
 +
 +            let from_path = from.to_file_path().ok()?;
 +            let to_path = to.to_file_path().ok()?;
 +
 +            // Limit to single-level moves for now.
 +            match (from_path.parent(), to_path.parent()) {
 +                (Some(p1), Some(p2)) if p1 == p2 => {
 +                    if from_path.is_dir() {
 +                        // add '/' to end of url -- from `file://path/to/folder` to `file://path/to/folder/`
 +                        let mut old_folder_name = from_path.file_stem()?.to_str()?.to_string();
 +                        old_folder_name.push('/');
 +                        let from_with_trailing_slash = from.join(&old_folder_name).ok()?;
 +
 +                        // Stand in for the directory with its `mod.rs`.
 +                        let imitate_from_url = from_with_trailing_slash.join("mod.rs").ok()?;
 +                        let new_file_name = to_path.file_name()?.to_str()?;
 +                        Some((
 +                            snap.url_to_file_id(&imitate_from_url).ok()?,
 +                            new_file_name.to_string(),
 +                        ))
 +                    } else {
 +                        let old_name = from_path.file_stem()?.to_str()?;
 +                        let new_name = to_path.file_stem()?.to_str()?;
 +                        // `mod.rs` has special meaning; don't touch those renames.
 +                        match (old_name, new_name) {
 +                            ("mod", _) => None,
 +                            (_, "mod") => None,
 +                            _ => Some((snap.url_to_file_id(&from).ok()?, new_name.to_string())),
 +                        }
 +                    }
 +                }
 +                _ => None,
 +            }
 +        })
 +        .filter_map(|(file_id, new_name)| {
 +            snap.analysis.will_rename_file(file_id, &new_name).ok()?
 +        })
 +        .collect();
 +
 +    // Drop file system edits since we're just renaming things on the same level
 +    let mut source_changes = source_changes.into_iter();
 +    let mut source_change = source_changes.next().unwrap_or_default();
 +    source_change.file_system_edits.clear();
 +    // no collect here because we want to merge text edits on same file ids
 +    source_change.extend(source_changes.flat_map(|it| it.source_file_edits));
 +    if source_change.source_file_edits.is_empty() {
 +        Ok(None)
 +    } else {
 +        to_proto::workspace_edit(&snap, source_change).map(Some)
 +    }
 +}
 +
 +pub(crate) fn handle_goto_definition(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::GotoDefinitionParams,
 +) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
 +    let _p = profile::span("handle_goto_definition");
 +    let position = from_proto::file_position(&snap, params.text_document_position_params)?;
 +    let nav_info = match snap.analysis.goto_definition(position)? {
 +        None => return Ok(None),
 +        Some(it) => it,
 +    };
 +    let src = FileRange { file_id: position.file_id, range: nav_info.range };
 +    let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
 +    Ok(Some(res))
 +}
 +
 +pub(crate) fn handle_goto_declaration(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::request::GotoDeclarationParams,
 +) -> Result<Option<lsp_types::request::GotoDeclarationResponse>> {
 +    let _p = profile::span("handle_goto_declaration");
 +    let position = from_proto::file_position(&snap, params.text_document_position_params.clone())?;
 +    let nav_info = match snap.analysis.goto_declaration(position)? {
 +        None => return handle_goto_definition(snap, params),
 +        Some(it) => it,
 +    };
 +    let src = FileRange { file_id: position.file_id, range: nav_info.range };
 +    let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
 +    Ok(Some(res))
 +}
 +
 +pub(crate) fn handle_goto_implementation(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::request::GotoImplementationParams,
 +) -> Result<Option<lsp_types::request::GotoImplementationResponse>> {
 +    let _p = profile::span("handle_goto_implementation");
 +    let position = from_proto::file_position(&snap, params.text_document_position_params)?;
 +    let nav_info = match snap.analysis.goto_implementation(position)? {
 +        None => return Ok(None),
 +        Some(it) => it,
 +    };
 +    let src = FileRange { file_id: position.file_id, range: nav_info.range };
 +    let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
 +    Ok(Some(res))
 +}
 +
 +pub(crate) fn handle_goto_type_definition(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::request::GotoTypeDefinitionParams,
 +) -> Result<Option<lsp_types::request::GotoTypeDefinitionResponse>> {
 +    let _p = profile::span("handle_goto_type_definition");
 +    let position = from_proto::file_position(&snap, params.text_document_position_params)?;
 +    let nav_info = match snap.analysis.goto_type_definition(position)? {
 +        None => return Ok(None),
 +        Some(it) => it,
 +    };
 +    let src = FileRange { file_id: position.file_id, range: nav_info.range };
 +    let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
 +    Ok(Some(res))
 +}
 +
 +/// Handles `experimental/parentModule`:
 +/// * invoked on a `Cargo.toml`, links to the manifests of parent packages;
 +/// * invoked at a crate root, links to the crate's `Cargo.toml`;
 +/// * otherwise, resolves the parent module semantically.
 +pub(crate) fn handle_parent_module(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::TextDocumentPositionParams,
 +) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
 +    let _p = profile::span("handle_parent_module");
 +    if let Ok(file_path) = &params.text_document.uri.to_file_path() {
 +        if file_path.file_name().unwrap_or_default() == "Cargo.toml" {
 +            // search workspaces for parent packages or fallback to workspace root
 +            let abs_path_buf = match AbsPathBuf::try_from(file_path.to_path_buf()).ok() {
 +                Some(abs_path_buf) => abs_path_buf,
 +                None => return Ok(None),
 +            };
 +
 +            let manifest_path = match ManifestPath::try_from(abs_path_buf).ok() {
 +                Some(manifest_path) => manifest_path,
 +                None => return Ok(None),
 +            };
 +
 +            let links: Vec<LocationLink> = snap
 +                .workspaces
 +                .iter()
 +                .filter_map(|ws| match ws {
 +                    ProjectWorkspace::Cargo { cargo, .. } => cargo.parent_manifests(&manifest_path),
 +                    _ => None,
 +                })
 +                .flatten()
 +                .map(|parent_manifest_path| LocationLink {
 +                    origin_selection_range: None,
 +                    target_uri: to_proto::url_from_abs_path(&parent_manifest_path),
 +                    target_range: Range::default(),
 +                    target_selection_range: Range::default(),
 +                })
 +                .collect::<_>();
 +            return Ok(Some(links.into()));
 +        }
 +
 +        // check if invoked at the crate root
 +        let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
 +        let crate_id = match snap.analysis.crate_for(file_id)?.first() {
 +            Some(&crate_id) => crate_id,
 +            None => return Ok(None),
 +        };
 +        let cargo_spec = match CargoTargetSpec::for_file(&snap, file_id)? {
 +            Some(it) => it,
 +            None => return Ok(None),
 +        };
 +
 +        if snap.analysis.crate_root(crate_id)? == file_id {
 +            // At the crate root: answer with the crate's own `Cargo.toml`.
 +            let cargo_toml_url = to_proto::url_from_abs_path(&cargo_spec.cargo_toml);
 +            let res = vec![LocationLink {
 +                origin_selection_range: None,
 +                target_uri: cargo_toml_url,
 +                target_range: Range::default(),
 +                target_selection_range: Range::default(),
 +            }]
 +            .into();
 +            return Ok(Some(res));
 +        }
 +    }
 +
 +    // locate parent module by semantics
 +    let position = from_proto::file_position(&snap, params)?;
 +    let navs = snap.analysis.parent_module(position)?;
 +    let res = to_proto::goto_definition_response(&snap, None, navs)?;
 +    Ok(Some(res))
 +}
 +
 +/// Handles `rust-analyzer/runnables`: collects the runnables applicable to
 +/// the given file (optionally filtered to the given position), then appends
 +/// package-level `cargo check`/`cargo test` entries — or a workspace-level
 +/// `cargo check` when the file does not belong to a Cargo package.
 +pub(crate) fn handle_runnables(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_ext::RunnablesParams,
 +) -> Result<Vec<lsp_ext::Runnable>> {
 +    let _p = profile::span("handle_runnables");
 +    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
 +    let line_index = snap.file_line_index(file_id)?;
 +    let offset = params.position.and_then(|it| from_proto::offset(&line_index, it).ok());
 +    let cargo_spec = CargoTargetSpec::for_file(&snap, file_id)?;
 +
 +    // Detect whether the cursor sits inside an `expect!`/`expect_file!` macro
 +    // call; if so, runnables get the `expect_test` flag set below.
 +    let expect_test = match offset {
 +        Some(offset) => {
 +            let source_file = snap.analysis.parse(file_id)?;
 +            algo::find_node_at_offset::<ast::MacroCall>(source_file.syntax(), offset)
 +                .and_then(|it| it.path()?.segment()?.name_ref())
 +                .map_or(false, |it| it.text() == "expect" || it.text() == "expect_file")
 +        }
 +        None => false,
 +    };
 +
 +    let mut res = Vec::new();
 +    for runnable in snap.analysis.runnables(file_id)? {
 +        if should_skip_for_offset(&runnable, offset) {
 +            continue;
 +        }
 +        if should_skip_target(&runnable, cargo_spec.as_ref()) {
 +            continue;
 +        }
 +        let mut runnable = to_proto::runnable(&snap, runnable)?;
 +        if expect_test {
 +            runnable.label = format!("{} + expect", runnable.label);
 +            runnable.args.expect_test = Some(true);
 +        }
 +        res.push(runnable);
 +    }
 +
 +    // Add `cargo check` and `cargo test` for all targets of the whole package
 +    let config = snap.config.runnables();
 +    match cargo_spec {
 +        Some(spec) => {
 +            for cmd in ["check", "test"] {
 +                res.push(lsp_ext::Runnable {
 +                    label: format!("cargo {} -p {} --all-targets", cmd, spec.package),
 +                    location: None,
 +                    kind: lsp_ext::RunnableKind::Cargo,
 +                    args: lsp_ext::CargoRunnable {
 +                        workspace_root: Some(spec.workspace_root.clone().into()),
 +                        override_cargo: config.override_cargo.clone(),
 +                        cargo_args: vec![
 +                            cmd.to_string(),
 +                            "--package".to_string(),
 +                            spec.package.clone(),
 +                            "--all-targets".to_string(),
 +                        ],
 +                        cargo_extra_args: config.cargo_extra_args.clone(),
 +                        executable_args: Vec::new(),
 +                        expect_test: None,
 +                    },
 +                })
 +            }
 +        }
 +        None => {
 +            // Not inside a Cargo package: offer a workspace-wide check only
 +            // when some project (linked or discovered) is actually loaded.
 +            if !snap.config.linked_projects().is_empty()
 +                || !snap
 +                    .config
 +                    .discovered_projects
 +                    .as_ref()
 +                    .map(|projects| projects.is_empty())
 +                    .unwrap_or(true)
 +            {
 +                res.push(lsp_ext::Runnable {
 +                    label: "cargo check --workspace".to_string(),
 +                    location: None,
 +                    kind: lsp_ext::RunnableKind::Cargo,
 +                    args: lsp_ext::CargoRunnable {
 +                        workspace_root: None,
 +                        override_cargo: config.override_cargo,
 +                        cargo_args: vec!["check".to_string(), "--workspace".to_string()],
 +                        cargo_extra_args: config.cargo_extra_args,
 +                        executable_args: Vec::new(),
 +                        expect_test: None,
 +                    },
 +                });
 +            }
 +        }
 +    }
 +    Ok(res)
 +}
 +
 +fn should_skip_for_offset(runnable: &Runnable, offset: Option<TextSize>) -> bool {
 +    match offset {
 +        None => false,
 +        _ if matches!(&runnable.kind, RunnableKind::TestMod { .. }) => false,
 +        Some(offset) => !runnable.nav.full_range.contains_inclusive(offset),
 +    }
 +}
 +
 +pub(crate) fn handle_related_tests(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::TextDocumentPositionParams,
 +) -> Result<Vec<lsp_ext::TestInfo>> {
 +    let _p = profile::span("handle_related_tests");
 +    let position = from_proto::file_position(&snap, params)?;
 +
 +    let tests = snap.analysis.related_tests(position, None)?;
 +    let mut res = Vec::new();
 +    for it in tests {
 +        if let Ok(runnable) = to_proto::runnable(&snap, it) {
 +            res.push(lsp_ext::TestInfo { runnable })
 +        }
 +    }
 +
 +    Ok(res)
 +}
 +
 +/// Handles `textDocument/completion`. A completion triggered by a lone `:`
 +/// (as opposed to a full `::`) is suppressed; otherwise completions from the
 +/// `ide` layer are converted to LSP items. The result list is always marked
 +/// `is_incomplete` so clients re-query as the user types.
 +pub(crate) fn handle_completion(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::CompletionParams,
 +) -> Result<Option<lsp_types::CompletionResponse>> {
 +    let _p = profile::span("handle_completion");
 +    let text_document_position = params.text_document_position.clone();
 +    let position = from_proto::file_position(&snap, params.text_document_position)?;
 +    let completion_trigger_character =
 +        params.context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next());
 +
 +    // Only offer completions after a full `::`; if the token to the left of
 +    // the cursor is a single `:` (or nothing), bail out.
 +    if Some(':') == completion_trigger_character {
 +        let source_file = snap.analysis.parse(position.file_id)?;
 +        let left_token = source_file.syntax().token_at_offset(position.offset).left_biased();
 +        let completion_triggered_after_single_colon = match left_token {
 +            Some(left_token) => left_token.kind() == T![:],
 +            None => true,
 +        };
 +        if completion_triggered_after_single_colon {
 +            return Ok(None);
 +        }
 +    }
 +
 +    let completion_config = &snap.config.completion();
 +    let items = match snap.analysis.completions(
 +        completion_config,
 +        position,
 +        completion_trigger_character,
 +    )? {
 +        None => return Ok(None),
 +        Some(items) => items,
 +    };
 +    let line_index = snap.file_line_index(position.file_id)?;
 +
 +    let items =
 +        to_proto::completion_items(&snap.config, &line_index, text_document_position, items);
 +
 +    let completion_list = lsp_types::CompletionList { is_incomplete: true, items };
 +    Ok(Some(completion_list.into()))
 +}
 +
 +/// `completionItem/resolve`: lazily computes the additional text edits
 +/// (auto-import insertions) for a previously returned completion item, using
 +/// the position stashed in the item's `data` field. Items without `data` are
 +/// returned unchanged. Errors if any edit set would overlap, which the LSP
 +/// spec forbids.
 +pub(crate) fn handle_completion_resolve(
 +    snap: GlobalStateSnapshot,
 +    mut original_completion: CompletionItem,
 +) -> Result<CompletionItem> {
 +    let _p = profile::span("handle_completion_resolve");
 +
 +    // Empty-slice call: validates the item's own edits are pairwise disjoint.
 +    if !all_edits_are_disjoint(&original_completion, &[]) {
 +        return Err(invalid_params_error(
 +            "Received a completion with overlapping edits, this is not LSP-compliant".to_string(),
 +        )
 +        .into());
 +    }
 +
 +    let data = match original_completion.data.take() {
 +        Some(it) => it,
 +        None => return Ok(original_completion),
 +    };
 +
 +    let resolve_data: lsp_ext::CompletionResolveData = serde_json::from_value(data)?;
 +
 +    let file_id = from_proto::file_id(&snap, &resolve_data.position.text_document.uri)?;
 +    let line_index = snap.file_line_index(file_id)?;
 +    let offset = from_proto::offset(&line_index, resolve_data.position.position)?;
 +
 +    let additional_edits = snap
 +        .analysis
 +        .resolve_completion_edits(
 +            &snap.config.completion(),
 +            FilePosition { file_id, offset },
 +            resolve_data
 +                .imports
 +                .into_iter()
 +                .map(|import| (import.full_import_path, import.imported_name)),
 +        )?
 +        .into_iter()
 +        .flat_map(|edit| edit.into_iter().map(|indel| to_proto::text_edit(&line_index, indel)))
 +        .collect::<Vec<_>>();
 +
 +    if !all_edits_are_disjoint(&original_completion, &additional_edits) {
 +        return Err(LspError::new(
 +            ErrorCode::InternalError as i32,
 +            "Import edit overlaps with the original completion edits, this is not LSP-compliant"
 +                .into(),
 +        )
 +        .into());
 +    }
 +
 +    // Merge with any additional edits the item already carried.
 +    if let Some(original_additional_edits) = original_completion.additional_text_edits.as_mut() {
 +        original_additional_edits.extend(additional_edits.into_iter())
 +    } else {
 +        original_completion.additional_text_edits = Some(additional_edits);
 +    }
 +
 +    Ok(original_completion)
 +}
 +
 +/// `textDocument/foldingRange`: maps ide-layer folds to LSP folding ranges,
 +/// honoring the client's line-folding-only capability.
 +pub(crate) fn handle_folding_range(
 +    snap: GlobalStateSnapshot,
 +    params: FoldingRangeParams,
 +) -> Result<Option<Vec<FoldingRange>>> {
 +    let _p = profile::span("handle_folding_range");
 +    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
 +    let folds = snap.analysis.folding_ranges(file_id)?;
 +    let text = snap.analysis.file_text(file_id)?;
 +    let line_index = snap.file_line_index(file_id)?;
 +    let line_folding_only = snap.config.line_folding_only();
 +    let res = folds
 +        .into_iter()
 +        .map(|it| to_proto::folding_range(&*text, &line_index, line_folding_only, it))
 +        .collect();
 +    Ok(Some(res))
 +}
 +
 +/// `textDocument/signatureHelp`: returns call-signature info at the cursor,
 +/// or `Ok(None)` when the position is not inside a call.
 +pub(crate) fn handle_signature_help(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::SignatureHelpParams,
 +) -> Result<Option<lsp_types::SignatureHelp>> {
 +    let _p = profile::span("handle_signature_help");
 +    let position = from_proto::file_position(&snap, params.text_document_position_params)?;
 +    let help = match snap.analysis.signature_help(position)? {
 +        Some(it) => it,
 +        None => return Ok(None),
 +    };
 +    let config = snap.config.call_info();
 +    let res = to_proto::signature_help(help, config, snap.config.signature_help_label_offsets());
 +    Ok(Some(res))
 +}
 +
 +/// `textDocument/hover` (rust-analyzer extension): accepts either a position
 +/// or a range, returns markup plus optional hover actions (suppressed when the
 +/// client has all hover actions disabled).
 +pub(crate) fn handle_hover(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_ext::HoverParams,
 +) -> Result<Option<lsp_ext::Hover>> {
 +    let _p = profile::span("handle_hover");
 +    // Normalize a bare position into an empty range at that position.
 +    let range = match params.position {
 +        PositionOrRange::Position(position) => Range::new(position, position),
 +        PositionOrRange::Range(range) => range,
 +    };
 +
 +    let file_range = from_proto::file_range(&snap, params.text_document, range)?;
 +    let info = match snap.analysis.hover(&snap.config.hover(), file_range)? {
 +        None => return Ok(None),
 +        Some(info) => info,
 +    };
 +
 +    let line_index = snap.file_line_index(file_range.file_id)?;
 +    let range = to_proto::range(&line_index, info.range);
 +    // Fall back to Markdown when the config specifies no documentation format.
 +    let markup_kind =
 +        snap.config.hover().documentation.map_or(ide::HoverDocFormat::Markdown, |kind| kind);
 +    let hover = lsp_ext::Hover {
 +        hover: lsp_types::Hover {
 +            contents: HoverContents::Markup(to_proto::markup_content(
 +                info.info.markup,
 +                markup_kind,
 +            )),
 +            range: Some(range),
 +        },
 +        actions: if snap.config.hover_actions().none() {
 +            Vec::new()
 +        } else {
 +            prepare_hover_actions(&snap, &info.info.actions)
 +        },
 +    };
 +
 +    Ok(Some(hover))
 +}
 +
 +/// `textDocument/prepareRename`: validates that the symbol at the position is
 +/// renameable and returns the range the client should highlight for editing.
 +/// Analysis-level refusals are surfaced as LSP rename errors.
 +pub(crate) fn handle_prepare_rename(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::TextDocumentPositionParams,
 +) -> Result<Option<PrepareRenameResponse>> {
 +    let _p = profile::span("handle_prepare_rename");
 +    let position = from_proto::file_position(&snap, params)?;
 +
 +    let change = snap.analysis.prepare_rename(position)?.map_err(to_proto::rename_error)?;
 +
 +    let line_index = snap.file_line_index(position.file_id)?;
 +    let range = to_proto::range(&line_index, change.range);
 +    Ok(Some(PrepareRenameResponse::Range(range)))
 +}
 +
 +/// `textDocument/rename`: computes the workspace edit for renaming the symbol
 +/// at the given position to `params.new_name`.
 +pub(crate) fn handle_rename(
 +    snap: GlobalStateSnapshot,
 +    params: RenameParams,
 +) -> Result<Option<WorkspaceEdit>> {
 +    let _p = profile::span("handle_rename");
 +    let position = from_proto::file_position(&snap, params.text_document_position)?;
 +
 +    let mut change =
 +        snap.analysis.rename(position, &*params.new_name)?.map_err(to_proto::rename_error)?;
 +
 +    // this is kind of a hack to prevent double edits from happening when moving files
 +    // When a module gets renamed by renaming the mod declaration this causes the file to move
 +    // which in turn will trigger a WillRenameFiles request to the server for which we reply with a
 +    // a second identical set of renames, the client will then apply both edits causing incorrect edits
 +    // with this we only emit source_file_edits in the WillRenameFiles response which will do the rename instead
 +    // See https://github.com/microsoft/vscode-languageserver-node/issues/752 for more info
 +    if !change.file_system_edits.is_empty() && snap.config.will_rename() {
 +        change.source_file_edits.clear();
 +    }
 +    let workspace_edit = to_proto::workspace_edit(&snap, change)?;
 +    Ok(Some(workspace_edit))
 +}
 +
 +/// `textDocument/references`: returns all reference locations of the symbol at
 +/// the position, including the declaration only when the client asked for it.
 +/// Locations that fail to convert are dropped (best-effort).
 +pub(crate) fn handle_references(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::ReferenceParams,
 +) -> Result<Option<Vec<Location>>> {
 +    let _p = profile::span("handle_references");
 +    let position = from_proto::file_position(&snap, params.text_document_position)?;
 +
 +    let refs = match snap.analysis.find_all_refs(position, None)? {
 +        None => return Ok(None),
 +        Some(refs) => refs,
 +    };
 +
 +    let include_declaration = params.context.include_declaration;
 +    let locations = refs
 +        .into_iter()
 +        .flat_map(|refs| {
 +            // Declaration uses the focus range (e.g. just the name) when available.
 +            let decl = if include_declaration {
 +                refs.declaration.map(|decl| FileRange {
 +                    file_id: decl.nav.file_id,
 +                    range: decl.nav.focus_or_full_range(),
 +                })
 +            } else {
 +                None
 +            };
 +            refs.references
 +                .into_iter()
 +                .flat_map(|(file_id, refs)| {
 +                    refs.into_iter().map(move |(range, _)| FileRange { file_id, range })
 +                })
 +                .chain(decl)
 +        })
 +        .filter_map(|frange| to_proto::location(&snap, frange).ok())
 +        .collect();
 +
 +    Ok(Some(locations))
 +}
 +
 +/// `textDocument/formatting`: formats the whole document via rustfmt
 +/// (`None` range = full file).
 +pub(crate) fn handle_formatting(
 +    snap: GlobalStateSnapshot,
 +    params: DocumentFormattingParams,
 +) -> Result<Option<Vec<lsp_types::TextEdit>>> {
 +    let _p = profile::span("handle_formatting");
 +
 +    run_rustfmt(&snap, params.text_document, None)
 +}
 +
 +/// `textDocument/rangeFormatting`: formats only the requested range via rustfmt.
 +pub(crate) fn handle_range_formatting(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::DocumentRangeFormattingParams,
 +) -> Result<Option<Vec<lsp_types::TextEdit>>> {
 +    let _p = profile::span("handle_range_formatting");
 +
 +    run_rustfmt(&snap, params.text_document, Some(params.range))
 +}
 +
 +/// `textDocument/codeAction`: collects assists and `cargo check` quick-fixes
 +/// for the requested range. When the client supports `codeAction/resolve`,
 +/// assist edits are computed lazily (only an id is attached here); otherwise
 +/// all edits are computed eagerly.
 +pub(crate) fn handle_code_action(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::CodeActionParams,
 +) -> Result<Option<Vec<lsp_ext::CodeAction>>> {
 +    let _p = profile::span("handle_code_action");
 +
 +    if !snap.config.code_action_literals() {
 +        // We intentionally don't support command-based actions, as those either
 +        // require either custom client-code or server-initiated edits. Server
 +        // initiated edits break causality, so we avoid those.
 +        return Ok(None);
 +    }
 +
 +    let line_index =
 +        snap.file_line_index(from_proto::file_id(&snap, &params.text_document.uri)?)?;
 +    let frange = from_proto::file_range(&snap, params.text_document.clone(), params.range)?;
 +
 +    // Restrict assist kinds to the client's `context.only` filter, if given.
 +    let mut assists_config = snap.config.assist();
 +    assists_config.allowed = params
 +        .context
 +        .only
 +        .clone()
 +        .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect());
 +
 +    let mut res: Vec<lsp_ext::CodeAction> = Vec::new();
 +
 +    let code_action_resolve_cap = snap.config.code_action_resolve();
 +    let resolve = if code_action_resolve_cap {
 +        AssistResolveStrategy::None
 +    } else {
 +        AssistResolveStrategy::All
 +    };
 +    let assists = snap.analysis.assists_with_fixes(
 +        &assists_config,
 +        &snap.config.diagnostics(),
 +        resolve,
 +        frange,
 +    )?;
 +    for (index, assist) in assists.into_iter().enumerate() {
 +        // The (index, params) pair is round-tripped back in codeAction/resolve.
 +        let resolve_data =
 +            if code_action_resolve_cap { Some((index, params.clone())) } else { None };
 +        let code_action = to_proto::code_action(&snap, assist, resolve_data)?;
 +        res.push(code_action)
 +    }
 +
 +    // Fixes from `cargo check`.
 +    // NOTE(review): the `.into_iter()` after `filter_map` is redundant —
 +    // `filter_map` already yields an iterator.
 +    for fix in
 +        snap.check_fixes.values().filter_map(|it| it.get(&frange.file_id)).into_iter().flatten()
 +    {
 +        // FIXME: this mapping is awkward and shouldn't exist. Refactor
 +        // `snap.check_fixes` to not convert to LSP prematurely.
 +        let intersect_fix_range = fix
 +            .ranges
 +            .iter()
 +            .copied()
 +            .filter_map(|range| from_proto::text_range(&line_index, range).ok())
 +            .any(|fix_range| fix_range.intersect(frange.range).is_some());
 +        if intersect_fix_range {
 +            res.push(fix.action.clone());
 +        }
 +    }
 +
 +    Ok(Some(res))
 +}
 +
 +/// `codeAction/resolve`: recomputes the single assist identified by the
 +/// `data` payload stashed by `handle_code_action` and fills in its edit and
 +/// command. Errors if the payload is missing, unparsable, or no longer
 +/// matches the assist found at the recorded index (stale request).
 +pub(crate) fn handle_code_action_resolve(
 +    snap: GlobalStateSnapshot,
 +    mut code_action: lsp_ext::CodeAction,
 +) -> Result<lsp_ext::CodeAction> {
 +    let _p = profile::span("handle_code_action_resolve");
 +    let params = match code_action.data.take() {
 +        Some(it) => it,
 +        None => return Err(invalid_params_error("code action without data".to_string()).into()),
 +    };
 +
 +    let file_id = from_proto::file_id(&snap, &params.code_action_params.text_document.uri)?;
 +    let line_index = snap.file_line_index(file_id)?;
 +    let range = from_proto::text_range(&line_index, params.code_action_params.range)?;
 +    let frange = FileRange { file_id, range };
 +
 +    // Re-apply the same `only` filter the original request used, so indices line up.
 +    let mut assists_config = snap.config.assist();
 +    assists_config.allowed = params
 +        .code_action_params
 +        .context
 +        .only
 +        .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect());
 +
 +    let (assist_index, assist_resolve) = match parse_action_id(&params.id) {
 +        Ok(parsed_data) => parsed_data,
 +        Err(e) => {
 +            return Err(invalid_params_error(format!(
 +                "Failed to parse action id string '{}': {}",
 +                params.id, e
 +            ))
 +            .into())
 +        }
 +    };
 +
 +    let expected_assist_id = assist_resolve.assist_id.clone();
 +    let expected_kind = assist_resolve.assist_kind;
 +
 +    // Only the one requested assist is fully resolved (edits computed).
 +    let assists = snap.analysis.assists_with_fixes(
 +        &assists_config,
 +        &snap.config.diagnostics(),
 +        AssistResolveStrategy::Single(assist_resolve),
 +        frange,
 +    )?;
 +
 +    let assist = match assists.get(assist_index) {
 +        Some(assist) => assist,
 +        None => return Err(invalid_params_error(format!(
 +            "Failed to find the assist for index {} provided by the resolve request. Resolve request assist id: {}",
 +            assist_index, params.id,
 +        ))
 +        .into())
 +    };
 +    // Guard against the assist list having changed since the original request.
 +    if assist.id.0 != expected_assist_id || assist.id.1 != expected_kind {
 +        return Err(invalid_params_error(format!(
 +            "Mismatching assist at index {} for the resolve parameters given. Resolve request assist id: {}, actual id: {:?}.",
 +            assist_index, params.id, assist.id
 +        ))
 +        .into());
 +    }
 +    let ca = to_proto::code_action(&snap, assist.clone(), None)?;
 +    code_action.edit = ca.edit;
 +    code_action.command = ca.command;
 +    Ok(code_action)
 +}
 +
 +/// Parses a code-action id of the form `"<assist_id>:<assist_kind>:<index>"`
 +/// into the assist index and a `SingleResolve` descriptor. Returns a textual
 +/// error for a malformed kind, index, or segment count.
 +fn parse_action_id(action_id: &str) -> Result<(usize, SingleResolve), String> {
 +    let id_parts = action_id.split(':').collect::<Vec<_>>();
 +    match id_parts.as_slice() {
 +        [assist_id_string, assist_kind_string, index_string] => {
 +            let assist_kind: AssistKind = assist_kind_string.parse()?;
 +            let index: usize = match index_string.parse() {
 +                Ok(index) => index,
 +                Err(e) => return Err(format!("Incorrect index string: {}", e)),
 +            };
 +            Ok((index, SingleResolve { assist_id: assist_id_string.to_string(), assist_kind }))
 +        }
 +        _ => Err("Action id contains incorrect number of segments".to_string()),
 +    }
 +}
 +
 +/// `textDocument/codeLens`: computes lenses (runnables, impls, references, …)
 +/// for a file based on the lens configuration. Returns an empty list without
 +/// touching the db when all lenses are disabled.
 +pub(crate) fn handle_code_lens(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::CodeLensParams,
 +) -> Result<Option<Vec<CodeLens>>> {
 +    let _p = profile::span("handle_code_lens");
 +
 +    let lens_config = snap.config.lens();
 +    if lens_config.none() {
 +        // early return before any db query!
 +        return Ok(Some(Vec::default()));
 +    }
 +
 +    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
 +    let cargo_target_spec = CargoTargetSpec::for_file(&snap, file_id)?;
 +
 +    let annotations = snap.analysis.annotations(
 +        &AnnotationConfig {
 +            // Only runnable cargo targets (bin/example/test) get a binary lens.
 +            binary_target: cargo_target_spec
 +                .map(|spec| {
 +                    matches!(
 +                        spec.target_kind,
 +                        TargetKind::Bin | TargetKind::Example | TargetKind::Test
 +                    )
 +                })
 +                .unwrap_or(false),
 +            annotate_runnables: lens_config.runnable(),
 +            annotate_impls: lens_config.implementations,
 +            annotate_references: lens_config.refs_adt,
 +            annotate_method_references: lens_config.method_refs,
 +            annotate_enum_variant_references: lens_config.enum_variant_refs,
 +        },
 +        file_id,
 +    )?;
 +
 +    let mut res = Vec::new();
 +    for a in annotations {
 +        to_proto::code_lens(&mut res, &snap, a)?;
 +    }
 +
 +    Ok(Some(res))
 +}
 +
 +/// `codeLens/resolve`: resolves a lazily-computed lens by re-deriving its
 +/// annotation and converting it back. Falls back to returning the input lens
 +/// unchanged (and logs via `never!`) if resolution doesn't produce exactly one
 +/// lens.
 +pub(crate) fn handle_code_lens_resolve(
 +    snap: GlobalStateSnapshot,
 +    code_lens: CodeLens,
 +) -> Result<CodeLens> {
 +    let annotation = from_proto::annotation(&snap, code_lens.clone())?;
 +    let annotation = snap.analysis.resolve_annotation(annotation)?;
 +
 +    let mut acc = Vec::new();
 +    to_proto::code_lens(&mut acc, &snap, annotation)?;
 +
 +    let res = match acc.pop() {
 +        Some(it) if acc.is_empty() => it,
 +        _ => {
 +            never!();
 +            code_lens
 +        }
 +    };
 +
 +    Ok(res)
 +}
 +
 +/// `textDocument/documentHighlight`: highlights all occurrences related to the
 +/// symbol under the cursor, mapping each occurrence's access category to an
 +/// LSP highlight kind (read/write/text).
 +pub(crate) fn handle_document_highlight(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::DocumentHighlightParams,
 +) -> Result<Option<Vec<lsp_types::DocumentHighlight>>> {
 +    let _p = profile::span("handle_document_highlight");
 +    let position = from_proto::file_position(&snap, params.text_document_position_params)?;
 +    let line_index = snap.file_line_index(position.file_id)?;
 +
 +    let refs = match snap.analysis.highlight_related(snap.config.highlight_related(), position)? {
 +        None => return Ok(None),
 +        Some(refs) => refs,
 +    };
 +    let res = refs
 +        .into_iter()
 +        .map(|ide::HighlightedRange { range, category }| lsp_types::DocumentHighlight {
 +            range: to_proto::range(&line_index, range),
 +            kind: category.map(to_proto::document_highlight_kind),
 +        })
 +        .collect();
 +    Ok(Some(res))
 +}
 +
 +/// `experimental/ssr`: structural search and replace. All client-supplied
 +/// selection ranges are resolved relative to the request's text document; the
 +/// double `?` unwraps both the analysis cancellation error and the SSR error.
 +pub(crate) fn handle_ssr(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_ext::SsrParams,
 +) -> Result<lsp_types::WorkspaceEdit> {
 +    let _p = profile::span("handle_ssr");
 +    let selections = params
 +        .selections
 +        .iter()
 +        .map(|range| from_proto::file_range(&snap, params.position.text_document.clone(), *range))
 +        .collect::<Result<Vec<_>, _>>()?;
 +    let position = from_proto::file_position(&snap, params.position)?;
 +    let source_change = snap.analysis.structural_search_replace(
 +        &params.query,
 +        params.parse_only,
 +        position,
 +        selections,
 +    )??;
 +    to_proto::workspace_edit(&snap, source_change)
 +}
 +
 +/// Computes native (non-cargo-check) diagnostics for `file_id` in LSP form,
 +/// for `textDocument/publishDiagnostics`. Each diagnostic links its code to
 +/// the corresponding section of the rust-analyzer manual and tags unused-code
 +/// diagnostics as UNNECESSARY so clients can dim them.
 +pub(crate) fn publish_diagnostics(
 +    snap: &GlobalStateSnapshot,
 +    file_id: FileId,
 +) -> Result<Vec<Diagnostic>> {
 +    let _p = profile::span("publish_diagnostics");
 +    let line_index = snap.file_line_index(file_id)?;
 +
 +    let diagnostics: Vec<Diagnostic> = snap
 +        .analysis
 +        .diagnostics(&snap.config.diagnostics(), AssistResolveStrategy::None, file_id)?
 +        .into_iter()
 +        .map(|d| Diagnostic {
 +            range: to_proto::range(&line_index, d.range),
 +            severity: Some(to_proto::diagnostic_severity(d.severity)),
 +            code: Some(NumberOrString::String(d.code.as_str().to_string())),
 +            code_description: Some(lsp_types::CodeDescription {
 +                // The manual URL is built from a fixed template; `unwrap` is
 +                // safe because the base URL is a valid constant.
 +                href: lsp_types::Url::parse(&format!(
 +                    "https://rust-analyzer.github.io/manual.html#{}",
 +                    d.code.as_str()
 +                ))
 +                .unwrap(),
 +            }),
 +            source: Some("rust-analyzer".to_string()),
 +            message: d.message,
 +            related_information: None,
 +            tags: if d.unused { Some(vec![DiagnosticTag::UNNECESSARY]) } else { None },
 +            data: None,
 +        })
 +        .collect();
 +    Ok(diagnostics)
 +}
 +
 +/// `textDocument/inlayHint`: computes inlay hints restricted to the requested
 +/// range and converts them to LSP form.
 +pub(crate) fn handle_inlay_hints(
 +    snap: GlobalStateSnapshot,
 +    params: InlayHintParams,
 +) -> Result<Option<Vec<InlayHint>>> {
 +    let _p = profile::span("handle_inlay_hints");
 +    let document_uri = &params.text_document.uri;
 +    let file_id = from_proto::file_id(&snap, document_uri)?;
 +    let line_index = snap.file_line_index(file_id)?;
 +    let range = from_proto::file_range(
 +        &snap,
 +        TextDocumentIdentifier::new(document_uri.to_owned()),
 +        params.range,
 +    )?;
 +    let inlay_hints_config = snap.config.inlay_hints();
 +    Ok(Some(
 +        snap.analysis
 +            .inlay_hints(&inlay_hints_config, file_id, Some(range))?
 +            .into_iter()
 +            .map(|it| {
 +                to_proto::inlay_hint(&snap, &line_index, inlay_hints_config.render_colons, it)
 +            })
 +            .collect(),
 +    ))
 +}
 +
 +/// `inlayHint/resolve`: lazily attaches a hover-derived markup tooltip to an
 +/// inlay hint, using the position/range stashed in the hint's `data` payload.
 +/// Hints without `data`, or whose position yields no hover info, are returned
 +/// unchanged.
 +pub(crate) fn handle_inlay_hints_resolve(
 +    snap: GlobalStateSnapshot,
 +    mut hint: InlayHint,
 +) -> Result<InlayHint> {
 +    let _p = profile::span("handle_inlay_hints_resolve");
 +    let data = match hint.data.take() {
 +        Some(it) => it,
 +        None => return Ok(hint),
 +    };
 +
 +    let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?;
 +
 +    let file_range = from_proto::file_range(
 +        &snap,
 +        resolve_data.text_document,
 +        // Normalize a bare position into an empty range at that position.
 +        match resolve_data.position {
 +            PositionOrRange::Position(pos) => Range::new(pos, pos),
 +            PositionOrRange::Range(range) => range,
 +        },
 +    )?;
 +    let info = match snap.analysis.hover(&snap.config.hover(), file_range)? {
 +        None => return Ok(hint),
 +        Some(info) => info,
 +    };
 +
 +    let markup_kind =
 +        snap.config.hover().documentation.map_or(ide::HoverDocFormat::Markdown, |kind| kind);
 +
 +    // FIXME: hover actions?
 +    hint.tooltip = Some(lsp_types::InlayHintTooltip::MarkupContent(to_proto::markup_content(
 +        info.info.markup,
 +        markup_kind,
 +    )));
 +    Ok(hint)
 +}
 +
 +/// `textDocument/prepareCallHierarchy`: resolves the position to the set of
 +/// function items that can anchor a call hierarchy (non-function navigation
 +/// targets are filtered out).
 +pub(crate) fn handle_call_hierarchy_prepare(
 +    snap: GlobalStateSnapshot,
 +    params: CallHierarchyPrepareParams,
 +) -> Result<Option<Vec<CallHierarchyItem>>> {
 +    let _p = profile::span("handle_call_hierarchy_prepare");
 +    let position = from_proto::file_position(&snap, params.text_document_position_params)?;
 +
 +    let nav_info = match snap.analysis.call_hierarchy(position)? {
 +        None => return Ok(None),
 +        Some(it) => it,
 +    };
 +
 +    let RangeInfo { range: _, info: navs } = nav_info;
 +    let res = navs
 +        .into_iter()
 +        .filter(|it| it.kind == Some(SymbolKind::Function))
 +        .map(|it| to_proto::call_hierarchy_item(&snap, it))
 +        .collect::<Result<Vec<_>>>()?;
 +
 +    Ok(Some(res))
 +}
 +
 +/// `callHierarchy/incomingCalls`: finds callers of the item identified by the
 +/// request's `item`, re-deriving its position from the item's selection range.
 +pub(crate) fn handle_call_hierarchy_incoming(
 +    snap: GlobalStateSnapshot,
 +    params: CallHierarchyIncomingCallsParams,
 +) -> Result<Option<Vec<CallHierarchyIncomingCall>>> {
 +    let _p = profile::span("handle_call_hierarchy_incoming");
 +    let item = params.item;
 +
 +    // Anchor on the start of the item's selection range.
 +    let doc = TextDocumentIdentifier::new(item.uri);
 +    let frange = from_proto::file_range(&snap, doc, item.selection_range)?;
 +    let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
 +
 +    let call_items = match snap.analysis.incoming_calls(fpos)? {
 +        None => return Ok(None),
 +        Some(it) => it,
 +    };
 +
 +    let mut res = vec![];
 +
 +    for call_item in call_items.into_iter() {
 +        let file_id = call_item.target.file_id;
 +        let line_index = snap.file_line_index(file_id)?;
 +        let item = to_proto::call_hierarchy_item(&snap, call_item.target)?;
 +        res.push(CallHierarchyIncomingCall {
 +            from: item,
 +            from_ranges: call_item
 +                .ranges
 +                .into_iter()
 +                .map(|it| to_proto::range(&line_index, it))
 +                .collect(),
 +        });
 +    }
 +
 +    Ok(Some(res))
 +}
 +
 +/// `callHierarchy/outgoingCalls`: the mirror of the incoming-calls handler —
 +/// finds callees of the item identified by the request's `item`.
 +pub(crate) fn handle_call_hierarchy_outgoing(
 +    snap: GlobalStateSnapshot,
 +    params: CallHierarchyOutgoingCallsParams,
 +) -> Result<Option<Vec<CallHierarchyOutgoingCall>>> {
 +    let _p = profile::span("handle_call_hierarchy_outgoing");
 +    let item = params.item;
 +
 +    // Anchor on the start of the item's selection range.
 +    let doc = TextDocumentIdentifier::new(item.uri);
 +    let frange = from_proto::file_range(&snap, doc, item.selection_range)?;
 +    let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
 +
 +    let call_items = match snap.analysis.outgoing_calls(fpos)? {
 +        None => return Ok(None),
 +        Some(it) => it,
 +    };
 +
 +    let mut res = vec![];
 +
 +    for call_item in call_items.into_iter() {
 +        let file_id = call_item.target.file_id;
 +        let line_index = snap.file_line_index(file_id)?;
 +        let item = to_proto::call_hierarchy_item(&snap, call_item.target)?;
 +        res.push(CallHierarchyOutgoingCall {
 +            to: item,
 +            from_ranges: call_item
 +                .ranges
 +                .into_iter()
 +                .map(|it| to_proto::range(&line_index, it))
 +                .collect(),
 +        });
 +    }
 +
 +    Ok(Some(res))
 +}
 +
 +/// `textDocument/semanticTokens/full`: computes semantic highlighting for the
 +/// whole file and caches the token set so a later `full/delta` request can
 +/// diff against it. (The `-`/`++` lines below are the merge's replacement of
 +/// the old string-only highlighting flag with a full highlighting config.)
 +pub(crate) fn handle_semantic_tokens_full(
 +    snap: GlobalStateSnapshot,
 +    params: SemanticTokensParams,
 +) -> Result<Option<SemanticTokensResult>> {
 +    let _p = profile::span("handle_semantic_tokens_full");
 +
 +    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
 +    let text = snap.analysis.file_text(file_id)?;
 +    let line_index = snap.file_line_index(file_id)?;
 +
 -     let highlights = snap.analysis.highlight(file_id)?;
 -     let highlight_strings = snap.config.highlighting_strings();
 -     let semantic_tokens =
 -         to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
 ++    let highlights = snap.analysis.highlight(snap.config.highlighting_config(), file_id)?;
 ++    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
 +
 +    // Unconditionally cache the tokens
 +    snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
 +
 +    Ok(Some(semantic_tokens.into()))
 +}
 +
 +/// `textDocument/semanticTokens/full/delta`: recomputes full-file tokens and,
 +/// when the client's `previous_result_id` matches the cached set, returns only
 +/// the delta; otherwise replaces the cache and returns the full set.
 +pub(crate) fn handle_semantic_tokens_full_delta(
 +    snap: GlobalStateSnapshot,
 +    params: SemanticTokensDeltaParams,
 +) -> Result<Option<SemanticTokensFullDeltaResult>> {
 +    let _p = profile::span("handle_semantic_tokens_full_delta");
 +
 +    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
 +    let text = snap.analysis.file_text(file_id)?;
 +    let line_index = snap.file_line_index(file_id)?;
 +
 -     let highlights = snap.analysis.highlight_range(frange)?;
 -     let highlight_strings = snap.config.highlighting_strings();
 -     let semantic_tokens =
 -         to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
 ++    let highlights = snap.analysis.highlight(snap.config.highlighting_config(), file_id)?;
 ++    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
 +
 +    let mut cache = snap.semantic_tokens_cache.lock();
 +    let cached_tokens = cache.entry(params.text_document.uri).or_default();
 +
 +    if let Some(prev_id) = &cached_tokens.result_id {
 +        if *prev_id == params.previous_result_id {
 +            let delta = to_proto::semantic_token_delta(cached_tokens, &semantic_tokens);
 +            *cached_tokens = semantic_tokens;
 +            return Ok(Some(delta.into()));
 +        }
 +    }
 +
 +    // Result id mismatch (or no cached id): reset the cache and send full tokens.
 +    *cached_tokens = semantic_tokens.clone();
 +
 +    Ok(Some(semantic_tokens.into()))
 +}
 +
 +/// `textDocument/semanticTokens/range`: computes semantic highlighting for a
 +/// sub-range only. Unlike the full variant, the result is not cached.
 +pub(crate) fn handle_semantic_tokens_range(
 +    snap: GlobalStateSnapshot,
 +    params: SemanticTokensRangeParams,
 +) -> Result<Option<SemanticTokensRangeResult>> {
 +    let _p = profile::span("handle_semantic_tokens_range");
 +
 +    let frange = from_proto::file_range(&snap, params.text_document, params.range)?;
 +    let text = snap.analysis.file_text(frange.file_id)?;
 +    let line_index = snap.file_line_index(frange.file_id)?;
 +
 ++    let highlights = snap.analysis.highlight_range(snap.config.highlighting_config(), frange)?;
 ++    let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
 +    Ok(Some(semantic_tokens.into()))
 +}
 +
 +/// `experimental/externalDocs`: returns the docs.rs-style URL for the symbol
 +/// at the position, or `None` when there is no symbol / the URL is invalid.
 +pub(crate) fn handle_open_docs(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_types::TextDocumentPositionParams,
 +) -> Result<Option<lsp_types::Url>> {
 +    let _p = profile::span("handle_open_docs");
 +    let position = from_proto::file_position(&snap, params)?;
 +
 +    let remote = snap.analysis.external_docs(position)?;
 +
 +    Ok(remote.and_then(|remote| Url::parse(&remote).ok()))
 +}
 +
 +/// `experimental/openCargoToml`: resolves the file's cargo target and returns
 +/// a goto-definition response pointing at the top of its `Cargo.toml`.
 +/// `None` when the file does not belong to a cargo target.
 +pub(crate) fn handle_open_cargo_toml(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_ext::OpenCargoTomlParams,
 +) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
 +    let _p = profile::span("handle_open_cargo_toml");
 +    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
 +
 +    let cargo_spec = match CargoTargetSpec::for_file(&snap, file_id)? {
 +        Some(it) => it,
 +        None => return Ok(None),
 +    };
 +
 +    let cargo_toml_url = to_proto::url_from_abs_path(&cargo_spec.cargo_toml);
 +    let res: lsp_types::GotoDefinitionResponse =
 +        Location::new(cargo_toml_url, Range::default()).into();
 +    Ok(Some(res))
 +}
 +
 +/// `experimental/moveItem`: moves the item covering the range up or down and
 +/// returns the resulting snippet edits (empty when nothing can be moved).
 +pub(crate) fn handle_move_item(
 +    snap: GlobalStateSnapshot,
 +    params: lsp_ext::MoveItemParams,
 +) -> Result<Vec<lsp_ext::SnippetTextEdit>> {
 +    let _p = profile::span("handle_move_item");
 +    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
 +    let range = from_proto::file_range(&snap, params.text_document, params.range)?;
 +
 +    let direction = match params.direction {
 +        lsp_ext::MoveItemDirection::Up => ide::Direction::Up,
 +        lsp_ext::MoveItemDirection::Down => ide::Direction::Down,
 +    };
 +
 +    match snap.analysis.move_item(range, direction)? {
 +        Some(text_edit) => {
 +            let line_index = snap.file_line_index(file_id)?;
 +            Ok(to_proto::snippet_text_edit_vec(&line_index, true, text_edit))
 +        }
 +        None => Ok(vec![]),
 +    }
 +}
 +
 +/// Wraps an LSP command with a tooltip into the rust-analyzer `CommandLink`
 +/// extension type.
 +fn to_command_link(command: lsp_types::Command, tooltip: String) -> lsp_ext::CommandLink {
 +    lsp_ext::CommandLink { tooltip: Some(tooltip), command }
 +}
 +
 +/// Builds the "Go to implementations" hover-action link for `position`, if the
 +/// implementations hover action is enabled and the client supports the
 +/// show-references command. Returns `None` on any lookup failure (best-effort).
 +fn show_impl_command_link(
 +    snap: &GlobalStateSnapshot,
 +    position: &FilePosition,
 +) -> Option<lsp_ext::CommandLinkGroup> {
 +    if snap.config.hover_actions().implementations && snap.config.client_commands().show_reference {
 +        if let Some(nav_data) = snap.analysis.goto_implementation(*position).unwrap_or(None) {
 +            let uri = to_proto::url(snap, position.file_id);
 +            let line_index = snap.file_line_index(position.file_id).ok()?;
 +            let position = to_proto::position(&line_index, position.offset);
 +            let locations: Vec<_> = nav_data
 +                .info
 +                .into_iter()
 +                .filter_map(|nav| to_proto::location_from_nav(snap, nav).ok())
 +                .collect();
 +            let title = to_proto::implementation_title(locations.len());
 +            let command = to_proto::command::show_references(title, &uri, position, locations);
 +
 +            return Some(lsp_ext::CommandLinkGroup {
 +                commands: vec![to_command_link(command, "Go to implementations".into())],
 +                ..Default::default()
 +            });
 +        }
 +    }
 +    None
 +}
 +
 +fn show_ref_command_link(
 +    snap: &GlobalStateSnapshot,
 +    position: &FilePosition,
 +) -> Option<lsp_ext::CommandLinkGroup> {
 +    if snap.config.hover_actions().references && snap.config.client_commands().show_reference {
 +        if let Some(ref_search_res) = snap.analysis.find_all_refs(*position, None).unwrap_or(None) {
 +            let uri = to_proto::url(snap, position.file_id);
 +            let line_index = snap.file_line_index(position.file_id).ok()?;
 +            let position = to_proto::position(&line_index, position.offset);
 +            let locations: Vec<_> = ref_search_res
 +                .into_iter()
 +                .flat_map(|res| res.references)
 +                .flat_map(|(file_id, ranges)| {
 +                    ranges.into_iter().filter_map(move |(range, _)| {
 +                        to_proto::location(snap, FileRange { file_id, range }).ok()
 +                    })
 +                })
 +                .collect();
 +            let title = to_proto::reference_title(locations.len());
 +            let command = to_proto::command::show_references(title, &uri, position, locations);
 +
 +            return Some(lsp_ext::CommandLinkGroup {
 +                commands: vec![to_command_link(command, "Go to references".into())],
 +                ..Default::default()
 +            });
 +        }
 +    }
 +    None
 +}
 +
 +fn runnable_action_links(
 +    snap: &GlobalStateSnapshot,
 +    runnable: Runnable,
 +) -> Option<lsp_ext::CommandLinkGroup> {
 +    let hover_actions_config = snap.config.hover_actions();
 +    if !hover_actions_config.runnable() {
 +        return None;
 +    }
 +
 +    let cargo_spec = CargoTargetSpec::for_file(snap, runnable.nav.file_id).ok()?;
 +    if should_skip_target(&runnable, cargo_spec.as_ref()) {
 +        return None;
 +    }
 +
 +    let client_commands_config = snap.config.client_commands();
 +    if !(client_commands_config.run_single || client_commands_config.debug_single) {
 +        return None;
 +    }
 +
 +    let title = runnable.title();
 +    let r = to_proto::runnable(snap, runnable).ok()?;
 +
 +    let mut group = lsp_ext::CommandLinkGroup::default();
 +
 +    if hover_actions_config.run && client_commands_config.run_single {
 +        let run_command = to_proto::command::run_single(&r, &title);
 +        group.commands.push(to_command_link(run_command, r.label.clone()));
 +    }
 +
 +    if hover_actions_config.debug && client_commands_config.debug_single {
 +        let dbg_command = to_proto::command::debug_single(&r);
 +        group.commands.push(to_command_link(dbg_command, r.label));
 +    }
 +
 +    Some(group)
 +}
 +
 +fn goto_type_action_links(
 +    snap: &GlobalStateSnapshot,
 +    nav_targets: &[HoverGotoTypeData],
 +) -> Option<lsp_ext::CommandLinkGroup> {
 +    if !snap.config.hover_actions().goto_type_def
 +        || nav_targets.is_empty()
 +        || !snap.config.client_commands().goto_location
 +    {
 +        return None;
 +    }
 +
 +    Some(lsp_ext::CommandLinkGroup {
 +        title: Some("Go to ".into()),
 +        commands: nav_targets
 +            .iter()
 +            .filter_map(|it| {
 +                to_proto::command::goto_location(snap, &it.nav)
 +                    .map(|cmd| to_command_link(cmd, it.mod_path.clone()))
 +            })
 +            .collect(),
 +    })
 +}
 +
 +fn prepare_hover_actions(
 +    snap: &GlobalStateSnapshot,
 +    actions: &[HoverAction],
 +) -> Vec<lsp_ext::CommandLinkGroup> {
 +    actions
 +        .iter()
 +        .filter_map(|it| match it {
 +            HoverAction::Implementation(position) => show_impl_command_link(snap, position),
 +            HoverAction::Reference(position) => show_ref_command_link(snap, position),
 +            HoverAction::Runnable(r) => runnable_action_links(snap, r.clone()),
 +            HoverAction::GoToType(targets) => goto_type_action_links(snap, targets),
 +        })
 +        .collect()
 +}
 +
 +fn should_skip_target(runnable: &Runnable, cargo_spec: Option<&CargoTargetSpec>) -> bool {
 +    match runnable.kind {
 +        RunnableKind::Bin => {
 +            // Do not suggest binary run on other target than binary
 +            match &cargo_spec {
 +                Some(spec) => !matches!(
 +                    spec.target_kind,
 +                    TargetKind::Bin | TargetKind::Example | TargetKind::Test
 +                ),
 +                None => true,
 +            }
 +        }
 +        _ => false,
 +    }
 +}
 +
/// Formats `text_document` (optionally restricted to `range`) by piping its
/// contents through `rustfmt` — or a user-configured custom command — on
/// stdin, and diffing the output against the current text.
///
/// Returns `Ok(None)` when no edits are needed (file already formatted, or
/// rustfmt exited with status 1, which is treated as a parse error and
/// swallowed), `Ok(Some(edits))` otherwise. Errors are surfaced for spawn
/// failures, non-UTF-8 output, and other non-zero exit codes.
fn run_rustfmt(
    snap: &GlobalStateSnapshot,
    text_document: TextDocumentIdentifier,
    range: Option<lsp_types::Range>,
) -> Result<Option<Vec<lsp_types::TextEdit>>> {
    let file_id = from_proto::file_id(snap, &text_document.uri)?;
    let file = snap.analysis.file_text(file_id)?;
    let crate_ids = snap.analysis.crate_for(file_id)?;

    let line_index = snap.file_line_index(file_id)?;

    let mut command = match snap.config.rustfmt() {
        RustfmtConfig::Rustfmt { extra_args, enable_range_formatting } => {
            let mut cmd = process::Command::new(toolchain::rustfmt());
            cmd.args(extra_args);
            // try to chdir to the file so we can respect `rustfmt.toml`
            // FIXME: use `rustfmt --config-path` once
            // https://github.com/rust-lang/rustfmt/issues/4660 gets fixed
            match text_document.uri.to_file_path() {
                Ok(mut path) => {
                    // pop off file name
                    if path.pop() && path.is_dir() {
                        cmd.current_dir(path);
                    }
                }
                Err(_) => {
                    tracing::error!(
                        "Unable to get file path for {}, rustfmt.toml might be ignored",
                        text_document.uri
                    );
                }
            }
            if let Some(&crate_id) = crate_ids.first() {
                // Assume all crates are in the same edition
                let edition = snap.analysis.crate_edition(crate_id)?;
                cmd.arg("--edition");
                cmd.arg(edition.to_string());
            }

            if let Some(range) = range {
                if !enable_range_formatting {
                    return Err(LspError::new(
                        ErrorCode::InvalidRequest as i32,
                        String::from(
                            "rustfmt range formatting is unstable. \
                            Opt-in by using a nightly build of rustfmt and setting \
                            `rustfmt.rangeFormatting.enable` to true in your LSP configuration",
                        ),
                    )
                    .into());
                }

                // `--file-lines` wants 0-based line numbers for the "stdin"
                // pseudo-file, which is what `line_col` produces here.
                let frange = from_proto::file_range(snap, text_document, range)?;
                let start_line = line_index.index.line_col(frange.range.start()).line;
                let end_line = line_index.index.line_col(frange.range.end()).line;

                cmd.arg("--unstable-features");
                cmd.arg("--file-lines");
                cmd.arg(
                    json!([{
                        "file": "stdin",
                        "range": [start_line, end_line]
                    }])
                    .to_string(),
                );
            }

            cmd
        }
        RustfmtConfig::CustomCommand { command, args } => {
            let mut cmd = process::Command::new(command);
            cmd.args(args);
            cmd
        }
    };

    // All three streams are piped: the document goes in on stdin, the
    // formatted text comes back on stdout, diagnostics on stderr.
    let mut rustfmt = command
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn()
        .context(format!("Failed to spawn {:?}", command))?;

    rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?;

    let output = rustfmt.wait_with_output()?;
    let captured_stdout = String::from_utf8(output.stdout)?;
    let captured_stderr = String::from_utf8(output.stderr).unwrap_or_default();

    if !output.status.success() {
        let rustfmt_not_installed =
            captured_stderr.contains("not installed") || captured_stderr.contains("not available");

        return match output.status.code() {
            Some(1) if !rustfmt_not_installed => {
                // While `rustfmt` doesn't have a specific exit code for parse errors this is the
                // likely cause exiting with 1. Most Language Servers swallow parse errors on
                // formatting because otherwise an error is surfaced to the user on top of the
                // syntax error diagnostics they're already receiving. This is especially jarring
                // if they have format on save enabled.
                tracing::warn!(
                    ?command,
                    %captured_stderr,
                    "rustfmt exited with status 1"
                );
                Ok(None)
            }
            _ => {
                // Something else happened - e.g. `rustfmt` is missing or caught a signal
                Err(LspError::new(
                    -32900,
                    format!(
                        r#"rustfmt exited with:
                           Status: {}
                           stdout: {}
                           stderr: {}"#,
                        output.status, captured_stdout, captured_stderr,
                    ),
                )
                .into())
            }
        };
    }

    let (new_text, new_line_endings) = LineEndings::normalize(captured_stdout);

    if line_index.endings != new_line_endings {
        // If line endings are different, send the entire file.
        // Diffing would not work here, as the line endings might be the only
        // difference.
        Ok(Some(to_proto::text_edit_vec(
            &line_index,
            TextEdit::replace(TextRange::up_to(TextSize::of(&*file)), new_text),
        )))
    } else if *file == new_text {
        // The document is already formatted correctly -- no edits needed.
        Ok(None)
    } else {
        Ok(Some(to_proto::text_edit_vec(&line_index, diff(&file, &new_text))))
    }
}
index f187547019a13524416a7676c72c30e85fa748b9,0000000000000000000000000000000000000000..3cfbc2e4e45037cbaefb2a5116987ba995bfaa3a
mode 100644,000000..100644
--- /dev/null
@@@ -1,882 -1,0 +1,914 @@@
-                 let diagnostics =
 +//! The main loop of `rust-analyzer` responsible for dispatching LSP
 +//! requests/replies and notifications back to the client.
 +use std::{
 +    fmt,
 +    ops::Deref,
 +    sync::Arc,
 +    time::{Duration, Instant},
 +};
 +
 +use always_assert::always;
 +use crossbeam_channel::{select, Receiver};
 +use flycheck::FlycheckHandle;
 +use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath};
 +use itertools::Itertools;
 +use lsp_server::{Connection, Notification, Request};
 +use lsp_types::notification::Notification as _;
 +use vfs::{ChangeKind, FileId};
 +
 +use crate::{
 +    config::Config,
 +    dispatch::{NotificationDispatcher, RequestDispatcher},
 +    from_proto,
 +    global_state::{file_id_to_url, url_to_file_id, GlobalState},
 +    handlers, lsp_ext,
 +    lsp_utils::{apply_document_changes, notification_is, Progress},
 +    mem_docs::DocumentData,
 +    reload::{self, BuildDataProgress, ProjectWorkspaceProgress},
 +    Result,
 +};
 +
/// Entry point of the server: applies a Windows-specific thread-priority
/// tweak, then constructs the [`GlobalState`] and runs its event loop until
/// the client disconnects or requests exit.
pub fn main_loop(config: Config, connection: Connection) -> Result<()> {
    tracing::info!("initial config: {:#?}", config);

    // Windows scheduler implements priority boosts: if thread waits for an
    // event (like a condvar), and event fires, priority of the thread is
    // temporary bumped. This optimization backfires in our case: each time the
    // `main_loop` schedules a task to run on a threadpool, the worker threads
    // gets a higher priority, and (on a machine with fewer cores) displaces the
    // main loop! We work-around this by marking the main loop as a
    // higher-priority thread.
    //
    // https://docs.microsoft.com/en-us/windows/win32/procthread/scheduling-priorities
    // https://docs.microsoft.com/en-us/windows/win32/procthread/priority-boosts
    // https://github.com/rust-lang/rust-analyzer/issues/2835
    #[cfg(windows)]
    unsafe {
        // SAFETY: plain FFI calls operating on the current thread's
        // pseudo-handle; no pointers or invariants are involved.
        use winapi::um::processthreadsapi::*;
        let thread = GetCurrentThread();
        // 1 == THREAD_PRIORITY_ABOVE_NORMAL
        let thread_priority_above_normal = 1;
        SetThreadPriority(thread, thread_priority_above_normal);
    }

    GlobalState::new(connection.sender, config).run(connection.receiver)
}
 +
/// An input processed by one turn of the main loop: a message from the LSP
/// client, a finished background [`Task`], a VFS loader message, or a
/// flycheck (`cargo check`) message.
enum Event {
    Lsp(lsp_server::Message),
    Task(Task),
    Vfs(vfs::loader::Message),
    Flycheck(flycheck::Message),
}
 +
/// A unit of work delivered to the main loop, typically produced by a
/// background thread via the task pool channel.
#[derive(Debug)]
pub(crate) enum Task {
    /// A finished response, ready to be sent to the client.
    Response(lsp_server::Response),
    /// A request to be dispatched again through `on_request`.
    Retry(lsp_server::Request),
    /// Freshly computed native diagnostics, grouped per file.
    Diagnostics(Vec<(FileId, Vec<lsp_types::Diagnostic>)>),
    /// Progress of the cache-priming (indexing) operation.
    PrimeCaches(PrimeCachesProgress),
    /// Progress of fetching/loading the project workspace.
    FetchWorkspace(ProjectWorkspaceProgress),
    /// Progress of fetching build data (build scripts etc.).
    FetchBuildData(BuildDataProgress),
}
 +
/// Progress of the parallel cache-priming (indexing) operation, reported to
/// the client as an "Indexing" progress notification.
#[derive(Debug)]
pub(crate) enum PrimeCachesProgress {
    Begin,
    Report(ide::ParallelPrimeCachesProgress),
    /// Priming finished; when `cancelled` is true the operation is
    /// re-requested so it restarts.
    End { cancelled: bool },
}
 +
impl fmt::Debug for Event {
    /// Manual `Debug` that abbreviates payload-heavy variants so event logs
    /// stay readable.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Did-open/did-change notifications carry document text; print only
        // the method name for those.
        let debug_verbose_not = |not: &Notification, f: &mut fmt::Formatter<'_>| {
            f.debug_struct("Notification").field("method", &not.method).finish()
        };

        match self {
            Event::Lsp(lsp_server::Message::Notification(not)) => {
                if notification_is::<lsp_types::notification::DidOpenTextDocument>(not)
                    || notification_is::<lsp_types::notification::DidChangeTextDocument>(not)
                {
                    return debug_verbose_not(not, f);
                }
            }
            Event::Task(Task::Response(resp)) => {
                // Responses can be large; print only the id and error parts.
                return f
                    .debug_struct("Response")
                    .field("id", &resp.id)
                    .field("error", &resp.error)
                    .finish();
            }
            _ => (),
        }
        // Everything else falls back to the inner value's own `Debug`.
        match self {
            Event::Lsp(it) => fmt::Debug::fmt(it, f),
            Event::Task(it) => fmt::Debug::fmt(it, f),
            Event::Vfs(it) => fmt::Debug::fmt(it, f),
            Event::Flycheck(it) => fmt::Debug::fmt(it, f),
        }
    }
}
 +
 +impl GlobalState {
    /// Runs the main loop: performs startup work (workspace-discovery
    /// warning, dynamic `didSave` registration, initial workspace fetch),
    /// then processes events from `inbox` until the client sends `exit`.
    fn run(mut self, inbox: Receiver<lsp_server::Message>) -> Result<()> {
        // No workspace was discovered; warn the user unless they disabled the
        // `cargo_toml_not_found` notification.
        if self.config.linked_projects().is_empty()
            && self.config.detached_files().is_empty()
            && self.config.notifications().cargo_toml_not_found
        {
            self.show_and_log_error("rust-analyzer failed to discover workspace".to_string(), None);
        };

        // Dynamically register for `didSave` on Rust sources and Cargo
        // manifests; `include_text: Some(false)` keeps the payload small.
        if self.config.did_save_text_document_dynamic_registration() {
            let save_registration_options = lsp_types::TextDocumentSaveRegistrationOptions {
                include_text: Some(false),
                text_document_registration_options: lsp_types::TextDocumentRegistrationOptions {
                    document_selector: Some(vec![
                        lsp_types::DocumentFilter {
                            language: None,
                            scheme: None,
                            pattern: Some("**/*.rs".into()),
                        },
                        lsp_types::DocumentFilter {
                            language: None,
                            scheme: None,
                            pattern: Some("**/Cargo.toml".into()),
                        },
                        lsp_types::DocumentFilter {
                            language: None,
                            scheme: None,
                            pattern: Some("**/Cargo.lock".into()),
                        },
                    ]),
                },
            };

            let registration = lsp_types::Registration {
                id: "textDocument/didSave".to_string(),
                method: "textDocument/didSave".to_string(),
                register_options: Some(serde_json::to_value(save_registration_options).unwrap()),
            };
            self.send_request::<lsp_types::request::RegisterCapability>(
                lsp_types::RegistrationParams { registrations: vec![registration] },
                |_, _| (),
            );
        }

        // Kick off the initial workspace load before entering the loop.
        self.fetch_workspaces_queue.request_op("startup".to_string());
        if let Some(cause) = self.fetch_workspaces_queue.should_start_op() {
            self.fetch_workspaces(cause);
        }

        while let Some(event) = self.next_event(&inbox) {
            // `exit` ends the loop cleanly; everything else is dispatched.
            if let Event::Lsp(lsp_server::Message::Notification(not)) = &event {
                if not.method == lsp_types::notification::Exit::METHOD {
                    return Ok(());
                }
            }
            self.handle_event(event)?
        }

        // `next_event` returned `None`: the client channel closed without a
        // prior `exit` notification.
        Err("client exited without proper shutdown sequence".into())
    }
 +
    /// Blocks until the next event arrives on any input channel: client
    /// messages, task-pool results, VFS loader messages, or flycheck updates.
    ///
    /// Returns `None` only when the client connection closes; the other
    /// channels' senders are owned by `self`, so receiving from them cannot
    /// fail (hence the `unwrap`s).
    fn next_event(&self, inbox: &Receiver<lsp_server::Message>) -> Option<Event> {
        select! {
            recv(inbox) -> msg =>
                msg.ok().map(Event::Lsp),

            recv(self.task_pool.receiver) -> task =>
                Some(Event::Task(task.unwrap())),

            recv(self.loader.receiver) -> task =>
                Some(Event::Vfs(task.unwrap())),

            recv(self.flycheck_receiver) -> task =>
                Some(Event::Flycheck(task.unwrap())),
        }
    }
 +
    /// Handles a single [`Event`], then performs the per-turn bookkeeping:
    /// applying VFS/state changes, publishing diagnostics, starting queued
    /// workspace/build-data fetches and cache priming, and reporting server
    /// status. Statement order here is significant.
    fn handle_event(&mut self, event: Event) -> Result<()> {
        let loop_start = Instant::now();
        // NOTE: don't count blocking select! call as a loop-turn time
        let _p = profile::span("GlobalState::handle_event");

        tracing::debug!("handle_event({:?})", event);
        let task_queue_len = self.task_pool.handle.len();
        if task_queue_len > 0 {
            tracing::info!("task queue len: {}", task_queue_len);
        }

        let was_quiescent = self.is_quiescent();
        match event {
            Event::Lsp(msg) => match msg {
                lsp_server::Message::Request(req) => self.on_new_request(loop_start, req),
                lsp_server::Message::Notification(not) => {
                    self.on_notification(not)?;
                }
                lsp_server::Message::Response(resp) => self.complete_request(resp),
            },
            Event::Task(task) => {
                let _p = profile::span("GlobalState::handle_event/task");
                let mut prime_caches_progress = Vec::new();

                self.handle_task(&mut prime_caches_progress, task);
                // Coalesce multiple task events into one loop turn
                while let Ok(task) = self.task_pool.receiver.try_recv() {
                    self.handle_task(&mut prime_caches_progress, task);
                }

                // Convert the collected prime-cache events into "Indexing"
                // progress notifications.
                for progress in prime_caches_progress {
                    let (state, message, fraction);
                    match progress {
                        PrimeCachesProgress::Begin => {
                            state = Progress::Begin;
                            message = None;
                            fraction = 0.0;
                        }
                        PrimeCachesProgress::Report(report) => {
                            state = Progress::Report;

                            message = match &report.crates_currently_indexing[..] {
                                [crate_name] => Some(format!(
                                    "{}/{} ({})",
                                    report.crates_done, report.crates_total, crate_name
                                )),
                                [crate_name, rest @ ..] => Some(format!(
                                    "{}/{} ({} + {} more)",
                                    report.crates_done,
                                    report.crates_total,
                                    crate_name,
                                    rest.len()
                                )),
                                _ => None,
                            };

                            fraction = Progress::fraction(report.crates_done, report.crates_total);
                        }
                        PrimeCachesProgress::End { cancelled } => {
                            state = Progress::End;
                            message = None;
                            fraction = 1.0;

                            self.prime_caches_queue.op_completed(());
                            if cancelled {
                                self.prime_caches_queue
                                    .request_op("restart after cancellation".to_string());
                            }
                        }
                    };

                    self.report_progress("Indexing", state, message, Some(fraction));
                }
            }
            Event::Vfs(message) => {
                let _p = profile::span("GlobalState::handle_event/vfs");
                self.handle_vfs_msg(message);
                // Coalesce many VFS event into a single loop turn
                while let Ok(message) = self.loader.receiver.try_recv() {
                    self.handle_vfs_msg(message);
                }
            }
            Event::Flycheck(message) => {
                let _p = profile::span("GlobalState::handle_event/flycheck");
                self.handle_flycheck_msg(message);
                // Coalesce many flycheck updates into a single loop turn
                while let Ok(message) = self.flycheck_receiver.try_recv() {
                    self.handle_flycheck_msg(message);
                }
            }
        }

        let state_changed = self.process_changes();
        let memdocs_added_or_removed = self.mem_docs.take_changes();

        if self.is_quiescent() {
            let became_quiescent = !(was_quiescent
                || self.fetch_workspaces_queue.op_requested()
                || self.fetch_build_data_queue.op_requested());

            if became_quiescent {
                // Project has loaded properly, kick off initial flycheck
                self.flycheck.iter().for_each(FlycheckHandle::restart);
                if self.config.prefill_caches() {
                    self.prime_caches_queue.request_op("became quiescent".to_string());
                }
            }

            if !was_quiescent || state_changed {
                // Refresh semantic tokens if the client supports it.
                if self.config.semantic_tokens_refresh() {
                    self.semantic_tokens_cache.lock().clear();
                    self.send_request::<lsp_types::request::SemanticTokensRefresh>((), |_, _| ());
                }

                // Refresh code lens if the client supports it.
                if self.config.code_lens_refresh() {
                    self.send_request::<lsp_types::request::CodeLensRefresh>((), |_, _| ());
                }
            }

            if !was_quiescent || state_changed || memdocs_added_or_removed {
                if self.config.publish_diagnostics() {
                    self.update_diagnostics()
                }
            }
        }

        if let Some(diagnostic_changes) = self.diagnostics.take_changes() {
            for file_id in diagnostic_changes {
                let db = self.analysis_host.raw_database();
                let source_root = db.file_source_root(file_id);
                if db.source_root(source_root).is_library {
                    // Only publish diagnostics for files in the workspace, not from crates.io deps
                    // or the sysroot.
                    // While theoretically these should never have errors, we have quite a few false
                    // positives particularly in the stdlib, and those diagnostics would stay around
                    // forever if we emitted them here.
                    continue;
                }

                let uri = file_id_to_url(&self.vfs.read().0, file_id);
                let mut diagnostics =
                    self.diagnostics.diagnostics_for(file_id).cloned().collect::<Vec<_>>();

                // VSCode assumes diagnostic messages to be non-empty strings, so we need to patch
                // empty diagnostics. Neither the docs of VSCode nor the LSP spec say whether
                // diagnostic messages are actually allowed to be empty or not and patching this
                // in the VSCode client does not work as the assertion happens in the protocol
                // conversion. So this hack is here to stay, and will be considered a hack
                // until the LSP decides to state that empty messages are allowed.

                // See https://github.com/rust-lang/rust-analyzer/issues/11404
                // See https://github.com/rust-lang/rust-analyzer/issues/13130
                let patch_empty = |message: &mut String| {
                    if message.is_empty() {
                        *message = " ".to_string();
                    }
                };

                for d in &mut diagnostics {
                    patch_empty(&mut d.message);
                    if let Some(dri) = &mut d.related_information {
                        for dri in dri {
                            patch_empty(&mut dri.message);
                        }
                    }
                }

                let version = from_proto::vfs_path(&uri)
                    .map(|path| self.mem_docs.get(&path).map(|it| it.version))
                    .unwrap_or_default();

                self.send_notification::<lsp_types::notification::PublishDiagnostics>(
                    lsp_types::PublishDiagnosticsParams { uri, diagnostics, version },
                );
            }
        }

        if self.config.cargo_autoreload() {
            if let Some(cause) = self.fetch_workspaces_queue.should_start_op() {
                self.fetch_workspaces(cause);
            }
        }

        // Build data depends on the workspace, so only fetch it once no
        // workspace fetch is running.
        if !self.fetch_workspaces_queue.op_in_progress() {
            if let Some(cause) = self.fetch_build_data_queue.should_start_op() {
                self.fetch_build_data(cause);
            }
        }

        if let Some(cause) = self.prime_caches_queue.should_start_op() {
            tracing::debug!(%cause, "will prime caches");
            let num_worker_threads = self.config.prime_caches_num_threads();

            self.task_pool.handle.spawn_with_sender({
                let analysis = self.snapshot().analysis;
                move |sender| {
                    sender.send(Task::PrimeCaches(PrimeCachesProgress::Begin)).unwrap();
                    let res = analysis.parallel_prime_caches(num_worker_threads, |progress| {
                        let report = PrimeCachesProgress::Report(progress);
                        sender.send(Task::PrimeCaches(report)).unwrap();
                    });
                    // `Err` here means the priming run was cancelled.
                    sender
                        .send(Task::PrimeCaches(PrimeCachesProgress::End {
                            cancelled: res.is_err(),
                        }))
                        .unwrap();
                }
            });
        }

        // Report status only when it actually changed.
        let status = self.current_status();
        if self.last_reported_status.as_ref() != Some(&status) {
            self.last_reported_status = Some(status.clone());

            if let (lsp_ext::Health::Error, Some(message)) = (status.health, &status.message) {
                self.show_message(lsp_types::MessageType::ERROR, message.clone());
            }

            if self.config.server_status_notification() {
                self.send_notification::<lsp_ext::ServerStatusNotification>(status);
            }
        }

        let loop_duration = loop_start.elapsed();
        if loop_duration > Duration::from_millis(100) && was_quiescent {
            tracing::warn!("overly long loop turn: {:?}", loop_duration);
            self.poke_rust_analyzer_developer(format!(
                "overly long loop turn: {:?}",
                loop_duration
            ));
        }
        Ok(())
    }
 +
 +    fn handle_task(&mut self, prime_caches_progress: &mut Vec<PrimeCachesProgress>, task: Task) {
 +        match task {
 +            Task::Response(response) => self.respond(response),
 +            Task::Retry(req) => self.on_request(req),
 +            Task::Diagnostics(diagnostics_per_file) => {
 +                for (file_id, diagnostics) in diagnostics_per_file {
 +                    self.diagnostics.set_native_diagnostics(file_id, diagnostics)
 +                }
 +            }
 +            Task::PrimeCaches(progress) => match progress {
 +                PrimeCachesProgress::Begin => prime_caches_progress.push(progress),
 +                PrimeCachesProgress::Report(_) => {
 +                    match prime_caches_progress.last_mut() {
 +                        Some(last @ PrimeCachesProgress::Report(_)) => {
 +                            // Coalesce subsequent update events.
 +                            *last = progress;
 +                        }
 +                        _ => prime_caches_progress.push(progress),
 +                    }
 +                }
 +                PrimeCachesProgress::End { .. } => prime_caches_progress.push(progress),
 +            },
 +            Task::FetchWorkspace(progress) => {
 +                let (state, msg) = match progress {
 +                    ProjectWorkspaceProgress::Begin => (Progress::Begin, None),
 +                    ProjectWorkspaceProgress::Report(msg) => (Progress::Report, Some(msg)),
 +                    ProjectWorkspaceProgress::End(workspaces) => {
 +                        self.fetch_workspaces_queue.op_completed(workspaces);
 +
 +                        let old = Arc::clone(&self.workspaces);
 +                        self.switch_workspaces("fetched workspace".to_string());
 +                        let workspaces_updated = !Arc::ptr_eq(&old, &self.workspaces);
 +
 +                        if self.config.run_build_scripts() && workspaces_updated {
 +                            self.fetch_build_data_queue.request_op(format!("workspace updated"));
 +                        }
 +
 +                        (Progress::End, None)
 +                    }
 +                };
 +
 +                self.report_progress("Fetching", state, msg, None);
 +            }
 +            Task::FetchBuildData(progress) => {
 +                let (state, msg) = match progress {
 +                    BuildDataProgress::Begin => (Some(Progress::Begin), None),
 +                    BuildDataProgress::Report(msg) => (Some(Progress::Report), Some(msg)),
 +                    BuildDataProgress::End(build_data_result) => {
 +                        self.fetch_build_data_queue.op_completed(build_data_result);
 +
 +                        self.switch_workspaces("fetched build data".to_string());
 +
 +                        (Some(Progress::End), None)
 +                    }
 +                };
 +
 +                if let Some(state) = state {
 +                    self.report_progress("Loading", state, msg, None);
 +                }
 +            }
 +        }
 +    }
 +
 +    fn handle_vfs_msg(&mut self, message: vfs::loader::Message) {
 +        match message {
 +            vfs::loader::Message::Loaded { files } => {
 +                let vfs = &mut self.vfs.write().0;
 +                for (path, contents) in files {
 +                    let path = VfsPath::from(path);
 +                    if !self.mem_docs.contains(&path) {
 +                        vfs.set_file_contents(path, contents);
 +                    }
 +                }
 +            }
 +            vfs::loader::Message::Progress { n_total, n_done, config_version } => {
 +                always!(config_version <= self.vfs_config_version);
 +
 +                self.vfs_progress_config_version = config_version;
 +                self.vfs_progress_n_total = n_total;
 +                self.vfs_progress_n_done = n_done;
 +
 +                let state = if n_done == 0 {
 +                    Progress::Begin
 +                } else if n_done < n_total {
 +                    Progress::Report
 +                } else {
 +                    assert_eq!(n_done, n_total);
 +                    Progress::End
 +                };
 +                self.report_progress(
 +                    "Roots Scanned",
 +                    state,
 +                    Some(format!("{}/{}", n_done, n_total)),
 +                    Some(Progress::fraction(n_done, n_total)),
 +                )
 +            }
 +        }
 +    }
 +
 +    fn handle_flycheck_msg(&mut self, message: flycheck::Message) {
 +        match message {
 +            flycheck::Message::AddDiagnostic { id, workspace_root, diagnostic } => {
 +                let snap = self.snapshot();
 +                let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp(
 +                    &self.config.diagnostics_map(),
 +                    &diagnostic,
 +                    &workspace_root,
 +                    &snap,
 +                );
 +                for diag in diagnostics {
 +                    match url_to_file_id(&self.vfs.read().0, &diag.url) {
 +                        Ok(file_id) => self.diagnostics.add_check_diagnostic(
 +                            id,
 +                            file_id,
 +                            diag.diagnostic,
 +                            diag.fix,
 +                        ),
 +                        Err(err) => {
 +                            tracing::error!("File with cargo diagnostic not found in VFS: {}", err);
 +                        }
 +                    };
 +                }
 +            }
 +
 +            flycheck::Message::Progress { id, progress } => {
 +                let (state, message) = match progress {
 +                    flycheck::Progress::DidStart => {
 +                        self.diagnostics.clear_check(id);
 +                        (Progress::Begin, None)
 +                    }
 +                    flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)),
 +                    flycheck::Progress::DidCancel => (Progress::End, None),
++                    flycheck::Progress::DidFailToRestart(err) => {
++                        self.show_and_log_error(
++                            "cargo check failed".to_string(),
++                            Some(err.to_string()),
++                        );
++                        return;
++                    }
 +                    flycheck::Progress::DidFinish(result) => {
 +                        if let Err(err) = result {
 +                            self.show_and_log_error(
 +                                "cargo check failed".to_string(),
 +                                Some(err.to_string()),
 +                            );
 +                        }
 +                        (Progress::End, None)
 +                    }
 +                };
 +
 +                // When we're running multiple flychecks, we have to include a disambiguator in
 +                // the title, or the editor complains. Note that this is a user-facing string.
 +                let title = if self.flycheck.len() == 1 {
 +                    match self.config.flycheck() {
 +                        Some(config) => format!("{}", config),
 +                        None => "cargo check".to_string(),
 +                    }
 +                } else {
 +                    format!("cargo check (#{})", id + 1)
 +                };
 +                self.report_progress(&title, state, message, None);
 +            }
 +        }
 +    }
 +
 +    /// Registers and handles a request. This should only be called once per incoming request.
 +    fn on_new_request(&mut self, request_received: Instant, req: Request) {
 +        self.register_request(&req, request_received);
 +        self.on_request(req);
 +    }
 +
 +    /// Handles a request.
 +    fn on_request(&mut self, req: Request) {
 +        if self.shutdown_requested {
 +            self.respond(lsp_server::Response::new_err(
 +                req.id,
 +                lsp_server::ErrorCode::InvalidRequest as i32,
 +                "Shutdown already requested.".to_owned(),
 +            ));
 +            return;
 +        }
 +
 +        // Avoid flashing a bunch of unresolved references during initial load.
 +        if self.workspaces.is_empty() && !self.is_quiescent() {
 +            self.respond(lsp_server::Response::new_err(
 +                req.id,
 +                lsp_server::ErrorCode::ContentModified as i32,
 +                "waiting for cargo metadata or cargo check".to_owned(),
 +            ));
 +            return;
 +        }
 +
 +        RequestDispatcher { req: Some(req), global_state: self }
 +            .on_sync_mut::<lsp_types::request::Shutdown>(|s, ()| {
 +                s.shutdown_requested = true;
 +                Ok(())
 +            })
 +            .on_sync_mut::<lsp_ext::ReloadWorkspace>(handlers::handle_workspace_reload)
 +            .on_sync_mut::<lsp_ext::MemoryUsage>(handlers::handle_memory_usage)
 +            .on_sync_mut::<lsp_ext::ShuffleCrateGraph>(handlers::handle_shuffle_crate_graph)
 +            .on_sync_mut::<lsp_ext::CancelFlycheck>(handlers::handle_cancel_flycheck)
 +            .on_sync::<lsp_ext::JoinLines>(handlers::handle_join_lines)
 +            .on_sync::<lsp_ext::OnEnter>(handlers::handle_on_enter)
 +            .on_sync::<lsp_types::request::SelectionRangeRequest>(handlers::handle_selection_range)
 +            .on_sync::<lsp_ext::MatchingBrace>(handlers::handle_matching_brace)
 +            .on::<lsp_ext::AnalyzerStatus>(handlers::handle_analyzer_status)
 +            .on::<lsp_ext::SyntaxTree>(handlers::handle_syntax_tree)
 +            .on::<lsp_ext::ViewHir>(handlers::handle_view_hir)
 +            .on::<lsp_ext::ViewFileText>(handlers::handle_view_file_text)
 +            .on::<lsp_ext::ViewCrateGraph>(handlers::handle_view_crate_graph)
 +            .on::<lsp_ext::ViewItemTree>(handlers::handle_view_item_tree)
 +            .on::<lsp_ext::ExpandMacro>(handlers::handle_expand_macro)
 +            .on::<lsp_ext::ParentModule>(handlers::handle_parent_module)
 +            .on::<lsp_ext::Runnables>(handlers::handle_runnables)
 +            .on::<lsp_ext::RelatedTests>(handlers::handle_related_tests)
 +            .on::<lsp_ext::CodeActionRequest>(handlers::handle_code_action)
 +            .on::<lsp_ext::CodeActionResolveRequest>(handlers::handle_code_action_resolve)
 +            .on::<lsp_ext::HoverRequest>(handlers::handle_hover)
 +            .on::<lsp_ext::ExternalDocs>(handlers::handle_open_docs)
 +            .on::<lsp_ext::OpenCargoToml>(handlers::handle_open_cargo_toml)
 +            .on::<lsp_ext::MoveItem>(handlers::handle_move_item)
 +            .on::<lsp_ext::WorkspaceSymbol>(handlers::handle_workspace_symbol)
 +            .on::<lsp_ext::OnTypeFormatting>(handlers::handle_on_type_formatting)
 +            .on::<lsp_types::request::DocumentSymbolRequest>(handlers::handle_document_symbol)
 +            .on::<lsp_types::request::GotoDefinition>(handlers::handle_goto_definition)
 +            .on::<lsp_types::request::GotoDeclaration>(handlers::handle_goto_declaration)
 +            .on::<lsp_types::request::GotoImplementation>(handlers::handle_goto_implementation)
 +            .on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition)
 +            .on::<lsp_types::request::InlayHintRequest>(handlers::handle_inlay_hints)
 +            .on::<lsp_types::request::InlayHintResolveRequest>(handlers::handle_inlay_hints_resolve)
 +            .on::<lsp_types::request::Completion>(handlers::handle_completion)
 +            .on::<lsp_types::request::ResolveCompletionItem>(handlers::handle_completion_resolve)
 +            .on::<lsp_types::request::CodeLensRequest>(handlers::handle_code_lens)
 +            .on::<lsp_types::request::CodeLensResolve>(handlers::handle_code_lens_resolve)
 +            .on::<lsp_types::request::FoldingRangeRequest>(handlers::handle_folding_range)
 +            .on::<lsp_types::request::SignatureHelpRequest>(handlers::handle_signature_help)
 +            .on::<lsp_types::request::PrepareRenameRequest>(handlers::handle_prepare_rename)
 +            .on::<lsp_types::request::Rename>(handlers::handle_rename)
 +            .on::<lsp_types::request::References>(handlers::handle_references)
 +            .on::<lsp_types::request::Formatting>(handlers::handle_formatting)
 +            .on::<lsp_types::request::RangeFormatting>(handlers::handle_range_formatting)
 +            .on::<lsp_types::request::DocumentHighlightRequest>(handlers::handle_document_highlight)
 +            .on::<lsp_types::request::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare)
 +            .on::<lsp_types::request::CallHierarchyIncomingCalls>(
 +                handlers::handle_call_hierarchy_incoming,
 +            )
 +            .on::<lsp_types::request::CallHierarchyOutgoingCalls>(
 +                handlers::handle_call_hierarchy_outgoing,
 +            )
 +            .on::<lsp_types::request::SemanticTokensFullRequest>(
 +                handlers::handle_semantic_tokens_full,
 +            )
 +            .on::<lsp_types::request::SemanticTokensFullDeltaRequest>(
 +                handlers::handle_semantic_tokens_full_delta,
 +            )
 +            .on::<lsp_types::request::SemanticTokensRangeRequest>(
 +                handlers::handle_semantic_tokens_range,
 +            )
 +            .on::<lsp_types::request::WillRenameFiles>(handlers::handle_will_rename_files)
 +            .on::<lsp_ext::Ssr>(handlers::handle_ssr)
 +            .finish();
 +    }
 +
 +    /// Handles an incoming notification.
 +    fn on_notification(&mut self, not: Notification) -> Result<()> {
 +        NotificationDispatcher { not: Some(not), global_state: self }
 +            .on::<lsp_types::notification::Cancel>(|this, params| {
 +                let id: lsp_server::RequestId = match params.id {
 +                    lsp_types::NumberOrString::Number(id) => id.into(),
 +                    lsp_types::NumberOrString::String(id) => id.into(),
 +                };
 +                this.cancel(id);
 +                Ok(())
 +            })?
 +            .on::<lsp_types::notification::WorkDoneProgressCancel>(|_this, _params| {
 +                // Just ignore this. It is OK to continue sending progress
 +                // notifications for this token, as the client can't know when
 +                // we accepted notification.
 +                Ok(())
 +            })?
 +            .on::<lsp_types::notification::DidOpenTextDocument>(|this, params| {
 +                if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
 +                    let already_exists = this
 +                        .mem_docs
 +                        .insert(path.clone(), DocumentData::new(params.text_document.version))
 +                        .is_err();
 +                    if already_exists {
 +                        tracing::error!("duplicate DidOpenTextDocument: {}", path)
 +                    }
 +                    this.vfs
 +                        .write()
 +                        .0
 +                        .set_file_contents(path, Some(params.text_document.text.into_bytes()));
 +                }
 +                Ok(())
 +            })?
 +            .on::<lsp_types::notification::DidChangeTextDocument>(|this, params| {
 +                if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
 +                    match this.mem_docs.get_mut(&path) {
 +                        Some(doc) => {
 +                            // The version passed in DidChangeTextDocument is the version after all edits are applied
 +                            // so we should apply it before the vfs is notified.
 +                            doc.version = params.text_document.version;
 +                        }
 +                        None => {
 +                            tracing::error!("unexpected DidChangeTextDocument: {}", path);
 +                            return Ok(());
 +                        }
 +                    };
 +
 +                    let vfs = &mut this.vfs.write().0;
 +                    let file_id = vfs.file_id(&path).unwrap();
 +                    let mut text = String::from_utf8(vfs.file_contents(file_id).to_vec()).unwrap();
 +                    apply_document_changes(&mut text, params.content_changes);
 +
 +                    vfs.set_file_contents(path, Some(text.into_bytes()));
 +                }
 +                Ok(())
 +            })?
 +            .on::<lsp_types::notification::DidCloseTextDocument>(|this, params| {
 +                if let Ok(path) = from_proto::vfs_path(&params.text_document.uri) {
 +                    if this.mem_docs.remove(&path).is_err() {
 +                        tracing::error!("orphan DidCloseTextDocument: {}", path);
 +                    }
 +
 +                    this.semantic_tokens_cache.lock().remove(&params.text_document.uri);
 +
 +                    if let Some(path) = path.as_path() {
 +                        this.loader.handle.invalidate(path.to_path_buf());
 +                    }
 +                }
 +                Ok(())
 +            })?
 +            .on::<lsp_types::notification::DidSaveTextDocument>(|this, params| {
 +                let mut updated = false;
 +                if let Ok(vfs_path) = from_proto::vfs_path(&params.text_document.uri) {
 +                    let (vfs, _) = &*this.vfs.read();
 +
 +                    // Trigger flychecks for all workspaces that depend on the saved file
 +                    if let Some(file_id) = vfs.file_id(&vfs_path) {
 +                        let analysis = this.analysis_host.analysis();
 +                        // Crates containing or depending on the saved file
 +                        let crate_ids: Vec<_> = analysis
 +                            .crate_for(file_id)?
 +                            .into_iter()
 +                            .flat_map(|id| {
 +                                this.analysis_host
 +                                    .raw_database()
 +                                    .crate_graph()
 +                                    .transitive_rev_deps(id)
 +                            })
 +                            .sorted()
 +                            .unique()
 +                            .collect();
 +
 +                        let crate_root_paths: Vec<_> = crate_ids
 +                            .iter()
 +                            .filter_map(|&crate_id| {
 +                                analysis
 +                                    .crate_root(crate_id)
 +                                    .map(|file_id| {
 +                                        vfs.file_path(file_id).as_path().map(ToOwned::to_owned)
 +                                    })
 +                                    .transpose()
 +                            })
 +                            .collect::<ide::Cancellable<_>>()?;
 +                        let crate_root_paths: Vec<_> =
 +                            crate_root_paths.iter().map(Deref::deref).collect();
 +
 +                        // Find all workspaces that have at least one target containing the saved file
 +                        let workspace_ids =
 +                            this.workspaces.iter().enumerate().filter(|(_, ws)| match ws {
 +                                project_model::ProjectWorkspace::Cargo { cargo, .. } => {
 +                                    cargo.packages().any(|pkg| {
 +                                        cargo[pkg].targets.iter().any(|&it| {
 +                                            crate_root_paths.contains(&cargo[it].root.as_path())
 +                                        })
 +                                    })
 +                                }
 +                                project_model::ProjectWorkspace::Json { project, .. } => project
 +                                    .crates()
 +                                    .any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c)),
 +                                project_model::ProjectWorkspace::DetachedFiles { .. } => false,
 +                            });
 +
 +                        // Find and trigger corresponding flychecks
 +                        for flycheck in &this.flycheck {
 +                            for (id, _) in workspace_ids.clone() {
 +                                if id == flycheck.id() {
 +                                    updated = true;
 +                                    flycheck.restart();
 +                                    continue;
 +                                }
 +                            }
 +                        }
 +                    }
 +
 +                    // Re-fetch workspaces if a workspace related file has changed
 +                    if let Some(abs_path) = vfs_path.as_path() {
 +                        if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) {
 +                            this.fetch_workspaces_queue
 +                                .request_op(format!("DidSaveTextDocument {}", abs_path.display()));
 +                        }
 +                    }
 +                }
 +
 +                // No specific flycheck was triggered, so let's trigger all of them.
 +                if !updated {
 +                    for flycheck in &this.flycheck {
 +                        flycheck.restart();
 +                    }
 +                }
 +                Ok(())
 +            })?
 +            .on::<lsp_types::notification::DidChangeConfiguration>(|this, _params| {
 +                // As stated in https://github.com/microsoft/language-server-protocol/issues/676,
 +                // this notification's parameters should be ignored and the actual config queried separately.
 +                this.send_request::<lsp_types::request::WorkspaceConfiguration>(
 +                    lsp_types::ConfigurationParams {
 +                        items: vec![lsp_types::ConfigurationItem {
 +                            scope_uri: None,
 +                            section: Some("rust-analyzer".to_string()),
 +                        }],
 +                    },
 +                    |this, resp| {
 +                        tracing::debug!("config update response: '{:?}", resp);
 +                        let lsp_server::Response { error, result, .. } = resp;
 +
 +                        match (error, result) {
 +                            (Some(err), _) => {
 +                                tracing::error!("failed to fetch the server settings: {:?}", err)
 +                            }
 +                            (None, Some(mut configs)) => {
 +                                if let Some(json) = configs.get_mut(0) {
 +                                    // Note that json can be null according to the spec if the client can't
 +                                    // provide a configuration. This is handled in Config::update below.
 +                                    let mut config = Config::clone(&*this.config);
 +                                    if let Err(error) = config.update(json.take()) {
 +                                        this.show_message(
 +                                            lsp_types::MessageType::WARNING,
 +                                            error.to_string(),
 +                                        );
 +                                    }
 +                                    this.update_configuration(config);
 +                                }
 +                            }
 +                            (None, None) => tracing::error!(
 +                                "received empty server settings response from the client"
 +                            ),
 +                        }
 +                    },
 +                );
 +
 +                Ok(())
 +            })?
 +            .on::<lsp_types::notification::DidChangeWatchedFiles>(|this, params| {
 +                for change in params.changes {
 +                    if let Ok(path) = from_proto::abs_path(&change.uri) {
 +                        this.loader.handle.invalidate(path);
 +                    }
 +                }
 +                Ok(())
 +            })?
 +            .finish();
 +        Ok(())
 +    }
 +
 +    fn update_diagnostics(&mut self) {
 +        let subscriptions = self
 +            .mem_docs
 +            .iter()
 +            .map(|path| self.vfs.read().0.file_id(path).unwrap())
 +            .collect::<Vec<_>>();
 +
 +        tracing::trace!("updating notifications for {:?}", subscriptions);
 +
 +        let snapshot = self.snapshot();
 +        self.task_pool.handle.spawn(move || {
 +            let diagnostics = subscriptions
 +                .into_iter()
 +                .filter_map(|file_id| {
 +                    handlers::publish_diagnostics(&snapshot, file_id)
 +                        .ok()
 +                        .map(|diags| (file_id, diags))
 +                })
 +                .collect::<Vec<_>>();
 +            Task::Diagnostics(diagnostics)
 +        })
 +    }
 +}
index 6c78b5df1a7050472d9cacb304bb799792aa4ee0,0000000000000000000000000000000000000000..c48410ed55e98472d4f0bc774703225e80048cad
mode 100644,000000..100644
--- /dev/null
@@@ -1,301 -1,0 +1,324 @@@
-     ($(($ident:ident, $string:literal)),*$(,)?) => {
-         $(pub(crate) const $ident: SemanticTokenType = SemanticTokenType::new($string);)*
 +//! Semantic Tokens helpers
 +
 +use std::ops;
 +
 +use lsp_types::{
 +    Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
 +    SemanticTokensEdit,
 +};
 +
 +macro_rules! define_semantic_token_types {
-             SemanticTokenType::COMMENT,
-             SemanticTokenType::KEYWORD,
-             SemanticTokenType::STRING,
-             SemanticTokenType::NUMBER,
-             SemanticTokenType::REGEXP,
-             SemanticTokenType::OPERATOR,
-             SemanticTokenType::NAMESPACE,
-             SemanticTokenType::TYPE,
-             SemanticTokenType::STRUCT,
-             SemanticTokenType::CLASS,
-             SemanticTokenType::INTERFACE,
-             SemanticTokenType::ENUM,
-             SemanticTokenType::ENUM_MEMBER,
-             SemanticTokenType::TYPE_PARAMETER,
-             SemanticTokenType::FUNCTION,
-             SemanticTokenType::METHOD,
-             SemanticTokenType::PROPERTY,
-             SemanticTokenType::MACRO,
-             SemanticTokenType::VARIABLE,
-             SemanticTokenType::PARAMETER,
-             $($ident),*
++    (
++        standard {
++            $($standard:ident),*$(,)?
++        }
++        custom {
++            $(($custom:ident, $string:literal)),*$(,)?
++        }
++
++    ) => {
++        $(pub(crate) const $standard: SemanticTokenType = SemanticTokenType::$standard;)*
++        $(pub(crate) const $custom: SemanticTokenType = SemanticTokenType::new($string);)*
 +
 +        pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
-     (ANGLE, "angle"),
-     (ARITHMETIC, "arithmetic"),
-     (ATTRIBUTE, "attribute"),
-     (ATTRIBUTE_BRACKET, "attributeBracket"),
-     (BITWISE, "bitwise"),
-     (BOOLEAN, "boolean"),
-     (BRACE, "brace"),
-     (BRACKET, "bracket"),
-     (BUILTIN_ATTRIBUTE, "builtinAttribute"),
-     (BUILTIN_TYPE, "builtinType"),
-     (CHAR, "character"),
-     (COLON, "colon"),
-     (COMMA, "comma"),
-     (COMPARISON, "comparison"),
-     (CONST_PARAMETER, "constParameter"),
-     (DERIVE, "derive"),
-     (DERIVE_HELPER, "deriveHelper"),
-     (DOT, "dot"),
-     (ESCAPE_SEQUENCE, "escapeSequence"),
-     (FORMAT_SPECIFIER, "formatSpecifier"),
-     (GENERIC, "generic"),
-     (LABEL, "label"),
-     (LIFETIME, "lifetime"),
-     (LOGICAL, "logical"),
-     (MACRO_BANG, "macroBang"),
-     (OPERATOR, "operator"),
-     (PARENTHESIS, "parenthesis"),
-     (PUNCTUATION, "punctuation"),
-     (SELF_KEYWORD, "selfKeyword"),
-     (SELF_TYPE_KEYWORD, "selfTypeKeyword"),
-     (SEMICOLON, "semicolon"),
-     (TYPE_ALIAS, "typeAlias"),
-     (TOOL_MODULE, "toolModule"),
-     (UNION, "union"),
-     (UNRESOLVED_REFERENCE, "unresolvedReference"),
++            $(SemanticTokenType::$standard,)*
++            $($custom),*
 +        ];
 +    };
 +}
 +
 +define_semantic_token_types![
-     ($(($ident:ident, $string:literal)),*$(,)?) => {
-         $(pub(crate) const $ident: SemanticTokenModifier = SemanticTokenModifier::new($string);)*
++    standard {
++        COMMENT,
++        DECORATOR,
++        ENUM_MEMBER,
++        ENUM,
++        FUNCTION,
++        INTERFACE,
++        KEYWORD,
++        MACRO,
++        METHOD,
++        NAMESPACE,
++        NUMBER,
++        OPERATOR,
++        PARAMETER,
++        PROPERTY,
++        STRING,
++        STRUCT,
++        TYPE_PARAMETER,
++        VARIABLE,
++    }
++
++    custom {
++        (ANGLE, "angle"),
++        (ARITHMETIC, "arithmetic"),
++        (ATTRIBUTE, "attribute"),
++        (ATTRIBUTE_BRACKET, "attributeBracket"),
++        (BITWISE, "bitwise"),
++        (BOOLEAN, "boolean"),
++        (BRACE, "brace"),
++        (BRACKET, "bracket"),
++        (BUILTIN_ATTRIBUTE, "builtinAttribute"),
++        (BUILTIN_TYPE, "builtinType"),
++        (CHAR, "character"),
++        (COLON, "colon"),
++        (COMMA, "comma"),
++        (COMPARISON, "comparison"),
++        (CONST_PARAMETER, "constParameter"),
++        (DERIVE, "derive"),
++        (DERIVE_HELPER, "deriveHelper"),
++        (DOT, "dot"),
++        (ESCAPE_SEQUENCE, "escapeSequence"),
++        (FORMAT_SPECIFIER, "formatSpecifier"),
++        (GENERIC, "generic"),
++        (LABEL, "label"),
++        (LIFETIME, "lifetime"),
++        (LOGICAL, "logical"),
++        (MACRO_BANG, "macroBang"),
++        (PARENTHESIS, "parenthesis"),
++        (PUNCTUATION, "punctuation"),
++        (SELF_KEYWORD, "selfKeyword"),
++        (SELF_TYPE_KEYWORD, "selfTypeKeyword"),
++        (SEMICOLON, "semicolon"),
++        (TYPE_ALIAS, "typeAlias"),
++        (TOOL_MODULE, "toolModule"),
++        (UNION, "union"),
++        (UNRESOLVED_REFERENCE, "unresolvedReference"),
++    }
 +];
 +
 +macro_rules! define_semantic_token_modifiers {
-             SemanticTokenModifier::DOCUMENTATION,
-             SemanticTokenModifier::DECLARATION,
-             SemanticTokenModifier::DEFINITION,
-             SemanticTokenModifier::STATIC,
-             SemanticTokenModifier::ABSTRACT,
-             SemanticTokenModifier::DEPRECATED,
-             SemanticTokenModifier::READONLY,
-             SemanticTokenModifier::DEFAULT_LIBRARY,
-             $($ident),*
++    (
++        standard {
++            $($standard:ident),*$(,)?
++        }
++        custom {
++            $(($custom:ident, $string:literal)),*$(,)?
++        }
++
++    ) => {
++
++        $(pub(crate) const $standard: SemanticTokenModifier = SemanticTokenModifier::$standard;)*
++        $(pub(crate) const $custom: SemanticTokenModifier = SemanticTokenModifier::new($string);)*
 +
 +        pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
-     (ASYNC, "async"),
-     (ATTRIBUTE_MODIFIER, "attribute"),
-     (CALLABLE, "callable"),
-     (CONSTANT, "constant"),
-     (CONSUMING, "consuming"),
-     (CONTROL_FLOW, "controlFlow"),
-     (CRATE_ROOT, "crateRoot"),
-     (INJECTED, "injected"),
-     (INTRA_DOC_LINK, "intraDocLink"),
-     (LIBRARY, "library"),
-     (MUTABLE, "mutable"),
-     (PUBLIC, "public"),
-     (REFERENCE, "reference"),
-     (TRAIT_MODIFIER, "trait"),
-     (UNSAFE, "unsafe"),
++            $(SemanticTokenModifier::$standard,)*
++            $($custom),*
 +        ];
 +    };
 +}
 +
 +define_semantic_token_modifiers![
++    standard {
++        DOCUMENTATION,
++        DECLARATION,
++        STATIC,
++        DEFAULT_LIBRARY,
++    }
++    custom {
++        (ASYNC, "async"),
++        (ATTRIBUTE_MODIFIER, "attribute"),
++        (CALLABLE, "callable"),
++        (CONSTANT, "constant"),
++        (CONSUMING, "consuming"),
++        (CONTROL_FLOW, "controlFlow"),
++        (CRATE_ROOT, "crateRoot"),
++        (INJECTED, "injected"),
++        (INTRA_DOC_LINK, "intraDocLink"),
++        (LIBRARY, "library"),
++        (MUTABLE, "mutable"),
++        (PUBLIC, "public"),
++        (REFERENCE, "reference"),
++        (TRAIT_MODIFIER, "trait"),
++        (UNSAFE, "unsafe"),
++    }
 +];
 +
 +#[derive(Default)]
 +pub(crate) struct ModifierSet(pub(crate) u32);
 +
 +impl ops::BitOrAssign<SemanticTokenModifier> for ModifierSet {
 +    fn bitor_assign(&mut self, rhs: SemanticTokenModifier) {
 +        let idx = SUPPORTED_MODIFIERS.iter().position(|it| it == &rhs).unwrap();
 +        self.0 |= 1 << idx;
 +    }
 +}
 +
 +/// Tokens are encoded relative to each other.
 +///
 +/// This is a direct port of <https://github.com/microsoft/vscode-languageserver-node/blob/f425af9de46a0187adb78ec8a46b9b2ce80c5412/server/src/sematicTokens.proposed.ts#L45>
 +pub(crate) struct SemanticTokensBuilder {
 +    id: String,
 +    prev_line: u32,
 +    prev_char: u32,
 +    data: Vec<SemanticToken>,
 +}
 +
 +impl SemanticTokensBuilder {
 +    pub(crate) fn new(id: String) -> Self {
 +        SemanticTokensBuilder { id, prev_line: 0, prev_char: 0, data: Default::default() }
 +    }
 +
 +    /// Push a new token onto the builder
 +    pub(crate) fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) {
 +        let mut push_line = range.start.line as u32;
 +        let mut push_char = range.start.character as u32;
 +
 +        if !self.data.is_empty() {
 +            push_line -= self.prev_line;
 +            if push_line == 0 {
 +                push_char -= self.prev_char;
 +            }
 +        }
 +
 +        // A token cannot be multiline
 +        let token_len = range.end.character - range.start.character;
 +
 +        let token = SemanticToken {
 +            delta_line: push_line,
 +            delta_start: push_char,
 +            length: token_len as u32,
 +            token_type: token_index,
 +            token_modifiers_bitset: modifier_bitset,
 +        };
 +
 +        self.data.push(token);
 +
 +        self.prev_line = range.start.line as u32;
 +        self.prev_char = range.start.character as u32;
 +    }
 +
 +    pub(crate) fn build(self) -> SemanticTokens {
 +        SemanticTokens { result_id: Some(self.id), data: self.data }
 +    }
 +}
 +
 +/// Compute a minimal single-edit delta between two semantic token streams.
 +///
 +/// Strips the longest common prefix and suffix and emits one
 +/// `SemanticTokensEdit` replacing whatever is left in the middle
 +/// (or no edit at all if the streams are identical).
 +pub(crate) fn diff_tokens(old: &[SemanticToken], new: &[SemanticToken]) -> Vec<SemanticTokensEdit> {
 +    // Length of the common prefix, in tokens.
 +    let offset = new.iter().zip(old.iter()).take_while(|&(n, p)| n == p).count();
 +
 +    let (_, old) = old.split_at(offset);
 +    let (_, new) = new.split_at(offset);
 +
 +    // Length of the common suffix of the remainders, in tokens.
 +    let offset_from_end =
 +        new.iter().rev().zip(old.iter().rev()).take_while(|&(n, p)| n == p).count();
 +
 +    let (old, _) = old.split_at(old.len() - offset_from_end);
 +    let (new, _) = new.split_at(new.len() - offset_from_end);
 +
 +    if old.is_empty() && new.is_empty() {
 +        vec![]
 +    } else {
 +        // The lsp data field is actually a byte-diff but we
 +        // travel in tokens so `start` and `delete_count` are in multiples of the
 +        // serialized size of `SemanticToken`.
 +        vec![SemanticTokensEdit {
 +            start: 5 * offset as u32,
 +            delete_count: 5 * old.len() as u32,
 +            data: Some(new.into()),
 +        }]
 +    }
 +}
 +
 +/// Map a semantic token type to its index in the legend advertised to the
 +/// client. Panics if `ty` is not registered in `SUPPORTED_TYPES`, which would
 +/// be a bug in this crate's token tables.
 +pub(crate) fn type_index(ty: SemanticTokenType) -> u32 {
 +    SUPPORTED_TYPES.iter().position(|it| *it == ty).unwrap() as u32
 +}
 +
 +#[cfg(test)]
 +mod tests {
 +    use super::*;
 +
 +    // NOTE: `start` / `delete_count` in the expected edits are in units of
 +    // u32 words — 5 per serialized `SemanticToken`.
 +
 +    /// Build a `SemanticToken` from a
 +    /// `(delta_line, delta_start, length, token_type, modifiers)` tuple.
 +    fn from(t: (u32, u32, u32, u32, u32)) -> SemanticToken {
 +        SemanticToken {
 +            delta_line: t.0,
 +            delta_start: t.1,
 +            length: t.2,
 +            token_type: t.3,
 +            token_modifiers_bitset: t.4,
 +        }
 +    }
 +
 +    #[test]
 +    fn test_diff_insert_at_end() {
 +        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
 +        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
 +
 +        let edits = diff_tokens(&before, &after);
 +        assert_eq!(
 +            edits[0],
 +            SemanticTokensEdit {
 +                start: 10,
 +                delete_count: 0,
 +                data: Some(vec![from((11, 12, 13, 14, 15))])
 +            }
 +        );
 +    }
 +
 +    #[test]
 +    fn test_diff_insert_at_beginning() {
 +        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
 +        let after = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
 +
 +        let edits = diff_tokens(&before, &after);
 +        assert_eq!(
 +            edits[0],
 +            SemanticTokensEdit {
 +                start: 0,
 +                delete_count: 0,
 +                data: Some(vec![from((11, 12, 13, 14, 15))])
 +            }
 +        );
 +    }
 +
 +    #[test]
 +    fn test_diff_insert_in_middle() {
 +        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
 +        let after = [
 +            from((1, 2, 3, 4, 5)),
 +            from((10, 20, 30, 40, 50)),
 +            from((60, 70, 80, 90, 100)),
 +            from((6, 7, 8, 9, 10)),
 +        ];
 +
 +        let edits = diff_tokens(&before, &after);
 +        assert_eq!(
 +            edits[0],
 +            SemanticTokensEdit {
 +                start: 5,
 +                delete_count: 0,
 +                data: Some(vec![from((10, 20, 30, 40, 50)), from((60, 70, 80, 90, 100))])
 +            }
 +        );
 +    }
 +
 +    #[test]
 +    fn test_diff_remove_from_end() {
 +        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
 +        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
 +
 +        let edits = diff_tokens(&before, &after);
 +        assert_eq!(edits[0], SemanticTokensEdit { start: 10, delete_count: 5, data: Some(vec![]) });
 +    }
 +
 +    #[test]
 +    fn test_diff_remove_from_beginning() {
 +        let before = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
 +        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
 +
 +        let edits = diff_tokens(&before, &after);
 +        assert_eq!(edits[0], SemanticTokensEdit { start: 0, delete_count: 5, data: Some(vec![]) });
 +    }
 +
 +    #[test]
 +    fn test_diff_remove_from_middle() {
 +        let before = [
 +            from((1, 2, 3, 4, 5)),
 +            from((10, 20, 30, 40, 50)),
 +            from((60, 70, 80, 90, 100)),
 +            from((6, 7, 8, 9, 10)),
 +        ];
 +        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
 +
 +        let edits = diff_tokens(&before, &after);
 +        assert_eq!(edits[0], SemanticTokensEdit { start: 5, delete_count: 10, data: Some(vec![]) });
 +    }
 +}
index e7115b0732e20c6922627cd3675172cae612c686,0000000000000000000000000000000000000000..102cd602950439a4188b5006116353dd164299f8
mode 100644,000000..100644
--- /dev/null
@@@ -1,1397 -1,0 +1,1394 @@@
-     highlight_strings: bool,
 +//! Conversion of rust-analyzer specific types to lsp_types equivalents.
 +use std::{
 +    iter::once,
 +    path,
 +    sync::atomic::{AtomicU32, Ordering},
 +};
 +
 +use ide::{
 +    Annotation, AnnotationKind, Assist, AssistKind, Cancellable, CompletionItem,
 +    CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange, FileSystemEdit,
 +    Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel, InlayHint,
 +    InlayKind, Markup, NavigationTarget, ReferenceCategory, RenameError, Runnable, Severity,
 +    SignatureHelp, SourceChange, StructureNodeKind, SymbolKind, TextEdit, TextRange, TextSize,
 +};
 +use itertools::Itertools;
 +use serde_json::to_value;
 +use vfs::AbsPath;
 +
 +use crate::{
 +    cargo_target_spec::CargoTargetSpec,
 +    config::{CallInfoConfig, Config},
 +    global_state::GlobalStateSnapshot,
 +    line_index::{LineEndings, LineIndex, OffsetEncoding},
 +    lsp_ext,
 +    lsp_utils::invalid_params_error,
 +    semantic_tokens, Result,
 +};
 +
 +/// Convert a text offset into an LSP `Position`, honoring the offset
 +/// encoding (UTF-8 vs UTF-16 code units) negotiated with the client.
 +pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
 +    let line_col = line_index.index.line_col(offset);
 +    match line_index.encoding {
 +        OffsetEncoding::Utf8 => lsp_types::Position::new(line_col.line, line_col.col),
 +        OffsetEncoding::Utf16 => {
 +            // LSP defaults to UTF-16 columns; re-measure the column in UTF-16 units.
 +            let line_col = line_index.index.to_utf16(line_col);
 +            lsp_types::Position::new(line_col.line, line_col.col)
 +        }
 +    }
 +}
 +
 +/// Convert a `TextRange` into an LSP `Range` by converting both endpoints.
 +pub(crate) fn range(line_index: &LineIndex, range: TextRange) -> lsp_types::Range {
 +    let start = position(line_index, range.start());
 +    let end = position(line_index, range.end());
 +    lsp_types::Range::new(start, end)
 +}
 +
 +/// Map rust-analyzer's `SymbolKind` onto LSP's fixed symbol-kind set.
 +/// The mapping is lossy: several ide kinds share one LSP kind because LSP
 +/// has no closer equivalent.
 +pub(crate) fn symbol_kind(symbol_kind: SymbolKind) -> lsp_types::SymbolKind {
 +    match symbol_kind {
 +        SymbolKind::Function => lsp_types::SymbolKind::FUNCTION,
 +        SymbolKind::Struct => lsp_types::SymbolKind::STRUCT,
 +        SymbolKind::Enum => lsp_types::SymbolKind::ENUM,
 +        SymbolKind::Variant => lsp_types::SymbolKind::ENUM_MEMBER,
 +        SymbolKind::Trait => lsp_types::SymbolKind::INTERFACE,
 +        SymbolKind::Macro
 +        | SymbolKind::BuiltinAttr
 +        | SymbolKind::Attribute
 +        | SymbolKind::Derive
 +        | SymbolKind::DeriveHelper => lsp_types::SymbolKind::FUNCTION,
 +        SymbolKind::Module | SymbolKind::ToolModule => lsp_types::SymbolKind::MODULE,
 +        SymbolKind::TypeAlias | SymbolKind::TypeParam | SymbolKind::SelfType => {
 +            lsp_types::SymbolKind::TYPE_PARAMETER
 +        }
 +        SymbolKind::Field => lsp_types::SymbolKind::FIELD,
 +        SymbolKind::Static => lsp_types::SymbolKind::CONSTANT,
 +        SymbolKind::Const => lsp_types::SymbolKind::CONSTANT,
 +        SymbolKind::ConstParam => lsp_types::SymbolKind::CONSTANT,
 +        SymbolKind::Impl => lsp_types::SymbolKind::OBJECT,
 +        SymbolKind::Local
 +        | SymbolKind::SelfParam
 +        | SymbolKind::LifetimeParam
 +        | SymbolKind::ValueParam
 +        | SymbolKind::Label => lsp_types::SymbolKind::VARIABLE,
 +        SymbolKind::Union => lsp_types::SymbolKind::STRUCT,
 +    }
 +}
 +
 +/// Convert a document-structure node kind to an LSP symbol kind; `region`
 +/// folding markers are surfaced as namespaces.
 +pub(crate) fn structure_node_kind(kind: StructureNodeKind) -> lsp_types::SymbolKind {
 +    match kind {
 +        StructureNodeKind::SymbolKind(symbol) => symbol_kind(symbol),
 +        StructureNodeKind::Region => lsp_types::SymbolKind::NAMESPACE,
 +    }
 +}
 +
 +/// Convert a reference read/write category into the LSP highlight kind used
 +/// by `textDocument/documentHighlight`.
 +pub(crate) fn document_highlight_kind(
 +    category: ReferenceCategory,
 +) -> lsp_types::DocumentHighlightKind {
 +    match category {
 +        ReferenceCategory::Read => lsp_types::DocumentHighlightKind::READ,
 +        ReferenceCategory::Write => lsp_types::DocumentHighlightKind::WRITE,
 +    }
 +}
 +
 +/// Map ide severity to LSP severity. Weak warnings deliberately become
 +/// `HINT` so clients render them unobtrusively.
 +pub(crate) fn diagnostic_severity(severity: Severity) -> lsp_types::DiagnosticSeverity {
 +    match severity {
 +        Severity::Error => lsp_types::DiagnosticSeverity::ERROR,
 +        Severity::WeakWarning => lsp_types::DiagnosticSeverity::HINT,
 +    }
 +}
 +
 +/// Render ide documentation as LSP markdown markup, running it through the
 +/// crate's rustdoc-to-markdown formatter first.
 +pub(crate) fn documentation(documentation: Documentation) -> lsp_types::Documentation {
 +    let value = crate::markdown::format_docs(documentation.as_str());
 +    let markup_content = lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value };
 +    lsp_types::Documentation::MarkupContent(markup_content)
 +}
 +
 +/// Map ide completion-item kinds onto LSP's fixed completion kinds.
 +/// As with `symbol_kind`, the mapping is lossy where LSP has no exact match.
 +pub(crate) fn completion_item_kind(
 +    completion_item_kind: CompletionItemKind,
 +) -> lsp_types::CompletionItemKind {
 +    match completion_item_kind {
 +        CompletionItemKind::Binding => lsp_types::CompletionItemKind::VARIABLE,
 +        CompletionItemKind::BuiltinType => lsp_types::CompletionItemKind::STRUCT,
 +        CompletionItemKind::InferredType => lsp_types::CompletionItemKind::SNIPPET,
 +        CompletionItemKind::Keyword => lsp_types::CompletionItemKind::KEYWORD,
 +        CompletionItemKind::Method => lsp_types::CompletionItemKind::METHOD,
 +        CompletionItemKind::Snippet => lsp_types::CompletionItemKind::SNIPPET,
 +        CompletionItemKind::UnresolvedReference => lsp_types::CompletionItemKind::REFERENCE,
 +        CompletionItemKind::SymbolKind(symbol) => match symbol {
 +            SymbolKind::Attribute => lsp_types::CompletionItemKind::FUNCTION,
 +            SymbolKind::Const => lsp_types::CompletionItemKind::CONSTANT,
 +            SymbolKind::ConstParam => lsp_types::CompletionItemKind::TYPE_PARAMETER,
 +            SymbolKind::Derive => lsp_types::CompletionItemKind::FUNCTION,
 +            SymbolKind::DeriveHelper => lsp_types::CompletionItemKind::FUNCTION,
 +            SymbolKind::Enum => lsp_types::CompletionItemKind::ENUM,
 +            SymbolKind::Field => lsp_types::CompletionItemKind::FIELD,
 +            SymbolKind::Function => lsp_types::CompletionItemKind::FUNCTION,
 +            SymbolKind::Impl => lsp_types::CompletionItemKind::TEXT,
 +            SymbolKind::Label => lsp_types::CompletionItemKind::VARIABLE,
 +            SymbolKind::LifetimeParam => lsp_types::CompletionItemKind::TYPE_PARAMETER,
 +            SymbolKind::Local => lsp_types::CompletionItemKind::VARIABLE,
 +            SymbolKind::Macro => lsp_types::CompletionItemKind::FUNCTION,
 +            SymbolKind::Module => lsp_types::CompletionItemKind::MODULE,
 +            SymbolKind::SelfParam => lsp_types::CompletionItemKind::VALUE,
 +            SymbolKind::SelfType => lsp_types::CompletionItemKind::TYPE_PARAMETER,
 +            SymbolKind::Static => lsp_types::CompletionItemKind::VALUE,
 +            SymbolKind::Struct => lsp_types::CompletionItemKind::STRUCT,
 +            SymbolKind::Trait => lsp_types::CompletionItemKind::INTERFACE,
 +            SymbolKind::TypeAlias => lsp_types::CompletionItemKind::STRUCT,
 +            SymbolKind::TypeParam => lsp_types::CompletionItemKind::TYPE_PARAMETER,
 +            SymbolKind::Union => lsp_types::CompletionItemKind::STRUCT,
 +            SymbolKind::ValueParam => lsp_types::CompletionItemKind::VALUE,
 +            SymbolKind::Variant => lsp_types::CompletionItemKind::ENUM_MEMBER,
 +            SymbolKind::BuiltinAttr => lsp_types::CompletionItemKind::FUNCTION,
 +            SymbolKind::ToolModule => lsp_types::CompletionItemKind::MODULE,
 +        },
 +    }
 +}
 +
 +/// Convert an ide `Indel` (insert/delete) into an LSP `TextEdit`,
 +/// re-normalizing inserted newlines to the document's line endings.
 +pub(crate) fn text_edit(line_index: &LineIndex, indel: Indel) -> lsp_types::TextEdit {
 +    let range = range(line_index, indel.delete);
 +    let new_text = match line_index.endings {
 +        LineEndings::Unix => indel.insert,
 +        LineEndings::Dos => indel.insert.replace('\n', "\r\n"),
 +    };
 +    lsp_types::TextEdit { range, new_text }
 +}
 +
 +/// Build the completion edit, using LSP's `InsertReplaceEdit` form when the
 +/// client supports it: `insert` covers from edit start to the cursor,
 +/// `replace` covers the full original range.
 +pub(crate) fn completion_text_edit(
 +    line_index: &LineIndex,
 +    insert_replace_support: Option<lsp_types::Position>,
 +    indel: Indel,
 +) -> lsp_types::CompletionTextEdit {
 +    let text_edit = text_edit(line_index, indel);
 +    match insert_replace_support {
 +        Some(cursor_pos) => lsp_types::InsertReplaceEdit {
 +            new_text: text_edit.new_text,
 +            insert: lsp_types::Range { start: text_edit.range.start, end: cursor_pos },
 +            replace: text_edit.range,
 +        }
 +        .into(),
 +        None => text_edit.into(),
 +    }
 +}
 +
 +/// Convert an `Indel` into rust-analyzer's extension `SnippetTextEdit`,
 +/// tagging it with `InsertTextFormat::SNIPPET` only when requested.
 +pub(crate) fn snippet_text_edit(
 +    line_index: &LineIndex,
 +    is_snippet: bool,
 +    indel: Indel,
 +) -> lsp_ext::SnippetTextEdit {
 +    let text_edit = text_edit(line_index, indel);
 +    let insert_text_format =
 +        if is_snippet { Some(lsp_types::InsertTextFormat::SNIPPET) } else { None };
 +    lsp_ext::SnippetTextEdit {
 +        range: text_edit.range,
 +        new_text: text_edit.new_text,
 +        insert_text_format,
 +        annotation_id: None,
 +    }
 +}
 +
 +/// Convert every indel of a `TextEdit` into an LSP `TextEdit`.
 +pub(crate) fn text_edit_vec(
 +    line_index: &LineIndex,
 +    text_edit: TextEdit,
 +) -> Vec<lsp_types::TextEdit> {
 +    text_edit.into_iter().map(|indel| self::text_edit(line_index, indel)).collect()
 +}
 +
 +/// Convert every indel of a `TextEdit` into a `SnippetTextEdit`, applying
 +/// the same snippet flag to each.
 +pub(crate) fn snippet_text_edit_vec(
 +    line_index: &LineIndex,
 +    is_snippet: bool,
 +    text_edit: TextEdit,
 +) -> Vec<lsp_ext::SnippetTextEdit> {
 +    text_edit
 +        .into_iter()
 +        .map(|indel| self::snippet_text_edit(line_index, is_snippet, indel))
 +        .collect()
 +}
 +
 +/// Convert a batch of ide completion items to LSP items.
 +/// The maximum relevance score over the batch is computed first so each
 +/// item's sort text / preselect flag can be ranked against it.
 +pub(crate) fn completion_items(
 +    config: &Config,
 +    line_index: &LineIndex,
 +    tdpp: lsp_types::TextDocumentPositionParams,
 +    items: Vec<CompletionItem>,
 +) -> Vec<lsp_types::CompletionItem> {
 +    let max_relevance = items.iter().map(|it| it.relevance().score()).max().unwrap_or_default();
 +    let mut res = Vec::with_capacity(items.len());
 +    for item in items {
 +        // May push one or two LSP items per ide item (see ref_match below).
 +        completion_item(&mut res, config, line_index, &tdpp, max_relevance, item)
 +    }
 +    res
 +}
 +
 +/// Convert one ide `CompletionItem` into LSP form and push the result(s)
 +/// into `acc` — two items are pushed when the item has a `ref_match`
 +/// (an additional `&`/`&mut`-prefixed variant).
 +fn completion_item(
 +    acc: &mut Vec<lsp_types::CompletionItem>,
 +    config: &Config,
 +    line_index: &LineIndex,
 +    tdpp: &lsp_types::TextDocumentPositionParams,
 +    max_relevance: u32,
 +    item: CompletionItem,
 +) {
 +    let insert_replace_support = config.insert_replace_support().then(|| tdpp.position);
 +    let mut additional_text_edits = Vec::new();
 +
 +    // LSP does not allow arbitrary edits in completion, so we have to do a
 +    // non-trivial mapping here.
 +    let text_edit = {
 +        let mut text_edit = None;
 +        let source_range = item.source_range();
 +        for indel in item.text_edit().iter() {
 +            if indel.delete.contains_range(source_range) {
 +                // This indel covers the completion's source range: it becomes
 +                // the primary edit. If it extends left of the source range,
 +                // split off the extra prefix into an additional edit.
 +                text_edit = Some(if indel.delete == source_range {
 +                    self::completion_text_edit(line_index, insert_replace_support, indel.clone())
 +                } else {
 +                    assert!(source_range.end() == indel.delete.end());
 +                    let range1 = TextRange::new(indel.delete.start(), source_range.start());
 +                    let range2 = source_range;
 +                    let indel1 = Indel::replace(range1, String::new());
 +                    let indel2 = Indel::replace(range2, indel.insert.clone());
 +                    additional_text_edits.push(self::text_edit(line_index, indel1));
 +                    self::completion_text_edit(line_index, insert_replace_support, indel2)
 +                })
 +            } else {
 +                // Edits outside the source range must not overlap it; they go
 +                // into `additional_text_edits` (e.g. auto-import insertions).
 +                assert!(source_range.intersect(indel.delete).is_none());
 +                let text_edit = self::text_edit(line_index, indel.clone());
 +                additional_text_edits.push(text_edit);
 +            }
 +        }
 +        // Exactly one indel is expected to cover the source range.
 +        text_edit.unwrap()
 +    };
 +
 +    let insert_text_format = item.is_snippet().then(|| lsp_types::InsertTextFormat::SNIPPET);
 +    let tags = item.deprecated().then(|| vec![lsp_types::CompletionItemTag::DEPRECATED]);
 +    let command = if item.trigger_call_info() && config.client_commands().trigger_parameter_hints {
 +        Some(command::trigger_parameter_hints())
 +    } else {
 +        None
 +    };
 +
 +    let mut lsp_item = lsp_types::CompletionItem {
 +        label: item.label().to_string(),
 +        detail: item.detail().map(|it| it.to_string()),
 +        filter_text: Some(item.lookup().to_string()),
 +        kind: Some(completion_item_kind(item.kind())),
 +        text_edit: Some(text_edit),
 +        additional_text_edits: Some(additional_text_edits),
 +        documentation: item.documentation().map(documentation),
 +        deprecated: Some(item.deprecated()),
 +        tags,
 +        command,
 +        insert_text_format,
 +        ..Default::default()
 +    };
 +
 +    if config.completion_label_details_support() {
 +        lsp_item.label_details = Some(lsp_types::CompletionItemLabelDetails {
 +            detail: None,
 +            description: lsp_item.detail.clone(),
 +        });
 +    }
 +
 +    set_score(&mut lsp_item, max_relevance, item.relevance());
 +
 +    if config.completion().enable_imports_on_the_fly {
 +        if let imports @ [_, ..] = item.imports_to_add() {
 +            // Defer the actual import edit to completionItem/resolve by
 +            // stashing the import paths in the item's `data` field.
 +            let imports: Vec<_> = imports
 +                .iter()
 +                .filter_map(|import_edit| {
 +                    let import_path = &import_edit.import_path;
 +                    let import_name = import_path.segments().last()?;
 +                    Some(lsp_ext::CompletionImport {
 +                        full_import_path: import_path.to_string(),
 +                        imported_name: import_name.to_string(),
 +                    })
 +                })
 +                .collect();
 +            if !imports.is_empty() {
 +                let data = lsp_ext::CompletionResolveData { position: tdpp.clone(), imports };
 +                lsp_item.data = Some(to_value(data).unwrap());
 +            }
 +        }
 +    }
 +
 +    if let Some((mutability, offset, relevance)) = item.ref_match() {
 +        // Offer a second item that completes `&expr` / `&mut expr`, with its
 +        // own relevance score and an extra edit inserting the `&` prefix.
 +        let mut lsp_item_with_ref = lsp_item.clone();
 +        set_score(&mut lsp_item_with_ref, max_relevance, relevance);
 +        lsp_item_with_ref.label =
 +            format!("&{}{}", mutability.as_keyword_for_ref(), lsp_item_with_ref.label);
 +        lsp_item_with_ref.additional_text_edits.get_or_insert_with(Default::default).push(
 +            self::text_edit(
 +                line_index,
 +                Indel::insert(offset, format!("&{}", mutability.as_keyword_for_ref())),
 +            ),
 +        );
 +
 +        acc.push(lsp_item_with_ref);
 +    };
 +
 +    acc.push(lsp_item);
 +
 +    /// Set `sort_text` (and `preselect` for the top score) from the item's
 +    /// relevance, relative to the batch-wide maximum.
 +    fn set_score(
 +        res: &mut lsp_types::CompletionItem,
 +        max_relevance: u32,
 +        relevance: CompletionRelevance,
 +    ) {
 +        if relevance.is_relevant() && relevance.score() == max_relevance {
 +            res.preselect = Some(true);
 +        }
 +        // The relevance needs to be inverted to come up with a sort score
 +        // because the client will sort ascending.
 +        let sort_score = relevance.score() ^ 0xFF_FF_FF_FF;
 +        // Zero pad the string to ensure values can be properly sorted
 +        // by the client. Hex format is used because it is easier to
 +        // visually compare very large values, which the sort text
 +        // tends to be since it is the opposite of the score.
 +        res.sort_text = Some(format!("{:08x}", sort_score));
 +    }
 +}
 +
 +/// Convert ide signature help into the LSP shape, choosing the label /
 +/// parameter representation from (`params_only`, client label-offset
 +/// support). Offsets are counted in chars, per the documents sent to the
 +/// client.
 +pub(crate) fn signature_help(
 +    call_info: SignatureHelp,
 +    config: CallInfoConfig,
 +    label_offsets: bool,
 +) -> lsp_types::SignatureHelp {
 +    let (label, parameters) = match (config.params_only, label_offsets) {
 +        // No label-offset support: parameters are plain strings.
 +        (concise, false) => {
 +            let params = call_info
 +                .parameter_labels()
 +                .map(|label| lsp_types::ParameterInformation {
 +                    label: lsp_types::ParameterLabel::Simple(label.to_string()),
 +                    documentation: None,
 +                })
 +                .collect::<Vec<_>>();
 +            let label =
 +                if concise { call_info.parameter_labels().join(", ") } else { call_info.signature };
 +            (label, params)
 +        }
 +        // Full signature label with char-based parameter offsets into it.
 +        (false, true) => {
 +            let params = call_info
 +                .parameter_ranges()
 +                .iter()
 +                .map(|it| {
 +                    // Translate byte offsets into char counts for LSP.
 +                    let start = call_info.signature[..it.start().into()].chars().count() as u32;
 +                    let end = call_info.signature[..it.end().into()].chars().count() as u32;
 +                    [start, end]
 +                })
 +                .map(|label_offsets| lsp_types::ParameterInformation {
 +                    label: lsp_types::ParameterLabel::LabelOffsets(label_offsets),
 +                    documentation: None,
 +                })
 +                .collect::<Vec<_>>();
 +            (call_info.signature, params)
 +        }
 +        // Params-only label, built up piecewise so offsets are known exactly.
 +        (true, true) => {
 +            let mut params = Vec::new();
 +            let mut label = String::new();
 +            let mut first = true;
 +            for param in call_info.parameter_labels() {
 +                if !first {
 +                    label.push_str(", ");
 +                }
 +                first = false;
 +                let start = label.chars().count() as u32;
 +                label.push_str(param);
 +                let end = label.chars().count() as u32;
 +                params.push(lsp_types::ParameterInformation {
 +                    label: lsp_types::ParameterLabel::LabelOffsets([start, end]),
 +                    documentation: None,
 +                });
 +            }
 +
 +            (label, params)
 +        }
 +    };
 +
 +    let documentation = call_info.doc.filter(|_| config.docs).map(|doc| {
 +        lsp_types::Documentation::MarkupContent(lsp_types::MarkupContent {
 +            kind: lsp_types::MarkupKind::Markdown,
 +            value: doc,
 +        })
 +    });
 +
 +    let active_parameter = call_info.active_parameter.map(|it| it as u32);
 +
 +    let signature = lsp_types::SignatureInformation {
 +        label,
 +        documentation,
 +        parameters: Some(parameters),
 +        active_parameter,
 +    };
 +    // Only ever one signature; the active one is therefore index 0.
 +    lsp_types::SignatureHelp {
 +        signatures: vec![signature],
 +        active_signature: Some(0),
 +        active_parameter,
 +    }
 +}
 +
 +/// Convert an ide `InlayHint` to `lsp_types::InlayHint`: anchor position,
 +/// padding, label text, LSP kind, and tooltip all depend on the hint kind.
 +pub(crate) fn inlay_hint(
 +    snap: &GlobalStateSnapshot,
 +    line_index: &LineIndex,
 +    render_colons: bool,
 +    inlay_hint: InlayHint,
 +) -> lsp_types::InlayHint {
 +    lsp_types::InlayHint {
 +        position: match inlay_hint.kind {
 +            // before annotated thing
 +            InlayKind::ParameterHint
 +            | InlayKind::ImplicitReborrowHint
 +            | InlayKind::BindingModeHint => position(line_index, inlay_hint.range.start()),
 +            // after annotated thing
 +            InlayKind::ClosureReturnTypeHint
 +            | InlayKind::TypeHint
 +            | InlayKind::ChainingHint
 +            | InlayKind::GenericParamListHint
 +            | InlayKind::LifetimeHint
 +            | InlayKind::ClosingBraceHint => position(line_index, inlay_hint.range.end()),
 +        },
 +        padding_left: Some(match inlay_hint.kind {
 +            // When the client renders the `: ` itself, no extra padding.
 +            InlayKind::TypeHint => !render_colons,
 +            InlayKind::ChainingHint | InlayKind::ClosingBraceHint => true,
 +            InlayKind::BindingModeHint
 +            | InlayKind::ClosureReturnTypeHint
 +            | InlayKind::GenericParamListHint
 +            | InlayKind::ImplicitReborrowHint
 +            | InlayKind::LifetimeHint
 +            | InlayKind::ParameterHint => false,
 +        }),
 +        padding_right: Some(match inlay_hint.kind {
 +            InlayKind::ChainingHint
 +            | InlayKind::ClosureReturnTypeHint
 +            | InlayKind::GenericParamListHint
 +            | InlayKind::ImplicitReborrowHint
 +            | InlayKind::TypeHint
 +            | InlayKind::ClosingBraceHint => false,
 +            // A bare `&` binding-mode hint hugs the following token.
 +            InlayKind::BindingModeHint => inlay_hint.label != "&",
 +            InlayKind::ParameterHint | InlayKind::LifetimeHint => true,
 +        }),
 +        label: lsp_types::InlayHintLabel::String(match inlay_hint.kind {
 +            InlayKind::ParameterHint if render_colons => format!("{}:", inlay_hint.label),
 +            InlayKind::TypeHint if render_colons => format!(": {}", inlay_hint.label),
 +            InlayKind::ClosureReturnTypeHint => format!(" -> {}", inlay_hint.label),
 +            _ => inlay_hint.label.clone(),
 +        }),
 +        kind: match inlay_hint.kind {
 +            InlayKind::ParameterHint => Some(lsp_types::InlayHintKind::PARAMETER),
 +            InlayKind::ClosureReturnTypeHint | InlayKind::TypeHint | InlayKind::ChainingHint => {
 +                Some(lsp_types::InlayHintKind::TYPE)
 +            }
 +            // rust-analyzer-specific hints have no LSP kind.
 +            InlayKind::BindingModeHint
 +            | InlayKind::GenericParamListHint
 +            | InlayKind::LifetimeHint
 +            | InlayKind::ImplicitReborrowHint
 +            | InlayKind::ClosingBraceHint => None,
 +        },
 +        text_edits: None,
 +        // Hover-style tooltips are resolved lazily: stash the location in
 +        // `data` for inlayHint/resolve; fall back to None on any failure.
 +        data: (|| match inlay_hint.tooltip {
 +            Some(ide::InlayTooltip::HoverOffset(file_id, offset)) => {
 +                let uri = url(snap, file_id);
 +                let line_index = snap.file_line_index(file_id).ok()?;
 +
 +                let text_document = lsp_types::TextDocumentIdentifier { uri };
 +                to_value(lsp_ext::InlayHintResolveData {
 +                    text_document,
 +                    position: lsp_ext::PositionOrRange::Position(position(&line_index, offset)),
 +                })
 +                .ok()
 +            }
 +            Some(ide::InlayTooltip::HoverRanged(file_id, text_range)) => {
 +                let uri = url(snap, file_id);
 +                let text_document = lsp_types::TextDocumentIdentifier { uri };
 +                let line_index = snap.file_line_index(file_id).ok()?;
 +                to_value(lsp_ext::InlayHintResolveData {
 +                    text_document,
 +                    position: lsp_ext::PositionOrRange::Range(range(&line_index, text_range)),
 +                })
 +                .ok()
 +            }
 +            _ => None,
 +        })(),
 +        tooltip: Some(match inlay_hint.tooltip {
 +            Some(ide::InlayTooltip::String(s)) => lsp_types::InlayHintTooltip::String(s),
 +            _ => lsp_types::InlayHintTooltip::String(inlay_hint.label),
 +        }),
 +    }
 +}
 +
 +// Monotonic counter used to mint unique semantic-token result ids.
 +static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1);
 +
 +/// Convert highlight ranges into delta-encoded LSP semantic tokens,
 +/// splitting multi-line ranges per line (LSP tokens cannot span lines).
 +pub(crate) fn semantic_tokens(
 +    text: &str,
 +    line_index: &LineIndex,
 +    highlights: Vec<HlRange>,
-         if !highlight_strings && ty == lsp_types::SemanticTokenType::STRING {
-             continue;
-         }
 +) -> lsp_types::SemanticTokens {
 +    let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
 +    let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);
 +
 +    for highlight_range in highlights {
 +        if highlight_range.highlight.is_empty() {
 +            continue;
 +        }
++
 +        let (ty, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
-             SymbolKind::Attribute => semantic_tokens::ATTRIBUTE,
 +        let token_index = semantic_tokens::type_index(ty);
 +        let modifier_bitset = mods.0;
 +
 +        // Emit one token per line of the range, stripping a trailing '\n'.
 +        for mut text_range in line_index.index.lines(highlight_range.range) {
 +            if text[text_range].ends_with('\n') {
 +                text_range =
 +                    TextRange::new(text_range.start(), text_range.end() - TextSize::of('\n'));
 +            }
 +            let range = range(line_index, text_range);
 +            builder.push(range, token_index, modifier_bitset);
 +        }
 +    }
 +
 +    builder.build()
 +}
 +
 +/// Build a `SemanticTokensDelta` between two full token responses, carrying
 +/// the new result id and the minimal edit list from `diff_tokens`.
 +pub(crate) fn semantic_token_delta(
 +    previous: &lsp_types::SemanticTokens,
 +    current: &lsp_types::SemanticTokens,
 +) -> lsp_types::SemanticTokensDelta {
 +    let result_id = current.result_id.clone();
 +    let edits = semantic_tokens::diff_tokens(&previous.data, &current.data);
 +    lsp_types::SemanticTokensDelta { result_id, edits }
 +}
 +
 +fn semantic_token_type_and_modifiers(
 +    highlight: Highlight,
 +) -> (lsp_types::SemanticTokenType, semantic_tokens::ModifierSet) {
 +    let mut mods = semantic_tokens::ModifierSet::default();
 +    let type_ = match highlight.tag {
 +        HlTag::Symbol(symbol) => match symbol {
-             SymbolKind::Module => lsp_types::SemanticTokenType::NAMESPACE,
++            SymbolKind::Attribute => semantic_tokens::DECORATOR,
 +            SymbolKind::Derive => semantic_tokens::DERIVE,
 +            SymbolKind::DeriveHelper => semantic_tokens::DERIVE_HELPER,
-             SymbolKind::Field => lsp_types::SemanticTokenType::PROPERTY,
-             SymbolKind::TypeParam => lsp_types::SemanticTokenType::TYPE_PARAMETER,
++            SymbolKind::Module => semantic_tokens::NAMESPACE,
 +            SymbolKind::Impl => semantic_tokens::TYPE_ALIAS,
-             SymbolKind::ValueParam => lsp_types::SemanticTokenType::PARAMETER,
++            SymbolKind::Field => semantic_tokens::PROPERTY,
++            SymbolKind::TypeParam => semantic_tokens::TYPE_PARAMETER,
 +            SymbolKind::ConstParam => semantic_tokens::CONST_PARAMETER,
 +            SymbolKind::LifetimeParam => semantic_tokens::LIFETIME,
 +            SymbolKind::Label => semantic_tokens::LABEL,
-             SymbolKind::Local => lsp_types::SemanticTokenType::VARIABLE,
++            SymbolKind::ValueParam => semantic_tokens::PARAMETER,
 +            SymbolKind::SelfParam => semantic_tokens::SELF_KEYWORD,
 +            SymbolKind::SelfType => semantic_tokens::SELF_TYPE_KEYWORD,
-                     lsp_types::SemanticTokenType::METHOD
++            SymbolKind::Local => semantic_tokens::VARIABLE,
 +            SymbolKind::Function => {
 +                if highlight.mods.contains(HlMod::Associated) {
-                     lsp_types::SemanticTokenType::FUNCTION
++                    semantic_tokens::METHOD
 +                } else {
-                 mods |= lsp_types::SemanticTokenModifier::STATIC;
-                 lsp_types::SemanticTokenType::VARIABLE
++                    semantic_tokens::FUNCTION
 +                }
 +            }
 +            SymbolKind::Const => {
 +                mods |= semantic_tokens::CONSTANT;
-                 mods |= lsp_types::SemanticTokenModifier::STATIC;
-                 lsp_types::SemanticTokenType::VARIABLE
++                mods |= semantic_tokens::STATIC;
++                semantic_tokens::VARIABLE
 +            }
 +            SymbolKind::Static => {
-             SymbolKind::Struct => lsp_types::SemanticTokenType::STRUCT,
-             SymbolKind::Enum => lsp_types::SemanticTokenType::ENUM,
-             SymbolKind::Variant => lsp_types::SemanticTokenType::ENUM_MEMBER,
++                mods |= semantic_tokens::STATIC;
++                semantic_tokens::VARIABLE
 +            }
-             SymbolKind::Trait => lsp_types::SemanticTokenType::INTERFACE,
-             SymbolKind::Macro => lsp_types::SemanticTokenType::MACRO,
++            SymbolKind::Struct => semantic_tokens::STRUCT,
++            SymbolKind::Enum => semantic_tokens::ENUM,
++            SymbolKind::Variant => semantic_tokens::ENUM_MEMBER,
 +            SymbolKind::Union => semantic_tokens::UNION,
 +            SymbolKind::TypeAlias => semantic_tokens::TYPE_ALIAS,
-         HlTag::ByteLiteral | HlTag::NumericLiteral => lsp_types::SemanticTokenType::NUMBER,
++            SymbolKind::Trait => semantic_tokens::INTERFACE,
++            SymbolKind::Macro => semantic_tokens::MACRO,
 +            SymbolKind::BuiltinAttr => semantic_tokens::BUILTIN_ATTRIBUTE,
 +            SymbolKind::ToolModule => semantic_tokens::TOOL_MODULE,
 +        },
 +        HlTag::AttributeBracket => semantic_tokens::ATTRIBUTE_BRACKET,
 +        HlTag::BoolLiteral => semantic_tokens::BOOLEAN,
 +        HlTag::BuiltinType => semantic_tokens::BUILTIN_TYPE,
-         HlTag::Comment => lsp_types::SemanticTokenType::COMMENT,
++        HlTag::ByteLiteral | HlTag::NumericLiteral => semantic_tokens::NUMBER,
 +        HlTag::CharLiteral => semantic_tokens::CHAR,
-         HlTag::Keyword => lsp_types::SemanticTokenType::KEYWORD,
++        HlTag::Comment => semantic_tokens::COMMENT,
 +        HlTag::EscapeSequence => semantic_tokens::ESCAPE_SEQUENCE,
 +        HlTag::FormatSpecifier => semantic_tokens::FORMAT_SPECIFIER,
-         HlTag::StringLiteral => lsp_types::SemanticTokenType::STRING,
++        HlTag::Keyword => semantic_tokens::KEYWORD,
 +        HlTag::None => semantic_tokens::GENERIC,
 +        HlTag::Operator(op) => match op {
 +            HlOperator::Bitwise => semantic_tokens::BITWISE,
 +            HlOperator::Arithmetic => semantic_tokens::ARITHMETIC,
 +            HlOperator::Logical => semantic_tokens::LOGICAL,
 +            HlOperator::Comparison => semantic_tokens::COMPARISON,
 +            HlOperator::Other => semantic_tokens::OPERATOR,
 +        },
-             HlMod::DefaultLibrary => lsp_types::SemanticTokenModifier::DEFAULT_LIBRARY,
-             HlMod::Definition => lsp_types::SemanticTokenModifier::DECLARATION,
-             HlMod::Documentation => lsp_types::SemanticTokenModifier::DOCUMENTATION,
++        HlTag::StringLiteral => semantic_tokens::STRING,
 +        HlTag::UnresolvedReference => semantic_tokens::UNRESOLVED_REFERENCE,
 +        HlTag::Punctuation(punct) => match punct {
 +            HlPunct::Bracket => semantic_tokens::BRACKET,
 +            HlPunct::Brace => semantic_tokens::BRACE,
 +            HlPunct::Parenthesis => semantic_tokens::PARENTHESIS,
 +            HlPunct::Angle => semantic_tokens::ANGLE,
 +            HlPunct::Comma => semantic_tokens::COMMA,
 +            HlPunct::Dot => semantic_tokens::DOT,
 +            HlPunct::Colon => semantic_tokens::COLON,
 +            HlPunct::Semi => semantic_tokens::SEMICOLON,
 +            HlPunct::Other => semantic_tokens::PUNCTUATION,
 +            HlPunct::MacroBang => semantic_tokens::MACRO_BANG,
 +        },
 +    };
 +
 +    for modifier in highlight.mods.iter() {
 +        let modifier = match modifier {
 +            HlMod::Associated => continue,
 +            HlMod::Async => semantic_tokens::ASYNC,
 +            HlMod::Attribute => semantic_tokens::ATTRIBUTE_MODIFIER,
 +            HlMod::Callable => semantic_tokens::CALLABLE,
 +            HlMod::Consuming => semantic_tokens::CONSUMING,
 +            HlMod::ControlFlow => semantic_tokens::CONTROL_FLOW,
 +            HlMod::CrateRoot => semantic_tokens::CRATE_ROOT,
-             HlMod::Static => lsp_types::SemanticTokenModifier::STATIC,
++            HlMod::DefaultLibrary => semantic_tokens::DEFAULT_LIBRARY,
++            HlMod::Definition => semantic_tokens::DECLARATION,
++            HlMod::Documentation => semantic_tokens::DOCUMENTATION,
 +            HlMod::Injected => semantic_tokens::INJECTED,
 +            HlMod::IntraDocLink => semantic_tokens::INTRA_DOC_LINK,
 +            HlMod::Library => semantic_tokens::LIBRARY,
 +            HlMod::Mutable => semantic_tokens::MUTABLE,
 +            HlMod::Public => semantic_tokens::PUBLIC,
 +            HlMod::Reference => semantic_tokens::REFERENCE,
++            HlMod::Static => semantic_tokens::STATIC,
 +            HlMod::Trait => semantic_tokens::TRAIT_MODIFIER,
 +            HlMod::Unsafe => semantic_tokens::UNSAFE,
 +        };
 +        mods |= modifier;
 +    }
 +
 +    (type_, mods)
 +}
 +
 +pub(crate) fn folding_range(
 +    text: &str,
 +    line_index: &LineIndex,
 +    line_folding_only: bool,
 +    fold: Fold,
 +) -> lsp_types::FoldingRange {
 +    let kind = match fold.kind {
 +        FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment),
 +        FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports),
 +        FoldKind::Region => Some(lsp_types::FoldingRangeKind::Region),
 +        FoldKind::Mods
 +        | FoldKind::Block
 +        | FoldKind::ArgList
 +        | FoldKind::Consts
 +        | FoldKind::Statics
 +        | FoldKind::WhereClause
 +        | FoldKind::ReturnType
 +        | FoldKind::Array
 +        | FoldKind::MatchArm => None,
 +    };
 +
 +    let range = range(line_index, fold.range);
 +
 +    if line_folding_only {
 +        // Clients with line_folding_only == true (such as VSCode) will fold the whole end line
 +        // even if it contains text not in the folding range. To prevent that we exclude
 +        // range.end.line from the folding region if there is more text after range.end
 +        // on the same line.
 +        let has_more_text_on_end_line = text[TextRange::new(fold.range.end(), TextSize::of(text))]
 +            .chars()
 +            .take_while(|it| *it != '\n')
 +            .any(|it| !it.is_whitespace());
 +
 +        let end_line = if has_more_text_on_end_line {
 +            range.end.line.saturating_sub(1)
 +        } else {
 +            range.end.line
 +        };
 +
 +        lsp_types::FoldingRange {
 +            start_line: range.start.line,
 +            start_character: None,
 +            end_line,
 +            end_character: None,
 +            kind,
 +        }
 +    } else {
 +        lsp_types::FoldingRange {
 +            start_line: range.start.line,
 +            start_character: Some(range.start.character),
 +            end_line: range.end.line,
 +            end_character: Some(range.end.character),
 +            kind,
 +        }
 +    }
 +}
 +
 +pub(crate) fn url(snap: &GlobalStateSnapshot, file_id: FileId) -> lsp_types::Url {
 +    snap.file_id_to_url(file_id)
 +}
 +
 +/// Returns a `Url` object from a given path, will lowercase drive letters if present.
 +/// This will only happen when processing windows paths.
 +///
 +/// When processing non-windows path, this is essentially the same as `Url::from_file_path`.
 +pub(crate) fn url_from_abs_path(path: &AbsPath) -> lsp_types::Url {
 +    let url = lsp_types::Url::from_file_path(path).unwrap();
 +    match path.as_ref().components().next() {
 +        Some(path::Component::Prefix(prefix))
 +            if matches!(prefix.kind(), path::Prefix::Disk(_) | path::Prefix::VerbatimDisk(_)) =>
 +        {
 +            // Need to lowercase driver letter
 +        }
 +        _ => return url,
 +    }
 +
 +    let driver_letter_range = {
 +        let (scheme, drive_letter, _rest) = match url.as_str().splitn(3, ':').collect_tuple() {
 +            Some(it) => it,
 +            None => return url,
 +        };
 +        let start = scheme.len() + ':'.len_utf8();
 +        start..(start + drive_letter.len())
 +    };
 +
 +    // Note: lowercasing the `path` itself doesn't help, the `Url::parse`
 +    // machinery *also* canonicalizes the drive letter. So, just massage the
 +    // string in place.
 +    let mut url: String = url.into();
 +    url[driver_letter_range].make_ascii_lowercase();
 +    lsp_types::Url::parse(&url).unwrap()
 +}
 +
 +pub(crate) fn optional_versioned_text_document_identifier(
 +    snap: &GlobalStateSnapshot,
 +    file_id: FileId,
 +) -> lsp_types::OptionalVersionedTextDocumentIdentifier {
 +    let url = url(snap, file_id);
 +    let version = snap.url_file_version(&url);
 +    lsp_types::OptionalVersionedTextDocumentIdentifier { uri: url, version }
 +}
 +
 +pub(crate) fn location(
 +    snap: &GlobalStateSnapshot,
 +    frange: FileRange,
 +) -> Result<lsp_types::Location> {
 +    let url = url(snap, frange.file_id);
 +    let line_index = snap.file_line_index(frange.file_id)?;
 +    let range = range(&line_index, frange.range);
 +    let loc = lsp_types::Location::new(url, range);
 +    Ok(loc)
 +}
 +
 +/// Prefer using `location_link`, if the client has the cap.
 +pub(crate) fn location_from_nav(
 +    snap: &GlobalStateSnapshot,
 +    nav: NavigationTarget,
 +) -> Result<lsp_types::Location> {
 +    let url = url(snap, nav.file_id);
 +    let line_index = snap.file_line_index(nav.file_id)?;
 +    let range = range(&line_index, nav.full_range);
 +    let loc = lsp_types::Location::new(url, range);
 +    Ok(loc)
 +}
 +
 +pub(crate) fn location_link(
 +    snap: &GlobalStateSnapshot,
 +    src: Option<FileRange>,
 +    target: NavigationTarget,
 +) -> Result<lsp_types::LocationLink> {
 +    let origin_selection_range = match src {
 +        Some(src) => {
 +            let line_index = snap.file_line_index(src.file_id)?;
 +            let range = range(&line_index, src.range);
 +            Some(range)
 +        }
 +        None => None,
 +    };
 +    let (target_uri, target_range, target_selection_range) = location_info(snap, target)?;
 +    let res = lsp_types::LocationLink {
 +        origin_selection_range,
 +        target_uri,
 +        target_range,
 +        target_selection_range,
 +    };
 +    Ok(res)
 +}
 +
 +fn location_info(
 +    snap: &GlobalStateSnapshot,
 +    target: NavigationTarget,
 +) -> Result<(lsp_types::Url, lsp_types::Range, lsp_types::Range)> {
 +    let line_index = snap.file_line_index(target.file_id)?;
 +
 +    let target_uri = url(snap, target.file_id);
 +    let target_range = range(&line_index, target.full_range);
 +    let target_selection_range =
 +        target.focus_range.map(|it| range(&line_index, it)).unwrap_or(target_range);
 +    Ok((target_uri, target_range, target_selection_range))
 +}
 +
 +pub(crate) fn goto_definition_response(
 +    snap: &GlobalStateSnapshot,
 +    src: Option<FileRange>,
 +    targets: Vec<NavigationTarget>,
 +) -> Result<lsp_types::GotoDefinitionResponse> {
 +    if snap.config.location_link() {
 +        let links = targets
 +            .into_iter()
 +            .map(|nav| location_link(snap, src, nav))
 +            .collect::<Result<Vec<_>>>()?;
 +        Ok(links.into())
 +    } else {
 +        let locations = targets
 +            .into_iter()
 +            .map(|nav| {
 +                location(snap, FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
 +            })
 +            .collect::<Result<Vec<_>>>()?;
 +        Ok(locations.into())
 +    }
 +}
 +
 +fn outside_workspace_annotation_id() -> String {
 +    String::from("OutsideWorkspace")
 +}
 +
 +pub(crate) fn snippet_text_document_edit(
 +    snap: &GlobalStateSnapshot,
 +    is_snippet: bool,
 +    file_id: FileId,
 +    edit: TextEdit,
 +) -> Result<lsp_ext::SnippetTextDocumentEdit> {
 +    let text_document = optional_versioned_text_document_identifier(snap, file_id);
 +    let line_index = snap.file_line_index(file_id)?;
 +    let mut edits: Vec<_> =
 +        edit.into_iter().map(|it| snippet_text_edit(&line_index, is_snippet, it)).collect();
 +
 +    if snap.analysis.is_library_file(file_id)? && snap.config.change_annotation_support() {
 +        for edit in &mut edits {
 +            edit.annotation_id = Some(outside_workspace_annotation_id())
 +        }
 +    }
 +    Ok(lsp_ext::SnippetTextDocumentEdit { text_document, edits })
 +}
 +
 +pub(crate) fn snippet_text_document_ops(
 +    snap: &GlobalStateSnapshot,
 +    file_system_edit: FileSystemEdit,
 +) -> Cancellable<Vec<lsp_ext::SnippetDocumentChangeOperation>> {
 +    let mut ops = Vec::new();
 +    match file_system_edit {
 +        FileSystemEdit::CreateFile { dst, initial_contents } => {
 +            let uri = snap.anchored_path(&dst);
 +            let create_file = lsp_types::ResourceOp::Create(lsp_types::CreateFile {
 +                uri: uri.clone(),
 +                options: None,
 +                annotation_id: None,
 +            });
 +            ops.push(lsp_ext::SnippetDocumentChangeOperation::Op(create_file));
 +            if !initial_contents.is_empty() {
 +                let text_document =
 +                    lsp_types::OptionalVersionedTextDocumentIdentifier { uri, version: None };
 +                let text_edit = lsp_ext::SnippetTextEdit {
 +                    range: lsp_types::Range::default(),
 +                    new_text: initial_contents,
 +                    insert_text_format: Some(lsp_types::InsertTextFormat::PLAIN_TEXT),
 +                    annotation_id: None,
 +                };
 +                let edit_file =
 +                    lsp_ext::SnippetTextDocumentEdit { text_document, edits: vec![text_edit] };
 +                ops.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit_file));
 +            }
 +        }
 +        FileSystemEdit::MoveFile { src, dst } => {
 +            let old_uri = snap.file_id_to_url(src);
 +            let new_uri = snap.anchored_path(&dst);
 +            let mut rename_file =
 +                lsp_types::RenameFile { old_uri, new_uri, options: None, annotation_id: None };
 +            if snap.analysis.is_library_file(src).ok() == Some(true)
 +                && snap.config.change_annotation_support()
 +            {
 +                rename_file.annotation_id = Some(outside_workspace_annotation_id())
 +            }
 +            ops.push(lsp_ext::SnippetDocumentChangeOperation::Op(lsp_types::ResourceOp::Rename(
 +                rename_file,
 +            )))
 +        }
 +        FileSystemEdit::MoveDir { src, src_id, dst } => {
 +            let old_uri = snap.anchored_path(&src);
 +            let new_uri = snap.anchored_path(&dst);
 +            let mut rename_file =
 +                lsp_types::RenameFile { old_uri, new_uri, options: None, annotation_id: None };
 +            if snap.analysis.is_library_file(src_id).ok() == Some(true)
 +                && snap.config.change_annotation_support()
 +            {
 +                rename_file.annotation_id = Some(outside_workspace_annotation_id())
 +            }
 +            ops.push(lsp_ext::SnippetDocumentChangeOperation::Op(lsp_types::ResourceOp::Rename(
 +                rename_file,
 +            )))
 +        }
 +    }
 +    Ok(ops)
 +}
 +
 +pub(crate) fn snippet_workspace_edit(
 +    snap: &GlobalStateSnapshot,
 +    source_change: SourceChange,
 +) -> Result<lsp_ext::SnippetWorkspaceEdit> {
 +    let mut document_changes: Vec<lsp_ext::SnippetDocumentChangeOperation> = Vec::new();
 +
 +    for op in source_change.file_system_edits {
 +        let ops = snippet_text_document_ops(snap, op)?;
 +        document_changes.extend_from_slice(&ops);
 +    }
 +    for (file_id, edit) in source_change.source_file_edits {
 +        let edit = snippet_text_document_edit(snap, source_change.is_snippet, file_id, edit)?;
 +        document_changes.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit));
 +    }
 +    let mut workspace_edit = lsp_ext::SnippetWorkspaceEdit {
 +        changes: None,
 +        document_changes: Some(document_changes),
 +        change_annotations: None,
 +    };
 +    if snap.config.change_annotation_support() {
 +        workspace_edit.change_annotations = Some(
 +            once((
 +                outside_workspace_annotation_id(),
 +                lsp_types::ChangeAnnotation {
 +                    label: String::from("Edit outside of the workspace"),
 +                    needs_confirmation: Some(true),
 +                    description: Some(String::from(
 +                        "This edit lies outside of the workspace and may affect dependencies",
 +                    )),
 +                },
 +            ))
 +            .collect(),
 +        )
 +    }
 +    Ok(workspace_edit)
 +}
 +
 +pub(crate) fn workspace_edit(
 +    snap: &GlobalStateSnapshot,
 +    source_change: SourceChange,
 +) -> Result<lsp_types::WorkspaceEdit> {
 +    assert!(!source_change.is_snippet);
 +    snippet_workspace_edit(snap, source_change).map(|it| it.into())
 +}
 +
 +impl From<lsp_ext::SnippetWorkspaceEdit> for lsp_types::WorkspaceEdit {
 +    fn from(snippet_workspace_edit: lsp_ext::SnippetWorkspaceEdit) -> lsp_types::WorkspaceEdit {
 +        lsp_types::WorkspaceEdit {
 +            changes: None,
 +            document_changes: snippet_workspace_edit.document_changes.map(|changes| {
 +                lsp_types::DocumentChanges::Operations(
 +                    changes
 +                        .into_iter()
 +                        .map(|change| match change {
 +                            lsp_ext::SnippetDocumentChangeOperation::Op(op) => {
 +                                lsp_types::DocumentChangeOperation::Op(op)
 +                            }
 +                            lsp_ext::SnippetDocumentChangeOperation::Edit(edit) => {
 +                                lsp_types::DocumentChangeOperation::Edit(
 +                                    lsp_types::TextDocumentEdit {
 +                                        text_document: edit.text_document,
 +                                        edits: edit.edits.into_iter().map(From::from).collect(),
 +                                    },
 +                                )
 +                            }
 +                        })
 +                        .collect(),
 +                )
 +            }),
 +            change_annotations: snippet_workspace_edit.change_annotations,
 +        }
 +    }
 +}
 +
 +impl From<lsp_ext::SnippetTextEdit>
 +    for lsp_types::OneOf<lsp_types::TextEdit, lsp_types::AnnotatedTextEdit>
 +{
 +    fn from(
 +        lsp_ext::SnippetTextEdit { annotation_id, insert_text_format:_, new_text, range }: lsp_ext::SnippetTextEdit,
 +    ) -> Self {
 +        match annotation_id {
 +            Some(annotation_id) => lsp_types::OneOf::Right(lsp_types::AnnotatedTextEdit {
 +                text_edit: lsp_types::TextEdit { range, new_text },
 +                annotation_id,
 +            }),
 +            None => lsp_types::OneOf::Left(lsp_types::TextEdit { range, new_text }),
 +        }
 +    }
 +}
 +
 +pub(crate) fn call_hierarchy_item(
 +    snap: &GlobalStateSnapshot,
 +    target: NavigationTarget,
 +) -> Result<lsp_types::CallHierarchyItem> {
 +    let name = target.name.to_string();
 +    let detail = target.description.clone();
 +    let kind = target.kind.map(symbol_kind).unwrap_or(lsp_types::SymbolKind::FUNCTION);
 +    let (uri, range, selection_range) = location_info(snap, target)?;
 +    Ok(lsp_types::CallHierarchyItem {
 +        name,
 +        kind,
 +        tags: None,
 +        detail,
 +        uri,
 +        range,
 +        selection_range,
 +        data: None,
 +    })
 +}
 +
 +pub(crate) fn code_action_kind(kind: AssistKind) -> lsp_types::CodeActionKind {
 +    match kind {
 +        AssistKind::None | AssistKind::Generate => lsp_types::CodeActionKind::EMPTY,
 +        AssistKind::QuickFix => lsp_types::CodeActionKind::QUICKFIX,
 +        AssistKind::Refactor => lsp_types::CodeActionKind::REFACTOR,
 +        AssistKind::RefactorExtract => lsp_types::CodeActionKind::REFACTOR_EXTRACT,
 +        AssistKind::RefactorInline => lsp_types::CodeActionKind::REFACTOR_INLINE,
 +        AssistKind::RefactorRewrite => lsp_types::CodeActionKind::REFACTOR_REWRITE,
 +    }
 +}
 +
 +pub(crate) fn code_action(
 +    snap: &GlobalStateSnapshot,
 +    assist: Assist,
 +    resolve_data: Option<(usize, lsp_types::CodeActionParams)>,
 +) -> Result<lsp_ext::CodeAction> {
 +    let mut res = lsp_ext::CodeAction {
 +        title: assist.label.to_string(),
 +        group: assist.group.filter(|_| snap.config.code_action_group()).map(|gr| gr.0),
 +        kind: Some(code_action_kind(assist.id.1)),
 +        edit: None,
 +        is_preferred: None,
 +        data: None,
 +        command: None,
 +    };
 +
 +    if assist.trigger_signature_help && snap.config.client_commands().trigger_parameter_hints {
 +        res.command = Some(command::trigger_parameter_hints());
 +    }
 +
 +    match (assist.source_change, resolve_data) {
 +        (Some(it), _) => res.edit = Some(snippet_workspace_edit(snap, it)?),
 +        (None, Some((index, code_action_params))) => {
 +            res.data = Some(lsp_ext::CodeActionData {
 +                id: format!("{}:{}:{}", assist.id.0, assist.id.1.name(), index),
 +                code_action_params,
 +            });
 +        }
 +        (None, None) => {
 +            stdx::never!("assist should always be resolved if client can't do lazy resolving")
 +        }
 +    };
 +    Ok(res)
 +}
 +
 +pub(crate) fn runnable(
 +    snap: &GlobalStateSnapshot,
 +    runnable: Runnable,
 +) -> Result<lsp_ext::Runnable> {
 +    let config = snap.config.runnables();
 +    let spec = CargoTargetSpec::for_file(snap, runnable.nav.file_id)?;
 +    let workspace_root = spec.as_ref().map(|it| it.workspace_root.clone());
 +    let target = spec.as_ref().map(|s| s.target.clone());
 +    let (cargo_args, executable_args) =
 +        CargoTargetSpec::runnable_args(snap, spec, &runnable.kind, &runnable.cfg)?;
 +    let label = runnable.label(target);
 +    let location = location_link(snap, None, runnable.nav)?;
 +
 +    Ok(lsp_ext::Runnable {
 +        label,
 +        location: Some(location),
 +        kind: lsp_ext::RunnableKind::Cargo,
 +        args: lsp_ext::CargoRunnable {
 +            workspace_root: workspace_root.map(|it| it.into()),
 +            override_cargo: config.override_cargo,
 +            cargo_args,
 +            cargo_extra_args: config.cargo_extra_args,
 +            executable_args,
 +            expect_test: None,
 +        },
 +    })
 +}
 +
 +pub(crate) fn code_lens(
 +    acc: &mut Vec<lsp_types::CodeLens>,
 +    snap: &GlobalStateSnapshot,
 +    annotation: Annotation,
 +) -> Result<()> {
 +    let client_commands_config = snap.config.client_commands();
 +    match annotation.kind {
 +        AnnotationKind::Runnable(run) => {
 +            let line_index = snap.file_line_index(run.nav.file_id)?;
 +            let annotation_range = range(&line_index, annotation.range);
 +
 +            let title = run.title();
 +            let can_debug = match run.kind {
 +                ide::RunnableKind::DocTest { .. } => false,
 +                ide::RunnableKind::TestMod { .. }
 +                | ide::RunnableKind::Test { .. }
 +                | ide::RunnableKind::Bench { .. }
 +                | ide::RunnableKind::Bin => true,
 +            };
 +            let r = runnable(snap, run)?;
 +
 +            let lens_config = snap.config.lens();
 +            if lens_config.run && client_commands_config.run_single {
 +                let command = command::run_single(&r, &title);
 +                acc.push(lsp_types::CodeLens {
 +                    range: annotation_range,
 +                    command: Some(command),
 +                    data: None,
 +                })
 +            }
 +            if lens_config.debug && can_debug && client_commands_config.debug_single {
 +                let command = command::debug_single(&r);
 +                acc.push(lsp_types::CodeLens {
 +                    range: annotation_range,
 +                    command: Some(command),
 +                    data: None,
 +                })
 +            }
 +        }
 +        AnnotationKind::HasImpls { file_id, data } => {
 +            if !client_commands_config.show_reference {
 +                return Ok(());
 +            }
 +            let line_index = snap.file_line_index(file_id)?;
 +            let annotation_range = range(&line_index, annotation.range);
 +            let url = url(snap, file_id);
 +
 +            let id = lsp_types::TextDocumentIdentifier { uri: url.clone() };
 +
 +            let doc_pos = lsp_types::TextDocumentPositionParams::new(id, annotation_range.start);
 +
 +            let goto_params = lsp_types::request::GotoImplementationParams {
 +                text_document_position_params: doc_pos,
 +                work_done_progress_params: Default::default(),
 +                partial_result_params: Default::default(),
 +            };
 +
 +            let command = data.map(|ranges| {
 +                let locations: Vec<lsp_types::Location> = ranges
 +                    .into_iter()
 +                    .filter_map(|target| {
 +                        location(
 +                            snap,
 +                            FileRange { file_id: target.file_id, range: target.full_range },
 +                        )
 +                        .ok()
 +                    })
 +                    .collect();
 +
 +                command::show_references(
 +                    implementation_title(locations.len()),
 +                    &url,
 +                    annotation_range.start,
 +                    locations,
 +                )
 +            });
 +
 +            acc.push(lsp_types::CodeLens {
 +                range: annotation_range,
 +                command,
 +                data: Some(to_value(lsp_ext::CodeLensResolveData::Impls(goto_params)).unwrap()),
 +            })
 +        }
 +        AnnotationKind::HasReferences { file_id, data } => {
 +            if !client_commands_config.show_reference {
 +                return Ok(());
 +            }
 +            let line_index = snap.file_line_index(file_id)?;
 +            let annotation_range = range(&line_index, annotation.range);
 +            let url = url(snap, file_id);
 +
 +            let id = lsp_types::TextDocumentIdentifier { uri: url.clone() };
 +
 +            let doc_pos = lsp_types::TextDocumentPositionParams::new(id, annotation_range.start);
 +
 +            let command = data.map(|ranges| {
 +                let locations: Vec<lsp_types::Location> =
 +                    ranges.into_iter().filter_map(|range| location(snap, range).ok()).collect();
 +
 +                command::show_references(
 +                    reference_title(locations.len()),
 +                    &url,
 +                    annotation_range.start,
 +                    locations,
 +                )
 +            });
 +
 +            acc.push(lsp_types::CodeLens {
 +                range: annotation_range,
 +                command,
 +                data: Some(to_value(lsp_ext::CodeLensResolveData::References(doc_pos)).unwrap()),
 +            })
 +        }
 +    }
 +    Ok(())
 +}
 +
 +pub(crate) mod command {
 +    use ide::{FileRange, NavigationTarget};
 +    use serde_json::to_value;
 +
 +    use crate::{
 +        global_state::GlobalStateSnapshot,
 +        lsp_ext,
 +        to_proto::{location, location_link},
 +    };
 +
 +    pub(crate) fn show_references(
 +        title: String,
 +        uri: &lsp_types::Url,
 +        position: lsp_types::Position,
 +        locations: Vec<lsp_types::Location>,
 +    ) -> lsp_types::Command {
 +        // We cannot use the 'editor.action.showReferences' command directly
 +        // because that command requires vscode types which we convert in the handler
 +        // on the client side.
 +
 +        lsp_types::Command {
 +            title,
 +            command: "rust-analyzer.showReferences".into(),
 +            arguments: Some(vec![
 +                to_value(uri).unwrap(),
 +                to_value(position).unwrap(),
 +                to_value(locations).unwrap(),
 +            ]),
 +        }
 +    }
 +
 +    pub(crate) fn run_single(runnable: &lsp_ext::Runnable, title: &str) -> lsp_types::Command {
 +        lsp_types::Command {
 +            title: title.to_string(),
 +            command: "rust-analyzer.runSingle".into(),
 +            arguments: Some(vec![to_value(runnable).unwrap()]),
 +        }
 +    }
 +
 +    pub(crate) fn debug_single(runnable: &lsp_ext::Runnable) -> lsp_types::Command {
 +        lsp_types::Command {
 +            title: "Debug".into(),
 +            command: "rust-analyzer.debugSingle".into(),
 +            arguments: Some(vec![to_value(runnable).unwrap()]),
 +        }
 +    }
 +
 +    pub(crate) fn goto_location(
 +        snap: &GlobalStateSnapshot,
 +        nav: &NavigationTarget,
 +    ) -> Option<lsp_types::Command> {
 +        let value = if snap.config.location_link() {
 +            let link = location_link(snap, None, nav.clone()).ok()?;
 +            to_value(link).ok()?
 +        } else {
 +            let range = FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() };
 +            let location = location(snap, range).ok()?;
 +            to_value(location).ok()?
 +        };
 +
 +        Some(lsp_types::Command {
 +            title: nav.name.to_string(),
 +            command: "rust-analyzer.gotoLocation".into(),
 +            arguments: Some(vec![value]),
 +        })
 +    }
 +
 +    pub(crate) fn trigger_parameter_hints() -> lsp_types::Command {
 +        lsp_types::Command {
 +            title: "triggerParameterHints".into(),
 +            command: "editor.action.triggerParameterHints".into(),
 +            arguments: None,
 +        }
 +    }
 +}
 +
 +pub(crate) fn implementation_title(count: usize) -> String {
 +    if count == 1 {
 +        "1 implementation".into()
 +    } else {
 +        format!("{} implementations", count)
 +    }
 +}
 +
 +pub(crate) fn reference_title(count: usize) -> String {
 +    if count == 1 {
 +        "1 reference".into()
 +    } else {
 +        format!("{} references", count)
 +    }
 +}
 +
 +pub(crate) fn markup_content(
 +    markup: Markup,
 +    kind: ide::HoverDocFormat,
 +) -> lsp_types::MarkupContent {
 +    let kind = match kind {
 +        ide::HoverDocFormat::Markdown => lsp_types::MarkupKind::Markdown,
 +        ide::HoverDocFormat::PlainText => lsp_types::MarkupKind::PlainText,
 +    };
 +    let value = crate::markdown::format_docs(markup.as_str());
 +    lsp_types::MarkupContent { kind, value }
 +}
 +
 +pub(crate) fn rename_error(err: RenameError) -> crate::LspError {
 +    // This is wrong, but we don't have a better alternative I suppose?
 +    // https://github.com/microsoft/language-server-protocol/issues/1341
 +    invalid_params_error(err.to_string())
 +}
 +
 +#[cfg(test)]
 +mod tests {
 +    use std::sync::Arc;
 +
 +    use ide::Analysis;
 +
 +    use super::*;
 +
 +    #[test]
 +    fn conv_fold_line_folding_only_fixup() {
 +        let text = r#"mod a;
 +mod b;
 +mod c;
 +
 +fn main() {
 +    if cond {
 +        a::do_a();
 +    } else {
 +        b::do_b();
 +    }
 +}"#;
 +
 +        let (analysis, file_id) = Analysis::from_single_file(text.to_string());
 +        let folds = analysis.folding_ranges(file_id).unwrap();
 +        assert_eq!(folds.len(), 4);
 +
 +        let line_index = LineIndex {
 +            index: Arc::new(ide::LineIndex::new(text)),
 +            endings: LineEndings::Unix,
 +            encoding: OffsetEncoding::Utf16,
 +        };
 +        let converted: Vec<lsp_types::FoldingRange> =
 +            folds.into_iter().map(|it| folding_range(text, &line_index, true, it)).collect();
 +
 +        let expected_lines = [(0, 2), (4, 10), (5, 6), (7, 9)];
 +        assert_eq!(converted.len(), expected_lines.len());
 +        for (folding_range, (start_line, end_line)) in converted.iter().zip(expected_lines.iter()) {
 +            assert_eq!(folding_range.start_line, *start_line);
 +            assert_eq!(folding_range.start_character, None);
 +            assert_eq!(folding_range.end_line, *end_line);
 +            assert_eq!(folding_range.end_character, None);
 +        }
 +    }
 +
 +    // `Url` is not able to parse windows paths on unix machines.
 +    #[test]
 +    #[cfg(target_os = "windows")]
 +    fn test_lowercase_drive_letter() {
 +        use std::path::Path;
 +
 +        let url = url_from_abs_path(Path::new("C:\\Test").try_into().unwrap());
 +        assert_eq!(url.to_string(), "file:///c:/Test");
 +
 +        let url = url_from_abs_path(Path::new(r#"\\localhost\C$\my_dir"#).try_into().unwrap());
 +        assert_eq!(url.to_string(), "file://localhost/C$/my_dir");
 +    }
 +}
index 0000000000000000000000000000000000000000,0000000000000000000000000000000000000000..9909d71bdf066bd4117531998f054aeeaab45444
new file mode 100644 (file)
--- /dev/null
--- /dev/null
@@@ -1,0 -1,0 +1,80 @@@
++//! A non-hashing [`Hasher`] implementation: the "hash" of an integer key is
++//! the integer itself. Only use it with keys that are already well
++//! distributed (e.g. interned ids); it offers no DoS resistance.
++use std::{
++    hash::{BuildHasher, Hasher},
++    marker::PhantomData,
++};
++
++pub type NoHashHashMap<K, V> = std::collections::HashMap<K, V, NoHashHasherBuilder<K>>;
++pub type NoHashHashSet<K> = std::collections::HashSet<K, NoHashHasherBuilder<K>>;
++
++// `PhantomData<T>` ties the builder to its key type, so the aliases above can
++// only be used with types implementing `NoHashHashable` (enforced by the
++// `BuildHasher` impl below).
++#[derive(Copy, Clone, Debug, PartialEq, Eq)]
++pub struct NoHashHasherBuilder<T>(PhantomData<T>);
++
++// Manual impl: deriving `Default` would spuriously require `T: Default`.
++impl<T> Default for NoHashHasherBuilder<T> {
++    fn default() -> Self {
++        Self(Default::default())
++    }
++}
++
++/// Marker trait for key types whose value may be used directly as its hash.
++pub trait NoHashHashable {}
++impl NoHashHashable for usize {}
++impl NoHashHashable for u32 {}
++
++/// Stores the last integer written to it and returns it from `finish`.
++pub struct NoHashHasher(u64);
++
++impl<T: NoHashHashable> BuildHasher for NoHashHasherBuilder<T> {
++    type Hasher = NoHashHasher;
++    fn build_hasher(&self) -> Self::Hasher {
++        NoHashHasher(0)
++    }
++}
++
++impl Hasher for NoHashHasher {
++    fn finish(&self) -> u64 {
++        self.0
++    }
++
++    // Arbitrary byte slices have no identity hash; keys used with this hasher
++    // must hash via a single primitive-integer write (one of the methods
++    // below).
++    fn write(&mut self, _: &[u8]) {
++        unimplemented!("NoHashHasher should only be used for hashing primitive integers")
++    }
++
++    fn write_u8(&mut self, i: u8) {
++        self.0 = i as u64;
++    }
++
++    fn write_u16(&mut self, i: u16) {
++        self.0 = i as u64;
++    }
++
++    fn write_u32(&mut self, i: u32) {
++        self.0 = i as u64;
++    }
++
++    fn write_u64(&mut self, i: u64) {
++        self.0 = i as u64;
++    }
++
++    fn write_usize(&mut self, i: usize) {
++        self.0 = i as u64;
++    }
++
++    // Signed writes sign-extend into the `u64` accumulator.
++    fn write_i8(&mut self, i: i8) {
++        self.0 = i as u64;
++    }
++
++    fn write_i16(&mut self, i: i16) {
++        self.0 = i as u64;
++    }
++
++    fn write_i32(&mut self, i: i32) {
++        self.0 = i as u64;
++    }
++
++    fn write_i64(&mut self, i: i64) {
++        self.0 = i as u64;
++    }
++
++    fn write_isize(&mut self, i: isize) {
++        self.0 = i as u64;
++    }
++}
index b4d45206c44ea6fabb1b4cd8a73983d0e7e0e45c,0000000000000000000000000000000000000000..51e109798d1df3c22f119920424fd11969986936
mode 100644,000000..100644
--- /dev/null
@@@ -1,247 -1,0 +1,248 @@@
 +//! Missing batteries for standard libraries.
 +
 +#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
 +
 +use std::process::Command;
 +use std::{cmp::Ordering, ops, time::Instant};
 +use std::{io as sio, iter};
 +
 +mod macros;
++pub mod hash;
 +pub mod process;
 +pub mod panic_context;
 +pub mod non_empty_vec;
 +
 +pub use always_assert::{always, never};
 +
 +/// Whether the `CI` environment variable was set *at compile time*
 +/// (`option_env!` is evaluated during compilation, not at runtime).
 +#[inline(always)]
 +pub fn is_ci() -> bool {
 +    option_env!("CI").is_some()
 +}
 +
 +/// Measures wall-clock time from the call until the returned guard is
 +/// dropped, then prints `label: elapsed` to stderr.
 +#[must_use]
 +pub fn timeit(label: &'static str) -> impl Drop {
 +    let start = Instant::now();
 +    defer(move || eprintln!("{}: {:.2?}", label, start.elapsed()))
 +}
 +
 +/// Prints backtrace to stderr, useful for debugging.
 +pub fn print_backtrace() {
 +    #[cfg(feature = "backtrace")]
 +    eprintln!("{:?}", backtrace::Backtrace::new());
 +
 +    // Without the `backtrace` feature we can only tell the user how to
 +    // enable it.
 +    #[cfg(not(feature = "backtrace"))]
 +    eprintln!(
 +        r#"Enable the backtrace feature.
 +Uncomment `default = [ "backtrace" ]` in `crates/stdx/Cargo.toml`.
 +"#
 +    );
 +}
 +
 +/// Converts `s` to `lower_snake_case`.
 +pub fn to_lower_snake_case(s: &str) -> String {
 +    to_snake_case(s, char::to_ascii_lowercase)
 +}
 +/// Converts `s` to `UPPER_SNAKE_CASE`.
 +pub fn to_upper_snake_case(s: &str) -> String {
 +    to_snake_case(s, char::to_ascii_uppercase)
 +}
 +
 +// Code partially taken from rust/compiler/rustc_lint/src/nonstandard_style.rs
 +// commit: 9626f2b
 +/// Splits `s` into words at `_` and at lower-to-upper case transitions,
 +/// applies `change_case` to every character, and joins the words with `_`.
 +fn to_snake_case<F: Fn(&char) -> char>(mut s: &str, change_case: F) -> String {
 +    let mut words = vec![];
 +
 +    // Preserve leading underscores
 +    s = s.trim_start_matches(|c: char| {
 +        if c == '_' {
 +            // Pushing an empty word makes the final `join("_")` re-emit the
 +            // stripped underscore.
 +            words.push(String::new());
 +            true
 +        } else {
 +            false
 +        }
 +    });
 +
 +    for s in s.split('_') {
 +        let mut last_upper = false;
 +        let mut buf = String::new();
 +
 +        if s.is_empty() {
 +            continue;
 +        }
 +
 +        for ch in s.chars() {
 +            // Start a new word on a lowercase-to-uppercase boundary; runs of
 +            // consecutive uppercase letters (acronyms) stay in one word.
 +            if !buf.is_empty() && buf != "'" && ch.is_uppercase() && !last_upper {
 +                words.push(buf);
 +                buf = String::new();
 +            }
 +
 +            last_upper = ch.is_uppercase();
 +            buf.extend(iter::once(change_case(&ch)));
 +        }
 +
 +        words.push(buf);
 +    }
 +
 +    words.join("_")
 +}
 +
 +/// Replaces every occurrence of `from` in `buf` with `to`.
 +/// Currently only the no-occurrence fast path avoids the reallocation.
 +pub fn replace(buf: &mut String, from: char, to: &str) {
 +    if !buf.contains(from) {
 +        return;
 +    }
 +    // FIXME: do this in place.
 +    *buf = buf.replace(from, to);
 +}
 +
 +/// Removes the common leading indentation from every line of `text`.
 +/// Blank lines are ignored when computing the common indent.
 +pub fn trim_indent(mut text: &str) -> String {
 +    // A leading newline is just a formatting artifact of raw strings.
 +    if text.starts_with('\n') {
 +        text = &text[1..];
 +    }
 +    let indent = text
 +        .lines()
 +        .filter(|it| !it.trim().is_empty())
 +        .map(|it| it.len() - it.trim_start().len())
 +        .min()
 +        .unwrap_or(0);
 +    text.split_inclusive('\n')
 +        .map(
 +            |line| {
 +                // Whitespace-only lines may be shorter than the indent.
 +                if line.len() <= indent {
 +                    line.trim_start_matches(' ')
 +                } else {
 +                    &line[indent..]
 +                }
 +            },
 +        )
 +        .collect()
 +}
 +
 +/// Returns the index range in the sorted `slice` for which `key` returns
 +/// [`Ordering::Equal`] (a binary-search based "equal range").
 +pub fn equal_range_by<T, F>(slice: &[T], mut key: F) -> ops::Range<usize>
 +where
 +    F: FnMut(&T) -> Ordering,
 +{
 +    let start = slice.partition_point(|it| key(it) == Ordering::Less);
 +    let len = slice[start..].partition_point(|it| key(it) == Ordering::Equal);
 +    start..start + len
 +}
 +
 +/// Runs `f` when the returned guard is dropped (scope-exit helper).
 +#[must_use]
 +pub fn defer<F: FnOnce()>(f: F) -> impl Drop {
 +    struct D<F: FnOnce()>(Option<F>);
 +    impl<F: FnOnce()> Drop for D<F> {
 +        fn drop(&mut self) {
 +            // `Option::take` guarantees `f` runs at most once.
 +            if let Some(f) = self.0.take() {
 +                f();
 +            }
 +        }
 +    }
 +    D(Some(f))
 +}
 +
 +/// A [`std::process::Child`] wrapper that will kill the child on drop.
 +#[cfg_attr(not(target_arch = "wasm32"), repr(transparent))]
 +#[derive(Debug)]
 +pub struct JodChild(pub std::process::Child);
 +
 +impl ops::Deref for JodChild {
 +    type Target = std::process::Child;
 +    fn deref(&self) -> &std::process::Child {
 +        &self.0
 +    }
 +}
 +
 +impl ops::DerefMut for JodChild {
 +    fn deref_mut(&mut self) -> &mut std::process::Child {
 +        &mut self.0
 +    }
 +}
 +
 +impl Drop for JodChild {
 +    fn drop(&mut self) {
 +        // Best effort: the child may already have exited, so errors from
 +        // `kill`/`wait` are deliberately ignored. The `wait` reaps the
 +        // process to avoid leaving a zombie behind.
 +        let _ = self.0.kill();
 +        let _ = self.0.wait();
 +    }
 +}
 +
 +impl JodChild {
 +    /// Spawns `command`, wrapping the resulting child process.
 +    pub fn spawn(mut command: Command) -> sio::Result<Self> {
 +        command.spawn().map(Self)
 +    }
 +
 +    /// Unwraps the inner [`std::process::Child`] without killing it.
 +    pub fn into_inner(self) -> std::process::Child {
 +        if cfg!(target_arch = "wasm32") {
 +            panic!("no processes on wasm");
 +        }
 +        // SAFETY: repr transparent, except on WASM
 +        unsafe { std::mem::transmute::<JodChild, std::process::Child>(self) }
 +    }
 +}
 +
 +// feature: iter_order_by
 +// Iterator::eq_by
 +/// Returns `true` if `this` and `other` yield sequences of equal length whose
 +/// corresponding items satisfy `eq` (polyfill for the unstable
 +/// `Iterator::eq_by`).
 +pub fn iter_eq_by<I, I2, F>(this: I2, other: I, mut eq: F) -> bool
 +where
 +    I: IntoIterator,
 +    I2: IntoIterator,
 +    F: FnMut(I2::Item, I::Item) -> bool,
 +{
 +    let mut other = other.into_iter();
 +    let mut this = this.into_iter();
 +
 +    loop {
 +        // Equal only if both iterators are exhausted at the same step.
 +        let x = match this.next() {
 +            None => return other.next().is_none(),
 +            Some(val) => val,
 +        };
 +
 +        let y = match other.next() {
 +            None => return false,
 +            Some(val) => val,
 +        };
 +
 +        if !eq(x, y) {
 +            return false;
 +        }
 +    }
 +}
 +
 +/// Returns all final segments of the argument, longest first.
 +pub fn slice_tails<T>(this: &[T]) -> impl Iterator<Item = &[T]> {
 +    (0..this.len()).map(|i| &this[i..])
 +}
 +
 +#[cfg(test)]
 +mod tests {
 +    use super::*;
 +
 +    #[test]
 +    fn test_trim_indent() {
 +        // Empty input, trailing-newline and no-trailing-newline variants.
 +        assert_eq!(trim_indent(""), "");
 +        assert_eq!(
 +            trim_indent(
 +                "
 +            hello
 +            world
 +"
 +            ),
 +            "hello\nworld\n"
 +        );
 +        assert_eq!(
 +            trim_indent(
 +                "
 +            hello
 +            world"
 +            ),
 +            "hello\nworld"
 +        );
 +        assert_eq!(trim_indent("    hello\n    world\n"), "hello\nworld\n");
 +        // The common indent is computed over non-blank lines only; relative
 +        // (deeper) indentation is preserved.
 +        assert_eq!(
 +            trim_indent(
 +                "
 +            fn main() {
 +                return 92;
 +            }
 +        "
 +            ),
 +            "fn main() {\n    return 92;\n}\n"
 +        );
 +    }
 +}
index c6377348784a46a2eea7de09d11968b8e6c079e2,0000000000000000000000000000000000000000..d7549a2841539bb1e9df66e5592d99b5d7c8a809
mode 100644,000000..100644
--- /dev/null
@@@ -1,17 -1,0 +1,18 @@@
- indexmap = "1.9.1"
 +[package]
 +name = "vfs"
 +version = "0.0.0"
 +description = "TBD"
 +license = "MIT OR Apache-2.0"
 +edition = "2021"
 +rust-version = "1.57"
 +
 +[lib]
 +doctest = false
 +
 +[dependencies]
 +rustc-hash = "1.1.0"
 +fst = "0.4.7"
++indexmap = "1.9.1"
 +
 +paths = { path = "../paths", version = "0.0.0" }
++stdx = { path = "../stdx", version = "0.0.0" }
index 6a89263e53988723aaa0623a036c3f1e69ff1c51,0000000000000000000000000000000000000000..e0ef737b3fc09efe338e12ac996d8faf3690134a
mode 100644,000000..100644
--- /dev/null
@@@ -1,218 -1,0 +1,219 @@@
-     paths: FxHashMap<FileId, VfsPath>,
 +//! Partitions a list of files into disjoint subsets.
 +//!
 +//! Files which do not belong to any explicitly configured `FileSet` belong to
 +//! the default `FileSet`.
 +use std::fmt;
 +
 +use fst::{IntoStreamer, Streamer};
 +use rustc_hash::FxHashMap;
++use stdx::hash::NoHashHashMap;
 +
 +use crate::{AnchoredPath, FileId, Vfs, VfsPath};
 +
 +/// A set of [`VfsPath`]s identified by [`FileId`]s.
 +#[derive(Default, Clone, Eq, PartialEq)]
 +pub struct FileSet {
 +    // Forward mapping: path -> id.
 +    files: FxHashMap<VfsPath, FileId>,
 +    // Reverse mapping: id -> path. `FileId` is an interned integer, so the
 +    // identity-hash map avoids redundant hashing work.
++    paths: NoHashHashMap<FileId, VfsPath>,
 +}
 +
 +impl FileSet {
 +    /// Returns the number of stored paths.
 +    pub fn len(&self) -> usize {
 +        self.files.len()
 +    }
 +
 +    /// Get the id of the file corresponding to `path`.
 +    ///
 +    /// Returns [`None`] if the path resolved relative to the anchor's
 +    /// directory is not in the set.
 +    ///
 +    /// # Panics
 +    /// Panics if `path`'s [`anchor`](AnchoredPath::anchor) itself is not part
 +    /// of this set (the anchor is looked up by indexing).
 +    pub fn resolve_path(&self, path: AnchoredPath<'_>) -> Option<FileId> {
 +        // Drop the anchor's final segment to get its parent directory, then
 +        // resolve the relative `path.path` against it.
 +        let mut base = self.paths[&path.anchor].clone();
 +        base.pop();
 +        let path = base.join(path.path)?;
 +        self.files.get(&path).copied()
 +    }
 +
 +    /// Get the id corresponding to `path` if it exists in the set.
 +    pub fn file_for_path(&self, path: &VfsPath) -> Option<&FileId> {
 +        self.files.get(path)
 +    }
 +
 +    /// Get the path corresponding to `file` if it exists in the set.
 +    pub fn path_for_file(&self, file: &FileId) -> Option<&VfsPath> {
 +        self.paths.get(file)
 +    }
 +
 +    /// Insert the `file_id, path` pair into the set.
 +    ///
 +    /// # Note
 +    /// Multiple [`FileId`] can be mapped to the same [`VfsPath`], and vice-versa.
 +    pub fn insert(&mut self, file_id: FileId, path: VfsPath) {
 +        self.files.insert(path.clone(), file_id);
 +        self.paths.insert(file_id, path);
 +    }
 +
 +    /// Iterate over this set's ids.
 +    pub fn iter(&self) -> impl Iterator<Item = FileId> + '_ {
 +        self.paths.keys().copied()
 +    }
 +}
 +
 +// Compact debug output: only the file count, not the contents.
 +impl fmt::Debug for FileSet {
 +    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 +        f.debug_struct("FileSet").field("n_files", &self.files.len()).finish()
 +    }
 +}
 +
 +/// This contains path prefixes to partition a [`Vfs`] into [`FileSet`]s.
 +///
 +/// # Example
 +/// ```rust
 +/// # use vfs::{file_set::FileSetConfigBuilder, VfsPath, Vfs};
 +/// let mut builder = FileSetConfigBuilder::default();
 +/// builder.add_file_set(vec![VfsPath::new_virtual_path("/src".to_string())]);
 +/// let config = builder.build();
 +/// let mut file_system = Vfs::default();
 +/// file_system.set_file_contents(VfsPath::new_virtual_path("/src/main.rs".to_string()), Some(vec![]));
 +/// file_system.set_file_contents(VfsPath::new_virtual_path("/src/lib.rs".to_string()), Some(vec![]));
 +/// file_system.set_file_contents(VfsPath::new_virtual_path("/build.rs".to_string()), Some(vec![]));
 +/// // contains the sets :
 +/// // { "/src/main.rs", "/src/lib.rs" }
 +/// // { "build.rs" }
 +/// let sets = config.partition(&file_system);
 +/// ```
 +#[derive(Debug)]
 +pub struct FileSetConfig {
 +    /// Number of sets that `self` can partition a [`Vfs`] into.
 +    ///
 +    /// This should be the number of sets in `self.map` + 1 for files that don't fit in any
 +    /// defined set.
 +    n_file_sets: usize,
 +    /// Map from encoded paths to the set they belong to.
 +    map: fst::Map<Vec<u8>>,
 +}
 +
 +impl Default for FileSetConfig {
 +    fn default() -> Self {
 +        FileSetConfig::builder().build()
 +    }
 +}
 +
 +impl FileSetConfig {
 +    /// Returns a builder for `FileSetConfig`.
 +    pub fn builder() -> FileSetConfigBuilder {
 +        FileSetConfigBuilder::default()
 +    }
 +
 +    /// Partition `vfs` into `FileSet`s.
 +    ///
 +    /// Creates a new [`FileSet`] for every set of prefixes in `self`.
 +    pub fn partition(&self, vfs: &Vfs) -> Vec<FileSet> {
 +        // Reused encoding buffer, to avoid an allocation per file.
 +        let mut scratch_space = Vec::new();
 +        let mut res = vec![FileSet::default(); self.len()];
 +        for (file_id, path) in vfs.iter() {
 +            let root = self.classify(path, &mut scratch_space);
 +            res[root].insert(file_id, path.clone());
 +        }
 +        res
 +    }
 +
 +    /// Number of sets that `self` can partition a [`Vfs`] into.
 +    fn len(&self) -> usize {
 +        self.n_file_sets
 +    }
 +
 +    /// Returns the set index for the given `path`.
 +    ///
 +    /// `scratch_space` is used as a buffer and will be entirely replaced.
 +    fn classify(&self, path: &VfsPath, scratch_space: &mut Vec<u8>) -> usize {
 +        scratch_space.clear();
 +        path.encode(scratch_space);
 +        let automaton = PrefixOf::new(scratch_space.as_slice());
 +        // Default to the last, catch-all set for paths matching no prefix.
 +        let mut longest_prefix = self.len() - 1;
 +        // The fst streams matching keys in lexicographic order; the keys that
 +        // match are all prefixes of `path`, which sort shortest-first, so the
 +        // last match seen belongs to the longest prefix.
 +        let mut stream = self.map.search(automaton).into_stream();
 +        while let Some((_, v)) = stream.next() {
 +            longest_prefix = v as usize;
 +        }
 +        longest_prefix
 +    }
 +}
 +
 +/// Builder for [`FileSetConfig`].
 +pub struct FileSetConfigBuilder {
 +    roots: Vec<Vec<VfsPath>>,
 +}
 +
 +impl Default for FileSetConfigBuilder {
 +    fn default() -> Self {
 +        FileSetConfigBuilder { roots: Vec::new() }
 +    }
 +}
 +
 +impl FileSetConfigBuilder {
 +    /// Returns the number of sets currently held.
 +    pub fn len(&self) -> usize {
 +        self.roots.len()
 +    }
 +
 +    /// Add a new set of paths prefixes.
 +    pub fn add_file_set(&mut self, roots: Vec<VfsPath>) {
 +        self.roots.push(roots);
 +    }
 +
 +    /// Build the `FileSetConfig`.
 +    pub fn build(self) -> FileSetConfig {
 +        // + 1 for the catch-all set holding files that match no prefix.
 +        let n_file_sets = self.roots.len() + 1;
 +        let map = {
 +            let mut entries = Vec::new();
 +            for (i, paths) in self.roots.into_iter().enumerate() {
 +                for p in paths {
 +                    let mut buf = Vec::new();
 +                    p.encode(&mut buf);
 +                    entries.push((buf, i as u64));
 +                }
 +            }
 +            // `fst::Map::from_iter` requires keys in sorted order without
 +            // duplicates. After the sort, `dedup_by` keeps the first entry
 +            // for each key, i.e. the set with the smallest index wins when a
 +            // prefix is listed in several sets.
 +            entries.sort();
 +            entries.dedup_by(|(a, _), (b, _)| a == b);
 +            fst::Map::from_iter(entries).unwrap()
 +        };
 +        FileSetConfig { n_file_sets, map }
 +    }
 +}
 +
 +/// Implements [`fst::Automaton`]
 +///
 +/// It will match any key that is a prefix of `prefix_of` (note: not the other
 +/// way round — `accept` only advances while the key's bytes match
 +/// `prefix_of`'s, and dies once the key would grow past it).
 +struct PrefixOf<'a> {
 +    prefix_of: &'a [u8],
 +}
 +
 +impl<'a> PrefixOf<'a> {
 +    /// Creates a new `PrefixOf` from the given slice.
 +    fn new(prefix_of: &'a [u8]) -> Self {
 +        Self { prefix_of }
 +    }
 +}
 +
 +// The state is the number of bytes matched so far; `!0` is the dead state
 +// entered on the first mismatch.
 +impl fst::Automaton for PrefixOf<'_> {
 +    type State = usize;
 +    fn start(&self) -> usize {
 +        0
 +    }
 +    fn is_match(&self, &state: &usize) -> bool {
 +        state != !0
 +    }
 +    fn can_match(&self, &state: &usize) -> bool {
 +        state != !0
 +    }
 +    fn accept(&self, &state: &usize, byte: u8) -> usize {
 +        if self.prefix_of.get(state) == Some(&byte) {
 +            state + 1
 +        } else {
 +            !0
 +        }
 +    }
 +}
 +
 +#[cfg(test)]
 +mod tests;
index 7badb1c363b44b46cfa0b74766a6fd74eae9d878,0000000000000000000000000000000000000000..afc9a0fa6fb206b1a0c41024137639f016518697
mode 100644,000000..100644
--- /dev/null
@@@ -1,221 -1,0 +1,228 @@@
- #[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
 +//! # Virtual File System
 +//!
 +//! VFS stores all files read by rust-analyzer. Reading file contents from VFS
 +//! always returns the same contents, unless VFS was explicitly modified with
 +//! [`set_file_contents`]. All changes to VFS are logged, and can be retrieved via
 +//! [`take_changes`] method. The pack of changes is then pushed to `salsa` and
 +//! triggers incremental recomputation.
 +//!
 +//! Files in VFS are identified with [`FileId`]s -- interned paths. The notion of
 +//! the path, [`VfsPath`] is somewhat abstract: at the moment, it is represented
 +//! as an [`std::path::PathBuf`] internally, but this is an implementation detail.
 +//!
 +//! VFS doesn't do IO or file watching itself. For that, see the [`loader`]
 +//! module. [`loader::Handle`] is an object-safe trait which abstracts both file
 +//! loading and file watching. [`Handle`] is dynamically configured with a set of
 +//! directory entries which should be scanned and watched. [`Handle`] then
 +//! asynchronously pushes file changes. Directory entries are configured in
 +//! free-form via list of globs, it's up to the [`Handle`] to interpret the globs
 +//! in any specific way.
 +//!
 +//! VFS stores a flat list of files. [`file_set::FileSet`] can partition this list
 +//! of files into disjoint sets of files. Traversal-like operations (including
 +//! getting the neighbor file by the relative path) are handled by the [`FileSet`].
 +//! [`FileSet`]s are also pushed to salsa and cause it to re-check `mod foo;`
 +//! declarations when files are created or deleted.
 +//!
 +//! [`FileSet`] and [`loader::Entry`] play similar, but different roles.
 +//! Both specify the "set of paths/files", one is geared towards file watching,
 +//! the other towards salsa changes. In particular, single [`FileSet`]
 +//! may correspond to several [`loader::Entry`]. For example, a crate from
 +//! crates.io which uses code generation would have two [`Entries`] -- for sources
 +//! in `~/.cargo`, and for generated code in `./target/debug/build`. It will
 +//! have a single [`FileSet`] which unions the two sources.
 +//!
 +//! [`set_file_contents`]: Vfs::set_file_contents
 +//! [`take_changes`]: Vfs::take_changes
 +//! [`FileSet`]: file_set::FileSet
 +//! [`Handle`]: loader::Handle
 +//! [`Entries`]: loader::Entry
 +
 +#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
 +
 +mod anchored_path;
 +pub mod file_set;
 +pub mod loader;
 +mod path_interner;
 +mod vfs_path;
 +
 +use std::{fmt, mem};
 +
 +use crate::path_interner::PathInterner;
 +
 +pub use crate::{
 +    anchored_path::{AnchoredPath, AnchoredPathBuf},
 +    vfs_path::VfsPath,
 +};
 +pub use paths::{AbsPath, AbsPathBuf};
 +
 +/// Handle to a file in [`Vfs`]
 +///
 +/// Most functions in rust-analyzer use this when they need to refer to a file.
++#[derive(Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq)]
 +pub struct FileId(pub u32);
 +
++// `FileId` opts in to the identity-hash maps in `stdx::hash`: interned ids
++// are already well distributed, so the id is used directly as the hash value.
++impl stdx::hash::NoHashHashable for FileId {}
++// Hashing forwards to the inner `u32`, so hashing a `FileId` performs exactly
++// one integer write — the pattern `stdx::hash::NoHashHasher` supports.
++impl std::hash::Hash for FileId {
++    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
++        self.0.hash(state);
++    }
++}
++
 +/// Storage for all files read by rust-analyzer.
 +///
 +/// For more information see the [crate-level](crate) documentation.
 +#[derive(Default)]
 +pub struct Vfs {
 +    // Interns `VfsPath`s; the resulting `FileId`s index into `data`.
 +    interner: PathInterner,
 +    // File contents, indexed by `FileId.0`; `None` marks a deleted file.
 +    data: Vec<Option<Vec<u8>>>,
 +    // Changes accumulated since the last call to `take_changes`.
 +    changes: Vec<ChangedFile>,
 +}
 +
 +/// Changed file in the [`Vfs`].
 +pub struct ChangedFile {
 +    /// Id of the changed file
 +    pub file_id: FileId,
 +    /// Kind of change
 +    pub change_kind: ChangeKind,
 +}
 +
 +impl ChangedFile {
 +    /// Returns `true` if the change is not [`Delete`](ChangeKind::Delete).
 +    pub fn exists(&self) -> bool {
 +        self.change_kind != ChangeKind::Delete
 +    }
 +
 +    /// Returns `true` if the change is [`Create`](ChangeKind::Create) or
 +    /// [`Delete`](ChangeKind::Delete).
 +    pub fn is_created_or_deleted(&self) -> bool {
 +        matches!(self.change_kind, ChangeKind::Create | ChangeKind::Delete)
 +    }
 +}
 +
 +/// Kind of [file change](ChangedFile).
 +#[derive(Eq, PartialEq, Copy, Clone, Debug)]
 +pub enum ChangeKind {
 +    /// The file was (re-)created
 +    Create,
 +    /// The file was modified
 +    Modify,
 +    /// The file was deleted
 +    Delete,
 +}
 +
 +impl Vfs {
 +    /// Amount of files currently stored.
 +    ///
 +    /// Note that this includes deleted files.
 +    pub fn len(&self) -> usize {
 +        self.data.len()
 +    }
 +
 +    /// Id of the given path if it exists in the `Vfs` and is not deleted.
 +    pub fn file_id(&self, path: &VfsPath) -> Option<FileId> {
 +        self.interner.get(path).filter(|&it| self.get(it).is_some())
 +    }
 +
 +    /// File path corresponding to the given `file_id`.
 +    ///
 +    /// # Panics
 +    ///
 +    /// Panics if the id is not present in the `Vfs`.
 +    pub fn file_path(&self, file_id: FileId) -> VfsPath {
 +        self.interner.lookup(file_id).clone()
 +    }
 +
 +    /// File content corresponding to the given `file_id`.
 +    ///
 +    /// # Panics
 +    ///
 +    /// Panics if the id is not present in the `Vfs`, or if the corresponding file is
 +    /// deleted.
 +    pub fn file_contents(&self, file_id: FileId) -> &[u8] {
 +        self.get(file_id).as_deref().unwrap()
 +    }
 +
 +    /// Returns an iterator over the stored ids and their corresponding paths.
 +    ///
 +    /// This will skip deleted files.
 +    pub fn iter(&self) -> impl Iterator<Item = (FileId, &VfsPath)> + '_ {
 +        (0..self.data.len())
 +            .map(|it| FileId(it as u32))
 +            .filter(move |&file_id| self.get(file_id).is_some())
 +            .map(move |file_id| {
 +                let path = self.interner.lookup(file_id);
 +                (file_id, path)
 +            })
 +    }
 +
 +    /// Update the `path` with the given `contents`. `None` means the file was deleted.
 +    ///
 +    /// Returns `true` if the file was modified, and saves the [change](ChangedFile).
 +    ///
 +    /// If the path does not currently exist in the `Vfs`, allocates a new
 +    /// [`FileId`] for it.
 +    pub fn set_file_contents(&mut self, path: VfsPath, contents: Option<Vec<u8>>) -> bool {
 +        let file_id = self.alloc_file_id(path);
 +        // Classify the (old, new) transition; no-op updates (still deleted,
 +        // or identical contents) are not recorded as changes.
 +        let change_kind = match (&self.get(file_id), &contents) {
 +            (None, None) => return false,
 +            (None, Some(_)) => ChangeKind::Create,
 +            (Some(_), None) => ChangeKind::Delete,
 +            (Some(old), Some(new)) if old == new => return false,
 +            (Some(_), Some(_)) => ChangeKind::Modify,
 +        };
 +
 +        *self.get_mut(file_id) = contents;
 +        self.changes.push(ChangedFile { file_id, change_kind });
 +        true
 +    }
 +
 +    /// Returns `true` if the `Vfs` contains [changes](ChangedFile).
 +    pub fn has_changes(&self) -> bool {
 +        !self.changes.is_empty()
 +    }
 +
 +    /// Drain and returns all the changes in the `Vfs`.
 +    pub fn take_changes(&mut self) -> Vec<ChangedFile> {
 +        mem::take(&mut self.changes)
 +    }
 +
 +    /// Returns the id associated with `path`
 +    ///
 +    /// - If `path` does not exist in the `Vfs`, allocate a new id for it, associated with a
 +    /// deleted file;
 +    /// - Else, returns `path`'s id.
 +    ///
 +    /// Does not record a change.
 +    fn alloc_file_id(&mut self, path: VfsPath) -> FileId {
 +        let file_id = self.interner.intern(path);
 +        let idx = file_id.0 as usize;
 +        // Grow `data` so the (possibly new) id has a slot; new slots start
 +        // out as `None`, i.e. deleted.
 +        let len = self.data.len().max(idx + 1);
 +        self.data.resize_with(len, || None);
 +        file_id
 +    }
 +
 +    /// Returns the content associated with the given `file_id`.
 +    ///
 +    /// # Panics
 +    ///
 +    /// Panics if no file is associated to that id.
 +    fn get(&self, file_id: FileId) -> &Option<Vec<u8>> {
 +        &self.data[file_id.0 as usize]
 +    }
 +
 +    /// Mutably returns the content associated with the given `file_id`.
 +    ///
 +    /// # Panics
 +    ///
 +    /// Panics if no file is associated to that id.
 +    fn get_mut(&mut self, file_id: FileId) -> &mut Option<Vec<u8>> {
 +        &mut self.data[file_id.0 as usize]
 +    }
 +}
 +
 +// Compact debug output: contents are elided, only the file count is shown.
 +impl fmt::Debug for Vfs {
 +    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 +        f.debug_struct("Vfs").field("n_files", &self.data.len()).finish()
 +    }
 +}
index 751ec79af0fd4f2bcb644017d6209eaecde5e4fb,0000000000000000000000000000000000000000..72b925726479e014c18142acb6b4546bffc2b2b8
mode 100644,000000..100644
--- /dev/null
@@@ -1,630 -1,0 +1,676 @@@
 +[[rust-analyzer.assist.expressionFillDefault]]rust-analyzer.assist.expressionFillDefault (default: `"todo"`)::
 ++
 +--
 +Placeholder expression to use for missing expressions in assists.
 +--
 +[[rust-analyzer.cachePriming.enable]]rust-analyzer.cachePriming.enable (default: `true`)::
 ++
 +--
 +Warm up caches on project load.
 +--
 +[[rust-analyzer.cachePriming.numThreads]]rust-analyzer.cachePriming.numThreads (default: `0`)::
 ++
 +--
 +How many worker threads to handle priming caches. The default `0` means to pick automatically.
 +--
 +[[rust-analyzer.cargo.autoreload]]rust-analyzer.cargo.autoreload (default: `true`)::
 ++
 +--
 +Automatically refresh project info via `cargo metadata` on
 +`Cargo.toml` or `.cargo/config.toml` changes.
 +--
 +[[rust-analyzer.cargo.buildScripts.enable]]rust-analyzer.cargo.buildScripts.enable (default: `true`)::
 ++
 +--
 +Run build scripts (`build.rs`) for more precise code analysis.
 +--
 +[[rust-analyzer.cargo.buildScripts.overrideCommand]]rust-analyzer.cargo.buildScripts.overrideCommand (default: `null`)::
 ++
 +--
 +Override the command rust-analyzer uses to run build scripts and
 +build procedural macros. The command is required to output json
 +and should therefore include `--message-format=json` or a similar
 +option.
 +
 +By default, a cargo invocation will be constructed for the configured
 +targets and features, with the following base command line:
 +
 +```bash
 +cargo check --quiet --workspace --message-format=json --all-targets
 +```
 +.
 +--
 +[[rust-analyzer.cargo.buildScripts.useRustcWrapper]]rust-analyzer.cargo.buildScripts.useRustcWrapper (default: `true`)::
 ++
 +--
 +Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to
 +avoid checking unnecessary things.
 +--
 +[[rust-analyzer.cargo.features]]rust-analyzer.cargo.features (default: `[]`)::
 ++
 +--
 +List of features to activate.
 +
 +Set this to `"all"` to pass `--all-features` to cargo.
 +--
 +[[rust-analyzer.cargo.noDefaultFeatures]]rust-analyzer.cargo.noDefaultFeatures (default: `false`)::
 ++
 +--
 +Whether to pass `--no-default-features` to cargo.
 +--
 +[[rust-analyzer.cargo.noSysroot]]rust-analyzer.cargo.noSysroot (default: `false`)::
 ++
 +--
 +Internal config for debugging, disables loading of sysroot crates.
 +--
 +[[rust-analyzer.cargo.target]]rust-analyzer.cargo.target (default: `null`)::
 ++
 +--
 +Compilation target override (target triple).
 +--
 +[[rust-analyzer.cargo.unsetTest]]rust-analyzer.cargo.unsetTest (default: `["core"]`)::
 ++
 +--
 +Unsets `#[cfg(test)]` for the specified crates.
 +--
 +[[rust-analyzer.checkOnSave.allTargets]]rust-analyzer.checkOnSave.allTargets (default: `true`)::
 ++
 +--
 +Check all targets and tests (`--all-targets`).
 +--
 +[[rust-analyzer.checkOnSave.command]]rust-analyzer.checkOnSave.command (default: `"check"`)::
 ++
 +--
 +Cargo command to use for `cargo check`.
 +--
 +[[rust-analyzer.checkOnSave.enable]]rust-analyzer.checkOnSave.enable (default: `true`)::
 ++
 +--
 +Run specified `cargo check` command for diagnostics on save.
 +--
 +[[rust-analyzer.checkOnSave.extraArgs]]rust-analyzer.checkOnSave.extraArgs (default: `[]`)::
 ++
 +--
 +Extra arguments for `cargo check`.
 +--
 +[[rust-analyzer.checkOnSave.features]]rust-analyzer.checkOnSave.features (default: `null`)::
 ++
 +--
 +List of features to activate. Defaults to
 +`#rust-analyzer.cargo.features#`.
 +
 +Set to `"all"` to pass `--all-features` to Cargo.
 +--
 +[[rust-analyzer.checkOnSave.noDefaultFeatures]]rust-analyzer.checkOnSave.noDefaultFeatures (default: `null`)::
 ++
 +--
 +Whether to pass `--no-default-features` to Cargo. Defaults to
 +`#rust-analyzer.cargo.noDefaultFeatures#`.
 +--
 +[[rust-analyzer.checkOnSave.overrideCommand]]rust-analyzer.checkOnSave.overrideCommand (default: `null`)::
 ++
 +--
 +Override the command rust-analyzer uses instead of `cargo check` for
 +diagnostics on save. The command is required to output json and
 +should therefore include `--message-format=json` or a similar option.
 +
 +If you're changing this because you're using some tool wrapping
 +Cargo, you might also want to change
 +`#rust-analyzer.cargo.buildScripts.overrideCommand#`.
 +
 +If there are multiple linked projects, this command is invoked for
 +each of them, with the working directory being the project root
 +(i.e., the folder containing the `Cargo.toml`).
 +
 +An example command would be:
 +
 +```bash
 +cargo check --workspace --message-format=json --all-targets
 +```
 +.
 +--
 +[[rust-analyzer.checkOnSave.target]]rust-analyzer.checkOnSave.target (default: `null`)::
 ++
 +--
 +Check for a specific target. Defaults to
 +`#rust-analyzer.cargo.target#`.
 +--
 +[[rust-analyzer.completion.autoimport.enable]]rust-analyzer.completion.autoimport.enable (default: `true`)::
 ++
 +--
 +Toggles the additional completions that automatically add imports when completed.
 +Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
 +--
 +[[rust-analyzer.completion.autoself.enable]]rust-analyzer.completion.autoself.enable (default: `true`)::
 ++
 +--
 +Toggles the additional completions that automatically show method calls and field accesses
 +with `self` prefixed to them when inside a method.
 +--
 +[[rust-analyzer.completion.callable.snippets]]rust-analyzer.completion.callable.snippets (default: `"fill_arguments"`)::
 ++
 +--
 +Whether to add parenthesis and argument snippets when completing function.
 +--
 +[[rust-analyzer.completion.postfix.enable]]rust-analyzer.completion.postfix.enable (default: `true`)::
 ++
 +--
 +Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
 +--
 +[[rust-analyzer.completion.privateEditable.enable]]rust-analyzer.completion.privateEditable.enable (default: `false`)::
 ++
 +--
 +Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.
 +--
 +[[rust-analyzer.completion.snippets.custom]]rust-analyzer.completion.snippets.custom::
 ++
 +--
 +Default:
 +----
 +{
 +            "Arc::new": {
 +                "postfix": "arc",
 +                "body": "Arc::new(${receiver})",
 +                "requires": "std::sync::Arc",
 +                "description": "Put the expression into an `Arc`",
 +                "scope": "expr"
 +            },
 +            "Rc::new": {
 +                "postfix": "rc",
 +                "body": "Rc::new(${receiver})",
 +                "requires": "std::rc::Rc",
 +                "description": "Put the expression into an `Rc`",
 +                "scope": "expr"
 +            },
 +            "Box::pin": {
 +                "postfix": "pinbox",
 +                "body": "Box::pin(${receiver})",
 +                "requires": "std::boxed::Box",
 +                "description": "Put the expression into a pinned `Box`",
 +                "scope": "expr"
 +            },
 +            "Ok": {
 +                "postfix": "ok",
 +                "body": "Ok(${receiver})",
 +                "description": "Wrap the expression in a `Result::Ok`",
 +                "scope": "expr"
 +            },
 +            "Err": {
 +                "postfix": "err",
 +                "body": "Err(${receiver})",
 +                "description": "Wrap the expression in a `Result::Err`",
 +                "scope": "expr"
 +            },
 +            "Some": {
 +                "postfix": "some",
 +                "body": "Some(${receiver})",
 +                "description": "Wrap the expression in an `Option::Some`",
 +                "scope": "expr"
 +            }
 +        }
 +----
 +Custom completion snippets.
 +
 +--
 +[[rust-analyzer.diagnostics.disabled]]rust-analyzer.diagnostics.disabled (default: `[]`)::
 ++
 +--
 +List of rust-analyzer diagnostics to disable.
 +--
 +[[rust-analyzer.diagnostics.enable]]rust-analyzer.diagnostics.enable (default: `true`)::
 ++
 +--
 +Whether to show native rust-analyzer diagnostics.
 +--
 +[[rust-analyzer.diagnostics.experimental.enable]]rust-analyzer.diagnostics.experimental.enable (default: `false`)::
 ++
 +--
 +Whether to show experimental rust-analyzer diagnostics that might
 +have more false positives than usual.
 +--
 +[[rust-analyzer.diagnostics.remapPrefix]]rust-analyzer.diagnostics.remapPrefix (default: `{}`)::
 ++
 +--
 +Map of prefixes to be substituted when parsing diagnostic file paths.
 +This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.
 +--
 +[[rust-analyzer.diagnostics.warningsAsHint]]rust-analyzer.diagnostics.warningsAsHint (default: `[]`)::
 ++
 +--
 +List of warnings that should be displayed with hint severity.
 +
 +The warnings will be indicated by faded text or three dots in code
 +and will not show up in the `Problems Panel`.
 +--
 +[[rust-analyzer.diagnostics.warningsAsInfo]]rust-analyzer.diagnostics.warningsAsInfo (default: `[]`)::
 ++
 +--
 +List of warnings that should be displayed with info severity.
 +
 +The warnings will be indicated by a blue squiggly underline in code
 +and a blue icon in the `Problems Panel`.
 +--
 +[[rust-analyzer.files.excludeDirs]]rust-analyzer.files.excludeDirs (default: `[]`)::
 ++
 +--
 +These directories will be ignored by rust-analyzer. They are
 +relative to the workspace root, and globs are not supported. You may
 +also need to add the folders to Code's `files.watcherExclude`.
 +--
 +[[rust-analyzer.files.watcher]]rust-analyzer.files.watcher (default: `"client"`)::
 ++
 +--
 +Controls file watching implementation.
 +--
 +[[rust-analyzer.highlightRelated.breakPoints.enable]]rust-analyzer.highlightRelated.breakPoints.enable (default: `true`)::
 ++
 +--
 +Enables highlighting of all break points for a loop or block context while the cursor is on any `break`, `loop`, `while`, or `for` keywords.
 +--
 +[[rust-analyzer.highlightRelated.exitPoints.enable]]rust-analyzer.highlightRelated.exitPoints.enable (default: `true`)::
 ++
 +--
 +Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).
 +--
 +[[rust-analyzer.highlightRelated.references.enable]]rust-analyzer.highlightRelated.references.enable (default: `true`)::
 ++
 +--
 +Enables highlighting of related references while the cursor is on any identifier.
 +--
 +[[rust-analyzer.highlightRelated.yieldPoints.enable]]rust-analyzer.highlightRelated.yieldPoints.enable (default: `true`)::
 ++
 +--
 +Enables highlighting of all yield points while the cursor is on any `async` or `await` keywords.
 +--
 +[[rust-analyzer.hover.actions.debug.enable]]rust-analyzer.hover.actions.debug.enable (default: `true`)::
 ++
 +--
 +Whether to show `Debug` action. Only applies when
 +`#rust-analyzer.hover.actions.enable#` is set.
 +--
 +[[rust-analyzer.hover.actions.enable]]rust-analyzer.hover.actions.enable (default: `true`)::
 ++
 +--
 +Whether to show HoverActions in Rust files.
 +--
 +[[rust-analyzer.hover.actions.gotoTypeDef.enable]]rust-analyzer.hover.actions.gotoTypeDef.enable (default: `true`)::
 ++
 +--
 +Whether to show `Go to Type Definition` action. Only applies when
 +`#rust-analyzer.hover.actions.enable#` is set.
 +--
 +[[rust-analyzer.hover.actions.implementations.enable]]rust-analyzer.hover.actions.implementations.enable (default: `true`)::
 ++
 +--
 +Whether to show `Implementations` action. Only applies when
 +`#rust-analyzer.hover.actions.enable#` is set.
 +--
 +[[rust-analyzer.hover.actions.references.enable]]rust-analyzer.hover.actions.references.enable (default: `false`)::
 ++
 +--
 +Whether to show `References` action. Only applies when
 +`#rust-analyzer.hover.actions.enable#` is set.
 +--
 +[[rust-analyzer.hover.actions.run.enable]]rust-analyzer.hover.actions.run.enable (default: `true`)::
 ++
 +--
 +Whether to show `Run` action. Only applies when
 +`#rust-analyzer.hover.actions.enable#` is set.
 +--
 +[[rust-analyzer.hover.documentation.enable]]rust-analyzer.hover.documentation.enable (default: `true`)::
 ++
 +--
 +Whether to show documentation on hover.
 +--
 +[[rust-analyzer.hover.documentation.keywords.enable]]rust-analyzer.hover.documentation.keywords.enable (default: `true`)::
 ++
 +--
 +Whether to show keyword hover popups. Only applies when
 +`#rust-analyzer.hover.documentation.enable#` is set.
 +--
 +[[rust-analyzer.hover.links.enable]]rust-analyzer.hover.links.enable (default: `true`)::
 ++
 +--
 +Use markdown syntax for links in hover.
 +--
 +[[rust-analyzer.imports.granularity.enforce]]rust-analyzer.imports.granularity.enforce (default: `false`)::
 ++
 +--
 +Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.
 +--
 +[[rust-analyzer.imports.granularity.group]]rust-analyzer.imports.granularity.group (default: `"crate"`)::
 ++
 +--
 +How imports should be grouped into use statements.
 +--
 +[[rust-analyzer.imports.group.enable]]rust-analyzer.imports.group.enable (default: `true`)::
 ++
 +--
 +Group inserted imports by the https://rust-analyzer.github.io/manual.html#auto-import[following order]. Groups are separated by newlines.
 +--
 +[[rust-analyzer.imports.merge.glob]]rust-analyzer.imports.merge.glob (default: `true`)::
 ++
 +--
 +Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
 +--
 +[[rust-analyzer.imports.prefix]]rust-analyzer.imports.prefix (default: `"plain"`)::
 ++
 +--
 +The path structure for newly inserted paths to use.
 +--
 +[[rust-analyzer.inlayHints.bindingModeHints.enable]]rust-analyzer.inlayHints.bindingModeHints.enable (default: `false`)::
 ++
 +--
 +Whether to show inlay type hints for binding modes.
 +--
 +[[rust-analyzer.inlayHints.chainingHints.enable]]rust-analyzer.inlayHints.chainingHints.enable (default: `true`)::
 ++
 +--
 +Whether to show inlay type hints for method chains.
 +--
 +[[rust-analyzer.inlayHints.closingBraceHints.enable]]rust-analyzer.inlayHints.closingBraceHints.enable (default: `true`)::
 ++
 +--
 +Whether to show inlay hints after a closing `}` to indicate what item it belongs to.
 +--
 +[[rust-analyzer.inlayHints.closingBraceHints.minLines]]rust-analyzer.inlayHints.closingBraceHints.minLines (default: `25`)::
 ++
 +--
 +Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1
 +to always show them).
 +--
 +[[rust-analyzer.inlayHints.closureReturnTypeHints.enable]]rust-analyzer.inlayHints.closureReturnTypeHints.enable (default: `"never"`)::
 ++
 +--
 +Whether to show inlay type hints for return types of closures.
 +--
 +[[rust-analyzer.inlayHints.lifetimeElisionHints.enable]]rust-analyzer.inlayHints.lifetimeElisionHints.enable (default: `"never"`)::
 ++
 +--
 +Whether to show inlay type hints for elided lifetimes in function signatures.
 +--
 +[[rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames]]rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames (default: `false`)::
 ++
 +--
 +Whether to prefer using parameter names as the name for elided lifetime hints if possible.
 +--
 +[[rust-analyzer.inlayHints.maxLength]]rust-analyzer.inlayHints.maxLength (default: `25`)::
 ++
 +--
 +Maximum length for inlay hints. Set to null to have an unlimited length.
 +--
 +[[rust-analyzer.inlayHints.parameterHints.enable]]rust-analyzer.inlayHints.parameterHints.enable (default: `true`)::
 ++
 +--
 +Whether to show function parameter name inlay hints at the call
 +site.
 +--
 +[[rust-analyzer.inlayHints.reborrowHints.enable]]rust-analyzer.inlayHints.reborrowHints.enable (default: `"never"`)::
 ++
 +--
 +Whether to show inlay type hints for compiler inserted reborrows.
 +--
 +[[rust-analyzer.inlayHints.renderColons]]rust-analyzer.inlayHints.renderColons (default: `true`)::
 ++
 +--
 +Whether to render leading colons for type hints, and trailing colons for parameter hints.
 +--
 +[[rust-analyzer.inlayHints.typeHints.enable]]rust-analyzer.inlayHints.typeHints.enable (default: `true`)::
 ++
 +--
 +Whether to show inlay type hints for variables.
 +--
 +[[rust-analyzer.inlayHints.typeHints.hideClosureInitialization]]rust-analyzer.inlayHints.typeHints.hideClosureInitialization (default: `false`)::
 ++
 +--
 +Whether to hide inlay type hints for `let` statements that initialize to a closure.
 +Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.
 +--
 +[[rust-analyzer.inlayHints.typeHints.hideNamedConstructor]]rust-analyzer.inlayHints.typeHints.hideNamedConstructor (default: `false`)::
 ++
 +--
 +Whether to hide inlay type hints for constructors.
 +--
 +[[rust-analyzer.joinLines.joinAssignments]]rust-analyzer.joinLines.joinAssignments (default: `true`)::
 ++
 +--
 +Join lines merges consecutive declaration and initialization of an assignment.
 +--
 +[[rust-analyzer.joinLines.joinElseIf]]rust-analyzer.joinLines.joinElseIf (default: `true`)::
 ++
 +--
 +Join lines inserts else between consecutive ifs.
 +--
 +[[rust-analyzer.joinLines.removeTrailingComma]]rust-analyzer.joinLines.removeTrailingComma (default: `true`)::
 ++
 +--
 +Join lines removes trailing commas.
 +--
 +[[rust-analyzer.joinLines.unwrapTrivialBlock]]rust-analyzer.joinLines.unwrapTrivialBlock (default: `true`)::
 ++
 +--
 +Join lines unwraps trivial blocks.
 +--
 +[[rust-analyzer.lens.debug.enable]]rust-analyzer.lens.debug.enable (default: `true`)::
 ++
 +--
 +Whether to show `Debug` lens. Only applies when
 +`#rust-analyzer.lens.enable#` is set.
 +--
 +[[rust-analyzer.lens.enable]]rust-analyzer.lens.enable (default: `true`)::
 ++
 +--
 +Whether to show CodeLens in Rust files.
 +--
 +[[rust-analyzer.lens.forceCustomCommands]]rust-analyzer.lens.forceCustomCommands (default: `true`)::
 ++
 +--
 +Internal config: use custom client-side commands even when the
 +client doesn't set the corresponding capability.
 +--
 +[[rust-analyzer.lens.implementations.enable]]rust-analyzer.lens.implementations.enable (default: `true`)::
 ++
 +--
 +Whether to show `Implementations` lens. Only applies when
 +`#rust-analyzer.lens.enable#` is set.
 +--
 +[[rust-analyzer.lens.references.adt.enable]]rust-analyzer.lens.references.adt.enable (default: `false`)::
 ++
 +--
 +Whether to show `References` lens for Struct, Enum, and Union.
 +Only applies when `#rust-analyzer.lens.enable#` is set.
 +--
 +[[rust-analyzer.lens.references.enumVariant.enable]]rust-analyzer.lens.references.enumVariant.enable (default: `false`)::
 ++
 +--
 +Whether to show `References` lens for Enum Variants.
 +Only applies when `#rust-analyzer.lens.enable#` is set.
 +--
 +[[rust-analyzer.lens.references.method.enable]]rust-analyzer.lens.references.method.enable (default: `false`)::
 ++
 +--
 +Whether to show `Method References` lens. Only applies when
 +`#rust-analyzer.lens.enable#` is set.
 +--
 +[[rust-analyzer.lens.references.trait.enable]]rust-analyzer.lens.references.trait.enable (default: `false`)::
 ++
 +--
 +Whether to show `References` lens for Trait.
 +Only applies when `#rust-analyzer.lens.enable#` is set.
 +--
 +[[rust-analyzer.lens.run.enable]]rust-analyzer.lens.run.enable (default: `true`)::
 ++
 +--
 +Whether to show `Run` lens. Only applies when
 +`#rust-analyzer.lens.enable#` is set.
 +--
 +[[rust-analyzer.linkedProjects]]rust-analyzer.linkedProjects (default: `[]`)::
 ++
 +--
 +Disable project auto-discovery in favor of explicitly specified set
 +of projects.
 +
 +Elements must be paths pointing to `Cargo.toml`,
 +`rust-project.json`, or JSON objects in `rust-project.json` format.
 +--
 +[[rust-analyzer.lru.capacity]]rust-analyzer.lru.capacity (default: `null`)::
 ++
 +--
 +Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.
 +--
 +[[rust-analyzer.notifications.cargoTomlNotFound]]rust-analyzer.notifications.cargoTomlNotFound (default: `true`)::
 ++
 +--
 +Whether to show `can't find Cargo.toml` error message.
 +--
 +[[rust-analyzer.procMacro.attributes.enable]]rust-analyzer.procMacro.attributes.enable (default: `true`)::
 ++
 +--
 +Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.
 +--
 +[[rust-analyzer.procMacro.enable]]rust-analyzer.procMacro.enable (default: `true`)::
 ++
 +--
 +Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.
 +--
 +[[rust-analyzer.procMacro.ignored]]rust-analyzer.procMacro.ignored (default: `{}`)::
 ++
 +--
 +These proc-macros will be ignored when trying to expand them.
 +
 +This config takes a map of crate names with the exported proc-macro names to ignore as values.
 +--
 +[[rust-analyzer.procMacro.server]]rust-analyzer.procMacro.server (default: `null`)::
 ++
 +--
 +Internal config, path to proc-macro server executable (typically,
 +this is rust-analyzer itself, but we override this in tests).
 +--
 +[[rust-analyzer.runnables.command]]rust-analyzer.runnables.command (default: `null`)::
 ++
 +--
 +Command to be executed instead of 'cargo' for runnables.
 +--
 +[[rust-analyzer.runnables.extraArgs]]rust-analyzer.runnables.extraArgs (default: `[]`)::
 ++
 +--
 +Additional arguments to be passed to cargo for runnables such as
 +tests or binaries. For example, it may be `--release`.
 +--
 +[[rust-analyzer.rustc.source]]rust-analyzer.rustc.source (default: `null`)::
 ++
 +--
 +Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private
 +projects, or "discover" to try to automatically find it if the `rustc-dev` component
 +is installed.
 +
 +Any project which uses rust-analyzer with the rustcPrivate
 +crates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.
 +
 +This option does not take effect until rust-analyzer is restarted.
 +--
 +[[rust-analyzer.rustfmt.extraArgs]]rust-analyzer.rustfmt.extraArgs (default: `[]`)::
 ++
 +--
 +Additional arguments to `rustfmt`.
 +--
 +[[rust-analyzer.rustfmt.overrideCommand]]rust-analyzer.rustfmt.overrideCommand (default: `null`)::
 ++
 +--
 +Advanced option, fully override the command rust-analyzer uses for
 +formatting.
 +--
 +[[rust-analyzer.rustfmt.rangeFormatting.enable]]rust-analyzer.rustfmt.rangeFormatting.enable (default: `false`)::
 ++
 +--
 +Enables the use of rustfmt's unstable range formatting command for the
 +`textDocument/rangeFormatting` request. The rustfmt option is unstable and only
 +available on a nightly build.
 +--
++[[rust-analyzer.semanticHighlighting.doc.comment.inject.enable]]rust-analyzer.semanticHighlighting.doc.comment.inject.enable (default: `true`)::
+++
++--
++Inject additional highlighting into doc comments.
++
++When enabled, rust-analyzer will highlight rust source in doc comments as well as intra
++doc links.
++--
++[[rust-analyzer.semanticHighlighting.operator.enable]]rust-analyzer.semanticHighlighting.operator.enable (default: `true`)::
+++
++--
++Use semantic tokens for operators.
++
++When disabled, rust-analyzer will emit semantic tokens only for operator tokens when
++they are tagged with modifiers.
++--
++[[rust-analyzer.semanticHighlighting.operator.specialization.enable]]rust-analyzer.semanticHighlighting.operator.specialization.enable (default: `false`)::
+++
++--
++Use specialized semantic tokens for operators.
++
++When enabled, rust-analyzer will emit special token types for operator tokens instead
++of the generic `operator` token type.
++--
++[[rust-analyzer.semanticHighlighting.punctuation.enable]]rust-analyzer.semanticHighlighting.punctuation.enable (default: `false`)::
+++
++--
++Use semantic tokens for punctuation.
++
++When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when
++they are tagged with modifiers or have a special role.
++--
++[[rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang]]rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang (default: `false`)::
+++
++--
++When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro
++calls.
++--
++[[rust-analyzer.semanticHighlighting.punctuation.specialization.enable]]rust-analyzer.semanticHighlighting.punctuation.specialization.enable (default: `false`)::
+++
++--
++Use specialized semantic tokens for punctuation.
++
++When enabled, rust-analyzer will emit special token types for punctuation tokens instead
++of the generic `punctuation` token type.
++--
 +[[rust-analyzer.semanticHighlighting.strings.enable]]rust-analyzer.semanticHighlighting.strings.enable (default: `true`)::
 ++
 +--
 +Use semantic tokens for strings.
 +
 +In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
 +By disabling semantic tokens for strings, other grammars can be used to highlight
 +their contents.
 +--
 +[[rust-analyzer.signatureInfo.detail]]rust-analyzer.signatureInfo.detail (default: `"full"`)::
 ++
 +--
 +Show full signature of the callable. Only shows parameters if disabled.
 +--
 +[[rust-analyzer.signatureInfo.documentation.enable]]rust-analyzer.signatureInfo.documentation.enable (default: `true`)::
 ++
 +--
 +Show documentation.
 +--
 +[[rust-analyzer.typing.autoClosingAngleBrackets.enable]]rust-analyzer.typing.autoClosingAngleBrackets.enable (default: `false`)::
 ++
 +--
 +Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list.
 +--
 +[[rust-analyzer.workspace.symbol.search.kind]]rust-analyzer.workspace.symbol.search.kind (default: `"only_types"`)::
 ++
 +--
 +Workspace symbol search kind.
 +--
 +[[rust-analyzer.workspace.symbol.search.limit]]rust-analyzer.workspace.symbol.search.limit (default: `128`)::
 ++
 +--
 +Limits the number of items returned from a workspace symbol search (Defaults to 128).
 +Some clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.
 +Other clients require all results upfront and might require a higher limit.
 +--
 +[[rust-analyzer.workspace.symbol.search.scope]]rust-analyzer.workspace.symbol.search.scope (default: `"workspace"`)::
 ++
 +--
 +Workspace symbol search scope.
 +--
index c482fcbed0e01ccea9ab998dad16a7649f6fb61a,0000000000000000000000000000000000000000..9bd3b6a692b1a7514263ca06c4b39e97281f42df
mode 100644,000000..100644
--- /dev/null
@@@ -1,863 -1,0 +1,874 @@@
 += User Manual
 +:toc: preamble
 +:sectanchors:
 +:page-layout: post
 +:icons: font
 +:source-highlighter: rouge
 +:experimental:
 +
 +////
 +IMPORTANT: the master copy of this document lives in the https://github.com/rust-lang/rust-analyzer repository
 +////
 +
 +At its core, rust-analyzer is a *library* for semantic analysis of Rust code as it changes over time.
 +This manual focuses on a specific usage of the library -- running it as part of a server that implements the
 +https://microsoft.github.io/language-server-protocol/[Language Server Protocol] (LSP).
 +The LSP allows various code editors, like VS Code, Emacs or Vim, to implement semantic features like completion or goto definition by talking to an external language server process.
 +
 +[TIP]
 +====
 +[.lead]
 +To improve this document, send a pull request: +
 +https://github.com/rust-lang/rust-analyzer/blob/master/docs/user/manual.adoc[https://github.com/rust-analyzer/.../manual.adoc]
 +
 +The manual is written in https://asciidoc.org[AsciiDoc] and includes some extra files which are generated from the source code. Run `cargo test` and `cargo test -p xtask` to create these and then `asciidoctor manual.adoc` to create an HTML copy.
 +====
 +
 +If you have questions about using rust-analyzer, please ask them in the https://users.rust-lang.org/c/ide/14["`IDEs and Editors`"] topic of Rust users forum.
 +
 +== Installation
 +
 +In theory, one should be able to just install the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>> and have it automatically work with any editor.
 +We are not there yet, so some editor specific setup is required.
 +
 +Additionally, rust-analyzer needs the sources of the standard library.
 +If the source code is not present, rust-analyzer will attempt to install it automatically.
 +
 +To add the sources manually, run the following command:
 +
 +```bash
 +$ rustup component add rust-src
 +```
 +
 +=== Toolchain
 +
 +Only the latest stable standard library source is officially supported for use with rust-analyzer.
 +If you are using an older toolchain or have an override set, rust-analyzer may fail to understand the Rust source.
 +You will either need to update your toolchain or use an older version of rust-analyzer that is compatible with your toolchain.
 +
 +If you are using an override in your project, you can still force rust-analyzer to use the stable toolchain via the environment variable `RUSTUP_TOOLCHAIN`.
 +For example, with VS Code or coc-rust-analyzer:
 +
 +[source,json]
 +----
 +{ "rust-analyzer.server.extraEnv": { "RUSTUP_TOOLCHAIN": "stable" } }
 +----
 +
 +=== VS Code
 +
 +This is the best supported editor at the moment.
 +The rust-analyzer plugin for VS Code is maintained
 +https://github.com/rust-lang/rust-analyzer/tree/master/editors/code[in tree].
 +
 +You can install the latest release of the plugin from
 +https://marketplace.visualstudio.com/items?itemName=rust-lang.rust-analyzer[the marketplace].
 +
 +Note that the plugin may cause conflicts with the
 +https://marketplace.visualstudio.com/items?itemName=rust-lang.rust[official Rust plugin].
 +It is recommended to disable the Rust plugin when using the rust-analyzer extension.
 +
 +By default, the plugin will prompt you to download the matching version of the server as well:
 +
 +image::https://user-images.githubusercontent.com/9021944/75067008-17502500-54ba-11ea-835a-f92aac50e866.png[]
 +
 +[NOTE]
 +====
 +To disable this notification put the following to `settings.json`
 +
 +[source,json]
 +----
 +{ "rust-analyzer.updates.askBeforeDownload": false }
 +----
 +====
 +
 +The server binary is stored in the extension install directory, which starts with `rust-lang.rust-analyzer-` and is located under:
 +
 +* Linux: `~/.vscode/extensions`
 +* Linux (Remote, such as WSL): `~/.vscode-server/extensions`
 +* macOS: `~/.vscode/extensions`
 +* Windows: `%USERPROFILE%\.vscode\extensions`
 +
 +As an exception, on NixOS, the extension makes a copy of the server and stores it under `~/.config/Code/User/globalStorage/rust-lang.rust-analyzer`.
 +
 +Note that we only support the two most recent versions of VS Code.
 +
 +==== Updates
 +
 +The extension will be updated automatically as new versions become available.
 +It will ask your permission to download the matching language server version binary if needed.
 +
 +===== Nightly
 +
 +We ship nightly releases for VS Code.
 +To help us out by testing the newest code, you can enable pre-release versions in the Code extension page.
 +
 +==== Manual installation
 +
 +Alternatively, download a VSIX corresponding to your platform from the
 +https://github.com/rust-lang/rust-analyzer/releases[releases] page.
 +
 +Install the extension with the `Extensions: Install from VSIX` command within VS Code, or from the command line via:
 +[source]
 +----
 +$ code --install-extension /path/to/rust-analyzer.vsix
 +----
 +
 +If you are running an unsupported platform, you can install `rust-analyzer-no-server.vsix` and compile or obtain a server binary.
 +Copy the server anywhere, then add the path to your settings.json, for example:
 +[source,json]
 +----
 +{ "rust-analyzer.server.path": "~/.local/bin/rust-analyzer-linux" }
 +----
 +
 +==== Building From Source
 +
 +Both the server and the Code plugin can be installed from source:
 +
 +[source]
 +----
 +$ git clone https://github.com/rust-lang/rust-analyzer.git && cd rust-analyzer
 +$ cargo xtask install
 +----
 +
 +You'll need Cargo, nodejs (matching a supported version of VS Code) and npm for this.
 +
 +Note that installing via `xtask install` does not work for VS Code Remote, instead you'll need to install the `.vsix` manually.
 +
 +If you're not using Code, you can compile and install only the LSP server:
 +
 +[source]
 +----
 +$ cargo xtask install --server
 +----
 +
 +=== rust-analyzer Language Server Binary
 +
 +Other editors generally require the `rust-analyzer` binary to be in `$PATH`.
 +You can download pre-built binaries from the https://github.com/rust-lang/rust-analyzer/releases[releases] page.
 +You will need to uncompress and rename the binary for your platform, e.g. from `rust-analyzer-aarch64-apple-darwin.gz` on Mac OS to `rust-analyzer`, make it executable, then move it into a directory in your `$PATH`.
 +
 +On Linux to install the `rust-analyzer` binary into `~/.local/bin`, these commands should work:
 +
 +[source,bash]
 +----
 +$ mkdir -p ~/.local/bin
 +$ curl -L https://github.com/rust-lang/rust-analyzer/releases/latest/download/rust-analyzer-x86_64-unknown-linux-gnu.gz | gunzip -c - > ~/.local/bin/rust-analyzer
 +$ chmod +x ~/.local/bin/rust-analyzer
 +----
 +
 +Make sure that `~/.local/bin` is listed in the `$PATH` variable and use the appropriate URL if you're not on a `x86-64` system.
 +
 +You don't have to use `~/.local/bin`, any other path like `~/.cargo/bin` or `/usr/local/bin` will work just as well.
 +
 +Alternatively, you can install it from source using the command below.
 +You'll need the latest stable version of the Rust toolchain.
 +
 +[source,bash]
 +----
 +$ git clone https://github.com/rust-lang/rust-analyzer.git && cd rust-analyzer
 +$ cargo xtask install --server
 +----
 +
 +If your editor can't find the binary even though the binary is on your `$PATH`, the likely explanation is that it doesn't see the same `$PATH` as the shell, see https://github.com/rust-lang/rust-analyzer/issues/1811[this issue].
 +On Unix, running the editor from a shell or changing the `.desktop` file to set the environment should help.
 +
 +==== `rustup`
 +
 +`rust-analyzer` is available in `rustup`, but only in the nightly toolchain:
 +
 +[source,bash]
 +----
 +$ rustup +nightly component add rust-analyzer-preview
 +----
 +
 +However, in contrast to `component add clippy` or `component add rustfmt`, this does not actually place a `rust-analyzer` binary in `~/.cargo/bin`, see https://github.com/rust-lang/rustup/issues/2411[this issue].
 +
 +==== Arch Linux
 +
 +The `rust-analyzer` binary can be installed from the repos or AUR (Arch User Repository):
 +
 +- https://www.archlinux.org/packages/community/x86_64/rust-analyzer/[`rust-analyzer`] (built from latest tagged source)
 +- https://aur.archlinux.org/packages/rust-analyzer-git[`rust-analyzer-git`] (latest Git version)
 +
 +Install it with pacman, for example:
 +
 +[source,bash]
 +----
 +$ pacman -S rust-analyzer
 +----
 +
 +==== Gentoo Linux
 +
 +`rust-analyzer` is available in the GURU repository:
 +
 +- https://gitweb.gentoo.org/repo/proj/guru.git/tree/dev-util/rust-analyzer?id=9895cea62602cfe599bd48e0fb02127411ca6e81[`dev-util/rust-analyzer`] builds from source
 +- https://gitweb.gentoo.org/repo/proj/guru.git/tree/dev-util/rust-analyzer-bin?id=9895cea62602cfe599bd48e0fb02127411ca6e81[`dev-util/rust-analyzer-bin`] installs an official binary release
 +
 +If not already, GURU must be enabled (e.g. using `app-eselect/eselect-repository`) and sync'd before running `emerge`:
 +
 +[source,bash]
 +----
 +$ eselect repository enable guru && emaint sync -r guru
 +$ emerge rust-analyzer-bin
 +----
 +
 +==== macOS
 +
 +The `rust-analyzer` binary can be installed via https://brew.sh/[Homebrew].
 +
 +[source,bash]
 +----
 +$ brew install rust-analyzer
 +----
 +
 +=== Emacs
 +
 +Note this excellent https://robert.kra.hn/posts/2021-02-07_rust-with-emacs/[guide] from https://github.com/rksm[@rksm].
 +
 +Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
 +
 +Emacs support is maintained as part of the https://github.com/emacs-lsp/lsp-mode[Emacs-LSP] package in https://github.com/emacs-lsp/lsp-mode/blob/master/lsp-rust.el[lsp-rust.el].
 +
 +1. Install the most recent version of `emacs-lsp` package by following the https://github.com/emacs-lsp/lsp-mode[Emacs-LSP instructions].
 +2. Set `lsp-rust-server` to `'rust-analyzer`.
 +3. Run `lsp` in a Rust buffer.
 +4. (Optionally) bind commands like `lsp-rust-analyzer-join-lines`, `lsp-extend-selection` and `lsp-rust-analyzer-expand-macro` to keys.
 +
 +=== Vim/NeoVim
 +
 +Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
 +Not needed if the extension can install/update it on its own, coc-rust-analyzer is one example.
 +
 +There are several LSP client implementations for vim or neovim:
 +
 +==== coc-rust-analyzer
 +
 +1. Install coc.nvim by following the instructions at
 +   https://github.com/neoclide/coc.nvim[coc.nvim]
 +   (Node.js required)
 +2. Run `:CocInstall coc-rust-analyzer` to install
 +   https://github.com/fannheyward/coc-rust-analyzer[coc-rust-analyzer],
 +   this extension implements _most_ of the features supported in the VSCode extension:
 +   * automatically install and upgrade stable/nightly releases
 +   * same configurations as VSCode extension, `rust-analyzer.server.path`, `rust-analyzer.cargo.features` etc.
 +   * same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.ssr` etc.
 +   * inlay hints for variables and method chaining, _Neovim Only_
 +
 +Note: for code actions, use `coc-codeaction-cursor` and `coc-codeaction-selected`; `coc-codeaction` and `coc-codeaction-line` are unlikely to be useful.
 +
 +==== LanguageClient-neovim
 +
 +1. Install LanguageClient-neovim by following the instructions
 +   https://github.com/autozimu/LanguageClient-neovim[here]
 +   * The GitHub project wiki has extra tips on configuration
 +
 +2. Configure by adding this to your vim/neovim config file (replacing the existing Rust-specific line if it exists):
 ++
 +[source,vim]
 +----
 +let g:LanguageClient_serverCommands = {
 +\ 'rust': ['rust-analyzer'],
 +\ }
 +----
 +
 +==== YouCompleteMe
 +
 +Install YouCompleteMe by following the instructions
 +  https://github.com/ycm-core/YouCompleteMe#installation[here].
 +
 +rust-analyzer is the default in YouCompleteMe; it should work out of the box.
 +
 +==== ALE
 +
 +To use the LSP server in https://github.com/dense-analysis/ale[ale]:
 +
 +[source,vim]
 +----
 +let g:ale_linters = {'rust': ['analyzer']}
 +----
 +
 +==== nvim-lsp
 +
 +NeoVim 0.5 has built-in language server support.
 +For a quick start configuration of rust-analyzer, use https://github.com/neovim/nvim-lspconfig#rust_analyzer[neovim/nvim-lspconfig].
 +Once `neovim/nvim-lspconfig` is installed, use `+lua require'lspconfig'.rust_analyzer.setup({})+` in your `init.vim`.
 +
 +You can also pass LSP settings to the server:
 +
 +[source,vim]
 +----
 +lua << EOF
 +local nvim_lsp = require'lspconfig'
 +
 +local on_attach = function(client)
 +    require'completion'.on_attach(client)
 +end
 +
 +nvim_lsp.rust_analyzer.setup({
 +    on_attach=on_attach,
 +    settings = {
 +        ["rust-analyzer"] = {
 +            imports = {
 +                granularity = {
 +                    group = "module",
 +                },
 +                prefix = "self",
 +            },
 +            cargo = {
 +                buildScripts = {
 +                    enable = true,
 +                },
 +            },
 +            procMacro = {
 +                enable = true
 +            },
 +        }
 +    }
 +})
 +EOF
 +----
 +
 +See https://sharksforarms.dev/posts/neovim-rust/ for more tips on getting started.
 +
 +Check out https://github.com/simrat39/rust-tools.nvim for a batteries included rust-analyzer setup for neovim.
 +
 +==== vim-lsp
 +
 +vim-lsp is installed by following https://github.com/prabirshrestha/vim-lsp[the plugin instructions].
 +It can be as simple as adding this line to your `.vimrc`:
 +
 +[source,vim]
 +----
 +Plug 'prabirshrestha/vim-lsp'
 +----
 +
 +Next you need to register the `rust-analyzer` binary.
 +If it is available in `$PATH`, you may want to add this to your `.vimrc`:
 +
 +[source,vim]
 +----
 +if executable('rust-analyzer')
 +  au User lsp_setup call lsp#register_server({
 +        \   'name': 'Rust Language Server',
 +        \   'cmd': {server_info->['rust-analyzer']},
 +        \   'whitelist': ['rust'],
 +        \ })
 +endif
 +----
 +
 +There is no dedicated UI for the server configuration, so you would need to send any options as a value of the `initialization_options` field, as described in the <<_configuration,Configuration>> section.
 +Here is an example of how to enable the proc-macro support:
 +
 +[source,vim]
 +----
 +if executable('rust-analyzer')
 +  au User lsp_setup call lsp#register_server({
 +        \   'name': 'Rust Language Server',
 +        \   'cmd': {server_info->['rust-analyzer']},
 +        \   'whitelist': ['rust'],
 +        \   'initialization_options': {
 +        \     'cargo': {
 +        \       'buildScripts': {
 +        \         'enable': v:true,
 +        \       },
 +        \     },
 +        \     'procMacro': {
 +        \       'enable': v:true,
 +        \     },
 +        \   },
 +        \ })
 +endif
 +----
 +
 +=== Sublime Text
 +
 +==== Sublime Text 4:
 +* Follow the instructions in link:https://github.com/sublimelsp/LSP-rust-analyzer[LSP-rust-analyzer].
 +
 +NOTE: Install link:https://packagecontrol.io/packages/LSP-file-watcher-chokidar[LSP-file-watcher-chokidar] to enable file watching (`workspace/didChangeWatchedFiles`).
 +
 +==== Sublime Text 3:
 +* Install the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
 +* Install the link:https://packagecontrol.io/packages/LSP[LSP package].
 +* From the command palette, run `LSP: Enable Language Server Globally` and select `rust-analyzer`.
 +
 +If it worked, you should see "rust-analyzer, Line X, Column Y" on the left side of the status bar, and after waiting a bit, functionalities like tooltips on hovering over variables should become available.
 +
 +If you get an error saying `No such file or directory: 'rust-analyzer'`, see the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>> section on installing the language server binary.
 +
 +=== GNOME Builder
 +
 +GNOME Builder 3.37.1 and newer has native `rust-analyzer` support.
 +If the LSP binary is not available, GNOME Builder can install it when opening a Rust file.
 +
 +
 +=== Eclipse IDE
 +
 +Support for Rust development in the Eclipse IDE is provided by link:https://github.com/eclipse/corrosion[Eclipse Corrosion].
 +If available in PATH or in some standard location, `rust-analyzer` is detected and powers editing of Rust files without further configuration.
 +If `rust-analyzer` is not detected, Corrosion will prompt you for configuration of your Rust toolchain and language server with a link to the __Window > Preferences > Rust__ preference page; from here a button allows you to download and configure `rust-analyzer`, but you can also reference another installation.
 +You'll need to close and reopen all .rs and Cargo files, or to restart the IDE, for this change to take effect.
 +
 +=== Kate Text Editor
 +
 +Support for the language server protocol is built into Kate through the LSP plugin, which is included by default.
 +It is preconfigured to use rust-analyzer for Rust sources since Kate 21.12.
 +
 +Earlier versions allow you to use rust-analyzer through a simple settings change.
 +In the LSP Client settings of Kate, copy the content of the third tab "default parameters" to the second tab "server configuration".
 +Then in the configuration replace:
 +[source,json]
 +----
 +        "rust": {
 +            "command": ["rls"],
 +            "rootIndicationFileNames": ["Cargo.lock", "Cargo.toml"],
 +            "url": "https://github.com/rust-lang/rls",
 +            "highlightingModeRegex": "^Rust$"
 +        },
 +----
 +With
 +[source,json]
 +----
 +        "rust": {
 +            "command": ["rust-analyzer"],
 +            "rootIndicationFileNames": ["Cargo.lock", "Cargo.toml"],
 +            "url": "https://github.com/rust-lang/rust-analyzer",
 +            "highlightingModeRegex": "^Rust$"
 +        },
 +----
 +Then click on apply, and restart the LSP server for your rust project.
 +
 +=== juCi++
 +
 +https://gitlab.com/cppit/jucipp[juCi++] has built-in support for the language server protocol, and since version 1.7.0 offers installation of both Rust and rust-analyzer when opening a Rust file.
 +
 +=== Kakoune
 +
 +https://kakoune.org/[Kakoune] supports LSP with the help of https://github.com/kak-lsp/kak-lsp[`kak-lsp`].
 +Follow the https://github.com/kak-lsp/kak-lsp#installation[instructions] to install `kak-lsp`.
 +To configure `kak-lsp`, refer to the https://github.com/kak-lsp/kak-lsp#configuring-kak-lsp[configuration section] which is basically about copying the https://github.com/kak-lsp/kak-lsp/blob/master/kak-lsp.toml[configuration file] in the right place (latest versions should use `rust-analyzer` by default).
 +
 +Finally, you need to configure Kakoune to talk to `kak-lsp` (see https://github.com/kak-lsp/kak-lsp#usage[Usage section]).
 +A basic configuration will only get you LSP but you can also activate inlay diagnostics and auto-formatting on save.
 +The following might help you get all of this.
 +
 +[source,txt]
 +----
 +eval %sh{kak-lsp --kakoune -s $kak_session}  # Not needed if you load it with plug.kak.
 +hook global WinSetOption filetype=rust %{
 +    # Enable LSP
 +    lsp-enable-window
 +
 +    # Auto-formatting on save
 +    hook window BufWritePre .* lsp-formatting-sync
 +
 +    # Configure inlay hints (only on save)
 +    hook window -group rust-inlay-hints BufWritePost .* rust-analyzer-inlay-hints
 +    hook -once -always window WinSetOption filetype=.* %{
 +        remove-hooks window rust-inlay-hints
 +    }
 +}
 +----
 +
 +=== Helix
 +
 +https://docs.helix-editor.com/[Helix] supports LSP by default.
 +However, it won't install `rust-analyzer` automatically.
 +You can follow instructions for installing <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
 +
 +== Troubleshooting
 +
 +Start with looking at the rust-analyzer version.
 +Try **rust-analyzer: Show RA Version** in VS Code (using **Command Palette** feature typically activated by Ctrl+Shift+P) or `rust-analyzer --version` in the command line.
 +If the date is more than a week ago, it's better to update rust-analyzer version.
 +
 +The next thing to check would be panic messages in rust-analyzer's log.
 +Log messages are printed to stderr, in VS Code you can see them in the `Output > Rust Analyzer Language Server` tab of the panel.
 +To see more logs, set the `RA_LOG=info` environment variable, this can be done either by setting the environment variable manually or by using `rust-analyzer.server.extraEnv`, note that both of these approaches require the server to be restarted.
 +
 +To fully capture LSP messages between the editor and the server, set `"rust-analyzer.trace.server": "verbose"` config and check
 +`Output > Rust Analyzer Language Server Trace`.
 +
 +The root cause for many "`nothing works`" problems is that rust-analyzer fails to understand the project structure.
 +To debug that, first note the `rust-analyzer` section in the status bar.
 +If it has an error icon and is red, that's the problem (hover will have somewhat helpful error message).
 +**rust-analyzer: Status** prints dependency information for the current file.
 +Finally, `RA_LOG=project_model=debug` enables verbose logs during project loading.
 +
 +If rust-analyzer outright crashes, try running `rust-analyzer analysis-stats /path/to/project/directory/` on the command line.
 +This command type checks the whole project in batch mode bypassing LSP machinery.
 +
 +When filing issues, it is useful (but not necessary) to try to minimize examples.
 +An ideal bug reproduction looks like this:
 +
 +```bash
 +$ git clone https://github.com/username/repo.git && cd repo && git switch --detach commit-hash
 +$ rust-analyzer --version
 +rust-analyzer dd12184e4 2021-05-08 dev
 +$ rust-analyzer analysis-stats .
 +💀 💀 💀
 +```
 +
 +It is especially useful when the `repo` doesn't use external crates or the standard library.
 +
 +If you want to go as far as to modify the source code to debug the problem, be sure to take a look at the
 +https://github.com/rust-lang/rust-analyzer/tree/master/docs/dev[dev docs]!
 +
 +== Configuration
 +
 +**Source:** https://github.com/rust-lang/rust-analyzer/blob/master/crates/rust-analyzer/src/config.rs[config.rs]
 +
 +The <<_installation,Installation>> section contains details on configuration for some of the editors.
 +In general `rust-analyzer` is configured via LSP messages, which means that it's up to the editor to decide on the exact format and location of configuration files.
 +
 +Some clients, such as <<vs-code,VS Code>> or <<coc-rust-analyzer,COC plugin in Vim>> provide `rust-analyzer` specific configuration UIs. Others may require you to know a bit more about the interaction with `rust-analyzer`.
 +
 +For the latter category, it might help to know that the initial configuration is specified as a value of the `initializationOptions` field of the https://microsoft.github.io/language-server-protocol/specifications/specification-current/#initialize[`InitializeParams` message, in the LSP protocol].
 +The spec says that the field type is `any?`, but `rust-analyzer` is looking for a JSON object that is constructed using settings from the list below.
 +Name of the setting, ignoring the `rust-analyzer.` prefix, is used as a path, and value of the setting becomes the JSON property value.
 +
 +For example, a very common configuration is enabling proc-macro support, which can be achieved by sending this JSON:
 +
 +[source,json]
 +----
 +{
 +  "cargo": {
 +    "buildScripts": {
 +      "enable": true
 +    }
 +  },
 +  "procMacro": {
 +    "enable": true
 +  }
 +}
 +----
 +
 +Please consult your editor's documentation to learn more about how to configure https://microsoft.github.io/language-server-protocol/[LSP servers].
 +
 +To verify which configuration is actually used by `rust-analyzer`, set the `RA_LOG` environment variable to `rust_analyzer=info` and look for config-related messages.
 +Logs should show both the JSON that `rust-analyzer` sees as well as the updated config.
 +
 +This is the list of config options `rust-analyzer` supports:
 +
 +include::./generated_config.adoc[]
 +
 +== Non-Cargo Based Projects
 +
 +rust-analyzer does not require Cargo.
 +However, if you use some other build system, you'll have to describe the structure of your project for rust-analyzer in the `rust-project.json` format:
 +
 +[source,TypeScript]
 +----
 +interface JsonProject {
 +    /// Path to the directory with *source code* of
 +    /// sysroot crates.
 +    ///
 +    /// It should point to the directory where std,
 +    /// core, and friends can be found:
 +    ///
 +    /// https://github.com/rust-lang/rust/tree/master/library.
 +    ///
 +    /// If provided, rust-analyzer automatically adds
 +    /// dependencies on sysroot crates. Conversely,
 +    /// if you omit this path, you can specify sysroot
 +    /// dependencies yourself and, for example, have
 +    /// several different "sysroots" in one graph of
 +    /// crates.
 +    sysroot_src?: string;
 +    /// The set of crates comprising the current
 +    /// project. Must include all transitive
 +    /// dependencies as well as sysroot crate (libstd,
 +    /// libcore and such).
 +    crates: Crate[];
 +}
 +
 +interface Crate {
 +    /// Optional crate name used for display purposes,
 +    /// without affecting semantics. See the `deps`
 +    /// key for semantically-significant crate names.
 +    display_name?: string;
 +    /// Path to the root module of the crate.
 +    root_module: string;
 +    /// Edition of the crate.
 +    edition: "2015" | "2018" | "2021";
 +    /// Dependencies
 +    deps: Dep[];
 +    /// Should this crate be treated as a member of
 +    /// current "workspace".
 +    ///
 +    /// By default, inferred from the `root_module`
 +    /// (members are the crates which reside inside
 +    /// the directory opened in the editor).
 +    ///
 +    /// Set this to `false` for things like standard
 +    /// library and 3rd party crates to enable
 +    /// performance optimizations (rust-analyzer
 +    /// assumes that non-member crates don't change).
 +    is_workspace_member?: boolean;
 +    /// Optionally specify the (super)set of `.rs`
 +    /// files comprising this crate.
 +    ///
 +    /// By default, rust-analyzer assumes that only
 +    /// files under `root_module.parent` can belong
 +    /// to a crate. `include_dirs` are included
 +    /// recursively, unless a subdirectory is in
 +    /// `exclude_dirs`.
 +    ///
 +    /// Different crates can share the same `source`.
 +    ///
 +    /// If two crates share an `.rs` file in common,
 +    /// they *must* have the same `source`.
 +    /// rust-analyzer assumes that files from one
 +    /// source can't refer to files in another source.
 +    source?: {
 +        include_dirs: string[],
 +        exclude_dirs: string[],
 +    },
 +    /// The set of cfgs activated for a given crate, like
 +    /// `["unix", "feature=\"foo\"", "feature=\"bar\""]`.
 +    cfg: string[];
 +    /// Target triple for this Crate.
 +    ///
 +    /// Used when running `rustc --print cfg`
 +    /// to get target-specific cfgs.
 +    target?: string;
 +    /// Environment variables, used for
 +    /// the `env!` macro
 +    env: { [key: string]: string; },
 +
 +    /// Whether the crate is a proc-macro crate.
 +    is_proc_macro: boolean;
 +    /// For proc-macro crates, path to compiled
 +    /// proc-macro (.so file).
 +    proc_macro_dylib_path?: string;
 +}
 +
 +interface Dep {
 +    /// Index of a crate in the `crates` array.
 +    crate: number,
 +    /// Name as should appear in the (implicit)
 +    /// `extern crate name` declaration.
 +    name: string,
 +}
 +----
 +
 +This format is provisional and subject to change.
 +Specifically, the `roots` setup will be different eventually.
 +
 +There are three ways to feed `rust-project.json` to rust-analyzer:
 +
 +* Place `rust-project.json` file at the root of the project, and rust-analyzer will discover it.
 +* Specify `"rust-analyzer.linkedProjects": [ "path/to/rust-project.json" ]` in the settings (and make sure that your LSP client sends settings as a part of initialize request).
 +* Specify `"rust-analyzer.linkedProjects": [ { "roots": [...], "crates": [...] }]` inline.
 +
 +Relative paths are interpreted relative to `rust-project.json` file location or (for inline JSON) relative to `rootUri`.
 +
 +See https://github.com/rust-analyzer/rust-project.json-example for a small example.
 +
 +You can set the `RA_LOG` environment variable to `rust_analyzer=info` to inspect how rust-analyzer handles config and project loading.
 +
 +Note that calls to `cargo check` are disabled when using `rust-project.json` by default, so compilation errors and warnings will no longer be sent to your LSP client. To enable these compilation errors you will need to specify explicitly what command rust-analyzer should run to perform the checks using the `checkOnSave.overrideCommand` configuration. As an example, the following configuration explicitly sets `cargo check` as the `checkOnSave` command.
 +
 +[source,json]
 +----
 +{ "rust-analyzer.checkOnSave.overrideCommand": ["cargo", "check", "--message-format=json"] }
 +----
 +
 +The `checkOnSave.overrideCommand` requires the command specified to output json error messages for rust-analyzer to consume. The `--message-format=json` flag does this for `cargo check` so whichever command you use must also output errors in this format. See the <<Configuration>> section for more information.
 +
 +== Security
 +
 +At the moment, rust-analyzer assumes that all code is trusted.
 +Here is a **non-exhaustive** list of ways to make rust-analyzer execute arbitrary code:
 +
 +* proc macros and build scripts are executed by default
 +* `.cargo/config` can override `rustc` with an arbitrary executable
 +* `rust-toolchain.toml` can override `rustc` with an arbitrary executable
 +* VS Code plugin reads configuration from project directory, and that can be used to override paths to various executables, like `rustfmt` or `rust-analyzer` itself.
 +* rust-analyzer's syntax trees library uses a lot of `unsafe` and hasn't been properly audited for memory safety.
 +
 +== Privacy
 +
 +The LSP server performs no network access in itself, but runs `cargo metadata` which will update or download the crate registry and the source code of the project dependencies.
 +If enabled (the default), build scripts and procedural macros can do anything.
 +
 +The Code extension does not access the network.
 +
 +Any other editor plugins are not under the control of the `rust-analyzer` developers. For any privacy concerns, you should check with their respective developers.
 +
 +For `rust-analyzer` developers, `cargo xtask release` uses the GitHub API to put together the release notes.
 +
 +== Features
 +
 +include::./generated_features.adoc[]
 +
 +== Assists (Code Actions)
 +
 +Assists, or code actions, are small local refactorings, available in a particular context.
 +They are usually triggered by a shortcut or by clicking a light bulb icon in the editor.
 +Cursor position or selection is signified by `┃` character.
 +
 +include::./generated_assists.adoc[]
 +
 +== Diagnostics
 +
 +While most errors and warnings provided by rust-analyzer come from the `cargo check` integration, there's a growing number of diagnostics implemented using rust-analyzer's own analysis.
 +Some of these diagnostics don't respect `\#[allow]` or `\#[deny]` attributes yet, but can be turned off using the `rust-analyzer.diagnostics.enable`, `rust-analyzer.diagnostics.experimental.enable` or `rust-analyzer.diagnostics.disabled` settings.
 +
 +include::./generated_diagnostic.adoc[]
 +
 +== Editor Features
 +=== VS Code
 +
 +==== Color configurations
 +
 +It is possible to change the foreground/background color and font family/size of inlay hints.
 +Just add this to your `settings.json`:
 +
 +[source,jsonc]
 +----
 +{
 +  "editor.inlayHints.fontFamily": "Courier New",
 +  "editor.inlayHints.fontSize": 11,
 +
 +  "workbench.colorCustomizations": {
 +    // Name of the theme you are currently using
 +    "[Default Dark+]": {
 +      "editorInlayHint.foreground": "#868686f0",
 +      "editorInlayHint.background": "#3d3d3d48",
 +
 +      // Overrides for specific kinds of inlay hints
 +      "editorInlayHint.typeForeground": "#fdb6fdf0",
 +      "editorInlayHint.parameterForeground": "#fdb6fdf0",
 +    }
 +  }
 +}
 +----
 +
 +==== Semantic style customizations
 +
 +You can customize the look of different semantic elements in the source code.
 +For example, mutable bindings are underlined by default and you can override this behavior by adding the following section to your `settings.json`:
 +
 +[source,jsonc]
 +----
 +{
 +  "editor.semanticTokenColorCustomizations": {
 +    "rules": {
 +      "*.mutable": {
 +        "fontStyle": "", // underline is the default
 +      },
 +    }
 +  },
 +}
 +----
 +
 +Most themes don't support styling unsafe operations differently yet. You can fix this by adding overrides for the rules `operator.unsafe`, `function.unsafe`, and `method.unsafe`:
 +
 +[source,jsonc]
 +----
 +{
 +   "editor.semanticTokenColorCustomizations": {
 +         "rules": {
 +             "operator.unsafe": "#ff6600",
 +             "function.unsafe": "#ff6600",
 +             "method.unsafe": "#ff6600"
 +         }
 +    },
 +}
 +----
 +
 +In addition to the top-level rules you can specify overrides for specific themes. For example, if you wanted to use a darker text color on a specific light theme, you might write:
 +
 +[source,jsonc]
 +----
 +{
 +   "editor.semanticTokenColorCustomizations": {
 +         "rules": {
 +             "operator.unsafe": "#ff6600"
 +         },
 +         "[Ayu Light]": {
 +            "rules": {
 +               "operator.unsafe": "#572300"
 +            }
 +         }
 +    },
 +}
 +----
 +
 +Make sure you include the brackets around the theme name. For example, use `"[Ayu Light]"` to customize the theme Ayu Light.
 +
 +==== Special `when` clause context for keybindings.
 +You may use `inRustProject` context to configure keybindings for rust projects only.
 +For example:
 +
 +[source,json]
 +----
 +{
 +  "key": "ctrl+alt+d",
 +  "command": "rust-analyzer.openDocs",
 +  "when": "inRustProject"
 +}
 +----
 +More about `when` clause contexts https://code.visualstudio.com/docs/getstarted/keybindings#_when-clause-contexts[here].
 +
 +==== Setting runnable environment variables
 +You can use "rust-analyzer.runnableEnv" setting to define runnable environment-specific substitution variables.
 +The simplest way for all runnables in a bunch:
 +```jsonc
 +"rust-analyzer.runnableEnv": {
 +    "RUN_SLOW_TESTS": "1"
 +}
 +```
 +
 +Or it is possible to specify vars more granularly:
 +```jsonc
 +"rust-analyzer.runnableEnv": [
 +    {
 +        // "mask": null, // null mask means that this rule will be applied for all runnables
 +        "env": {
 +             "APP_ID": "1",
 +             "APP_DATA": "asdf"
 +        }
 +    },
 +    {
 +        "mask": "test_name",
 +        "env": {
 +             "APP_ID": "2", // overwrites only APP_ID
 +        }
 +    }
 +]
 +```
 +
 +You can use any valid regular expression as a mask.
 +Also note that a full runnable name is something like *run bin_or_example_name*, *test some::mod::test_name* or *test-mod some::mod*, so it is possible to distinguish binaries, single tests, and test modules with these masks: `"^run"`, `"^test "` (the trailing space matters!), and `"^test-mod"` respectively.
 +
 +==== Compiler feedback from external commands
 +
 +Instead of relying on the built-in `cargo check`, you can configure Code to run a command in the background and use the `$rustc-watch` problem matcher to generate inline error markers from its output.
 +
 +To do this you need to create a new https://code.visualstudio.com/docs/editor/tasks[VS Code Task] and set `rust-analyzer.checkOnSave.enable: false` in preferences.
 +
 +For example, if you want to run https://crates.io/crates/cargo-watch[`cargo watch`] instead, you might add the following to `.vscode/tasks.json`:
 +
 +```json
 +{
 +    "label": "Watch",
 +    "group": "build",
 +    "type": "shell",
 +    "command": "cargo watch",
 +    "problemMatcher": "$rustc-watch",
 +    "isBackground": true
 +}
 +```
++
++==== Live Share
++
++VS Code Live Share has partial support for rust-analyzer.
++
++Live Share _requires_ the official Microsoft build of VS Code, OSS builds will not work correctly.
++
++The host's rust-analyzer instance will be shared with all guests joining the session.
++The guests do not have to have the rust-analyzer extension installed for this to work.
++
++If you are joining a Live Share session and _do_ have rust-analyzer installed locally, commands from the command palette will not work correctly since they will attempt to communicate with the local server.
index 67eabc313c832b75478a5d303e57104f21df50a8,0000000000000000000000000000000000000000..767c5875bf7e7e844c1178675ee082288ff1ac29
mode 100644,000000..100644
--- /dev/null
@@@ -1,1628 -1,0 +1,1658 @@@
 +{
 +    "name": "rust-analyzer",
 +    "displayName": "rust-analyzer",
 +    "description": "Rust language support for Visual Studio Code",
 +    "private": true,
 +    "icon": "icon.png",
 +    "version": "0.5.0-dev",
 +    "releaseTag": null,
 +    "publisher": "rust-lang",
 +    "repository": {
 +        "url": "https://github.com/rust-lang/rust-analyzer.git",
 +        "type": "git"
 +    },
 +    "homepage": "https://rust-analyzer.github.io/",
 +    "license": "MIT OR Apache-2.0",
 +    "keywords": [
 +        "rust"
 +    ],
 +    "categories": [
 +        "Programming Languages"
 +    ],
 +    "engines": {
 +        "vscode": "^1.66.0"
 +    },
 +    "enabledApiProposals": [],
 +    "scripts": {
 +        "vscode:prepublish": "npm run build-base -- --minify",
 +        "package": "vsce package -o rust-analyzer.vsix",
 +        "build-base": "esbuild ./src/main.ts --bundle --outfile=out/main.js --external:vscode --format=cjs --platform=node --target=node16",
 +        "build": "npm run build-base -- --sourcemap",
 +        "watch": "npm run build-base -- --sourcemap --watch",
 +        "lint": "prettier --check . && eslint -c .eslintrc.js --ext ts ./src ./tests",
 +        "fix": "prettier --write . && eslint -c .eslintrc.js --ext ts ./src ./tests --fix",
 +        "pretest": "tsc && npm run build",
 +        "test": "cross-env TEST_VARIABLE=test node ./out/tests/runTests.js"
 +    },
 +    "dependencies": {
 +        "d3": "^7.6.1",
 +        "d3-graphviz": "^4.1.1",
 +        "vscode-languageclient": "^8.0.0-next.14"
 +    },
 +    "devDependencies": {
 +        "@types/node": "~16.11.7",
 +        "@types/vscode": "~1.66.0",
 +        "@typescript-eslint/eslint-plugin": "^5.30.5",
 +        "@typescript-eslint/parser": "^5.30.5",
 +        "@vscode/test-electron": "^2.1.5",
 +        "cross-env": "^7.0.3",
 +        "esbuild": "^0.14.48",
 +        "eslint": "^8.19.0",
 +        "eslint-config-prettier": "^8.5.0",
 +        "ovsx": "^0.5.1",
 +        "prettier": "^2.7.1",
 +        "tslib": "^2.4.0",
 +        "typescript": "^4.7.4",
 +        "vsce": "^2.9.2"
 +    },
 +    "activationEvents": [
 +        "onLanguage:rust",
 +        "onCommand:rust-analyzer.analyzerStatus",
 +        "onCommand:rust-analyzer.memoryUsage",
 +        "onCommand:rust-analyzer.reloadWorkspace",
 +        "workspaceContains:*/Cargo.toml",
 +        "workspaceContains:*/rust-project.json"
 +    ],
 +    "main": "./out/main",
 +    "contributes": {
 +        "taskDefinitions": [
 +            {
 +                "type": "cargo",
 +                "required": [
 +                    "command"
 +                ],
 +                "properties": {
 +                    "label": {
 +                        "type": "string"
 +                    },
 +                    "command": {
 +                        "type": "string"
 +                    },
 +                    "args": {
 +                        "type": "array",
 +                        "items": {
 +                            "type": "string"
 +                        }
 +                    },
 +                    "env": {
 +                        "type": "object",
 +                        "patternProperties": {
 +                            ".+": {
 +                                "type": "string"
 +                            }
 +                        }
 +                    }
 +                }
 +            }
 +        ],
 +        "commands": [
 +            {
 +                "command": "rust-analyzer.syntaxTree",
 +                "title": "Show Syntax Tree",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.viewHir",
 +                "title": "View Hir",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.viewFileText",
 +                "title": "View File Text (as seen by the server)",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.viewItemTree",
 +                "title": "Debug ItemTree",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.viewCrateGraph",
 +                "title": "View Crate Graph",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.viewFullCrateGraph",
 +                "title": "View Crate Graph (Full)",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.expandMacro",
 +                "title": "Expand macro recursively",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.matchingBrace",
 +                "title": "Find matching brace",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.parentModule",
 +                "title": "Locate parent module",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.joinLines",
 +                "title": "Join lines",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.run",
 +                "title": "Run",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.copyRunCommandLine",
 +                "title": "Copy Run Command Line",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.debug",
 +                "title": "Debug",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.newDebugConfig",
 +                "title": "Generate launch configuration",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.analyzerStatus",
 +                "title": "Status",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.memoryUsage",
 +                "title": "Memory Usage (Clears Database)",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.shuffleCrateGraph",
 +                "title": "Shuffle Crate Graph",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.reloadWorkspace",
 +                "title": "Reload workspace",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.reload",
 +                "title": "Restart server",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.onEnter",
 +                "title": "Enhanced enter key",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.ssr",
 +                "title": "Structural Search Replace",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.serverVersion",
 +                "title": "Show RA Version",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.toggleInlayHints",
 +                "title": "Toggle inlay hints",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.openDocs",
 +                "title": "Open docs under cursor",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.openCargoToml",
 +                "title": "Open Cargo.toml",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.peekTests",
 +                "title": "Peek related tests",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.moveItemUp",
 +                "title": "Move item up",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.moveItemDown",
 +                "title": "Move item down",
 +                "category": "rust-analyzer"
 +            },
 +            {
 +                "command": "rust-analyzer.cancelFlycheck",
 +                "title": "Cancel running flychecks",
 +                "category": "rust-analyzer"
 +            }
 +        ],
 +        "keybindings": [
 +            {
 +                "command": "rust-analyzer.parentModule",
 +                "key": "ctrl+shift+u",
 +                "when": "editorTextFocus && editorLangId == rust"
 +            },
 +            {
 +                "command": "rust-analyzer.matchingBrace",
 +                "key": "ctrl+shift+m",
 +                "when": "editorTextFocus && editorLangId == rust"
 +            },
 +            {
 +                "command": "rust-analyzer.joinLines",
 +                "key": "ctrl+shift+j",
 +                "when": "editorTextFocus && editorLangId == rust"
 +            }
 +        ],
 +        "configuration": {
 +            "type": "object",
 +            "title": "rust-analyzer",
 +            "properties": {
 +                "rust-analyzer.cargoRunner": {
 +                    "type": [
 +                        "null",
 +                        "string"
 +                    ],
 +                    "default": null,
 +                    "description": "Custom cargo runner extension ID."
 +                },
 +                "rust-analyzer.runnableEnv": {
 +                    "anyOf": [
 +                        {
 +                            "type": "null"
 +                        },
 +                        {
 +                            "type": "array",
 +                            "items": {
 +                                "type": "object",
 +                                "properties": {
 +                                    "mask": {
 +                                        "type": "string",
 +                                        "description": "Runnable name mask"
 +                                    },
 +                                    "env": {
 +                                        "type": "object",
 +                                        "description": "Variables in form of { \"key\": \"value\"}"
 +                                    }
 +                                }
 +                            }
 +                        },
 +                        {
 +                            "type": "object",
 +                            "description": "Variables in form of { \"key\": \"value\"}"
 +                        }
 +                    ],
 +                    "default": null,
 +                    "markdownDescription": "Environment variables passed to the runnable launched using `Test` or `Debug` lens or `rust-analyzer.run` command."
 +                },
 +                "rust-analyzer.server.path": {
 +                    "type": [
 +                        "null",
 +                        "string"
 +                    ],
 +                    "scope": "machine-overridable",
 +                    "default": null,
 +                    "markdownDescription": "Path to rust-analyzer executable (points to bundled binary by default)."
 +                },
 +                "rust-analyzer.server.extraEnv": {
 +                    "type": [
 +                        "null",
 +                        "object"
 +                    ],
 +                    "additionalProperties": {
 +                        "type": [
 +                            "string",
 +                            "number"
 +                        ]
 +                    },
 +                    "default": null,
 +                    "markdownDescription": "Extra environment variables that will be passed to the rust-analyzer executable. Useful for passing e.g. `RA_LOG` for debugging."
 +                },
 +                "rust-analyzer.trace.server": {
 +                    "type": "string",
 +                    "scope": "window",
 +                    "enum": [
 +                        "off",
 +                        "messages",
 +                        "verbose"
 +                    ],
 +                    "enumDescriptions": [
 +                        "No traces",
 +                        "Error only",
 +                        "Full log"
 +                    ],
 +                    "default": "off",
 +                    "description": "Trace requests to the rust-analyzer (this is usually overly verbose and not recommended for regular users)."
 +                },
 +                "rust-analyzer.trace.extension": {
 +                    "description": "Enable logging of VS Code extensions itself.",
 +                    "type": "boolean",
 +                    "default": false
 +                },
 +                "rust-analyzer.debug.engine": {
 +                    "type": "string",
 +                    "enum": [
 +                        "auto",
 +                        "vadimcn.vscode-lldb",
 +                        "ms-vscode.cpptools"
 +                    ],
 +                    "default": "auto",
 +                    "description": "Preferred debug engine.",
 +                    "markdownEnumDescriptions": [
 +                        "First try to use [CodeLLDB](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb), if it's not installed try to use [MS C++ tools](https://marketplace.visualstudio.com/items?itemName=ms-vscode.cpptools).",
 +                        "Use [CodeLLDB](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb)",
 +                        "Use [MS C++ tools](https://marketplace.visualstudio.com/items?itemName=ms-vscode.cpptools)"
 +                    ]
 +                },
 +                "rust-analyzer.debug.sourceFileMap": {
 +                    "type": [
 +                        "object",
 +                        "string"
 +                    ],
 +                    "const": "auto",
 +                    "description": "Optional source file mappings passed to the debug engine.",
 +                    "default": {
 +                        "/rustc/<id>": "${env:USERPROFILE}/.rustup/toolchains/<toolchain-id>/lib/rustlib/src/rust"
 +                    }
 +                },
 +                "rust-analyzer.debug.openDebugPane": {
 +                    "markdownDescription": "Whether to open up the `Debug Panel` on debugging start.",
 +                    "type": "boolean",
 +                    "default": false
 +                },
 +                "rust-analyzer.debug.engineSettings": {
 +                    "type": "object",
 +                    "default": {},
 +                    "markdownDescription": "Optional settings passed to the debug engine. Example: `{ \"lldb\": { \"terminal\":\"external\"} }`"
 +                },
 +                "rust-analyzer.restartServerOnConfigChange": {
 +                    "markdownDescription": "Whether to restart the server automatically when certain settings that require a restart are changed.",
 +                    "default": false,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.typing.continueCommentsOnNewline": {
 +                    "markdownDescription": "Whether to prefix newlines after comments with the corresponding comment prefix.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "$generated-start": {},
 +                "rust-analyzer.assist.expressionFillDefault": {
 +                    "markdownDescription": "Placeholder expression to use for missing expressions in assists.",
 +                    "default": "todo",
 +                    "type": "string",
 +                    "enum": [
 +                        "todo",
 +                        "default"
 +                    ],
 +                    "enumDescriptions": [
 +                        "Fill missing expressions with the `todo` macro",
 +                        "Fill missing expressions with reasonable defaults, `new` or `default` constructors."
 +                    ]
 +                },
 +                "rust-analyzer.cachePriming.enable": {
 +                    "markdownDescription": "Warm up caches on project load.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.cachePriming.numThreads": {
 +                    "markdownDescription": "How many worker threads to handle priming caches. The default `0` means to pick automatically.",
 +                    "default": 0,
 +                    "type": "number",
 +                    "minimum": 0,
 +                    "maximum": 255
 +                },
 +                "rust-analyzer.cargo.autoreload": {
 +                    "markdownDescription": "Automatically refresh project info via `cargo metadata` on\n`Cargo.toml` or `.cargo/config.toml` changes.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.cargo.buildScripts.enable": {
 +                    "markdownDescription": "Run build scripts (`build.rs`) for more precise code analysis.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.cargo.buildScripts.overrideCommand": {
 +                    "markdownDescription": "Override the command rust-analyzer uses to run build scripts and\nbuild procedural macros. The command is required to output json\nand should therefore include `--message-format=json` or a similar\noption.\n\nBy default, a cargo invocation will be constructed for the configured\ntargets and features, with the following base command line:\n\n```bash\ncargo check --quiet --workspace --message-format=json --all-targets\n```\n.",
 +                    "default": null,
 +                    "type": [
 +                        "null",
 +                        "array"
 +                    ],
 +                    "items": {
 +                        "type": "string"
 +                    }
 +                },
 +                "rust-analyzer.cargo.buildScripts.useRustcWrapper": {
 +                    "markdownDescription": "Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to\navoid checking unnecessary things.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.cargo.features": {
 +                    "markdownDescription": "List of features to activate.\n\nSet this to `\"all\"` to pass `--all-features` to cargo.",
 +                    "default": [],
 +                    "anyOf": [
 +                        {
 +                            "type": "string",
 +                            "enum": [
 +                                "all"
 +                            ],
 +                            "enumDescriptions": [
 +                                "Pass `--all-features` to cargo"
 +                            ]
 +                        },
 +                        {
 +                            "type": "array",
 +                            "items": {
 +                                "type": "string"
 +                            }
 +                        }
 +                    ]
 +                },
 +                "rust-analyzer.cargo.noDefaultFeatures": {
 +                    "markdownDescription": "Whether to pass `--no-default-features` to cargo.",
 +                    "default": false,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.cargo.noSysroot": {
 +                    "markdownDescription": "Internal config for debugging, disables loading of sysroot crates.",
 +                    "default": false,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.cargo.target": {
 +                    "markdownDescription": "Compilation target override (target triple).",
 +                    "default": null,
 +                    "type": [
 +                        "null",
 +                        "string"
 +                    ]
 +                },
 +                "rust-analyzer.cargo.unsetTest": {
 +                    "markdownDescription": "Unsets `#[cfg(test)]` for the specified crates.",
 +                    "default": [
 +                        "core"
 +                    ],
 +                    "type": "array",
 +                    "items": {
 +                        "type": "string"
 +                    }
 +                },
 +                "rust-analyzer.checkOnSave.allTargets": {
 +                    "markdownDescription": "Check all targets and tests (`--all-targets`).",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.checkOnSave.command": {
 +                    "markdownDescription": "Cargo command to use for `cargo check`.",
 +                    "default": "check",
 +                    "type": "string"
 +                },
 +                "rust-analyzer.checkOnSave.enable": {
 +                    "markdownDescription": "Run specified `cargo check` command for diagnostics on save.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.checkOnSave.extraArgs": {
 +                    "markdownDescription": "Extra arguments for `cargo check`.",
 +                    "default": [],
 +                    "type": "array",
 +                    "items": {
 +                        "type": "string"
 +                    }
 +                },
 +                "rust-analyzer.checkOnSave.features": {
 +                    "markdownDescription": "List of features to activate. Defaults to\n`#rust-analyzer.cargo.features#`.\n\nSet to `\"all\"` to pass `--all-features` to Cargo.",
 +                    "default": null,
 +                    "anyOf": [
 +                        {
 +                            "type": "string",
 +                            "enum": [
 +                                "all"
 +                            ],
 +                            "enumDescriptions": [
 +                                "Pass `--all-features` to cargo"
 +                            ]
 +                        },
 +                        {
 +                            "type": "array",
 +                            "items": {
 +                                "type": "string"
 +                            }
 +                        },
 +                        {
 +                            "type": "null"
 +                        }
 +                    ]
 +                },
 +                "rust-analyzer.checkOnSave.noDefaultFeatures": {
 +                    "markdownDescription": "Whether to pass `--no-default-features` to Cargo. Defaults to\n`#rust-analyzer.cargo.noDefaultFeatures#`.",
 +                    "default": null,
 +                    "type": [
 +                        "null",
 +                        "boolean"
 +                    ]
 +                },
 +                "rust-analyzer.checkOnSave.overrideCommand": {
 +                    "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefor include `--message-format=json` or a similar option.\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects, this command is invoked for\neach of them, with the working directory being the project root\n(i.e., the folder containing the `Cargo.toml`).\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.",
 +                    "default": null,
 +                    "type": [
 +                        "null",
 +                        "array"
 +                    ],
 +                    "items": {
 +                        "type": "string"
 +                    }
 +                },
 +                "rust-analyzer.checkOnSave.target": {
 +                    "markdownDescription": "Check for a specific target. Defaults to\n`#rust-analyzer.cargo.target#`.",
 +                    "default": null,
 +                    "type": [
 +                        "null",
 +                        "string"
 +                    ]
 +                },
 +                "rust-analyzer.completion.autoimport.enable": {
 +                    "markdownDescription": "Toggles the additional completions that automatically add imports when completed.\nNote that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.completion.autoself.enable": {
 +                    "markdownDescription": "Toggles the additional completions that automatically show method calls and field accesses\nwith `self` prefixed to them when inside a method.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.completion.callable.snippets": {
 +                    "markdownDescription": "Whether to add parenthesis and argument snippets when completing function.",
 +                    "default": "fill_arguments",
 +                    "type": "string",
 +                    "enum": [
 +                        "fill_arguments",
 +                        "add_parentheses",
 +                        "none"
 +                    ],
 +                    "enumDescriptions": [
 +                        "Add call parentheses and pre-fill arguments.",
 +                        "Add call parentheses.",
 +                        "Do no snippet completions for callables."
 +                    ]
 +                },
 +                "rust-analyzer.completion.postfix.enable": {
 +                    "markdownDescription": "Whether to show postfix snippets like `dbg`, `if`, `not`, etc.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.completion.privateEditable.enable": {
 +                    "markdownDescription": "Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.",
 +                    "default": false,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.completion.snippets.custom": {
 +                    "markdownDescription": "Custom completion snippets.",
 +                    "default": {
 +                        "Arc::new": {
 +                            "postfix": "arc",
 +                            "body": "Arc::new(${receiver})",
 +                            "requires": "std::sync::Arc",
 +                            "description": "Put the expression into an `Arc`",
 +                            "scope": "expr"
 +                        },
 +                        "Rc::new": {
 +                            "postfix": "rc",
 +                            "body": "Rc::new(${receiver})",
 +                            "requires": "std::rc::Rc",
 +                            "description": "Put the expression into an `Rc`",
 +                            "scope": "expr"
 +                        },
 +                        "Box::pin": {
 +                            "postfix": "pinbox",
 +                            "body": "Box::pin(${receiver})",
 +                            "requires": "std::boxed::Box",
 +                            "description": "Put the expression into a pinned `Box`",
 +                            "scope": "expr"
 +                        },
 +                        "Ok": {
 +                            "postfix": "ok",
 +                            "body": "Ok(${receiver})",
 +                            "description": "Wrap the expression in a `Result::Ok`",
 +                            "scope": "expr"
 +                        },
 +                        "Err": {
 +                            "postfix": "err",
 +                            "body": "Err(${receiver})",
 +                            "description": "Wrap the expression in a `Result::Err`",
 +                            "scope": "expr"
 +                        },
 +                        "Some": {
 +                            "postfix": "some",
 +                            "body": "Some(${receiver})",
 +                            "description": "Wrap the expression in an `Option::Some`",
 +                            "scope": "expr"
 +                        }
 +                    },
 +                    "type": "object"
 +                },
 +                "rust-analyzer.diagnostics.disabled": {
 +                    "markdownDescription": "List of rust-analyzer diagnostics to disable.",
 +                    "default": [],
 +                    "type": "array",
 +                    "items": {
 +                        "type": "string"
 +                    },
 +                    "uniqueItems": true
 +                },
 +                "rust-analyzer.diagnostics.enable": {
 +                    "markdownDescription": "Whether to show native rust-analyzer diagnostics.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.diagnostics.experimental.enable": {
 +                    "markdownDescription": "Whether to show experimental rust-analyzer diagnostics that might\nhave more false positives than usual.",
 +                    "default": false,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.diagnostics.remapPrefix": {
 +                    "markdownDescription": "Map of prefixes to be substituted when parsing diagnostic file paths.\nThis should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.",
 +                    "default": {},
 +                    "type": "object"
 +                },
 +                "rust-analyzer.diagnostics.warningsAsHint": {
 +                    "markdownDescription": "List of warnings that should be displayed with hint severity.\n\nThe warnings will be indicated by faded text or three dots in code\nand will not show up in the `Problems Panel`.",
 +                    "default": [],
 +                    "type": "array",
 +                    "items": {
 +                        "type": "string"
 +                    }
 +                },
 +                "rust-analyzer.diagnostics.warningsAsInfo": {
 +                    "markdownDescription": "List of warnings that should be displayed with info severity.\n\nThe warnings will be indicated by a blue squiggly underline in code\nand a blue icon in the `Problems Panel`.",
 +                    "default": [],
 +                    "type": "array",
 +                    "items": {
 +                        "type": "string"
 +                    }
 +                },
 +                "rust-analyzer.files.excludeDirs": {
 +                    "markdownDescription": "These directories will be ignored by rust-analyzer. They are\nrelative to the workspace root, and globs are not supported. You may\nalso need to add the folders to Code's `files.watcherExclude`.",
 +                    "default": [],
 +                    "type": "array",
 +                    "items": {
 +                        "type": "string"
 +                    }
 +                },
 +                "rust-analyzer.files.watcher": {
 +                    "markdownDescription": "Controls file watching implementation.",
 +                    "default": "client",
 +                    "type": "string",
 +                    "enum": [
 +                        "client",
 +                        "server"
 +                    ],
 +                    "enumDescriptions": [
 +                        "Use the client (editor) to watch files for changes",
 +                        "Use server-side file watching"
 +                    ]
 +                },
 +                "rust-analyzer.highlightRelated.breakPoints.enable": {
 +                    "markdownDescription": "Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.highlightRelated.exitPoints.enable": {
 +                    "markdownDescription": "Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.highlightRelated.references.enable": {
 +                    "markdownDescription": "Enables highlighting of related references while the cursor is on any identifier.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.highlightRelated.yieldPoints.enable": {
 +                    "markdownDescription": "Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.hover.actions.debug.enable": {
 +                    "markdownDescription": "Whether to show `Debug` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.hover.actions.enable": {
 +                    "markdownDescription": "Whether to show HoverActions in Rust files.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.hover.actions.gotoTypeDef.enable": {
 +                    "markdownDescription": "Whether to show `Go to Type Definition` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.hover.actions.implementations.enable": {
 +                    "markdownDescription": "Whether to show `Implementations` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.hover.actions.references.enable": {
 +                    "markdownDescription": "Whether to show `References` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
 +                    "default": false,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.hover.actions.run.enable": {
 +                    "markdownDescription": "Whether to show `Run` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.hover.documentation.enable": {
 +                    "markdownDescription": "Whether to show documentation on hover.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.hover.documentation.keywords.enable": {
 +                    "markdownDescription": "Whether to show keyword hover popups. Only applies when\n`#rust-analyzer.hover.documentation.enable#` is set.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.hover.links.enable": {
 +                    "markdownDescription": "Use markdown syntax for links in hover.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.imports.granularity.enforce": {
 +                    "markdownDescription": "Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.",
 +                    "default": false,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.imports.granularity.group": {
 +                    "markdownDescription": "How imports should be grouped into use statements.",
 +                    "default": "crate",
 +                    "type": "string",
 +                    "enum": [
 +                        "preserve",
 +                        "crate",
 +                        "module",
 +                        "item"
 +                    ],
 +                    "enumDescriptions": [
 +                        "Do not change the granularity of any imports and preserve the original structure written by the developer.",
 +                        "Merge imports from the same crate into a single use statement. Conversely, imports from different crates are split into separate statements.",
 +                        "Merge imports from the same module into a single use statement. Conversely, imports from different modules are split into separate statements.",
 +                        "Flatten imports so that each has its own use statement."
 +                    ]
 +                },
 +                "rust-analyzer.imports.group.enable": {
 +                    "markdownDescription": "Group inserted imports by the [following order](https://rust-analyzer.github.io/manual.html#auto-import). Groups are separated by newlines.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.imports.merge.glob": {
 +                    "markdownDescription": "Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.imports.prefix": {
 +                    "markdownDescription": "The path structure for newly inserted paths to use.",
 +                    "default": "plain",
 +                    "type": "string",
 +                    "enum": [
 +                        "plain",
 +                        "self",
 +                        "crate"
 +                    ],
 +                    "enumDescriptions": [
 +                        "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item.",
 +                        "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item. Prefixes `self` in front of the path if it starts with a module.",
 +                        "Force import paths to be absolute by always starting them with `crate` or the extern crate name they come from."
 +                    ]
 +                },
 +                "rust-analyzer.inlayHints.bindingModeHints.enable": {
 +                    "markdownDescription": "Whether to show inlay type hints for binding modes.",
 +                    "default": false,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.inlayHints.chainingHints.enable": {
 +                    "markdownDescription": "Whether to show inlay type hints for method chains.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.inlayHints.closingBraceHints.enable": {
 +                    "markdownDescription": "Whether to show inlay hints after a closing `}` to indicate what item it belongs to.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.inlayHints.closingBraceHints.minLines": {
 +                    "markdownDescription": "Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1\nto always show them).",
 +                    "default": 25,
 +                    "type": "integer",
 +                    "minimum": 0
 +                },
 +                "rust-analyzer.inlayHints.closureReturnTypeHints.enable": {
 +                    "markdownDescription": "Whether to show inlay type hints for return types of closures.",
 +                    "default": "never",
 +                    "type": "string",
 +                    "enum": [
 +                        "always",
 +                        "never",
 +                        "with_block"
 +                    ],
 +                    "enumDescriptions": [
 +                        "Always show type hints for return types of closures.",
 +                        "Never show type hints for return types of closures.",
 +                        "Only show type hints for return types of closures with blocks."
 +                    ]
 +                },
 +                "rust-analyzer.inlayHints.lifetimeElisionHints.enable": {
 +                    "markdownDescription": "Whether to show inlay type hints for elided lifetimes in function signatures.",
 +                    "default": "never",
 +                    "type": "string",
 +                    "enum": [
 +                        "always",
 +                        "never",
 +                        "skip_trivial"
 +                    ],
 +                    "enumDescriptions": [
 +                        "Always show lifetime elision hints.",
 +                        "Never show lifetime elision hints.",
 +                        "Only show lifetime elision hints if a return type is involved."
 +                    ]
 +                },
 +                "rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames": {
 +                    "markdownDescription": "Whether to prefer using parameter names as the name for elided lifetime hints if possible.",
 +                    "default": false,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.inlayHints.maxLength": {
 +                    "markdownDescription": "Maximum length for inlay hints. Set to null to have an unlimited length.",
 +                    "default": 25,
 +                    "type": [
 +                        "null",
 +                        "integer"
 +                    ],
 +                    "minimum": 0
 +                },
 +                "rust-analyzer.inlayHints.parameterHints.enable": {
 +                    "markdownDescription": "Whether to show function parameter name inlay hints at the call\nsite.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.inlayHints.reborrowHints.enable": {
 +                    "markdownDescription": "Whether to show inlay type hints for compiler inserted reborrows.",
 +                    "default": "never",
 +                    "type": "string",
 +                    "enum": [
 +                        "always",
 +                        "never",
 +                        "mutable"
 +                    ],
 +                    "enumDescriptions": [
 +                        "Always show reborrow hints.",
 +                        "Never show reborrow hints.",
 +                        "Only show mutable reborrow hints."
 +                    ]
 +                },
 +                "rust-analyzer.inlayHints.renderColons": {
 +                    "markdownDescription": "Whether to render leading colons for type hints, and trailing colons for parameter hints.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.inlayHints.typeHints.enable": {
 +                    "markdownDescription": "Whether to show inlay type hints for variables.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.inlayHints.typeHints.hideClosureInitialization": {
 +                    "markdownDescription": "Whether to hide inlay type hints for `let` statements that initialize to a closure.\nOnly applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.",
 +                    "default": false,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.inlayHints.typeHints.hideNamedConstructor": {
 +                    "markdownDescription": "Whether to hide inlay type hints for constructors.",
 +                    "default": false,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.joinLines.joinAssignments": {
 +                    "markdownDescription": "Join lines merges consecutive declaration and initialization of an assignment.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.joinLines.joinElseIf": {
 +                    "markdownDescription": "Join lines inserts else between consecutive ifs.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.joinLines.removeTrailingComma": {
 +                    "markdownDescription": "Join lines removes trailing commas.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.joinLines.unwrapTrivialBlock": {
 +                    "markdownDescription": "Join lines unwraps trivial blocks.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.lens.debug.enable": {
 +                    "markdownDescription": "Whether to show `Debug` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.lens.enable": {
 +                    "markdownDescription": "Whether to show CodeLens in Rust files.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.lens.forceCustomCommands": {
 +                    "markdownDescription": "Internal config: use custom client-side commands even when the\nclient doesn't set the corresponding capability.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.lens.implementations.enable": {
 +                    "markdownDescription": "Whether to show `Implementations` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.lens.references.adt.enable": {
 +                    "markdownDescription": "Whether to show `References` lens for Struct, Enum, and Union.\nOnly applies when `#rust-analyzer.lens.enable#` is set.",
 +                    "default": false,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.lens.references.enumVariant.enable": {
 +                    "markdownDescription": "Whether to show `References` lens for Enum Variants.\nOnly applies when `#rust-analyzer.lens.enable#` is set.",
 +                    "default": false,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.lens.references.method.enable": {
 +                    "markdownDescription": "Whether to show `Method References` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
 +                    "default": false,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.lens.references.trait.enable": {
 +                    "markdownDescription": "Whether to show `References` lens for Trait.\nOnly applies when `#rust-analyzer.lens.enable#` is set.",
 +                    "default": false,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.lens.run.enable": {
 +                    "markdownDescription": "Whether to show `Run` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.linkedProjects": {
 +                    "markdownDescription": "Disable project auto-discovery in favor of explicitly specified set\nof projects.\n\nElements must be paths pointing to `Cargo.toml`,\n`rust-project.json`, or JSON objects in `rust-project.json` format.",
 +                    "default": [],
 +                    "type": "array",
 +                    "items": {
 +                        "type": [
 +                            "string",
 +                            "object"
 +                        ]
 +                    }
 +                },
 +                "rust-analyzer.lru.capacity": {
 +                    "markdownDescription": "Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.",
 +                    "default": null,
 +                    "type": [
 +                        "null",
 +                        "integer"
 +                    ],
 +                    "minimum": 0
 +                },
 +                "rust-analyzer.notifications.cargoTomlNotFound": {
 +                    "markdownDescription": "Whether to show `can't find Cargo.toml` error message.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.procMacro.attributes.enable": {
 +                    "markdownDescription": "Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.procMacro.enable": {
 +                    "markdownDescription": "Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.procMacro.ignored": {
 +                    "markdownDescription": "These proc-macros will be ignored when trying to expand them.\n\nThis config takes a map of crate names with the exported proc-macro names to ignore as values.",
 +                    "default": {},
 +                    "type": "object"
 +                },
 +                "rust-analyzer.procMacro.server": {
 +                    "markdownDescription": "Internal config, path to proc-macro server executable (typically,\nthis is rust-analyzer itself, but we override this in tests).",
 +                    "default": null,
 +                    "type": [
 +                        "null",
 +                        "string"
 +                    ]
 +                },
 +                "rust-analyzer.runnables.command": {
 +                    "markdownDescription": "Command to be executed instead of 'cargo' for runnables.",
 +                    "default": null,
 +                    "type": [
 +                        "null",
 +                        "string"
 +                    ]
 +                },
 +                "rust-analyzer.runnables.extraArgs": {
 +                    "markdownDescription": "Additional arguments to be passed to cargo for runnables such as\ntests or binaries. For example, it may be `--release`.",
 +                    "default": [],
 +                    "type": "array",
 +                    "items": {
 +                        "type": "string"
 +                    }
 +                },
 +                "rust-analyzer.rustc.source": {
 +                    "markdownDescription": "Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private\nprojects, or \"discover\" to try to automatically find it if the `rustc-dev` component\nis installed.\n\nAny project which uses rust-analyzer with the rustcPrivate\ncrates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.\n\nThis option does not take effect until rust-analyzer is restarted.",
 +                    "default": null,
 +                    "type": [
 +                        "null",
 +                        "string"
 +                    ]
 +                },
 +                "rust-analyzer.rustfmt.extraArgs": {
 +                    "markdownDescription": "Additional arguments to `rustfmt`.",
 +                    "default": [],
 +                    "type": "array",
 +                    "items": {
 +                        "type": "string"
 +                    }
 +                },
 +                "rust-analyzer.rustfmt.overrideCommand": {
 +                    "markdownDescription": "Advanced option, fully override the command rust-analyzer uses for\nformatting.",
 +                    "default": null,
 +                    "type": [
 +                        "null",
 +                        "array"
 +                    ],
 +                    "items": {
 +                        "type": "string"
 +                    }
 +                },
 +                "rust-analyzer.rustfmt.rangeFormatting.enable": {
 +                    "markdownDescription": "Enables the use of rustfmt's unstable range formatting command for the\n`textDocument/rangeFormatting` request. The rustfmt option is unstable and only\navailable on a nightly build.",
 +                    "default": false,
 +                    "type": "boolean"
 +                },
++                "rust-analyzer.semanticHighlighting.doc.comment.inject.enable": {
++                    "markdownDescription": "Inject additional highlighting into doc comments.\n\nWhen enabled, rust-analyzer will highlight rust source in doc comments as well as intra\ndoc links.",
++                    "default": true,
++                    "type": "boolean"
++                },
++                "rust-analyzer.semanticHighlighting.operator.enable": {
++                    "markdownDescription": "Use semantic tokens for operators.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for operator tokens when\nthey are tagged with modifiers.",
++                    "default": true,
++                    "type": "boolean"
++                },
++                "rust-analyzer.semanticHighlighting.operator.specialization.enable": {
++                    "markdownDescription": "Use specialized semantic tokens for operators.\n\nWhen enabled, rust-analyzer will emit special token types for operator tokens instead\nof the generic `operator` token type.",
++                    "default": false,
++                    "type": "boolean"
++                },
++                "rust-analyzer.semanticHighlighting.punctuation.enable": {
++                    "markdownDescription": "Use semantic tokens for punctuations.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when\nthey are tagged with modifiers or have a special role.",
++                    "default": false,
++                    "type": "boolean"
++                },
++                "rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang": {
++                    "markdownDescription": "When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro\ncalls.",
++                    "default": false,
++                    "type": "boolean"
++                },
++                "rust-analyzer.semanticHighlighting.punctuation.specialization.enable": {
++                    "markdownDescription": "Use specialized semantic tokens for punctuations.\n\nWhen enabled, rust-analyzer will emit special token types for punctuation tokens instead\nof the generic `punctuation` token type.",
++                    "default": false,
++                    "type": "boolean"
++                },
 +                "rust-analyzer.semanticHighlighting.strings.enable": {
 +                    "markdownDescription": "Use semantic tokens for strings.\n\nIn some editors (e.g. vscode) semantic tokens override other highlighting grammars.\nBy disabling semantic tokens for strings, other grammars can be used to highlight\ntheir contents.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.signatureInfo.detail": {
 +                    "markdownDescription": "Show full signature of the callable. Only shows parameters if disabled.",
 +                    "default": "full",
 +                    "type": "string",
 +                    "enum": [
 +                        "full",
 +                        "parameters"
 +                    ],
 +                    "enumDescriptions": [
 +                        "Show the entire signature.",
 +                        "Show only the parameters."
 +                    ]
 +                },
 +                "rust-analyzer.signatureInfo.documentation.enable": {
 +                    "markdownDescription": "Show documentation.",
 +                    "default": true,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.typing.autoClosingAngleBrackets.enable": {
 +                    "markdownDescription": "Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list.",
 +                    "default": false,
 +                    "type": "boolean"
 +                },
 +                "rust-analyzer.workspace.symbol.search.kind": {
 +                    "markdownDescription": "Workspace symbol search kind.",
 +                    "default": "only_types",
 +                    "type": "string",
 +                    "enum": [
 +                        "only_types",
 +                        "all_symbols"
 +                    ],
 +                    "enumDescriptions": [
 +                        "Search for types only.",
 +                        "Search for all symbol kinds."
 +                    ]
 +                },
 +                "rust-analyzer.workspace.symbol.search.limit": {
 +                    "markdownDescription": "Limits the number of items returned from a workspace symbol search (defaults to 128).\nSome clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.\nOther clients require all results upfront and might require a higher limit.",
 +                    "default": 128,
 +                    "type": "integer",
 +                    "minimum": 0
 +                },
 +                "rust-analyzer.workspace.symbol.search.scope": {
 +                    "markdownDescription": "Workspace symbol search scope.",
 +                    "default": "workspace",
 +                    "type": "string",
 +                    "enum": [
 +                        "workspace",
 +                        "workspace_and_dependencies"
 +                    ],
 +                    "enumDescriptions": [
 +                        "Search in current workspace only.",
 +                        "Search in current workspace and dependencies."
 +                    ]
 +                },
 +                "$generated-end": {}
 +            }
 +        },
 +        "problemPatterns": [
 +            {
 +                "name": "rustc",
 +                "patterns": [
 +                    {
 +                        "regexp": "^(warning|warn|error)(?:\\[(.*?)\\])?: (.*)$",
 +                        "severity": 1,
 +                        "code": 2,
 +                        "message": 3
 +                    },
 +                    {
 +                        "regexp": "^[\\s->=]*(.*?):(\\d*):(\\d*)\\s*$",
 +                        "file": 1,
 +                        "line": 2,
 +                        "column": 3
 +                    }
 +                ]
 +            },
 +            {
 +                "name": "rustc-json",
 +                "patterns": [
 +                    {
 +                        "regexp": "^.*\"message\":{\"message\":\"([^\"]*).*?\"file_name\":\"([^\"]+).*?\"line_start\":(\\d+).*?\"line_end\":(\\d+).*?\"column_start\":(\\d+).*?\"column_end\":(\\d+).*}$",
 +                        "message": 1,
 +                        "file": 2,
 +                        "line": 3,
 +                        "endLine": 4,
 +                        "column": 5,
 +                        "endColumn": 6
 +                    }
 +                ]
 +            }
 +        ],
 +        "languages": [
 +            {
 +                "id": "ra_syntax_tree",
 +                "extensions": [
 +                    ".rast"
 +                ]
 +            },
 +            {
 +                "id": "rust",
 +                "extensions": [
 +                    ".rs"
 +                ],
 +                "aliases": [
 +                    "Rust",
 +                    "rs"
 +                ],
 +                "configuration": "language-configuration.json"
 +            }
 +        ],
 +        "grammars": [
 +            {
 +                "language": "ra_syntax_tree",
 +                "scopeName": "source.ra_syntax_tree",
 +                "path": "ra_syntax_tree.tmGrammar.json"
 +            }
 +        ],
 +        "problemMatchers": [
 +            {
 +                "name": "rustc",
 +                "owner": "rustc",
 +                "source": "rustc",
 +                "fileLocation": [
 +                    "autoDetect",
 +                    "${workspaceRoot}"
 +                ],
 +                "pattern": "$rustc"
 +            },
 +            {
 +                "name": "rustc-json",
 +                "owner": "rustc",
 +                "source": "rustc",
 +                "fileLocation": [
 +                    "autoDetect",
 +                    "${workspaceRoot}"
 +                ],
 +                "pattern": "$rustc-json"
 +            },
 +            {
 +                "name": "rustc-watch",
 +                "owner": "rustc",
 +                "source": "rustc",
 +                "fileLocation": [
 +                    "autoDetect",
 +                    "${workspaceRoot}"
 +                ],
 +                "background": {
 +                    "beginsPattern": "^\\[Running\\b",
 +                    "endsPattern": "^\\[Finished running\\b"
 +                },
 +                "pattern": "$rustc"
 +            }
 +        ],
 +        "colors": [
 +            {
 +                "id": "rust_analyzer.syntaxTreeBorder",
 +                "description": "Color of the border displayed in the Rust source code for the selected syntax node (see \"Show Syntax Tree\" command)",
 +                "defaults": {
 +                    "dark": "#ffffff",
 +                    "light": "#b700ff",
 +                    "highContrast": "#b700ff"
 +                }
 +            }
 +        ],
 +        "semanticTokenTypes": [
 +            {
 +                "id": "angle",
 +                "description": "Style for < or >",
 +                "superType": "punctuation"
 +            },
 +            {
 +                "id": "arithmetic",
 +                "description": "Style for arithmetic operators",
 +                "superType": "operator"
 +            },
 +            {
 +                "id": "attribute",
 +                "description": "Style for attributes"
 +            },
 +            {
 +                "id": "attributeBracket",
 +                "description": "Style for attribute invocation brackets, that is the `#[` and `]` tokens",
 +                "superType": "punctuation"
 +            },
 +            {
 +                "id": "bitwise",
 +                "description": "Style for bitwise operators",
 +                "superType": "operator"
 +            },
 +            {
 +                "id": "boolean",
 +                "description": "Style for boolean literals",
 +                "superType": "keyword"
 +            },
 +            {
 +                "id": "brace",
 +                "description": "Style for { or }",
 +                "superType": "punctuation"
 +            },
 +            {
 +                "id": "bracket",
 +                "description": "Style for [ or ]",
 +                "superType": "punctuation"
 +            },
 +            {
 +                "id": "builtinAttribute",
 +                "description": "Style for builtin attributes",
 +                "superType": "attribute"
 +            },
 +            {
 +                "id": "builtinType",
 +                "description": "Style for builtin types",
 +                "superType": "type"
 +            },
 +            {
 +                "id": "character",
 +                "description": "Style for character literals",
 +                "superType": "string"
 +            },
 +            {
 +                "id": "colon",
 +                "description": "Style for :",
 +                "superType": "punctuation"
 +            },
 +            {
 +                "id": "comma",
 +                "description": "Style for ,",
 +                "superType": "punctuation"
 +            },
 +            {
 +                "id": "comparison",
 +                "description": "Style for comparison operators",
 +                "superType": "operator"
 +            },
 +            {
 +                "id": "constParameter",
 +                "description": "Style for const generics"
 +            },
 +            {
 +                "id": "derive",
 +                "description": "Style for derives",
 +                "superType": "attribute"
 +            },
 +            {
 +                "id": "dot",
 +                "description": "Style for .",
 +                "superType": "punctuation"
 +            },
 +            {
 +                "id": "escapeSequence",
 +                "description": "Style for char escapes in strings"
 +            },
 +            {
 +                "id": "formatSpecifier",
 +                "description": "Style for {} placeholders in format strings"
 +            },
 +            {
 +                "id": "label",
 +                "description": "Style for labels"
 +            },
 +            {
 +                "id": "lifetime",
 +                "description": "Style for lifetimes"
 +            },
 +            {
 +                "id": "logical",
 +                "description": "Style for logic operators",
 +                "superType": "operator"
 +            },
 +            {
 +                "id": "macroBang",
 +                "description": "Style for the ! token of macro calls",
 +                "superType": "punctuation"
 +            },
 +            {
 +                "id": "operator",
 +                "description": "Style for operators",
 +                "superType": "punctuation"
 +            },
 +            {
 +                "id": "parenthesis",
 +                "description": "Style for ( or )",
 +                "superType": "punctuation"
 +            },
 +            {
 +                "id": "punctuation",
 +                "description": "Style for generic punctuation"
 +            },
 +            {
 +                "id": "selfKeyword",
 +                "description": "Style for the self keyword",
 +                "superType": "keyword"
 +            },
 +            {
 +                "id": "selfTypeKeyword",
 +                "description": "Style for the self type keyword",
 +                "superType": "keyword"
 +            },
 +            {
 +                "id": "semicolon",
 +                "description": "Style for ;",
 +                "superType": "punctuation"
 +            },
 +            {
 +                "id": "typeAlias",
 +                "description": "Style for type aliases",
 +                "superType": "type"
 +            },
 +            {
 +                "id": "union",
 +                "description": "Style for C-style untagged unions",
 +                "superType": "type"
 +            },
 +            {
 +                "id": "unresolvedReference",
 +                "description": "Style for names which cannot be resolved due to compilation errors"
 +            }
 +        ],
 +        "semanticTokenModifiers": [
 +            {
 +                "id": "async",
 +                "description": "Style for async functions and the `async` and `await` keywords"
 +            },
 +            {
 +                "id": "attribute",
 +                "description": "Style for elements within attributes"
 +            },
 +            {
 +                "id": "callable",
 +                "description": "Style for locals whose type implements one of the `Fn*` traits"
 +            },
 +            {
 +                "id": "constant",
 +                "description": "Style for compile-time constants"
 +            },
 +            {
 +                "id": "consuming",
 +                "description": "Style for locals that are being consumed when used in a function call"
 +            },
 +            {
 +                "id": "controlFlow",
 +                "description": "Style for control-flow related tokens, this includes the `?` operator"
 +            },
 +            {
 +                "id": "crateRoot",
 +                "description": "Style for names resolving to a crate root"
 +            },
 +            {
 +                "id": "injected",
 +                "description": "Style for doc-string injected highlighting like rust source blocks in documentation"
 +            },
 +            {
 +                "id": "intraDocLink",
 +                "description": "Style for intra doc links in doc-strings"
 +            },
 +            {
 +                "id": "library",
 +                "description": "Style for items that are defined outside of the current crate"
 +            },
 +            {
 +                "id": "mutable",
 +                "description": "Style for mutable locals and statics as well as functions taking `&mut self`"
 +            },
 +            {
 +                "id": "public",
 +                "description": "Style for items that are from the current crate and are `pub`"
 +            },
 +            {
 +                "id": "reference",
 +                "description": "Style for locals behind a reference and functions taking `self` by reference"
 +            },
 +            {
 +                "id": "trait",
 +                "description": "Style for associated trait items"
 +            },
 +            {
 +                "id": "unsafe",
 +                "description": "Style for unsafe operations, like unsafe function calls, as well as the `unsafe` token"
 +            }
 +        ],
 +        "semanticTokenScopes": [
 +            {
 +                "language": "rust",
 +                "scopes": {
 +                    "attribute": [
 +                        "meta.attribute.rust"
 +                    ],
 +                    "boolean": [
 +                        "constant.language.boolean.rust"
 +                    ],
 +                    "builtinType": [
 +                        "support.type.primitive.rust"
 +                    ],
 +                    "constParameter": [
 +                        "constant.other.caps.rust"
 +                    ],
 +                    "enum": [
 +                        "entity.name.type.enum.rust"
 +                    ],
 +                    "formatSpecifier": [
 +                        "punctuation.section.embedded.rust"
 +                    ],
 +                    "function": [
 +                        "entity.name.function.rust"
 +                    ],
 +                    "interface": [
 +                        "entity.name.type.trait.rust"
 +                    ],
 +                    "keyword": [
 +                        "keyword.other.rust"
 +                    ],
 +                    "keyword.controlFlow": [
 +                        "keyword.control.rust"
 +                    ],
 +                    "lifetime": [
 +                        "storage.modifier.lifetime.rust"
 +                    ],
 +                    "macroBang": [
 +                        "entity.name.function.macro.rust"
 +                    ],
 +                    "method": [
 +                        "entity.name.function.rust"
 +                    ],
 +                    "struct": [
 +                        "entity.name.type.struct.rust"
 +                    ],
 +                    "typeAlias": [
 +                        "entity.name.type.declaration.rust"
 +                    ],
 +                    "union": [
 +                        "entity.name.type.union.rust"
 +                    ],
 +                    "variable": [
 +                        "variable.other.rust"
 +                    ],
 +                    "variable.constant": [
 +                        "variable.other.constant.rust"
 +                    ],
 +                    "*.mutable": [
 +                        "markup.underline"
 +                    ]
 +                }
 +            }
 +        ],
 +        "menus": {
 +            "commandPalette": [
 +                {
 +                    "command": "rust-analyzer.syntaxTree",
 +                    "when": "inRustProject"
 +                },
 +                {
 +                    "command": "rust-analyzer.viewHir",
 +                    "when": "inRustProject"
 +                },
 +                {
 +                    "command": "rust-analyzer.viewFileText",
 +                    "when": "inRustProject"
 +                },
 +                {
 +                    "command": "rust-analyzer.expandMacro",
 +                    "when": "inRustProject"
 +                },
 +                {
 +                    "command": "rust-analyzer.matchingBrace",
 +                    "when": "inRustProject"
 +                },
 +                {
 +                    "command": "rust-analyzer.parentModule",
 +                    "when": "inRustProject"
 +                },
 +                {
 +                    "command": "rust-analyzer.joinLines",
 +                    "when": "inRustProject"
 +                },
 +                {
 +                    "command": "rust-analyzer.run",
 +                    "when": "inRustProject"
 +                },
 +                {
 +                    "command": "rust-analyzer.debug",
 +                    "when": "inRustProject"
 +                },
 +                {
 +                    "command": "rust-analyzer.newDebugConfig",
 +                    "when": "inRustProject"
 +                },
 +                {
 +                    "command": "rust-analyzer.analyzerStatus",
 +                    "when": "inRustProject"
 +                },
 +                {
 +                    "command": "rust-analyzer.memoryUsage",
 +                    "when": "inRustProject"
 +                },
 +                {
 +                    "command": "rust-analyzer.reloadWorkspace",
 +                    "when": "inRustProject"
 +                },
 +                {
 +                    "command": "rust-analyzer.reload",
 +                    "when": "inRustProject"
 +                },
 +                {
 +                    "command": "rust-analyzer.onEnter",
 +                    "when": "inRustProject"
 +                },
 +                {
 +                    "command": "rust-analyzer.ssr",
 +                    "when": "inRustProject"
 +                },
 +                {
 +                    "command": "rust-analyzer.serverVersion",
 +                    "when": "inRustProject"
 +                },
 +                {
 +                    "command": "rust-analyzer.toggleInlayHints",
 +                    "when": "inRustProject"
 +                },
 +                {
 +                    "command": "rust-analyzer.openDocs",
 +                    "when": "inRustProject"
 +                },
 +                {
 +                    "command": "rust-analyzer.openCargoToml",
 +                    "when": "inRustProject"
 +                }
 +            ],
 +            "editor/context": [
 +                {
 +                    "command": "rust-analyzer.peekTests",
 +                    "when": "inRustProject",
 +                    "group": "navigation@1000"
 +                }
 +            ]
 +        }
 +    }
 +}
index 27ab31db8db4fc668b91953bae3d45cca59dc6e2,0000000000000000000000000000000000000000..05d4d08f70b62d17436dcb32715011bb53449dd6
mode 100644,000000..100644
--- /dev/null
@@@ -1,315 -1,0 +1,293 @@@
- import { updateConfig } from "./config";
 +import * as lc from "vscode-languageclient/node";
 +import * as vscode from "vscode";
 +import * as ra from "../src/lsp_ext";
 +import * as Is from "vscode-languageclient/lib/common/utils/is";
 +import { assert } from "./util";
 +import { WorkspaceEdit } from "vscode";
 +import { Workspace } from "./ctx";
-     // Update outdated user configs
-     await updateConfig(initializationOptions).catch((err) => {
-         void vscode.window.showErrorMessage(`Failed updating old config keys: ${err.message}`);
-     });
 +import { substituteVariablesInEnv } from "./config";
 +import { outputChannel, traceOutputChannel } from "./main";
 +import { randomUUID } from "crypto";
 +
 +// String-to-string map of environment variables handed to the spawned
 +// rust-analyzer server process.
 +export interface Env {
 +    [name: string]: string;
 +}
 +
 +// Command URIs have a form of command:command-name?arguments, where
 +// arguments is a percent-encoded array of data we want to pass along to
 +// the command function. For "Show References" this is a list of all file
 +// URIs with locations of every reference, and it can get quite long.
 +//
 +// To work around it we use an intermediary linkToCommand command. When
 +// we render a command link, a reference to a command with all its arguments
 +// is stored in a map, and instead a linkToCommand link is rendered
 +// with the key to that map.
 +export const LINKED_COMMANDS = new Map<string, ra.CommandLink>();
 +
 +// For now the map is cleaned up periodically (I've set it to every
 +// 10 minutes). In general case we'll probably need to introduce TTLs or
 +// flags to denote ephemeral links (like these in hover popups) and
 +// persistent links and clean those separately. But for now simply keeping
 +// the last few links in the map should be good enough. Likewise, we could
 +// add code to remove a target command from the map after the link is
 +// clicked, but assuming most links in hover sheets won't be clicked anyway
 +// this code won't change the overall memory use much.
 +// Runs every 10 minutes: keeps only the 10 most recently inserted links in
 +// LINKED_COMMANDS and deletes the rest (see the note above on why a TTL-free
 +// periodic sweep is good enough for now).
 +setInterval(function cleanupOlderCommandLinks() {
 +    // keys are returned in insertion order, we'll keep a few
 +    // of recent keys available, and clean the rest
 +    const keys = [...LINKED_COMMANDS.keys()];
 +    const keysToRemove = keys.slice(0, keys.length - 10);
 +    for (const key of keysToRemove) {
 +        LINKED_COMMANDS.delete(key);
 +    }
 +}, 10 * 60 * 1000);
 +
 +// Renders `cmd` as a markdown command link. The full command and its
 +// arguments are stashed in LINKED_COMMANDS under a fresh UUID; the rendered
 +// link only carries that key, routed through rust-analyzer.linkToCommand
 +// (see the LINKED_COMMANDS note above).
 +function renderCommand(cmd: ra.CommandLink): string {
 +    const commandId = randomUUID();
 +    LINKED_COMMANDS.set(commandId, cmd);
 +    return `[${cmd.title}](command:rust-analyzer.linkToCommand?${encodeURIComponent(
 +        JSON.stringify([commandId])
 +    )} '${cmd.tooltip}')`;
 +}
 +
 +// Renders hover action groups as a single MarkdownString: commands within a
 +// group are separated by " | ", groups by "___" (a markdown horizontal rule).
 +function renderHoverActions(actions: ra.CommandLinkGroup[]): vscode.MarkdownString {
 +    const text = actions
 +        .map(
 +            (group) =>
 +                (group.title ? group.title + " " : "") +
 +                group.commands.map(renderCommand).join(" | ")
 +        )
 +        .join("___");
 +
 +    const result = new vscode.MarkdownString(text);
 +    // Trusted markdown is required for the embedded command: links to be
 +    // executable by VS Code.
 +    result.isTrusted = true;
 +    return result;
 +}
 +
 +// Creates and configures the LanguageClient that drives the rust-analyzer
 +// server binary at `serverPath`. The server inherits the current process
 +// environment merged with `extraEnv` (after variable substitution); hover
 +// and code-action requests are intercepted by the custom middleware below.
 +export async function createClient(
 +    serverPath: string,
 +    workspace: Workspace,
 +    extraEnv: Env
 +): Promise<lc.LanguageClient> {
 +    // '.' is the fallback if no folder is open
 +    // TODO?: Workspace folders support Uri's (eg: file://test.txt).
 +    // It might be a good idea to test if the uri points to a file.
 +
 +    const newEnv = substituteVariablesInEnv(Object.assign({}, process.env, extraEnv));
 +    const run: lc.Executable = {
 +        command: serverPath,
 +        options: { env: newEnv },
 +    };
 +    const serverOptions: lc.ServerOptions = {
 +        run,
 +        debug: run,
 +    };
 +
 +    let initializationOptions = vscode.workspace.getConfiguration("rust-analyzer");
 +
-             async handleDiagnostics(uri, diagnostics, next) {
-                 // Workaround for https://github.com/microsoft/vscode/issues/155531
-                 for (const diagnostic of diagnostics) {
-                     if (!diagnostic.message) {
-                         diagnostic.message = " ";
-                     }
-                     if (diagnostic.relatedInformation) {
-                         for (const relatedInformation of diagnostic.relatedInformation) {
-                             if (!relatedInformation.message) {
-                                 relatedInformation.message = " ";
-                             }
-                         }
-                     }
-                 }
-                 next(uri, diagnostics);
-             },
 +    if (workspace.kind === "Detached Files") {
 +        initializationOptions = {
 +            detachedFiles: workspace.files.map((file) => file.uri.fsPath),
 +            ...initializationOptions,
 +        };
 +    }
 +
 +    const clientOptions: lc.LanguageClientOptions = {
 +        documentSelector: [{ scheme: "file", language: "rust" }],
 +        initializationOptions,
 +        diagnosticCollectionName: "rustc",
 +        traceOutputChannel: traceOutputChannel(),
 +        outputChannel: outputChannel(),
 +        middleware: {
 +            async provideHover(
 +                document: vscode.TextDocument,
 +                position: vscode.Position,
 +                token: vscode.CancellationToken,
 +                _next: lc.ProvideHoverSignature
 +            ) {
 +                // When the hover position lies inside the active selection,
 +                // send the whole selected range to rust-analyzer's extended
 +                // hover request; otherwise send the single position.
 +                const editor = vscode.window.activeTextEditor;
 +                const positionOrRange = editor?.selection?.contains(position)
 +                    ? client.code2ProtocolConverter.asRange(editor.selection)
 +                    : client.code2ProtocolConverter.asPosition(position);
 +                return client
 +                    .sendRequest(
 +                        ra.hover,
 +                        {
 +                            textDocument:
 +                                client.code2ProtocolConverter.asTextDocumentIdentifier(document),
 +                            position: positionOrRange,
 +                        },
 +                        token
 +                    )
 +                    .then(
 +                        (result) => {
 +                            const hover = client.protocol2CodeConverter.asHover(result);
 +                            if (hover) {
 +                                // Append rendered hover action links delivered
 +                                // via the non-standard `actions` field.
 +                                const actions = (<any>result).actions;
 +                                if (actions) {
 +                                    hover.contents.push(renderHoverActions(actions));
 +                                }
 +                            }
 +                            return hover;
 +                        },
 +                        (error) => {
 +                            client.handleFailedRequest(lc.HoverRequest.type, token, error, null);
 +                            return Promise.resolve(null);
 +                        }
 +                    );
 +            },
 +            // Using custom handling of CodeActions to support action groups and snippet edits.
 +            // Note that this means we have to re-implement lazy edit resolving ourselves as well.
 +            async provideCodeActions(
 +                document: vscode.TextDocument,
 +                range: vscode.Range,
 +                context: vscode.CodeActionContext,
 +                token: vscode.CancellationToken,
 +                _next: lc.ProvideCodeActionsSignature
 +            ) {
 +                const params: lc.CodeActionParams = {
 +                    textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(document),
 +                    range: client.code2ProtocolConverter.asRange(range),
 +                    context: await client.code2ProtocolConverter.asCodeActionContext(
 +                        context,
 +                        token
 +                    ),
 +                };
 +                return client.sendRequest(lc.CodeActionRequest.type, params, token).then(
 +                    async (values) => {
 +                        if (values === null) return undefined;
 +                        const result: (vscode.CodeAction | vscode.Command)[] = [];
 +                        const groups = new Map<
 +                            string,
 +                            { index: number; items: vscode.CodeAction[] }
 +                        >();
 +                        for (const item of values) {
 +                            // In our case we expect to get code edits only from diagnostics
 +                            if (lc.CodeAction.is(item)) {
 +                                assert(
 +                                    !item.command,
 +                                    "We don't expect to receive commands in CodeActions"
 +                                );
 +                                const action = await client.protocol2CodeConverter.asCodeAction(
 +                                    item,
 +                                    token
 +                                );
 +                                result.push(action);
 +                                continue;
 +                            }
 +                            assert(
 +                                isCodeActionWithoutEditsAndCommands(item),
 +                                "We don't expect edits or commands here"
 +                            );
 +                            const kind = client.protocol2CodeConverter.asCodeActionKind(
 +                                (item as any).kind
 +                            );
 +                            const action = new vscode.CodeAction(item.title, kind);
 +                            const group = (item as any).group;
 +                            // Resolve the edit lazily on invocation, through
 +                            // the rust-analyzer.resolveCodeAction command.
 +                            action.command = {
 +                                command: "rust-analyzer.resolveCodeAction",
 +                                title: item.title,
 +                                arguments: [item],
 +                            };
 +
 +                            // Set a dummy edit, so that VS Code doesn't try to resolve this.
 +                            action.edit = new WorkspaceEdit();
 +
 +                            if (group) {
 +                                let entry = groups.get(group);
 +                                if (!entry) {
 +                                    entry = { index: result.length, items: [] };
 +                                    groups.set(group, entry);
 +                                    result.push(action);
 +                                }
 +                                entry.items.push(action);
 +                            } else {
 +                                result.push(action);
 +                            }
 +                        }
 +                        // Collapse single-item groups into the action itself;
 +                        // replace multi-item groups with one action that hands
 +                        // all members to rust-analyzer.applyActionGroup.
 +                        for (const [group, { index, items }] of groups) {
 +                            if (items.length === 1) {
 +                                result[index] = items[0];
 +                            } else {
 +                                const action = new vscode.CodeAction(group);
 +                                action.kind = items[0].kind;
 +                                action.command = {
 +                                    command: "rust-analyzer.applyActionGroup",
 +                                    title: "",
 +                                    arguments: [
 +                                        items.map((item) => {
 +                                            return {
 +                                                label: item.title,
 +                                                arguments: item.command!.arguments![0],
 +                                            };
 +                                        }),
 +                                    ],
 +                                };
 +
 +                                // Set a dummy edit, so that VS Code doesn't try to resolve this.
 +                                action.edit = new WorkspaceEdit();
 +
 +                                result[index] = action;
 +                            }
 +                        }
 +                        return result;
 +                    },
 +                    (_error) => undefined
 +                );
 +            },
 +        },
 +        markdown: {
 +            supportHtml: true,
 +        },
 +    };
 +
 +    const client = new lc.LanguageClient(
 +        "rust-analyzer",
 +        "Rust Analyzer Language Server",
 +        serverOptions,
 +        clientOptions
 +    );
 +
 +    // To turn on all proposed features use: client.registerProposedFeatures();
 +    client.registerFeature(new ExperimentalFeatures());
 +
 +    return client;
 +}
 +
 +// Advertises rust-analyzer's protocol extensions to the server under
 +// `capabilities.experimental`: snippet text edits, code action groups, hover
 +// actions, server status notifications, and the client-side commands the
 +// server may ask us to run.
 +class ExperimentalFeatures implements lc.StaticFeature {
 +    fillClientCapabilities(capabilities: lc.ClientCapabilities): void {
 +        const caps: any = capabilities.experimental ?? {};
 +        caps.snippetTextEdit = true;
 +        caps.codeActionGroup = true;
 +        caps.hoverActions = true;
 +        caps.serverStatusNotification = true;
 +        caps.commands = {
 +            commands: [
 +                "rust-analyzer.runSingle",
 +                "rust-analyzer.debugSingle",
 +                "rust-analyzer.showReferences",
 +                "rust-analyzer.gotoLocation",
 +                "editor.action.triggerParameterHints",
 +            ],
 +        };
 +        capabilities.experimental = caps;
 +    }
 +    // Static feature: nothing to do at initialize/dispose time.
 +    initialize(
 +        _capabilities: lc.ServerCapabilities<any>,
 +        _documentSelector: lc.DocumentSelector | undefined
 +    ): void {}
 +    dispose(): void {}
 +}
 +
 +// Structural check that `value` looks like an LSP CodeAction carrying neither
 +// an `edit` nor a `command` — the shape provideCodeActions expects for
 +// actions it resolves lazily itself.
 +function isCodeActionWithoutEditsAndCommands(value: any): boolean {
 +    const candidate: lc.CodeAction = value;
 +    return (
 +        candidate &&
 +        Is.string(candidate.title) &&
 +        (candidate.diagnostics === void 0 ||
 +            Is.typedArray(candidate.diagnostics, lc.Diagnostic.is)) &&
 +        (candidate.kind === void 0 || Is.string(candidate.kind)) &&
 +        candidate.edit === void 0 &&
 +        candidate.command === void 0
 +    );
 +}
index b83582a344a9841ae61063a88f0cbde0864ef4e9,0000000000000000000000000000000000000000..a9c0f079b3da9350eeb3624fbed5247b465da7d9
mode 100644,000000..100644
--- /dev/null
@@@ -1,396 -1,0 +1,300 @@@
- export type UpdatesChannel = "stable" | "nightly";
 +import path = require("path");
 +import * as vscode from "vscode";
 +import { Env } from "./client";
 +import { log } from "./util";
 +
- export async function updateConfig(config: vscode.WorkspaceConfiguration) {
-     const renames = [
-         ["assist.allowMergingIntoGlobImports", "imports.merge.glob"],
-         ["assist.exprFillDefault", "assist.expressionFillDefault"],
-         ["assist.importEnforceGranularity", "imports.granularity.enforce"],
-         ["assist.importGranularity", "imports.granularity.group"],
-         ["assist.importMergeBehavior", "imports.granularity.group"],
-         ["assist.importMergeBehaviour", "imports.granularity.group"],
-         ["assist.importGroup", "imports.group.enable"],
-         ["assist.importPrefix", "imports.prefix"],
-         ["primeCaches.enable", "cachePriming.enable"],
-         ["cache.warmup", "cachePriming.enable"],
-         ["cargo.loadOutDirsFromCheck", "cargo.buildScripts.enable"],
-         ["cargo.runBuildScripts", "cargo.buildScripts.enable"],
-         ["cargo.runBuildScriptsCommand", "cargo.buildScripts.overrideCommand"],
-         ["cargo.useRustcWrapperForBuildScripts", "cargo.buildScripts.useRustcWrapper"],
-         ["completion.snippets", "completion.snippets.custom"],
-         ["diagnostics.enableExperimental", "diagnostics.experimental.enable"],
-         ["experimental.procAttrMacros", "procMacro.attributes.enable"],
-         ["highlighting.strings", "semanticHighlighting.strings.enable"],
-         ["highlightRelated.breakPoints", "highlightRelated.breakPoints.enable"],
-         ["highlightRelated.exitPoints", "highlightRelated.exitPoints.enable"],
-         ["highlightRelated.yieldPoints", "highlightRelated.yieldPoints.enable"],
-         ["highlightRelated.references", "highlightRelated.references.enable"],
-         ["hover.documentation", "hover.documentation.enable"],
-         ["hover.linksInHover", "hover.links.enable"],
-         ["hoverActions.linksInHover", "hover.links.enable"],
-         ["hoverActions.debug", "hover.actions.debug.enable"],
-         ["hoverActions.enable", "hover.actions.enable.enable"],
-         ["hoverActions.gotoTypeDef", "hover.actions.gotoTypeDef.enable"],
-         ["hoverActions.implementations", "hover.actions.implementations.enable"],
-         ["hoverActions.references", "hover.actions.references.enable"],
-         ["hoverActions.run", "hover.actions.run.enable"],
-         ["inlayHints.chainingHints", "inlayHints.chainingHints.enable"],
-         ["inlayHints.closureReturnTypeHints", "inlayHints.closureReturnTypeHints.enable"],
-         ["inlayHints.hideNamedConstructorHints", "inlayHints.typeHints.hideNamedConstructor"],
-         ["inlayHints.parameterHints", "inlayHints.parameterHints.enable"],
-         ["inlayHints.reborrowHints", "inlayHints.reborrowHints.enable"],
-         ["inlayHints.typeHints", "inlayHints.typeHints.enable"],
-         ["lruCapacity", "lru.capacity"],
-         ["runnables.cargoExtraArgs", "runnables.extraArgs"],
-         ["runnables.overrideCargo", "runnables.command"],
-         ["rustcSource", "rustc.source"],
-         ["rustfmt.enableRangeFormatting", "rustfmt.rangeFormatting.enable"],
-     ];
-     for (const [oldKey, newKey] of renames) {
-         const inspect = config.inspect(oldKey);
-         if (inspect !== undefined) {
-             const valMatrix = [
-                 {
-                     val: inspect.globalValue,
-                     langVal: inspect.globalLanguageValue,
-                     target: vscode.ConfigurationTarget.Global,
-                 },
-                 {
-                     val: inspect.workspaceFolderValue,
-                     langVal: inspect.workspaceFolderLanguageValue,
-                     target: vscode.ConfigurationTarget.WorkspaceFolder,
-                 },
-                 {
-                     val: inspect.workspaceValue,
-                     langVal: inspect.workspaceLanguageValue,
-                     target: vscode.ConfigurationTarget.Workspace,
-                 },
-             ];
-             for (const { val, langVal, target } of valMatrix) {
-                 const patch = (val: unknown) => {
-                     // some of the updates we do only append "enable" or "custom"
-                     // that means on the next run we would find these again, but as objects with
-                     // these properties causing us to destroy the config
-                     // so filter those already updated ones out
-                     return (
-                         val !== undefined &&
-                         !(
-                             typeof val === "object" &&
-                             val !== null &&
-                             (oldKey === "completion.snippets" || !val.hasOwnProperty("custom"))
-                         )
-                     );
-                 };
-                 if (patch(val)) {
-                     await config.update(newKey, val, target, false);
-                     await config.update(oldKey, undefined, target, false);
-                 }
-                 if (patch(langVal)) {
-                     await config.update(newKey, langVal, target, true);
-                     await config.update(oldKey, undefined, target, true);
-                 }
-             }
-         }
-     }
- }
 +export type RunnableEnvCfg =
 +    | undefined
 +    | Record<string, string>
 +    | { mask?: string; env: Record<string, string> }[];
 +
 +// Typed, read-only view over the "rust-analyzer" section of the VS Code
 +// configuration. Also watches for configuration changes and triggers (or
 +// prompts the user for) the kind of reload the changed option requires.
 +export class Config {
 +    readonly extensionId = "rust-lang.rust-analyzer";
 +
 +    readonly rootSection = "rust-analyzer";
 +    // Options whose change only takes effect after reloading the whole
 +    // VS Code window.
 +    private readonly requiresWorkspaceReloadOpts = [
 +        "serverPath",
 +        "server",
 +        // FIXME: This shouldn't be here, changing this setting should reload
 +        // `continueCommentsOnNewline` behavior without restart
 +        "typing",
 +    ].map((opt) => `${this.rootSection}.${opt}`);
 +    // Options whose change requires at least a server restart; superset of
 +    // the window-reload options above.
 +    private readonly requiresReloadOpts = [
 +        "cargo",
 +        "procMacro",
 +        "files",
 +        "lens", // works as lens.*
 +    ]
 +        .map((opt) => `${this.rootSection}.${opt}`)
 +        .concat(this.requiresWorkspaceReloadOpts);
 +
 +    // Metadata taken from the extension's own package.json.
 +    readonly package: {
 +        version: string;
 +        releaseTag: string | null;
 +        enableProposedApi: boolean | undefined;
 +    } = vscode.extensions.getExtension(this.extensionId)!.packageJSON;
 +
 +    readonly globalStorageUri: vscode.Uri;
 +
 +    constructor(ctx: vscode.ExtensionContext) {
 +        this.globalStorageUri = ctx.globalStorageUri;
 +        vscode.workspace.onDidChangeConfiguration(
 +            this.onDidChangeConfiguration,
 +            this,
 +            ctx.subscriptions
 +        );
 +        this.refreshLogging();
 +    }
 +
 +    // Re-applies the trace.extension log toggle and dumps the current
 +    // (non-function) configuration values to the extension log.
 +    private refreshLogging() {
 +        log.setEnabled(this.traceExtension);
 +        log.info("Extension version:", this.package.version);
 +
 +        const cfg = Object.entries(this.cfg).filter(([_, val]) => !(val instanceof Function));
 +        log.info("Using configuration", Object.fromEntries(cfg));
 +    }
 +
 +    // Reacts to configuration changes: restarts the server silently when
 +    // permitted, otherwise prompts the user to reload the server or window.
 +    private async onDidChangeConfiguration(event: vscode.ConfigurationChangeEvent) {
 +        this.refreshLogging();
 +
 +        const requiresReloadOpt = this.requiresReloadOpts.find((opt) =>
 +            event.affectsConfiguration(opt)
 +        );
 +
 +        if (!requiresReloadOpt) return;
 +
 +        const requiresWorkspaceReloadOpt = this.requiresWorkspaceReloadOpts.find((opt) =>
 +            event.affectsConfiguration(opt)
 +        );
 +
 +        if (!requiresWorkspaceReloadOpt && this.restartServerOnConfigChange) {
 +            await vscode.commands.executeCommand("rust-analyzer.reload");
 +            return;
 +        }
 +
 +        const message = requiresWorkspaceReloadOpt
 +            ? `Changing "${requiresWorkspaceReloadOpt}" requires a window reload`
 +            : `Changing "${requiresReloadOpt}" requires a reload`;
 +        const userResponse = await vscode.window.showInformationMessage(message, "Reload now");
 +
 +        if (userResponse === "Reload now") {
 +            const command = requiresWorkspaceReloadOpt
 +                ? "workbench.action.reloadWindow"
 +                : "rust-analyzer.reload";
 +            // NOTE(review): this inner check duplicates the enclosing
 +            // condition and is always true at this point.
 +            if (userResponse === "Reload now") {
 +                await vscode.commands.executeCommand(command);
 +            }
 +        }
 +    }
 +
 +    // We don't do runtime config validation here for simplicity. More on stackoverflow:
 +    // https://stackoverflow.com/questions/60135780/what-is-the-best-way-to-type-check-the-configuration-for-vscode-extension
 +
 +    private get cfg(): vscode.WorkspaceConfiguration {
 +        return vscode.workspace.getConfiguration(this.rootSection);
 +    }
 +
 +    /**
 +     * Beware that postfix `!` operator erases both `null` and `undefined`.
 +     * This is why the following doesn't work as expected:
 +     *
 +     * ```ts
 +     * const nullableNum = vscode
 +     *  .workspace
 +     *  .getConfiguration
 +     *  .getConfiguration("rust-analyzer")
 +     *  .get<number | null>(path)!;
 +     *
 +     * // What happens is that type of `nullableNum` is `number` but not `null | number`:
 +     * const fullFledgedNum: number = nullableNum;
 +     * ```
 +     * So this getter handles this quirk by not requiring the caller to use postfix `!`
 +     */
 +    private get<T>(path: string): T {
 +        return this.cfg.get<T>(path)!;
 +    }
 +
 +    // Prefers the newer "server.path" setting, falling back to the older
 +    // "serverPath" key.
 +    get serverPath() {
 +        return this.get<null | string>("server.path") ?? this.get<null | string>("serverPath");
 +    }
 +    // Extra env vars for the server process; numeric values are stringified.
 +    get serverExtraEnv(): Env {
 +        const extraEnv =
 +            this.get<{ [key: string]: string | number } | null>("server.extraEnv") ?? {};
 +        return Object.fromEntries(
 +            Object.entries(extraEnv).map(([k, v]) => [k, typeof v !== "string" ? v.toString() : v])
 +        );
 +    }
 +    get traceExtension() {
 +        return this.get<boolean>("trace.extension");
 +    }
 +
 +    get cargoRunner() {
 +        return this.get<string | undefined>("cargoRunner");
 +    }
 +
 +    get runnableEnv() {
 +        return this.get<RunnableEnvCfg>("runnableEnv");
 +    }
 +
 +    get restartServerOnConfigChange() {
 +        return this.get<boolean>("restartServerOnConfigChange");
 +    }
 +
 +    get typingContinueCommentsOnNewline() {
 +        return this.get<boolean>("typing.continueCommentsOnNewline");
 +    }
 +
 +    get debug() {
 +        let sourceFileMap = this.get<Record<string, string> | "auto">("debug.sourceFileMap");
 +        if (sourceFileMap !== "auto") {
 +            // "/rustc/<id>" used by suggestions only.
 +            const { ["/rustc/<id>"]: _, ...trimmed } =
 +                this.get<Record<string, string>>("debug.sourceFileMap");
 +            sourceFileMap = trimmed;
 +        }
 +
 +        return {
 +            engine: this.get<string>("debug.engine"),
 +            engineSettings: this.get<object>("debug.engineSettings"),
 +            openDebugPane: this.get<boolean>("debug.openDebugPane"),
 +            sourceFileMap: sourceFileMap,
 +        };
 +    }
 +
 +    get hoverActions() {
 +        return {
 +            enable: this.get<boolean>("hover.actions.enable"),
 +            implementations: this.get<boolean>("hover.actions.implementations.enable"),
 +            references: this.get<boolean>("hover.actions.references.enable"),
 +            run: this.get<boolean>("hover.actions.run.enable"),
 +            debug: this.get<boolean>("hover.actions.debug.enable"),
 +            gotoTypeDef: this.get<boolean>("hover.actions.gotoTypeDef.enable"),
 +        };
 +    }
 +}
 +
 +export function substituteVariablesInEnv(env: Env): Env {
 +    const missingDeps = new Set<string>();
 +    // vscode uses `env:ENV_NAME` for env vars resolution, and it's easier
 +    // to follow the same convention for our dependency tracking
 +    const definedEnvKeys = new Set(Object.keys(env).map((key) => `env:${key}`));
 +    const envWithDeps = Object.fromEntries(
 +        Object.entries(env).map(([key, value]) => {
 +            const deps = new Set<string>();
 +            const depRe = new RegExp(/\${(?<depName>.+?)}/g);
 +            let match = undefined;
 +            while ((match = depRe.exec(value))) {
 +                const depName = match.groups!.depName;
 +                deps.add(depName);
 +                // `depName` at this point can have a form of `expression` or
 +                // `prefix:expression`
 +                if (!definedEnvKeys.has(depName)) {
 +                    missingDeps.add(depName);
 +                }
 +            }
 +            return [`env:${key}`, { deps: [...deps], value }];
 +        })
 +    );
 +
 +    const resolved = new Set<string>();
 +    for (const dep of missingDeps) {
 +        const match = /(?<prefix>.*?):(?<body>.+)/.exec(dep);
 +        if (match) {
 +            const { prefix, body } = match.groups!;
 +            if (prefix === "env") {
 +                const envName = body;
 +                envWithDeps[dep] = {
 +                    value: process.env[envName] ?? "",
 +                    deps: [],
 +                };
 +                resolved.add(dep);
 +            } else {
 +                // we can't handle other prefixes at the moment
 +                // leave values as is, but still mark them as resolved
 +                envWithDeps[dep] = {
 +                    value: "${" + dep + "}",
 +                    deps: [],
 +                };
 +                resolved.add(dep);
 +            }
 +        } else {
 +            envWithDeps[dep] = {
 +                value: computeVscodeVar(dep),
 +                deps: [],
 +            };
 +        }
 +    }
 +    const toResolve = new Set(Object.keys(envWithDeps));
 +
 +    let leftToResolveSize;
 +    do {
 +        leftToResolveSize = toResolve.size;
 +        for (const key of toResolve) {
 +            if (envWithDeps[key].deps.every((dep) => resolved.has(dep))) {
 +                envWithDeps[key].value = envWithDeps[key].value.replace(
 +                    /\${(?<depName>.+?)}/g,
 +                    (_wholeMatch, depName) => {
 +                        return envWithDeps[depName].value;
 +                    }
 +                );
 +                resolved.add(key);
 +                toResolve.delete(key);
 +            }
 +        }
 +    } while (toResolve.size > 0 && toResolve.size < leftToResolveSize);
 +
 +    const resolvedEnv: Env = {};
 +    for (const key of Object.keys(env)) {
 +        resolvedEnv[key] = envWithDeps[`env:${key}`].value;
 +    }
 +    return resolvedEnv;
 +}
 +
 +function computeVscodeVar(varName: string): string {
 +    // https://code.visualstudio.com/docs/editor/variables-reference
 +    const supportedVariables: { [k: string]: () => string } = {
 +        workspaceFolder: () => {
 +            const folders = vscode.workspace.workspaceFolders ?? [];
 +            if (folders.length === 1) {
 +                // TODO: support for remote workspaces?
 +                return folders[0].uri.fsPath;
 +            } else if (folders.length > 1) {
 +                // could use currently opened document to detect the correct
 +                // workspace. However, that would be determined by the document
 +                // user has opened on Editor startup. Could lead to
 +                // unpredictable workspace selection in practice.
 +                // It's better to pick the first one
 +                return folders[0].uri.fsPath;
 +            } else {
 +                // no workspace opened
 +                return "";
 +            }
 +        },
 +
 +        workspaceFolderBasename: () => {
 +            const workspaceFolder = computeVscodeVar("workspaceFolder");
 +            if (workspaceFolder) {
 +                return path.basename(workspaceFolder);
 +            } else {
 +                return "";
 +            }
 +        },
 +
 +        cwd: () => process.cwd(),
 +
 +        // see
 +        // https://github.com/microsoft/vscode/blob/08ac1bb67ca2459496b272d8f4a908757f24f56f/src/vs/workbench/api/common/extHostVariableResolverService.ts#L81
 +        // or
 +        // https://github.com/microsoft/vscode/blob/29eb316bb9f154b7870eb5204ec7f2e7cf649bec/src/vs/server/node/remoteTerminalChannel.ts#L56
 +        execPath: () => process.env.VSCODE_EXEC_PATH ?? process.execPath,
 +
 +        pathSeparator: () => path.sep,
 +    };
 +
 +    if (varName in supportedVariables) {
 +        return supportedVariables[varName]();
 +    } else {
 +        // can't resolve, keep the expression as is
 +        return "${" + varName + "}";
 +    }
 +}
index a9847dd2a6521ba013d76b4c7562ff89334397c9,0000000000000000000000000000000000000000..e9b62e0cc2578b876747093577439bfec057192e
mode 100644,000000..100644
--- /dev/null
@@@ -1,406 -1,0 +1,416 @@@
-     client: lc.LanguageClient;
 +import * as vscode from "vscode";
 +import * as lc from "vscode-languageclient/node";
 +import * as os from "os";
 +
 +import * as commands from "./commands";
 +import { Ctx } from "./ctx";
 +import { Config } from "./config";
 +import { log, isValidExecutable, isRustDocument } from "./util";
 +import { PersistentState } from "./persistent_state";
 +import { activateTaskProvider } from "./tasks";
 +import { setContextValue } from "./util";
 +import { exec } from "child_process";
 +
 +let ctx: Ctx | undefined;
 +
 +const RUST_PROJECT_CONTEXT_NAME = "inRustProject";
 +
 +let TRACE_OUTPUT_CHANNEL: vscode.OutputChannel | null = null;
 +export function traceOutputChannel() {
 +    if (!TRACE_OUTPUT_CHANNEL) {
 +        TRACE_OUTPUT_CHANNEL = vscode.window.createOutputChannel(
 +            "Rust Analyzer Language Server Trace"
 +        );
 +    }
 +    return TRACE_OUTPUT_CHANNEL;
 +}
 +let OUTPUT_CHANNEL: vscode.OutputChannel | null = null;
 +export function outputChannel() {
 +    if (!OUTPUT_CHANNEL) {
 +        OUTPUT_CHANNEL = vscode.window.createOutputChannel("Rust Analyzer Language Server");
 +    }
 +    return OUTPUT_CHANNEL;
 +}
 +
/**
 * Public API object returned from `activate` for use by other extensions.
 * `client` is optional: activation may bail out early (no usable workspace
 * folders and no open Rust documents) and return an empty object.
 */
export interface RustAnalyzerExtensionApi {
    client?: lc.LanguageClient;
}
 +
/**
 * Extension entry point. Delegates to `tryActivate` and surfaces any
 * activation failure as an error notification before re-throwing.
 */
export async function activate(
    context: vscode.ExtensionContext
): Promise<RustAnalyzerExtensionApi> {
    // VS Code doesn't show a notification when an extension fails to activate
    // so we do it ourselves.
    return await tryActivate(context).catch((err) => {
        void vscode.window.showErrorMessage(`Cannot activate rust-analyzer: ${err.message}`);
        throw err;
    });
}
 +
/**
 * Performs the actual activation: decides between "Workspace Folder" and
 * "Detached Files" modes, bootstraps the server binary, creates the global
 * `Ctx`, and registers commands and configuration listeners.
 */
async function tryActivate(context: vscode.ExtensionContext): Promise<RustAnalyzerExtensionApi> {
    // We only support local folders, not eg. Live Share (`vlsl:` scheme), so don't activate if
    // only those are in use.
    // (r-a still somewhat works with Live Share, because commands are tunneled to the host)
    const folders = (vscode.workspace.workspaceFolders || []).filter(
        (folder) => folder.uri.scheme === "file"
    );
    const rustDocuments = vscode.workspace.textDocuments.filter((document) =>
        isRustDocument(document)
    );

    if (folders.length === 0 && rustDocuments.length === 0) {
        // FIXME: Ideally we would choose not to activate at all (and avoid registering
        // non-functional editor commands), but VS Code doesn't seem to have a good way of doing
        // that
        return {};
    }

    const config = new Config(context);
    const state = new PersistentState(context.globalState);
    // Locate (and possibly install/patch) the server binary before creating
    // the context; any failure is reported as a single "bootstrap error".
    const serverPath = await bootstrap(context, config, state).catch((err) => {
        let message = "bootstrap error. ";

        message += 'See the logs in "OUTPUT > Rust Analyzer Client" (should open automatically). ';
        message += 'To enable verbose logs use { "rust-analyzer.trace.extension": true }';

        log.error("Bootstrap error", err);
        throw new Error(message);
    });

    if (folders.length === 0) {
        // No usable workspace folder, but Rust documents are open: run over
        // just those files.
        ctx = await Ctx.create(config, context, serverPath, {
            kind: "Detached Files",
            files: rustDocuments,
        });
    } else {
        // Note: we try to start the server before we activate type hints so that it
        // registers its `onDidChangeDocument` handler before us.
        //
        // This a horribly, horribly wrong way to deal with this problem.
        ctx = await Ctx.create(config, context, serverPath, { kind: "Workspace Folder" });
        ctx.pushCleanup(activateTaskProvider(ctx.config));
    }
    await initCommonContext(context, ctx);

    warnAboutExtensionConflicts();

    if (config.typingContinueCommentsOnNewline) {
        ctx.pushCleanup(configureLanguage());
    }

    // Poke the server on any configuration change; it pulls the actual
    // settings itself, so an empty payload is sufficient.
    vscode.workspace.onDidChangeConfiguration(
        (_) =>
            ctx?.client
                ?.sendNotification("workspace/didChangeConfiguration", { settings: "" })
                .catch(log.error),
        null,
        ctx.subscriptions
    );

    return {
        client: ctx.client,
    };
}
 +
/**
 * Registers all extension commands (palette commands plus the internal ones
 * invoked by the server) and sets the "in Rust project" context flag.
 * Registration order matters: the fallback onEnter handler is replaced by the
 * real one at the end.
 */
async function initCommonContext(context: vscode.ExtensionContext, ctx: Ctx) {
    // Register a "dumb" onEnter command for the case where server fails to
    // start.
    //
    // FIXME: refactor command registration code such that commands are
    // **always** registered, even if the server does not start. Use API like
    // this perhaps?
    //
    // ```TypeScript
    // registerCommand(
    //    factory: (Ctx) => ((Ctx) => any),
    //    fallback: () => any = () => vscode.window.showErrorMessage(
    //        "rust-analyzer is not available"
    //    ),
    // )
    const defaultOnEnter = vscode.commands.registerCommand("rust-analyzer.onEnter", () =>
        vscode.commands.executeCommand("default:type", { text: "\n" })
    );
    context.subscriptions.push(defaultOnEnter);

    await setContextValue(RUST_PROJECT_CONTEXT_NAME, true);

    // Commands which are invoked manually via command palette, shortcut, etc.

    // Reloading is inspired by @DanTup maneuver: https://github.com/microsoft/vscode/issues/45774#issuecomment-373423895
    ctx.registerCommand("reload", (_) => async () => {
        void vscode.window.showInformationMessage("Reloading rust-analyzer...");
        // Full teardown: stop the server, dispose every subscription, then
        // re-run activation from scratch.
        await doDeactivate();
        while (context.subscriptions.length > 0) {
            try {
                context.subscriptions.pop()!.dispose();
            } catch (err) {
                log.error("Dispose error:", err);
            }
        }
        await activate(context).catch(log.error);
    });

    ctx.registerCommand("analyzerStatus", commands.analyzerStatus);
    ctx.registerCommand("memoryUsage", commands.memoryUsage);
    ctx.registerCommand("shuffleCrateGraph", commands.shuffleCrateGraph);
    ctx.registerCommand("reloadWorkspace", commands.reloadWorkspace);
    ctx.registerCommand("matchingBrace", commands.matchingBrace);
    ctx.registerCommand("joinLines", commands.joinLines);
    ctx.registerCommand("parentModule", commands.parentModule);
    ctx.registerCommand("syntaxTree", commands.syntaxTree);
    ctx.registerCommand("viewHir", commands.viewHir);
    ctx.registerCommand("viewFileText", commands.viewFileText);
    ctx.registerCommand("viewItemTree", commands.viewItemTree);
    ctx.registerCommand("viewCrateGraph", commands.viewCrateGraph);
    ctx.registerCommand("viewFullCrateGraph", commands.viewFullCrateGraph);
    ctx.registerCommand("expandMacro", commands.expandMacro);
    ctx.registerCommand("run", commands.run);
    ctx.registerCommand("copyRunCommandLine", commands.copyRunCommandLine);
    ctx.registerCommand("debug", commands.debug);
    ctx.registerCommand("newDebugConfig", commands.newDebugConfig);
    ctx.registerCommand("openDocs", commands.openDocs);
    ctx.registerCommand("openCargoToml", commands.openCargoToml);
    ctx.registerCommand("peekTests", commands.peekTests);
    ctx.registerCommand("moveItemUp", commands.moveItemUp);
    ctx.registerCommand("moveItemDown", commands.moveItemDown);
    ctx.registerCommand("cancelFlycheck", commands.cancelFlycheck);

    // Replace the fallback onEnter with the real, server-backed handler.
    defaultOnEnter.dispose();
    ctx.registerCommand("onEnter", commands.onEnter);

    ctx.registerCommand("ssr", commands.ssr);
    ctx.registerCommand("serverVersion", commands.serverVersion);
    ctx.registerCommand("toggleInlayHints", commands.toggleInlayHints);

    // Internal commands which are invoked by the server.
    ctx.registerCommand("runSingle", commands.runSingle);
    ctx.registerCommand("debugSingle", commands.debugSingle);
    ctx.registerCommand("showReferences", commands.showReferences);
    ctx.registerCommand("applySnippetWorkspaceEdit", commands.applySnippetWorkspaceEditCommand);
    ctx.registerCommand("resolveCodeAction", commands.resolveCodeAction);
    ctx.registerCommand("applyActionGroup", commands.applyActionGroup);
    ctx.registerCommand("gotoLocation", commands.gotoLocation);

    ctx.registerCommand("linkToCommand", commands.linkToCommand);
}
 +
 +export async function deactivate() {
 +    TRACE_OUTPUT_CHANNEL?.dispose();
 +    TRACE_OUTPUT_CHANNEL = null;
 +    OUTPUT_CHANNEL?.dispose();
 +    OUTPUT_CHANNEL = null;
 +    await doDeactivate();
 +}
 +
/**
 * Clears the "in Rust project" context flag, stops the language client (if
 * any), and drops the global `ctx`. The client is stopped before `ctx` is
 * cleared so a failed stop leaves the context observable.
 */
async function doDeactivate() {
    await setContextValue(RUST_PROJECT_CONTEXT_NAME, undefined);
    await ctx?.client.stop();
    ctx = undefined;
}
 +
 +async function bootstrap(
 +    context: vscode.ExtensionContext,
 +    config: Config,
 +    state: PersistentState
 +): Promise<string> {
 +    const path = await getServer(context, config, state);
 +    if (!path) {
 +        throw new Error(
 +            "Rust Analyzer Language Server is not available. " +
 +                "Please, ensure its [proper installation](https://rust-analyzer.github.io/manual.html#installation)."
 +        );
 +    }
 +
 +    log.info("Using server binary at", path);
 +
 +    if (!isValidExecutable(path)) {
 +        if (config.serverPath) {
 +            throw new Error(`Failed to execute ${path} --version. \`config.server.path\` or \`config.serverPath\` has been set explicitly.\
 +            Consider removing this config or making a valid server binary available at that path.`);
 +        } else {
 +            throw new Error(`Failed to execute ${path} --version`);
 +        }
 +    }
 +
 +    return path;
 +}
 +
/**
 * Patches the server binary so it runs on NixOS, whose dynamic-linker path
 * differs from conventional distros. The binary at `dest` is renamed to
 * `<dest>-orig`, run through `patchelf` inside a `nix-build` derivation that
 * rewrites the ELF interpreter, and the result is linked back to `dest`.
 * The temporary original is deleted in all cases.
 */
async function patchelf(dest: vscode.Uri): Promise<void> {
    await vscode.window.withProgress(
        {
            location: vscode.ProgressLocation.Notification,
            title: "Patching rust-analyzer for NixOS",
        },
        async (progress, _) => {
            // Nix derivation: copy the source binary, make it executable, and
            // point its interpreter at the one from the Nix toolchain.
            const expression = `
            {srcStr, pkgs ? import <nixpkgs> {}}:
                pkgs.stdenv.mkDerivation {
                    name = "rust-analyzer";
                    src = /. + srcStr;
                    phases = [ "installPhase" "fixupPhase" ];
                    installPhase = "cp $src $out";
                    fixupPhase = ''
                    chmod 755 $out
                    patchelf --set-interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" $out
                    '';
                }
            `;
            const origFile = vscode.Uri.file(dest.fsPath + "-orig");
            await vscode.workspace.fs.rename(dest, origFile, { overwrite: true });
            try {
                progress.report({ message: "Patching executable", increment: 20 });
                await new Promise((resolve, reject) => {
                    // `-E -` reads the expression from stdin, so no temp file
                    // is needed.
                    const handle = exec(
                        `nix-build -E - --argstr srcStr '${origFile.fsPath}' -o '${dest.fsPath}'`,
                        (err, stdout, stderr) => {
                            if (err != null) {
                                reject(Error(stderr));
                            } else {
                                resolve(stdout);
                            }
                        }
                    );
                    handle.stdin?.write(expression);
                    handle.stdin?.end();
                });
            } finally {
                await vscode.workspace.fs.delete(origFile);
            }
        }
    );
}
 +
/**
 * Determines which server binary to run:
 * 1. an explicitly configured path (with `~/` expansion),
 * 2. `rust-analyzer` from PATH for source builds (no release tag),
 * 3. the binary bundled with the extension (patchelf'd on NixOS),
 * 4. otherwise shows an error and returns `undefined`.
 */
async function getServer(
    context: vscode.ExtensionContext,
    config: Config,
    state: PersistentState
): Promise<string | undefined> {
    const explicitPath = serverPath(config);
    if (explicitPath) {
        if (explicitPath.startsWith("~/")) {
            return os.homedir() + explicitPath.slice("~".length);
        }
        return explicitPath;
    }
    if (config.package.releaseTag === null) return "rust-analyzer";

    const ext = process.platform === "win32" ? ".exe" : "";
    const bundled = vscode.Uri.joinPath(context.extensionUri, "server", `rust-analyzer${ext}`);
    const bundledExists = await vscode.workspace.fs.stat(bundled).then(
        () => true,
        () => false
    );
    if (bundledExists) {
        let server = bundled;
        if (await isNixOs()) {
            // NixOS can't run the bundled binary directly: copy it to global
            // storage and patch it. The copy is refreshed whenever the
            // extension version changes.
            await vscode.workspace.fs.createDirectory(config.globalStorageUri).then();
            const dest = vscode.Uri.joinPath(config.globalStorageUri, `rust-analyzer${ext}`);
            let exists = await vscode.workspace.fs.stat(dest).then(
                () => true,
                () => false
            );
            if (exists && config.package.version !== state.serverVersion) {
                await vscode.workspace.fs.delete(dest);
                exists = false;
            }
            if (!exists) {
                await vscode.workspace.fs.copy(bundled, dest);
                await patchelf(dest);
            }
            server = dest;
        }
        await state.updateServerVersion(config.package.version);
        return server.fsPath;
    }

    // No bundled binary for this platform: tell the user how to build one.
    await state.updateServerVersion(undefined);
    await vscode.window.showErrorMessage(
        "Unfortunately we don't ship binaries for your platform yet. " +
            "You need to manually clone the rust-analyzer repository and " +
            "run `cargo xtask install --server` to build the language server from sources. " +
            "If you feel that your platform should be supported, please create an issue " +
            "about that [here](https://github.com/rust-lang/rust-analyzer/issues) and we " +
            "will consider it."
    );
    return undefined;
}
 +
 +function serverPath(config: Config): string | null {
 +    return process.env.__RA_LSP_SERVER_DEBUG ?? config.serverPath;
 +}
 +
 +async function isNixOs(): Promise<boolean> {
 +    try {
 +        const contents = (
 +            await vscode.workspace.fs.readFile(vscode.Uri.file("/etc/os-release"))
 +        ).toString();
 +        const idString = contents.split("\n").find((a) => a.startsWith("ID=")) || "ID=linux";
 +        return idString.indexOf("nixos") !== -1;
 +    } catch {
 +        return false;
 +    }
 +}
 +
 +function warnAboutExtensionConflicts() {
 +    if (vscode.extensions.getExtension("rust-lang.rust")) {
 +        vscode.window
 +            .showWarningMessage(
 +                `You have both the rust-analyzer (rust-lang.rust-analyzer) and Rust (rust-lang.rust) ` +
 +                    "plugins enabled. These are known to conflict and cause various functions of " +
 +                    "both plugins to not work correctly. You should disable one of them.",
 +                "Got it"
 +            )
 +            .then(() => {}, console.error);
 +    }
 +}
 +
 +/**
 + * Sets up additional language configuration that's impossible to do via a
 + * separate language-configuration.json file. See [1] for more information.
 + *
 + * [1]: https://github.com/Microsoft/vscode/issues/11514#issuecomment-244707076
 + */
/**
 * Sets up additional language configuration that's impossible to do via a
 * separate language-configuration.json file. See [1] for more information.
 *
 * The onEnter rules continue `///`, `//!`, and `/* ... * /`-style comments
 * when the user presses Enter inside them.
 *
 * [1]: https://github.com/Microsoft/vscode/issues/11514#issuecomment-244707076
 */
function configureLanguage(): vscode.Disposable {
    const indentAction = vscode.IndentAction.None;
    return vscode.languages.setLanguageConfiguration("rust", {
        onEnterRules: [
            {
                // Doc single-line comment
                // e.g. ///|
                beforeText: /^\s*\/{3}.*$/,
                action: { indentAction, appendText: "/// " },
            },
            {
                // Parent doc single-line comment
                // e.g. //!|
                beforeText: /^\s*\/{2}\!.*$/,
                action: { indentAction, appendText: "//! " },
            },
            {
                // Begins an auto-closed multi-line comment (standard or parent doc)
                // e.g. /** | */ or /*! | */
                beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/,
                afterText: /^\s*\*\/$/,
                action: { indentAction: vscode.IndentAction.IndentOutdent, appendText: " * " },
            },
            {
                // Begins a multi-line comment (standard or parent doc)
                // e.g. /** ...| or /*! ...|
                beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/,
                action: { indentAction, appendText: " * " },
            },
            {
                // Continues a multi-line comment
                // e.g.  * ...|
                beforeText: /^(\ \ )*\ \*(\ ([^\*]|\*(?!\/))*)?$/,
                action: { indentAction, appendText: "* " },
            },
            {
                // Dedents after closing a multi-line comment
                // e.g.  */|
                // (`removeText: 1` trims one character of indentation on the new line)
                beforeText: /^(\ \ )*\ \*\/\s*$/,
                action: { indentAction, removeText: 1 },
            },
        ],
    });
}
index 4a59c4c0fad050a65372a56b2c2281cdd4ca0423,0000000000000000000000000000000000000000..36d728456f77c48248c34102ad2ab5f6fc1da12b
mode 100644,000000..100644
--- /dev/null
@@@ -1,46 -1,0 +1,46 @@@
-     let (writer_sender, writer) = make_write(stream.try_clone().unwrap());
 +use std::{
 +    io::{self, BufReader},
 +    net::TcpStream,
 +    thread,
 +};
 +
 +use crossbeam_channel::{bounded, Receiver, Sender};
 +
 +use crate::{
 +    stdio::{make_io_threads, IoThreads},
 +    Message,
 +};
 +
 +pub(crate) fn socket_transport(
 +    stream: TcpStream,
 +) -> (Sender<Message>, Receiver<Message>, IoThreads) {
 +    let (reader_receiver, reader) = make_reader(stream.try_clone().unwrap());
++    let (writer_sender, writer) = make_write(stream);
 +    let io_threads = make_io_threads(reader, writer);
 +    (writer_sender, reader_receiver, io_threads)
 +}
 +
 +fn make_reader(stream: TcpStream) -> (Receiver<Message>, thread::JoinHandle<io::Result<()>>) {
 +    let (reader_sender, reader_receiver) = bounded::<Message>(0);
 +    let reader = thread::spawn(move || {
 +        let mut buf_read = BufReader::new(stream);
 +        while let Some(msg) = Message::read(&mut buf_read).unwrap() {
 +            let is_exit = matches!(&msg, Message::Notification(n) if n.is_exit());
 +            reader_sender.send(msg).unwrap();
 +            if is_exit {
 +                break;
 +            }
 +        }
 +        Ok(())
 +    });
 +    (reader_receiver, reader)
 +}
 +
 +fn make_write(mut stream: TcpStream) -> (Sender<Message>, thread::JoinHandle<io::Result<()>>) {
 +    let (writer_sender, writer_receiver) = bounded::<Message>(0);
 +    let writer = thread::spawn(move || {
 +        writer_receiver.into_iter().try_for_each(|it| it.write(&mut stream)).unwrap();
 +        Ok(())
 +    });
 +    (writer_sender, writer)
 +}