--- /dev/null
- "parking_lot_core 0.9.3",
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "addr2line"
+version = "0.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b9ecd88a8c8378ca913a680cd98f0f13ac67383d35993f86c90a70e3f137816b"
+dependencies = [
+ "gimli",
+]
+
+[[package]]
+name = "adler"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+
+[[package]]
+name = "always-assert"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fbf688625d06217d5b1bb0ea9d9c44a1635fd0ee3534466388d18203174f4d11"
+dependencies = [
+ "log",
+]
+
+[[package]]
+name = "ansi_term"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "anyhow"
+version = "1.0.65"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "98161a4e3e2184da77bb14f02184cdd111e83bbbcc9979dfee3c44b9a85f5602"
+
+[[package]]
+name = "anymap"
+version = "1.0.0-beta.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8f1f8f5a6f3d50d89e3797d7593a50f96bb2aaa20ca0cc7be1fb673232c91d72"
+
+[[package]]
+name = "arbitrary"
+version = "1.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d86fd10d912cab78764cc44307d9cd5f164e09abbeb87fb19fb6d95937e8da5f"
+
+[[package]]
+name = "arrayvec"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8da52d66c7071e2e3fa2a1e5c6d088fec47b593032b254f5e980de8ea54454d6"
+
+[[package]]
+name = "atty"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
+dependencies = [
+ "hermit-abi",
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+
+[[package]]
+name = "backtrace"
+version = "0.3.66"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7"
+dependencies = [
+ "addr2line",
+ "cc",
+ "cfg-if",
+ "libc",
+ "miniz_oxide",
+ "object",
+ "rustc-demangle",
+]
+
+[[package]]
+name = "base-db"
+version = "0.0.0"
+dependencies = [
+ "cfg",
+ "profile",
+ "rustc-hash",
+ "salsa",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "tt",
+ "vfs",
+]
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "camino"
+version = "1.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88ad0e1e3e88dd237a156ab9f571021b8a158caa0ae44b1968a241efb5144c1e"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "cargo-platform"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cbdb825da8a5df079a43676dbe042702f1707b1109f713a01420fbb4cc71fa27"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "cargo_metadata"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3abb7553d5b9b8421c6de7cb02606ff15e0c6eea7d8eadd75ef013fd636bec36"
+dependencies = [
+ "camino",
+ "cargo-platform",
+ "semver",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "cc"
+version = "1.0.73"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11"
+
+[[package]]
+name = "cfg"
+version = "0.0.0"
+dependencies = [
+ "arbitrary",
+ "derive_arbitrary",
+ "expect-test",
+ "mbe",
+ "oorandom",
+ "rustc-hash",
+ "syntax",
+ "tt",
+]
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "chalk-derive"
+version = "0.86.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5499d415d855b5094366a824815341893ad3de0ecb6048c430118bdae6d27402"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
+[[package]]
+name = "chalk-ir"
+version = "0.86.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3800118c76a48507b0eece3a01f3a429b5c478d203c493096e6040c67ab960e1"
+dependencies = [
+ "bitflags",
+ "chalk-derive",
+ "lazy_static",
+]
+
+[[package]]
+name = "chalk-recursive"
+version = "0.86.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1baf60628fd73104d1f8562586a52d48f37f1e84435aab2e62674b1fd935b8c8"
+dependencies = [
+ "chalk-derive",
+ "chalk-ir",
+ "chalk-solve",
+ "rustc-hash",
+ "tracing",
+]
+
+[[package]]
+name = "chalk-solve"
+version = "0.86.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0e9c3c068f9358786348e58a1b94ef0a5cf90a9810fc1f10fda896f0b5d80185"
+dependencies = [
+ "chalk-derive",
+ "chalk-ir",
+ "ena",
+ "indexmap",
+ "itertools",
+ "petgraph",
+ "rustc-hash",
+ "tracing",
+]
+
++[[package]]
++name = "command-group"
++version = "1.0.8"
++source = "registry+https://github.com/rust-lang/crates.io-index"
++checksum = "f7a8a86f409b4a59df3a3e4bee2de0b83f1755fdd2a25e3a9684c396fc4bed2c"
++dependencies = [
++ "nix",
++ "winapi",
++]
++
+[[package]]
+name = "countme"
+version = "3.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636"
+dependencies = [
+ "dashmap",
+ "once_cell",
+ "rustc-hash",
+]
+
+[[package]]
+name = "cov-mark"
+version = "2.0.0-pre.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0d48d8f76bd9331f19fe2aaf3821a9f9fb32c3963e1e3d6ce82a8c09cef7444a"
+
+[[package]]
+name = "crc32fast"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "crossbeam-channel"
+version = "0.5.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521"
+dependencies = [
+ "cfg-if",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-deque"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc"
+dependencies = [
+ "cfg-if",
+ "crossbeam-epoch",
+ "crossbeam-utils",
+]
+
+[[package]]
+name = "crossbeam-epoch"
+version = "0.9.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f916dfc5d356b0ed9dae65f1db9fc9770aa2851d2662b988ccf4fe3516e86348"
+dependencies = [
+ "autocfg",
+ "cfg-if",
+ "crossbeam-utils",
+ "memoffset",
+ "scopeguard",
+]
+
+[[package]]
+name = "crossbeam-utils"
+version = "0.8.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "edbafec5fa1f196ca66527c1b12c2ec4745ca14b50f1ad8f9f6f720b55d11fac"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "dashmap"
+version = "5.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc"
+dependencies = [
+ "cfg-if",
+ "hashbrown",
+ "lock_api",
+ "once_cell",
- version = "0.2.17"
++ "parking_lot_core 0.9.4",
+]
+
+[[package]]
+name = "derive_arbitrary"
+version = "1.1.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "226ad66541d865d7a7173ad6a9e691c33fdb910ac723f4bc734b3e5294a1f931"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "dissimilar"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8c97b9233581d84b8e1e689cdd3a47b6f69770084fc246e86a7f78b0d9c1d4a5"
+
+[[package]]
+name = "dot"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a74b6c4d4a1cff5f454164363c16b72fa12463ca6b31f4b5f2035a65fa3d5906"
+
+[[package]]
+name = "drop_bomb"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9bda8e21c04aca2ae33ffc2fd8c23134f3cac46db123ba97bd9d3f3b8a4a85e1"
+
+[[package]]
+name = "either"
+version = "1.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797"
+
+[[package]]
+name = "ena"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d7402b94a93c24e742487327a7cd839dc9d36fec9de9fb25b09f2dae459f36c3"
+dependencies = [
+ "log",
+]
+
+[[package]]
+name = "expect-test"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1d4661aca38d826eb7c72fe128e4238220616de4c0cc00db7bfc38e2e1364dd3"
+dependencies = [
+ "dissimilar",
+ "once_cell",
+]
+
+[[package]]
+name = "filetime"
- checksum = "e94a7bbaa59354bc20dd75b67f23e2797b4490e9d6928203fb105c79e448c86c"
++version = "0.2.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
- "windows-sys 0.36.1",
++checksum = "4b9663d381d07ae25dc88dbdf27df458faa83a9b25336bcac83d5e452b5fc9d3"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall",
- version = "0.4.0"
++ "windows-sys 0.42.0",
+]
+
+[[package]]
+name = "fixedbitset"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "37ab347416e802de484e4d03c7316c48f1ecb56574dfd4a46a80f173ce1de04d"
+
+[[package]]
+name = "flate2"
+version = "1.0.24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6"
+dependencies = [
+ "crc32fast",
+ "miniz_oxide",
+]
+
+[[package]]
+name = "flycheck"
+version = "0.0.0"
+dependencies = [
+ "cargo_metadata",
++ "command-group",
+ "crossbeam-channel",
+ "jod-thread",
+ "paths",
+ "rustc-hash",
+ "serde",
+ "serde_json",
+ "stdx",
+ "toolchain",
+ "tracing",
+]
+
+[[package]]
+name = "form_urlencoded"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9c384f161156f5260c24a097c56119f9be8c798586aecc13afbcbe7b7e26bf8"
+dependencies = [
+ "percent-encoding",
+]
+
+[[package]]
+name = "fs_extra"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2022715d62ab30faffd124d40b76f4134a550a87792276512b18d63272333394"
+
+[[package]]
+name = "fsevent-sys"
+version = "4.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "fst"
+version = "0.4.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ab85b9b05e3978cc9a9cf8fea7f01b494e1a09ed3037e16ba39edc7a29eb61a"
+
+[[package]]
+name = "gimli"
+version = "0.26.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d"
+
+[[package]]
+name = "hashbrown"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
+
+[[package]]
+name = "heck"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d621efb26863f0e9924c6ac577e8275e5e6b77455db64ffa6c65c904e9e132c"
+dependencies = [
+ "unicode-segmentation",
+]
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "hir"
+version = "0.0.0"
+dependencies = [
+ "arrayvec",
+ "base-db",
+ "cfg",
+ "either",
+ "hir-def",
+ "hir-expand",
+ "hir-ty",
+ "itertools",
+ "once_cell",
+ "profile",
+ "rustc-hash",
+ "smallvec",
+ "stdx",
+ "syntax",
+ "tt",
+]
+
+[[package]]
+name = "hir-def"
+version = "0.0.0"
+dependencies = [
+ "anymap",
+ "arrayvec",
+ "base-db",
+ "bitflags",
+ "cfg",
+ "cov-mark",
+ "dashmap",
+ "drop_bomb",
+ "either",
+ "expect-test",
+ "fst",
+ "hashbrown",
+ "hir-expand",
+ "indexmap",
+ "itertools",
+ "la-arena",
+ "limit",
+ "mbe",
+ "once_cell",
+ "profile",
+ "rustc-hash",
+ "smallvec",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "tracing",
+ "tt",
+]
+
+[[package]]
+name = "hir-expand"
+version = "0.0.0"
+dependencies = [
+ "base-db",
+ "cfg",
+ "cov-mark",
+ "either",
+ "expect-test",
+ "hashbrown",
+ "itertools",
+ "la-arena",
+ "limit",
+ "mbe",
+ "profile",
+ "rustc-hash",
+ "smallvec",
+ "stdx",
+ "syntax",
+ "tracing",
+ "tt",
+]
+
+[[package]]
+name = "hir-ty"
+version = "0.0.0"
+dependencies = [
+ "arrayvec",
+ "base-db",
+ "chalk-derive",
+ "chalk-ir",
+ "chalk-recursive",
+ "chalk-solve",
+ "cov-mark",
+ "ena",
+ "expect-test",
+ "hir-def",
+ "hir-expand",
+ "itertools",
+ "la-arena",
+ "limit",
+ "once_cell",
+ "profile",
+ "rustc-hash",
+ "scoped-tls",
+ "smallvec",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "tracing",
+ "tracing-subscriber",
+ "tracing-tree",
+ "typed-arena",
+]
+
+[[package]]
+name = "home"
+version = "0.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "747309b4b440c06d57b0b25f2aee03ee9b5e5397d288c60e21fc709bb98a7408"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "ide"
+version = "0.0.0"
+dependencies = [
+ "cfg",
+ "cov-mark",
+ "crossbeam-channel",
+ "dot",
+ "either",
+ "expect-test",
+ "hir",
+ "ide-assists",
+ "ide-completion",
+ "ide-db",
+ "ide-diagnostics",
+ "ide-ssr",
+ "itertools",
+ "oorandom",
+ "profile",
+ "pulldown-cmark",
+ "pulldown-cmark-to-cmark",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "text-edit",
+ "toolchain",
+ "tracing",
+ "url",
+]
+
+[[package]]
+name = "ide-assists"
+version = "0.0.0"
+dependencies = [
+ "cov-mark",
+ "either",
+ "expect-test",
+ "hir",
+ "ide-db",
+ "itertools",
+ "profile",
+ "sourcegen",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "text-edit",
+]
+
+[[package]]
+name = "ide-completion"
+version = "0.0.0"
+dependencies = [
+ "base-db",
+ "cov-mark",
+ "expect-test",
+ "hir",
+ "ide-db",
+ "itertools",
+ "once_cell",
+ "profile",
+ "smallvec",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "text-edit",
+]
+
+[[package]]
+name = "ide-db"
+version = "0.0.0"
+dependencies = [
+ "arrayvec",
+ "base-db",
+ "cov-mark",
+ "either",
+ "expect-test",
+ "fst",
+ "hir",
+ "indexmap",
+ "itertools",
+ "limit",
+ "memchr",
+ "once_cell",
+ "parser",
+ "profile",
+ "rayon",
+ "rustc-hash",
+ "sourcegen",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "text-edit",
+ "tracing",
+ "xshell",
+]
+
+[[package]]
+name = "ide-diagnostics"
+version = "0.0.0"
+dependencies = [
+ "cfg",
+ "cov-mark",
+ "either",
+ "expect-test",
+ "hir",
+ "ide-db",
+ "itertools",
+ "profile",
+ "serde_json",
+ "sourcegen",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "text-edit",
+]
+
+[[package]]
+name = "ide-ssr"
+version = "0.0.0"
+dependencies = [
+ "cov-mark",
+ "expect-test",
+ "hir",
+ "ide-db",
+ "itertools",
+ "parser",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "text-edit",
+]
+
+[[package]]
+name = "idna"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e14ddfc70884202db2244c223200c204c2bda1bc6e0998d11b5e024d657209e6"
+dependencies = [
+ "unicode-bidi",
+ "unicode-normalization",
+]
+
+[[package]]
+name = "indexmap"
+version = "1.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e"
+dependencies = [
+ "autocfg",
+ "hashbrown",
+]
+
+[[package]]
+name = "inotify"
+version = "0.9.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8069d3ec154eb856955c1c0fbffefbf5f3c40a104ec912d4797314c1801abff"
+dependencies = [
+ "bitflags",
+ "inotify-sys",
+ "libc",
+]
+
+[[package]]
+name = "inotify-sys"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "instant"
+version = "0.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "itertools"
+version = "0.10.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473"
+dependencies = [
+ "either",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4217ad341ebadf8d8e724e264f13e593e0648f5b3e94b3896a5df283be015ecc"
+
+[[package]]
+name = "jod-thread"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b23360e99b8717f20aaa4598f5a6541efbe30630039fbc7706cf954a87947ae"
+
+[[package]]
+name = "kqueue"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4d6112e8f37b59803ac47a42d14f1f3a59bbf72fc6857ffc5be455e28a691f8e"
+dependencies = [
+ "kqueue-sys",
+ "libc",
+]
+
+[[package]]
+name = "kqueue-sys"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8367585489f01bc55dd27404dcf56b95e6da061a256a666ab23be9ba96a2e587"
+dependencies = [
+ "bitflags",
+ "libc",
+]
+
+[[package]]
+name = "la-arena"
+version = "0.3.0"
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "libc"
+version = "0.2.135"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "68783febc7782c6c5cb401fbda4de5a9898be1762314da0bb2c10ced61f18b0c"
+
+[[package]]
+name = "libloading"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "efbc0f03f9a775e9f6aed295c6a1ba2253c5757a9e03d55c6caa46a681abcddd"
+dependencies = [
+ "cfg-if",
+ "winapi",
+]
+
+[[package]]
+name = "libmimalloc-sys"
+version = "0.1.26"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8fc093ab289b0bfda3aa1bdfab9c9542be29c7ef385cfcbe77f8c9813588eb48"
+dependencies = [
+ "cc",
+]
+
+[[package]]
+name = "limit"
+version = "0.0.0"
+
+[[package]]
+name = "lock_api"
+version = "0.4.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df"
+dependencies = [
+ "autocfg",
+ "scopeguard",
+]
+
+[[package]]
+name = "log"
+version = "0.4.17"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "lsp-server"
+version = "0.7.0"
+dependencies = [
+ "crossbeam-channel",
+ "log",
+ "lsp-types",
+ "serde",
+ "serde_json",
+]
+
+[[package]]
+name = "lsp-types"
+version = "0.93.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9be6e9c7e2d18f651974370d7aff703f9513e0df6e464fd795660edc77e6ca51"
+dependencies = [
+ "bitflags",
+ "serde",
+ "serde_json",
+ "serde_repr",
+ "url",
+]
+
+[[package]]
+name = "matchers"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+dependencies = [
+ "regex-automata",
+]
+
+[[package]]
+name = "mbe"
+version = "0.0.0"
+dependencies = [
+ "cov-mark",
+ "parser",
+ "rustc-hash",
+ "smallvec",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "tracing",
+ "tt",
+]
+
+[[package]]
+name = "memchr"
+version = "2.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
+
+[[package]]
+name = "memmap2"
+version = "0.5.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "95af15f345b17af2efc8ead6080fb8bc376f8cec1b35277b935637595fe77498"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "memoffset"
+version = "0.6.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5aa361d4faea93603064a027415f07bd8e1d5c88c9fbf68bf56a285428fd79ce"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "mimalloc"
+version = "0.1.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "76ce6a4b40d3bff9eb3ce9881ca0737a85072f9f975886082640cd46a75cdb35"
+dependencies = [
+ "libmimalloc-sys",
+]
+
+[[package]]
+name = "miniz_oxide"
+version = "0.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34"
+dependencies = [
+ "adler",
+]
+
+[[package]]
+name = "mio"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "57ee1c23c7c63b0c9250c339ffdc69255f110b298b901b9f6c82547b7b87caaf"
+dependencies = [
+ "libc",
+ "log",
+ "wasi",
+ "windows-sys 0.36.1",
+]
+
+[[package]]
+name = "miow"
- checksum = "a7377f7792b3afb6a3cba68daa54ca23c032137010460d667fda53a8d66be00e"
++version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
- "windows-sys 0.28.0",
++checksum = "52ffbca2f655e33c08be35d87278e5b18b89550a37dbd598c20db92f6a471123"
+dependencies = [
- "parking_lot_core 0.9.3",
++ "windows-sys 0.42.0",
++]
++
++[[package]]
++name = "nix"
++version = "0.22.3"
++source = "registry+https://github.com/rust-lang/crates.io-index"
++checksum = "e4916f159ed8e5de0082076562152a76b7a1f64a01fd9d1e0fea002c37624faf"
++dependencies = [
++ "bitflags",
++ "cc",
++ "cfg-if",
++ "libc",
++ "memoffset",
+]
+
+[[package]]
+name = "notify"
+version = "5.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ed2c66da08abae1c024c01d635253e402341b4060a12e99b31c7594063bf490a"
+dependencies = [
+ "bitflags",
+ "crossbeam-channel",
+ "filetime",
+ "fsevent-sys",
+ "inotify",
+ "kqueue",
+ "libc",
+ "mio",
+ "walkdir",
+ "winapi",
+]
+
+[[package]]
+name = "num_cpus"
+version = "1.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1"
+dependencies = [
+ "hermit-abi",
+ "libc",
+]
+
+[[package]]
+name = "object"
+version = "0.29.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "once_cell"
+version = "1.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1"
+
+[[package]]
+name = "oorandom"
+version = "11.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575"
+
+[[package]]
+name = "parking_lot"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7d17b78036a60663b797adeaee46f5c9dfebb86948d1255007a1d6be0271ff99"
+dependencies = [
+ "instant",
+ "lock_api",
+ "parking_lot_core 0.8.5",
+]
+
+[[package]]
+name = "parking_lot"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f"
+dependencies = [
+ "lock_api",
- version = "0.9.3"
++ "parking_lot_core 0.9.4",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d76e8e1493bcac0d2766c42737f34458f1c8c50c0d23bcb24ea953affb273216"
+dependencies = [
+ "cfg-if",
+ "instant",
+ "libc",
+ "redox_syscall",
+ "smallvec",
+ "winapi",
+]
+
+[[package]]
+name = "parking_lot_core"
- checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929"
++version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
- "windows-sys 0.36.1",
++checksum = "4dc9e0dc2adc1c69d09143aff38d3d30c5c3f0df0dad82e6d25547af174ebec0"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall",
+ "smallvec",
- [[package]]
- name = "windows-sys"
- version = "0.28.0"
- source = "registry+https://github.com/rust-lang/crates.io-index"
- checksum = "82ca39602d5cbfa692c4b67e3bcbb2751477355141c1ed434c94da4186836ff6"
- dependencies = [
- "windows_aarch64_msvc 0.28.0",
- "windows_i686_gnu 0.28.0",
- "windows_i686_msvc 0.28.0",
- "windows_x86_64_gnu 0.28.0",
- "windows_x86_64_msvc 0.28.0",
- ]
-
++ "windows-sys 0.42.0",
+]
+
+[[package]]
+name = "parser"
+version = "0.0.0"
+dependencies = [
+ "drop_bomb",
+ "expect-test",
+ "limit",
+ "rustc-ap-rustc_lexer",
+ "sourcegen",
+]
+
+[[package]]
+name = "paste"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1"
+
+[[package]]
+name = "paths"
+version = "0.0.0"
+
+[[package]]
+name = "percent-encoding"
+version = "2.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "478c572c3d73181ff3c2539045f6eb99e5491218eae919370993b890cdbdd98e"
+
+[[package]]
+name = "perf-event"
+version = "0.4.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5396562cd2eaa828445d6d34258ae21ee1eb9d40fe626ca7f51c8dccb4af9d66"
+dependencies = [
+ "libc",
+ "perf-event-open-sys",
+]
+
+[[package]]
+name = "perf-event-open-sys"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce9bedf5da2c234fdf2391ede2b90fabf585355f33100689bc364a3ea558561a"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "petgraph"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "467d164a6de56270bd7c4d070df81d07beace25012d5103ced4e9ff08d6afdb7"
+dependencies = [
+ "fixedbitset",
+ "indexmap",
+]
+
+[[package]]
+name = "pin-project-lite"
+version = "0.2.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e0a7ae3ac2f1173085d398531c705756c94a4c56843785df85a60c1a0afac116"
+
+[[package]]
+name = "proc-macro-api"
+version = "0.0.0"
+dependencies = [
+ "memmap2",
+ "object",
+ "paths",
+ "profile",
+ "serde",
+ "serde_json",
+ "snap",
+ "stdx",
+ "tracing",
+ "tt",
+]
+
+[[package]]
+name = "proc-macro-srv"
+version = "0.0.0"
+dependencies = [
+ "expect-test",
+ "libloading",
+ "mbe",
+ "memmap2",
+ "object",
+ "paths",
+ "proc-macro-api",
+ "proc-macro-test",
+ "tt",
+]
+
+[[package]]
+name = "proc-macro-srv-cli"
+version = "0.0.0"
+dependencies = [
+ "proc-macro-srv",
+]
+
+[[package]]
+name = "proc-macro-test"
+version = "0.0.0"
+dependencies = [
+ "cargo_metadata",
+ "proc-macro-test-impl",
+ "toolchain",
+]
+
+[[package]]
+name = "proc-macro-test-impl"
+version = "0.0.0"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.47"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "profile"
+version = "0.0.0"
+dependencies = [
+ "cfg-if",
+ "countme",
+ "la-arena",
+ "libc",
+ "once_cell",
+ "perf-event",
+ "tikv-jemalloc-ctl",
+ "winapi",
+]
+
+[[package]]
+name = "project-model"
+version = "0.0.0"
+dependencies = [
+ "anyhow",
+ "base-db",
+ "cargo_metadata",
+ "cfg",
+ "expect-test",
+ "la-arena",
+ "paths",
+ "profile",
+ "rustc-hash",
+ "semver",
+ "serde",
+ "serde_json",
+ "stdx",
+ "toolchain",
+ "tracing",
+]
+
+[[package]]
+name = "protobuf"
+version = "3.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4ee4a7d8b91800c8f167a6268d1a1026607368e1adc84e98fe044aeb905302f7"
+dependencies = [
+ "once_cell",
+ "protobuf-support",
+ "thiserror",
+]
+
+[[package]]
+name = "protobuf-support"
+version = "3.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ca157fe12fc7ee2e315f2f735e27df41b3d97cdd70ea112824dac1ffb08ee1c"
+dependencies = [
+ "thiserror",
+]
+
+[[package]]
+name = "pulldown-cmark"
+version = "0.9.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2d9cc634bc78768157b5cbfe988ffcd1dcba95cd2b2f03a88316c08c6d00ed63"
+dependencies = [
+ "bitflags",
+ "memchr",
+ "unicase",
+]
+
+[[package]]
+name = "pulldown-cmark-to-cmark"
+version = "10.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0194e6e1966c23cc5fd988714f85b18d548d773e81965413555d96569931833d"
+dependencies = [
+ "pulldown-cmark",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rayon"
+version = "1.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd99e5772ead8baa5215278c9b15bf92087709e9c1b2d1f97cdb5a183c933a7d"
+dependencies = [
+ "autocfg",
+ "crossbeam-deque",
+ "either",
+ "rayon-core",
+]
+
+[[package]]
+name = "rayon-core"
+version = "1.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f"
+dependencies = [
+ "crossbeam-channel",
+ "crossbeam-deque",
+ "crossbeam-utils",
+ "num_cpus",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a"
+dependencies = [
+ "bitflags",
+]
+
+[[package]]
+name = "regex"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"
+dependencies = [
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-automata"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+dependencies = [
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.27"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
+
+[[package]]
+name = "rowan"
+version = "0.15.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5811547e7ba31e903fe48c8ceab10d40d70a101f3d15523c847cce91aa71f332"
+dependencies = [
+ "countme",
+ "hashbrown",
+ "memoffset",
+ "rustc-hash",
+ "text-size",
+]
+
+[[package]]
+name = "rust-analyzer"
+version = "0.0.0"
+dependencies = [
+ "always-assert",
+ "anyhow",
+ "cfg",
+ "crossbeam-channel",
+ "dissimilar",
+ "expect-test",
+ "flycheck",
+ "hir",
+ "hir-def",
+ "hir-ty",
+ "ide",
+ "ide-db",
+ "ide-ssr",
+ "itertools",
+ "jod-thread",
+ "lsp-server",
+ "lsp-types",
+ "mbe",
+ "mimalloc",
+ "num_cpus",
+ "oorandom",
+ "parking_lot 0.12.1",
+ "proc-macro-api",
+ "proc-macro-srv",
+ "profile",
+ "project-model",
+ "rayon",
+ "rustc-hash",
+ "scip",
+ "serde",
+ "serde_json",
+ "sourcegen",
+ "stdx",
+ "syntax",
+ "test-utils",
+ "threadpool",
+ "tikv-jemallocator",
+ "toolchain",
+ "tracing",
+ "tracing-log",
+ "tracing-subscriber",
+ "tracing-tree",
+ "tt",
+ "vfs",
+ "vfs-notify",
+ "winapi",
+ "xflags",
+ "xshell",
+]
+
+[[package]]
+name = "rustc-ap-rustc_lexer"
+version = "725.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f950742ef8a203aa7661aad3ab880438ddeb7f95d4b837c30d65db1a2c5df68e"
+dependencies = [
+ "unicode-xid",
+]
+
+[[package]]
+name = "rustc-demangle"
+version = "0.1.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ef03e0a2b150c7a90d01faf6254c9c48a41e95fb2a8c2ac1c6f0d2b9aefc342"
+
+[[package]]
+name = "rustc-hash"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2"
+
+[[package]]
+name = "ryu"
+version = "1.0.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09"
+
+[[package]]
+name = "salsa"
+version = "0.17.0-pre.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9b223dccb46c32753144d0b51290da7230bb4aedcd8379d6b4c9a474c18bf17a"
+dependencies = [
+ "crossbeam-utils",
+ "indexmap",
+ "lock_api",
+ "log",
+ "oorandom",
+ "parking_lot 0.11.2",
+ "rustc-hash",
+ "salsa-macros",
+ "smallvec",
+]
+
+[[package]]
+name = "salsa-macros"
+version = "0.17.0-pre.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac6c2e352df550bf019da7b16164ed2f7fa107c39653d1311d1bba42d1582ff7"
+dependencies = [
+ "heck",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "scip"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b2bfbb10286f69fad7c78db71004b7839bf957788359fe0c479f029f9849136b"
+dependencies = [
+ "protobuf",
+]
+
+[[package]]
+name = "scoped-tls"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea6a9290e3c9cf0f18145ef7ffa62d68ee0bf5fcd651017e586dc7fd5da448c2"
+
+[[package]]
+name = "scopeguard"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
+
+[[package]]
+name = "semver"
+version = "1.0.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "serde"
+version = "1.0.145"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.145"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.86"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41feea4228a6f1cd09ec7a3593a682276702cd67b5273544757dae23c096f074"
+dependencies = [
+ "indexmap",
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "serde_repr"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1fe39d9fbb0ebf5eb2c7cb7e2a47e4f462fad1379f1166b8ae49ad9eae89a7ca"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "sharded-slab"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31"
+dependencies = [
+ "lazy_static",
+]
+
+[[package]]
+name = "smallvec"
+version = "1.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a507befe795404456341dfab10cef66ead4c041f62b8b11bbb92bffe5d0953e0"
+
+[[package]]
+name = "smol_str"
+version = "0.1.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7475118a28b7e3a2e157ce0131ba8c5526ea96e90ee601d9f6bb2e286a35ab44"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "snap"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "45456094d1983e2ee2a18fdfebce3189fa451699d0502cb8e3b49dba5ba41451"
+
+[[package]]
+name = "sourcegen"
+version = "0.0.0"
+dependencies = [
+ "xshell",
+]
+
+[[package]]
+name = "stdx"
+version = "0.0.0"
+dependencies = [
+ "always-assert",
+ "backtrace",
+ "libc",
+ "miow",
+ "winapi",
+]
+
+[[package]]
+name = "syn"
+version = "1.0.102"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3fcd952facd492f9be3ef0d0b7032a6e442ee9b361d4acc2b1d0c4aaa5f613a1"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "synstructure"
+version = "0.12.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "unicode-xid",
+]
+
+[[package]]
+name = "syntax"
+version = "0.0.0"
+dependencies = [
+ "cov-mark",
+ "expect-test",
+ "indexmap",
+ "itertools",
+ "once_cell",
+ "parser",
+ "proc-macro2",
+ "profile",
+ "quote",
+ "rayon",
+ "rowan",
+ "rustc-ap-rustc_lexer",
+ "rustc-hash",
+ "smol_str",
+ "sourcegen",
+ "stdx",
+ "test-utils",
+ "text-edit",
+ "ungrammar",
+]
+
+[[package]]
+name = "test-utils"
+version = "0.0.0"
+dependencies = [
+ "dissimilar",
+ "profile",
+ "rustc-hash",
+ "stdx",
+ "text-size",
+]
+
+[[package]]
+name = "text-edit"
+version = "0.0.0"
+dependencies = [
+ "itertools",
+ "text-size",
+]
+
+[[package]]
+name = "text-size"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "288cb548dbe72b652243ea797201f3d481a0609a967980fcc5b2315ea811560a"
+
+[[package]]
+name = "thiserror"
+version = "1.0.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10deb33631e3c9018b9baf9dcbbc4f737320d2b576bac10f6aefa048fa407e3e"
+dependencies = [
+ "thiserror-impl",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "1.0.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "thread_local"
+version = "1.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5516c27b78311c50bf42c071425c560ac799b11c30b31f87e3081965fe5e0180"
+dependencies = [
+ "once_cell",
+]
+
+[[package]]
+name = "threadpool"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa"
+dependencies = [
+ "num_cpus",
+]
+
+[[package]]
+name = "tikv-jemalloc-ctl"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e37706572f4b151dff7a0146e040804e9c26fe3a3118591112f05cf12a4216c1"
+dependencies = [
+ "libc",
+ "paste",
+ "tikv-jemalloc-sys",
+]
+
+[[package]]
+name = "tikv-jemalloc-sys"
+version = "0.5.2+5.3.0-patched"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec45c14da997d0925c7835883e4d5c181f196fa142f8c19d7643d1e9af2592c3"
+dependencies = [
+ "cc",
+ "fs_extra",
+ "libc",
+]
+
+[[package]]
+name = "tikv-jemallocator"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "20612db8a13a6c06d57ec83953694185a367e16945f66565e8028d2c0bd76979"
+dependencies = [
+ "libc",
+ "tikv-jemalloc-sys",
+]
+
+[[package]]
+name = "tinyvec"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
+dependencies = [
+ "tinyvec_macros",
+]
+
+[[package]]
+name = "tinyvec_macros"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
+
+[[package]]
+name = "toolchain"
+version = "0.0.0"
+dependencies = [
+ "home",
+]
+
+[[package]]
+name = "tracing"
+version = "0.1.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8"
+dependencies = [
+ "cfg-if",
+ "pin-project-lite",
+ "tracing-attributes",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-attributes"
+version = "0.1.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "tracing-core"
+version = "0.1.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24eb03ba0eab1fd845050058ce5e616558e8f8d8fca633e6b163fe25c797213a"
+dependencies = [
+ "once_cell",
+ "valuable",
+]
+
+[[package]]
+name = "tracing-log"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922"
+dependencies = [
+ "lazy_static",
+ "log",
+ "tracing-core",
+]
+
+[[package]]
+name = "tracing-subscriber"
+version = "0.3.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a6176eae26dd70d0c919749377897b54a9276bd7061339665dd68777926b5a70"
+dependencies = [
+ "matchers",
+ "once_cell",
+ "regex",
+ "sharded-slab",
+ "thread_local",
+ "tracing",
+ "tracing-core",
+ "tracing-log",
+]
+
+[[package]]
+name = "tracing-tree"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d07e90b329c621ade432823988574e820212648aa40e7a2497777d58de0fb453"
+dependencies = [
+ "ansi_term",
+ "atty",
+ "tracing-core",
+ "tracing-log",
+ "tracing-subscriber",
+]
+
+[[package]]
+name = "tt"
+version = "0.0.0"
+dependencies = [
+ "smol_str",
+ "stdx",
+]
+
+[[package]]
+name = "typed-arena"
+version = "2.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0685c84d5d54d1c26f7d3eb96cd41550adb97baed141a761cf335d3d33bcd0ae"
+
+[[package]]
+name = "ungrammar"
+version = "1.16.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3e5df347f0bf3ec1d670aad6ca5c6a1859cd9ea61d2113125794654ccced68f"
+
+[[package]]
+name = "unicase"
+version = "2.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
+dependencies = [
+ "version_check",
+]
+
+[[package]]
+name = "unicode-bidi"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "099b7128301d285f79ddd55b9a83d5e6b9e97c92e0ea0daebee7263e932de992"
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3"
+
+[[package]]
+name = "unicode-normalization"
+version = "0.1.22"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921"
+dependencies = [
+ "tinyvec",
+]
+
+[[package]]
+name = "unicode-segmentation"
+version = "1.10.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a"
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c"
+
+[[package]]
+name = "url"
+version = "2.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0d68c799ae75762b8c3fe375feb6600ef5602c883c5d21eb51c09f22b83c4643"
+dependencies = [
+ "form_urlencoded",
+ "idna",
+ "percent-encoding",
+ "serde",
+]
+
+[[package]]
+name = "valuable"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d"
+
+[[package]]
+name = "version_check"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+
+[[package]]
+name = "vfs"
+version = "0.0.0"
+dependencies = [
+ "fst",
+ "indexmap",
+ "paths",
+ "rustc-hash",
+ "stdx",
+]
+
+[[package]]
+name = "vfs-notify"
+version = "0.0.0"
+dependencies = [
+ "crossbeam-channel",
+ "jod-thread",
+ "notify",
+ "paths",
+ "tracing",
+ "vfs",
+ "walkdir",
+]
+
+[[package]]
+name = "walkdir"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56"
+dependencies = [
+ "same-file",
+ "winapi",
+ "winapi-util",
+]
+
+[[package]]
+name = "wasi"
+version = "0.11.0+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
- name = "windows_aarch64_msvc"
- version = "0.28.0"
+[[package]]
+name = "windows-sys"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ea04155a16a59f9eab786fe12a4a450e75cdb175f9e0d80da1e17db09f55b8d2"
+dependencies = [
+ "windows_aarch64_msvc 0.36.1",
+ "windows_i686_gnu 0.36.1",
+ "windows_i686_msvc 0.36.1",
+ "windows_x86_64_gnu 0.36.1",
+ "windows_x86_64_msvc 0.36.1",
+]
+
+[[package]]
- checksum = "52695a41e536859d5308cc613b4a022261a274390b25bd29dfff4bf08505f3c2"
++name = "windows-sys"
++version = "0.42.0"
++source = "registry+https://github.com/rust-lang/crates.io-index"
++checksum = "5a3e1820f08b8513f676f7ab6c1f99ff312fb97b553d30ff4dd86f9f15728aa7"
++dependencies = [
++ "windows_aarch64_gnullvm",
++ "windows_aarch64_msvc 0.42.0",
++ "windows_i686_gnu 0.42.0",
++ "windows_i686_msvc 0.42.0",
++ "windows_x86_64_gnu 0.42.0",
++ "windows_x86_64_gnullvm",
++ "windows_x86_64_msvc 0.42.0",
++]
++
++[[package]]
++name = "windows_aarch64_gnullvm"
++version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
- name = "windows_i686_gnu"
- version = "0.28.0"
++checksum = "41d2aa71f6f0cbe00ae5167d90ef3cfe66527d6f613ca78ac8024c3ccab9a19e"
+
+[[package]]
+name = "windows_aarch64_msvc"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9bb8c3fd39ade2d67e9874ac4f3db21f0d710bee00fe7cab16949ec184eeaa47"
+
+[[package]]
- checksum = "f54725ac23affef038fecb177de6c9bf065787c2f432f79e3c373da92f3e1d8a"
++name = "windows_aarch64_msvc"
++version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
- name = "windows_i686_msvc"
- version = "0.28.0"
++checksum = "dd0f252f5a35cac83d6311b2e795981f5ee6e67eb1f9a7f64eb4500fbc4dcdb4"
+
+[[package]]
+name = "windows_i686_gnu"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "180e6ccf01daf4c426b846dfc66db1fc518f074baa793aa7d9b9aaeffad6a3b6"
+
+[[package]]
- checksum = "51d5158a43cc43623c0729d1ad6647e62fa384a3d135fd15108d37c683461f64"
++name = "windows_i686_gnu"
++version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
- name = "windows_x86_64_gnu"
- version = "0.28.0"
++checksum = "fbeae19f6716841636c28d695375df17562ca208b2b7d0dc47635a50ae6c5de7"
+
+[[package]]
+name = "windows_i686_msvc"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2e7917148b2812d1eeafaeb22a97e4813dfa60a3f8f78ebe204bcc88f12f024"
+
+[[package]]
- checksum = "bc31f409f565611535130cfe7ee8e6655d3fa99c1c61013981e491921b5ce954"
++name = "windows_i686_msvc"
++version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
- name = "windows_x86_64_msvc"
- version = "0.28.0"
++checksum = "84c12f65daa39dd2babe6e442988fc329d6243fdce47d7d2d155b8d874862246"
+
+[[package]]
+name = "windows_x86_64_gnu"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4dcd171b8776c41b97521e5da127a2d86ad280114807d0b2ab1e462bc764d9e1"
+
+[[package]]
- checksum = "3f2b8c7cbd3bfdddd9ab98769f9746a7fad1bca236554cd032b78d768bc0e89f"
++name = "windows_x86_64_gnu"
++version = "0.42.0"
++source = "registry+https://github.com/rust-lang/crates.io-index"
++checksum = "bf7b1b21b5362cbc318f686150e5bcea75ecedc74dd157d874d754a2ca44b0ed"
++
++[[package]]
++name = "windows_x86_64_gnullvm"
++version = "0.42.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
++checksum = "09d525d2ba30eeb3297665bd434a54297e4170c7f1a44cad4ef58095b4cd2028"
+
+[[package]]
+name = "windows_x86_64_msvc"
+version = "0.36.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c811ca4a8c853ef420abd8592ba53ddbbac90410fab6903b3e79972a631f7680"
+
++[[package]]
++name = "windows_x86_64_msvc"
++version = "0.42.0"
++source = "registry+https://github.com/rust-lang/crates.io-index"
++checksum = "f40009d85759725a34da6d89a94e63d7bdc50a862acf0dbc7c8e488f1edcb6f5"
++
+[[package]]
+name = "write-json"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "06069a848f95fceae3e5e03c0ddc8cb78452b56654ee0c8e68f938cf790fb9e3"
+
+[[package]]
+name = "xflags"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cbf19f5031a1a812e96fede16f8161218883079946cea87619d3613db1efd268"
+dependencies = [
+ "xflags-macros",
+]
+
+[[package]]
+name = "xflags-macros"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2afbd7f2039bb6cad2dd45f0c5dff49c0d4e26118398768b7a605524d4251809"
+
+[[package]]
+name = "xshell"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6d47097dc5c85234b1e41851b3422dd6d19b3befdd35b4ae5ce386724aeca981"
+dependencies = [
+ "xshell-macros",
+]
+
+[[package]]
+name = "xshell-macros"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88301b56c26dd9bf5c43d858538f82d6f3f7764767defbc5d34e59459901c41a"
+
+[[package]]
+name = "xtask"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "flate2",
+ "write-json",
+ "xflags",
+ "xshell",
+]
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "base-db"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+salsa = "0.17.0-pre.2"
+rustc-hash = "1.1.0"
+
+syntax = { path = "../syntax", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+test-utils = { path = "../test-utils", version = "0.0.0" }
+vfs = { path = "../vfs", version = "0.0.0" }
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "cfg"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+rustc-hash = "1.1.0"
+
+tt = { path = "../tt", version = "0.0.0" }
+
+[dev-dependencies]
+mbe = { path = "../mbe" }
+syntax = { path = "../syntax" }
+expect-test = "1.4.0"
+oorandom = "11.1.3"
+# We depend on both individually instead of using `features = ["derive"]` to micro-optimize the
+# build graph: if the feature were enabled, syn would be built early on in the graph if `smol_str`
+# supports `arbitrary`. This way, we avoid feature unification.
+arbitrary = "1.1.7"
+derive_arbitrary = "1.1.6"
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "flycheck"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+crossbeam-channel = "0.5.5"
+tracing = "0.1.37"
+cargo_metadata = "0.15.0"
+rustc-hash = "1.1.0"
+serde = { version = "1.0.137", features = ["derive"] }
+serde_json = "1.0.86"
+jod-thread = "0.1.2"
++command-group = "1.0.8"
+
+toolchain = { path = "../toolchain", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
+paths = { path = "../paths", version = "0.0.0" }
--- /dev/null
- use stdx::{process::streaming_output, JodChild};
+//! Flycheck provides the functionality needed to run `cargo check` or
+//! another compatible command (e.g. clippy) in a background thread and provide
+//! LSP diagnostics based on the output of the command.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use std::{
+ fmt, io,
+ process::{ChildStderr, ChildStdout, Command, Stdio},
+ time::Duration,
+};
+
++use command_group::{CommandGroup, GroupChild};
+use crossbeam_channel::{never, select, unbounded, Receiver, Sender};
+use paths::AbsPathBuf;
+use rustc_hash::FxHashMap;
+use serde::Deserialize;
- target_triple: Option<String>,
++use stdx::process::streaming_output;
+
+pub use cargo_metadata::diagnostic::{
+ Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan,
+ DiagnosticSpanMacroExpansion,
+};
+
+#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]
+pub enum InvocationStrategy {
+ Once,
+ #[default]
+ PerWorkspace,
+}
+
+#[derive(Clone, Debug, Default, PartialEq, Eq)]
+pub enum InvocationLocation {
+ Root(AbsPathBuf),
+ #[default]
+ Workspace,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum FlycheckConfig {
+ CargoCommand {
+ command: String,
- target_triple,
++ target_triples: Vec<String>,
+ all_targets: bool,
+ no_default_features: bool,
+ all_features: bool,
+ features: Vec<String>,
+ extra_args: Vec<String>,
+ extra_env: FxHashMap<String, String>,
+ },
+ CustomCommand {
+ command: String,
+ args: Vec<String>,
+ extra_env: FxHashMap<String, String>,
+ invocation_strategy: InvocationStrategy,
+ invocation_location: InvocationLocation,
+ },
+}
+
+impl fmt::Display for FlycheckConfig {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ FlycheckConfig::CargoCommand { command, .. } => write!(f, "cargo {}", command),
+ FlycheckConfig::CustomCommand { command, args, .. } => {
+ write!(f, "{} {}", command, args.join(" "))
+ }
+ }
+ }
+}
+
+/// Flycheck wraps the shared state and communication machinery used for
+/// running `cargo check` (or other compatible command) and providing
+/// diagnostics based on the output.
+/// The spawned thread is shut down when this struct is dropped.
+#[derive(Debug)]
+pub struct FlycheckHandle {
+ // XXX: drop order is significant
+ sender: Sender<Restart>,
+ _thread: jod_thread::JoinHandle,
+ id: usize,
+}
+
+impl FlycheckHandle {
+ pub fn spawn(
+ id: usize,
+ sender: Box<dyn Fn(Message) + Send>,
+ config: FlycheckConfig,
+ workspace_root: AbsPathBuf,
+ ) -> FlycheckHandle {
+ let actor = FlycheckActor::new(id, sender, config, workspace_root);
+ let (sender, receiver) = unbounded::<Restart>();
+ let thread = jod_thread::Builder::new()
+ .name("Flycheck".to_owned())
+ .spawn(move || actor.run(receiver))
+ .expect("failed to spawn thread");
+ FlycheckHandle { id, sender, _thread: thread }
+ }
+
+ /// Schedule a re-start of the cargo check worker.
+ pub fn restart(&self) {
+ self.sender.send(Restart::Yes).unwrap();
+ }
+
+ /// Stop this cargo check worker.
+ pub fn cancel(&self) {
+ self.sender.send(Restart::No).unwrap();
+ }
+
+ pub fn id(&self) -> usize {
+ self.id
+ }
+}
+
+pub enum Message {
+ /// Request adding a diagnostic with fixes included to a file
+ AddDiagnostic { id: usize, workspace_root: AbsPathBuf, diagnostic: Diagnostic },
+
+ /// Request check progress notification to client
+ Progress {
+ /// Flycheck instance ID
+ id: usize,
+ progress: Progress,
+ },
+}
+
+impl fmt::Debug for Message {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Message::AddDiagnostic { id, workspace_root, diagnostic } => f
+ .debug_struct("AddDiagnostic")
+ .field("id", id)
+ .field("workspace_root", workspace_root)
+ .field("diagnostic_code", &diagnostic.code.as_ref().map(|it| &it.code))
+ .finish(),
+ Message::Progress { id, progress } => {
+ f.debug_struct("Progress").field("id", id).field("progress", progress).finish()
+ }
+ }
+ }
+}
+
+#[derive(Debug)]
+pub enum Progress {
+ DidStart,
+ DidCheckCrate(String),
+ DidFinish(io::Result<()>),
+ DidCancel,
+ DidFailToRestart(String),
+}
+
+enum Restart {
+ Yes,
+ No,
+}
+
+/// A [`FlycheckActor`] is a single check instance of a workspace.
+struct FlycheckActor {
+ /// The workspace id of this flycheck instance.
+ id: usize,
+ sender: Box<dyn Fn(Message) + Send>,
+ config: FlycheckConfig,
+ /// Either the workspace root of the workspace we are flychecking,
+ /// or the project root of the project.
+ root: AbsPathBuf,
+ /// CargoHandle exists to wrap around the communication needed to be able to
+ /// run `cargo check` without blocking. Currently the Rust standard library
+ /// doesn't provide a way to read sub-process output without blocking, so we
+ /// have to wrap sub-processes output handling in a thread and pass messages
+ /// back over a channel.
+ cargo_handle: Option<CargoHandle>,
+}
+
+enum Event {
+ Restart(Restart),
+ CheckEvent(Option<CargoMessage>),
+}
+
+impl FlycheckActor {
+ fn new(
+ id: usize,
+ sender: Box<dyn Fn(Message) + Send>,
+ config: FlycheckConfig,
+ workspace_root: AbsPathBuf,
+ ) -> FlycheckActor {
+ tracing::info!(%id, ?workspace_root, "Spawning flycheck");
+ FlycheckActor { id, sender, config, root: workspace_root, cargo_handle: None }
+ }
+
+ fn report_progress(&self, progress: Progress) {
+ self.send(Message::Progress { id: self.id, progress });
+ }
+
+ fn next_event(&self, inbox: &Receiver<Restart>) -> Option<Event> {
+ let check_chan = self.cargo_handle.as_ref().map(|cargo| &cargo.receiver);
+ if let Ok(msg) = inbox.try_recv() {
+ // give restarts a preference so check outputs don't block a restart or stop
+ return Some(Event::Restart(msg));
+ }
+ select! {
+ recv(inbox) -> msg => msg.ok().map(Event::Restart),
+ recv(check_chan.unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())),
+ }
+ }
+
+ fn run(mut self, inbox: Receiver<Restart>) {
+ 'event: while let Some(event) = self.next_event(&inbox) {
+ match event {
+ Event::Restart(Restart::No) => {
+ self.cancel_check_process();
+ }
+ Event::Restart(Restart::Yes) => {
+ // Cancel the previously spawned process
+ self.cancel_check_process();
+ while let Ok(restart) = inbox.recv_timeout(Duration::from_millis(50)) {
+ // restart chained with a stop, so just cancel
+ if let Restart::No = restart {
+ continue 'event;
+ }
+ }
+
+ let command = self.check_command();
+ tracing::debug!(?command, "will restart flycheck");
+ match CargoHandle::spawn(command) {
+ Ok(cargo_handle) => {
+ tracing::debug!(
+ command = ?self.check_command(),
+ "did restart flycheck"
+ );
+ self.cargo_handle = Some(cargo_handle);
+ self.report_progress(Progress::DidStart);
+ }
+ Err(error) => {
+ self.report_progress(Progress::DidFailToRestart(format!(
+ "Failed to run the following command: {:?} error={}",
+ self.check_command(),
+ error
+ )));
+ }
+ }
+ }
+ Event::CheckEvent(None) => {
+ tracing::debug!(flycheck_id = self.id, "flycheck finished");
+
+ // Watcher finished
+ let cargo_handle = self.cargo_handle.take().unwrap();
+ let res = cargo_handle.join();
+ if res.is_err() {
+ tracing::error!(
+ "Flycheck failed to run the following command: {:?}",
+ self.check_command()
+ );
+ }
+ self.report_progress(Progress::DidFinish(res));
+ }
+ Event::CheckEvent(Some(message)) => match message {
+ CargoMessage::CompilerArtifact(msg) => {
+ self.report_progress(Progress::DidCheckCrate(msg.target.name));
+ }
+
+ CargoMessage::Diagnostic(msg) => {
+ self.send(Message::AddDiagnostic {
+ id: self.id,
+ workspace_root: self.root.clone(),
+ diagnostic: msg,
+ });
+ }
+ },
+ }
+ }
+ // If we rerun the thread, we need to discard the previous check results first
+ self.cancel_check_process();
+ }
+
+ fn cancel_check_process(&mut self) {
+ if let Some(cargo_handle) = self.cargo_handle.take() {
+ tracing::debug!(
+ command = ?self.check_command(),
+ "did cancel flycheck"
+ );
+ cargo_handle.cancel();
+ self.report_progress(Progress::DidCancel);
+ }
+ }
+
+ fn check_command(&self) -> Command {
+ let (mut cmd, args) = match &self.config {
+ FlycheckConfig::CargoCommand {
+ command,
- if let Some(target) = target_triple {
++ target_triples,
+ no_default_features,
+ all_targets,
+ all_features,
+ extra_args,
+ features,
+ extra_env,
+ } => {
+ let mut cmd = Command::new(toolchain::cargo());
+ cmd.arg(command);
+ cmd.current_dir(&self.root);
+ cmd.args(&["--workspace", "--message-format=json", "--manifest-path"])
+ .arg(self.root.join("Cargo.toml").as_os_str());
+
- /// a read syscall dropping and therefor terminating the process is our best option.
++ for target in target_triples {
+ cmd.args(&["--target", target.as_str()]);
+ }
+ if *all_targets {
+ cmd.arg("--all-targets");
+ }
+ if *all_features {
+ cmd.arg("--all-features");
+ } else {
+ if *no_default_features {
+ cmd.arg("--no-default-features");
+ }
+ if !features.is_empty() {
+ cmd.arg("--features");
+ cmd.arg(features.join(" "));
+ }
+ }
+ cmd.envs(extra_env);
+ (cmd, extra_args)
+ }
+ FlycheckConfig::CustomCommand {
+ command,
+ args,
+ extra_env,
+ invocation_strategy,
+ invocation_location,
+ } => {
+ let mut cmd = Command::new(command);
+ cmd.envs(extra_env);
+
+ match invocation_location {
+ InvocationLocation::Workspace => {
+ match invocation_strategy {
+ InvocationStrategy::Once => {
+ cmd.current_dir(&self.root);
+ }
+ InvocationStrategy::PerWorkspace => {
+ // FIXME: cmd.current_dir(&affected_workspace);
+ cmd.current_dir(&self.root);
+ }
+ }
+ }
+ InvocationLocation::Root(root) => {
+ cmd.current_dir(root);
+ }
+ }
+
+ (cmd, args)
+ }
+ };
+
+ cmd.args(args);
+ cmd
+ }
+
+ fn send(&self, check_task: Message) {
+ (self.sender)(check_task);
+ }
+}
+
++struct JodChild(GroupChild);
++
+/// A handle to a cargo process used for fly-checking.
+struct CargoHandle {
+ /// The handle to the actual cargo process. As we cannot cancel directly from with
- let mut child = JodChild::spawn(command)?;
++ /// a read syscall dropping and therefore terminating the process is our best option.
+ child: JodChild,
+ thread: jod_thread::JoinHandle<io::Result<(bool, String)>>,
+ receiver: Receiver<CargoMessage>,
+}
+
+impl CargoHandle {
+ fn spawn(mut command: Command) -> std::io::Result<CargoHandle> {
+ command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null());
- let stdout = child.stdout.take().unwrap();
- let stderr = child.stderr.take().unwrap();
++ let mut child = command.group_spawn().map(JodChild)?;
+
- let _ = self.child.kill();
- let _ = self.child.wait();
++ let stdout = child.0.inner().stdout.take().unwrap();
++ let stderr = child.0.inner().stderr.take().unwrap();
+
+ let (sender, receiver) = unbounded();
+ let actor = CargoActor::new(sender, stdout, stderr);
+ let thread = jod_thread::Builder::new()
+ .name("CargoHandle".to_owned())
+ .spawn(move || actor.run())
+ .expect("failed to spawn thread");
+ Ok(CargoHandle { child, thread, receiver })
+ }
+
+ fn cancel(mut self) {
- let _ = self.child.kill();
- let exit_status = self.child.wait()?;
++ let _ = self.child.0.kill();
++ let _ = self.child.0.wait();
+ }
+
+ fn join(mut self) -> io::Result<()> {
++ let _ = self.child.0.kill();
++ let exit_status = self.child.0.wait()?;
+ let (read_at_least_one_message, error) = self.thread.join()?;
+ if read_at_least_one_message || exit_status.success() {
+ Ok(())
+ } else {
+ Err(io::Error::new(io::ErrorKind::Other, format!(
+ "Cargo watcher failed, the command produced no valid metadata (exit code: {:?}):\n{}",
+ exit_status, error
+ )))
+ }
+ }
+}
+
+struct CargoActor {
+ sender: Sender<CargoMessage>,
+ stdout: ChildStdout,
+ stderr: ChildStderr,
+}
+
+impl CargoActor {
+ fn new(sender: Sender<CargoMessage>, stdout: ChildStdout, stderr: ChildStderr) -> CargoActor {
+ CargoActor { sender, stdout, stderr }
+ }
+
+ fn run(self) -> io::Result<(bool, String)> {
+ // We manually read a line at a time, instead of using serde's
+ // stream deserializers, because the deserializer cannot recover
+ // from an error, resulting in it getting stuck, because we try to
+ // be resilient against failures.
+ //
+ // Because cargo only outputs one JSON object per line, we can
+ // simply skip a line if it doesn't parse, which just ignores any
+ // erroneous output.
+
+ let mut error = String::new();
+ let mut read_at_least_one_message = false;
+ let output = streaming_output(
+ self.stdout,
+ self.stderr,
+ &mut |line| {
+ read_at_least_one_message = true;
+
+ // Try to deserialize a message from Cargo or Rustc.
+ let mut deserializer = serde_json::Deserializer::from_str(line);
+ deserializer.disable_recursion_limit();
+ if let Ok(message) = JsonMessage::deserialize(&mut deserializer) {
+ match message {
+ // Skip certain kinds of messages to only spend time on what's useful
+ JsonMessage::Cargo(message) => match message {
+ cargo_metadata::Message::CompilerArtifact(artifact)
+ if !artifact.fresh =>
+ {
+ self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap();
+ }
+ cargo_metadata::Message::CompilerMessage(msg) => {
+ self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap();
+ }
+ _ => (),
+ },
+ JsonMessage::Rustc(message) => {
+ self.sender.send(CargoMessage::Diagnostic(message)).unwrap();
+ }
+ }
+ }
+ },
+ &mut |line| {
+ error.push_str(line);
+ error.push('\n');
+ },
+ );
+ match output {
+ Ok(_) => Ok((read_at_least_one_message, error)),
+ Err(e) => Err(io::Error::new(e.kind(), format!("{:?}: {}", e, error))),
+ }
+ }
+}
+
+enum CargoMessage {
+ CompilerArtifact(cargo_metadata::Artifact),
+ Diagnostic(Diagnostic),
+}
+
+#[derive(Deserialize)]
+#[serde(untagged)]
+enum JsonMessage {
+ Cargo(cargo_metadata::Message),
+ Rustc(Diagnostic),
+}
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "hir-def"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+anymap = "1.0.0-beta.2"
+arrayvec = "0.7.2"
+bitflags = "1.3.2"
+cov-mark = "2.0.0-pre.1"
+# We need to freeze the version of the crate, as the raw-api feature is considered unstable
+dashmap = { version = "=5.4.0", features = ["raw-api"] }
+drop_bomb = "0.1.5"
+either = "1.7.0"
+fst = { version = "0.4.7", default-features = false }
+hashbrown = { version = "0.12.1", default-features = false }
+indexmap = "1.9.1"
+itertools = "0.10.5"
+la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+once_cell = "1.15.0"
+rustc-hash = "1.1.0"
+smallvec = "1.10.0"
+tracing = "0.1.35"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+hir-expand = { path = "../hir-expand", version = "0.0.0" }
+mbe = { path = "../mbe", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+limit = { path = "../limit", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
+expect-test = "1.4.0"
--- /dev/null
- let mut collector =
- AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::TraitId(tr));
- collector.collect(&item_tree, tree_id.tree_id(), &tr_def.items);
- let (items, attribute_calls, diagnostics) = collector.finish();
-
+//! Contains basic data about various HIR declarations.
+
+use std::sync::Arc;
+
+use hir_expand::{name::Name, AstId, ExpandResult, HirFileId, InFile, MacroCallId, MacroDefKind};
+use smallvec::SmallVec;
+use syntax::ast;
+
+use crate::{
+ attr::Attrs,
+ body::{Expander, Mark},
+ db::DefDatabase,
+ intern::Interned,
+ item_tree::{self, AssocItem, FnFlags, ItemTree, ItemTreeId, ModItem, Param, TreeId},
+ nameres::{
+ attr_resolution::ResolvedAttr, diagnostics::DefDiagnostic, proc_macro::ProcMacroKind,
+ DefMap,
+ },
+ type_ref::{TraitRef, TypeBound, TypeRef},
+ visibility::RawVisibility,
+ AssocItemId, AstIdWithPath, ConstId, ConstLoc, FunctionId, FunctionLoc, HasModule, ImplId,
+ Intern, ItemContainerId, ItemLoc, Lookup, Macro2Id, MacroRulesId, ModuleId, ProcMacroId,
+ StaticId, TraitId, TypeAliasId, TypeAliasLoc,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct FunctionData {
+ pub name: Name,
+ pub params: Vec<(Option<Name>, Interned<TypeRef>)>,
+ pub ret_type: Interned<TypeRef>,
+ pub async_ret_type: Option<Interned<TypeRef>>,
+ pub attrs: Attrs,
+ pub visibility: RawVisibility,
+ pub abi: Option<Interned<str>>,
+ pub legacy_const_generics_indices: Box<[u32]>,
+ flags: FnFlags,
+}
+
+impl FunctionData {
+ pub(crate) fn fn_data_query(db: &dyn DefDatabase, func: FunctionId) -> Arc<FunctionData> {
+ let loc = func.lookup(db);
+ let krate = loc.container.module(db).krate;
+ let crate_graph = db.crate_graph();
+ let cfg_options = &crate_graph[krate].cfg_options;
+ let item_tree = loc.id.item_tree(db);
+ let func = &item_tree[loc.id.value];
+ let visibility = if let ItemContainerId::TraitId(trait_id) = loc.container {
+ db.trait_data(trait_id).visibility.clone()
+ } else {
+ item_tree[func.visibility].clone()
+ };
+
+ let enabled_params = func
+ .params
+ .clone()
 + .filter(|&param| item_tree.attrs(db, krate, param.into()).is_cfg_enabled(cfg_options));
+
+ // If last cfg-enabled param is a `...` param, it's a varargs function.
+ let is_varargs = enabled_params
+ .clone()
+ .next_back()
+ .map_or(false, |param| matches!(item_tree[param], Param::Varargs));
+
+ let mut flags = func.flags;
+ if is_varargs {
+ flags |= FnFlags::IS_VARARGS;
+ }
+ if flags.contains(FnFlags::HAS_SELF_PARAM) {
+ // If there's a self param in the syntax, but it is cfg'd out, remove the flag.
+ let is_cfgd_out = match func.params.clone().next() {
+ Some(param) => {
+ !item_tree.attrs(db, krate, param.into()).is_cfg_enabled(cfg_options)
+ }
+ None => {
+ stdx::never!("fn HAS_SELF_PARAM but no parameters allocated");
+ true
+ }
+ };
+ if is_cfgd_out {
+ cov_mark::hit!(cfgd_out_self_param);
+ flags.remove(FnFlags::HAS_SELF_PARAM);
+ }
+ }
+
+ let legacy_const_generics_indices = item_tree
+ .attrs(db, krate, ModItem::from(loc.id.value).into())
+ .by_key("rustc_legacy_const_generics")
+ .tt_values()
+ .next()
+ .map(parse_rustc_legacy_const_generics)
+ .unwrap_or_default();
+
+ Arc::new(FunctionData {
+ name: func.name.clone(),
+ params: enabled_params
+ .clone()
+ .filter_map(|id| match &item_tree[id] {
+ Param::Normal(name, ty) => Some((name.clone(), ty.clone())),
+ Param::Varargs => None,
+ })
+ .collect(),
+ ret_type: func.ret_type.clone(),
+ async_ret_type: func.async_ret_type.clone(),
+ attrs: item_tree.attrs(db, krate, ModItem::from(loc.id.value).into()),
+ visibility,
+ abi: func.abi.clone(),
+ legacy_const_generics_indices,
+ flags,
+ })
+ }
+
+ pub fn has_body(&self) -> bool {
+ self.flags.contains(FnFlags::HAS_BODY)
+ }
+
+ /// True if the first param is `self`. This is relevant to decide whether this
+ /// can be called as a method.
+ pub fn has_self_param(&self) -> bool {
+ self.flags.contains(FnFlags::HAS_SELF_PARAM)
+ }
+
+ pub fn has_default_kw(&self) -> bool {
+ self.flags.contains(FnFlags::HAS_DEFAULT_KW)
+ }
+
+ pub fn has_const_kw(&self) -> bool {
+ self.flags.contains(FnFlags::HAS_CONST_KW)
+ }
+
+ pub fn has_async_kw(&self) -> bool {
+ self.flags.contains(FnFlags::HAS_ASYNC_KW)
+ }
+
+ pub fn has_unsafe_kw(&self) -> bool {
+ self.flags.contains(FnFlags::HAS_UNSAFE_KW)
+ }
+
+ pub fn is_varargs(&self) -> bool {
+ self.flags.contains(FnFlags::IS_VARARGS)
+ }
+}
+
+fn parse_rustc_legacy_const_generics(tt: &tt::Subtree) -> Box<[u32]> {
+ let mut indices = Vec::new();
+ for args in tt.token_trees.chunks(2) {
+ match &args[0] {
+ tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => match lit.text.parse() {
+ Ok(index) => indices.push(index),
+ Err(_) => break,
+ },
+ _ => break,
+ }
+
+ if let Some(comma) = args.get(1) {
+ match comma {
+ tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) if punct.char == ',' => {}
+ _ => break,
+ }
+ }
+ }
+
+ indices.into_boxed_slice()
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct TypeAliasData {
+ pub name: Name,
+ pub type_ref: Option<Interned<TypeRef>>,
+ pub visibility: RawVisibility,
+ pub is_extern: bool,
+ /// Bounds restricting the type alias itself (eg. `type Ty: Bound;` in a trait or impl).
+ pub bounds: Vec<Interned<TypeBound>>,
+}
+
+impl TypeAliasData {
+ pub(crate) fn type_alias_data_query(
+ db: &dyn DefDatabase,
+ typ: TypeAliasId,
+ ) -> Arc<TypeAliasData> {
+ let loc = typ.lookup(db);
+ let item_tree = loc.id.item_tree(db);
+ let typ = &item_tree[loc.id.value];
+ let visibility = if let ItemContainerId::TraitId(trait_id) = loc.container {
+ db.trait_data(trait_id).visibility.clone()
+ } else {
+ item_tree[typ.visibility].clone()
+ };
+
+ Arc::new(TypeAliasData {
+ name: typ.name.clone(),
+ type_ref: typ.type_ref.clone(),
+ visibility,
+ is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)),
+ bounds: typ.bounds.to_vec(),
+ })
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct TraitData {
+ pub name: Name,
+ pub items: Vec<(Name, AssocItemId)>,
+ pub is_auto: bool,
+ pub is_unsafe: bool,
+ pub visibility: RawVisibility,
+ /// Whether the trait has `#[rust_skip_array_during_method_dispatch]`. `hir_ty` will ignore
+ /// method calls to this trait's methods when the receiver is an array and the crate edition is
+ /// 2015 or 2018.
+ pub skip_array_during_method_dispatch: bool,
+ // box it as the vec is usually empty anyways
+ pub attribute_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
+}
+
+impl TraitData {
+ pub(crate) fn trait_data_query(db: &dyn DefDatabase, tr: TraitId) -> Arc<TraitData> {
+ db.trait_data_with_diagnostics(tr).0
+ }
+
+ pub(crate) fn trait_data_with_diagnostics_query(
+ db: &dyn DefDatabase,
+ tr: TraitId,
+ ) -> (Arc<TraitData>, Arc<[DefDiagnostic]>) {
+ let tr_loc @ ItemLoc { container: module_id, id: tree_id } = tr.lookup(db);
+ let item_tree = tree_id.item_tree(db);
+ let tr_def = &item_tree[tree_id.value];
+ let _cx = stdx::panic_context::enter(format!(
+ "trait_data_query({:?} -> {:?} -> {:?})",
+ tr, tr_loc, tr_def
+ ));
+ let name = tr_def.name.clone();
+ let is_auto = tr_def.is_auto;
+ let is_unsafe = tr_def.is_unsafe;
+ let visibility = item_tree[tr_def.visibility].clone();
+ let skip_array_during_method_dispatch = item_tree
+ .attrs(db, module_id.krate(), ModItem::from(tree_id.value).into())
+ .by_key("rustc_skip_array_during_method_dispatch")
+ .exists();
+
++ let (items, attribute_calls, diagnostics) = match &tr_def.items {
++ Some(items) => {
++ let mut collector = AssocItemCollector::new(
++ db,
++ module_id,
++ tree_id.file_id(),
++ ItemContainerId::TraitId(tr),
++ );
++ collector.collect(&item_tree, tree_id.tree_id(), items);
++ collector.finish()
++ }
++ None => Default::default(),
++ };
+ (
+ Arc::new(TraitData {
+ name,
+ attribute_calls,
+ items,
+ is_auto,
+ is_unsafe,
+ visibility,
+ skip_array_during_method_dispatch,
+ }),
+ diagnostics.into(),
+ )
+ }
+
+ pub fn associated_types(&self) -> impl Iterator<Item = TypeAliasId> + '_ {
+ self.items.iter().filter_map(|(_name, item)| match item {
+ AssocItemId::TypeAliasId(t) => Some(*t),
+ _ => None,
+ })
+ }
+
+ pub fn associated_type_by_name(&self, name: &Name) -> Option<TypeAliasId> {
+ self.items.iter().find_map(|(item_name, item)| match item {
+ AssocItemId::TypeAliasId(t) if item_name == name => Some(*t),
+ _ => None,
+ })
+ }
+
+ pub fn method_by_name(&self, name: &Name) -> Option<FunctionId> {
+ self.items.iter().find_map(|(item_name, item)| match item {
+ AssocItemId::FunctionId(t) if item_name == name => Some(*t),
+ _ => None,
+ })
+ }
+
+ pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
+ self.attribute_calls.iter().flat_map(|it| it.iter()).copied()
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct ImplData {
+ pub target_trait: Option<Interned<TraitRef>>,
+ pub self_ty: Interned<TypeRef>,
+ pub items: Vec<AssocItemId>,
+ pub is_negative: bool,
+ // box it as the vec is usually empty anyways
+ pub attribute_calls: Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
+}
+
+impl ImplData {
+ pub(crate) fn impl_data_query(db: &dyn DefDatabase, id: ImplId) -> Arc<ImplData> {
+ db.impl_data_with_diagnostics(id).0
+ }
+
+ pub(crate) fn impl_data_with_diagnostics_query(
+ db: &dyn DefDatabase,
+ id: ImplId,
+ ) -> (Arc<ImplData>, Arc<[DefDiagnostic]>) {
+ let _p = profile::span("impl_data_with_diagnostics_query");
+ let ItemLoc { container: module_id, id: tree_id } = id.lookup(db);
+
+ let item_tree = tree_id.item_tree(db);
+ let impl_def = &item_tree[tree_id.value];
+ let target_trait = impl_def.target_trait.clone();
+ let self_ty = impl_def.self_ty.clone();
+ let is_negative = impl_def.is_negative;
+
+ let mut collector =
+ AssocItemCollector::new(db, module_id, tree_id.file_id(), ItemContainerId::ImplId(id));
+ collector.collect(&item_tree, tree_id.tree_id(), &impl_def.items);
+
+ let (items, attribute_calls, diagnostics) = collector.finish();
+ let items = items.into_iter().map(|(_, item)| item).collect();
+
+ (
+ Arc::new(ImplData { target_trait, self_ty, items, is_negative, attribute_calls }),
+ diagnostics.into(),
+ )
+ }
+
+ pub fn attribute_calls(&self) -> impl Iterator<Item = (AstId<ast::Item>, MacroCallId)> + '_ {
+ self.attribute_calls.iter().flat_map(|it| it.iter()).copied()
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Macro2Data {
+ pub name: Name,
+ pub visibility: RawVisibility,
+}
+
+impl Macro2Data {
+ pub(crate) fn macro2_data_query(db: &dyn DefDatabase, makro: Macro2Id) -> Arc<Macro2Data> {
+ let loc = makro.lookup(db);
+ let item_tree = loc.id.item_tree(db);
+ let makro = &item_tree[loc.id.value];
+
+ Arc::new(Macro2Data {
+ name: makro.name.clone(),
+ visibility: item_tree[makro.visibility].clone(),
+ })
+ }
+}
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct MacroRulesData {
+ pub name: Name,
+ pub macro_export: bool,
+}
+
+impl MacroRulesData {
+ pub(crate) fn macro_rules_data_query(
+ db: &dyn DefDatabase,
+ makro: MacroRulesId,
+ ) -> Arc<MacroRulesData> {
+ let loc = makro.lookup(db);
+ let item_tree = loc.id.item_tree(db);
+ let makro = &item_tree[loc.id.value];
+
+ let macro_export = item_tree
+ .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into())
+ .by_key("macro_export")
+ .exists();
+
+ Arc::new(MacroRulesData { name: makro.name.clone(), macro_export })
+ }
+}
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct ProcMacroData {
+ pub name: Name,
+ /// Derive helpers, if this is a derive
+ pub helpers: Option<Box<[Name]>>,
+}
+
+impl ProcMacroData {
+ pub(crate) fn proc_macro_data_query(
+ db: &dyn DefDatabase,
+ makro: ProcMacroId,
+ ) -> Arc<ProcMacroData> {
+ let loc = makro.lookup(db);
+ let item_tree = loc.id.item_tree(db);
+ let makro = &item_tree[loc.id.value];
+
+ let (name, helpers) = if let Some(def) = item_tree
+ .attrs(db, loc.container.krate(), ModItem::from(loc.id.value).into())
+ .parse_proc_macro_decl(&makro.name)
+ {
+ (
+ def.name,
+ match def.kind {
+ ProcMacroKind::CustomDerive { helpers } => Some(helpers),
+ ProcMacroKind::FnLike | ProcMacroKind::Attr => None,
+ },
+ )
+ } else {
+ // eeeh...
+ stdx::never!("proc macro declaration is not a proc macro");
+ (makro.name.clone(), None)
+ };
+ Arc::new(ProcMacroData { name, helpers })
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct ConstData {
+ /// `None` for `const _: () = ();`
+ pub name: Option<Name>,
+ pub type_ref: Interned<TypeRef>,
+ pub visibility: RawVisibility,
+}
+
+impl ConstData {
+ pub(crate) fn const_data_query(db: &dyn DefDatabase, konst: ConstId) -> Arc<ConstData> {
+ let loc = konst.lookup(db);
+ let item_tree = loc.id.item_tree(db);
+ let konst = &item_tree[loc.id.value];
+ let visibility = if let ItemContainerId::TraitId(trait_id) = loc.container {
+ db.trait_data(trait_id).visibility.clone()
+ } else {
+ item_tree[konst.visibility].clone()
+ };
+
+ Arc::new(ConstData {
+ name: konst.name.clone(),
+ type_ref: konst.type_ref.clone(),
+ visibility,
+ })
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct StaticData {
+ pub name: Name,
+ pub type_ref: Interned<TypeRef>,
+ pub visibility: RawVisibility,
+ pub mutable: bool,
+ pub is_extern: bool,
+}
+
+impl StaticData {
+ pub(crate) fn static_data_query(db: &dyn DefDatabase, konst: StaticId) -> Arc<StaticData> {
+ let loc = konst.lookup(db);
+ let item_tree = loc.id.item_tree(db);
+ let statik = &item_tree[loc.id.value];
+
+ Arc::new(StaticData {
+ name: statik.name.clone(),
+ type_ref: statik.type_ref.clone(),
+ visibility: item_tree[statik.visibility].clone(),
+ mutable: statik.mutable,
+ is_extern: matches!(loc.container, ItemContainerId::ExternBlockId(_)),
+ })
+ }
+}
+
+struct AssocItemCollector<'a> {
+ db: &'a dyn DefDatabase,
+ module_id: ModuleId,
+ def_map: Arc<DefMap>,
+ inactive_diagnostics: Vec<DefDiagnostic>,
+ container: ItemContainerId,
+ expander: Expander,
+
+ items: Vec<(Name, AssocItemId)>,
+ attr_calls: Vec<(AstId<ast::Item>, MacroCallId)>,
+}
+
+impl<'a> AssocItemCollector<'a> {
+ fn new(
+ db: &'a dyn DefDatabase,
+ module_id: ModuleId,
+ file_id: HirFileId,
+ container: ItemContainerId,
+ ) -> Self {
+ Self {
+ db,
+ module_id,
+ def_map: module_id.def_map(db),
+ container,
+ expander: Expander::new(db, file_id, module_id),
+ items: Vec::new(),
+ attr_calls: Vec::new(),
+ inactive_diagnostics: Vec::new(),
+ }
+ }
+
+ fn finish(
+ self,
+ ) -> (
+ Vec<(Name, AssocItemId)>,
+ Option<Box<Vec<(AstId<ast::Item>, MacroCallId)>>>,
+ Vec<DefDiagnostic>,
+ ) {
+ (
+ self.items,
+ if self.attr_calls.is_empty() { None } else { Some(Box::new(self.attr_calls)) },
+ self.inactive_diagnostics,
+ )
+ }
+
+ // FIXME: proc-macro diagnostics
+ fn collect(&mut self, item_tree: &ItemTree, tree_id: TreeId, assoc_items: &[AssocItem]) {
+ let container = self.container;
+ self.items.reserve(assoc_items.len());
+
+ 'items: for &item in assoc_items {
+ let attrs = item_tree.attrs(self.db, self.module_id.krate, ModItem::from(item).into());
+ if !attrs.is_cfg_enabled(self.expander.cfg_options()) {
+ self.inactive_diagnostics.push(DefDiagnostic::unconfigured_code(
+ self.module_id.local_id,
+ InFile::new(self.expander.current_file_id(), item.ast_id(&item_tree).upcast()),
+ attrs.cfg().unwrap(),
+ self.expander.cfg_options().clone(),
+ ));
+ continue;
+ }
+
+ 'attrs: for attr in &*attrs {
+ let ast_id =
+ AstId::new(self.expander.current_file_id(), item.ast_id(&item_tree).upcast());
+ let ast_id_with_path = AstIdWithPath { path: (*attr.path).clone(), ast_id };
+
+ if let Ok(ResolvedAttr::Macro(call_id)) = self.def_map.resolve_attr_macro(
+ self.db,
+ self.module_id.local_id,
+ ast_id_with_path,
+ attr,
+ ) {
+ self.attr_calls.push((ast_id, call_id));
+ // If proc attribute macro expansion is disabled, skip expanding it here
+ if !self.db.enable_proc_attr_macros() {
+ continue 'attrs;
+ }
+ let loc = self.db.lookup_intern_macro_call(call_id);
+ if let MacroDefKind::ProcMacro(exp, ..) = loc.def.kind {
+ // If there's no expander for the proc macro (e.g. the
+ // proc macro is ignored, or building the proc macro
+ // crate failed), skip expansion like we would if it was
+ // disabled. This is analogous to the handling in
+ // `DefCollector::collect_macros`.
+ if exp.is_dummy() {
+ continue 'attrs;
+ }
+ }
+ match self.expander.enter_expand_id::<ast::MacroItems>(self.db, call_id) {
+ ExpandResult { value: Some((mark, _)), .. } => {
+ self.collect_macro_items(mark);
+ continue 'items;
+ }
+ ExpandResult { .. } => {}
+ }
+ }
+ }
+
+ match item {
+ AssocItem::Function(id) => {
+ let item = &item_tree[id];
+
+ let def =
+ FunctionLoc { container, id: ItemTreeId::new(tree_id, id) }.intern(self.db);
+ self.items.push((item.name.clone(), def.into()));
+ }
+ AssocItem::Const(id) => {
+ let item = &item_tree[id];
+
+ let name = match item.name.clone() {
+ Some(name) => name,
+ None => continue,
+ };
+ let def =
+ ConstLoc { container, id: ItemTreeId::new(tree_id, id) }.intern(self.db);
+ self.items.push((name, def.into()));
+ }
+ AssocItem::TypeAlias(id) => {
+ let item = &item_tree[id];
+
+ let def = TypeAliasLoc { container, id: ItemTreeId::new(tree_id, id) }
+ .intern(self.db);
+ self.items.push((item.name.clone(), def.into()));
+ }
+ AssocItem::MacroCall(call) => {
+ if let Some(root) = self.db.parse_or_expand(self.expander.current_file_id()) {
+ let call = &item_tree[call];
+
+ let ast_id_map = self.db.ast_id_map(self.expander.current_file_id());
+ let call = ast_id_map.get(call.ast_id).to_node(&root);
+ let _cx = stdx::panic_context::enter(format!(
+ "collect_items MacroCall: {}",
+ call
+ ));
+ let res = self.expander.enter_expand::<ast::MacroItems>(self.db, call);
+
+ if let Ok(ExpandResult { value: Some((mark, _)), .. }) = res {
+ self.collect_macro_items(mark);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ fn collect_macro_items(&mut self, mark: Mark) {
+ let tree_id = item_tree::TreeId::new(self.expander.current_file_id(), None);
+ let item_tree = tree_id.item_tree(self.db);
+ let iter: SmallVec<[_; 2]> =
+ item_tree.top_level_items().iter().filter_map(ModItem::as_assoc_item).collect();
+
+ self.collect(&item_tree, tree_id, &iter);
+
+ self.expander.exit(self.db, mark);
+ }
+}
--- /dev/null
- pub items: Box<[AssocItem]>,
+//! A simplified AST that only contains items.
+//!
+//! This is the primary IR used throughout `hir_def`. It is the input to the name resolution
+//! algorithm, as well as to the queries defined in `adt.rs`, `data.rs`, and most things in
+//! `attr.rs`.
+//!
+//! `ItemTree`s are built per `HirFileId`, from the syntax tree of the parsed file. This means that
+//! they are crate-independent: they don't know which `#[cfg]`s are active or which module they
+//! belong to, since those concepts don't exist at this level (a single `ItemTree` might be part of
+//! multiple crates, or might be included into the same crate twice via `#[path]`).
+//!
+//! One important purpose of this layer is to provide an "invalidation barrier" for incremental
+//! computations: when typing inside an item body, the `ItemTree` of the modified file is typically
+//! unaffected, so we don't have to recompute name resolution results or item data (see `data.rs`).
+//!
+//! The `ItemTree` for the currently open file can be displayed by using the VS Code command
+//! "rust-analyzer: Debug ItemTree".
+//!
+//! Compared to rustc's architecture, `ItemTree` has properties from both rustc's AST and HIR: many
+//! syntax-level Rust features are already desugared to simpler forms in the `ItemTree`, but name
+//! resolution has not yet been performed. `ItemTree`s are per-file, while rustc's AST and HIR are
+//! per-crate, because we are interested in incrementally computing it.
+//!
+//! The representation of items in the `ItemTree` should generally mirror the surface syntax: it is
+//! usually a bad idea to desugar a syntax-level construct to something that is structurally
+//! different here. Name resolution needs to be able to process attributes and expand macros
+//! (including attribute macros), and having a 1-to-1 mapping between syntax and the `ItemTree`
+//! avoids introducing subtle bugs.
+//!
+//! In general, any item in the `ItemTree` stores its `AstId`, which allows mapping it back to its
+//! surface syntax.
+
+mod lower;
+mod pretty;
+#[cfg(test)]
+mod tests;
+
+use std::{
+ fmt::{self, Debug},
+ hash::{Hash, Hasher},
+ marker::PhantomData,
+ ops::Index,
+ sync::Arc,
+};
+
+use ast::{AstNode, HasName, StructKind};
+use base_db::CrateId;
+use either::Either;
+use hir_expand::{
+ ast_id_map::FileAstId,
+ hygiene::Hygiene,
+ name::{name, AsName, Name},
+ ExpandTo, HirFileId, InFile,
+};
+use la_arena::{Arena, Idx, IdxRange, RawIdx};
+use profile::Count;
+use rustc_hash::FxHashMap;
+use smallvec::SmallVec;
+use stdx::never;
+use syntax::{ast, match_ast, SyntaxKind};
+
+use crate::{
+ attr::{Attrs, RawAttrs},
+ db::DefDatabase,
+ generics::GenericParams,
+ intern::Interned,
+ path::{path, AssociatedTypeBinding, GenericArgs, ImportAlias, ModPath, Path, PathKind},
+ type_ref::{Mutability, TraitRef, TypeBound, TypeRef},
+ visibility::RawVisibility,
+ BlockId,
+};
+
+#[derive(Copy, Clone, Eq, PartialEq)]
+pub struct RawVisibilityId(u32);
+
+impl RawVisibilityId {
+ pub const PUB: Self = RawVisibilityId(u32::max_value());
+ pub const PRIV: Self = RawVisibilityId(u32::max_value() - 1);
+ pub const PUB_CRATE: Self = RawVisibilityId(u32::max_value() - 2);
+}
+
+impl fmt::Debug for RawVisibilityId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut f = f.debug_tuple("RawVisibilityId");
+ match *self {
+ Self::PUB => f.field(&"pub"),
+ Self::PRIV => f.field(&"pub(self)"),
+ Self::PUB_CRATE => f.field(&"pub(crate)"),
+ _ => f.field(&self.0),
+ };
+ f.finish()
+ }
+}
+
+/// The item tree of a source file.
+#[derive(Debug, Default, Eq, PartialEq)]
+pub struct ItemTree {
+ _c: Count<Self>,
+
+ top_level: SmallVec<[ModItem; 1]>,
+ attrs: FxHashMap<AttrOwner, RawAttrs>,
+
+ data: Option<Box<ItemTreeData>>,
+}
+
+impl ItemTree {
+ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
+ let _p = profile::span("file_item_tree_query").detail(|| format!("{:?}", file_id));
+ let syntax = match db.parse_or_expand(file_id) {
+ Some(node) => node,
+ None => return Default::default(),
+ };
+ if never!(syntax.kind() == SyntaxKind::ERROR) {
+ // FIXME: not 100% sure why these crop up, but return an empty tree to avoid a panic
+ return Default::default();
+ }
+
+ let ctx = lower::Ctx::new(db, file_id);
+ let mut top_attrs = None;
+ let mut item_tree = match_ast! {
+ match syntax {
+ ast::SourceFile(file) => {
+ top_attrs = Some(RawAttrs::new(db, &file, ctx.hygiene()));
+ ctx.lower_module_items(&file)
+ },
+ ast::MacroItems(items) => {
+ ctx.lower_module_items(&items)
+ },
+ ast::MacroStmts(stmts) => {
+ // The produced statements can include items, which should be added as top-level
+ // items.
+ ctx.lower_macro_stmts(stmts)
+ },
+ _ => {
+ panic!("cannot create item tree from {:?} {}", syntax, syntax);
+ },
+ }
+ };
+
+ if let Some(attrs) = top_attrs {
+ item_tree.attrs.insert(AttrOwner::TopLevel, attrs);
+ }
+ item_tree.shrink_to_fit();
+ Arc::new(item_tree)
+ }
+
+ /// Returns an iterator over all items located at the top level of the `HirFileId` this
+ /// `ItemTree` was created from.
+ pub fn top_level_items(&self) -> &[ModItem] {
+ &self.top_level
+ }
+
+ /// Returns the inner attributes of the source file.
+ pub fn top_level_attrs(&self, db: &dyn DefDatabase, krate: CrateId) -> Attrs {
+ self.attrs.get(&AttrOwner::TopLevel).unwrap_or(&RawAttrs::EMPTY).clone().filter(db, krate)
+ }
+
+ pub(crate) fn raw_attrs(&self, of: AttrOwner) -> &RawAttrs {
+ self.attrs.get(&of).unwrap_or(&RawAttrs::EMPTY)
+ }
+
+ pub(crate) fn attrs(&self, db: &dyn DefDatabase, krate: CrateId, of: AttrOwner) -> Attrs {
+ self.raw_attrs(of).clone().filter(db, krate)
+ }
+
+ pub fn pretty_print(&self) -> String {
+ pretty::print_item_tree(self)
+ }
+
+ fn data(&self) -> &ItemTreeData {
+ self.data.as_ref().expect("attempted to access data of empty ItemTree")
+ }
+
+ fn data_mut(&mut self) -> &mut ItemTreeData {
+ self.data.get_or_insert_with(Box::default)
+ }
+
+ fn block_item_tree(db: &dyn DefDatabase, block: BlockId) -> Arc<ItemTree> {
+ let loc = db.lookup_intern_block(block);
+ let block = loc.ast_id.to_node(db.upcast());
+ let ctx = lower::Ctx::new(db, loc.ast_id.file_id);
+ Arc::new(ctx.lower_block(&block))
+ }
+
+ fn shrink_to_fit(&mut self) {
+ if let Some(data) = &mut self.data {
+ let ItemTreeData {
+ imports,
+ extern_crates,
+ extern_blocks,
+ functions,
+ params,
+ structs,
+ fields,
+ unions,
+ enums,
+ variants,
+ consts,
+ statics,
+ traits,
+ impls,
+ type_aliases,
+ mods,
+ macro_calls,
+ macro_rules,
+ macro_defs,
+ vis,
+ } = &mut **data;
+
+ imports.shrink_to_fit();
+ extern_crates.shrink_to_fit();
+ extern_blocks.shrink_to_fit();
+ functions.shrink_to_fit();
+ params.shrink_to_fit();
+ structs.shrink_to_fit();
+ fields.shrink_to_fit();
+ unions.shrink_to_fit();
+ enums.shrink_to_fit();
+ variants.shrink_to_fit();
+ consts.shrink_to_fit();
+ statics.shrink_to_fit();
+ traits.shrink_to_fit();
+ impls.shrink_to_fit();
+ type_aliases.shrink_to_fit();
+ mods.shrink_to_fit();
+ macro_calls.shrink_to_fit();
+ macro_rules.shrink_to_fit();
+ macro_defs.shrink_to_fit();
+
+ vis.arena.shrink_to_fit();
+ }
+ }
+}
+
+#[derive(Default, Debug, Eq, PartialEq)]
+struct ItemVisibilities {
+ arena: Arena<RawVisibility>,
+}
+
+impl ItemVisibilities {
+ fn alloc(&mut self, vis: RawVisibility) -> RawVisibilityId {
+ match &vis {
+ RawVisibility::Public => RawVisibilityId::PUB,
+ RawVisibility::Module(path) if path.segments().is_empty() => match &path.kind {
+ PathKind::Super(0) => RawVisibilityId::PRIV,
+ PathKind::Crate => RawVisibilityId::PUB_CRATE,
+ _ => RawVisibilityId(self.arena.alloc(vis).into_raw().into()),
+ },
+ _ => RawVisibilityId(self.arena.alloc(vis).into_raw().into()),
+ }
+ }
+}
+
+static VIS_PUB: RawVisibility = RawVisibility::Public;
+static VIS_PRIV: RawVisibility = RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)));
+static VIS_PUB_CRATE: RawVisibility = RawVisibility::Module(ModPath::from_kind(PathKind::Crate));
+
+#[derive(Default, Debug, Eq, PartialEq)]
+struct ItemTreeData {
+ imports: Arena<Import>,
+ extern_crates: Arena<ExternCrate>,
+ extern_blocks: Arena<ExternBlock>,
+ functions: Arena<Function>,
+ params: Arena<Param>,
+ structs: Arena<Struct>,
+ fields: Arena<Field>,
+ unions: Arena<Union>,
+ enums: Arena<Enum>,
+ variants: Arena<Variant>,
+ consts: Arena<Const>,
+ statics: Arena<Static>,
+ traits: Arena<Trait>,
+ impls: Arena<Impl>,
+ type_aliases: Arena<TypeAlias>,
+ mods: Arena<Mod>,
+ macro_calls: Arena<MacroCall>,
+ macro_rules: Arena<MacroRules>,
+ macro_defs: Arena<MacroDef>,
+
+ vis: ItemVisibilities,
+}
+
+#[derive(Debug, Eq, PartialEq, Hash)]
+pub enum AttrOwner {
+ /// Attributes on an item.
+ ModItem(ModItem),
+ /// Inner attributes of the source file.
+ TopLevel,
+
+ Variant(Idx<Variant>),
+ Field(Idx<Field>),
+ Param(Idx<Param>),
+}
+
+macro_rules! from_attrs {
+ ( $( $var:ident($t:ty) ),+ ) => {
+ $(
+ impl From<$t> for AttrOwner {
+ fn from(t: $t) -> AttrOwner {
+ AttrOwner::$var(t)
+ }
+ }
+ )+
+ };
+}
+
+from_attrs!(ModItem(ModItem), Variant(Idx<Variant>), Field(Idx<Field>), Param(Idx<Param>));
+
+/// Trait implemented by all item nodes in the item tree.
+pub trait ItemTreeNode: Clone {
+ type Source: AstNode + Into<ast::Item>;
+
+ fn ast_id(&self) -> FileAstId<Self::Source>;
+
+ /// Looks up an instance of `Self` in an item tree.
+ fn lookup(tree: &ItemTree, index: Idx<Self>) -> &Self;
+
+ /// Downcasts a `ModItem` to a `FileItemTreeId` specific to this type.
+ fn id_from_mod_item(mod_item: ModItem) -> Option<FileItemTreeId<Self>>;
+
+ /// Upcasts a `FileItemTreeId` to a generic `ModItem`.
+ fn id_to_mod_item(id: FileItemTreeId<Self>) -> ModItem;
+}
+
+pub struct FileItemTreeId<N: ItemTreeNode> {
+ index: Idx<N>,
+ _p: PhantomData<N>,
+}
+
+impl<N: ItemTreeNode> Clone for FileItemTreeId<N> {
+ fn clone(&self) -> Self {
+ Self { index: self.index, _p: PhantomData }
+ }
+}
+impl<N: ItemTreeNode> Copy for FileItemTreeId<N> {}
+
+impl<N: ItemTreeNode> PartialEq for FileItemTreeId<N> {
+ fn eq(&self, other: &FileItemTreeId<N>) -> bool {
+ self.index == other.index
+ }
+}
+impl<N: ItemTreeNode> Eq for FileItemTreeId<N> {}
+
+impl<N: ItemTreeNode> Hash for FileItemTreeId<N> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.index.hash(state)
+ }
+}
+
+impl<N: ItemTreeNode> fmt::Debug for FileItemTreeId<N> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.index.fmt(f)
+ }
+}
+
+/// Identifies a particular [`ItemTree`].
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
+pub struct TreeId {
+ file: HirFileId,
+ block: Option<BlockId>,
+}
+
+impl TreeId {
+ pub(crate) fn new(file: HirFileId, block: Option<BlockId>) -> Self {
+ Self { file, block }
+ }
+
+ pub(crate) fn item_tree(&self, db: &dyn DefDatabase) -> Arc<ItemTree> {
+ match self.block {
+ Some(block) => ItemTree::block_item_tree(db, block),
+ None => db.file_item_tree(self.file),
+ }
+ }
+
+ pub(crate) fn file_id(self) -> HirFileId {
+ self.file
+ }
+
+ pub(crate) fn is_block(self) -> bool {
+ self.block.is_some()
+ }
+}
+
+#[derive(Debug)]
+pub struct ItemTreeId<N: ItemTreeNode> {
+ tree: TreeId,
+ pub value: FileItemTreeId<N>,
+}
+
+impl<N: ItemTreeNode> ItemTreeId<N> {
+ pub fn new(tree: TreeId, idx: FileItemTreeId<N>) -> Self {
+ Self { tree, value: idx }
+ }
+
+ pub fn file_id(self) -> HirFileId {
+ self.tree.file
+ }
+
+ pub fn tree_id(self) -> TreeId {
+ self.tree
+ }
+
+ pub fn item_tree(self, db: &dyn DefDatabase) -> Arc<ItemTree> {
+ self.tree.item_tree(db)
+ }
+}
+
+impl<N: ItemTreeNode> Copy for ItemTreeId<N> {}
+impl<N: ItemTreeNode> Clone for ItemTreeId<N> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+impl<N: ItemTreeNode> PartialEq for ItemTreeId<N> {
+ fn eq(&self, other: &Self) -> bool {
+ self.tree == other.tree && self.value == other.value
+ }
+}
+
+impl<N: ItemTreeNode> Eq for ItemTreeId<N> {}
+
+impl<N: ItemTreeNode> Hash for ItemTreeId<N> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.tree.hash(state);
+ self.value.hash(state);
+ }
+}
+
+macro_rules! mod_items {
+ ( $( $typ:ident in $fld:ident -> $ast:ty ),+ $(,)? ) => {
+ #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
+ pub enum ModItem {
+ $(
+ $typ(FileItemTreeId<$typ>),
+ )+
+ }
+
+ $(
+ impl From<FileItemTreeId<$typ>> for ModItem {
+ fn from(id: FileItemTreeId<$typ>) -> ModItem {
+ ModItem::$typ(id)
+ }
+ }
+ )+
+
+ $(
+ impl ItemTreeNode for $typ {
+ type Source = $ast;
+
+ fn ast_id(&self) -> FileAstId<Self::Source> {
+ self.ast_id
+ }
+
+ fn lookup(tree: &ItemTree, index: Idx<Self>) -> &Self {
+ &tree.data().$fld[index]
+ }
+
+ fn id_from_mod_item(mod_item: ModItem) -> Option<FileItemTreeId<Self>> {
+ match mod_item {
+ ModItem::$typ(id) => Some(id),
+ _ => None,
+ }
+ }
+
+ fn id_to_mod_item(id: FileItemTreeId<Self>) -> ModItem {
+ ModItem::$typ(id)
+ }
+ }
+
+ impl Index<Idx<$typ>> for ItemTree {
+ type Output = $typ;
+
+ fn index(&self, index: Idx<$typ>) -> &Self::Output {
+ &self.data().$fld[index]
+ }
+ }
+ )+
+ };
+}
+
+mod_items! {
+ Import in imports -> ast::Use,
+ ExternCrate in extern_crates -> ast::ExternCrate,
+ ExternBlock in extern_blocks -> ast::ExternBlock,
+ Function in functions -> ast::Fn,
+ Struct in structs -> ast::Struct,
+ Union in unions -> ast::Union,
+ Enum in enums -> ast::Enum,
+ Const in consts -> ast::Const,
+ Static in statics -> ast::Static,
+ Trait in traits -> ast::Trait,
+ Impl in impls -> ast::Impl,
+ TypeAlias in type_aliases -> ast::TypeAlias,
+ Mod in mods -> ast::Module,
+ MacroCall in macro_calls -> ast::MacroCall,
+ MacroRules in macro_rules -> ast::MacroRules,
+ MacroDef in macro_defs -> ast::MacroDef,
+}
+
+macro_rules! impl_index {
+ ( $($fld:ident: $t:ty),+ $(,)? ) => {
+ $(
+ impl Index<Idx<$t>> for ItemTree {
+ type Output = $t;
+
+ fn index(&self, index: Idx<$t>) -> &Self::Output {
+ &self.data().$fld[index]
+ }
+ }
+ )+
+ };
+}
+
+impl_index!(fields: Field, variants: Variant, params: Param);
+
+impl Index<RawVisibilityId> for ItemTree {
+ type Output = RawVisibility;
+ fn index(&self, index: RawVisibilityId) -> &Self::Output {
+ match index {
+ RawVisibilityId::PRIV => &VIS_PRIV,
+ RawVisibilityId::PUB => &VIS_PUB,
+ RawVisibilityId::PUB_CRATE => &VIS_PUB_CRATE,
+ _ => &self.data().vis.arena[Idx::from_raw(index.0.into())],
+ }
+ }
+}
+
+impl<N: ItemTreeNode> Index<FileItemTreeId<N>> for ItemTree {
+ type Output = N;
+ fn index(&self, id: FileItemTreeId<N>) -> &N {
+ N::lookup(self, id.index)
+ }
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Import {
+ pub visibility: RawVisibilityId,
+ pub ast_id: FileAstId<ast::Use>,
+ pub use_tree: UseTree,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct UseTree {
+ pub index: Idx<ast::UseTree>,
+ kind: UseTreeKind,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum UseTreeKind {
+ /// ```
+ /// use path::to::Item;
+ /// use path::to::Item as Renamed;
+ /// use path::to::Trait as _;
+ /// ```
+ Single { path: Interned<ModPath>, alias: Option<ImportAlias> },
+
+ /// ```
+ /// use *; // (invalid, but can occur in nested tree)
+ /// use path::*;
+ /// ```
+ Glob { path: Option<Interned<ModPath>> },
+
+ /// ```
+ /// use prefix::{self, Item, ...};
+ /// ```
+ Prefixed { prefix: Option<Interned<ModPath>>, list: Box<[UseTree]> },
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct ExternCrate {
+ pub name: Name,
+ pub alias: Option<ImportAlias>,
+ pub visibility: RawVisibilityId,
+ pub ast_id: FileAstId<ast::ExternCrate>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct ExternBlock {
+ pub abi: Option<Interned<str>>,
+ pub ast_id: FileAstId<ast::ExternBlock>,
+ pub children: Box<[ModItem]>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Function {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub explicit_generic_params: Interned<GenericParams>,
+ pub abi: Option<Interned<str>>,
+ pub params: IdxRange<Param>,
+ pub ret_type: Interned<TypeRef>,
+ pub async_ret_type: Option<Interned<TypeRef>>,
+ pub ast_id: FileAstId<ast::Fn>,
+ pub(crate) flags: FnFlags,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum Param {
+ Normal(Option<Name>, Interned<TypeRef>),
+ Varargs,
+}
+
+bitflags::bitflags! {
+ #[derive(Default)]
+ pub(crate) struct FnFlags: u8 {
+ const HAS_SELF_PARAM = 1 << 0;
+ const HAS_BODY = 1 << 1;
+ const HAS_DEFAULT_KW = 1 << 2;
+ const HAS_CONST_KW = 1 << 3;
+ const HAS_ASYNC_KW = 1 << 4;
+ const HAS_UNSAFE_KW = 1 << 5;
+ const IS_VARARGS = 1 << 6;
+ }
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Struct {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub generic_params: Interned<GenericParams>,
+ pub fields: Fields,
+ pub ast_id: FileAstId<ast::Struct>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Union {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub generic_params: Interned<GenericParams>,
+ pub fields: Fields,
+ pub ast_id: FileAstId<ast::Union>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Enum {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub generic_params: Interned<GenericParams>,
+ pub variants: IdxRange<Variant>,
+ pub ast_id: FileAstId<ast::Enum>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Const {
+ /// `None` for `const _: () = ();`
+ pub name: Option<Name>,
+ pub visibility: RawVisibilityId,
+ pub type_ref: Interned<TypeRef>,
+ pub ast_id: FileAstId<ast::Const>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Static {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub mutable: bool,
+ pub type_ref: Interned<TypeRef>,
+ pub ast_id: FileAstId<ast::Static>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Trait {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub generic_params: Interned<GenericParams>,
+ pub is_auto: bool,
+ pub is_unsafe: bool,
+ /// This is [`None`] if this Trait is a trait alias.
+ pub items: Option<Box<[AssocItem]>>,
+ pub ast_id: FileAstId<ast::Trait>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Impl {
+ pub generic_params: Interned<GenericParams>,
+ pub target_trait: Option<Interned<TraitRef>>,
+ pub self_ty: Interned<TypeRef>,
+ pub is_negative: bool,
+ pub items: Box<[AssocItem]>,
+ pub ast_id: FileAstId<ast::Impl>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct TypeAlias {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ /// Bounds on the type alias itself. Only valid in trait declarations, eg. `type Assoc: Copy;`.
+ pub bounds: Box<[Interned<TypeBound>]>,
+ pub generic_params: Interned<GenericParams>,
+ pub type_ref: Option<Interned<TypeRef>>,
+ pub ast_id: FileAstId<ast::TypeAlias>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Mod {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub kind: ModKind,
+ pub ast_id: FileAstId<ast::Module>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum ModKind {
+ /// `mod m { ... }`
+ Inline { items: Box<[ModItem]> },
+
+ /// `mod m;`
+ Outline,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct MacroCall {
+ /// Path to the called macro.
+ pub path: Interned<ModPath>,
+ pub ast_id: FileAstId<ast::MacroCall>,
+ pub expand_to: ExpandTo,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct MacroRules {
+ /// The name of the declared macro.
+ pub name: Name,
+ pub ast_id: FileAstId<ast::MacroRules>,
+}
+
+/// "Macros 2.0" macro definition.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct MacroDef {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub ast_id: FileAstId<ast::MacroDef>,
+}
+
+impl Import {
+ /// Maps a `UseTree` contained in this import back to its AST node.
+ pub fn use_tree_to_ast(
+ &self,
+ db: &dyn DefDatabase,
+ file_id: HirFileId,
+ index: Idx<ast::UseTree>,
+ ) -> ast::UseTree {
+ // Re-lower the AST item and get the source map.
+ // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
+ let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
+ let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
+ let hygiene = Hygiene::new(db.upcast(), file_id);
+ let (_, source_map) =
+ lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree");
+ source_map[index].clone()
+ }
+}
+
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub enum ImportKind {
+ /// The `ModPath` is imported normally.
+ Plain,
+ /// This is a glob-import of all names in the `ModPath`.
+ Glob,
+ /// This is a `some::path::self` import, which imports `some::path` only in type namespace.
+ TypeOnly,
+}
+
+impl UseTree {
+ /// Expands the `UseTree` into individually imported `ModPath`s.
+ pub fn expand(
+ &self,
+ mut cb: impl FnMut(Idx<ast::UseTree>, ModPath, ImportKind, Option<ImportAlias>),
+ ) {
+ self.expand_impl(None, &mut cb)
+ }
+
+ fn expand_impl(
+ &self,
+ prefix: Option<ModPath>,
+ cb: &mut dyn FnMut(Idx<ast::UseTree>, ModPath, ImportKind, Option<ImportAlias>),
+ ) {
+ fn concat_mod_paths(
+ prefix: Option<ModPath>,
+ path: &ModPath,
+ ) -> Option<(ModPath, ImportKind)> {
+ match (prefix, &path.kind) {
+ (None, _) => Some((path.clone(), ImportKind::Plain)),
+ (Some(mut prefix), PathKind::Plain) => {
+ for segment in path.segments() {
+ prefix.push_segment(segment.clone());
+ }
+ Some((prefix, ImportKind::Plain))
+ }
+ (Some(mut prefix), PathKind::Super(n))
+ if *n > 0 && prefix.segments().is_empty() =>
+ {
+ // `super::super` + `super::rest`
+ match &mut prefix.kind {
+ PathKind::Super(m) => {
+ cov_mark::hit!(concat_super_mod_paths);
+ *m += *n;
+ for segment in path.segments() {
+ prefix.push_segment(segment.clone());
+ }
+ Some((prefix, ImportKind::Plain))
+ }
+ _ => None,
+ }
+ }
+ (Some(prefix), PathKind::Super(0)) if path.segments().is_empty() => {
+ // `some::path::self` == `some::path`
+ Some((prefix, ImportKind::TypeOnly))
+ }
+ (Some(_), _) => None,
+ }
+ }
+
+ match &self.kind {
+ UseTreeKind::Single { path, alias } => {
+ if let Some((path, kind)) = concat_mod_paths(prefix, path) {
+ cb(self.index, path, kind, alias.clone());
+ }
+ }
+ UseTreeKind::Glob { path: Some(path) } => {
+ if let Some((path, _)) = concat_mod_paths(prefix, path) {
+ cb(self.index, path, ImportKind::Glob, None);
+ }
+ }
+ UseTreeKind::Glob { path: None } => {
+ if let Some(prefix) = prefix {
+ cb(self.index, prefix, ImportKind::Glob, None);
+ }
+ }
+ UseTreeKind::Prefixed { prefix: additional_prefix, list } => {
+ let prefix = match additional_prefix {
+ Some(path) => match concat_mod_paths(prefix, path) {
+ Some((path, ImportKind::Plain)) => Some(path),
+ _ => return,
+ },
+ None => prefix,
+ };
+ for tree in &**list {
+ tree.expand_impl(prefix.clone(), cb);
+ }
+ }
+ }
+ }
+}
+
+macro_rules! impl_froms {
+ ($e:ident { $($v:ident ($t:ty)),* $(,)? }) => {
+ $(
+ impl From<$t> for $e {
+ fn from(it: $t) -> $e {
+ $e::$v(it)
+ }
+ }
+ )*
+ }
+}
+
+impl ModItem {
+ pub fn as_assoc_item(&self) -> Option<AssocItem> {
+ match self {
+ ModItem::Import(_)
+ | ModItem::ExternCrate(_)
+ | ModItem::ExternBlock(_)
+ | ModItem::Struct(_)
+ | ModItem::Union(_)
+ | ModItem::Enum(_)
+ | ModItem::Static(_)
+ | ModItem::Trait(_)
+ | ModItem::Impl(_)
+ | ModItem::Mod(_)
+ | ModItem::MacroRules(_)
+ | ModItem::MacroDef(_) => None,
+ ModItem::MacroCall(call) => Some(AssocItem::MacroCall(*call)),
+ ModItem::Const(konst) => Some(AssocItem::Const(*konst)),
+ ModItem::TypeAlias(alias) => Some(AssocItem::TypeAlias(*alias)),
+ ModItem::Function(func) => Some(AssocItem::Function(*func)),
+ }
+ }
+
+ pub fn downcast<N: ItemTreeNode>(self) -> Option<FileItemTreeId<N>> {
+ N::id_from_mod_item(self)
+ }
+
+ pub fn ast_id(&self, tree: &ItemTree) -> FileAstId<ast::Item> {
+ match self {
+ ModItem::Import(it) => tree[it.index].ast_id().upcast(),
+ ModItem::ExternCrate(it) => tree[it.index].ast_id().upcast(),
+ ModItem::ExternBlock(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Function(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Struct(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Union(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Enum(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Const(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Static(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Trait(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Impl(it) => tree[it.index].ast_id().upcast(),
+ ModItem::TypeAlias(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Mod(it) => tree[it.index].ast_id().upcast(),
+ ModItem::MacroCall(it) => tree[it.index].ast_id().upcast(),
+ ModItem::MacroRules(it) => tree[it.index].ast_id().upcast(),
+ ModItem::MacroDef(it) => tree[it.index].ast_id().upcast(),
+ }
+ }
+}
+
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
+pub enum AssocItem {
+ Function(FileItemTreeId<Function>),
+ TypeAlias(FileItemTreeId<TypeAlias>),
+ Const(FileItemTreeId<Const>),
+ MacroCall(FileItemTreeId<MacroCall>),
+}
+
+impl_froms!(AssocItem {
+ Function(FileItemTreeId<Function>),
+ TypeAlias(FileItemTreeId<TypeAlias>),
+ Const(FileItemTreeId<Const>),
+ MacroCall(FileItemTreeId<MacroCall>),
+});
+
+impl From<AssocItem> for ModItem {
+ fn from(item: AssocItem) -> Self {
+ match item {
+ AssocItem::Function(it) => it.into(),
+ AssocItem::TypeAlias(it) => it.into(),
+ AssocItem::Const(it) => it.into(),
+ AssocItem::MacroCall(it) => it.into(),
+ }
+ }
+}
+
+impl AssocItem {
+ pub fn ast_id(self, tree: &ItemTree) -> FileAstId<ast::AssocItem> {
+ match self {
+ AssocItem::Function(id) => tree[id].ast_id.upcast(),
+ AssocItem::TypeAlias(id) => tree[id].ast_id.upcast(),
+ AssocItem::Const(id) => tree[id].ast_id.upcast(),
+ AssocItem::MacroCall(id) => tree[id].ast_id.upcast(),
+ }
+ }
+}
+
+#[derive(Debug, Eq, PartialEq)]
+pub struct Variant {
+ pub name: Name,
+ pub fields: Fields,
+ pub ast_id: FileAstId<ast::Variant>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum Fields {
+ Record(IdxRange<Field>),
+ Tuple(IdxRange<Field>),
+ Unit,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum FieldAstId {
+ Record(FileAstId<ast::RecordField>),
+ Tuple(FileAstId<ast::TupleField>),
+}
+
+/// A single field of an enum variant or struct
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Field {
+ pub name: Name,
+ pub type_ref: Interned<TypeRef>,
+ pub visibility: RawVisibilityId,
+ pub ast_id: FieldAstId,
+}
--- /dev/null
- let res = Trait {
- name,
- visibility,
- generic_params,
- is_auto,
- is_unsafe,
- items: items.unwrap_or_default(),
- ast_id,
- };
+//! AST -> `ItemTree` lowering code.
+
+use std::{collections::hash_map::Entry, sync::Arc};
+
+use hir_expand::{ast_id_map::AstIdMap, hygiene::Hygiene, HirFileId};
+use syntax::ast::{self, HasModuleItem};
+
+use crate::{
+ generics::{GenericParams, TypeParamData, TypeParamProvenance},
+ type_ref::{LifetimeRef, TraitBoundModifier, TraitRef},
+};
+
+use super::*;
+
+fn id<N: ItemTreeNode>(index: Idx<N>) -> FileItemTreeId<N> {
+ FileItemTreeId { index, _p: PhantomData }
+}
+
+pub(super) struct Ctx<'a> {
+ db: &'a dyn DefDatabase,
+ tree: ItemTree,
+ source_ast_id_map: Arc<AstIdMap>,
+ body_ctx: crate::body::LowerCtx<'a>,
+}
+
+impl<'a> Ctx<'a> {
+ pub(super) fn new(db: &'a dyn DefDatabase, file: HirFileId) -> Self {
+ Self {
+ db,
+ tree: ItemTree::default(),
+ source_ast_id_map: db.ast_id_map(file),
+ body_ctx: crate::body::LowerCtx::new(db, file),
+ }
+ }
+
+ pub(super) fn hygiene(&self) -> &Hygiene {
+ self.body_ctx.hygiene()
+ }
+
+ pub(super) fn lower_module_items(mut self, item_owner: &dyn HasModuleItem) -> ItemTree {
+ self.tree.top_level =
+ item_owner.items().flat_map(|item| self.lower_mod_item(&item)).collect();
+ self.tree
+ }
+
+ pub(super) fn lower_macro_stmts(mut self, stmts: ast::MacroStmts) -> ItemTree {
+ self.tree.top_level = stmts
+ .statements()
+ .filter_map(|stmt| {
+ match stmt {
+ ast::Stmt::Item(item) => Some(item),
+ // Macro calls can be both items and expressions. The syntax library always treats
+ // them as expressions here, so we undo that.
+ ast::Stmt::ExprStmt(es) => match es.expr()? {
+ ast::Expr::MacroExpr(expr) => {
+ cov_mark::hit!(macro_call_in_macro_stmts_is_added_to_item_tree);
+ Some(expr.macro_call()?.into())
+ }
+ _ => None,
+ },
+ _ => None,
+ }
+ })
+ .flat_map(|item| self.lower_mod_item(&item))
+ .collect();
+
+ if let Some(ast::Expr::MacroExpr(tail_macro)) = stmts.expr() {
+ if let Some(call) = tail_macro.macro_call() {
+ cov_mark::hit!(macro_stmt_with_trailing_macro_expr);
+ if let Some(mod_item) = self.lower_mod_item(&call.into()) {
+ self.tree.top_level.push(mod_item);
+ }
+ }
+ }
+
+ self.tree
+ }
+
+ pub(super) fn lower_block(mut self, block: &ast::BlockExpr) -> ItemTree {
+ self.tree.top_level = block
+ .statements()
+ .filter_map(|stmt| match stmt {
+ ast::Stmt::Item(item) => self.lower_mod_item(&item),
+ // Macro calls can be both items and expressions. The syntax library always treats
+ // them as expressions here, so we undo that.
+ ast::Stmt::ExprStmt(es) => match es.expr()? {
+ ast::Expr::MacroExpr(expr) => self.lower_mod_item(&expr.macro_call()?.into()),
+ _ => None,
+ },
+ _ => None,
+ })
+ .collect();
+
+ self.tree
+ }
+
+ fn data(&mut self) -> &mut ItemTreeData {
+ self.tree.data_mut()
+ }
+
+ fn lower_mod_item(&mut self, item: &ast::Item) -> Option<ModItem> {
+ let attrs = RawAttrs::new(self.db, item, self.hygiene());
+ let item: ModItem = match item {
+ ast::Item::Struct(ast) => self.lower_struct(ast)?.into(),
+ ast::Item::Union(ast) => self.lower_union(ast)?.into(),
+ ast::Item::Enum(ast) => self.lower_enum(ast)?.into(),
+ ast::Item::Fn(ast) => self.lower_function(ast)?.into(),
+ ast::Item::TypeAlias(ast) => self.lower_type_alias(ast)?.into(),
+ ast::Item::Static(ast) => self.lower_static(ast)?.into(),
+ ast::Item::Const(ast) => self.lower_const(ast).into(),
+ ast::Item::Module(ast) => self.lower_module(ast)?.into(),
+ ast::Item::Trait(ast) => self.lower_trait(ast)?.into(),
+ ast::Item::Impl(ast) => self.lower_impl(ast)?.into(),
+ ast::Item::Use(ast) => self.lower_use(ast)?.into(),
+ ast::Item::ExternCrate(ast) => self.lower_extern_crate(ast)?.into(),
+ ast::Item::MacroCall(ast) => self.lower_macro_call(ast)?.into(),
+ ast::Item::MacroRules(ast) => self.lower_macro_rules(ast)?.into(),
+ ast::Item::MacroDef(ast) => self.lower_macro_def(ast)?.into(),
+ ast::Item::ExternBlock(ast) => self.lower_extern_block(ast).into(),
+ };
+
+ self.add_attrs(item.into(), attrs);
+
+ Some(item)
+ }
+
+ fn add_attrs(&mut self, item: AttrOwner, attrs: RawAttrs) {
+ match self.tree.attrs.entry(item) {
+ Entry::Occupied(mut entry) => {
+ *entry.get_mut() = entry.get().merge(attrs);
+ }
+ Entry::Vacant(entry) => {
+ entry.insert(attrs);
+ }
+ }
+ }
+
+ fn lower_assoc_item(&mut self, item: &ast::AssocItem) -> Option<AssocItem> {
+ match item {
+ ast::AssocItem::Fn(ast) => self.lower_function(ast).map(Into::into),
+ ast::AssocItem::TypeAlias(ast) => self.lower_type_alias(ast).map(Into::into),
+ ast::AssocItem::Const(ast) => Some(self.lower_const(ast).into()),
+ ast::AssocItem::MacroCall(ast) => self.lower_macro_call(ast).map(Into::into),
+ }
+ }
+
+ fn lower_struct(&mut self, strukt: &ast::Struct) -> Option<FileItemTreeId<Struct>> {
+ let visibility = self.lower_visibility(strukt);
+ let name = strukt.name()?.as_name();
+ let generic_params = self.lower_generic_params(GenericsOwner::Struct, strukt);
+ let fields = self.lower_fields(&strukt.kind());
+ let ast_id = self.source_ast_id_map.ast_id(strukt);
+ let res = Struct { name, visibility, generic_params, fields, ast_id };
+ Some(id(self.data().structs.alloc(res)))
+ }
+
+ fn lower_fields(&mut self, strukt_kind: &ast::StructKind) -> Fields {
+ match strukt_kind {
+ ast::StructKind::Record(it) => {
+ let range = self.lower_record_fields(it);
+ Fields::Record(range)
+ }
+ ast::StructKind::Tuple(it) => {
+ let range = self.lower_tuple_fields(it);
+ Fields::Tuple(range)
+ }
+ ast::StructKind::Unit => Fields::Unit,
+ }
+ }
+
+ fn lower_record_fields(&mut self, fields: &ast::RecordFieldList) -> IdxRange<Field> {
+ let start = self.next_field_idx();
+ for field in fields.fields() {
+ if let Some(data) = self.lower_record_field(&field) {
+ let idx = self.data().fields.alloc(data);
+ self.add_attrs(idx.into(), RawAttrs::new(self.db, &field, self.hygiene()));
+ }
+ }
+ let end = self.next_field_idx();
+ IdxRange::new(start..end)
+ }
+
+ fn lower_record_field(&mut self, field: &ast::RecordField) -> Option<Field> {
+ let name = field.name()?.as_name();
+ let visibility = self.lower_visibility(field);
+ let type_ref = self.lower_type_ref_opt(field.ty());
+ let ast_id = FieldAstId::Record(self.source_ast_id_map.ast_id(field));
+ let res = Field { name, type_ref, visibility, ast_id };
+ Some(res)
+ }
+
+ fn lower_tuple_fields(&mut self, fields: &ast::TupleFieldList) -> IdxRange<Field> {
+ let start = self.next_field_idx();
+ for (i, field) in fields.fields().enumerate() {
+ let data = self.lower_tuple_field(i, &field);
+ let idx = self.data().fields.alloc(data);
+ self.add_attrs(idx.into(), RawAttrs::new(self.db, &field, self.hygiene()));
+ }
+ let end = self.next_field_idx();
+ IdxRange::new(start..end)
+ }
+
+ fn lower_tuple_field(&mut self, idx: usize, field: &ast::TupleField) -> Field {
+ let name = Name::new_tuple_field(idx);
+ let visibility = self.lower_visibility(field);
+ let type_ref = self.lower_type_ref_opt(field.ty());
+ let ast_id = FieldAstId::Tuple(self.source_ast_id_map.ast_id(field));
+ Field { name, type_ref, visibility, ast_id }
+ }
+
+ fn lower_union(&mut self, union: &ast::Union) -> Option<FileItemTreeId<Union>> {
+ let visibility = self.lower_visibility(union);
+ let name = union.name()?.as_name();
+ let generic_params = self.lower_generic_params(GenericsOwner::Union, union);
+ let fields = match union.record_field_list() {
+ Some(record_field_list) => self.lower_fields(&StructKind::Record(record_field_list)),
+ None => Fields::Record(IdxRange::new(self.next_field_idx()..self.next_field_idx())),
+ };
+ let ast_id = self.source_ast_id_map.ast_id(union);
+ let res = Union { name, visibility, generic_params, fields, ast_id };
+ Some(id(self.data().unions.alloc(res)))
+ }
+
+ fn lower_enum(&mut self, enum_: &ast::Enum) -> Option<FileItemTreeId<Enum>> {
+ let visibility = self.lower_visibility(enum_);
+ let name = enum_.name()?.as_name();
+ let generic_params = self.lower_generic_params(GenericsOwner::Enum, enum_);
+ let variants = match &enum_.variant_list() {
+ Some(variant_list) => self.lower_variants(variant_list),
+ None => IdxRange::new(self.next_variant_idx()..self.next_variant_idx()),
+ };
+ let ast_id = self.source_ast_id_map.ast_id(enum_);
+ let res = Enum { name, visibility, generic_params, variants, ast_id };
+ Some(id(self.data().enums.alloc(res)))
+ }
+
+ fn lower_variants(&mut self, variants: &ast::VariantList) -> IdxRange<Variant> {
+ let start = self.next_variant_idx();
+ for variant in variants.variants() {
+ if let Some(data) = self.lower_variant(&variant) {
+ let idx = self.data().variants.alloc(data);
+ self.add_attrs(idx.into(), RawAttrs::new(self.db, &variant, self.hygiene()));
+ }
+ }
+ let end = self.next_variant_idx();
+ IdxRange::new(start..end)
+ }
+
+ fn lower_variant(&mut self, variant: &ast::Variant) -> Option<Variant> {
+ let name = variant.name()?.as_name();
+ let fields = self.lower_fields(&variant.kind());
+ let ast_id = self.source_ast_id_map.ast_id(variant);
+ let res = Variant { name, fields, ast_id };
+ Some(res)
+ }
+
+ fn lower_function(&mut self, func: &ast::Fn) -> Option<FileItemTreeId<Function>> {
+ let visibility = self.lower_visibility(func);
+ let name = func.name()?.as_name();
+
+ let mut has_self_param = false;
+ let start_param = self.next_param_idx();
+ if let Some(param_list) = func.param_list() {
+ if let Some(self_param) = param_list.self_param() {
+ let self_type = match self_param.ty() {
+ Some(type_ref) => TypeRef::from_ast(&self.body_ctx, type_ref),
+ None => {
+ let self_type = TypeRef::Path(name![Self].into());
+ match self_param.kind() {
+ ast::SelfParamKind::Owned => self_type,
+ ast::SelfParamKind::Ref => TypeRef::Reference(
+ Box::new(self_type),
+ self_param.lifetime().as_ref().map(LifetimeRef::new),
+ Mutability::Shared,
+ ),
+ ast::SelfParamKind::MutRef => TypeRef::Reference(
+ Box::new(self_type),
+ self_param.lifetime().as_ref().map(LifetimeRef::new),
+ Mutability::Mut,
+ ),
+ }
+ }
+ };
+ let ty = Interned::new(self_type);
+ let idx = self.data().params.alloc(Param::Normal(None, ty));
+ self.add_attrs(idx.into(), RawAttrs::new(self.db, &self_param, self.hygiene()));
+ has_self_param = true;
+ }
+ for param in param_list.params() {
+ let idx = match param.dotdotdot_token() {
+ Some(_) => self.data().params.alloc(Param::Varargs),
+ None => {
+ let type_ref = TypeRef::from_ast_opt(&self.body_ctx, param.ty());
+ let ty = Interned::new(type_ref);
+ let mut pat = param.pat();
+ // FIXME: This really shouldn't be here, in fact FunctionData/ItemTree's function shouldn't know about
+ // pattern names at all
+ let name = 'name: loop {
+ match pat {
+ Some(ast::Pat::RefPat(ref_pat)) => pat = ref_pat.pat(),
+ Some(ast::Pat::IdentPat(ident)) => {
+ break 'name ident.name().map(|it| it.as_name())
+ }
+ _ => break 'name None,
+ }
+ };
+ self.data().params.alloc(Param::Normal(name, ty))
+ }
+ };
+ self.add_attrs(idx.into(), RawAttrs::new(self.db, &param, self.hygiene()));
+ }
+ }
+ let end_param = self.next_param_idx();
+ let params = IdxRange::new(start_param..end_param);
+
+ let ret_type = match func.ret_type() {
+ Some(rt) => match rt.ty() {
+ Some(type_ref) => TypeRef::from_ast(&self.body_ctx, type_ref),
+ None if rt.thin_arrow_token().is_some() => TypeRef::Error,
+ None => TypeRef::unit(),
+ },
+ None => TypeRef::unit(),
+ };
+
+ let (ret_type, async_ret_type) = if func.async_token().is_some() {
+ let async_ret_type = ret_type.clone();
+ let future_impl = desugar_future_path(ret_type);
+ let ty_bound = Interned::new(TypeBound::Path(future_impl, TraitBoundModifier::None));
+ (TypeRef::ImplTrait(vec![ty_bound]), Some(async_ret_type))
+ } else {
+ (ret_type, None)
+ };
+
+ let abi = func.abi().map(lower_abi);
+
+ let ast_id = self.source_ast_id_map.ast_id(func);
+
+ let mut flags = FnFlags::default();
+ if func.body().is_some() {
+ flags |= FnFlags::HAS_BODY;
+ }
+ if has_self_param {
+ flags |= FnFlags::HAS_SELF_PARAM;
+ }
+ if func.default_token().is_some() {
+ flags |= FnFlags::HAS_DEFAULT_KW;
+ }
+ if func.const_token().is_some() {
+ flags |= FnFlags::HAS_CONST_KW;
+ }
+ if func.async_token().is_some() {
+ flags |= FnFlags::HAS_ASYNC_KW;
+ }
+ if func.unsafe_token().is_some() {
+ flags |= FnFlags::HAS_UNSAFE_KW;
+ }
+
+ let mut res = Function {
+ name,
+ visibility,
+ explicit_generic_params: Interned::new(GenericParams::default()),
+ abi,
+ params,
+ ret_type: Interned::new(ret_type),
+ async_ret_type: async_ret_type.map(Interned::new),
+ ast_id,
+ flags,
+ };
+ res.explicit_generic_params =
+ self.lower_generic_params(GenericsOwner::Function(&res), func);
+
+ Some(id(self.data().functions.alloc(res)))
+ }
+
+ fn lower_type_alias(
+ &mut self,
+ type_alias: &ast::TypeAlias,
+ ) -> Option<FileItemTreeId<TypeAlias>> {
+ let name = type_alias.name()?.as_name();
+ let type_ref = type_alias.ty().map(|it| self.lower_type_ref(&it));
+ let visibility = self.lower_visibility(type_alias);
+ let bounds = self.lower_type_bounds(type_alias);
+ let generic_params = self.lower_generic_params(GenericsOwner::TypeAlias, type_alias);
+ let ast_id = self.source_ast_id_map.ast_id(type_alias);
+ let res = TypeAlias {
+ name,
+ visibility,
+ bounds: bounds.into_boxed_slice(),
+ generic_params,
+ type_ref,
+ ast_id,
+ };
+ Some(id(self.data().type_aliases.alloc(res)))
+ }
+
+ fn lower_static(&mut self, static_: &ast::Static) -> Option<FileItemTreeId<Static>> {
+ let name = static_.name()?.as_name();
+ let type_ref = self.lower_type_ref_opt(static_.ty());
+ let visibility = self.lower_visibility(static_);
+ let mutable = static_.mut_token().is_some();
+ let ast_id = self.source_ast_id_map.ast_id(static_);
+ let res = Static { name, visibility, mutable, type_ref, ast_id };
+ Some(id(self.data().statics.alloc(res)))
+ }
+
+ fn lower_const(&mut self, konst: &ast::Const) -> FileItemTreeId<Const> {
+ let name = konst.name().map(|it| it.as_name());
+ let type_ref = self.lower_type_ref_opt(konst.ty());
+ let visibility = self.lower_visibility(konst);
+ let ast_id = self.source_ast_id_map.ast_id(konst);
+ let res = Const { name, visibility, type_ref, ast_id };
+ id(self.data().consts.alloc(res))
+ }
+
+ fn lower_module(&mut self, module: &ast::Module) -> Option<FileItemTreeId<Mod>> {
+ let name = module.name()?.as_name();
+ let visibility = self.lower_visibility(module);
+ let kind = if module.semicolon_token().is_some() {
+ ModKind::Outline
+ } else {
+ ModKind::Inline {
+ items: module
+ .item_list()
+ .map(|list| list.items().flat_map(|item| self.lower_mod_item(&item)).collect())
+ .unwrap_or_else(|| {
+ cov_mark::hit!(name_res_works_for_broken_modules);
+ Box::new([]) as Box<[_]>
+ }),
+ }
+ };
+ let ast_id = self.source_ast_id_map.ast_id(module);
+ let res = Mod { name, visibility, kind, ast_id };
+ Some(id(self.data().mods.alloc(res)))
+ }
+
+ fn lower_trait(&mut self, trait_def: &ast::Trait) -> Option<FileItemTreeId<Trait>> {
+ let name = trait_def.name()?.as_name();
+ let visibility = self.lower_visibility(trait_def);
+ let generic_params = self.lower_generic_params(GenericsOwner::Trait(trait_def), trait_def);
+ let is_auto = trait_def.auto_token().is_some();
+ let is_unsafe = trait_def.unsafe_token().is_some();
+ let items = trait_def.assoc_item_list().map(|list| {
+ list.assoc_items()
+ .filter_map(|item| {
+ let attrs = RawAttrs::new(self.db, &item, self.hygiene());
+ self.lower_assoc_item(&item).map(|item| {
+ self.add_attrs(ModItem::from(item).into(), attrs);
+ item
+ })
+ })
+ .collect()
+ });
+ let ast_id = self.source_ast_id_map.ast_id(trait_def);
++ let res = Trait { name, visibility, generic_params, is_auto, is_unsafe, items, ast_id };
+ Some(id(self.data().traits.alloc(res)))
+ }
+
+ fn lower_impl(&mut self, impl_def: &ast::Impl) -> Option<FileItemTreeId<Impl>> {
+ let generic_params = self.lower_generic_params(GenericsOwner::Impl, impl_def);
+ // FIXME: If trait lowering fails, due to a non PathType for example, we treat this impl
+ // as if it were a non-trait impl. Ideally we want to create a unique missing ref that only
+ // equals itself.
+ let target_trait = impl_def.trait_().and_then(|tr| self.lower_trait_ref(&tr));
+ let self_ty = self.lower_type_ref(&impl_def.self_ty()?);
+ let is_negative = impl_def.excl_token().is_some();
+
+ // We cannot use `assoc_items()` here as that does not include macro calls.
+ let items = impl_def
+ .assoc_item_list()
+ .into_iter()
+ .flat_map(|it| it.assoc_items())
+ .filter_map(|item| {
+ let assoc = self.lower_assoc_item(&item)?;
+ let attrs = RawAttrs::new(self.db, &item, self.hygiene());
+ self.add_attrs(ModItem::from(assoc).into(), attrs);
+ Some(assoc)
+ })
+ .collect();
+ let ast_id = self.source_ast_id_map.ast_id(impl_def);
+ let res = Impl { generic_params, target_trait, self_ty, is_negative, items, ast_id };
+ Some(id(self.data().impls.alloc(res)))
+ }
+
+ fn lower_use(&mut self, use_item: &ast::Use) -> Option<FileItemTreeId<Import>> {
+ let visibility = self.lower_visibility(use_item);
+ let ast_id = self.source_ast_id_map.ast_id(use_item);
+ let (use_tree, _) = lower_use_tree(self.db, self.hygiene(), use_item.use_tree()?)?;
+
+ let res = Import { visibility, ast_id, use_tree };
+ Some(id(self.data().imports.alloc(res)))
+ }
+
+ fn lower_extern_crate(
+ &mut self,
+ extern_crate: &ast::ExternCrate,
+ ) -> Option<FileItemTreeId<ExternCrate>> {
+ let name = extern_crate.name_ref()?.as_name();
+ let alias = extern_crate.rename().map(|a| {
+ a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias)
+ });
+ let visibility = self.lower_visibility(extern_crate);
+ let ast_id = self.source_ast_id_map.ast_id(extern_crate);
+
+ let res = ExternCrate { name, alias, visibility, ast_id };
+ Some(id(self.data().extern_crates.alloc(res)))
+ }
+
+ fn lower_macro_call(&mut self, m: &ast::MacroCall) -> Option<FileItemTreeId<MacroCall>> {
+ let path = Interned::new(ModPath::from_src(self.db.upcast(), m.path()?, self.hygiene())?);
+ let ast_id = self.source_ast_id_map.ast_id(m);
+ let expand_to = hir_expand::ExpandTo::from_call_site(m);
+ let res = MacroCall { path, ast_id, expand_to };
+ Some(id(self.data().macro_calls.alloc(res)))
+ }
+
+ fn lower_macro_rules(&mut self, m: &ast::MacroRules) -> Option<FileItemTreeId<MacroRules>> {
+ let name = m.name().map(|it| it.as_name())?;
+ let ast_id = self.source_ast_id_map.ast_id(m);
+
+ let res = MacroRules { name, ast_id };
+ Some(id(self.data().macro_rules.alloc(res)))
+ }
+
+ fn lower_macro_def(&mut self, m: &ast::MacroDef) -> Option<FileItemTreeId<MacroDef>> {
+ let name = m.name().map(|it| it.as_name())?;
+
+ let ast_id = self.source_ast_id_map.ast_id(m);
+ let visibility = self.lower_visibility(m);
+
+ let res = MacroDef { name, ast_id, visibility };
+ Some(id(self.data().macro_defs.alloc(res)))
+ }
+
+ fn lower_extern_block(&mut self, block: &ast::ExternBlock) -> FileItemTreeId<ExternBlock> {
+ let ast_id = self.source_ast_id_map.ast_id(block);
+ let abi = block.abi().map(lower_abi);
+ let children: Box<[_]> = block.extern_item_list().map_or(Box::new([]), |list| {
+ list.extern_items()
+ .filter_map(|item| {
+ // Note: All items in an `extern` block need to be lowered as if they're outside of one
+ // (in other words, the knowledge that they're in an extern block must not be used).
+ // This is because an extern block can contain macros whose ItemTree's top-level items
+ // should be considered to be in an extern block too.
+ let attrs = RawAttrs::new(self.db, &item, self.hygiene());
+ let id: ModItem = match item {
+ ast::ExternItem::Fn(ast) => self.lower_function(&ast)?.into(),
+ ast::ExternItem::Static(ast) => self.lower_static(&ast)?.into(),
+ ast::ExternItem::TypeAlias(ty) => self.lower_type_alias(&ty)?.into(),
+ ast::ExternItem::MacroCall(call) => self.lower_macro_call(&call)?.into(),
+ };
+ self.add_attrs(id.into(), attrs);
+ Some(id)
+ })
+ .collect()
+ });
+
+ let res = ExternBlock { abi, ast_id, children };
+ id(self.data().extern_blocks.alloc(res))
+ }
+
+ fn lower_generic_params(
+ &mut self,
+ owner: GenericsOwner<'_>,
+ node: &dyn ast::HasGenericParams,
+ ) -> Interned<GenericParams> {
+ let mut generics = GenericParams::default();
+ match owner {
+ GenericsOwner::Function(_)
+ | GenericsOwner::Struct
+ | GenericsOwner::Enum
+ | GenericsOwner::Union
+ | GenericsOwner::TypeAlias => {
+ generics.fill(&self.body_ctx, node);
+ }
+ GenericsOwner::Trait(trait_def) => {
+ // traits get the Self type as an implicit first type parameter
+ generics.type_or_consts.alloc(
+ TypeParamData {
+ name: Some(name![Self]),
+ default: None,
+ provenance: TypeParamProvenance::TraitSelf,
+ }
+ .into(),
+ );
+ // add super traits as bounds on Self
+ // i.e., trait Foo: Bar is equivalent to trait Foo where Self: Bar
+ let self_param = TypeRef::Path(name![Self].into());
+ generics.fill_bounds(&self.body_ctx, trait_def, Either::Left(self_param));
+ generics.fill(&self.body_ctx, node);
+ }
+ GenericsOwner::Impl => {
+ // Note that we don't add `Self` here: in `impl`s, `Self` is not a
+ // type-parameter, but rather is a type-alias for impl's target
+ // type, so this is handled by the resolver.
+ generics.fill(&self.body_ctx, node);
+ }
+ }
+
+ generics.shrink_to_fit();
+ Interned::new(generics)
+ }
+
+ fn lower_type_bounds(&mut self, node: &dyn ast::HasTypeBounds) -> Vec<Interned<TypeBound>> {
+ match node.type_bound_list() {
+ Some(bound_list) => bound_list
+ .bounds()
+ .map(|it| Interned::new(TypeBound::from_ast(&self.body_ctx, it)))
+ .collect(),
+ None => Vec::new(),
+ }
+ }
+
+ fn lower_visibility(&mut self, item: &dyn ast::HasVisibility) -> RawVisibilityId {
+ let vis = RawVisibility::from_ast_with_hygiene(self.db, item.visibility(), self.hygiene());
+ self.data().vis.alloc(vis)
+ }
+
+ fn lower_trait_ref(&mut self, trait_ref: &ast::Type) -> Option<Interned<TraitRef>> {
+ let trait_ref = TraitRef::from_ast(&self.body_ctx, trait_ref.clone())?;
+ Some(Interned::new(trait_ref))
+ }
+
+ fn lower_type_ref(&mut self, type_ref: &ast::Type) -> Interned<TypeRef> {
+ let tyref = TypeRef::from_ast(&self.body_ctx, type_ref.clone());
+ Interned::new(tyref)
+ }
+
+ fn lower_type_ref_opt(&mut self, type_ref: Option<ast::Type>) -> Interned<TypeRef> {
+ match type_ref.map(|ty| self.lower_type_ref(&ty)) {
+ Some(it) => it,
+ None => Interned::new(TypeRef::Error),
+ }
+ }
+
+ fn next_field_idx(&self) -> Idx<Field> {
+ Idx::from_raw(RawIdx::from(
+ self.tree.data.as_ref().map_or(0, |data| data.fields.len() as u32),
+ ))
+ }
+ fn next_variant_idx(&self) -> Idx<Variant> {
+ Idx::from_raw(RawIdx::from(
+ self.tree.data.as_ref().map_or(0, |data| data.variants.len() as u32),
+ ))
+ }
+ fn next_param_idx(&self) -> Idx<Param> {
+ Idx::from_raw(RawIdx::from(
+ self.tree.data.as_ref().map_or(0, |data| data.params.len() as u32),
+ ))
+ }
+}
+
+fn desugar_future_path(orig: TypeRef) -> Path {
+ let path = path![core::future::Future];
+ let mut generic_args: Vec<_> =
+ std::iter::repeat(None).take(path.segments().len() - 1).collect();
+ let mut last = GenericArgs::empty();
+ let binding = AssociatedTypeBinding {
+ name: name![Output],
+ args: None,
+ type_ref: Some(orig),
+ bounds: Vec::new(),
+ };
+ last.bindings.push(binding);
+ generic_args.push(Some(Interned::new(last)));
+
+ Path::from_known_path(path, generic_args)
+}
+
+enum GenericsOwner<'a> {
+ /// We need access to the partially-lowered `Function` for lowering `impl Trait` in argument
+ /// position.
+ Function(&'a Function),
+ Struct,
+ Enum,
+ Union,
+ /// The `TraitDef` is needed to fill the source map for the implicit `Self` parameter.
+ Trait(&'a ast::Trait),
+ TypeAlias,
+ Impl,
+}
+
+fn lower_abi(abi: ast::Abi) -> Interned<str> {
+ // FIXME: Abi::abi() -> Option<SyntaxToken>?
+ match abi.syntax().last_token() {
+ Some(tok) if tok.kind() == SyntaxKind::STRING => {
+ // FIXME: Better way to unescape?
+ Interned::new_str(tok.text().trim_matches('"'))
+ }
+ _ => {
+ // `extern` defaults to `extern "C"`.
+ Interned::new_str("C")
+ }
+ }
+}
+
+struct UseTreeLowering<'a> {
+ db: &'a dyn DefDatabase,
+ hygiene: &'a Hygiene,
+ mapping: Arena<ast::UseTree>,
+}
+
+impl UseTreeLowering<'_> {
+ fn lower_use_tree(&mut self, tree: ast::UseTree) -> Option<UseTree> {
+ if let Some(use_tree_list) = tree.use_tree_list() {
+ let prefix = match tree.path() {
+ // E.g. use something::{{{inner}}};
+ None => None,
+ // E.g. `use something::{inner}` (prefix is `None`, path is `something`)
+ // or `use something::{path::{inner::{innerer}}}` (prefix is `something::path`, path is `inner`)
+ Some(path) => {
+ match ModPath::from_src(self.db.upcast(), path, self.hygiene) {
+ Some(it) => Some(it),
+ None => return None, // FIXME: report errors somewhere
+ }
+ }
+ };
+
+ let list =
+ use_tree_list.use_trees().filter_map(|tree| self.lower_use_tree(tree)).collect();
+
+ Some(
+ self.use_tree(
+ UseTreeKind::Prefixed { prefix: prefix.map(Interned::new), list },
+ tree,
+ ),
+ )
+ } else {
+ let is_glob = tree.star_token().is_some();
+ let path = match tree.path() {
+ Some(path) => Some(ModPath::from_src(self.db.upcast(), path, self.hygiene)?),
+ None => None,
+ };
+ let alias = tree.rename().map(|a| {
+ a.name().map(|it| it.as_name()).map_or(ImportAlias::Underscore, ImportAlias::Alias)
+ });
+ if alias.is_some() && is_glob {
+ return None;
+ }
+
+ match (path, alias, is_glob) {
+ (path, None, true) => {
+ if path.is_none() {
+ cov_mark::hit!(glob_enum_group);
+ }
+ Some(self.use_tree(UseTreeKind::Glob { path: path.map(Interned::new) }, tree))
+ }
+ // Globs can't be renamed
+ (_, Some(_), true) | (None, None, false) => None,
+ // `bla::{ as Name}` is invalid
+ (None, Some(_), false) => None,
+ (Some(path), alias, false) => Some(
+ self.use_tree(UseTreeKind::Single { path: Interned::new(path), alias }, tree),
+ ),
+ }
+ }
+ }
+
+ fn use_tree(&mut self, kind: UseTreeKind, ast: ast::UseTree) -> UseTree {
+ let index = self.mapping.alloc(ast);
+ UseTree { index, kind }
+ }
+}
+
+pub(super) fn lower_use_tree(
+ db: &dyn DefDatabase,
+ hygiene: &Hygiene,
+ tree: ast::UseTree,
+) -> Option<(UseTree, Arena<ast::UseTree>)> {
+ let mut lowering = UseTreeLowering { db, hygiene, mapping: Arena::new() };
+ let tree = lowering.lower_use_tree(tree)?;
+ Some((tree, lowering.mapping))
+}
--- /dev/null
- self.print_where_clause_and_opening_brace(generic_params);
- self.indented(|this| {
- for item in &**items {
- this.print_mod_item((*item).into());
+//! `ItemTree` debug printer.
+
+use std::fmt::{self, Write};
+
+use crate::{
+ attr::RawAttrs,
+ generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget},
+ pretty::{print_path, print_type_bounds, print_type_ref},
+ visibility::RawVisibility,
+};
+
+use super::*;
+
+pub(super) fn print_item_tree(tree: &ItemTree) -> String {
+ let mut p = Printer { tree, buf: String::new(), indent_level: 0, needs_indent: true };
+
+ if let Some(attrs) = tree.attrs.get(&AttrOwner::TopLevel) {
+ p.print_attrs(attrs, true);
+ }
+ p.blank();
+
+ for item in tree.top_level_items() {
+ p.print_mod_item(*item);
+ }
+
+ let mut s = p.buf.trim_end_matches('\n').to_string();
+ s.push('\n');
+ s
+}
+
+macro_rules! w {
+ ($dst:expr, $($arg:tt)*) => {
+ { let _ = write!($dst, $($arg)*); }
+ };
+}
+
+macro_rules! wln {
+ ($dst:expr) => {
+ { let _ = writeln!($dst); }
+ };
+ ($dst:expr, $($arg:tt)*) => {
+ { let _ = writeln!($dst, $($arg)*); }
+ };
+}
+
+struct Printer<'a> {
+ tree: &'a ItemTree,
+ buf: String,
+ indent_level: usize,
+ needs_indent: bool,
+}
+
+impl<'a> Printer<'a> {
+ fn indented(&mut self, f: impl FnOnce(&mut Self)) {
+ self.indent_level += 1;
+ wln!(self);
+ f(self);
+ self.indent_level -= 1;
+ self.buf = self.buf.trim_end_matches('\n').to_string();
+ }
+
+ /// Ensures that a blank line is output before the next text.
+ fn blank(&mut self) {
+ let mut iter = self.buf.chars().rev().fuse();
+ match (iter.next(), iter.next()) {
+ (Some('\n'), Some('\n') | None) | (None, None) => {}
+ (Some('\n'), Some(_)) => {
+ self.buf.push('\n');
+ }
+ (Some(_), _) => {
+ self.buf.push('\n');
+ self.buf.push('\n');
+ }
+ (None, Some(_)) => unreachable!(),
+ }
+ }
+
+ fn whitespace(&mut self) {
+ match self.buf.chars().next_back() {
+ None | Some('\n' | ' ') => {}
+ _ => self.buf.push(' '),
+ }
+ }
+
+ fn print_attrs(&mut self, attrs: &RawAttrs, inner: bool) {
+ let inner = if inner { "!" } else { "" };
+ for attr in &**attrs {
+ wln!(
+ self,
+ "#{}[{}{}]",
+ inner,
+ attr.path,
+ attr.input.as_ref().map(|it| it.to_string()).unwrap_or_default(),
+ );
+ }
+ }
+
+ fn print_attrs_of(&mut self, of: impl Into<AttrOwner>) {
+ if let Some(attrs) = self.tree.attrs.get(&of.into()) {
+ self.print_attrs(attrs, false);
+ }
+ }
+
+ fn print_visibility(&mut self, vis: RawVisibilityId) {
+ match &self.tree[vis] {
+ RawVisibility::Module(path) => w!(self, "pub({}) ", path),
+ RawVisibility::Public => w!(self, "pub "),
+ };
+ }
+
+ fn print_fields(&mut self, fields: &Fields) {
+ match fields {
+ Fields::Record(fields) => {
+ self.whitespace();
+ w!(self, "{{");
+ self.indented(|this| {
+ for field in fields.clone() {
+ let Field { visibility, name, type_ref, ast_id: _ } = &this.tree[field];
+ this.print_attrs_of(field);
+ this.print_visibility(*visibility);
+ w!(this, "{}: ", name);
+ this.print_type_ref(type_ref);
+ wln!(this, ",");
+ }
+ });
+ w!(self, "}}");
+ }
+ Fields::Tuple(fields) => {
+ w!(self, "(");
+ self.indented(|this| {
+ for field in fields.clone() {
+ let Field { visibility, name, type_ref, ast_id: _ } = &this.tree[field];
+ this.print_attrs_of(field);
+ this.print_visibility(*visibility);
+ w!(this, "{}: ", name);
+ this.print_type_ref(type_ref);
+ wln!(this, ",");
+ }
+ });
+ w!(self, ")");
+ }
+ Fields::Unit => {}
+ }
+ }
+
+ fn print_fields_and_where_clause(&mut self, fields: &Fields, params: &GenericParams) {
+ match fields {
+ Fields::Record(_) => {
+ if self.print_where_clause(params) {
+ wln!(self);
+ }
+ self.print_fields(fields);
+ }
+ Fields::Unit => {
+ self.print_where_clause(params);
+ self.print_fields(fields);
+ }
+ Fields::Tuple(_) => {
+ self.print_fields(fields);
+ self.print_where_clause(params);
+ }
+ }
+ }
+
+ fn print_use_tree(&mut self, use_tree: &UseTree) {
+ match &use_tree.kind {
+ UseTreeKind::Single { path, alias } => {
+ w!(self, "{}", path);
+ if let Some(alias) = alias {
+ w!(self, " as {}", alias);
+ }
+ }
+ UseTreeKind::Glob { path } => {
+ if let Some(path) = path {
+ w!(self, "{}::", path);
+ }
+ w!(self, "*");
+ }
+ UseTreeKind::Prefixed { prefix, list } => {
+ if let Some(prefix) = prefix {
+ w!(self, "{}::", prefix);
+ }
+ w!(self, "{{");
+ for (i, tree) in list.iter().enumerate() {
+ if i != 0 {
+ w!(self, ", ");
+ }
+ self.print_use_tree(tree);
+ }
+ w!(self, "}}");
+ }
+ }
+ }
+
+ fn print_mod_item(&mut self, item: ModItem) {
+ self.print_attrs_of(item);
+
+ match item {
+ ModItem::Import(it) => {
+ let Import { visibility, use_tree, ast_id: _ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ w!(self, "use ");
+ self.print_use_tree(use_tree);
+ wln!(self, ";");
+ }
+ ModItem::ExternCrate(it) => {
+ let ExternCrate { name, alias, visibility, ast_id: _ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ w!(self, "extern crate {}", name);
+ if let Some(alias) = alias {
+ w!(self, " as {}", alias);
+ }
+ wln!(self, ";");
+ }
+ ModItem::ExternBlock(it) => {
+ let ExternBlock { abi, ast_id: _, children } = &self.tree[it];
+ w!(self, "extern ");
+ if let Some(abi) = abi {
+ w!(self, "\"{}\" ", abi);
+ }
+ w!(self, "{{");
+ self.indented(|this| {
+ for child in &**children {
+ this.print_mod_item(*child);
+ }
+ });
+ wln!(self, "}}");
+ }
+ ModItem::Function(it) => {
+ let Function {
+ name,
+ visibility,
+ explicit_generic_params,
+ abi,
+ params,
+ ret_type,
+ async_ret_type: _,
+ ast_id: _,
+ flags,
+ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ if flags.contains(FnFlags::HAS_DEFAULT_KW) {
+ w!(self, "default ");
+ }
+ if flags.contains(FnFlags::HAS_CONST_KW) {
+ w!(self, "const ");
+ }
+ if flags.contains(FnFlags::HAS_ASYNC_KW) {
+ w!(self, "async ");
+ }
+ if flags.contains(FnFlags::HAS_UNSAFE_KW) {
+ w!(self, "unsafe ");
+ }
+ if let Some(abi) = abi {
+ w!(self, "extern \"{}\" ", abi);
+ }
+ w!(self, "fn {}", name);
+ self.print_generic_params(explicit_generic_params);
+ w!(self, "(");
+ if !params.is_empty() {
+ self.indented(|this| {
+ for (i, param) in params.clone().enumerate() {
+ this.print_attrs_of(param);
+ match &this.tree[param] {
+ Param::Normal(name, ty) => {
+ match name {
+ Some(name) => w!(this, "{}: ", name),
+ None => w!(this, "_: "),
+ }
+ this.print_type_ref(ty);
+ w!(this, ",");
+ if flags.contains(FnFlags::HAS_SELF_PARAM) && i == 0 {
+ wln!(this, " // self");
+ } else {
+ wln!(this);
+ }
+ }
+ Param::Varargs => {
+ wln!(this, "...");
+ }
+ };
+ }
+ });
+ }
+ w!(self, ") -> ");
+ self.print_type_ref(ret_type);
+ self.print_where_clause(explicit_generic_params);
+ if flags.contains(FnFlags::HAS_BODY) {
+ wln!(self, " {{ ... }}");
+ } else {
+ wln!(self, ";");
+ }
+ }
+ ModItem::Struct(it) => {
+ let Struct { visibility, name, fields, generic_params, ast_id: _ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ w!(self, "struct {}", name);
+ self.print_generic_params(generic_params);
+ self.print_fields_and_where_clause(fields, generic_params);
+ if matches!(fields, Fields::Record(_)) {
+ wln!(self);
+ } else {
+ wln!(self, ";");
+ }
+ }
+ ModItem::Union(it) => {
+ let Union { name, visibility, fields, generic_params, ast_id: _ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ w!(self, "union {}", name);
+ self.print_generic_params(generic_params);
+ self.print_fields_and_where_clause(fields, generic_params);
+ if matches!(fields, Fields::Record(_)) {
+ wln!(self);
+ } else {
+ wln!(self, ";");
+ }
+ }
+ ModItem::Enum(it) => {
+ let Enum { name, visibility, variants, generic_params, ast_id: _ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ w!(self, "enum {}", name);
+ self.print_generic_params(generic_params);
+ self.print_where_clause_and_opening_brace(generic_params);
+ self.indented(|this| {
+ for variant in variants.clone() {
+ let Variant { name, fields, ast_id: _ } = &this.tree[variant];
+ this.print_attrs_of(variant);
+ w!(this, "{}", name);
+ this.print_fields(fields);
+ wln!(this, ",");
+ }
+ });
+ wln!(self, "}}");
+ }
+ ModItem::Const(it) => {
+ let Const { name, visibility, type_ref, ast_id: _ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ w!(self, "const ");
+ match name {
+ Some(name) => w!(self, "{}", name),
+ None => w!(self, "_"),
+ }
+ w!(self, ": ");
+ self.print_type_ref(type_ref);
+ wln!(self, " = _;");
+ }
+ ModItem::Static(it) => {
+ let Static { name, visibility, mutable, type_ref, ast_id: _ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ w!(self, "static ");
+ if *mutable {
+ w!(self, "mut ");
+ }
+ w!(self, "{}: ", name);
+ self.print_type_ref(type_ref);
+ w!(self, " = _;");
+ wln!(self);
+ }
+ ModItem::Trait(it) => {
+ let Trait {
+ name,
+ visibility,
+ is_auto,
+ is_unsafe,
+ items,
+ generic_params,
+ ast_id: _,
+ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ if *is_unsafe {
+ w!(self, "unsafe ");
+ }
+ if *is_auto {
+ w!(self, "auto ");
+ }
+ w!(self, "trait {}", name);
+ self.print_generic_params(generic_params);
- });
++ match items {
++ Some(items) => {
++ self.print_where_clause_and_opening_brace(generic_params);
++ self.indented(|this| {
++ for item in &**items {
++ this.print_mod_item((*item).into());
++ }
++ });
+ }
++ None => {
++ w!(self, " = ");
++ // FIXME: Print the aliased traits
++ self.print_where_clause_and_opening_brace(generic_params);
++ }
++ }
+ wln!(self, "}}");
+ }
+ ModItem::Impl(it) => {
+ let Impl { target_trait, self_ty, is_negative, items, generic_params, ast_id: _ } =
+ &self.tree[it];
+ w!(self, "impl");
+ self.print_generic_params(generic_params);
+ w!(self, " ");
+ if *is_negative {
+ w!(self, "!");
+ }
+ if let Some(tr) = target_trait {
+ self.print_path(&tr.path);
+ w!(self, " for ");
+ }
+ self.print_type_ref(self_ty);
+ self.print_where_clause_and_opening_brace(generic_params);
+ self.indented(|this| {
+ for item in &**items {
+ this.print_mod_item((*item).into());
+ }
+ });
+ wln!(self, "}}");
+ }
+ ModItem::TypeAlias(it) => {
+ let TypeAlias { name, visibility, bounds, type_ref, generic_params, ast_id: _ } =
+ &self.tree[it];
+ self.print_visibility(*visibility);
+ w!(self, "type {}", name);
+ self.print_generic_params(generic_params);
+ if !bounds.is_empty() {
+ w!(self, ": ");
+ self.print_type_bounds(bounds);
+ }
+ if let Some(ty) = type_ref {
+ w!(self, " = ");
+ self.print_type_ref(ty);
+ }
+ self.print_where_clause(generic_params);
+ w!(self, ";");
+ wln!(self);
+ }
+ ModItem::Mod(it) => {
+ let Mod { name, visibility, kind, ast_id: _ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ w!(self, "mod {}", name);
+ match kind {
+ ModKind::Inline { items } => {
+ w!(self, " {{");
+ self.indented(|this| {
+ for item in &**items {
+ this.print_mod_item(*item);
+ }
+ });
+ wln!(self, "}}");
+ }
+ ModKind::Outline => {
+ wln!(self, ";");
+ }
+ }
+ }
+ ModItem::MacroCall(it) => {
+ let MacroCall { path, ast_id: _, expand_to: _ } = &self.tree[it];
+ wln!(self, "{}!(...);", path);
+ }
+ ModItem::MacroRules(it) => {
+ let MacroRules { name, ast_id: _ } = &self.tree[it];
+ wln!(self, "macro_rules! {} {{ ... }}", name);
+ }
+ ModItem::MacroDef(it) => {
+ let MacroDef { name, visibility, ast_id: _ } = &self.tree[it];
+ self.print_visibility(*visibility);
+ wln!(self, "macro {} {{ ... }}", name);
+ }
+ }
+
+ self.blank();
+ }
+
+ fn print_type_ref(&mut self, type_ref: &TypeRef) {
+ print_type_ref(type_ref, self).unwrap();
+ }
+
+ fn print_type_bounds(&mut self, bounds: &[Interned<TypeBound>]) {
+ print_type_bounds(bounds, self).unwrap();
+ }
+
+ fn print_path(&mut self, path: &Path) {
+ print_path(path, self).unwrap();
+ }
+
+ fn print_generic_params(&mut self, params: &GenericParams) {
+ if params.type_or_consts.is_empty() && params.lifetimes.is_empty() {
+ return;
+ }
+
+ w!(self, "<");
+ let mut first = true;
+ for (_, lt) in params.lifetimes.iter() {
+ if !first {
+ w!(self, ", ");
+ }
+ first = false;
+ w!(self, "{}", lt.name);
+ }
+ for (idx, x) in params.type_or_consts.iter() {
+ if !first {
+ w!(self, ", ");
+ }
+ first = false;
+ match x {
+ TypeOrConstParamData::TypeParamData(ty) => match &ty.name {
+ Some(name) => w!(self, "{}", name),
+ None => w!(self, "_anon_{}", idx.into_raw()),
+ },
+ TypeOrConstParamData::ConstParamData(konst) => {
+ w!(self, "const {}: ", konst.name);
+ self.print_type_ref(&konst.ty);
+ }
+ }
+ }
+ w!(self, ">");
+ }
+
+ fn print_where_clause_and_opening_brace(&mut self, params: &GenericParams) {
+ if self.print_where_clause(params) {
+ w!(self, "\n{{");
+ } else {
+ self.whitespace();
+ w!(self, "{{");
+ }
+ }
+
+ fn print_where_clause(&mut self, params: &GenericParams) -> bool {
+ if params.where_predicates.is_empty() {
+ return false;
+ }
+
+ w!(self, "\nwhere");
+ self.indented(|this| {
+ for (i, pred) in params.where_predicates.iter().enumerate() {
+ if i != 0 {
+ wln!(this, ",");
+ }
+
+ let (target, bound) = match pred {
+ WherePredicate::TypeBound { target, bound } => (target, bound),
+ WherePredicate::Lifetime { target, bound } => {
+ wln!(this, "{}: {},", target.name, bound.name);
+ continue;
+ }
+ WherePredicate::ForLifetime { lifetimes, target, bound } => {
+ w!(this, "for<");
+ for (i, lt) in lifetimes.iter().enumerate() {
+ if i != 0 {
+ w!(this, ", ");
+ }
+ w!(this, "{}", lt);
+ }
+ w!(this, "> ");
+ (target, bound)
+ }
+ };
+
+ match target {
+ WherePredicateTypeTarget::TypeRef(ty) => this.print_type_ref(ty),
+ WherePredicateTypeTarget::TypeOrConstParam(id) => {
+ match ¶ms.type_or_consts[*id].name() {
+ Some(name) => w!(this, "{}", name),
+ None => w!(this, "_anon_{}", id.into_raw()),
+ }
+ }
+ }
+ w!(this, ": ");
+ this.print_type_bounds(std::slice::from_ref(bound));
+ }
+ });
+ true
+ }
+}
+
+impl<'a> Write for Printer<'a> {
+ fn write_str(&mut self, s: &str) -> fmt::Result {
+ for line in s.split_inclusive('\n') {
+ if self.needs_indent {
+ match self.buf.chars().last() {
+ Some('\n') | None => {}
+ _ => self.buf.push('\n'),
+ }
+ self.buf.push_str(&" ".repeat(self.indent_level));
+ self.needs_indent = false;
+ }
+
+ self.buf.push_str(line);
+ self.needs_indent = line.ends_with('\n');
+ }
+
+ Ok(())
+ }
+}
--- /dev/null
- |;
- |92|;
- |let x = 92|;
+//! Test that `$var:expr` captures function correctly.
+
+use expect_test::expect;
+
+use crate::macro_expansion_tests::check;
+
+#[test]
+fn unary_minus_is_a_literal() {
+ check(
+ r#"
+macro_rules! m { ($x:literal) => (literal!();); ($x:tt) => (not_a_literal!();); }
+m!(92);
+m!(-92);
+m!(-9.2);
+m!(--92);
+"#,
+ expect![[r#"
+macro_rules! m { ($x:literal) => (literal!();); ($x:tt) => (not_a_literal!();); }
+literal!();
+literal!();
+literal!();
+/* error: leftover tokens */not_a_literal!();
+"#]],
+ )
+}
+
+#[test]
+fn test_expand_bad_literal() {
+ check(
+ r#"
+macro_rules! m { ($i:literal) => {}; }
+m!(&k");
+"#,
+ expect![[r#"
+macro_rules! m { ($i:literal) => {}; }
+/* error: Failed to lower macro args to token tree */"#]],
+ );
+}
+
+#[test]
+fn test_empty_comments() {
+ check(
+ r#"
+macro_rules! m{ ($fmt:expr) => (); }
+m!(/**/);
+"#,
+ expect![[r#"
+macro_rules! m{ ($fmt:expr) => (); }
+/* error: expected Expr */
+"#]],
+ );
+}
+
+#[test]
+fn asi() {
+ // Thanks, Christopher!
+ //
+ // https://internals.rust-lang.org/t/understanding-decisions-behind-semicolons/15181/29
+ check(
+ r#"
+macro_rules! asi { ($($stmt:stmt)*) => ($($stmt)*); }
+
+fn main() {
+ asi! {
+ let a = 2
+ let b = 5
+ drop(b-a)
+ println!("{}", a+b)
+ }
+}
+"#,
+ expect![[r#"
+macro_rules! asi { ($($stmt:stmt)*) => ($($stmt)*); }
+
+fn main() {
+ let a = 2let b = 5drop(b-a)println!("{}", a+b)
+}
+"#]],
+ )
+}
+
+#[test]
+fn stmt_boundaries() {
+ // FIXME: this actually works OK under rustc.
+ check(
+ r#"
+macro_rules! m {
+ ($($s:stmt)*) => (stringify!($($s |)*);)
+}
+m!(;;92;let x = 92; loop {};);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($($s:stmt)*) => (stringify!($($s |)*);)
+}
+stringify!(;
- |;
++| ;
++|92| ;
++|let x = 92| ;
+|loop {}
- stringify!(.. .. ..|);
++| ;
+|);
+"#]],
+ );
+}
+
+#[test]
+fn range_patterns() {
+ // FIXME: rustc thinks there are three patterns here, not one.
+ check(
+ r#"
+macro_rules! m {
+ ($($p:pat)*) => (stringify!($($p |)*);)
+}
+m!(.. .. ..);
+"#,
+ expect![[r#"
+macro_rules! m {
+ ($($p:pat)*) => (stringify!($($p |)*);)
+}
++stringify!(.. .. .. |);
+"#]],
+ );
+}
+
+#[test]
+fn trailing_vis() {
+ check(
+ r#"
+macro_rules! m { ($($i:ident)? $vis:vis) => () }
+m!(x pub);
+"#,
+ expect![[r#"
+macro_rules! m { ($($i:ident)? $vis:vis) => () }
+
+"#]],
+ )
+}
--- /dev/null
- expect![[r##"
+//! Tests for user-defined procedural macros.
+//!
+//! Note `//- proc_macros: identity` fixture metas in tests -- we don't use real
+//! proc-macros here, as that would be slow. Instead, we use several hard-coded
+//! in-memory macros.
+use expect_test::expect;
+
+use crate::macro_expansion_tests::check;
+
+#[test]
+fn attribute_macro_attr_censoring() {
+ cov_mark::check!(attribute_macro_attr_censoring);
+ check(
+ r#"
+//- proc_macros: identity
+#[attr1] #[proc_macros::identity] #[attr2]
+struct S;
+"#,
+ expect![[r##"
+#[attr1] #[proc_macros::identity] #[attr2]
+struct S;
+
+#[attr1]
+#[attr2] struct S;"##]],
+ );
+}
+
+#[test]
+fn derive_censoring() {
+ cov_mark::check!(derive_censoring);
+ check(
+ r#"
+//- proc_macros: derive_identity
+//- minicore:derive
+#[attr1]
+#[derive(Foo)]
+#[derive(proc_macros::DeriveIdentity)]
+#[derive(Bar)]
+#[attr2]
+struct S;
+"#,
+ expect![[r##"
+#[attr1]
+#[derive(Foo)]
+#[derive(proc_macros::DeriveIdentity)]
+#[derive(Bar)]
+#[attr2]
+struct S;
+
+#[attr1]
+#[derive(Bar)]
+#[attr2] struct S;"##]],
+ );
+}
+
+#[test]
+fn attribute_macro_syntax_completion_1() {
+ // this is just the case where the input is actually valid
+ check(
+ r#"
+//- proc_macros: identity_when_valid
+#[proc_macros::identity_when_valid]
+fn foo() { bar.baz(); blub }
+"#,
+ expect![[r##"
+#[proc_macros::identity_when_valid]
+fn foo() { bar.baz(); blub }
+
+fn foo() {
+ bar.baz();
+ blub
+}"##]],
+ );
+}
+
+#[test]
+fn attribute_macro_syntax_completion_2() {
+ // common case of dot completion while typing
+ check(
+ r#"
+//- proc_macros: identity_when_valid
+#[proc_macros::identity_when_valid]
+fn foo() { bar.; blub }
+"#,
- bar.;
++ expect![[r#"
+#[proc_macros::identity_when_valid]
+fn foo() { bar.; blub }
+
+fn foo() {
- }"##]],
++ bar. ;
+ blub
++}"#]],
+ );
+}
+
+#[test]
+fn float_parsing_panic() {
+ // Regression test for https://github.com/rust-lang/rust-analyzer/issues/12211
+ check(
+ r#"
+//- proc_macros: identity
+macro_rules! id {
+ ($($t:tt)*) => {
+ $($t)*
+ };
+}
+id /*+errors*/! {
+ #[proc_macros::identity]
+ impl Foo for WrapBj {
+ async fn foo(&self) {
+ self.0. id().await;
+ }
+ }
+}
+"#,
+ expect![[r##"
+macro_rules! id {
+ ($($t:tt)*) => {
+ $($t)*
+ };
+}
+/* parse error: expected SEMICOLON */
+#[proc_macros::identity] impl Foo for WrapBj {
+ async fn foo(&self ) {
+ self .0.id().await ;
+ }
+}
+"##]],
+ );
+}
--- /dev/null
+//! The core of the module-level name resolution algorithm.
+//!
+//! `DefCollector::collect` contains the fixed-point iteration loop which
+//! resolves imports and expands macros.
+
+use std::{iter, mem};
+
+use base_db::{CrateId, Edition, FileId};
+use cfg::{CfgExpr, CfgOptions};
+use either::Either;
+use hir_expand::{
+ ast_id_map::FileAstId,
+ builtin_attr_macro::find_builtin_attr,
+ builtin_derive_macro::find_builtin_derive,
+ builtin_fn_macro::find_builtin_macro,
+ name::{name, AsName, Name},
+ proc_macro::ProcMacroExpander,
+ ExpandTo, HirFileId, InFile, MacroCallId, MacroCallKind, MacroCallLoc, MacroDefId,
+ MacroDefKind,
+};
+use itertools::{izip, Itertools};
+use la_arena::Idx;
+use limit::Limit;
+use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::always;
+use syntax::{ast, SmolStr};
+
+use crate::{
+ attr::{Attr, AttrId, Attrs},
+ attr_macro_as_call_id,
+ db::DefDatabase,
+ derive_macro_as_call_id,
+ item_scope::{ImportType, PerNsGlobImports},
+ item_tree::{
+ self, Fields, FileItemTreeId, ImportKind, ItemTree, ItemTreeId, ItemTreeNode, MacroCall,
+ MacroDef, MacroRules, Mod, ModItem, ModKind, TreeId,
+ },
+ macro_call_as_call_id, macro_id_to_def_id,
+ nameres::{
+ diagnostics::DefDiagnostic,
+ mod_resolution::ModDir,
+ path_resolution::ReachedFixedPoint,
+ proc_macro::{ProcMacroDef, ProcMacroKind},
+ BuiltinShadowMode, DefMap, ModuleData, ModuleOrigin, ResolveMode,
+ },
+ path::{ImportAlias, ModPath, PathKind},
+ per_ns::PerNs,
+ visibility::{RawVisibility, Visibility},
+ AdtId, AstId, AstIdWithPath, ConstLoc, EnumLoc, EnumVariantId, ExternBlockLoc, FunctionId,
+ FunctionLoc, ImplLoc, Intern, ItemContainerId, LocalModuleId, Macro2Id, Macro2Loc,
+ MacroExpander, MacroId, MacroRulesId, MacroRulesLoc, ModuleDefId, ModuleId, ProcMacroId,
+ ProcMacroLoc, StaticLoc, StructLoc, TraitLoc, TypeAliasLoc, UnionLoc, UnresolvedMacro,
+};
+
+static GLOB_RECURSION_LIMIT: Limit = Limit::new(100);
+static EXPANSION_DEPTH_LIMIT: Limit = Limit::new(128);
+static FIXED_POINT_LIMIT: Limit = Limit::new(8192);
+
+pub(super) fn collect_defs(db: &dyn DefDatabase, mut def_map: DefMap, tree_id: TreeId) -> DefMap {
+ let crate_graph = db.crate_graph();
+
+ let mut deps = FxHashMap::default();
+ // populate external prelude and dependency list
+ let krate = &crate_graph[def_map.krate];
+ for dep in &krate.dependencies {
+ tracing::debug!("crate dep {:?} -> {:?}", dep.name, dep.crate_id);
+ let dep_def_map = db.crate_def_map(dep.crate_id);
+ let dep_root = dep_def_map.module_id(dep_def_map.root);
+
+ deps.insert(dep.as_name(), dep_root.into());
+
+ if dep.is_prelude() && !tree_id.is_block() {
+ def_map.extern_prelude.insert(dep.as_name(), dep_root);
+ }
+ }
+
+ let cfg_options = &krate.cfg_options;
+ let proc_macros = match &krate.proc_macro {
+ Ok(proc_macros) => {
+ proc_macros
+ .iter()
+ .enumerate()
+ .map(|(idx, it)| {
+ // FIXME: a hacky way to create a Name from string.
+ let name = tt::Ident { text: it.name.clone(), id: tt::TokenId::unspecified() };
+ (
+ name.as_name(),
+ ProcMacroExpander::new(def_map.krate, base_db::ProcMacroId(idx as u32)),
+ )
+ })
+ .collect()
+ }
+ Err(e) => {
+ def_map.proc_macro_loading_error = Some(e.clone().into_boxed_str());
+ Vec::new()
+ }
+ };
+ let is_proc_macro = krate.is_proc_macro;
+
+ let mut collector = DefCollector {
+ db,
+ def_map,
+ deps,
+ glob_imports: FxHashMap::default(),
+ unresolved_imports: Vec::new(),
+ indeterminate_imports: Vec::new(),
+ unresolved_macros: Vec::new(),
+ mod_dirs: FxHashMap::default(),
+ cfg_options,
+ proc_macros,
+ from_glob_import: Default::default(),
+ skip_attrs: Default::default(),
+ is_proc_macro,
+ };
+ if tree_id.is_block() {
+ collector.seed_with_inner(tree_id);
+ } else {
+ collector.seed_with_top_level();
+ }
+ collector.collect();
+ let mut def_map = collector.finish();
+ def_map.shrink_to_fit();
+ def_map
+}
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+enum PartialResolvedImport {
+ /// None of the namespaces is resolved
+ Unresolved,
+ /// One of the namespaces is resolved
+ Indeterminate(PerNs),
+ /// All namespaces are resolved, OR it comes from other crate
+ Resolved(PerNs),
+}
+
+impl PartialResolvedImport {
+ fn namespaces(self) -> PerNs {
+ match self {
+ PartialResolvedImport::Unresolved => PerNs::none(),
+ PartialResolvedImport::Indeterminate(ns) | PartialResolvedImport::Resolved(ns) => ns,
+ }
+ }
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+enum ImportSource {
+ Import { id: ItemTreeId<item_tree::Import>, use_tree: Idx<ast::UseTree> },
+ ExternCrate(ItemTreeId<item_tree::ExternCrate>),
+}
+
+#[derive(Debug, Eq, PartialEq)]
+struct Import {
+ path: ModPath,
+ alias: Option<ImportAlias>,
+ visibility: RawVisibility,
+ kind: ImportKind,
+ is_prelude: bool,
+ is_extern_crate: bool,
+ is_macro_use: bool,
+ source: ImportSource,
+}
+
+impl Import {
+ fn from_use(
+ db: &dyn DefDatabase,
+ krate: CrateId,
+ tree: &ItemTree,
+ id: ItemTreeId<item_tree::Import>,
+ ) -> Vec<Self> {
+ let it = &tree[id.value];
+ let attrs = &tree.attrs(db, krate, ModItem::from(id.value).into());
+ let visibility = &tree[it.visibility];
+ let is_prelude = attrs.by_key("prelude_import").exists();
+
+ let mut res = Vec::new();
+ it.use_tree.expand(|idx, path, kind, alias| {
+ res.push(Self {
+ path,
+ alias,
+ visibility: visibility.clone(),
+ kind,
+ is_prelude,
+ is_extern_crate: false,
+ is_macro_use: false,
+ source: ImportSource::Import { id, use_tree: idx },
+ });
+ });
+ res
+ }
+
+ fn from_extern_crate(
+ db: &dyn DefDatabase,
+ krate: CrateId,
+ tree: &ItemTree,
+ id: ItemTreeId<item_tree::ExternCrate>,
+ ) -> Self {
+ let it = &tree[id.value];
+ let attrs = &tree.attrs(db, krate, ModItem::from(id.value).into());
+ let visibility = &tree[it.visibility];
+ Self {
+ path: ModPath::from_segments(PathKind::Plain, iter::once(it.name.clone())),
+ alias: it.alias.clone(),
+ visibility: visibility.clone(),
+ kind: ImportKind::Plain,
+ is_prelude: false,
+ is_extern_crate: true,
+ is_macro_use: attrs.by_key("macro_use").exists(),
+ source: ImportSource::ExternCrate(id),
+ }
+ }
+}
+
+#[derive(Debug, Eq, PartialEq)]
+struct ImportDirective {
+ /// The module this import directive is in.
+ module_id: LocalModuleId,
+ import: Import,
+ status: PartialResolvedImport,
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+struct MacroDirective {
+ module_id: LocalModuleId,
+ depth: usize,
+ kind: MacroDirectiveKind,
+ container: ItemContainerId,
+}
+
+#[derive(Clone, Debug, Eq, PartialEq)]
+enum MacroDirectiveKind {
+ FnLike { ast_id: AstIdWithPath<ast::MacroCall>, expand_to: ExpandTo },
+ Derive { ast_id: AstIdWithPath<ast::Adt>, derive_attr: AttrId, derive_pos: usize },
+ Attr { ast_id: AstIdWithPath<ast::Item>, attr: Attr, mod_item: ModItem, tree: TreeId },
+}
+
+/// Walks the tree of module recursively
+struct DefCollector<'a> {
+ db: &'a dyn DefDatabase,
+ def_map: DefMap,
+ deps: FxHashMap<Name, ModuleId>,
+ glob_imports: FxHashMap<LocalModuleId, Vec<(LocalModuleId, Visibility)>>,
+ unresolved_imports: Vec<ImportDirective>,
+ indeterminate_imports: Vec<ImportDirective>,
+ unresolved_macros: Vec<MacroDirective>,
+ mod_dirs: FxHashMap<LocalModuleId, ModDir>,
+ cfg_options: &'a CfgOptions,
+ /// List of procedural macros defined by this crate. This is read from the dynamic library
+ /// built by the build system, and is the list of proc. macros we can actually expand. It is
+ /// empty when proc. macro support is disabled (in which case we still do name resolution for
+ /// them).
+ proc_macros: Vec<(Name, ProcMacroExpander)>,
+ is_proc_macro: bool,
+ from_glob_import: PerNsGlobImports,
+ /// If we fail to resolve an attribute on a `ModItem`, we fall back to ignoring the attribute.
+ /// This map is used to skip all attributes up to and including the one that failed to resolve,
+ /// in order to not expand them twice.
+ ///
+ /// This also stores the attributes to skip when we resolve derive helpers and non-macro
+ /// non-builtin attributes in general.
+ skip_attrs: FxHashMap<InFile<ModItem>, AttrId>,
+}
+
+impl DefCollector<'_> {
+ fn seed_with_top_level(&mut self) {
+ let _p = profile::span("seed_with_top_level");
+
+ let file_id = self.db.crate_graph()[self.def_map.krate].root_file_id;
+ let item_tree = self.db.file_item_tree(file_id.into());
+ let module_id = self.def_map.root;
+
+ let attrs = item_tree.top_level_attrs(self.db, self.def_map.krate);
+ if attrs.cfg().map_or(true, |cfg| self.cfg_options.check(&cfg) != Some(false)) {
+ self.inject_prelude(&attrs);
+
+ // Process other crate-level attributes.
+ for attr in &*attrs {
+ let attr_name = match attr.path.as_ident() {
+ Some(name) => name,
+ None => continue,
+ };
+
+ if *attr_name == hir_expand::name![recursion_limit] {
+ if let Some(limit) = attr.string_value() {
+ if let Ok(limit) = limit.parse() {
+ self.def_map.recursion_limit = Some(limit);
+ }
+ }
+ continue;
+ }
+
+ if *attr_name == hir_expand::name![crate_type] {
+ if let Some("proc-macro") = attr.string_value().map(SmolStr::as_str) {
+ self.is_proc_macro = true;
+ }
+ continue;
+ }
+
+ if *attr_name == hir_expand::name![feature] {
+ let features =
+ attr.parse_path_comma_token_tree().into_iter().flatten().filter_map(
+ |feat| match feat.segments() {
+ [name] => Some(name.to_smol_str()),
+ _ => None,
+ },
+ );
+ self.def_map.unstable_features.extend(features);
+ }
+
+ let attr_is_register_like = *attr_name == hir_expand::name![register_attr]
+ || *attr_name == hir_expand::name![register_tool];
+ if !attr_is_register_like {
+ continue;
+ }
+
+ let registered_name = match attr.single_ident_value() {
+ Some(ident) => ident.as_name(),
+ _ => continue,
+ };
+
+ if *attr_name == hir_expand::name![register_attr] {
+ self.def_map.registered_attrs.push(registered_name.to_smol_str());
+ cov_mark::hit!(register_attr);
+ } else {
+ self.def_map.registered_tools.push(registered_name.to_smol_str());
+ cov_mark::hit!(register_tool);
+ }
+ }
+
+ ModCollector {
+ def_collector: self,
+ macro_depth: 0,
+ module_id,
+ tree_id: TreeId::new(file_id.into(), None),
+ item_tree: &item_tree,
+ mod_dir: ModDir::root(),
+ }
+ .collect_in_top_module(item_tree.top_level_items());
+ }
+ }
+
+ fn seed_with_inner(&mut self, tree_id: TreeId) {
+ let item_tree = tree_id.item_tree(self.db);
+ let module_id = self.def_map.root;
+
+ let is_cfg_enabled = item_tree
+ .top_level_attrs(self.db, self.def_map.krate)
+ .cfg()
+ .map_or(true, |cfg| self.cfg_options.check(&cfg) != Some(false));
+ if is_cfg_enabled {
+ ModCollector {
+ def_collector: self,
+ macro_depth: 0,
+ module_id,
+ tree_id,
+ item_tree: &item_tree,
+ mod_dir: ModDir::root(),
+ }
+ .collect_in_top_module(item_tree.top_level_items());
+ }
+ }
+
+ fn resolution_loop(&mut self) {
+ let _p = profile::span("DefCollector::resolution_loop");
+
+ // main name resolution fixed-point loop.
+ let mut i = 0;
+ 'resolve_attr: loop {
+ 'resolve_macros: loop {
+ self.db.unwind_if_cancelled();
+
+ {
+ let _p = profile::span("resolve_imports loop");
+
+ 'resolve_imports: loop {
+ if self.resolve_imports() == ReachedFixedPoint::Yes {
+ break 'resolve_imports;
+ }
+ }
+ }
+ if self.resolve_macros() == ReachedFixedPoint::Yes {
+ break 'resolve_macros;
+ }
+
+ i += 1;
+ if FIXED_POINT_LIMIT.check(i).is_err() {
+ tracing::error!("name resolution is stuck");
+ break 'resolve_attr;
+ }
+ }
+
+ if self.reseed_with_unresolved_attribute() == ReachedFixedPoint::Yes {
+ break 'resolve_attr;
+ }
+ }
+ }
+
+ fn collect(&mut self) {
+ let _p = profile::span("DefCollector::collect");
+
+ self.resolution_loop();
+
+ // Resolve all indeterminately resolved imports again, as expanded macros
+ // may have introduced new names that shadow partially resolved imports.
+ // FIXME: We maybe could skip this, if we handle the indeterminate imports in `resolve_imports`
+ // correctly
+ let partial_resolved = self.indeterminate_imports.drain(..).map(|directive| {
+ ImportDirective { status: PartialResolvedImport::Unresolved, ..directive }
+ });
+ self.unresolved_imports.extend(partial_resolved);
+ self.resolve_imports();
+
+ let unresolved_imports = mem::take(&mut self.unresolved_imports);
+ // show unresolved imports in completion, etc
+ for directive in &unresolved_imports {
+ self.record_resolved_import(directive);
+ }
+ self.unresolved_imports = unresolved_imports;
+
+ if self.is_proc_macro {
+ // A crate exporting procedural macros is not allowed to export anything else.
+ //
+ // Additionally, while the proc macro entry points must be `pub`, they are not publicly
+ // exported in type/value namespace. This function reduces the visibility of all items
+ // in the crate root that aren't proc macros.
+ let root = self.def_map.root;
+ let module_id = self.def_map.module_id(root);
+ let root = &mut self.def_map.modules[root];
+ root.scope.censor_non_proc_macros(module_id);
+ }
+ }
+
+ /// When the fixed-point loop reaches a stable state, we might still have
+ /// some unresolved attributes left over. This takes one of them, and feeds
+ /// the item it's applied to back into name resolution.
+ ///
+ /// This effectively ignores the fact that the macro is there and just treats the items as
+ /// normal code.
+ ///
+ /// This improves UX for unresolved attributes, and replicates the
+ /// behavior before we supported proc. attribute macros.
+ fn reseed_with_unresolved_attribute(&mut self) -> ReachedFixedPoint {
+ cov_mark::hit!(unresolved_attribute_fallback);
+
+ let unresolved_attr =
+ self.unresolved_macros.iter().enumerate().find_map(|(idx, directive)| match &directive
+ .kind
+ {
+ MacroDirectiveKind::Attr { ast_id, mod_item, attr, tree } => {
+ self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
+ directive.module_id,
+ MacroCallKind::Attr {
+ ast_id: ast_id.ast_id,
+ attr_args: Default::default(),
+ invoc_attr_index: attr.id.ast_index,
+ is_derive: false,
+ },
+ attr.path().clone(),
+ ));
+
+ self.skip_attrs.insert(ast_id.ast_id.with_value(*mod_item), attr.id);
+
+ Some((idx, directive, *mod_item, *tree))
+ }
+ _ => None,
+ });
+
+ match unresolved_attr {
+ Some((pos, &MacroDirective { module_id, depth, container, .. }, mod_item, tree_id)) => {
+ let item_tree = &tree_id.item_tree(self.db);
+ let mod_dir = self.mod_dirs[&module_id].clone();
+ ModCollector {
+ def_collector: self,
+ macro_depth: depth,
+ module_id,
+ tree_id,
+ item_tree,
+ mod_dir,
+ }
+ .collect(&[mod_item], container);
+
+ self.unresolved_macros.swap_remove(pos);
+ // Continue name resolution with the new data.
+ ReachedFixedPoint::No
+ }
+ None => ReachedFixedPoint::Yes,
+ }
+ }
+
+ fn inject_prelude(&mut self, crate_attrs: &Attrs) {
+ // See compiler/rustc_builtin_macros/src/standard_library_imports.rs
+
+ if crate_attrs.by_key("no_core").exists() {
+ // libcore does not get a prelude.
+ return;
+ }
+
+ let krate = if crate_attrs.by_key("no_std").exists() {
+ name![core]
+ } else {
+ let std = name![std];
+ if self.def_map.extern_prelude().any(|(name, _)| *name == std) {
+ std
+ } else {
+ // If `std` does not exist for some reason, fall back to core. This mostly helps
+ // keep r-a's own tests minimal.
+ name![core]
+ }
+ };
+
+ let edition = match self.def_map.edition {
+ Edition::Edition2015 => name![rust_2015],
+ Edition::Edition2018 => name![rust_2018],
+ Edition::Edition2021 => name![rust_2021],
+ };
+
+ let path_kind = match self.def_map.edition {
+ Edition::Edition2015 => PathKind::Plain,
+ _ => PathKind::Abs,
+ };
+ let path =
+ ModPath::from_segments(path_kind, [krate.clone(), name![prelude], edition].into_iter());
+ // Fall back to the older `std::prelude::v1` for compatibility with Rust <1.52.0
+ // FIXME remove this fallback
+ let fallback_path =
+ ModPath::from_segments(path_kind, [krate, name![prelude], name![v1]].into_iter());
+
+ for path in &[path, fallback_path] {
+ let (per_ns, _) = self.def_map.resolve_path(
+ self.db,
+ self.def_map.root,
+ path,
+ BuiltinShadowMode::Other,
+ );
+
+ match per_ns.types {
+ Some((ModuleDefId::ModuleId(m), _)) => {
+ self.def_map.prelude = Some(m);
+ break;
+ }
+ types => {
+ tracing::debug!(
+ "could not resolve prelude path `{}` to module (resolved to {:?})",
+ path,
+ types
+ );
+ }
+ }
+ }
+ }
+
+ /// Adds a definition of procedural macro `name` to the root module.
+ ///
+ /// # Notes on procedural macro resolution
+ ///
+ /// Procedural macro functionality is provided by the build system: It has to build the proc
+ /// macro and pass the resulting dynamic library to rust-analyzer.
+ ///
+ /// When procedural macro support is enabled, the list of proc macros exported by a crate is
+ /// known before we resolve names in the crate. This list is stored in `self.proc_macros` and is
+ /// derived from the dynamic library.
+ ///
+ /// However, we *also* would like to be able to at least *resolve* macros on our own, without
+ /// help by the build system. So, when the macro isn't found in `self.proc_macros`, we instead
+ /// use a dummy expander that always errors. This comes with the drawback of macros potentially
+ /// going out of sync with what the build system sees (since we resolve using VFS state, but
+ /// Cargo builds only on-disk files). We could and probably should add diagnostics for that.
+ fn export_proc_macro(
+ &mut self,
+ def: ProcMacroDef,
+ id: ItemTreeId<item_tree::Function>,
+ fn_id: FunctionId,
+ module_id: ModuleId,
+ ) {
+ let kind = def.kind.to_basedb_kind();
+ let (expander, kind) = match self.proc_macros.iter().find(|(n, _)| n == &def.name) {
+ Some(&(_, expander)) => (expander, kind),
+ None => (ProcMacroExpander::dummy(self.def_map.krate), kind),
+ };
+
+ let proc_macro_id =
+ ProcMacroLoc { container: module_id, id, expander, kind }.intern(self.db);
+ self.define_proc_macro(def.name.clone(), proc_macro_id);
+ if let ProcMacroKind::CustomDerive { helpers } = def.kind {
+ self.def_map
+ .exported_derives
+ .insert(macro_id_to_def_id(self.db, proc_macro_id.into()), helpers);
+ }
+ self.def_map.fn_proc_macro_mapping.insert(fn_id, proc_macro_id);
+ }
+
+ /// Define a macro with `macro_rules`.
+ ///
+ /// It will define the macro in legacy textual scope, and if it has `#[macro_export]`,
+ /// then it is also defined in the root module scope.
+ /// You can `use` or invoke it by `crate::macro_name` anywhere, before or after the definition.
+ ///
+ /// It is surprising that the macro will never be in the current module scope.
+ /// This code fails with "unresolved import/macro",
+ /// ```rust,compile_fail
+ /// mod m { macro_rules! foo { () => {} } }
+ /// use m::foo as bar;
+ /// ```
+ ///
+ /// ```rust,compile_fail
+ /// macro_rules! foo { () => {} }
+ /// self::foo!();
+ /// crate::foo!();
+ /// ```
+ ///
+ /// Well, this code compiles, because the plain path `foo` in `use` is searched
+ /// in the legacy textual scope only.
+ /// ```rust
+ /// macro_rules! foo { () => {} }
+ /// use foo as bar;
+ /// ```
+ fn define_macro_rules(
+ &mut self,
+ module_id: LocalModuleId,
+ name: Name,
+ macro_: MacroRulesId,
+ export: bool,
+ ) {
+ // Textual scoping
+ self.define_legacy_macro(module_id, name.clone(), macro_.into());
+
+ // Module scoping
+ // In Rust, `#[macro_export]` macros are unconditionally visible at the
+ // crate root, even if the parent modules is **not** visible.
+ if export {
+ let module_id = self.def_map.root;
+ self.def_map.modules[module_id].scope.declare(macro_.into());
+ self.update(
+ module_id,
+ &[(Some(name), PerNs::macros(macro_.into(), Visibility::Public))],
+ Visibility::Public,
+ ImportType::Named,
+ );
+ }
+ }
+
+ /// Define a legacy textual scoped macro in module
+ ///
+ /// We use a map `legacy_macros` to store all legacy textual scoped macros visible per module.
+ /// It will clone all macros from parent legacy scope, whose definition is prior to
+ /// the definition of current module.
+ /// And also, `macro_use` on a module will import all legacy macros visible inside to
+ /// current legacy scope, with possible shadowing.
+ fn define_legacy_macro(&mut self, module_id: LocalModuleId, name: Name, mac: MacroId) {
+ // Always shadowing
+ self.def_map.modules[module_id].scope.define_legacy_macro(name, mac);
+ }
+
+ /// Define a macro 2.0 macro
+ ///
+ /// The scope of a macro 2.0 macro is equal to that of a normal function
+ fn define_macro_def(
+ &mut self,
+ module_id: LocalModuleId,
+ name: Name,
+ macro_: Macro2Id,
+ vis: &RawVisibility,
+ ) {
+ let vis =
+ self.def_map.resolve_visibility(self.db, module_id, vis).unwrap_or(Visibility::Public);
+ self.def_map.modules[module_id].scope.declare(macro_.into());
+ self.update(
+ module_id,
+ &[(Some(name), PerNs::macros(macro_.into(), Visibility::Public))],
+ vis,
+ ImportType::Named,
+ );
+ }
+
+ /// Define a proc macro
+ ///
+ /// A proc macro is similar to a normal macro in scope, but it is not visible in the legacy textual scope,
+ /// And unconditionally exported.
+ fn define_proc_macro(&mut self, name: Name, macro_: ProcMacroId) {
+ let module_id = self.def_map.root;
+ self.def_map.modules[module_id].scope.declare(macro_.into());
+ self.update(
+ module_id,
+ &[(Some(name), PerNs::macros(macro_.into(), Visibility::Public))],
+ Visibility::Public,
+ ImportType::Named,
+ );
+ }
+
+ /// Import macros from `#[macro_use] extern crate`.
+ fn import_macros_from_extern_crate(
+ &mut self,
+ current_module_id: LocalModuleId,
+ extern_crate: &item_tree::ExternCrate,
+ ) {
+ tracing::debug!(
+ "importing macros from extern crate: {:?} ({:?})",
+ extern_crate,
+ self.def_map.edition,
+ );
+
+ if let Some(m) = self.resolve_extern_crate(&extern_crate.name) {
+ if m == self.def_map.module_id(current_module_id) {
+ cov_mark::hit!(ignore_macro_use_extern_crate_self);
+ return;
+ }
+
+ cov_mark::hit!(macro_rules_from_other_crates_are_visible_with_macro_use);
+ self.import_all_macros_exported(current_module_id, m.krate);
+ }
+ }
+
+ /// Import all exported macros from another crate
+ ///
+ /// Exported macros are just all macros in the root module scope.
+ /// Note that it contains not only all `#[macro_export]` macros, but also all aliases
+ /// created by `use` in the root module, ignoring the visibility of `use`.
+ fn import_all_macros_exported(&mut self, current_module_id: LocalModuleId, krate: CrateId) {
+ let def_map = self.db.crate_def_map(krate);
+ for (name, def) in def_map[def_map.root].scope.macros() {
+ // `#[macro_use]` brings macros into legacy scope. Yes, even non-`macro_rules!` macros.
+ self.define_legacy_macro(current_module_id, name.clone(), def);
+ }
+ }
+
+ /// Tries to resolve every currently unresolved import.
+ fn resolve_imports(&mut self) -> ReachedFixedPoint {
+ let mut res = ReachedFixedPoint::Yes;
+ let imports = mem::take(&mut self.unresolved_imports);
+
+ self.unresolved_imports = imports
+ .into_iter()
+ .filter_map(|mut directive| {
+ directive.status = self.resolve_import(directive.module_id, &directive.import);
+ match directive.status {
+ PartialResolvedImport::Indeterminate(_) => {
+ self.record_resolved_import(&directive);
+ self.indeterminate_imports.push(directive);
+ res = ReachedFixedPoint::No;
+ None
+ }
+ PartialResolvedImport::Resolved(_) => {
+ self.record_resolved_import(&directive);
+ res = ReachedFixedPoint::No;
+ None
+ }
+ PartialResolvedImport::Unresolved => Some(directive),
+ }
+ })
+ .collect();
+ res
+ }
+
+ fn resolve_import(&self, module_id: LocalModuleId, import: &Import) -> PartialResolvedImport {
+ let _p = profile::span("resolve_import").detail(|| format!("{}", import.path));
+ tracing::debug!("resolving import: {:?} ({:?})", import, self.def_map.edition);
+ if import.is_extern_crate {
+ let name = import
+ .path
+ .as_ident()
+ .expect("extern crate should have been desugared to one-element path");
+
+ let res = self.resolve_extern_crate(name);
+
+ match res {
+ Some(res) => {
+ PartialResolvedImport::Resolved(PerNs::types(res.into(), Visibility::Public))
+ }
+ None => PartialResolvedImport::Unresolved,
+ }
+ } else {
+ let res = self.def_map.resolve_path_fp_with_macro(
+ self.db,
+ ResolveMode::Import,
+ module_id,
+ &import.path,
+ BuiltinShadowMode::Module,
+ );
+
+ let def = res.resolved_def;
+ if res.reached_fixedpoint == ReachedFixedPoint::No || def.is_none() {
+ return PartialResolvedImport::Unresolved;
+ }
+
+ if let Some(krate) = res.krate {
+ if krate != self.def_map.krate {
+ return PartialResolvedImport::Resolved(
+ def.filter_visibility(|v| matches!(v, Visibility::Public)),
+ );
+ }
+ }
+
+ // Check whether all namespace is resolved
+ if def.take_types().is_some()
+ && def.take_values().is_some()
+ && def.take_macros().is_some()
+ {
+ PartialResolvedImport::Resolved(def)
+ } else {
+ PartialResolvedImport::Indeterminate(def)
+ }
+ }
+ }
+
+ fn resolve_extern_crate(&self, name: &Name) -> Option<ModuleId> {
+ if *name == name!(self) {
+ cov_mark::hit!(extern_crate_self_as);
+ let root = match self.def_map.block {
+ Some(_) => {
+ let def_map = self.def_map.crate_root(self.db).def_map(self.db);
+ def_map.module_id(def_map.root())
+ }
+ None => self.def_map.module_id(self.def_map.root()),
+ };
+ Some(root)
+ } else {
+ self.deps.get(name).copied()
+ }
+ }
+
+ fn record_resolved_import(&mut self, directive: &ImportDirective) {
+ let _p = profile::span("record_resolved_import");
+
+ let module_id = directive.module_id;
+ let import = &directive.import;
+ let mut def = directive.status.namespaces();
+ let vis = self
+ .def_map
+ .resolve_visibility(self.db, module_id, &directive.import.visibility)
+ .unwrap_or(Visibility::Public);
+
+ match import.kind {
+ ImportKind::Plain | ImportKind::TypeOnly => {
+ let name = match &import.alias {
+ Some(ImportAlias::Alias(name)) => Some(name),
+ Some(ImportAlias::Underscore) => None,
+ None => match import.path.segments().last() {
+ Some(last_segment) => Some(last_segment),
+ None => {
+ cov_mark::hit!(bogus_paths);
+ return;
+ }
+ },
+ };
+
+ if import.kind == ImportKind::TypeOnly {
+ def.values = None;
+ def.macros = None;
+ }
+
+ tracing::debug!("resolved import {:?} ({:?}) to {:?}", name, import, def);
+
+ // extern crates in the crate root are special-cased to insert entries into the extern prelude: rust-lang/rust#54658
+ if import.is_extern_crate
+ && self.def_map.block.is_none()
+ && module_id == self.def_map.root
+ {
+ if let (Some(ModuleDefId::ModuleId(def)), Some(name)) = (def.take_types(), name)
+ {
+ self.def_map.extern_prelude.insert(name.clone(), def);
+ }
+ }
+
+ self.update(module_id, &[(name.cloned(), def)], vis, ImportType::Named);
+ }
+ ImportKind::Glob => {
+ tracing::debug!("glob import: {:?}", import);
+ match def.take_types() {
+ Some(ModuleDefId::ModuleId(m)) => {
+ if import.is_prelude {
+ // Note: This dodgily overrides the injected prelude. The rustc
+ // implementation seems to work the same though.
+ cov_mark::hit!(std_prelude);
+ self.def_map.prelude = Some(m);
+ } else if m.krate != self.def_map.krate {
+ cov_mark::hit!(glob_across_crates);
+ // glob import from other crate => we can just import everything once
+ let item_map = m.def_map(self.db);
+ let scope = &item_map[m.local_id].scope;
+
+ // Module scoped macros are included
+ let items = scope
+ .resolutions()
+ // only keep visible names...
+ .map(|(n, res)| {
+ (n, res.filter_visibility(|v| v.is_visible_from_other_crate()))
+ })
+ .filter(|(_, res)| !res.is_none())
+ .collect::<Vec<_>>();
+
+ self.update(module_id, &items, vis, ImportType::Glob);
+ } else {
+ // glob import from same crate => we do an initial
+ // import, and then need to propagate any further
+ // additions
+ let def_map;
+ let scope = if m.block == self.def_map.block_id() {
+ &self.def_map[m.local_id].scope
+ } else {
+ def_map = m.def_map(self.db);
+ &def_map[m.local_id].scope
+ };
+
+ // Module scoped macros are included
+ let items = scope
+ .resolutions()
+ // only keep visible names...
+ .map(|(n, res)| {
+ (
+ n,
+ res.filter_visibility(|v| {
+ v.is_visible_from_def_map(
+ self.db,
+ &self.def_map,
+ module_id,
+ )
+ }),
+ )
+ })
+ .filter(|(_, res)| !res.is_none())
+ .collect::<Vec<_>>();
+
+ self.update(module_id, &items, vis, ImportType::Glob);
+ // record the glob import in case we add further items
+ let glob = self.glob_imports.entry(m.local_id).or_default();
+ if !glob.iter().any(|(mid, _)| *mid == module_id) {
+ glob.push((module_id, vis));
+ }
+ }
+ }
+ Some(ModuleDefId::AdtId(AdtId::EnumId(e))) => {
+ cov_mark::hit!(glob_enum);
+ // glob import from enum => just import all the variants
+
+ // XXX: urgh, so this works by accident! Here, we look at
+ // the enum data, and, in theory, this might require us to
+ // look back at the crate_def_map, creating a cycle. For
+ // example, `enum E { crate::some_macro!(); }`. Luckily, the
+ // only kind of macro that is allowed inside enum is a
+ // `cfg_macro`, and we don't need to run name resolution for
+ // it, but this is sheer luck!
+ let enum_data = self.db.enum_data(e);
+ let resolutions = enum_data
+ .variants
+ .iter()
+ .map(|(local_id, variant_data)| {
+ let name = variant_data.name.clone();
+ let variant = EnumVariantId { parent: e, local_id };
+ let res = PerNs::both(variant.into(), variant.into(), vis);
+ (Some(name), res)
+ })
+ .collect::<Vec<_>>();
+ self.update(module_id, &resolutions, vis, ImportType::Glob);
+ }
+ Some(d) => {
+ tracing::debug!("glob import {:?} from non-module/enum {:?}", import, d);
+ }
+ None => {
+ tracing::debug!("glob import {:?} didn't resolve as type", import);
+ }
+ }
+ }
+ }
+ }
+
+ fn update(
+ &mut self,
+ // The module for which `resolutions` have been resolved
+ module_id: LocalModuleId,
+ resolutions: &[(Option<Name>, PerNs)],
+ // Visibility this import will have
+ vis: Visibility,
+ import_type: ImportType,
+ ) {
+ self.db.unwind_if_cancelled();
+ self.update_recursive(module_id, resolutions, vis, import_type, 0)
+ }
+
+ fn update_recursive(
+ &mut self,
+ // The module for which `resolutions` have been resolved
+ module_id: LocalModuleId,
+ resolutions: &[(Option<Name>, PerNs)],
+ // All resolutions are imported with this visibility; the visibilities in
+ // the `PerNs` values are ignored and overwritten
+ vis: Visibility,
+ import_type: ImportType,
+ depth: usize,
+ ) {
+ if GLOB_RECURSION_LIMIT.check(depth).is_err() {
+ // prevent stack overflows (but this shouldn't be possible)
+ panic!("infinite recursion in glob imports!");
+ }
+ let mut changed = false;
+
+ for (name, res) in resolutions {
+ match name {
+ Some(name) => {
+ let scope = &mut self.def_map.modules[module_id].scope;
+ changed |= scope.push_res_with_import(
+ &mut self.from_glob_import,
+ (module_id, name.clone()),
+ res.with_visibility(vis),
+ import_type,
+ );
+ }
+ None => {
+ let tr = match res.take_types() {
+ Some(ModuleDefId::TraitId(tr)) => tr,
+ Some(other) => {
+ tracing::debug!("non-trait `_` import of {:?}", other);
+ continue;
+ }
+ None => continue,
+ };
+ let old_vis = self.def_map.modules[module_id].scope.unnamed_trait_vis(tr);
+ let should_update = match old_vis {
+ None => true,
+ Some(old_vis) => {
+ let max_vis = old_vis.max(vis, &self.def_map).unwrap_or_else(|| {
+ panic!("`Tr as _` imports with unrelated visibilities {:?} and {:?} (trait {:?})", old_vis, vis, tr);
+ });
+
+ if max_vis == old_vis {
+ false
+ } else {
+ cov_mark::hit!(upgrade_underscore_visibility);
+ true
+ }
+ }
+ };
+
+ if should_update {
+ changed = true;
+ self.def_map.modules[module_id].scope.push_unnamed_trait(tr, vis);
+ }
+ }
+ }
+ }
+
+ if !changed {
+ return;
+ }
+ let glob_imports = self
+ .glob_imports
+ .get(&module_id)
+ .into_iter()
+ .flatten()
+ .filter(|(glob_importing_module, _)| {
+ // we know all resolutions have the same visibility (`vis`), so we
+ // just need to check that once
+ vis.is_visible_from_def_map(self.db, &self.def_map, *glob_importing_module)
+ })
+ .cloned()
+ .collect::<Vec<_>>();
+
+ for (glob_importing_module, glob_import_vis) in glob_imports {
+ self.update_recursive(
+ glob_importing_module,
+ resolutions,
+ glob_import_vis,
+ ImportType::Glob,
+ depth + 1,
+ );
+ }
+ }
+
+ fn resolve_macros(&mut self) -> ReachedFixedPoint {
+ let mut macros = mem::take(&mut self.unresolved_macros);
+ let mut resolved = Vec::new();
+ let mut push_resolved = |directive: &MacroDirective, call_id| {
+ resolved.push((directive.module_id, directive.depth, directive.container, call_id));
+ };
+ let mut res = ReachedFixedPoint::Yes;
+ macros.retain(|directive| {
+ let resolver = |path| {
+ let resolved_res = self.def_map.resolve_path_fp_with_macro(
+ self.db,
+ ResolveMode::Other,
+ directive.module_id,
+ &path,
+ BuiltinShadowMode::Module,
+ );
+ resolved_res
+ .resolved_def
+ .take_macros()
+ .map(|it| (it, macro_id_to_def_id(self.db, it)))
+ };
+ let resolver_def_id = |path| resolver(path).map(|(_, it)| it);
+
+ match &directive.kind {
+ MacroDirectiveKind::FnLike { ast_id, expand_to } => {
+ let call_id = macro_call_as_call_id(
+ self.db,
+ ast_id,
+ *expand_to,
+ self.def_map.krate,
+ &resolver_def_id,
+ &mut |_err| (),
+ );
+ if let Ok(Ok(call_id)) = call_id {
+ push_resolved(directive, call_id);
+ res = ReachedFixedPoint::No;
+ return false;
+ }
+ }
+ MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
+ let id = derive_macro_as_call_id(
+ self.db,
+ ast_id,
+ *derive_attr,
+ *derive_pos as u32,
+ self.def_map.krate,
+ &resolver,
+ );
+
+ if let Ok((macro_id, def_id, call_id)) = id {
+ self.def_map.modules[directive.module_id].scope.set_derive_macro_invoc(
+ ast_id.ast_id,
+ call_id,
+ *derive_attr,
+ *derive_pos,
+ );
+ // Record its helper attributes.
+ if def_id.krate != self.def_map.krate {
+ let def_map = self.db.crate_def_map(def_id.krate);
+ if let Some(helpers) = def_map.exported_derives.get(&def_id) {
+ self.def_map
+ .derive_helpers_in_scope
+ .entry(ast_id.ast_id.map(|it| it.upcast()))
+ .or_default()
+ .extend(izip!(
+ helpers.iter().cloned(),
+ iter::repeat(macro_id),
+ iter::repeat(call_id),
+ ));
+ }
+ }
+
+ push_resolved(directive, call_id);
+ res = ReachedFixedPoint::No;
+ return false;
+ }
+ }
+ MacroDirectiveKind::Attr { ast_id: file_ast_id, mod_item, attr, tree } => {
+ let &AstIdWithPath { ast_id, ref path } = file_ast_id;
+ let file_id = ast_id.file_id;
+
+ let mut recollect_without = |collector: &mut Self| {
+ // Remove the original directive since we resolved it.
+ let mod_dir = collector.mod_dirs[&directive.module_id].clone();
+ collector.skip_attrs.insert(InFile::new(file_id, *mod_item), attr.id);
+
+ let item_tree = tree.item_tree(self.db);
+ ModCollector {
+ def_collector: collector,
+ macro_depth: directive.depth,
+ module_id: directive.module_id,
+ tree_id: *tree,
+ item_tree: &item_tree,
+ mod_dir,
+ }
+ .collect(&[*mod_item], directive.container);
+ res = ReachedFixedPoint::No;
+ false
+ };
+
+ if let Some(ident) = path.as_ident() {
+ if let Some(helpers) = self.def_map.derive_helpers_in_scope.get(&ast_id) {
+ if helpers.iter().any(|(it, ..)| it == ident) {
+ cov_mark::hit!(resolved_derive_helper);
+ // Resolved to derive helper. Collect the item's attributes again,
+ // starting after the derive helper.
+ return recollect_without(self);
+ }
+ }
+ }
+
+ let def = match resolver_def_id(path.clone()) {
+ Some(def) if def.is_attribute() => def,
+ _ => return true,
+ };
+ if matches!(
+ def,
+ MacroDefId { kind:MacroDefKind::BuiltInAttr(expander, _),.. }
+ if expander.is_derive()
+ ) {
+ // Resolved to `#[derive]`
+
+ let item_tree = tree.item_tree(self.db);
+ let ast_adt_id: FileAstId<ast::Adt> = match *mod_item {
+ ModItem::Struct(strukt) => item_tree[strukt].ast_id().upcast(),
+ ModItem::Union(union) => item_tree[union].ast_id().upcast(),
+ ModItem::Enum(enum_) => item_tree[enum_].ast_id().upcast(),
+ _ => {
+ let diag = DefDiagnostic::invalid_derive_target(
+ directive.module_id,
+ ast_id,
+ attr.id,
+ );
+ self.def_map.diagnostics.push(diag);
+ return recollect_without(self);
+ }
+ };
+ let ast_id = ast_id.with_value(ast_adt_id);
+
+ match attr.parse_path_comma_token_tree() {
+ Some(derive_macros) => {
+ let mut len = 0;
+ for (idx, path) in derive_macros.enumerate() {
+ let ast_id = AstIdWithPath::new(file_id, ast_id.value, path);
+ self.unresolved_macros.push(MacroDirective {
+ module_id: directive.module_id,
+ depth: directive.depth + 1,
+ kind: MacroDirectiveKind::Derive {
+ ast_id,
+ derive_attr: attr.id,
+ derive_pos: idx,
+ },
+ container: directive.container,
+ });
+ len = idx;
+ }
+
+ // We treat the #[derive] macro as an attribute call, but we do not resolve it for nameres collection.
+ // This is just a trick to be able to resolve the input to derives as proper paths.
+ // Check the comment in [`builtin_attr_macro`].
+ let call_id = attr_macro_as_call_id(
+ self.db,
+ file_ast_id,
+ attr,
+ self.def_map.krate,
+ def,
+ true,
+ );
+ self.def_map.modules[directive.module_id]
+ .scope
+ .init_derive_attribute(ast_id, attr.id, call_id, len + 1);
+ }
+ None => {
+ let diag = DefDiagnostic::malformed_derive(
+ directive.module_id,
+ ast_id,
+ attr.id,
+ );
+ self.def_map.diagnostics.push(diag);
+ }
+ }
+
+ return recollect_without(self);
+ }
+
+ // Not resolved to a derive helper or the derive attribute, so try to treat as a normal attribute.
+ let call_id = attr_macro_as_call_id(
+ self.db,
+ file_ast_id,
+ attr,
+ self.def_map.krate,
+ def,
+ false,
+ );
+ let loc: MacroCallLoc = self.db.lookup_intern_macro_call(call_id);
+
+ // If proc attribute macro expansion is disabled, skip expanding it here
+ if !self.db.enable_proc_attr_macros() {
+ self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
+ directive.module_id,
+ loc.kind,
+ loc.def.krate,
+ ));
+ return recollect_without(self);
+ }
+
+ // Skip #[test]/#[bench] expansion, which would merely result in more memory usage
+ // due to duplicating functions into macro expansions
+ if matches!(
+ loc.def.kind,
+ MacroDefKind::BuiltInAttr(expander, _)
+ if expander.is_test() || expander.is_bench()
+ ) {
+ return recollect_without(self);
+ }
+
+ if let MacroDefKind::ProcMacro(exp, ..) = loc.def.kind {
+ if exp.is_dummy() {
+ // If there's no expander for the proc macro (e.g.
+ // because proc macros are disabled, or building the
+ // proc macro crate failed), report this and skip
+ // expansion like we would if it was disabled
+ self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro(
+ directive.module_id,
+ loc.kind,
+ loc.def.krate,
+ ));
+
+ return recollect_without(self);
+ }
+ }
+
+ self.def_map.modules[directive.module_id]
+ .scope
+ .add_attr_macro_invoc(ast_id, call_id);
+
+ push_resolved(directive, call_id);
+ res = ReachedFixedPoint::No;
+ return false;
+ }
+ }
+
+ true
+ });
+ // Attribute resolution can add unresolved macro invocations, so concatenate the lists.
+ macros.extend(mem::take(&mut self.unresolved_macros));
+ self.unresolved_macros = macros;
+
+ for (module_id, depth, container, macro_call_id) in resolved {
+ self.collect_macro_expansion(module_id, macro_call_id, depth, container);
+ }
+
+ res
+ }
+
+ fn collect_macro_expansion(
+ &mut self,
+ module_id: LocalModuleId,
+ macro_call_id: MacroCallId,
+ depth: usize,
+ container: ItemContainerId,
+ ) {
+ if EXPANSION_DEPTH_LIMIT.check(depth).is_err() {
+ cov_mark::hit!(macro_expansion_overflow);
+ tracing::warn!("macro expansion is too deep");
+ return;
+ }
+ let file_id = macro_call_id.as_file();
+
+ // First, fetch the raw expansion result for purposes of error reporting. This goes through
+ // `macro_expand_error` to avoid depending on the full expansion result (to improve
+ // incrementality).
+ let loc: MacroCallLoc = self.db.lookup_intern_macro_call(macro_call_id);
+ let err = self.db.macro_expand_error(macro_call_id);
+ if let Some(err) = err {
+ let diag = match err {
+ hir_expand::ExpandError::UnresolvedProcMacro(krate) => {
+ always!(krate == loc.def.krate);
+ // Missing proc macros are non-fatal, so they are handled specially.
+ DefDiagnostic::unresolved_proc_macro(module_id, loc.kind.clone(), loc.def.krate)
+ }
+ _ => DefDiagnostic::macro_error(module_id, loc.kind.clone(), err.to_string()),
+ };
+
+ self.def_map.diagnostics.push(diag);
+ }
+
+ // Then, fetch and process the item tree. This will reuse the expansion result from above.
+ let item_tree = self.db.file_item_tree(file_id);
+ let mod_dir = self.mod_dirs[&module_id].clone();
+ ModCollector {
+ def_collector: &mut *self,
+ macro_depth: depth,
+ tree_id: TreeId::new(file_id, None),
+ module_id,
+ item_tree: &item_tree,
+ mod_dir,
+ }
+ .collect(item_tree.top_level_items(), container);
+ }
+
+ fn finish(mut self) -> DefMap {
+ // Emit diagnostics for all remaining unexpanded macros.
+
+ let _p = profile::span("DefCollector::finish");
+
+ for directive in &self.unresolved_macros {
+ match &directive.kind {
+ MacroDirectiveKind::FnLike { ast_id, expand_to } => {
+ let macro_call_as_call_id = macro_call_as_call_id(
+ self.db,
+ ast_id,
+ *expand_to,
+ self.def_map.krate,
+ |path| {
+ let resolved_res = self.def_map.resolve_path_fp_with_macro(
+ self.db,
+ ResolveMode::Other,
+ directive.module_id,
+ &path,
+ BuiltinShadowMode::Module,
+ );
+ resolved_res
+ .resolved_def
+ .take_macros()
+ .map(|it| macro_id_to_def_id(self.db, it))
+ },
+ &mut |_| (),
+ );
+ if let Err(UnresolvedMacro { path }) = macro_call_as_call_id {
+ self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
+ directive.module_id,
+ MacroCallKind::FnLike { ast_id: ast_id.ast_id, expand_to: *expand_to },
+ path,
+ ));
+ }
+ }
+ MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos } => {
+ self.def_map.diagnostics.push(DefDiagnostic::unresolved_macro_call(
+ directive.module_id,
+ MacroCallKind::Derive {
+ ast_id: ast_id.ast_id,
+ derive_attr_index: derive_attr.ast_index,
+ derive_index: *derive_pos as u32,
+ },
+ ast_id.path.clone(),
+ ));
+ }
+ // These are diagnosed by `reseed_with_unresolved_attribute`, as that function consumes them
+ MacroDirectiveKind::Attr { .. } => {}
+ }
+ }
+
+ // Emit diagnostics for all remaining unresolved imports.
+
+ // We'd like to avoid emitting a diagnostics avalanche when some `extern crate` doesn't
+ // resolve. We first emit diagnostics for unresolved extern crates and collect the missing
+ // crate names. Then we emit diagnostics for unresolved imports, but only if the import
+ // doesn't start with an unresolved crate's name. Due to renaming and reexports, this is a
+ // heuristic, but it works in practice.
+ let mut diagnosed_extern_crates = FxHashSet::default();
+ for directive in &self.unresolved_imports {
+ if let ImportSource::ExternCrate(krate) = directive.import.source {
+ let item_tree = krate.item_tree(self.db);
+ let extern_crate = &item_tree[krate.value];
+
+ diagnosed_extern_crates.insert(extern_crate.name.clone());
+
+ self.def_map.diagnostics.push(DefDiagnostic::unresolved_extern_crate(
+ directive.module_id,
+ InFile::new(krate.file_id(), extern_crate.ast_id),
+ ));
+ }
+ }
+
+ for directive in &self.unresolved_imports {
+ if let ImportSource::Import { id: import, use_tree } = directive.import.source {
+ if matches!(
+ (directive.import.path.segments().first(), &directive.import.path.kind),
+ (Some(krate), PathKind::Plain | PathKind::Abs) if diagnosed_extern_crates.contains(krate)
+ ) {
+ continue;
+ }
+
+ self.def_map.diagnostics.push(DefDiagnostic::unresolved_import(
+ directive.module_id,
+ import,
+ use_tree,
+ ));
+ }
+ }
+
+ self.def_map
+ }
+}
+
+/// Walks a single module, populating defs, imports and macros
+struct ModCollector<'a, 'b> {
+ def_collector: &'a mut DefCollector<'b>,
+ macro_depth: usize,
+ module_id: LocalModuleId,
+ tree_id: TreeId,
+ item_tree: &'a ItemTree,
+ mod_dir: ModDir,
+}
+
+impl ModCollector<'_, '_> {
+ fn collect_in_top_module(&mut self, items: &[ModItem]) {
+ let module = self.def_collector.def_map.module_id(self.module_id);
+ self.collect(items, module.into())
+ }
+
+ fn collect(&mut self, items: &[ModItem], container: ItemContainerId) {
+ let krate = self.def_collector.def_map.krate;
+
+ // Note: don't assert that inserted value is fresh: it's simply not true
+ // for macros.
+ self.def_collector.mod_dirs.insert(self.module_id, self.mod_dir.clone());
+
+ // Prelude module is always considered to be `#[macro_use]`.
+ if let Some(prelude_module) = self.def_collector.def_map.prelude {
+ if prelude_module.krate != krate {
+ cov_mark::hit!(prelude_is_macro_use);
+ self.def_collector.import_all_macros_exported(self.module_id, prelude_module.krate);
+ }
+ }
+
+ // This should be processed eagerly instead of deferred to resolving.
+ // `#[macro_use] extern crate` is hoisted to import macros before collecting
+ // any other items.
+ for &item in items {
+ let attrs = self.item_tree.attrs(self.def_collector.db, krate, item.into());
+ if attrs.cfg().map_or(true, |cfg| self.is_cfg_enabled(&cfg)) {
+ if let ModItem::ExternCrate(id) = item {
+ let import = &self.item_tree[id];
+ let attrs = self.item_tree.attrs(
+ self.def_collector.db,
+ krate,
+ ModItem::from(id).into(),
+ );
+ if attrs.by_key("macro_use").exists() {
+ self.def_collector.import_macros_from_extern_crate(self.module_id, import);
+ }
+ }
+ }
+ }
+
+ for &item in items {
+ let attrs = self.item_tree.attrs(self.def_collector.db, krate, item.into());
+ if let Some(cfg) = attrs.cfg() {
+ if !self.is_cfg_enabled(&cfg) {
+ self.emit_unconfigured_diagnostic(item, &cfg);
+ continue;
+ }
+ }
+
+ if let Err(()) = self.resolve_attributes(&attrs, item, container) {
+ // Do not process the item. It has at least one non-builtin attribute, so the
+ // fixed-point algorithm is required to resolve the rest of them.
+ continue;
+ }
+
+ let db = self.def_collector.db;
+ let module = self.def_collector.def_map.module_id(self.module_id);
+ let def_map = &mut self.def_collector.def_map;
+ let update_def =
+ |def_collector: &mut DefCollector<'_>, id, name: &Name, vis, has_constructor| {
+ def_collector.def_map.modules[self.module_id].scope.declare(id);
+ def_collector.update(
+ self.module_id,
+ &[(Some(name.clone()), PerNs::from_def(id, vis, has_constructor))],
+ vis,
+ ImportType::Named,
+ )
+ };
+ let resolve_vis = |def_map: &DefMap, visibility| {
+ def_map
+ .resolve_visibility(db, self.module_id, visibility)
+ .unwrap_or(Visibility::Public)
+ };
+
+ match item {
+ ModItem::Mod(m) => self.collect_module(m, &attrs),
+ ModItem::Import(import_id) => {
+ let imports = Import::from_use(
+ db,
+ krate,
+ self.item_tree,
+ ItemTreeId::new(self.tree_id, import_id),
+ );
+ self.def_collector.unresolved_imports.extend(imports.into_iter().map(
+ |import| ImportDirective {
+ module_id: self.module_id,
+ import,
+ status: PartialResolvedImport::Unresolved,
+ },
+ ));
+ }
+ ModItem::ExternCrate(import_id) => {
+ self.def_collector.unresolved_imports.push(ImportDirective {
+ module_id: self.module_id,
+ import: Import::from_extern_crate(
+ db,
+ krate,
+ self.item_tree,
+ ItemTreeId::new(self.tree_id, import_id),
+ ),
+ status: PartialResolvedImport::Unresolved,
+ })
+ }
+ ModItem::ExternBlock(block) => self.collect(
+ &self.item_tree[block].children,
+ ItemContainerId::ExternBlockId(
+ ExternBlockLoc {
+ container: module,
+ id: ItemTreeId::new(self.tree_id, block),
+ }
+ .intern(db),
+ ),
+ ),
+ ModItem::MacroCall(mac) => self.collect_macro_call(&self.item_tree[mac], container),
+ ModItem::MacroRules(id) => self.collect_macro_rules(id, module),
+ ModItem::MacroDef(id) => self.collect_macro_def(id, module),
+ ModItem::Impl(imp) => {
+ let impl_id =
+ ImplLoc { container: module, id: ItemTreeId::new(self.tree_id, imp) }
+ .intern(db);
+ self.def_collector.def_map.modules[self.module_id].scope.define_impl(impl_id)
+ }
+ ModItem::Function(id) => {
+ let it = &self.item_tree[id];
+ let fn_id =
+ FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db);
+
+ let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+ if self.def_collector.is_proc_macro {
+ if self.module_id == def_map.root {
+ if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) {
+ let crate_root = def_map.module_id(def_map.root);
+ self.def_collector.export_proc_macro(
+ proc_macro,
+ ItemTreeId::new(self.tree_id, id),
+ fn_id,
+ crate_root,
+ );
+ }
+ }
+ }
+
+ update_def(self.def_collector, fn_id.into(), &it.name, vis, false);
+ }
+ ModItem::Struct(id) => {
+ let it = &self.item_tree[id];
+
+ let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+ update_def(
+ self.def_collector,
+ StructLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
+ .intern(db)
+ .into(),
+ &it.name,
+ vis,
+ !matches!(it.fields, Fields::Record(_)),
+ );
+ }
+ ModItem::Union(id) => {
+ let it = &self.item_tree[id];
+
+ let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+ update_def(
+ self.def_collector,
+ UnionLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
+ .intern(db)
+ .into(),
+ &it.name,
+ vis,
+ false,
+ );
+ }
+ ModItem::Enum(id) => {
+ let it = &self.item_tree[id];
+
+ let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+ update_def(
+ self.def_collector,
+ EnumLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
+ .intern(db)
+ .into(),
+ &it.name,
+ vis,
+ false,
+ );
+ }
+ ModItem::Const(id) => {
+ let it = &self.item_tree[id];
+ let const_id =
+ ConstLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db);
+
+ match &it.name {
+ Some(name) => {
+ let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+ update_def(self.def_collector, const_id.into(), name, vis, false);
+ }
+ None => {
+ // const _: T = ...;
+ self.def_collector.def_map.modules[self.module_id]
+ .scope
+ .define_unnamed_const(const_id);
+ }
+ }
+ }
+ ModItem::Static(id) => {
+ let it = &self.item_tree[id];
+
+ let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+ update_def(
+ self.def_collector,
+ StaticLoc { container, id: ItemTreeId::new(self.tree_id, id) }
+ .intern(db)
+ .into(),
+ &it.name,
+ vis,
+ false,
+ );
+ }
+ ModItem::Trait(id) => {
+ let it = &self.item_tree[id];
+
+ let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+ update_def(
+ self.def_collector,
+ TraitLoc { container: module, id: ItemTreeId::new(self.tree_id, id) }
+ .intern(db)
+ .into(),
+ &it.name,
+ vis,
+ false,
+ );
+ }
+ ModItem::TypeAlias(id) => {
+ let it = &self.item_tree[id];
+
+ let vis = resolve_vis(def_map, &self.item_tree[it.visibility]);
+ update_def(
+ self.def_collector,
+ TypeAliasLoc { container, id: ItemTreeId::new(self.tree_id, id) }
+ .intern(db)
+ .into(),
+ &it.name,
+ vis,
+ false,
+ );
+ }
+ }
+ }
+ }
+
+ fn collect_module(&mut self, module_id: FileItemTreeId<Mod>, attrs: &Attrs) {
+ let path_attr = attrs.by_key("path").string_value();
+ let is_macro_use = attrs.by_key("macro_use").exists();
+ let module = &self.item_tree[module_id];
+ match &module.kind {
+ // inline module, just recurse
+ ModKind::Inline { items } => {
+ let module_id = self.push_child_module(
+ module.name.clone(),
+ AstId::new(self.file_id(), module.ast_id),
+ None,
+ &self.item_tree[module.visibility],
+ module_id,
+ );
+
+ if let Some(mod_dir) = self.mod_dir.descend_into_definition(&module.name, path_attr)
+ {
+ ModCollector {
+ def_collector: &mut *self.def_collector,
+ macro_depth: self.macro_depth,
+ module_id,
+ tree_id: self.tree_id,
+ item_tree: self.item_tree,
+ mod_dir,
+ }
+ .collect_in_top_module(&*items);
+ if is_macro_use {
+ self.import_all_legacy_macros(module_id);
+ }
+ }
+ }
+ // out of line module, resolve, parse and recurse
+ ModKind::Outline => {
+ let ast_id = AstId::new(self.tree_id.file_id(), module.ast_id);
+ let db = self.def_collector.db;
+ match self.mod_dir.resolve_declaration(db, self.file_id(), &module.name, path_attr)
+ {
+ Ok((file_id, is_mod_rs, mod_dir)) => {
+ let item_tree = db.file_item_tree(file_id.into());
+ let krate = self.def_collector.def_map.krate;
+ let is_enabled = item_tree
+ .top_level_attrs(db, krate)
+ .cfg()
+ .map_or(true, |cfg| self.is_cfg_enabled(&cfg));
+ if is_enabled {
+ let module_id = self.push_child_module(
+ module.name.clone(),
+ ast_id,
+ Some((file_id, is_mod_rs)),
+ &self.item_tree[module.visibility],
+ module_id,
+ );
+ ModCollector {
+ def_collector: self.def_collector,
+ macro_depth: self.macro_depth,
+ module_id,
+ tree_id: TreeId::new(file_id.into(), None),
+ item_tree: &item_tree,
+ mod_dir,
+ }
+ .collect_in_top_module(item_tree.top_level_items());
+ let is_macro_use = is_macro_use
+ || item_tree
+ .top_level_attrs(db, krate)
+ .by_key("macro_use")
+ .exists();
+ if is_macro_use {
+ self.import_all_legacy_macros(module_id);
+ }
+ }
+ }
+ Err(candidates) => {
+ self.push_child_module(
+ module.name.clone(),
+ ast_id,
+ None,
+ &self.item_tree[module.visibility],
+ module_id,
+ );
+ self.def_collector.def_map.diagnostics.push(
+ DefDiagnostic::unresolved_module(self.module_id, ast_id, candidates),
+ );
+ }
+ };
+ }
+ }
+ }
+
+ fn push_child_module(
+ &mut self,
+ name: Name,
+ declaration: AstId<ast::Module>,
+ definition: Option<(FileId, bool)>,
+ visibility: &crate::visibility::RawVisibility,
+ mod_tree_id: FileItemTreeId<Mod>,
+ ) -> LocalModuleId {
+ let def_map = &mut self.def_collector.def_map;
+ let vis = def_map
+ .resolve_visibility(self.def_collector.db, self.module_id, visibility)
+ .unwrap_or(Visibility::Public);
+ let modules = &mut def_map.modules;
+ let origin = match definition {
+ None => ModuleOrigin::Inline {
+ definition: declaration,
+ definition_tree_id: ItemTreeId::new(self.tree_id, mod_tree_id),
+ },
+ Some((definition, is_mod_rs)) => ModuleOrigin::File {
+ declaration,
+ definition,
+ is_mod_rs,
+ declaration_tree_id: ItemTreeId::new(self.tree_id, mod_tree_id),
+ },
+ };
+
+ let res = modules.alloc(ModuleData::new(origin, vis));
+ modules[res].parent = Some(self.module_id);
+ for (name, mac) in modules[self.module_id].scope.collect_legacy_macros() {
+ for &mac in &mac {
+ modules[res].scope.define_legacy_macro(name.clone(), mac);
+ }
+ }
+ modules[self.module_id].children.insert(name.clone(), res);
+
+ let module = def_map.module_id(res);
+ let def = ModuleDefId::from(module);
+
+ def_map.modules[self.module_id].scope.declare(def);
+ self.def_collector.update(
+ self.module_id,
+ &[(Some(name), PerNs::from_def(def, vis, false))],
+ vis,
+ ImportType::Named,
+ );
+ res
+ }
+
+ /// Resolves attributes on an item.
+ ///
+ /// Returns `Err` when some attributes could not be resolved to builtins and have been
+ /// registered as unresolved.
+ ///
+ /// If `ignore_up_to` is `Some`, attributes preceding and including that attribute will be
+ /// assumed to be resolved already.
+ fn resolve_attributes(
+ &mut self,
+ attrs: &Attrs,
+ mod_item: ModItem,
+ container: ItemContainerId,
+ ) -> Result<(), ()> {
+ let mut ignore_up_to =
+ self.def_collector.skip_attrs.get(&InFile::new(self.file_id(), mod_item)).copied();
+ let iter = attrs
+ .iter()
+ .dedup_by(|a, b| {
+ // FIXME: this should not be required, all attributes on an item should have a
+ // unique ID!
+ // Still, this occurs because `#[cfg_attr]` can "expand" to multiple attributes:
+ // #[cfg_attr(not(off), unresolved, unresolved)]
+ // struct S;
+ // We should come up with a different way to ID attributes.
+ a.id == b.id
+ })
+ .skip_while(|attr| match ignore_up_to {
+ Some(id) if attr.id == id => {
+ ignore_up_to = None;
+ true
+ }
+ Some(_) => true,
+ None => false,
+ });
+
+ for attr in iter {
+ if self.def_collector.def_map.is_builtin_or_registered_attr(&attr.path) {
+ continue;
+ }
+ tracing::debug!("non-builtin attribute {}", attr.path);
+
+ let ast_id = AstIdWithPath::new(
+ self.file_id(),
+ mod_item.ast_id(self.item_tree),
+ attr.path.as_ref().clone(),
+ );
+ self.def_collector.unresolved_macros.push(MacroDirective {
+ module_id: self.module_id,
+ depth: self.macro_depth + 1,
+ kind: MacroDirectiveKind::Attr {
+ ast_id,
+ attr: attr.clone(),
+ mod_item,
+ tree: self.tree_id,
+ },
+ container,
+ });
+
+ return Err(());
+ }
+
+ Ok(())
+ }
+
+ fn collect_macro_rules(&mut self, id: FileItemTreeId<MacroRules>, module: ModuleId) {
+ let krate = self.def_collector.def_map.krate;
+ let mac = &self.item_tree[id];
+ let attrs = self.item_tree.attrs(self.def_collector.db, krate, ModItem::from(id).into());
+ let ast_id = InFile::new(self.file_id(), mac.ast_id.upcast());
+
+ let export_attr = attrs.by_key("macro_export");
+
+ let is_export = export_attr.exists();
+ let local_inner = if is_export {
+ export_attr.tt_values().flat_map(|it| &it.token_trees).any(|it| match it {
+ tt::TokenTree::Leaf(tt::Leaf::Ident(ident)) => {
+ ident.text.contains("local_inner_macros")
+ }
+ _ => false,
+ })
+ } else {
+ false
+ };
+
+ // Case 1: builtin macros
+ let expander = if attrs.by_key("rustc_builtin_macro").exists() {
+ // `#[rustc_builtin_macro = "builtin_name"]` overrides the `macro_rules!` name.
+ let name;
+ let name = match attrs.by_key("rustc_builtin_macro").string_value() {
+ Some(it) => {
+ // FIXME: a hacky way to create a Name from string.
+ name = tt::Ident { text: it.clone(), id: tt::TokenId::unspecified() }.as_name();
+ &name
+ }
+ None => {
+ let explicit_name =
+ attrs.by_key("rustc_builtin_macro").tt_values().next().and_then(|tt| {
+ match tt.token_trees.first() {
+ Some(tt::TokenTree::Leaf(tt::Leaf::Ident(name))) => Some(name),
+ _ => None,
+ }
+ });
+ match explicit_name {
+ Some(ident) => {
+ name = ident.as_name();
+ &name
+ }
+ None => &mac.name,
+ }
+ }
+ };
+ match find_builtin_macro(name) {
+ Some(Either::Left(it)) => MacroExpander::BuiltIn(it),
+ Some(Either::Right(it)) => MacroExpander::BuiltInEager(it),
+ None => {
+ self.def_collector
+ .def_map
+ .diagnostics
+ .push(DefDiagnostic::unimplemented_builtin_macro(self.module_id, ast_id));
+ return;
+ }
+ }
+ } else {
+ // Case 2: normal `macro_rules!` macro
+ MacroExpander::Declarative
+ };
+
+ let macro_id = MacroRulesLoc {
+ container: module,
+ id: ItemTreeId::new(self.tree_id, id),
+ local_inner,
+ expander,
+ }
+ .intern(self.def_collector.db);
+ self.def_collector.define_macro_rules(
+ self.module_id,
+ mac.name.clone(),
+ macro_id,
+ is_export,
+ );
+ }
+
+ fn collect_macro_def(&mut self, id: FileItemTreeId<MacroDef>, module: ModuleId) {
+ let krate = self.def_collector.def_map.krate;
+ let mac = &self.item_tree[id];
+ let ast_id = InFile::new(self.file_id(), mac.ast_id.upcast());
+
+ // Case 1: builtin macros
+ let attrs = self.item_tree.attrs(self.def_collector.db, krate, ModItem::from(id).into());
+ let expander = if attrs.by_key("rustc_builtin_macro").exists() {
+ if let Some(expander) = find_builtin_macro(&mac.name) {
+ match expander {
+ Either::Left(it) => MacroExpander::BuiltIn(it),
+ Either::Right(it) => MacroExpander::BuiltInEager(it),
+ }
+ } else if let Some(expander) = find_builtin_derive(&mac.name) {
+ MacroExpander::BuiltInDerive(expander)
+ } else if let Some(expander) = find_builtin_attr(&mac.name) {
+ MacroExpander::BuiltInAttr(expander)
+ } else {
+ self.def_collector
+ .def_map
+ .diagnostics
+ .push(DefDiagnostic::unimplemented_builtin_macro(self.module_id, ast_id));
+ return;
+ }
+ } else {
+ // Case 2: normal `macro`
+ MacroExpander::Declarative
+ };
+
+ let macro_id =
+ Macro2Loc { container: module, id: ItemTreeId::new(self.tree_id, id), expander }
+ .intern(self.def_collector.db);
+ self.def_collector.define_macro_def(
+ self.module_id,
+ mac.name.clone(),
+ macro_id,
+ &self.item_tree[mac.visibility],
+ );
+ }
+
+ fn collect_macro_call(&mut self, mac: &MacroCall, container: ItemContainerId) {
+ let ast_id = AstIdWithPath::new(self.file_id(), mac.ast_id, ModPath::clone(&mac.path));
+
+ // Case 1: try to resolve in legacy scope and expand macro_rules
+ let mut error = None;
+ match macro_call_as_call_id(
+ self.def_collector.db,
+ &ast_id,
+ mac.expand_to,
+ self.def_collector.def_map.krate,
+ |path| {
+ path.as_ident().and_then(|name| {
+ self.def_collector.def_map.with_ancestor_maps(
+ self.def_collector.db,
+ self.module_id,
+ &mut |map, module| {
+ map[module]
+ .scope
+ .get_legacy_macro(name)
+ .and_then(|it| it.last())
+ .map(|&it| macro_id_to_def_id(self.def_collector.db, it.into()))
+ },
+ )
+ })
+ },
+ &mut |err| {
+ error.get_or_insert(err);
+ },
+ ) {
+ Ok(Ok(macro_call_id)) => {
+ // Legacy macros need to be expanded immediately, so that any macros they produce
+ // are in scope.
+ self.def_collector.collect_macro_expansion(
+ self.module_id,
+ macro_call_id,
+ self.macro_depth + 1,
+ container,
+ );
+
+ if let Some(err) = error {
+ self.def_collector.def_map.diagnostics.push(DefDiagnostic::macro_error(
+ self.module_id,
+ MacroCallKind::FnLike { ast_id: ast_id.ast_id, expand_to: mac.expand_to },
+ err.to_string(),
+ ));
+ }
+
+ return;
+ }
+ Ok(Err(_)) => {
+ // Built-in macro failed eager expansion.
+
+ self.def_collector.def_map.diagnostics.push(DefDiagnostic::macro_error(
+ self.module_id,
+ MacroCallKind::FnLike { ast_id: ast_id.ast_id, expand_to: mac.expand_to },
+ error.unwrap().to_string(),
+ ));
+ return;
+ }
+ Err(UnresolvedMacro { .. }) => (),
+ }
+
+ // Case 2: resolve in module scope, expand during name resolution.
+ self.def_collector.unresolved_macros.push(MacroDirective {
+ module_id: self.module_id,
+ depth: self.macro_depth + 1,
+ kind: MacroDirectiveKind::FnLike { ast_id, expand_to: mac.expand_to },
+ container,
+ });
+ }
+
+ fn import_all_legacy_macros(&mut self, module_id: LocalModuleId) {
+ let macros = self.def_collector.def_map[module_id].scope.collect_legacy_macros();
+ for (name, macs) in macros {
+ macs.last().map(|&mac| {
+ self.def_collector.define_legacy_macro(self.module_id, name.clone(), mac)
+ });
+ }
+ }
+
+ fn is_cfg_enabled(&self, cfg: &CfgExpr) -> bool {
+ self.def_collector.cfg_options.check(cfg) != Some(false)
+ }
+
+ fn emit_unconfigured_diagnostic(&mut self, item: ModItem, cfg: &CfgExpr) {
+ let ast_id = item.ast_id(self.item_tree);
+
+ let ast_id = InFile::new(self.file_id(), ast_id.upcast());
+ self.def_collector.def_map.diagnostics.push(DefDiagnostic::unconfigured_code(
+ self.module_id,
+ ast_id,
+ cfg.clone(),
+ self.def_collector.cfg_options.clone(),
+ ));
+ }
+
+ fn file_id(&self) -> HirFileId {
+ self.tree_id.file_id()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{db::DefDatabase, test_db::TestDB};
+ use base_db::{fixture::WithFixture, SourceDatabase};
+
+ use super::*;
+
+ fn do_collect_defs(db: &dyn DefDatabase, def_map: DefMap) -> DefMap {
+ let mut collector = DefCollector {
+ db,
+ def_map,
+ deps: FxHashMap::default(),
+ glob_imports: FxHashMap::default(),
+ unresolved_imports: Vec::new(),
+ indeterminate_imports: Vec::new(),
+ unresolved_macros: Vec::new(),
+ mod_dirs: FxHashMap::default(),
+ cfg_options: &CfgOptions::default(),
+ proc_macros: Default::default(),
+ from_glob_import: Default::default(),
+ skip_attrs: Default::default(),
+ is_proc_macro: false,
+ };
+ collector.seed_with_top_level();
+ collector.collect();
+ collector.def_map
+ }
+
+ fn do_resolve(not_ra_fixture: &str) -> DefMap {
+ let (db, file_id) = TestDB::with_single_file(not_ra_fixture);
+ let krate = db.test_crate();
+
+ let edition = db.crate_graph()[krate].edition;
+ let module_origin = ModuleOrigin::CrateRoot { definition: file_id };
+ let def_map =
+ DefMap::empty(krate, edition, ModuleData::new(module_origin, Visibility::Public));
+ do_collect_defs(&db, def_map)
+ }
+
+ #[test]
+ fn test_macro_expand_will_stop_1() {
+ do_resolve(
+ r#"
+macro_rules! foo {
+ ($($ty:ty)*) => { foo!($($ty)*); }
+}
+foo!(KABOOM);
+"#,
+ );
+ do_resolve(
+ r#"
+macro_rules! foo {
+ ($($ty:ty)*) => { foo!(() $($ty)*); }
+}
+foo!(KABOOM);
+"#,
+ );
+ }
+
+ #[ignore]
+ #[test]
+ fn test_macro_expand_will_stop_2() {
+ // FIXME: this test does succeed, but takes quite a while: 90 seconds in
+ // the release mode. That's why the argument is not an ra_fixture --
+ // otherwise injection highlighting gets stuck.
+ //
+ // We need to find a way to fail this faster.
+ do_resolve(
+ r#"
+macro_rules! foo {
+ ($($ty:ty)*) => { foo!($($ty)* $($ty)*); }
+}
+foo!(KABOOM);
+"#,
+ );
+ }
+}
--- /dev/null
+//! This modules implements a function to resolve a path `foo::bar::baz` to a
+//! def, which is used within the name resolution.
+//!
+//! When name resolution is finished, the result of resolving a path is either
+//! `Some(def)` or `None`. However, when we are in process of resolving imports
+//! or macros, there's a third possibility:
+//!
+//! I can't resolve this path right now, but I might be able to resolve this path
+//! later, when more macros are expanded.
+//!
+//! `ReachedFixedPoint` signals about this.
+
+use base_db::Edition;
+use hir_expand::name::Name;
+
+use crate::{
+ db::DefDatabase,
+ item_scope::BUILTIN_SCOPE,
+ nameres::{BuiltinShadowMode, DefMap},
+ path::{ModPath, PathKind},
+ per_ns::PerNs,
+ visibility::{RawVisibility, Visibility},
+ AdtId, CrateId, EnumVariantId, LocalModuleId, ModuleDefId, ModuleId,
+};
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub(super) enum ResolveMode {
+ Import,
+ Other,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub(super) enum ReachedFixedPoint {
+ Yes,
+ No,
+}
+
+#[derive(Debug, Clone)]
+pub(super) struct ResolvePathResult {
+ pub(super) resolved_def: PerNs,
+ pub(super) segment_index: Option<usize>,
+ pub(super) reached_fixedpoint: ReachedFixedPoint,
+ pub(super) krate: Option<CrateId>,
+}
+
+impl ResolvePathResult {
+ fn empty(reached_fixedpoint: ReachedFixedPoint) -> ResolvePathResult {
+ ResolvePathResult::with(PerNs::none(), reached_fixedpoint, None, None)
+ }
+
+ fn with(
+ resolved_def: PerNs,
+ reached_fixedpoint: ReachedFixedPoint,
+ segment_index: Option<usize>,
+ krate: Option<CrateId>,
+ ) -> ResolvePathResult {
+ ResolvePathResult { resolved_def, segment_index, reached_fixedpoint, krate }
+ }
+}
+
+impl DefMap {
+ pub(super) fn resolve_name_in_extern_prelude(
+ &self,
+ db: &dyn DefDatabase,
+ name: &Name,
+ ) -> Option<ModuleId> {
+ match self.block {
+ Some(_) => self.crate_root(db).def_map(db).extern_prelude.get(name).copied(),
+ None => self.extern_prelude.get(name).copied(),
+ }
+ }
+
+ pub(crate) fn resolve_visibility(
+ &self,
+ db: &dyn DefDatabase,
+ // module to import to
+ original_module: LocalModuleId,
+ // pub(path)
+ //      ^^^^ this
+ visibility: &RawVisibility,
+ ) -> Option<Visibility> {
+ let mut vis = match visibility {
+ RawVisibility::Module(path) => {
+ let (result, remaining) =
+ self.resolve_path(db, original_module, path, BuiltinShadowMode::Module);
+ if remaining.is_some() {
+ return None;
+ }
+ let types = result.take_types()?;
+ match types {
+ ModuleDefId::ModuleId(m) => Visibility::Module(m),
+ _ => {
+ // error: visibility needs to refer to module
+ return None;
+ }
+ }
+ }
+ RawVisibility::Public => Visibility::Public,
+ };
+
+ // In block expressions, `self` normally refers to the containing non-block module, and
+ // `super` to its parent (etc.). However, visibilities must only refer to a module in the
+ // DefMap they're written in, so we restrict them when that happens.
+ if let Visibility::Module(m) = vis {
+ if self.block_id() != m.block {
+ cov_mark::hit!(adjust_vis_in_block_def_map);
+ vis = Visibility::Module(self.module_id(self.root()));
+ tracing::debug!("visibility {:?} points outside DefMap, adjusting to {:?}", m, vis);
+ }
+ }
+
+ Some(vis)
+ }
+
+ // Returns Yes if we are sure that additions to `ItemMap` wouldn't change
+ // the result.
+ pub(super) fn resolve_path_fp_with_macro(
+ &self,
+ db: &dyn DefDatabase,
+ mode: ResolveMode,
++ // module to import to
+ mut original_module: LocalModuleId,
+ path: &ModPath,
+ shadow: BuiltinShadowMode,
+ ) -> ResolvePathResult {
+ let mut result = ResolvePathResult::empty(ReachedFixedPoint::No);
+
+ let mut arc;
+ let mut current_map = self;
+ loop {
+ let new = current_map.resolve_path_fp_with_macro_single(
+ db,
+ mode,
+ original_module,
+ path,
+ shadow,
+ );
+
+ // Merge `new` into `result`.
+ result.resolved_def = result.resolved_def.or(new.resolved_def);
+ if result.reached_fixedpoint == ReachedFixedPoint::No {
+ result.reached_fixedpoint = new.reached_fixedpoint;
+ }
+ // FIXME: this doesn't seem right; what if the different namespace resolutions come from different crates?
+ result.krate = result.krate.or(new.krate);
+ result.segment_index = match (result.segment_index, new.segment_index) {
+ (Some(idx), None) => Some(idx),
+ (Some(old), Some(new)) => Some(old.max(new)),
+ (None, new) => new,
+ };
+
+ match &current_map.block {
+ Some(block) => {
+ original_module = block.parent.local_id;
+ arc = block.parent.def_map(db);
+ current_map = &*arc;
+ }
+ None => return result,
+ }
+ }
+ }
+
+ pub(super) fn resolve_path_fp_with_macro_single(
+ &self,
+ db: &dyn DefDatabase,
+ mode: ResolveMode,
+ original_module: LocalModuleId,
+ path: &ModPath,
+ shadow: BuiltinShadowMode,
+ ) -> ResolvePathResult {
+ let graph = db.crate_graph();
+ let _cx = stdx::panic_context::enter(format!(
+ "DefMap {:?} crate_name={:?} block={:?} path={}",
+ self.krate, graph[self.krate].display_name, self.block, path
+ ));
+
+ let mut segments = path.segments().iter().enumerate();
+ let mut curr_per_ns: PerNs = match path.kind {
+ PathKind::DollarCrate(krate) => {
+ if krate == self.krate {
+ cov_mark::hit!(macro_dollar_crate_self);
+ PerNs::types(self.crate_root(db).into(), Visibility::Public)
+ } else {
+ let def_map = db.crate_def_map(krate);
+ let module = def_map.module_id(def_map.root);
+ cov_mark::hit!(macro_dollar_crate_other);
+ PerNs::types(module.into(), Visibility::Public)
+ }
+ }
+ PathKind::Crate => PerNs::types(self.crate_root(db).into(), Visibility::Public),
+ // plain import or absolute path in 2015: crate-relative with
+ // fallback to extern prelude (with the simplification in
+ // rust-lang/rust#57745)
+ // FIXME there must be a nicer way to write this condition
+ PathKind::Plain | PathKind::Abs
+ if self.edition == Edition::Edition2015
+ && (path.kind == PathKind::Abs || mode == ResolveMode::Import) =>
+ {
+ let (_, segment) = match segments.next() {
+ Some((idx, segment)) => (idx, segment),
+ None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
+ };
+ tracing::debug!("resolving {:?} in crate root (+ extern prelude)", segment);
+ self.resolve_name_in_crate_root_or_extern_prelude(db, segment)
+ }
+ PathKind::Plain => {
+ let (_, segment) = match segments.next() {
+ Some((idx, segment)) => (idx, segment),
+ None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
+ };
+ // The first segment may be a builtin type. If the path has more
+ // than one segment, we first try resolving it as a module
+ // anyway.
+ // FIXME: If the next segment doesn't resolve in the module and
+ // BuiltinShadowMode wasn't Module, then we need to try
+ // resolving it as a builtin.
+ let prefer_module =
+ if path.segments().len() == 1 { shadow } else { BuiltinShadowMode::Module };
+
+ tracing::debug!("resolving {:?} in module", segment);
+ self.resolve_name_in_module(db, original_module, segment, prefer_module)
+ }
+ PathKind::Super(lvl) => {
+ let mut module = original_module;
+ for i in 0..lvl {
+ match self.modules[module].parent {
+ Some(it) => module = it,
+ None => match &self.block {
+ Some(block) => {
+ // Look up remaining path in parent `DefMap`
+ let new_path = ModPath::from_segments(
+ PathKind::Super(lvl - i),
+ path.segments().to_vec(),
+ );
+ tracing::debug!(
+ "`super` path: {} -> {} in parent map",
+ path,
+ new_path
+ );
+ return block.parent.def_map(db).resolve_path_fp_with_macro(
+ db,
+ mode,
+ block.parent.local_id,
+ &new_path,
+ shadow,
+ );
+ }
+ None => {
+ tracing::debug!("super path in root module");
+ return ResolvePathResult::empty(ReachedFixedPoint::Yes);
+ }
+ },
+ }
+ }
+
+ // Resolve `self` to the containing crate-rooted module if we're a block
+ self.with_ancestor_maps(db, module, &mut |def_map, module| {
+ if def_map.block.is_some() {
+ None // keep ascending
+ } else {
+ Some(PerNs::types(def_map.module_id(module).into(), Visibility::Public))
+ }
+ })
+ .expect("block DefMap not rooted in crate DefMap")
+ }
+ PathKind::Abs => {
+ // 2018-style absolute path -- only extern prelude
+ let segment = match segments.next() {
+ Some((_, segment)) => segment,
+ None => return ResolvePathResult::empty(ReachedFixedPoint::Yes),
+ };
+ if let Some(&def) = self.extern_prelude.get(segment) {
+ tracing::debug!("absolute path {:?} resolved to crate {:?}", path, def);
+ PerNs::types(def.into(), Visibility::Public)
+ } else {
+ return ResolvePathResult::empty(ReachedFixedPoint::No); // extern crate declarations can add to the extern prelude
+ }
+ }
+ };
+
+ for (i, segment) in segments {
+ let (curr, vis) = match curr_per_ns.take_types_vis() {
+ Some(r) => r,
+ None => {
+ // we still have path segments left, but the path so far
+ // didn't resolve in the types namespace => no resolution
+ // (don't break here because `curr_per_ns` might contain
+ // something in the value namespace, and it would be wrong
+ // to return that)
+ return ResolvePathResult::empty(ReachedFixedPoint::No);
+ }
+ };
+ // resolve segment in curr
+
+ curr_per_ns = match curr {
+ ModuleDefId::ModuleId(module) => {
+ if module.krate != self.krate {
+ let path = ModPath::from_segments(
+ PathKind::Super(0),
+ path.segments()[i..].iter().cloned(),
+ );
+ tracing::debug!("resolving {:?} in other crate", path);
+ let defp_map = module.def_map(db);
+ let (def, s) = defp_map.resolve_path(db, module.local_id, &path, shadow);
+ return ResolvePathResult::with(
+ def,
+ ReachedFixedPoint::Yes,
+ s.map(|s| s + i),
+ Some(module.krate),
+ );
+ }
+
+ let def_map;
+ let module_data = if module.block == self.block_id() {
+ &self[module.local_id]
+ } else {
+ def_map = module.def_map(db);
+ &def_map[module.local_id]
+ };
+
+ // Since it is a qualified path here, it should not contains legacy macros
+ module_data.scope.get(segment)
+ }
+ ModuleDefId::AdtId(AdtId::EnumId(e)) => {
+ // enum variant
+ cov_mark::hit!(can_import_enum_variant);
+ let enum_data = db.enum_data(e);
+ match enum_data.variant(segment) {
+ Some(local_id) => {
+ let variant = EnumVariantId { parent: e, local_id };
+ match &*enum_data.variants[local_id].variant_data {
+ crate::adt::VariantData::Record(_) => {
+ PerNs::types(variant.into(), Visibility::Public)
+ }
+ crate::adt::VariantData::Tuple(_)
+ | crate::adt::VariantData::Unit => {
+ PerNs::both(variant.into(), variant.into(), Visibility::Public)
+ }
+ }
+ }
+ None => {
+ return ResolvePathResult::with(
+ PerNs::types(e.into(), vis),
+ ReachedFixedPoint::Yes,
+ Some(i),
+ Some(self.krate),
+ );
+ }
+ }
+ }
+ s => {
+ // could be an inherent method call in UFCS form
+ // (`Struct::method`), or some other kind of associated item
+ tracing::debug!(
+ "path segment {:?} resolved to non-module {:?}, but is not last",
+ segment,
+ curr,
+ );
+
+ return ResolvePathResult::with(
+ PerNs::types(s, vis),
+ ReachedFixedPoint::Yes,
+ Some(i),
+ Some(self.krate),
+ );
+ }
+ };
++
++ curr_per_ns = curr_per_ns
++ .filter_visibility(|vis| vis.is_visible_from_def_map(db, self, original_module));
+ }
+
+ ResolvePathResult::with(curr_per_ns, ReachedFixedPoint::Yes, None, Some(self.krate))
+ }
+
+ fn resolve_name_in_module(
+ &self,
+ db: &dyn DefDatabase,
+ module: LocalModuleId,
+ name: &Name,
+ shadow: BuiltinShadowMode,
+ ) -> PerNs {
+ // Resolve in:
+ // - legacy scope of macro
+ // - current module / scope
+ // - extern prelude
+ // - std prelude
+ let from_legacy_macro = self[module]
+ .scope
+ .get_legacy_macro(name)
+ // FIXME: shadowing
+ .and_then(|it| it.last())
+ .map_or_else(PerNs::none, |&m| PerNs::macros(m.into(), Visibility::Public));
+ let from_scope = self[module].scope.get(name);
+ let from_builtin = match self.block {
+ Some(_) => {
+ // Only resolve to builtins in the root `DefMap`.
+ PerNs::none()
+ }
+ None => BUILTIN_SCOPE.get(name).copied().unwrap_or_else(PerNs::none),
+ };
+ let from_scope_or_builtin = match shadow {
+ BuiltinShadowMode::Module => from_scope.or(from_builtin),
+ BuiltinShadowMode::Other => match from_scope.take_types() {
+ Some(ModuleDefId::ModuleId(_)) => from_builtin.or(from_scope),
+ Some(_) | None => from_scope.or(from_builtin),
+ },
+ };
+
+ let extern_prelude = || {
+ self.extern_prelude
+ .get(name)
+ .map_or(PerNs::none(), |&it| PerNs::types(it.into(), Visibility::Public))
+ };
+ let prelude = || self.resolve_in_prelude(db, name);
+
+ from_legacy_macro.or(from_scope_or_builtin).or_else(extern_prelude).or_else(prelude)
+ }
+
+ fn resolve_name_in_crate_root_or_extern_prelude(
+ &self,
+ db: &dyn DefDatabase,
+ name: &Name,
+ ) -> PerNs {
+ let from_crate_root = match self.block {
+ Some(_) => {
+ let def_map = self.crate_root(db).def_map(db);
+ def_map[def_map.root].scope.get(name)
+ }
+ None => self[self.root].scope.get(name),
+ };
+ let from_extern_prelude = || {
+ self.resolve_name_in_extern_prelude(db, name)
+ .map_or(PerNs::none(), |it| PerNs::types(it.into(), Visibility::Public))
+ };
+
+ from_crate_root.or_else(from_extern_prelude)
+ }
+
+ fn resolve_in_prelude(&self, db: &dyn DefDatabase, name: &Name) -> PerNs {
+ if let Some(prelude) = self.prelude {
+ let keep;
+ let def_map = if prelude.krate == self.krate {
+ self
+ } else {
+ // Extend lifetime
+ keep = prelude.def_map(db);
+ &keep
+ };
+ def_map[prelude.local_id].scope.get(name)
+ } else {
+ PerNs::none()
+ }
+ }
+}
--- /dev/null
- E: t
+mod globs;
+mod incremental;
+mod macros;
+mod mod_resolution;
+mod primitives;
+
+use std::sync::Arc;
+
+use base_db::{fixture::WithFixture, SourceDatabase};
+use expect_test::{expect, Expect};
+
+use crate::{db::DefDatabase, test_db::TestDB};
+
+use super::DefMap;
+
+fn compute_crate_def_map(ra_fixture: &str) -> Arc<DefMap> {
+ let db = TestDB::with_files(ra_fixture);
+ let krate = db.crate_graph().iter().next().unwrap();
+ db.crate_def_map(krate)
+}
+
+fn render_crate_def_map(ra_fixture: &str) -> String {
+ let db = TestDB::with_files(ra_fixture);
+ let krate = db.crate_graph().iter().next().unwrap();
+ db.crate_def_map(krate).dump(&db)
+}
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = render_crate_def_map(ra_fixture);
+ expect.assert_eq(&actual);
+}
+
+#[test]
+fn crate_def_map_smoke_test() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+struct S;
+use crate::foo::bar::E;
+use self::E::V;
+
+//- /foo/mod.rs
+pub mod bar;
+fn f() {}
+
+//- /foo/bar.rs
+pub struct Baz;
+
+union U { to_be: bool, not_to_be: u8 }
+enum E { V }
+
+extern {
+ type Ext;
+ static EXT: u8;
+ fn ext();
+}
+"#,
+ expect![[r#"
+ crate
- V: t v
++ E: _
+ S: t v
- Bar: t v
++ V: _
+ foo: t
+
+ crate::foo
+ bar: t
+ f: v
+
+ crate::foo::bar
+ Baz: t v
+ E: t
+ EXT: v
+ Ext: t
+ U: t
+ ext: v
+ "#]],
+ );
+}
+
+#[test]
+fn crate_def_map_super_super() {
+ check(
+ r#"
+mod a {
+ const A: usize = 0;
+ mod b {
+ const B: usize = 0;
+ mod c {
+ use super::super::*;
+ }
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ a: t
+
+ crate::a
+ A: v
+ b: t
+
+ crate::a::b
+ B: v
+ c: t
+
+ crate::a::b::c
+ A: v
+ b: t
+ "#]],
+ );
+}
+
+#[test]
+fn crate_def_map_fn_mod_same_name() {
+ check(
+ r#"
+mod m {
+ pub mod z {}
+ pub fn z() {}
+}
+"#,
+ expect![[r#"
+ crate
+ m: t
+
+ crate::m
+ z: t v
+
+ crate::m::z
+ "#]],
+ );
+}
+
+#[test]
+fn bogus_paths() {
+ cov_mark::check!(bogus_paths);
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+struct S;
+use self;
+
+//- /foo/mod.rs
+use super;
+use crate;
+"#,
+ expect![[r#"
+ crate
+ S: t v
+ foo: t
+
+ crate::foo
+ "#]],
+ );
+}
+
+#[test]
+fn use_as() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use crate::foo::Baz as Foo;
+
+//- /foo/mod.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Foo: t v
+ foo: t
+
+ crate::foo
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn use_trees() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use crate::foo::bar::{Baz, Quux};
+
+//- /foo/mod.rs
+pub mod bar;
+
+//- /foo/bar.rs
+pub struct Baz;
+pub enum Quux {};
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ Quux: t
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ Quux: t
+ "#]],
+ );
+}
+
+#[test]
+fn re_exports() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use self::foo::Baz;
+
+//- /foo/mod.rs
+pub mod bar;
+pub use self::bar::Baz;
+
+//- /foo/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ foo: t
+
+ crate::foo
+ Baz: t v
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn std_prelude() {
+ cov_mark::check!(std_prelude);
+ check(
+ r#"
+//- /main.rs crate:main deps:test_crate
+#[prelude_import]
+use ::test_crate::prelude::*;
+
+use Foo::*;
+
+//- /lib.rs crate:test_crate
+pub mod prelude;
+
+//- /prelude.rs
+pub enum Foo { Bar, Baz }
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn can_import_enum_variant() {
+ cov_mark::check!(can_import_enum_variant);
+ check(
+ r#"
+enum E { V }
+use self::E::V;
+"#,
+ expect![[r#"
+ crate
+ E: t
+ V: t v
+ "#]],
+ );
+}
+
+#[test]
+fn edition_2015_imports() {
+ check(
+ r#"
+//- /main.rs crate:main deps:other_crate edition:2015
+mod foo;
+mod bar;
+
+//- /bar.rs
+struct Bar;
+
+//- /foo.rs
+use bar::Bar;
+use other_crate::FromLib;
+
+//- /lib.rs crate:other_crate edition:2018
+pub struct FromLib;
+"#,
+ expect![[r#"
+ crate
+ bar: t
+ foo: t
+
+ crate::bar
+ Bar: t v
+
+ crate::foo
++ Bar: _
+ FromLib: t v
+ "#]],
+ );
+}
+
+#[test]
+fn item_map_using_self() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use crate::foo::bar::Baz::{self};
+
+//- /foo/mod.rs
+pub mod bar;
+
+//- /foo/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn item_map_across_crates() {
+ check(
+ r#"
+//- /main.rs crate:main deps:test_crate
+use test_crate::Baz;
+
+//- /lib.rs crate:test_crate
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn extern_crate_rename() {
+ check(
+ r#"
+//- /main.rs crate:main deps:alloc
+extern crate alloc as alloc_crate;
+mod alloc;
+mod sync;
+
+//- /sync.rs
+use alloc_crate::Arc;
+
+//- /lib.rs crate:alloc
+pub struct Arc;
+"#,
+ expect![[r#"
+ crate
+ alloc: t
+ alloc_crate: t
+ sync: t
+
+ crate::alloc
+
+ crate::sync
+ Arc: t v
+ "#]],
+ );
+}
+
+#[test]
+fn extern_crate_rename_2015_edition() {
+ check(
+ r#"
+//- /main.rs crate:main deps:alloc edition:2015
+extern crate alloc as alloc_crate;
+mod alloc;
+mod sync;
+
+//- /sync.rs
+use alloc_crate::Arc;
+
+//- /lib.rs crate:alloc
+pub struct Arc;
+"#,
+ expect![[r#"
+ crate
+ alloc: t
+ alloc_crate: t
+ sync: t
+
+ crate::alloc
+
+ crate::sync
+ Arc: t v
+ "#]],
+ );
+}
+
+#[test]
+fn macro_use_extern_crate_self() {
+ cov_mark::check!(ignore_macro_use_extern_crate_self);
+ check(
+ r#"
+//- /main.rs crate:main
+#[macro_use]
+extern crate self as bla;
+"#,
+ expect![[r#"
+ crate
+ bla: t
+ "#]],
+ );
+}
+
+#[test]
+fn reexport_across_crates() {
+ check(
+ r#"
+//- /main.rs crate:main deps:test_crate
+use test_crate::Baz;
+
+//- /lib.rs crate:test_crate
+pub use foo::Baz;
+mod foo;
+
+//- /foo.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn values_dont_shadow_extern_crates() {
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+fn foo() {}
+use foo::Bar;
+
+//- /foo/lib.rs crate:foo
+pub struct Bar;
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ foo: v
+ "#]],
+ );
+}
+
+#[test]
+fn no_std_prelude() {
+ check(
+ r#"
+ //- /main.rs crate:main deps:core,std
+ #![cfg_attr(not(never), no_std)]
+ use Rust;
+
+ //- /core.rs crate:core
+ pub mod prelude {
+ pub mod rust_2018 {
+ pub struct Rust;
+ }
+ }
+ //- /std.rs crate:std deps:core
+ pub mod prelude {
+ pub mod rust_2018 {
+ }
+ }
+ "#,
+ expect![[r#"
+ crate
+ Rust: t v
+ "#]],
+ );
+}
+
+#[test]
+fn edition_specific_preludes() {
+ // We can't test the 2015 prelude here since you can't reexport its contents with 2015's
+ // absolute paths.
+
+ check(
+ r#"
+ //- /main.rs edition:2018 crate:main deps:std
+ use Rust2018;
+
+ //- /std.rs crate:std
+ pub mod prelude {
+ pub mod rust_2018 {
+ pub struct Rust2018;
+ }
+ }
+ "#,
+ expect![[r#"
+ crate
+ Rust2018: t v
+ "#]],
+ );
+ check(
+ r#"
+ //- /main.rs edition:2021 crate:main deps:std
+ use Rust2021;
+
+ //- /std.rs crate:std
+ pub mod prelude {
+ pub mod rust_2021 {
+ pub struct Rust2021;
+ }
+ }
+ "#,
+ expect![[r#"
+ crate
+ Rust2021: t v
+ "#]],
+ );
+}
+
+#[test]
+fn std_prelude_takes_precedence_above_core_prelude() {
+ check(
+ r#"
+//- /main.rs crate:main deps:core,std
+use {Foo, Bar};
+
+//- /std.rs crate:std deps:core
+pub mod prelude {
+ pub mod rust_2018 {
+ pub struct Foo;
+ pub use core::prelude::rust_2018::Bar;
+ }
+}
+
+//- /core.rs crate:core
+pub mod prelude {
+ pub mod rust_2018 {
+ pub struct Bar;
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Foo: t v
+ "#]],
+ );
+}
+
+#[test]
+fn cfg_not_test() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+use {Foo, Bar, Baz};
+
+//- /lib.rs crate:std
+pub mod prelude {
+ pub mod rust_2018 {
+ #[cfg(test)]
+ pub struct Foo;
+ #[cfg(not(test))]
+ pub struct Bar;
+ #[cfg(all(not(any()), feature = "foo", feature = "bar", opt = "42"))]
+ pub struct Baz;
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Baz: _
+ Foo: _
+ "#]],
+ );
+}
+
+#[test]
+fn cfg_test() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+use {Foo, Bar, Baz};
+
+//- /lib.rs crate:std cfg:test,feature=foo,feature=bar,opt=42
+pub mod prelude {
+ pub mod rust_2018 {
+ #[cfg(test)]
+ pub struct Foo;
+ #[cfg(not(test))]
+ pub struct Bar;
+ #[cfg(all(not(any()), feature = "foo", feature = "bar", opt = "42"))]
+ pub struct Baz;
+ }
+}
+"#,
+ expect![[r#"
+ crate
+ Bar: _
+ Baz: t v
+ Foo: t v
+ "#]],
+ );
+}
+
+#[test]
+fn infer_multiple_namespace() {
+ check(
+ r#"
+//- /main.rs
+mod a {
+ pub type T = ();
+ pub use crate::b::*;
+}
+
+use crate::a::T;
+
+mod b {
+ pub const T: () = ();
+}
+"#,
+ expect![[r#"
+ crate
+ T: t v
+ a: t
+ b: t
+
+ crate::a
+ T: t v
+
+ crate::b
+ T: v
+ "#]],
+ );
+}
+
+#[test]
+fn underscore_import() {
+ check(
+ r#"
+//- /main.rs
+use tr::Tr as _;
+use tr::Tr2 as _;
+
+mod tr {
+ pub trait Tr {}
+ pub trait Tr2 {}
+}
+ "#,
+ expect![[r#"
+ crate
+ _: t
+ _: t
+ tr: t
+
+ crate::tr
+ Tr: t
+ Tr2: t
+ "#]],
+ );
+}
+
+#[test]
+fn underscore_reexport() {
+ check(
+ r#"
+//- /main.rs
+mod tr {
+ pub trait PubTr {}
+ pub trait PrivTr {}
+}
+mod reex {
+ use crate::tr::PrivTr as _;
+ pub use crate::tr::PubTr as _;
+}
+use crate::reex::*;
+ "#,
+ expect![[r#"
+ crate
+ _: t
+ reex: t
+ tr: t
+
+ crate::reex
+ _: t
+ _: t
+
+ crate::tr
+ PrivTr: t
+ PubTr: t
+ "#]],
+ );
+}
+
+#[test]
+fn underscore_pub_crate_reexport() {
+ cov_mark::check!(upgrade_underscore_visibility);
+ check(
+ r#"
+//- /main.rs crate:main deps:lib
+use lib::*;
+
+//- /lib.rs crate:lib
+use tr::Tr as _;
+pub use tr::Tr as _;
+
+mod tr {
+ pub trait Tr {}
+}
+ "#,
+ expect![[r#"
+ crate
+ _: t
+ "#]],
+ );
+}
+
+#[test]
+fn underscore_nontrait() {
+ check(
+ r#"
+//- /main.rs
+mod m {
+ pub struct Struct;
+ pub enum Enum {}
+ pub const CONST: () = ();
+}
+use crate::m::{Struct as _, Enum as _, CONST as _};
+ "#,
+ expect![[r#"
+ crate
+ m: t
+
+ crate::m
+ CONST: v
+ Enum: t
+ Struct: t v
+ "#]],
+ );
+}
+
+#[test]
+fn underscore_name_conflict() {
+ check(
+ r#"
+//- /main.rs
+struct Tr;
+
+use tr::Tr as _;
+
+mod tr {
+ pub trait Tr {}
+}
+ "#,
+ expect![[r#"
+ crate
+ _: t
+ Tr: t v
+ tr: t
+
+ crate::tr
+ Tr: t
+ "#]],
+ );
+}
+
+#[test]
+fn cfg_the_entire_crate() {
+ check(
+ r#"
+//- /main.rs
+#![cfg(never)]
+
+pub struct S;
+pub enum E {}
+pub fn f() {}
+ "#,
+ expect![[r#"
+ crate
+ "#]],
+ );
+}
+
+#[test]
+fn use_crate_as() {
+ check(
+ r#"
+use crate as foo;
+
+use foo::bar as baz;
+
+fn bar() {}
+ "#,
+ expect![[r#"
+ crate
+ bar: v
+ baz: v
+ foo: t
+ "#]],
+ );
+}
+
+#[test]
+fn self_imports_only_types() {
+ check(
+ r#"
+//- /main.rs
+mod m {
+ pub macro S() {}
+ pub struct S;
+}
+
+use self::m::S::{self};
+ "#,
+ expect![[r#"
+ crate
+ S: t
+ m: t
+
+ crate::m
+ S: t v m
+ "#]],
+ );
+}
+
+#[test]
+fn import_from_extern_crate_only_imports_public_items() {
+ check(
+ r#"
+//- /lib.rs crate:lib deps:settings,macros
+use macros::settings;
+use settings::Settings;
+//- /settings.rs crate:settings
+pub struct Settings;
+//- /macros.rs crate:macros
+mod settings {}
+pub const settings: () = ();
+ "#,
+ expect![[r#"
+ crate
+ Settings: t v
+ settings: v
+ "#]],
+ )
+}
+
+#[test]
+fn non_prelude_deps() {
+ check(
+ r#"
+//- /lib.rs crate:lib deps:dep extern-prelude:
+use dep::Struct;
+//- /dep.rs crate:dep
+pub struct Struct;
+ "#,
+ expect![[r#"
+ crate
+ Struct: _
+ "#]],
+ );
+ check(
+ r#"
+//- /lib.rs crate:lib deps:dep extern-prelude:
+extern crate dep;
+use dep::Struct;
+//- /dep.rs crate:dep
+pub struct Struct;
+ "#,
+ expect![[r#"
+ crate
+ Struct: t v
+ dep: t
+ "#]],
+ );
+}
+
+#[test]
+fn braced_supers_in_use_tree() {
+ cov_mark::check!(concat_super_mod_paths);
+ check(
+ r#"
+mod some_module {
+ pub fn unknown_func() {}
+}
+
+mod other_module {
+ mod some_submodule {
+ use { super::{ super::unknown_func, }, };
+ }
+}
+
+use some_module::unknown_func;
+ "#,
+ expect![[r#"
+ crate
+ other_module: t
+ some_module: t
+ unknown_func: v
+
+ crate::other_module
+ some_submodule: t
+
+ crate::other_module::some_submodule
+ unknown_func: v
+
+ crate::some_module
+ unknown_func: v
+ "#]],
+ )
+}
--- /dev/null
- mod bar;
+use super::*;
+
+#[test]
+fn glob_1() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use foo::*;
+
+//- /foo/mod.rs
+pub mod bar;
+pub use self::bar::Baz;
+pub struct Foo;
+
+//- /foo/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ Foo: t v
+ bar: t
+ foo: t
+
+ crate::foo
+ Baz: t v
+ Foo: t v
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn glob_2() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+use foo::*;
+
+//- /foo/mod.rs
+pub mod bar;
+pub use self::bar::*;
+pub struct Foo;
+
+//- /foo/bar.rs
+pub struct Baz;
+pub use super::*;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ Foo: t v
+ bar: t
+ foo: t
+
+ crate::foo
+ Baz: t v
+ Foo: t v
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ Foo: t v
+ bar: t
+ "#]],
+ );
+}
+
+#[test]
+fn glob_privacy_1() {
+ check(
+ r"
+//- /lib.rs
+mod foo;
+use foo::*;
+
+//- /foo/mod.rs
+pub mod bar;
+pub use self::bar::*;
+struct PrivateStructFoo;
+
+//- /foo/bar.rs
+pub struct Baz;
+struct PrivateStructBar;
+pub use super::*;
+",
+ expect![[r#"
+ crate
+ Baz: t v
+ bar: t
+ foo: t
+
+ crate::foo
+ Baz: t v
+ PrivateStructFoo: t v
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ PrivateStructBar: t v
+ PrivateStructFoo: t v
+ bar: t
+ "#]],
+ );
+}
+
+#[test]
+fn glob_privacy_2() {
+ check(
+ r"
+//- /lib.rs
+mod foo;
+use foo::*;
+use foo::bar::*;
+
+//- /foo/mod.rs
++pub mod bar;
+fn Foo() {};
+pub struct Foo {};
+
+//- /foo/bar.rs
+pub(super) struct PrivateBaz;
+struct PrivateBar;
+pub(crate) struct PubCrateStruct;
+",
+ expect![[r#"
+ crate
+ Foo: t
+ PubCrateStruct: t v
++ bar: t
+ foo: t
+
+ crate::foo
+ Foo: t v
+ bar: t
+
+ crate::foo::bar
+ PrivateBar: t v
+ PrivateBaz: t v
+ PubCrateStruct: t v
+ "#]],
+ );
+}
+
+#[test]
+fn glob_across_crates() {
+ cov_mark::check!(glob_across_crates);
+ check(
+ r#"
+//- /main.rs crate:main deps:test_crate
+use test_crate::*;
+
+//- /lib.rs crate:test_crate
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn glob_privacy_across_crates() {
+ check(
+ r#"
+//- /main.rs crate:main deps:test_crate
+use test_crate::*;
+
+//- /lib.rs crate:test_crate
+pub struct Baz;
+struct Foo;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn glob_enum() {
+ cov_mark::check!(glob_enum);
+ check(
+ r#"
+enum Foo { Bar, Baz }
+use self::Foo::*;
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Baz: t v
+ Foo: t
+ "#]],
+ );
+}
+
+#[test]
+fn glob_enum_group() {
+ cov_mark::check!(glob_enum_group);
+ check(
+ r#"
+enum Foo { Bar, Baz }
+use self::Foo::{*};
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ Baz: t v
+ Foo: t
+ "#]],
+ );
+}
+
+#[test]
+fn glob_shadowed_def() {
+ cov_mark::check!(import_shadowed);
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+mod bar;
+use foo::*;
+use bar::baz;
+use baz::Bar;
+
+//- /foo.rs
+pub mod baz { pub struct Foo; }
+
+//- /bar.rs
+pub mod baz { pub struct Bar; }
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ bar: t
+ baz: t
+ foo: t
+
+ crate::bar
+ baz: t
+
+ crate::bar::baz
+ Bar: t v
+
+ crate::foo
+ baz: t
+
+ crate::foo::baz
+ Foo: t v
+ "#]],
+ );
+}
+
+#[test]
+fn glob_shadowed_def_reversed() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+mod bar;
+use bar::baz;
+use foo::*;
+use baz::Bar;
+
+//- /foo.rs
+pub mod baz { pub struct Foo; }
+
+//- /bar.rs
+pub mod baz { pub struct Bar; }
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ bar: t
+ baz: t
+ foo: t
+
+ crate::bar
+ baz: t
+
+ crate::bar::baz
+ Bar: t v
+
+ crate::foo
+ baz: t
+
+ crate::foo::baz
+ Foo: t v
+ "#]],
+ );
+}
+
+#[test]
+fn glob_shadowed_def_dependencies() {
+ check(
+ r#"
+mod a { pub mod foo { pub struct X; } }
+mod b { pub use super::a::foo; }
+mod c { pub mod foo { pub struct Y; } }
+mod d {
+ use super::c::foo;
+ use super::b::*;
+ use foo::Y;
+}
+"#,
+ expect![[r#"
+ crate
+ a: t
+ b: t
+ c: t
+ d: t
+
+ crate::a
+ foo: t
+
+ crate::a::foo
+ X: t v
+
+ crate::b
+ foo: t
+
+ crate::c
+ foo: t
+
+ crate::c::foo
+ Y: t v
+
+ crate::d
+ Y: t v
+ foo: t
+ "#]],
+ );
+}
++
++#[test]
++fn glob_name_collision_check_visibility() {
++ check(
++ r#"
++mod event {
++ mod serenity {
++ pub fn Event() {}
++ }
++ use serenity::*;
++
++ pub struct Event {}
++}
++
++use event::Event;
++ "#,
++ expect![[r#"
++ crate
++ Event: t
++ event: t
++
++ crate::event
++ Event: t v
++ serenity: t
++
++ crate::event::serenity
++ Event: v
++ "#]],
++ );
++}
--- /dev/null
- mod bar;
+use super::*;
+
+#[test]
+fn name_res_works_for_broken_modules() {
+ cov_mark::check!(name_res_works_for_broken_modules);
+ check(
+ r"
+//- /lib.rs
+mod foo // no `;`, no body
+use self::foo::Baz;
+
+//- /foo/mod.rs
+pub mod bar;
+pub use self::bar::Baz;
+
+//- /foo/bar.rs
+pub struct Baz;
+",
+ expect![[r#"
+ crate
+ Baz: _
+ foo: t
+
+ crate::foo
+ "#]],
+ );
+}
+
+#[test]
+fn nested_module_resolution() {
+ check(
+ r#"
+//- /lib.rs
+mod n1;
+
+//- /n1.rs
+mod n2;
+
+//- /n1/n2.rs
+struct X;
+"#,
+ expect![[r#"
+ crate
+ n1: t
+
+ crate::n1
+ n2: t
+
+ crate::n1::n2
+ X: t v
+ "#]],
+ );
+}
+
+#[test]
+fn nested_module_resolution_2() {
+ check(
+ r#"
+//- /lib.rs
+mod prelude;
+mod iter;
+
+//- /prelude.rs
+pub use crate::iter::Iterator;
+
+//- /iter.rs
+pub use self::traits::Iterator;
+mod traits;
+
+//- /iter/traits.rs
+pub use self::iterator::Iterator;
+mod iterator;
+
+//- /iter/traits/iterator.rs
+pub trait Iterator;
+"#,
+ expect![[r#"
+ crate
+ iter: t
+ prelude: t
+
+ crate::iter
+ Iterator: t
+ traits: t
+
+ crate::iter::traits
+ Iterator: t
+ iterator: t
+
+ crate::iter::traits::iterator
+ Iterator: t
+
+ crate::prelude
+ Iterator: t
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_works_for_non_standard_filenames() {
+ check(
+ r#"
+//- /my_library.rs crate:my_library
+mod foo;
+use self::foo::Bar;
+
+//- /foo/mod.rs
+pub struct Bar;
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ foo: t
+
+ crate::foo
+ Bar: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_works_for_raw_modules() {
+ check(
+ r#"
+//- /lib.rs
+mod r#async;
+use self::r#async::Bar;
+
+//- /async.rs
+mod foo;
+mod r#async;
+pub struct Bar;
+
+//- /async/foo.rs
+pub struct Foo;
+
+//- /async/async.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ r#async: t
+
+ crate::r#async
+ Bar: t v
+ foo: t
+ r#async: t
+
+ crate::r#async::foo
+ Foo: t v
+
+ crate::r#async::r#async
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_path() {
+ check(
+ r#"
+//- /lib.rs
+#[path = "bar/baz/foo.rs"]
+mod foo;
+use self::foo::Bar;
+
+//- /bar/baz/foo.rs
+pub struct Bar;
+"#,
+ expect![[r#"
+ crate
+ Bar: t v
+ foo: t
+
+ crate::foo
+ Bar: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_module_with_path_in_mod_rs() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo/mod.rs
+#[path = "baz.rs"]
+pub mod bar;
+use self::bar::Baz;
+
+//- /foo/baz.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_module_with_path_non_crate_root() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo.rs
+#[path = "baz.rs"]
+pub mod bar;
+use self::bar::Baz;
+
+//- /baz.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_module_decl_path_super() {
+ check(
+ r#"
+//- /main.rs
+#[path = "bar/baz/module.rs"]
+mod foo;
+pub struct Baz;
+
+//- /bar/baz/module.rs
+use super::Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ foo: t
+
+ crate::foo
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_explicit_path_mod_rs() {
+ check(
+ r#"
+//- /main.rs
+#[path = "module/mod.rs"]
+mod foo;
+
+//- /module/mod.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_relative_path() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo.rs
+#[path = "./sub.rs"]
+pub mod foo_bar;
+
+//- /sub.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ foo_bar: t
+
+ crate::foo::foo_bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_relative_path_2() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo/mod.rs
+#[path="../sub.rs"]
+pub mod foo_bar;
+
+//- /sub.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ foo_bar: t
+
+ crate::foo::foo_bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_relative_path_outside_root() {
+ check(
+ r#"
+//- /a/b/c/d/e/main.rs crate:main
+#[path="../../../../../outside.rs"]
+mod foo;
+
+//- /outside.rs
+mod bar;
+
+//- /bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+"#]],
+ );
+}
+
+#[test]
+fn module_resolution_explicit_path_mod_rs_2() {
+ check(
+ r#"
+//- /main.rs
+#[path = "module/bar/mod.rs"]
+mod foo;
+
+//- /module/bar/mod.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_explicit_path_mod_rs_with_win_separator() {
+ check(
+ r#"
+//- /main.rs
+#[path = r"module\bar\mod.rs"]
+mod foo;
+
+//- /module/bar/mod.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_with_path_attribute() {
+ check(
+ r#"
+//- /main.rs
+#[path = "models"]
+mod foo { mod bar; }
+
+//- /models/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module() {
+ check(
+ r#"
+//- /main.rs
+mod foo { mod bar; }
+
+//- /foo/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_2_with_path_attribute() {
+ check(
+ r#"
+//- /main.rs
+#[path = "models/db"]
+mod foo { mod bar; }
+
+//- /models/db/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_3() {
+ check(
+ r#"
+//- /main.rs
+#[path = "models/db"]
+mod foo {
+ #[path = "users.rs"]
+ mod bar;
+}
+
+//- /models/db/users.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_empty_path() {
+ check(
+ r#"
+//- /main.rs
+#[path = ""]
+mod foo {
+ #[path = "users.rs"]
+ mod bar;
+}
+
+//- /users.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_empty_path() {
+ check(
+ r#"
+//- /main.rs
+#[path = ""] // Should try to read `/` (a directory)
+mod foo;
+
+//- /foo.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_relative_path() {
+ check(
+ r#"
+//- /main.rs
+#[path = "./models"]
+mod foo { mod bar; }
+
+//- /models/bar.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_in_crate_root() {
+ check(
+ r#"
+//- /main.rs
+mod foo {
+ #[path = "baz.rs"]
++ pub mod bar;
+}
+use self::foo::bar::Baz;
+
+//- /foo/baz.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ Baz: t v
+ foo: t
+
+ crate::foo
+ bar: t
+
+ crate::foo::bar
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_in_mod_rs() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo/mod.rs
+mod bar {
+ #[path = "qwe.rs"]
+ pub mod baz;
+}
+use self::bar::baz::Baz;
+
+//- /foo/bar/qwe.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ bar: t
+
+ crate::foo::bar
+ baz: t
+
+ crate::foo::bar::baz
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_in_non_crate_root() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo.rs
+mod bar {
+ #[path = "qwe.rs"]
+ pub mod baz;
+}
+use self::bar::baz::Baz;
+
+//- /foo/bar/qwe.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ bar: t
+
+ crate::foo::bar
+ baz: t
+
+ crate::foo::bar::baz
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_inline_module_in_non_crate_root_2() {
+ check(
+ r#"
+//- /main.rs
+mod foo;
+
+//- /foo.rs
+#[path = "bar"]
+mod bar {
+ pub mod baz;
+}
+use self::bar::baz::Baz;
+
+//- /bar/baz.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ foo: t
+
+ crate::foo
+ Baz: t v
+ bar: t
+
+ crate::foo::bar
+ baz: t
+
+ crate::foo::bar::baz
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn module_resolution_decl_inside_module_in_non_crate_root_2() {
+ check(
+ r#"
+//- /main.rs
+#[path="module/m2.rs"]
+mod module;
+
+//- /module/m2.rs
+pub mod submod;
+
+//- /module/submod.rs
+pub struct Baz;
+"#,
+ expect![[r#"
+ crate
+ module: t
+
+ crate::module
+ submod: t
+
+ crate::module::submod
+ Baz: t v
+ "#]],
+ );
+}
+
+#[test]
+fn nested_out_of_line_module() {
+ check(
+ r#"
+//- /lib.rs
+mod a {
+ mod b {
+ mod c;
+ }
+}
+
+//- /a/b/c.rs
+struct X;
+"#,
+ expect![[r#"
+ crate
+ a: t
+
+ crate::a
+ b: t
+
+ crate::a::b
+ c: t
+
+ crate::a::b::c
+ X: t v
+ "#]],
+ );
+}
+
+#[test]
+fn nested_out_of_line_module_with_path() {
+ check(
+ r#"
+//- /lib.rs
+mod a {
+ #[path = "d/e"]
+ mod b {
+ mod c;
+ }
+}
+
+//- /a/d/e/c.rs
+struct X;
+"#,
+ expect![[r#"
+ crate
+ a: t
+
+ crate::a
+ b: t
+
+ crate::a::b
+ c: t
+
+ crate::a::b::c
+ X: t v
+ "#]],
+ );
+}
+
+#[test]
+fn circular_mods() {
+ cov_mark::check!(circular_mods);
+ compute_crate_def_map(
+ r#"
+//- /lib.rs
+mod foo;
+//- /foo.rs
+#[path = "./foo.rs"]
+mod foo;
+"#,
+ );
+
+ compute_crate_def_map(
+ r#"
+//- /lib.rs
+mod foo;
+//- /foo.rs
+#[path = "./bar.rs"]
+mod bar;
+//- /bar.rs
+#[path = "./foo.rs"]
+mod foo;
+"#,
+ );
+}
+
+#[test]
+fn abs_path_ignores_local() {
+ check(
+ r#"
+//- /main.rs crate:main deps:core
+pub use ::core::hash::Hash;
+pub mod core {}
+
+//- /lib.rs crate:core
+pub mod hash { pub trait Hash {} }
+"#,
+ expect![[r#"
+ crate
+ Hash: t
+ core: t
+
+ crate::core
+ "#]],
+ );
+}
+
+#[test]
+fn cfg_in_module_file() {
+ // Inner `#![cfg]` in a module file makes the whole module disappear.
+ check(
+ r#"
+//- /main.rs
+mod module;
+
+//- /module.rs
+#![cfg(NEVER)]
+
+struct AlsoShoulntAppear;
+ "#,
+ expect![[r#"
+ crate
+ "#]],
+ )
+}
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "hir-expand"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+tracing = "0.1.35"
+either = "1.7.0"
+rustc-hash = "1.1.0"
+la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+itertools = "0.10.5"
+hashbrown = { version = "0.12.1", features = [
+ "inline-more",
+], default-features = false }
+smallvec = { version = "1.10.0", features = ["const_new"] }
+
+stdx = { path = "../stdx", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+mbe = { path = "../mbe", version = "0.0.0" }
+limit = { path = "../limit", version = "0.0.0" }
+
+[dev-dependencies]
+expect-test = "1.4.0"
--- /dev/null
- tt.token_trees.retain(|tt| match tt {
- tt::TokenTree::Leaf(leaf) => {
- token_map.synthetic_token_id(leaf.id()).is_none()
- || token_map.synthetic_token_id(leaf.id()) != Some(EMPTY_ID)
- }
- tt::TokenTree::Subtree(st) => st.delimiter.map_or(true, |d| {
- token_map.synthetic_token_id(d.id).is_none()
- || token_map.synthetic_token_id(d.id) != Some(EMPTY_ID)
- }),
- });
- tt.token_trees.iter_mut().for_each(|tt| match tt {
- tt::TokenTree::Subtree(tt) => reverse_fixups(tt, token_map, undo_info),
- tt::TokenTree::Leaf(leaf) => {
- if let Some(id) = token_map.synthetic_token_id(leaf.id()) {
- let original = &undo_info.original[id.0 as usize];
- *tt = tt::TokenTree::Subtree(original.clone());
+//! To make attribute macros work reliably when typing, we need to take care to
+//! fix up syntax errors in the code we're passing to them.
+use std::mem;
+
+use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
+use rustc_hash::FxHashMap;
++use smallvec::SmallVec;
+use syntax::{
+ ast::{self, AstNode, HasLoopBody},
+ match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
+};
+use tt::Subtree;
+
+/// The result of calculating fixes for a syntax node -- a bunch of changes
+/// (appending to and replacing nodes), the information that is needed to
+/// reverse those changes afterwards, and a token map.
+#[derive(Debug)]
+pub(crate) struct SyntaxFixups {
+ pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ pub(crate) undo_info: SyntaxFixupUndoInfo,
+ pub(crate) token_map: TokenMap,
+ pub(crate) next_id: u32,
+}
+
+/// This is the information needed to reverse the fixups.
+#[derive(Debug, PartialEq, Eq)]
+pub struct SyntaxFixupUndoInfo {
+ original: Vec<Subtree>,
+}
+
+const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
+
+pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
+ let mut append = FxHashMap::<SyntaxElement, _>::default();
+ let mut replace = FxHashMap::<SyntaxElement, _>::default();
+ let mut preorder = node.preorder();
+ let mut original = Vec::new();
+ let mut token_map = TokenMap::default();
+ let mut next_id = 0;
+ while let Some(event) = preorder.next() {
+ let node = match event {
+ syntax::WalkEvent::Enter(node) => node,
+ syntax::WalkEvent::Leave(_) => continue,
+ };
+
+ if can_handle_error(&node) && has_error_to_handle(&node) {
+ // the node contains an error node, we have to completely replace it by something valid
+ let (original_tree, new_tmap, new_next_id) =
+ mbe::syntax_node_to_token_tree_with_modifications(
+ &node,
+ mem::take(&mut token_map),
+ next_id,
+ Default::default(),
+ Default::default(),
+ );
+ token_map = new_tmap;
+ next_id = new_next_id;
+ let idx = original.len() as u32;
+ original.push(original_tree);
+ let replacement = SyntheticToken {
+ kind: SyntaxKind::IDENT,
+ text: "__ra_fixup".into(),
+ range: node.text_range(),
+ id: SyntheticTokenId(idx),
+ };
+ replace.insert(node.clone().into(), vec![replacement]);
+ preorder.skip_subtree();
+ continue;
+ }
+ // In some other situations, we can fix things by just appending some tokens.
+ let end_range = TextRange::empty(node.text_range().end());
+ match_ast! {
+ match node {
+ ast::FieldExpr(it) => {
+ if it.name_ref().is_none() {
+ // incomplete field access: some_expr.|
+ append.insert(node.clone().into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::IDENT,
+ text: "__ra_fixup".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ },
+ ast::ExprStmt(it) => {
+ if it.semicolon_token().is_none() {
+ append.insert(node.clone().into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::SEMICOLON,
+ text: ";".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ },
+ ast::LetStmt(it) => {
+ if it.semicolon_token().is_none() {
+ append.insert(node.clone().into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::SEMICOLON,
+ text: ";".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ },
+ ast::IfExpr(it) => {
+ if it.condition().is_none() {
+ // insert placeholder token after the if token
+ let if_token = match it.if_token() {
+ Some(t) => t,
+ None => continue,
+ };
+ append.insert(if_token.into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::IDENT,
+ text: "__ra_fixup".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ if it.then_branch().is_none() {
+ append.insert(node.clone().into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::L_CURLY,
+ text: "{".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ SyntheticToken {
+ kind: SyntaxKind::R_CURLY,
+ text: "}".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ },
+ ast::WhileExpr(it) => {
+ if it.condition().is_none() {
+ // insert placeholder token after the while token
+ let while_token = match it.while_token() {
+ Some(t) => t,
+ None => continue,
+ };
+ append.insert(while_token.into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::IDENT,
+ text: "__ra_fixup".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ if it.loop_body().is_none() {
+ append.insert(node.clone().into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::L_CURLY,
+ text: "{".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ SyntheticToken {
+ kind: SyntaxKind::R_CURLY,
+ text: "}".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ },
+ ast::LoopExpr(it) => {
+ if it.loop_body().is_none() {
+ append.insert(node.clone().into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::L_CURLY,
+ text: "{".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ SyntheticToken {
+ kind: SyntaxKind::R_CURLY,
+ text: "}".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ },
+ // FIXME: foo::
+ ast::MatchExpr(it) => {
+ if it.expr().is_none() {
+ let match_token = match it.match_token() {
+ Some(t) => t,
+ None => continue
+ };
+ append.insert(match_token.into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::IDENT,
+ text: "__ra_fixup".into(),
+ range: end_range,
+ id: EMPTY_ID
+ },
+ ]);
+ }
+ if it.match_arm_list().is_none() {
+ // No match arms
+ append.insert(node.clone().into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::L_CURLY,
+ text: "{".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ SyntheticToken {
+ kind: SyntaxKind::R_CURLY,
+ text: "}".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ },
+ ast::ForExpr(it) => {
+ let for_token = match it.for_token() {
+ Some(token) => token,
+ None => continue
+ };
+
+ let [pat, in_token, iter] = [
+ (SyntaxKind::UNDERSCORE, "_"),
+ (SyntaxKind::IN_KW, "in"),
+ (SyntaxKind::IDENT, "__ra_fixup")
+ ].map(|(kind, text)| SyntheticToken { kind, text: text.into(), range: end_range, id: EMPTY_ID});
+
+ if it.pat().is_none() && it.in_token().is_none() && it.iterable().is_none() {
+ append.insert(for_token.into(), vec![pat, in_token, iter]);
+ // does something funky -- see test case for_no_pat
+ } else if it.pat().is_none() {
+ append.insert(for_token.into(), vec![pat]);
+ }
+
+ if it.loop_body().is_none() {
+ append.insert(node.clone().into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::L_CURLY,
+ text: "{".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ SyntheticToken {
+ kind: SyntaxKind::R_CURLY,
+ text: "}".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ },
+ _ => (),
+ }
+ }
+ }
+ SyntaxFixups {
+ append,
+ replace,
+ token_map,
+ next_id,
+ undo_info: SyntaxFixupUndoInfo { original },
+ }
+}
+
+fn has_error(node: &SyntaxNode) -> bool {
+ node.children().any(|c| c.kind() == SyntaxKind::ERROR)
+}
+
+fn can_handle_error(node: &SyntaxNode) -> bool {
+ ast::Expr::can_cast(node.kind())
+}
+
+fn has_error_to_handle(node: &SyntaxNode) -> bool {
+ has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
+}
+
+pub(crate) fn reverse_fixups(
+ tt: &mut Subtree,
+ token_map: &TokenMap,
+ undo_info: &SyntaxFixupUndoInfo,
+) {
- }
- });
++ let tts = std::mem::take(&mut tt.token_trees);
++ tt.token_trees = tts
++ .into_iter()
++ .filter(|tt| match tt {
++ tt::TokenTree::Leaf(leaf) => token_map.synthetic_token_id(leaf.id()) != Some(EMPTY_ID),
++ tt::TokenTree::Subtree(st) => {
++ st.delimiter.map_or(true, |d| token_map.synthetic_token_id(d.id) != Some(EMPTY_ID))
+ }
- let mut actual = tt.to_string();
- actual.push('\n');
++ })
++ .flat_map(|tt| match tt {
++ tt::TokenTree::Subtree(mut tt) => {
++ reverse_fixups(&mut tt, token_map, undo_info);
++ SmallVec::from_const([tt.into()])
++ }
++ tt::TokenTree::Leaf(leaf) => {
++ if let Some(id) = token_map.synthetic_token_id(leaf.id()) {
++ let original = undo_info.original[id.0 as usize].clone();
++ if original.delimiter.is_none() {
++ original.token_trees.into()
++ } else {
++ SmallVec::from_const([original.into()])
++ }
++ } else {
++ SmallVec::from_const([leaf.into()])
++ }
++ }
++ })
++ .collect();
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use super::reverse_fixups;
+
++ // The following three functions are only meant to check partial structural equivalence of
++ // `TokenTree`s, see the last assertion in `check()`.
++ fn check_leaf_eq(a: &tt::Leaf, b: &tt::Leaf) -> bool {
++ match (a, b) {
++ (tt::Leaf::Literal(a), tt::Leaf::Literal(b)) => a.text == b.text,
++ (tt::Leaf::Punct(a), tt::Leaf::Punct(b)) => a.char == b.char,
++ (tt::Leaf::Ident(a), tt::Leaf::Ident(b)) => a.text == b.text,
++ _ => false,
++ }
++ }
++
++ fn check_subtree_eq(a: &tt::Subtree, b: &tt::Subtree) -> bool {
++ a.delimiter.map(|it| it.kind) == b.delimiter.map(|it| it.kind)
++ && a.token_trees.len() == b.token_trees.len()
++ && a.token_trees.iter().zip(&b.token_trees).all(|(a, b)| check_tt_eq(a, b))
++ }
++
++ fn check_tt_eq(a: &tt::TokenTree, b: &tt::TokenTree) -> bool {
++ match (a, b) {
++ (tt::TokenTree::Leaf(a), tt::TokenTree::Leaf(b)) => check_leaf_eq(a, b),
++ (tt::TokenTree::Subtree(a), tt::TokenTree::Subtree(b)) => check_subtree_eq(a, b),
++ _ => false,
++ }
++ }
++
+ #[track_caller]
+ fn check(ra_fixture: &str, mut expect: Expect) {
+ let parsed = syntax::SourceFile::parse(ra_fixture);
+ let fixups = super::fixup_syntax(&parsed.syntax_node());
+ let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+ &parsed.syntax_node(),
+ fixups.token_map,
+ fixups.next_id,
+ fixups.replace,
+ fixups.append,
+ );
+
- assert_eq!(
- parse.errors(),
- &[],
++ let actual = format!("{}\n", tt);
+
+ expect.indent(false);
+ expect.assert_eq(&actual);
+
+ // the fixed-up tree should be syntactically valid
+ let (parse, _) = mbe::token_tree_to_syntax_node(&tt, ::mbe::TopEntryPoint::MacroItems);
- // (but token IDs don't matter)
++ assert!(
++ parse.errors().is_empty(),
+ "parse has syntax errors. parse tree:\n{:#?}",
+ parse.syntax_node()
+ );
+
+ reverse_fixups(&mut tt, &tmap, &fixups.undo_info);
+
+ // the fixed-up + reversed version should be equivalent to the original input
- assert_eq!(tt.to_string(), original_as_tt.to_string());
++ // modulo token IDs and `Punct`s' spacing.
+ let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
- fn foo () {a .__ra_fixup}
++ assert!(
++ check_subtree_eq(&tt, &original_as_tt),
++ "different token tree: {tt:?}, {original_as_tt:?}"
++ );
+ }
+
+ #[test]
+ fn just_for_token() {
+ check(
+ r#"
+fn foo() {
+ for
+}
+"#,
+ expect![[r#"
+fn foo () {for _ in __ra_fixup {}}
+"#]],
+ )
+ }
+
+ #[test]
+ fn for_no_iter_pattern() {
+ check(
+ r#"
+fn foo() {
+ for {}
+}
+"#,
+ expect![[r#"
+fn foo () {for _ in __ra_fixup {}}
+"#]],
+ )
+ }
+
+ #[test]
+ fn for_no_body() {
+ check(
+ r#"
+fn foo() {
+ for bar in qux
+}
+"#,
+ expect![[r#"
+fn foo () {for bar in qux {}}
+"#]],
+ )
+ }
+
+ // FIXME: https://github.com/rust-lang/rust-analyzer/pull/12937#discussion_r937633695
+ #[test]
+ fn for_no_pat() {
+ check(
+ r#"
+fn foo() {
+ for in qux {
+
+ }
+}
+"#,
+ expect![[r#"
+fn foo () {__ra_fixup}
+"#]],
+ )
+ }
+
+ #[test]
+ fn match_no_expr_no_arms() {
+ check(
+ r#"
+fn foo() {
+ match
+}
+"#,
+ expect![[r#"
+fn foo () {match __ra_fixup {}}
+"#]],
+ )
+ }
+
+ #[test]
+ fn match_expr_no_arms() {
+ check(
+ r#"
+fn foo() {
+ match x {
+
+ }
+}
+"#,
+ expect![[r#"
+fn foo () {match x {}}
+"#]],
+ )
+ }
+
+ #[test]
+ fn match_no_expr() {
+ check(
+ r#"
+fn foo() {
+ match {
+ _ => {}
+ }
+}
+"#,
+ expect![[r#"
+fn foo () {match __ra_fixup {}}
+"#]],
+ )
+ }
+
+ #[test]
+ fn incomplete_field_expr_1() {
+ check(
+ r#"
+fn foo() {
+ a.
+}
+"#,
+ expect![[r#"
- fn foo () {a .__ra_fixup ;}
++fn foo () {a . __ra_fixup}
+"#]],
+ )
+ }
+
+ #[test]
+ fn incomplete_field_expr_2() {
+ check(
+ r#"
+fn foo() {
+ a.;
+}
+"#,
+ expect![[r#"
- fn foo () {a .__ra_fixup ; bar () ;}
++fn foo () {a . __ra_fixup ;}
+"#]],
+ )
+ }
+
+ #[test]
+ fn incomplete_field_expr_3() {
+ check(
+ r#"
+fn foo() {
+ a.;
+ bar();
+}
+"#,
+ expect![[r#"
- fn foo () {let x = a .__ra_fixup ;}
++fn foo () {a . __ra_fixup ; bar () ;}
+"#]],
+ )
+ }
+
+ #[test]
+ fn incomplete_let() {
+ check(
+ r#"
+fn foo() {
+ let x = a
+}
+"#,
+ expect![[r#"
+fn foo () {let x = a ;}
+"#]],
+ )
+ }
+
+ #[test]
+ fn incomplete_field_expr_in_let() {
+ check(
+ r#"
+fn foo() {
+ let x = a.
+}
+"#,
+ expect![[r#"
- fn foo () {a .b ; bar () ;}
++fn foo () {let x = a . __ra_fixup ;}
+"#]],
+ )
+ }
+
+ #[test]
+ fn field_expr_before_call() {
+ // another case that easily happens while typing
+ check(
+ r#"
+fn foo() {
+ a.b
+ bar();
+}
+"#,
+ expect![[r#"
++fn foo () {a . b ; bar () ;}
+"#]],
+ )
+ }
+
+ #[test]
+ fn extraneous_comma() {
+ check(
+ r#"
+fn foo() {
+ bar(,);
+}
+"#,
+ expect![[r#"
+fn foo () {__ra_fixup ;}
+"#]],
+ )
+ }
+
+ #[test]
+ fn fixup_if_1() {
+ check(
+ r#"
+fn foo() {
+ if a
+}
+"#,
+ expect![[r#"
+fn foo () {if a {}}
+"#]],
+ )
+ }
+
+ #[test]
+ fn fixup_if_2() {
+ check(
+ r#"
+fn foo() {
+ if
+}
+"#,
+ expect![[r#"
+fn foo () {if __ra_fixup {}}
+"#]],
+ )
+ }
+
+ #[test]
+ fn fixup_if_3() {
+ check(
+ r#"
+fn foo() {
+ if {}
+}
+"#,
+ // the {} gets parsed as the condition, I think?
+ expect![[r#"
+fn foo () {if {} {}}
+"#]],
+ )
+ }
+
+ #[test]
+ fn fixup_while_1() {
+ check(
+ r#"
+fn foo() {
+ while
+}
+"#,
+ expect![[r#"
+fn foo () {while __ra_fixup {}}
+"#]],
+ )
+ }
+
+ #[test]
+ fn fixup_while_2() {
+ check(
+ r#"
+fn foo() {
+ while foo
+}
+"#,
+ expect![[r#"
+fn foo () {while foo {}}
+"#]],
+ )
+ }
+ #[test]
+ fn fixup_while_3() {
+ check(
+ r#"
+fn foo() {
+ while {}
+}
+"#,
+ expect![[r#"
+fn foo () {while __ra_fixup {}}
+"#]],
+ )
+ }
+
+ #[test]
+ fn fixup_loop() {
+ check(
+ r#"
+fn foo() {
+ loop
+}
+"#,
+ expect![[r#"
+fn foo () {loop {}}
+"#]],
+ )
+ }
+}
--- /dev/null
- // as we don't have node inputs otherwise and therefor can't find an `N` node in the input
+//! `hir_expand` deals with macro expansion.
+//!
+//! Specifically, it implements a concept of `MacroFile` -- a file whose syntax
+//! tree originates not from the text of some `FileId`, but from some macro
+//! expansion.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+pub mod db;
+pub mod ast_id_map;
+pub mod name;
+pub mod hygiene;
+pub mod builtin_attr_macro;
+pub mod builtin_derive_macro;
+pub mod builtin_fn_macro;
+pub mod proc_macro;
+pub mod quote;
+pub mod eager;
+pub mod mod_path;
+mod fixup;
+
+pub use mbe::{Origin, ValueResult};
+
+use std::{fmt, hash::Hash, iter, sync::Arc};
+
+use base_db::{impl_intern_key, salsa, CrateId, FileId, FileRange, ProcMacroKind};
+use either::Either;
+use syntax::{
+ algo::{self, skip_trivia_token},
+ ast::{self, AstNode, HasDocComments},
+ Direction, SyntaxNode, SyntaxToken,
+};
+
+use crate::{
+ ast_id_map::FileAstId,
+ builtin_attr_macro::BuiltinAttrExpander,
+ builtin_derive_macro::BuiltinDeriveExpander,
+ builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander},
+ db::TokenExpander,
+ mod_path::ModPath,
+ proc_macro::ProcMacroExpander,
+};
+
+pub type ExpandResult<T> = ValueResult<T, ExpandError>;
+
+#[derive(Debug, PartialEq, Eq, Clone)]
+pub enum ExpandError {
+ UnresolvedProcMacro(CrateId),
+ Mbe(mbe::ExpandError),
+ Other(Box<str>),
+}
+
+impl From<mbe::ExpandError> for ExpandError {
+ fn from(mbe: mbe::ExpandError) -> Self {
+ Self::Mbe(mbe)
+ }
+}
+
+impl fmt::Display for ExpandError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ ExpandError::UnresolvedProcMacro(_) => f.write_str("unresolved proc-macro"),
+ ExpandError::Mbe(it) => it.fmt(f),
+ ExpandError::Other(it) => f.write_str(it),
+ }
+ }
+}
+
+/// Input to the analyzer is a set of files, where each file is identified by
+/// `FileId` and contains source code. However, another source of source code in
+/// Rust are macros: each macro can be thought of as producing a "temporary
+/// file". To assign an id to such a file, we use the id of the macro call that
+/// produced the file. So, a `HirFileId` is either a `FileId` (source code
+/// written by user), or a `MacroCallId` (source code produced by macro).
+///
+/// What is a `MacroCallId`? Simplifying, it's a `HirFileId` of a file
+/// containing the call plus the offset of the macro call in the file. Note that
+/// this is a recursive definition! However, the size_of of `HirFileId` is
+/// finite (because everything bottoms out at the real `FileId`) and small
+/// (`MacroCallId` uses the location interning. You can check details here:
+/// <https://en.wikipedia.org/wiki/String_interning>).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct HirFileId(HirFileIdRepr);
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+enum HirFileIdRepr {
+ FileId(FileId),
+ MacroFile(MacroFile),
+}
+
+impl From<FileId> for HirFileId {
+ fn from(id: FileId) -> Self {
+ HirFileId(HirFileIdRepr::FileId(id))
+ }
+}
+
+impl From<MacroFile> for HirFileId {
+ fn from(id: MacroFile) -> Self {
+ HirFileId(HirFileIdRepr::MacroFile(id))
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct MacroFile {
+ pub macro_call_id: MacroCallId,
+}
+
+/// `MacroCallId` identifies a particular macro invocation, like
+/// `println!("Hello, {}", world)`.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct MacroCallId(salsa::InternId);
+impl_intern_key!(MacroCallId);
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroCallLoc {
+ pub def: MacroDefId,
+ pub(crate) krate: CrateId,
+ eager: Option<EagerCallInfo>,
+ pub kind: MacroCallKind,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct MacroDefId {
+ pub krate: CrateId,
+ pub kind: MacroDefKind,
+ pub local_inner: bool,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum MacroDefKind {
+ Declarative(AstId<ast::Macro>),
+ BuiltIn(BuiltinFnLikeExpander, AstId<ast::Macro>),
+ BuiltInAttr(BuiltinAttrExpander, AstId<ast::Macro>),
+ BuiltInDerive(BuiltinDeriveExpander, AstId<ast::Macro>),
+ BuiltInEager(EagerExpander, AstId<ast::Macro>),
+ ProcMacro(ProcMacroExpander, ProcMacroKind, AstId<ast::Fn>),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+struct EagerCallInfo {
+ /// NOTE: This can be *either* the expansion result, *or* the argument to the eager macro!
+ arg_or_expansion: Arc<tt::Subtree>,
+ included_file: Option<FileId>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum MacroCallKind {
+ FnLike {
+ ast_id: AstId<ast::MacroCall>,
+ expand_to: ExpandTo,
+ },
+ Derive {
+ ast_id: AstId<ast::Adt>,
+ /// Syntactical index of the invoking `#[derive]` attribute.
+ ///
+ /// Outer attributes are counted first, then inner attributes. This does not support
+ /// out-of-line modules, which may have attributes spread across 2 files!
+ derive_attr_index: u32,
+ /// Index of the derive macro in the derive attribute
+ derive_index: u32,
+ },
+ Attr {
+ ast_id: AstId<ast::Item>,
+ attr_args: Arc<(tt::Subtree, mbe::TokenMap)>,
+ /// Syntactical index of the invoking `#[attribute]`.
+ ///
+ /// Outer attributes are counted first, then inner attributes. This does not support
+ /// out-of-line modules, which may have attributes spread across 2 files!
+ invoc_attr_index: u32,
+ /// Whether this attribute is the `#[derive]` attribute.
+ is_derive: bool,
+ },
+}
+
+impl HirFileId {
+ /// For macro-expansion files, returns the original source file the
+ /// expansion originated from.
+ pub fn original_file(self, db: &dyn db::AstDatabase) -> FileId {
+ let mut file_id = self;
+ loop {
+ match file_id.0 {
+ HirFileIdRepr::FileId(id) => break id,
+ HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_call_id);
+ file_id = match loc.eager {
+ Some(EagerCallInfo { included_file: Some(file), .. }) => file.into(),
+ _ => loc.kind.file_id(),
+ };
+ }
+ }
+ }
+ }
+
+ pub fn expansion_level(self, db: &dyn db::AstDatabase) -> u32 {
+ let mut level = 0;
+ let mut curr = self;
+ while let HirFileIdRepr::MacroFile(macro_file) = curr.0 {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+
+ level += 1;
+ curr = loc.kind.file_id();
+ }
+ level
+ }
+
+ /// If this is a macro call, returns the syntax node of the call.
+ pub fn call_node(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxNode>> {
+ match self.0 {
+ HirFileIdRepr::FileId(_) => None,
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ Some(loc.kind.to_node(db))
+ }
+ }
+ }
+
+ /// If this is a macro call, returns the syntax node of the very first macro call this file resides in.
+ pub fn original_call_node(self, db: &dyn db::AstDatabase) -> Option<(FileId, SyntaxNode)> {
+ let mut call = match self.0 {
+ HirFileIdRepr::FileId(_) => return None,
+ HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
+ db.lookup_intern_macro_call(macro_call_id).kind.to_node(db)
+ }
+ };
+ loop {
+ match call.file_id.0 {
+ HirFileIdRepr::FileId(file_id) => break Some((file_id, call.value)),
+ HirFileIdRepr::MacroFile(MacroFile { macro_call_id }) => {
+ call = db.lookup_intern_macro_call(macro_call_id).kind.to_node(db);
+ }
+ }
+ }
+ }
+
+ /// Return expansion information if it is a macro-expansion file
+ pub fn expansion_info(self, db: &dyn db::AstDatabase) -> Option<ExpansionInfo> {
+ match self.0 {
+ HirFileIdRepr::FileId(_) => None,
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+
+ let arg_tt = loc.kind.arg(db)?;
+
+ let macro_def = db.macro_def(loc.def).ok()?;
+ let (parse, exp_map) = db.parse_macro_expansion(macro_file).value?;
+ let macro_arg = db.macro_arg(macro_file.macro_call_id)?;
+
+ let def = loc.def.ast_id().left().and_then(|id| {
+ let def_tt = match id.to_node(db) {
+ ast::Macro::MacroRules(mac) => mac.token_tree()?,
+ ast::Macro::MacroDef(_)
+ if matches!(*macro_def, TokenExpander::BuiltinAttr(_)) =>
+ {
+ return None
+ }
+ ast::Macro::MacroDef(mac) => mac.body()?,
+ };
+ Some(InFile::new(id.file_id, def_tt))
+ });
+ let attr_input_or_mac_def = def.or_else(|| match loc.kind {
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+ let tt = ast_id
+ .to_node(db)
+ .doc_comments_and_attrs()
+ .nth(invoc_attr_index as usize)
+ .and_then(Either::left)?
+ .token_tree()?;
+ Some(InFile::new(ast_id.file_id, tt))
+ }
+ _ => None,
+ });
+
+ Some(ExpansionInfo {
+ expanded: InFile::new(self, parse.syntax_node()),
+ arg: InFile::new(loc.kind.file_id(), arg_tt),
+ attr_input_or_mac_def,
+ macro_arg_shift: mbe::Shift::new(¯o_arg.0),
+ macro_arg,
+ macro_def,
+ exp_map,
+ })
+ }
+ }
+ }
+
+ /// Indicate it is macro file generated for builtin derive
+ pub fn is_builtin_derive(&self, db: &dyn db::AstDatabase) -> Option<InFile<ast::Attr>> {
+ match self.0 {
+ HirFileIdRepr::FileId(_) => None,
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ let attr = match loc.def.kind {
+ MacroDefKind::BuiltInDerive(..) => loc.kind.to_node(db),
+ _ => return None,
+ };
+ Some(attr.with_value(ast::Attr::cast(attr.value.clone())?))
+ }
+ }
+ }
+
+ pub fn is_custom_derive(&self, db: &dyn db::AstDatabase) -> bool {
+ match self.0 {
+ HirFileIdRepr::FileId(_) => false,
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ matches!(loc.def.kind, MacroDefKind::ProcMacro(_, ProcMacroKind::CustomDerive, _))
+ }
+ }
+ }
+
+ /// Return whether this file is an include macro
+ pub fn is_include_macro(&self, db: &dyn db::AstDatabase) -> bool {
+ match self.0 {
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ matches!(loc.eager, Some(EagerCallInfo { included_file: Some(_), .. }))
+ }
+ _ => false,
+ }
+ }
+
+ /// Return whether this file is an attr macro
+ pub fn is_attr_macro(&self, db: &dyn db::AstDatabase) -> bool {
+ match self.0 {
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ matches!(loc.kind, MacroCallKind::Attr { .. })
+ }
+ _ => false,
+ }
+ }
+
+ /// Return whether this file is the pseudo expansion of the derive attribute.
+ /// See [`crate::builtin_attr_macro::derive_attr_expand`].
+ pub fn is_derive_attr_pseudo_expansion(&self, db: &dyn db::AstDatabase) -> bool {
+ match self.0 {
+ HirFileIdRepr::MacroFile(macro_file) => {
+ let loc: MacroCallLoc = db.lookup_intern_macro_call(macro_file.macro_call_id);
+ matches!(loc.kind, MacroCallKind::Attr { is_derive: true, .. })
+ }
+ _ => false,
+ }
+ }
+
+ pub fn is_macro(self) -> bool {
+ matches!(self.0, HirFileIdRepr::MacroFile(_))
+ }
+
+ pub fn macro_file(self) -> Option<MacroFile> {
+ match self.0 {
+ HirFileIdRepr::FileId(_) => None,
+ HirFileIdRepr::MacroFile(m) => Some(m),
+ }
+ }
+}
+
+impl MacroDefId {
+ pub fn as_lazy_macro(
+ self,
+ db: &dyn db::AstDatabase,
+ krate: CrateId,
+ kind: MacroCallKind,
+ ) -> MacroCallId {
+ db.intern_macro_call(MacroCallLoc { def: self, krate, eager: None, kind })
+ }
+
+ pub fn ast_id(&self) -> Either<AstId<ast::Macro>, AstId<ast::Fn>> {
+ let id = match self.kind {
+ MacroDefKind::ProcMacro(.., id) => return Either::Right(id),
+ MacroDefKind::Declarative(id)
+ | MacroDefKind::BuiltIn(_, id)
+ | MacroDefKind::BuiltInAttr(_, id)
+ | MacroDefKind::BuiltInDerive(_, id)
+ | MacroDefKind::BuiltInEager(_, id) => id,
+ };
+ Either::Left(id)
+ }
+
+ pub fn is_proc_macro(&self) -> bool {
+ matches!(self.kind, MacroDefKind::ProcMacro(..))
+ }
+
+ pub fn is_attribute(&self) -> bool {
+ matches!(
+ self.kind,
+ MacroDefKind::BuiltInAttr(..) | MacroDefKind::ProcMacro(_, ProcMacroKind::Attr, _)
+ )
+ }
+}
+
+// FIXME: attribute indices do not account for `cfg_attr`, which means that we'll strip the whole
+// `cfg_attr` instead of just one of the attributes it expands to
+
+impl MacroCallKind {
+ /// Returns the file containing the macro invocation.
+ fn file_id(&self) -> HirFileId {
+ match *self {
+ MacroCallKind::FnLike { ast_id: InFile { file_id, .. }, .. }
+ | MacroCallKind::Derive { ast_id: InFile { file_id, .. }, .. }
+ | MacroCallKind::Attr { ast_id: InFile { file_id, .. }, .. } => file_id,
+ }
+ }
+
+ pub fn to_node(&self, db: &dyn db::AstDatabase) -> InFile<SyntaxNode> {
+ match self {
+ MacroCallKind::FnLike { ast_id, .. } => {
+ ast_id.with_value(ast_id.to_node(db).syntax().clone())
+ }
+ MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
+ // FIXME: handle `cfg_attr`
+ ast_id.with_value(ast_id.to_node(db)).map(|it| {
+ it.doc_comments_and_attrs()
+ .nth(*derive_attr_index as usize)
+ .and_then(|it| match it {
+ Either::Left(attr) => Some(attr.syntax().clone()),
+ Either::Right(_) => None,
+ })
+ .unwrap_or_else(|| it.syntax().clone())
+ })
+ }
+ MacroCallKind::Attr { ast_id, is_derive: true, invoc_attr_index, .. } => {
+ // FIXME: handle `cfg_attr`
+ ast_id.with_value(ast_id.to_node(db)).map(|it| {
+ it.doc_comments_and_attrs()
+ .nth(*invoc_attr_index as usize)
+ .and_then(|it| match it {
+ Either::Left(attr) => Some(attr.syntax().clone()),
+ Either::Right(_) => None,
+ })
+ .unwrap_or_else(|| it.syntax().clone())
+ })
+ }
+ MacroCallKind::Attr { ast_id, .. } => {
+ ast_id.with_value(ast_id.to_node(db).syntax().clone())
+ }
+ }
+ }
+
+ /// Returns the original file range that best describes the location of this macro call.
+ ///
+ /// Unlike `MacroCallKind::original_call_range`, this also spans the item of attributes and derives.
+ pub fn original_call_range_with_body(self, db: &dyn db::AstDatabase) -> FileRange {
+ let mut kind = self;
+ let file_id = loop {
+ match kind.file_id().0 {
+ HirFileIdRepr::MacroFile(file) => {
+ kind = db.lookup_intern_macro_call(file.macro_call_id).kind;
+ }
+ HirFileIdRepr::FileId(file_id) => break file_id,
+ }
+ };
+
+ let range = match kind {
+ MacroCallKind::FnLike { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
+ MacroCallKind::Derive { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
+ MacroCallKind::Attr { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
+ };
+
+ FileRange { range, file_id }
+ }
+
+ /// Returns the original file range that best describes the location of this macro call.
+ ///
+ /// Here we try to roughly match what rustc does to improve diagnostics: fn-like macros
+ /// get the whole `ast::MacroCall`, attribute macros get the attribute's range, and derives
+ /// get only the specific derive that is being referred to.
+ pub fn original_call_range(self, db: &dyn db::AstDatabase) -> FileRange {
+ let mut kind = self;
+ let file_id = loop {
+ match kind.file_id().0 {
+ HirFileIdRepr::MacroFile(file) => {
+ kind = db.lookup_intern_macro_call(file.macro_call_id).kind;
+ }
+ HirFileIdRepr::FileId(file_id) => break file_id,
+ }
+ };
+
+ let range = match kind {
+ MacroCallKind::FnLike { ast_id, .. } => ast_id.to_node(db).syntax().text_range(),
+ MacroCallKind::Derive { ast_id, derive_attr_index, .. } => {
+ // FIXME: should be the range of the macro name, not the whole derive
+ ast_id
+ .to_node(db)
+ .doc_comments_and_attrs()
+ .nth(derive_attr_index as usize)
+ .expect("missing derive")
+ .expect_left("derive is a doc comment?")
+ .syntax()
+ .text_range()
+ }
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => ast_id
+ .to_node(db)
+ .doc_comments_and_attrs()
+ .nth(invoc_attr_index as usize)
+ .expect("missing attribute")
+ .expect_left("attribute macro is a doc comment?")
+ .syntax()
+ .text_range(),
+ };
+
+ FileRange { range, file_id }
+ }
+
+ fn arg(&self, db: &dyn db::AstDatabase) -> Option<SyntaxNode> {
+ match self {
+ MacroCallKind::FnLike { ast_id, .. } => {
+ Some(ast_id.to_node(db).token_tree()?.syntax().clone())
+ }
+ MacroCallKind::Derive { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()),
+ MacroCallKind::Attr { ast_id, .. } => Some(ast_id.to_node(db).syntax().clone()),
+ }
+ }
+
+ fn expand_to(&self) -> ExpandTo {
+ match self {
+ MacroCallKind::FnLike { expand_to, .. } => *expand_to,
+ MacroCallKind::Derive { .. } => ExpandTo::Items,
+ MacroCallKind::Attr { is_derive: true, .. } => ExpandTo::Statements,
+ MacroCallKind::Attr { .. } => ExpandTo::Items, // is this always correct?
+ }
+ }
+}
+
+impl MacroCallId {
+ pub fn as_file(self) -> HirFileId {
+ MacroFile { macro_call_id: self }.into()
+ }
+}
+
+/// ExpansionInfo mainly describes how to map text range between src and expanded macro
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct ExpansionInfo {
+ expanded: InFile<SyntaxNode>,
+ /// The argument TokenTree or item for attributes
+ arg: InFile<SyntaxNode>,
+ /// The `macro_rules!` or attribute input.
+ attr_input_or_mac_def: Option<InFile<ast::TokenTree>>,
+
+ macro_def: Arc<TokenExpander>,
+ macro_arg: Arc<(tt::Subtree, mbe::TokenMap, fixup::SyntaxFixupUndoInfo)>,
+ /// A shift built from `macro_arg`'s subtree, relevant for attributes as the item is the macro arg
+ /// and as such we need to shift tokens if they are part of an attributes input instead of their item.
+ macro_arg_shift: mbe::Shift,
+ exp_map: Arc<mbe::TokenMap>,
+}
+
+impl ExpansionInfo {
+ pub fn expanded(&self) -> InFile<SyntaxNode> {
+ self.expanded.clone()
+ }
+
+ pub fn call_node(&self) -> Option<InFile<SyntaxNode>> {
+ Some(self.arg.with_value(self.arg.value.parent()?))
+ }
+
+ /// Map a token down from macro input into the macro expansion.
+ ///
+ /// The inner workings of this function differ slightly depending on the type of macro we are dealing with:
+ /// - declarative:
+ /// For declarative macros, we need to accommodate for the macro definition site(which acts as a second unchanging input)
+ /// , as tokens can mapped in and out of it.
+ /// To do this we shift all ids in the expansion by the maximum id of the definition site giving us an easy
+ /// way to map all the tokens.
+ /// - attribute:
+ /// Attributes have two different inputs, the input tokentree in the attribute node and the item
+ /// the attribute is annotating. Similarly as for declarative macros we need to do a shift here
+ /// as well. Currently this is done by shifting the attribute input by the maximum id of the item.
+ /// - function-like and derives:
+ /// Both of these only have one simple call site input so no special handling is required here.
+ pub fn map_token_down(
+ &self,
+ db: &dyn db::AstDatabase,
+ item: Option<ast::Item>,
+ token: InFile<&SyntaxToken>,
+ ) -> Option<impl Iterator<Item = InFile<SyntaxToken>> + '_> {
+ assert_eq!(token.file_id, self.arg.file_id);
+ let token_id_in_attr_input = if let Some(item) = item {
+ // check if we are mapping down in an attribute input
+ // this is a special case as attributes can have two inputs
+ let call_id = self.expanded.file_id.macro_file()?.macro_call_id;
+ let loc = db.lookup_intern_macro_call(call_id);
+
+ let token_range = token.value.text_range();
+ match &loc.kind {
+ MacroCallKind::Attr { attr_args, invoc_attr_index, is_derive, .. } => {
+ let attr = item
+ .doc_comments_and_attrs()
+ .nth(*invoc_attr_index as usize)
+ .and_then(Either::left)?;
+ match attr.token_tree() {
+ Some(token_tree)
+ if token_tree.syntax().text_range().contains_range(token_range) =>
+ {
+ let attr_input_start =
+ token_tree.left_delimiter_token()?.text_range().start();
+ let relative_range =
+ token.value.text_range().checked_sub(attr_input_start)?;
+ // shift by the item's tree's max id
+ let token_id = attr_args.1.token_by_range(relative_range)?;
+ let token_id = if *is_derive {
+ // we do not shift for `#[derive]`, as we only need to downmap the derive attribute tokens
+ token_id
+ } else {
+ self.macro_arg_shift.shift(token_id)
+ };
+ Some(token_id)
+ }
+ _ => None,
+ }
+ }
+ _ => None,
+ }
+ } else {
+ None
+ };
+
+ let token_id = match token_id_in_attr_input {
+ Some(token_id) => token_id,
+ // the token is not inside an attribute's input so do the lookup in the macro_arg as usual
+ None => {
+ let relative_range =
+ token.value.text_range().checked_sub(self.arg.value.text_range().start())?;
+ let token_id = self.macro_arg.1.token_by_range(relative_range)?;
+ // conditionally shift the id by a declaratives macro definition
+ self.macro_def.map_id_down(token_id)
+ }
+ };
+
+ let tokens = self
+ .exp_map
+ .ranges_by_token(token_id, token.value.kind())
+ .flat_map(move |range| self.expanded.value.covering_element(range).into_token());
+
+ Some(tokens.map(move |token| self.expanded.with_value(token)))
+ }
+
+ /// Map a token up out of the expansion it resides in into the arguments of the macro call of the expansion.
+ pub fn map_token_up(
+ &self,
+ db: &dyn db::AstDatabase,
+ token: InFile<&SyntaxToken>,
+ ) -> Option<(InFile<SyntaxToken>, Origin)> {
+ // Fetch the id through its text range,
+ let token_id = self.exp_map.token_by_range(token.value.text_range())?;
+ // conditionally unshifting the id to accommodate for macro-rules def site
+ let (mut token_id, origin) = self.macro_def.map_id_up(token_id);
+
+ let call_id = self.expanded.file_id.macro_file()?.macro_call_id;
+ let loc = db.lookup_intern_macro_call(call_id);
+
+ // Attributes are a bit special for us, they have two inputs, the input tokentree and the annotated item.
+ let (token_map, tt) = match &loc.kind {
+ MacroCallKind::Attr { attr_args, is_derive: true, .. } => {
+ (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
+ }
+ MacroCallKind::Attr { attr_args, .. } => {
+ // try unshifting the the token id, if unshifting fails, the token resides in the non-item attribute input
+ // note that the `TokenExpander::map_id_up` earlier only unshifts for declarative macros, so we don't double unshift with this
+ match self.macro_arg_shift.unshift(token_id) {
+ Some(unshifted) => {
+ token_id = unshifted;
+ (&attr_args.1, self.attr_input_or_mac_def.clone()?.syntax().cloned())
+ }
+ None => (&self.macro_arg.1, self.arg.clone()),
+ }
+ }
+ _ => match origin {
+ mbe::Origin::Call => (&self.macro_arg.1, self.arg.clone()),
+ mbe::Origin::Def => match (&*self.macro_def, &self.attr_input_or_mac_def) {
+ (TokenExpander::DeclarativeMacro { def_site_token_map, .. }, Some(tt)) => {
+ (def_site_token_map, tt.syntax().cloned())
+ }
+ _ => panic!("`Origin::Def` used with non-`macro_rules!` macro"),
+ },
+ },
+ };
+
+ let range = token_map.first_range_by_token(token_id, token.value.kind())?;
+ let token =
+ tt.value.covering_element(range + tt.value.text_range().start()).into_token()?;
+ Some((tt.with_value(token), origin))
+ }
+}
+
+/// `AstId` points to an AST node in any file.
+///
+/// It is stable across reparses, and can be used as salsa key/value.
+pub type AstId<N> = InFile<FileAstId<N>>;
+
+impl<N: AstNode> AstId<N> {
+ pub fn to_node(&self, db: &dyn db::AstDatabase) -> N {
+ let root = db.parse_or_expand(self.file_id).unwrap();
+ db.ast_id_map(self.file_id).get(self.value).to_node(&root)
+ }
+}
+
+/// `InFile<T>` stores a value of `T` inside a particular file/syntax tree.
+///
+/// Typical usages are:
+///
+/// * `InFile<SyntaxNode>` -- syntax node in a file
+/// * `InFile<ast::FnDef>` -- ast node in a file
+/// * `InFile<TextSize>` -- offset in a file
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
+pub struct InFile<T> {
+ pub file_id: HirFileId,
+ pub value: T,
+}
+
+impl<T> InFile<T> {
+ pub fn new(file_id: HirFileId, value: T) -> InFile<T> {
+ InFile { file_id, value }
+ }
+
+ pub fn with_value<U>(&self, value: U) -> InFile<U> {
+ InFile::new(self.file_id, value)
+ }
+
+ pub fn map<F: FnOnce(T) -> U, U>(self, f: F) -> InFile<U> {
+ InFile::new(self.file_id, f(self.value))
+ }
+
+ pub fn as_ref(&self) -> InFile<&T> {
+ self.with_value(&self.value)
+ }
+
+ pub fn file_syntax(&self, db: &dyn db::AstDatabase) -> SyntaxNode {
+ db.parse_or_expand(self.file_id).expect("source created from invalid file")
+ }
+}
+
+impl<T: Clone> InFile<&T> {
+ pub fn cloned(&self) -> InFile<T> {
+ self.with_value(self.value.clone())
+ }
+}
+
+impl<T> InFile<Option<T>> {
+ pub fn transpose(self) -> Option<InFile<T>> {
+ let value = self.value?;
+ Some(InFile::new(self.file_id, value))
+ }
+}
+
+impl<'a> InFile<&'a SyntaxNode> {
+ pub fn ancestors_with_macros(
+ self,
+ db: &dyn db::AstDatabase,
+ ) -> impl Iterator<Item = InFile<SyntaxNode>> + Clone + '_ {
+ iter::successors(Some(self.cloned()), move |node| match node.value.parent() {
+ Some(parent) => Some(node.with_value(parent)),
+ None => node.file_id.call_node(db),
+ })
+ }
+
+ /// Skips the attributed item that caused the macro invocation we are climbing up
+ pub fn ancestors_with_macros_skip_attr_item(
+ self,
+ db: &dyn db::AstDatabase,
+ ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
+ let succ = move |node: &InFile<SyntaxNode>| match node.value.parent() {
+ Some(parent) => Some(node.with_value(parent)),
+ None => {
+ let parent_node = node.file_id.call_node(db)?;
+ if node.file_id.is_attr_macro(db) {
+ // macro call was an attributed item, skip it
+ // FIXME: does this fail if this is a direct expansion of another macro?
+ parent_node.map(|node| node.parent()).transpose()
+ } else {
+ Some(parent_node)
+ }
+ }
+ };
+ iter::successors(succ(&self.cloned()), succ)
+ }
+
+ /// Falls back to the macro call range if the node cannot be mapped up fully.
+ ///
+ /// For attributes and derives, this will point back to the attribute only.
+ /// For the entire item `InFile::use original_file_range_full`.
+ pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange {
+ match self.file_id.0 {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ if let Some(res) = self.original_file_range_opt(db) {
+ return res;
+ }
+ // Fall back to whole macro call.
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range(db)
+ }
+ }
+ }
+
+ /// Attempts to map the syntax node back up its macro calls.
+ pub fn original_file_range_opt(self, db: &dyn db::AstDatabase) -> Option<FileRange> {
+ match ascend_node_border_tokens(db, self) {
+ Some(InFile { file_id, value: (first, last) }) => {
+ let original_file = file_id.original_file(db);
+ let range = first.text_range().cover(last.text_range());
+ if file_id != original_file.into() {
+ tracing::error!("Failed mapping up more for {:?}", range);
+ return None;
+ }
+ Some(FileRange { file_id: original_file, range })
+ }
+ _ if !self.file_id.is_macro() => Some(FileRange {
+ file_id: self.file_id.original_file(db),
+ range: self.value.text_range(),
+ }),
+ _ => None,
+ }
+ }
+
+ pub fn original_syntax_node(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxNode>> {
+ // This kind of upmapping can only be achieved in attribute expanded files,
- // as we don't have node inputs otherwise and therefor can't find an `N` node in the input
++ // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
+ if !self.file_id.is_macro() {
+ return Some(self.map(Clone::clone));
+ } else if !self.file_id.is_attr_macro(db) {
+ return None;
+ }
+
+ if let Some(InFile { file_id, value: (first, last) }) = ascend_node_border_tokens(db, self)
+ {
+ if file_id.is_macro() {
+ let range = first.text_range().cover(last.text_range());
+ tracing::error!("Failed mapping out of macro file for {:?}", range);
+ return None;
+ }
+ // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes
+ let anc = algo::least_common_ancestor(&first.parent()?, &last.parent()?)?;
+ let kind = self.value.kind();
+ let value = anc.ancestors().find(|it| it.kind() == kind)?;
+ return Some(InFile::new(file_id, value));
+ }
+ None
+ }
+}
+
+impl InFile<SyntaxToken> {
+ pub fn upmap(self, db: &dyn db::AstDatabase) -> Option<InFile<SyntaxToken>> {
+ let expansion = self.file_id.expansion_info(db)?;
+ expansion.map_token_up(db, self.as_ref()).map(|(it, _)| it)
+ }
+
+ /// Falls back to the macro call range if the node cannot be mapped up fully.
+ pub fn original_file_range(self, db: &dyn db::AstDatabase) -> FileRange {
+ match self.file_id.0 {
+ HirFileIdRepr::FileId(file_id) => FileRange { file_id, range: self.value.text_range() },
+ HirFileIdRepr::MacroFile(mac_file) => {
+ if let Some(res) = self.original_file_range_opt(db) {
+ return res;
+ }
+ // Fall back to whole macro call.
+ let loc = db.lookup_intern_macro_call(mac_file.macro_call_id);
+ loc.kind.original_call_range(db)
+ }
+ }
+ }
+
+ /// Attempts to map the syntax node back up its macro calls.
+ pub fn original_file_range_opt(self, db: &dyn db::AstDatabase) -> Option<FileRange> {
+ match self.file_id.0 {
+ HirFileIdRepr::FileId(file_id) => {
+ Some(FileRange { file_id, range: self.value.text_range() })
+ }
+ HirFileIdRepr::MacroFile(_) => {
+ let expansion = self.file_id.expansion_info(db)?;
+ let InFile { file_id, value } = ascend_call_token(db, &expansion, self)?;
+ let original_file = file_id.original_file(db);
+ if file_id != original_file.into() {
+ return None;
+ }
+ Some(FileRange { file_id: original_file, range: value.text_range() })
+ }
+ }
+ }
+
+ pub fn ancestors_with_macros(
+ self,
+ db: &dyn db::AstDatabase,
+ ) -> impl Iterator<Item = InFile<SyntaxNode>> + '_ {
+ self.value.parent().into_iter().flat_map({
+ let file_id = self.file_id;
+ move |parent| InFile::new(file_id, &parent).ancestors_with_macros(db)
+ })
+ }
+}
+
+fn ascend_node_border_tokens(
+ db: &dyn db::AstDatabase,
+ InFile { file_id, value: node }: InFile<&SyntaxNode>,
+) -> Option<InFile<(SyntaxToken, SyntaxToken)>> {
+ let expansion = file_id.expansion_info(db)?;
+
+ let first_token = |node: &SyntaxNode| skip_trivia_token(node.first_token()?, Direction::Next);
+ let last_token = |node: &SyntaxNode| skip_trivia_token(node.last_token()?, Direction::Prev);
+
+ let first = first_token(node)?;
+ let last = last_token(node)?;
+ let first = ascend_call_token(db, &expansion, InFile::new(file_id, first))?;
+ let last = ascend_call_token(db, &expansion, InFile::new(file_id, last))?;
+ (first.file_id == last.file_id).then(|| InFile::new(first.file_id, (first.value, last.value)))
+}
+
+fn ascend_call_token(
+ db: &dyn db::AstDatabase,
+ expansion: &ExpansionInfo,
+ token: InFile<SyntaxToken>,
+) -> Option<InFile<SyntaxToken>> {
+ let mut mapping = expansion.map_token_up(db, token.as_ref())?;
+ while let (mapped, Origin::Call) = mapping {
+ match mapped.file_id.expansion_info(db) {
+ Some(info) => mapping = info.map_token_up(db, mapped.as_ref())?,
+ None => return Some(mapped),
+ }
+ }
+ None
+}
+
+impl<N: AstNode> InFile<N> {
+ pub fn descendants<T: AstNode>(self) -> impl Iterator<Item = InFile<T>> {
+ self.value.syntax().descendants().filter_map(T::cast).map(move |n| self.with_value(n))
+ }
+
+ pub fn original_ast_node(self, db: &dyn db::AstDatabase) -> Option<InFile<N>> {
+ // This kind of upmapping can only be achieved in attribute expanded files,
++ // as we don't have node inputs otherwise and therefore can't find an `N` node in the input
+ if !self.file_id.is_macro() {
+ return Some(self);
+ } else if !self.file_id.is_attr_macro(db) {
+ return None;
+ }
+
+ if let Some(InFile { file_id, value: (first, last) }) =
+ ascend_node_border_tokens(db, self.syntax())
+ {
+ if file_id.is_macro() {
+ let range = first.text_range().cover(last.text_range());
+ tracing::error!("Failed mapping out of macro file for {:?}", range);
+ return None;
+ }
+ // FIXME: This heuristic is brittle and with the right macro may select completely unrelated nodes
+ let anc = algo::least_common_ancestor(&first.parent()?, &last.parent()?)?;
+ let value = anc.ancestors().find_map(N::cast)?;
+ return Some(InFile::new(file_id, value));
+ }
+ None
+ }
+
+ pub fn syntax(&self) -> InFile<&SyntaxNode> {
+ self.with_value(self.value.syntax())
+ }
+}
+
+/// In Rust, macros expand token trees to token trees. When we want to turn a
+/// token tree into an AST node, we need to figure out what kind of AST node we
+/// want: something like `foo` can be a type, an expression, or a pattern.
+///
+/// Naively, one would think that "what this expands to" is a property of a
+/// particular macro: macro `m1` returns an item, while macro `m2` returns an
+/// expression, etc. That's not the case -- macros are polymorphic in the
+/// result, and can expand to any type of the AST node.
+///
+/// What defines the actual AST node is the syntactic context of the macro
+/// invocation. As a contrived example, in `let T![*] = T![*];` the first `T`
+/// expands to a pattern, while the second one expands to an expression.
+///
+/// `ExpandTo` captures this bit of information about a particular macro call
+/// site.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ExpandTo {
+ Statements,
+ Items,
+ Pattern,
+ Type,
+ Expr,
+}
+
+impl ExpandTo {
+ pub fn from_call_site(call: &ast::MacroCall) -> ExpandTo {
+ use syntax::SyntaxKind::*;
+
+ let syn = call.syntax();
+
+ let parent = match syn.parent() {
+ Some(it) => it,
+ None => return ExpandTo::Statements,
+ };
+
+ // FIXME: macros in statement position are treated as expression statements, they should
+ // probably be their own statement kind. The *grand*parent indicates what's valid.
+ if parent.kind() == MACRO_EXPR
+ && parent
+ .parent()
+ .map_or(false, |p| matches!(p.kind(), EXPR_STMT | STMT_LIST | MACRO_STMTS))
+ {
+ return ExpandTo::Statements;
+ }
+
+ match parent.kind() {
+ MACRO_ITEMS | SOURCE_FILE | ITEM_LIST => ExpandTo::Items,
+ MACRO_STMTS | EXPR_STMT | STMT_LIST => ExpandTo::Statements,
+ MACRO_PAT => ExpandTo::Pattern,
+ MACRO_TYPE => ExpandTo::Type,
+
+ ARG_LIST | ARRAY_EXPR | AWAIT_EXPR | BIN_EXPR | BREAK_EXPR | CALL_EXPR | CAST_EXPR
+ | CLOSURE_EXPR | FIELD_EXPR | FOR_EXPR | IF_EXPR | INDEX_EXPR | LET_EXPR
+ | MATCH_ARM | MATCH_EXPR | MATCH_GUARD | METHOD_CALL_EXPR | PAREN_EXPR | PATH_EXPR
+ | PREFIX_EXPR | RANGE_EXPR | RECORD_EXPR_FIELD | REF_EXPR | RETURN_EXPR | TRY_EXPR
+ | TUPLE_EXPR | WHILE_EXPR | MACRO_EXPR => ExpandTo::Expr,
+ _ => {
+ // Unknown , Just guess it is `Items`
+ ExpandTo::Items
+ }
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct UnresolvedMacro {
+ pub path: ModPath,
+}
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "hir-ty"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+itertools = "0.10.5"
+arrayvec = "0.7.2"
+smallvec = "1.10.0"
+ena = "0.14.0"
+tracing = "0.1.35"
+rustc-hash = "1.1.0"
+scoped-tls = "1.0.0"
+chalk-solve = { version = "0.86.0", default-features = false }
+chalk-ir = "0.86.0"
+chalk-recursive = { version = "0.86.0", default-features = false }
+chalk-derive = "0.86.0"
+la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+once_cell = "1.15.0"
+typed-arena = "2.0.1"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+hir-def = { path = "../hir-def", version = "0.0.0" }
+hir-expand = { path = "../hir-expand", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+limit = { path = "../limit", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
+expect-test = "1.4.0"
+tracing = "0.1.35"
+tracing-subscriber = { version = "0.3.16", default-features = false, features = [
+ "env-filter",
+ "registry",
+] }
+tracing-tree = "0.2.1"
--- /dev/null
- InferenceResult,
+//! The type system. We currently use this to infer types for completion, hover
+//! information and various assists.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+#[allow(unused)]
+macro_rules! eprintln {
+ ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
+mod autoderef;
+mod builder;
+mod chalk_db;
+mod chalk_ext;
+pub mod consteval;
+mod infer;
+mod inhabitedness;
+mod interner;
+mod lower;
+mod mapping;
+mod tls;
+mod utils;
+mod walk;
+pub mod db;
+pub mod diagnostics;
+pub mod display;
+pub mod method_resolution;
+pub mod primitive;
+pub mod traits;
+
+#[cfg(test)]
+mod tests;
+#[cfg(test)]
+mod test_db;
+
+use std::sync::Arc;
+
+use chalk_ir::{
+ fold::{Shift, TypeFoldable},
+ interner::HasInterner,
+ NoSolution, UniverseIndex,
+};
+use hir_def::{expr::ExprId, type_ref::Rawness, TypeOrConstParamId};
+use hir_expand::name;
+use itertools::Either;
+use traits::FnTrait;
+use utils::Generics;
+
+use crate::{consteval::unknown_const, db::HirDatabase, utils::generics};
+
+pub use autoderef::autoderef;
+pub use builder::{ParamKind, TyBuilder};
+pub use chalk_ext::*;
+pub use infer::{
+ could_coerce, could_unify, Adjust, Adjustment, AutoBorrow, BindingMode, InferenceDiagnostic,
- self_ty: &Canonical<Ty>,
++ InferenceResult, OverloadedDeref, PointerCast,
+};
+pub use interner::Interner;
+pub use lower::{
+ associated_type_shorthand_candidates, CallableDefId, ImplTraitLoweringMode, TyDefId,
+ TyLoweringContext, ValueTyDefId,
+};
+pub use mapping::{
+ from_assoc_type_id, from_chalk_trait_id, from_foreign_def_id, from_placeholder_idx,
+ lt_from_placeholder_idx, to_assoc_type_id, to_chalk_trait_id, to_foreign_def_id,
+ to_placeholder_idx,
+};
+pub use traits::TraitEnvironment;
+pub use utils::{all_super_traits, is_fn_unsafe_to_call};
+pub use walk::TypeWalk;
+
+pub use chalk_ir::{
+ cast::Cast, AdtId, BoundVar, DebruijnIndex, Mutability, Safety, Scalar, TyVariableKind,
+};
+
+pub type ForeignDefId = chalk_ir::ForeignDefId<Interner>;
+pub type AssocTypeId = chalk_ir::AssocTypeId<Interner>;
+pub type FnDefId = chalk_ir::FnDefId<Interner>;
+pub type ClosureId = chalk_ir::ClosureId<Interner>;
+pub type OpaqueTyId = chalk_ir::OpaqueTyId<Interner>;
+pub type PlaceholderIndex = chalk_ir::PlaceholderIndex;
+
+pub type VariableKind = chalk_ir::VariableKind<Interner>;
+pub type VariableKinds = chalk_ir::VariableKinds<Interner>;
+pub type CanonicalVarKinds = chalk_ir::CanonicalVarKinds<Interner>;
+/// Represents generic parameters and an item bound by them. When the item has parent, the binders
+/// also contain the generic parameters for its parent. See chalk's documentation for details.
+///
+/// One thing to keep in mind when working with `Binders` (and `Substitution`s, which represent
+/// generic arguments) in rust-analyzer is that the ordering within *is* significant - the generic
+/// parameters/arguments for an item MUST come before those for its parent. This is to facilitate
+/// the integration with chalk-solve, which mildly puts constraints as such. See #13335 for its
+/// motivation in detail.
+pub type Binders<T> = chalk_ir::Binders<T>;
+/// Interned list of generic arguments for an item. When an item has parent, the `Substitution` for
+/// it contains generic arguments for both its parent and itself. See chalk's documentation for
+/// details.
+///
+/// See `Binders` for the constraint on the ordering.
+pub type Substitution = chalk_ir::Substitution<Interner>;
+pub type GenericArg = chalk_ir::GenericArg<Interner>;
+pub type GenericArgData = chalk_ir::GenericArgData<Interner>;
+
+pub type Ty = chalk_ir::Ty<Interner>;
+pub type TyKind = chalk_ir::TyKind<Interner>;
+pub type DynTy = chalk_ir::DynTy<Interner>;
+pub type FnPointer = chalk_ir::FnPointer<Interner>;
+// pub type FnSubst = chalk_ir::FnSubst<Interner>;
+pub use chalk_ir::FnSubst;
+pub type ProjectionTy = chalk_ir::ProjectionTy<Interner>;
+pub type AliasTy = chalk_ir::AliasTy<Interner>;
+pub type OpaqueTy = chalk_ir::OpaqueTy<Interner>;
+pub type InferenceVar = chalk_ir::InferenceVar;
+
+pub type Lifetime = chalk_ir::Lifetime<Interner>;
+pub type LifetimeData = chalk_ir::LifetimeData<Interner>;
+pub type LifetimeOutlives = chalk_ir::LifetimeOutlives<Interner>;
+
+pub type Const = chalk_ir::Const<Interner>;
+pub type ConstData = chalk_ir::ConstData<Interner>;
+pub type ConstValue = chalk_ir::ConstValue<Interner>;
+pub type ConcreteConst = chalk_ir::ConcreteConst<Interner>;
+
+pub type ChalkTraitId = chalk_ir::TraitId<Interner>;
+pub type TraitRef = chalk_ir::TraitRef<Interner>;
+pub type QuantifiedWhereClause = Binders<WhereClause>;
+pub type QuantifiedWhereClauses = chalk_ir::QuantifiedWhereClauses<Interner>;
+pub type Canonical<T> = chalk_ir::Canonical<T>;
+
+pub type FnSig = chalk_ir::FnSig<Interner>;
+
+pub type InEnvironment<T> = chalk_ir::InEnvironment<T>;
+pub type Environment = chalk_ir::Environment<Interner>;
+pub type DomainGoal = chalk_ir::DomainGoal<Interner>;
+pub type Goal = chalk_ir::Goal<Interner>;
+pub type AliasEq = chalk_ir::AliasEq<Interner>;
+pub type Solution = chalk_solve::Solution<Interner>;
+pub type ConstrainedSubst = chalk_ir::ConstrainedSubst<Interner>;
+pub type Guidance = chalk_solve::Guidance<Interner>;
+pub type WhereClause = chalk_ir::WhereClause<Interner>;
+
+/// Return an index of a parameter in the generic type parameter list by it's id.
+pub fn param_idx(db: &dyn HirDatabase, id: TypeOrConstParamId) -> Option<usize> {
+ generics(db.upcast(), id.parent).param_idx(id)
+}
+
+pub(crate) fn wrap_empty_binders<T>(value: T) -> Binders<T>
+where
+ T: TypeFoldable<Interner> + HasInterner<Interner = Interner>,
+{
+ Binders::empty(Interner, value.shifted_in_from(Interner, DebruijnIndex::ONE))
+}
+
+pub(crate) fn make_type_and_const_binders<T: HasInterner<Interner = Interner>>(
+ which_is_const: impl Iterator<Item = Option<Ty>>,
+ value: T,
+) -> Binders<T> {
+ Binders::new(
+ VariableKinds::from_iter(
+ Interner,
+ which_is_const.map(|x| {
+ if let Some(ty) = x {
+ chalk_ir::VariableKind::Const(ty)
+ } else {
+ chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
+ }
+ }),
+ ),
+ value,
+ )
+}
+
+pub(crate) fn make_single_type_binders<T: HasInterner<Interner = Interner>>(
+ value: T,
+) -> Binders<T> {
+ Binders::new(
+ VariableKinds::from_iter(
+ Interner,
+ std::iter::once(chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)),
+ ),
+ value,
+ )
+}
+
+pub(crate) fn make_binders_with_count<T: HasInterner<Interner = Interner>>(
+ db: &dyn HirDatabase,
+ count: usize,
+ generics: &Generics,
+ value: T,
+) -> Binders<T> {
+ let it = generics.iter_id().take(count).map(|id| match id {
+ Either::Left(_) => None,
+ Either::Right(id) => Some(db.const_param_ty(id)),
+ });
+ crate::make_type_and_const_binders(it, value)
+}
+
+pub(crate) fn make_binders<T: HasInterner<Interner = Interner>>(
+ db: &dyn HirDatabase,
+ generics: &Generics,
+ value: T,
+) -> Binders<T> {
+ make_binders_with_count(db, usize::MAX, generics, value)
+}
+
+// FIXME: get rid of this, just replace it by FnPointer
+/// A function signature as seen by type inference: Several parameter types and
+/// one return type.
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct CallableSig {
+ params_and_return: Arc<[Ty]>,
+ is_varargs: bool,
+ safety: Safety,
+}
+
+has_interner!(CallableSig);
+
+/// A polymorphic function signature.
+pub type PolyFnSig = Binders<CallableSig>;
+
+impl CallableSig {
+ pub fn from_params_and_return(
+ mut params: Vec<Ty>,
+ ret: Ty,
+ is_varargs: bool,
+ safety: Safety,
+ ) -> CallableSig {
+ params.push(ret);
+ CallableSig { params_and_return: params.into(), is_varargs, safety }
+ }
+
+ pub fn from_fn_ptr(fn_ptr: &FnPointer) -> CallableSig {
+ CallableSig {
+ // FIXME: what to do about lifetime params? -> return PolyFnSig
+ params_and_return: fn_ptr
+ .substitution
+ .clone()
+ .shifted_out_to(Interner, DebruijnIndex::ONE)
+ .expect("unexpected lifetime vars in fn ptr")
+ .0
+ .as_slice(Interner)
+ .iter()
+ .map(|arg| arg.assert_ty_ref(Interner).clone())
+ .collect(),
+ is_varargs: fn_ptr.sig.variadic,
+ safety: fn_ptr.sig.safety,
+ }
+ }
+
+ pub fn to_fn_ptr(&self) -> FnPointer {
+ FnPointer {
+ num_binders: 0,
+ sig: FnSig { abi: (), safety: self.safety, variadic: self.is_varargs },
+ substitution: FnSubst(Substitution::from_iter(
+ Interner,
+ self.params_and_return.iter().cloned(),
+ )),
+ }
+ }
+
+ pub fn params(&self) -> &[Ty] {
+ &self.params_and_return[0..self.params_and_return.len() - 1]
+ }
+
+ pub fn ret(&self) -> &Ty {
+ &self.params_and_return[self.params_and_return.len() - 1]
+ }
+}
+
+impl TypeFoldable<Interner> for CallableSig {
+ fn try_fold_with<E>(
+ self,
+ folder: &mut dyn chalk_ir::fold::FallibleTypeFolder<Interner, Error = E>,
+ outer_binder: DebruijnIndex,
+ ) -> Result<Self, E> {
+ let vec = self.params_and_return.to_vec();
+ let folded = vec.try_fold_with(folder, outer_binder)?;
+ Ok(CallableSig {
+ params_and_return: folded.into(),
+ is_varargs: self.is_varargs,
+ safety: self.safety,
+ })
+ }
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
+pub enum ImplTraitId {
+ ReturnTypeImplTrait(hir_def::FunctionId, u16),
+ AsyncBlockTypeImplTrait(hir_def::DefWithBodyId, ExprId),
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct ReturnTypeImplTraits {
+ pub(crate) impl_traits: Vec<ReturnTypeImplTrait>,
+}
+
+has_interner!(ReturnTypeImplTraits);
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub(crate) struct ReturnTypeImplTrait {
+ pub(crate) bounds: Binders<Vec<QuantifiedWhereClause>>,
+}
+
+pub fn static_lifetime() -> Lifetime {
+ LifetimeData::Static.intern(Interner)
+}
+
+pub(crate) fn fold_free_vars<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>>(
+ t: T,
+ for_ty: impl FnMut(BoundVar, DebruijnIndex) -> Ty,
+ for_const: impl FnMut(Ty, BoundVar, DebruijnIndex) -> Const,
+) -> T {
+ use chalk_ir::fold::TypeFolder;
+
+ #[derive(chalk_derive::FallibleTypeFolder)]
+ #[has_interner(Interner)]
+ struct FreeVarFolder<
+ F1: FnMut(BoundVar, DebruijnIndex) -> Ty,
+ F2: FnMut(Ty, BoundVar, DebruijnIndex) -> Const,
+ >(F1, F2);
+ impl<
+ F1: FnMut(BoundVar, DebruijnIndex) -> Ty,
+ F2: FnMut(Ty, BoundVar, DebruijnIndex) -> Const,
+ > TypeFolder<Interner> for FreeVarFolder<F1, F2>
+ {
+ fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn fold_free_var_ty(&mut self, bound_var: BoundVar, outer_binder: DebruijnIndex) -> Ty {
+ self.0(bound_var, outer_binder)
+ }
+
+ fn fold_free_var_const(
+ &mut self,
+ ty: Ty,
+ bound_var: BoundVar,
+ outer_binder: DebruijnIndex,
+ ) -> Const {
+ self.1(ty, bound_var, outer_binder)
+ }
+ }
+ t.fold_with(&mut FreeVarFolder(for_ty, for_const), DebruijnIndex::INNERMOST)
+}
+
+pub(crate) fn fold_tys<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>>(
+ t: T,
+ mut for_ty: impl FnMut(Ty, DebruijnIndex) -> Ty,
+ binders: DebruijnIndex,
+) -> T {
+ fold_tys_and_consts(
+ t,
+ |x, d| match x {
+ Either::Left(x) => Either::Left(for_ty(x, d)),
+ Either::Right(x) => Either::Right(x),
+ },
+ binders,
+ )
+}
+
+pub(crate) fn fold_tys_and_consts<T: HasInterner<Interner = Interner> + TypeFoldable<Interner>>(
+ t: T,
+ f: impl FnMut(Either<Ty, Const>, DebruijnIndex) -> Either<Ty, Const>,
+ binders: DebruijnIndex,
+) -> T {
+ use chalk_ir::fold::{TypeFolder, TypeSuperFoldable};
+ #[derive(chalk_derive::FallibleTypeFolder)]
+ #[has_interner(Interner)]
+ struct TyFolder<F: FnMut(Either<Ty, Const>, DebruijnIndex) -> Either<Ty, Const>>(F);
+ impl<F: FnMut(Either<Ty, Const>, DebruijnIndex) -> Either<Ty, Const>> TypeFolder<Interner>
+ for TyFolder<F>
+ {
+ fn as_dyn(&mut self) -> &mut dyn TypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn fold_ty(&mut self, ty: Ty, outer_binder: DebruijnIndex) -> Ty {
+ let ty = ty.super_fold_with(self.as_dyn(), outer_binder);
+ self.0(Either::Left(ty), outer_binder).left().unwrap()
+ }
+
+ fn fold_const(&mut self, c: Const, outer_binder: DebruijnIndex) -> Const {
+ self.0(Either::Right(c), outer_binder).right().unwrap()
+ }
+ }
+ t.fold_with(&mut TyFolder(f), binders)
+}
+
+/// 'Canonicalizes' the `t` by replacing any errors with new variables. Also
+/// ensures there are no unbound variables or inference variables anywhere in
+/// the `t`.
+pub fn replace_errors_with_variables<T>(t: &T) -> Canonical<T>
+where
+ T: HasInterner<Interner = Interner> + TypeFoldable<Interner> + Clone,
+{
+ use chalk_ir::{
+ fold::{FallibleTypeFolder, TypeSuperFoldable},
+ Fallible,
+ };
+ struct ErrorReplacer {
+ vars: usize,
+ }
+ impl FallibleTypeFolder<Interner> for ErrorReplacer {
+ type Error = NoSolution;
+
+ fn as_dyn(&mut self) -> &mut dyn FallibleTypeFolder<Interner, Error = Self::Error> {
+ self
+ }
+
+ fn interner(&self) -> Interner {
+ Interner
+ }
+
+ fn try_fold_ty(&mut self, ty: Ty, outer_binder: DebruijnIndex) -> Fallible<Ty> {
+ if let TyKind::Error = ty.kind(Interner) {
+ let index = self.vars;
+ self.vars += 1;
+ Ok(TyKind::BoundVar(BoundVar::new(outer_binder, index)).intern(Interner))
+ } else {
+ ty.try_super_fold_with(self.as_dyn(), outer_binder)
+ }
+ }
+
+ fn try_fold_inference_ty(
+ &mut self,
+ _var: InferenceVar,
+ _kind: TyVariableKind,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Ty> {
+ if cfg!(debug_assertions) {
+ // we don't want to just panic here, because then the error message
+ // won't contain the whole thing, which would not be very helpful
+ Err(NoSolution)
+ } else {
+ Ok(TyKind::Error.intern(Interner))
+ }
+ }
+
+ fn try_fold_free_var_ty(
+ &mut self,
+ _bound_var: BoundVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Ty> {
+ if cfg!(debug_assertions) {
+ // we don't want to just panic here, because then the error message
+ // won't contain the whole thing, which would not be very helpful
+ Err(NoSolution)
+ } else {
+ Ok(TyKind::Error.intern(Interner))
+ }
+ }
+
+ fn try_fold_inference_const(
+ &mut self,
+ ty: Ty,
+ _var: InferenceVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Const> {
+ if cfg!(debug_assertions) {
+ Err(NoSolution)
+ } else {
+ Ok(unknown_const(ty))
+ }
+ }
+
+ fn try_fold_free_var_const(
+ &mut self,
+ ty: Ty,
+ _bound_var: BoundVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Const> {
+ if cfg!(debug_assertions) {
+ Err(NoSolution)
+ } else {
+ Ok(unknown_const(ty))
+ }
+ }
+
+ fn try_fold_inference_lifetime(
+ &mut self,
+ _var: InferenceVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Lifetime> {
+ if cfg!(debug_assertions) {
+ Err(NoSolution)
+ } else {
+ Ok(static_lifetime())
+ }
+ }
+
+ fn try_fold_free_var_lifetime(
+ &mut self,
+ _bound_var: BoundVar,
+ _outer_binder: DebruijnIndex,
+ ) -> Fallible<Lifetime> {
+ if cfg!(debug_assertions) {
+ Err(NoSolution)
+ } else {
+ Ok(static_lifetime())
+ }
+ }
+ }
+ let mut error_replacer = ErrorReplacer { vars: 0 };
+ let value = match t.clone().try_fold_with(&mut error_replacer, DebruijnIndex::INNERMOST) {
+ Ok(t) => t,
+ Err(_) => panic!("Encountered unbound or inference vars in {:?}", t),
+ };
+ let kinds = (0..error_replacer.vars).map(|_| {
+ chalk_ir::CanonicalVarKind::new(
+ chalk_ir::VariableKind::Ty(TyVariableKind::General),
+ chalk_ir::UniverseIndex::ROOT,
+ )
+ });
+ Canonical { value, binders: chalk_ir::CanonicalVarKinds::from_iter(Interner, kinds) }
+}
+
+pub fn callable_sig_from_fnonce(
- let mut kinds = self_ty.binders.interned().to_vec();
++ self_ty: &Ty,
+ env: Arc<TraitEnvironment>,
+ db: &dyn HirDatabase,
+) -> Option<CallableSig> {
+ let krate = env.krate;
+ let fn_once_trait = FnTrait::FnOnce.get_id(db, krate)?;
+ let output_assoc_type = db.trait_data(fn_once_trait).associated_type_by_name(&name![Output])?;
+
- let fn_once = b
- .push(self_ty.value.clone())
- .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len())
- .build();
- kinds.extend(fn_once.substitution.iter(Interner).skip(1).map(|x| {
- let vk = match x.data(Interner) {
- chalk_ir::GenericArgData::Ty(_) => {
- chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
- }
- chalk_ir::GenericArgData::Lifetime(_) => chalk_ir::VariableKind::Lifetime,
- chalk_ir::GenericArgData::Const(c) => {
- chalk_ir::VariableKind::Const(c.data(Interner).ty.clone())
- }
- };
- chalk_ir::WithKind::new(vk, UniverseIndex::ROOT)
- }));
+ let b = TyBuilder::trait_ref(db, fn_once_trait);
+ if b.remaining() != 2 {
+ return None;
+ }
- let args = subst.at(Interner, self_ty.binders.interned().len()).ty(Interner)?;
++ let fn_once = b.push(self_ty.clone()).fill_with_bound_vars(DebruijnIndex::INNERMOST, 0).build();
++ let kinds = fn_once
++ .substitution
++ .iter(Interner)
++ .skip(1)
++ .map(|x| {
++ let vk = match x.data(Interner) {
++ chalk_ir::GenericArgData::Ty(_) => {
++ chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
++ }
++ chalk_ir::GenericArgData::Lifetime(_) => chalk_ir::VariableKind::Lifetime,
++ chalk_ir::GenericArgData::Const(c) => {
++ chalk_ir::VariableKind::Const(c.data(Interner).ty.clone())
++ }
++ };
++ chalk_ir::WithKind::new(vk, UniverseIndex::ROOT)
++ })
++ .collect::<Vec<_>>();
+
+ // FIXME: chalk refuses to solve `<Self as FnOnce<^0.0>>::Output == ^0.1`, so we first solve
+ // `<Self as FnOnce<^0.0>>` and then replace `^0.0` with the concrete argument tuple.
+ let trait_env = env.env.clone();
+ let obligation = InEnvironment { goal: fn_once.cast(Interner), environment: trait_env };
+ let canonical =
+ Canonical { binders: CanonicalVarKinds::from_iter(Interner, kinds), value: obligation };
+ let subst = match db.trait_solve(krate, canonical) {
+ Some(Solution::Unique(vars)) => vars.value.subst,
+ _ => return None,
+ };
- if params.iter().any(|ty| ty.is_unknown()) {
- return None;
- }
++ let args = subst.at(Interner, 0).ty(Interner)?;
+ let params = match args.kind(Interner) {
+ chalk_ir::TyKind::Tuple(_, subst) => {
+ subst.iter(Interner).filter_map(|arg| arg.ty(Interner).cloned()).collect::<Vec<_>>()
+ }
+ _ => return None,
+ };
- let fn_once = TyBuilder::trait_ref(db, fn_once_trait)
- .push(self_ty.value.clone())
- .push(args.clone())
- .build();
+
++ let fn_once =
++ TyBuilder::trait_ref(db, fn_once_trait).push(self_ty.clone()).push(args.clone()).build();
+ let projection =
+ TyBuilder::assoc_type_projection(db, output_assoc_type, Some(fn_once.substitution.clone()))
+ .build();
+
+ let ret_ty = db.normalize_projection(projection, env);
+
+ Some(CallableSig::from_params_and_return(params, ret_ty.clone(), false, Safety::Safe))
+}
--- /dev/null
- let mut ty = ty;
+//! This module is concerned with finding methods that a given type provides.
+//! For details about how this works in rustc, see the method lookup page in the
+//! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html)
+//! and the corresponding code mostly in rustc_hir_analysis/check/method/probe.rs.
+use std::{iter, ops::ControlFlow, sync::Arc};
+
+use arrayvec::ArrayVec;
+use base_db::{CrateId, Edition};
+use chalk_ir::{cast::Cast, Mutability, UniverseIndex};
+use hir_def::{
+ data::ImplData, item_scope::ItemScope, nameres::DefMap, AssocItemId, BlockId, ConstId,
+ FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, Lookup, ModuleDefId, ModuleId,
+ TraitId,
+};
+use hir_expand::name::Name;
+use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::never;
+
+use crate::{
+ autoderef::{self, AutoderefKind},
+ db::HirDatabase,
+ from_foreign_def_id,
+ infer::{unify::InferenceTable, Adjust, Adjustment, AutoBorrow, OverloadedDeref, PointerCast},
+ primitive::{FloatTy, IntTy, UintTy},
+ static_lifetime, to_chalk_trait_id,
+ utils::all_super_traits,
+ AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, ForeignDefId, InEnvironment, Interner,
+ Scalar, Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyExt, TyKind,
+};
+
+/// This is used as a key for indexing impls.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum TyFingerprint {
+ // These are lang item impls:
+ Str,
+ Slice,
+ Array,
+ Never,
+ RawPtr(Mutability),
+ Scalar(Scalar),
+ // These can have user-defined impls:
+ Adt(hir_def::AdtId),
+ Dyn(TraitId),
+ ForeignType(ForeignDefId),
+ // These only exist for trait impls
+ Unit,
+ Unnameable,
+ Function(u32),
+}
+
+impl TyFingerprint {
+ /// Creates a TyFingerprint for looking up an inherent impl. Only certain
+ /// types can have inherent impls: if we have some `struct S`, we can have
+ /// an `impl S`, but not `impl &S`. Hence, this will return `None` for
+ /// reference types and such.
+ pub fn for_inherent_impl(ty: &Ty) -> Option<TyFingerprint> {
+ let fp = match ty.kind(Interner) {
+ TyKind::Str => TyFingerprint::Str,
+ TyKind::Never => TyFingerprint::Never,
+ TyKind::Slice(..) => TyFingerprint::Slice,
+ TyKind::Array(..) => TyFingerprint::Array,
+ TyKind::Scalar(scalar) => TyFingerprint::Scalar(*scalar),
+ TyKind::Adt(AdtId(adt), _) => TyFingerprint::Adt(*adt),
+ TyKind::Raw(mutability, ..) => TyFingerprint::RawPtr(*mutability),
+ TyKind::Foreign(alias_id, ..) => TyFingerprint::ForeignType(*alias_id),
+ TyKind::Dyn(_) => ty.dyn_trait().map(TyFingerprint::Dyn)?,
+ _ => return None,
+ };
+ Some(fp)
+ }
+
+ /// Creates a TyFingerprint for looking up a trait impl.
+ pub fn for_trait_impl(ty: &Ty) -> Option<TyFingerprint> {
+ let fp = match ty.kind(Interner) {
+ TyKind::Str => TyFingerprint::Str,
+ TyKind::Never => TyFingerprint::Never,
+ TyKind::Slice(..) => TyFingerprint::Slice,
+ TyKind::Array(..) => TyFingerprint::Array,
+ TyKind::Scalar(scalar) => TyFingerprint::Scalar(*scalar),
+ TyKind::Adt(AdtId(adt), _) => TyFingerprint::Adt(*adt),
+ TyKind::Raw(mutability, ..) => TyFingerprint::RawPtr(*mutability),
+ TyKind::Foreign(alias_id, ..) => TyFingerprint::ForeignType(*alias_id),
+ TyKind::Dyn(_) => ty.dyn_trait().map(TyFingerprint::Dyn)?,
+ TyKind::Ref(_, _, ty) => return TyFingerprint::for_trait_impl(ty),
+ TyKind::Tuple(_, subst) => {
+ let first_ty = subst.interned().get(0).map(|arg| arg.assert_ty_ref(Interner));
+ match first_ty {
+ Some(ty) => return TyFingerprint::for_trait_impl(ty),
+ None => TyFingerprint::Unit,
+ }
+ }
+ TyKind::AssociatedType(_, _)
+ | TyKind::OpaqueType(_, _)
+ | TyKind::FnDef(_, _)
+ | TyKind::Closure(_, _)
+ | TyKind::Generator(..)
+ | TyKind::GeneratorWitness(..) => TyFingerprint::Unnameable,
+ TyKind::Function(fn_ptr) => {
+ TyFingerprint::Function(fn_ptr.substitution.0.len(Interner) as u32)
+ }
+ TyKind::Alias(_)
+ | TyKind::Placeholder(_)
+ | TyKind::BoundVar(_)
+ | TyKind::InferenceVar(_, _)
+ | TyKind::Error => return None,
+ };
+ Some(fp)
+ }
+}
+
+pub(crate) const ALL_INT_FPS: [TyFingerprint; 12] = [
+ TyFingerprint::Scalar(Scalar::Int(IntTy::I8)),
+ TyFingerprint::Scalar(Scalar::Int(IntTy::I16)),
+ TyFingerprint::Scalar(Scalar::Int(IntTy::I32)),
+ TyFingerprint::Scalar(Scalar::Int(IntTy::I64)),
+ TyFingerprint::Scalar(Scalar::Int(IntTy::I128)),
+ TyFingerprint::Scalar(Scalar::Int(IntTy::Isize)),
+ TyFingerprint::Scalar(Scalar::Uint(UintTy::U8)),
+ TyFingerprint::Scalar(Scalar::Uint(UintTy::U16)),
+ TyFingerprint::Scalar(Scalar::Uint(UintTy::U32)),
+ TyFingerprint::Scalar(Scalar::Uint(UintTy::U64)),
+ TyFingerprint::Scalar(Scalar::Uint(UintTy::U128)),
+ TyFingerprint::Scalar(Scalar::Uint(UintTy::Usize)),
+];
+
+pub(crate) const ALL_FLOAT_FPS: [TyFingerprint; 2] = [
+ TyFingerprint::Scalar(Scalar::Float(FloatTy::F32)),
+ TyFingerprint::Scalar(Scalar::Float(FloatTy::F64)),
+];
+
+/// Trait impls defined or available in some crate.
+#[derive(Debug, Eq, PartialEq)]
+pub struct TraitImpls {
+ // If the `Option<TyFingerprint>` is `None`, the impl may apply to any self type.
+ map: FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Vec<ImplId>>>,
+}
+
+impl TraitImpls {
+ pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
+ let _p = profile::span("trait_impls_in_crate_query").detail(|| format!("{krate:?}"));
+ let mut impls = Self { map: FxHashMap::default() };
+
+ let crate_def_map = db.crate_def_map(krate);
+ impls.collect_def_map(db, &crate_def_map);
+ impls.shrink_to_fit();
+
+ Arc::new(impls)
+ }
+
+ pub(crate) fn trait_impls_in_block_query(
+ db: &dyn HirDatabase,
+ block: BlockId,
+ ) -> Option<Arc<Self>> {
+ let _p = profile::span("trait_impls_in_block_query");
+ let mut impls = Self { map: FxHashMap::default() };
+
+ let block_def_map = db.block_def_map(block)?;
+ impls.collect_def_map(db, &block_def_map);
+ impls.shrink_to_fit();
+
+ Some(Arc::new(impls))
+ }
+
+ pub(crate) fn trait_impls_in_deps_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
+ let _p = profile::span("trait_impls_in_deps_query").detail(|| format!("{krate:?}"));
+ let crate_graph = db.crate_graph();
+ let mut res = Self { map: FxHashMap::default() };
+
+ for krate in crate_graph.transitive_deps(krate) {
+ res.merge(&db.trait_impls_in_crate(krate));
+ }
+ res.shrink_to_fit();
+
+ Arc::new(res)
+ }
+
+ fn shrink_to_fit(&mut self) {
+ self.map.shrink_to_fit();
+ self.map.values_mut().for_each(|map| {
+ map.shrink_to_fit();
+ map.values_mut().for_each(Vec::shrink_to_fit);
+ });
+ }
+
+ fn collect_def_map(&mut self, db: &dyn HirDatabase, def_map: &DefMap) {
+ for (_module_id, module_data) in def_map.modules() {
+ for impl_id in module_data.scope.impls() {
+ let target_trait = match db.impl_trait(impl_id) {
+ Some(tr) => tr.skip_binders().hir_trait_id(),
+ None => continue,
+ };
+ let self_ty = db.impl_self_ty(impl_id);
+ let self_ty_fp = TyFingerprint::for_trait_impl(self_ty.skip_binders());
+ self.map
+ .entry(target_trait)
+ .or_default()
+ .entry(self_ty_fp)
+ .or_default()
+ .push(impl_id);
+ }
+
+ // To better support custom derives, collect impls in all unnamed const items.
+ // const _: () = { ... };
+ for konst in collect_unnamed_consts(db, &module_data.scope) {
+ let body = db.body(konst.into());
+ for (_, block_def_map) in body.blocks(db.upcast()) {
+ self.collect_def_map(db, &block_def_map);
+ }
+ }
+ }
+ }
+
+ fn merge(&mut self, other: &Self) {
+ for (trait_, other_map) in &other.map {
+ let map = self.map.entry(*trait_).or_default();
+ for (fp, impls) in other_map {
+ map.entry(*fp).or_default().extend(impls);
+ }
+ }
+ }
+
+ /// Queries all trait impls for the given type.
+ pub fn for_self_ty_without_blanket_impls(
+ &self,
+ fp: TyFingerprint,
+ ) -> impl Iterator<Item = ImplId> + '_ {
+ self.map
+ .values()
+ .flat_map(move |impls| impls.get(&Some(fp)).into_iter())
+ .flat_map(|it| it.iter().copied())
+ }
+
+ /// Queries all impls of the given trait.
+ pub fn for_trait(&self, trait_: TraitId) -> impl Iterator<Item = ImplId> + '_ {
+ self.map
+ .get(&trait_)
+ .into_iter()
+ .flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
+ }
+
+ /// Queries all impls of `trait_` that may apply to `self_ty`.
+ pub fn for_trait_and_self_ty(
+ &self,
+ trait_: TraitId,
+ self_ty: TyFingerprint,
+ ) -> impl Iterator<Item = ImplId> + '_ {
+ self.map
+ .get(&trait_)
+ .into_iter()
+ .flat_map(move |map| map.get(&Some(self_ty)).into_iter().chain(map.get(&None)))
+ .flat_map(|v| v.iter().copied())
+ }
+
+ pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
+ self.map.values().flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
+ }
+}
+
+/// Inherent impls defined in some crate.
+///
+/// Inherent impls can only be defined in the crate that also defines the self type of the impl
+/// (note that some primitives are considered to be defined by both libcore and liballoc).
+///
+/// This makes inherent impl lookup easier than trait impl lookup since we only have to consider a
+/// single crate.
+#[derive(Debug, Eq, PartialEq)]
+pub struct InherentImpls {
+ map: FxHashMap<TyFingerprint, Vec<ImplId>>,
+}
+
+impl InherentImpls {
+ pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
+ let mut impls = Self { map: FxHashMap::default() };
+
+ let crate_def_map = db.crate_def_map(krate);
+ impls.collect_def_map(db, &crate_def_map);
+ impls.shrink_to_fit();
+
+ Arc::new(impls)
+ }
+
+ pub(crate) fn inherent_impls_in_block_query(
+ db: &dyn HirDatabase,
+ block: BlockId,
+ ) -> Option<Arc<Self>> {
+ let mut impls = Self { map: FxHashMap::default() };
+ if let Some(block_def_map) = db.block_def_map(block) {
+ impls.collect_def_map(db, &block_def_map);
+ impls.shrink_to_fit();
+ return Some(Arc::new(impls));
+ }
+ None
+ }
+
+ fn shrink_to_fit(&mut self) {
+ self.map.values_mut().for_each(Vec::shrink_to_fit);
+ self.map.shrink_to_fit();
+ }
+
+ fn collect_def_map(&mut self, db: &dyn HirDatabase, def_map: &DefMap) {
+ for (_module_id, module_data) in def_map.modules() {
+ for impl_id in module_data.scope.impls() {
+ let data = db.impl_data(impl_id);
+ if data.target_trait.is_some() {
+ continue;
+ }
+
+ let self_ty = db.impl_self_ty(impl_id);
+ let fp = TyFingerprint::for_inherent_impl(self_ty.skip_binders());
+ if let Some(fp) = fp {
+ self.map.entry(fp).or_default().push(impl_id);
+ }
+ // `fp` should only be `None` in error cases (either erroneous code or incomplete name resolution)
+ }
+
+ // To better support custom derives, collect impls in all unnamed const items.
+ // const _: () = { ... };
+ for konst in collect_unnamed_consts(db, &module_data.scope) {
+ let body = db.body(konst.into());
+ for (_, block_def_map) in body.blocks(db.upcast()) {
+ self.collect_def_map(db, &block_def_map);
+ }
+ }
+ }
+ }
+
+ pub fn for_self_ty(&self, self_ty: &Ty) -> &[ImplId] {
+ match TyFingerprint::for_inherent_impl(self_ty) {
+ Some(fp) => self.map.get(&fp).map(|vec| vec.as_ref()).unwrap_or(&[]),
+ None => &[],
+ }
+ }
+
+ pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
+ self.map.values().flat_map(|v| v.iter().copied())
+ }
+}
+
+pub(crate) fn inherent_impl_crates_query(
+ db: &dyn HirDatabase,
+ krate: CrateId,
+ fp: TyFingerprint,
+) -> ArrayVec<CrateId, 2> {
+ let _p = profile::span("inherent_impl_crates_query");
+ let mut res = ArrayVec::new();
+ let crate_graph = db.crate_graph();
+
+ for krate in crate_graph.transitive_deps(krate) {
+ if res.is_full() {
+ // we don't currently look for or store more than two crates here,
+ // so don't needlessly look at more crates than necessary.
+ break;
+ }
+ let impls = db.inherent_impls_in_crate(krate);
+ if impls.map.get(&fp).map_or(false, |v| !v.is_empty()) {
+ res.push(krate);
+ }
+ }
+
+ res
+}
+
+fn collect_unnamed_consts<'a>(
+ db: &'a dyn HirDatabase,
+ scope: &'a ItemScope,
+) -> impl Iterator<Item = ConstId> + 'a {
+ let unnamed_consts = scope.unnamed_consts();
+
+ // FIXME: Also treat consts named `_DERIVE_*` as unnamed, since synstructure generates those.
+ // Should be removed once synstructure stops doing that.
+ let synstructure_hack_consts = scope.values().filter_map(|(item, _)| match item {
+ ModuleDefId::ConstId(id) => {
+ let loc = id.lookup(db.upcast());
+ let item_tree = loc.id.item_tree(db.upcast());
+ if item_tree[loc.id.value]
+ .name
+ .as_ref()
+ .map_or(false, |n| n.to_smol_str().starts_with("_DERIVE_"))
+ {
+ Some(id)
+ } else {
+ None
+ }
+ }
+ _ => None,
+ });
+
+ unnamed_consts.chain(synstructure_hack_consts)
+}
+
+pub fn def_crates(
+ db: &dyn HirDatabase,
+ ty: &Ty,
+ cur_crate: CrateId,
+) -> Option<ArrayVec<CrateId, 2>> {
+ let mod_to_crate_ids = |module: ModuleId| Some(iter::once(module.krate()).collect());
+
+ let fp = TyFingerprint::for_inherent_impl(ty);
+
+ match ty.kind(Interner) {
+ TyKind::Adt(AdtId(def_id), _) => mod_to_crate_ids(def_id.module(db.upcast())),
+ TyKind::Foreign(id) => {
+ mod_to_crate_ids(from_foreign_def_id(*id).lookup(db.upcast()).module(db.upcast()))
+ }
+ TyKind::Dyn(_) => ty
+ .dyn_trait()
+ .and_then(|trait_| mod_to_crate_ids(GenericDefId::TraitId(trait_).module(db.upcast()))),
+ // for primitives, there may be impls in various places (core and alloc
+ // mostly). We just check the whole crate graph for crates with impls
+ // (cached behind a query).
+ TyKind::Scalar(_)
+ | TyKind::Str
+ | TyKind::Slice(_)
+ | TyKind::Array(..)
+ | TyKind::Raw(..) => {
+ Some(db.inherent_impl_crates(cur_crate, fp.expect("fingerprint for primitive")))
+ }
+ _ => return None,
+ }
+}
+
+pub fn lang_names_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, Name)> {
+ use hir_expand::name;
+ use syntax::ast::{ArithOp, BinaryOp, CmpOp, Ordering};
+ Some(match op {
+ BinaryOp::LogicOp(_) => return None,
+ BinaryOp::ArithOp(aop) => match aop {
+ ArithOp::Add => (name!(add), name!(add)),
+ ArithOp::Mul => (name!(mul), name!(mul)),
+ ArithOp::Sub => (name!(sub), name!(sub)),
+ ArithOp::Div => (name!(div), name!(div)),
+ ArithOp::Rem => (name!(rem), name!(rem)),
+ ArithOp::Shl => (name!(shl), name!(shl)),
+ ArithOp::Shr => (name!(shr), name!(shr)),
+ ArithOp::BitXor => (name!(bitxor), name!(bitxor)),
+ ArithOp::BitOr => (name!(bitor), name!(bitor)),
+ ArithOp::BitAnd => (name!(bitand), name!(bitand)),
+ },
+ BinaryOp::Assignment { op: Some(aop) } => match aop {
+ ArithOp::Add => (name!(add_assign), name!(add_assign)),
+ ArithOp::Mul => (name!(mul_assign), name!(mul_assign)),
+ ArithOp::Sub => (name!(sub_assign), name!(sub_assign)),
+ ArithOp::Div => (name!(div_assign), name!(div_assign)),
+ ArithOp::Rem => (name!(rem_assign), name!(rem_assign)),
+ ArithOp::Shl => (name!(shl_assign), name!(shl_assign)),
+ ArithOp::Shr => (name!(shr_assign), name!(shr_assign)),
+ ArithOp::BitXor => (name!(bitxor_assign), name!(bitxor_assign)),
+ ArithOp::BitOr => (name!(bitor_assign), name!(bitor_assign)),
+ ArithOp::BitAnd => (name!(bitand_assign), name!(bitand_assign)),
+ },
+ BinaryOp::CmpOp(cop) => match cop {
+ CmpOp::Eq { negated: false } => (name!(eq), name!(eq)),
+ CmpOp::Eq { negated: true } => (name!(ne), name!(eq)),
+ CmpOp::Ord { ordering: Ordering::Less, strict: false } => {
+ (name!(le), name!(partial_ord))
+ }
+ CmpOp::Ord { ordering: Ordering::Less, strict: true } => {
+ (name!(lt), name!(partial_ord))
+ }
+ CmpOp::Ord { ordering: Ordering::Greater, strict: false } => {
+ (name!(ge), name!(partial_ord))
+ }
+ CmpOp::Ord { ordering: Ordering::Greater, strict: true } => {
+ (name!(gt), name!(partial_ord))
+ }
+ },
+ BinaryOp::Assignment { op: None } => return None,
+ })
+}
+
+/// Look up the method with the given name.
+pub(crate) fn lookup_method(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ visible_from_module: VisibleFromModule,
+ name: &Name,
+) -> Option<(ReceiverAdjustments, FunctionId)> {
+ iterate_method_candidates(
+ ty,
+ db,
+ env,
+ traits_in_scope,
+ visible_from_module,
+ Some(name),
+ LookupMode::MethodCall,
+ |adjustments, f| match f {
+ AssocItemId::FunctionId(f) => Some((adjustments, f)),
+ _ => None,
+ },
+ )
+}
+
+/// Whether we're looking up a dotted method call (like `v.len()`) or a path
+/// (like `Vec::new`).
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum LookupMode {
+ /// Looking up a method call like `v.len()`: We only consider candidates
+ /// that have a `self` parameter, and do autoderef.
+ MethodCall,
+ /// Looking up a path like `Vec::new` or `Vec::default`: We consider all
+ /// candidates including associated constants, but don't do autoderef.
+ Path,
+}
+
+#[derive(Clone, Copy)]
+pub enum VisibleFromModule {
+ /// Filter for results that are visible from the given module
+ Filter(ModuleId),
+ /// Include impls from the given block.
+ IncludeBlock(BlockId),
+ /// Do nothing special in regard to visibility
+ None,
+}
+
+impl From<Option<ModuleId>> for VisibleFromModule {
+ fn from(module: Option<ModuleId>) -> Self {
+ match module {
+ Some(module) => Self::Filter(module),
+ None => Self::None,
+ }
+ }
+}
+
+impl From<Option<BlockId>> for VisibleFromModule {
+ fn from(block: Option<BlockId>) -> Self {
+ match block {
+ Some(block) => Self::IncludeBlock(block),
+ None => Self::None,
+ }
+ }
+}
+
+#[derive(Debug, Clone, Default)]
+pub struct ReceiverAdjustments {
+ autoref: Option<Mutability>,
+ autoderefs: usize,
+ unsize_array: bool,
+}
+
+impl ReceiverAdjustments {
+ pub(crate) fn apply(&self, table: &mut InferenceTable<'_>, ty: Ty) -> (Ty, Vec<Adjustment>) {
++ let mut ty = table.resolve_ty_shallow(&ty);
+ let mut adjust = Vec::new();
+ for _ in 0..self.autoderefs {
+ match autoderef::autoderef_step(table, ty.clone()) {
+ None => {
+ never!("autoderef not possible for {:?}", ty);
+ ty = TyKind::Error.intern(Interner);
+ break;
+ }
+ Some((kind, new_ty)) => {
+ ty = new_ty.clone();
+ adjust.push(Adjustment {
+ kind: Adjust::Deref(match kind {
+ // FIXME should we know the mutability here?
+ AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)),
+ AutoderefKind::Builtin => None,
+ }),
+ target: new_ty,
+ });
+ }
+ }
+ }
+ if self.unsize_array {
+ ty = match ty.kind(Interner) {
+ TyKind::Array(inner, _) => TyKind::Slice(inner.clone()).intern(Interner),
+ _ => {
+ never!("unsize_array with non-array {:?}", ty);
+ ty
+ }
+ };
+ // FIXME this is kind of wrong since the unsize needs to happen to a pointer/reference
+ adjust.push(Adjustment {
+ kind: Adjust::Pointer(PointerCast::Unsize),
+ target: ty.clone(),
+ });
+ }
+ if let Some(m) = self.autoref {
+ ty = TyKind::Ref(m, static_lifetime(), ty).intern(Interner);
+ adjust
+ .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty.clone() });
+ }
+ (ty, adjust)
+ }
+
+ fn with_autoref(&self, m: Mutability) -> ReceiverAdjustments {
+ Self { autoref: Some(m), ..*self }
+ }
+}
+
+// This would be nicer if it just returned an iterator, but that runs into
+// lifetime problems, because we need to borrow temp `CrateImplDefs`.
+// FIXME add a context type here?
+pub(crate) fn iterate_method_candidates<T>(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ visible_from_module: VisibleFromModule,
+ name: Option<&Name>,
+ mode: LookupMode,
+ mut callback: impl FnMut(ReceiverAdjustments, AssocItemId) -> Option<T>,
+) -> Option<T> {
+ let mut slot = None;
+ iterate_method_candidates_dyn(
+ ty,
+ db,
+ env,
+ traits_in_scope,
+ visible_from_module,
+ name,
+ mode,
+ &mut |adj, item| {
+ assert!(slot.is_none());
+ if let Some(it) = callback(adj, item) {
+ slot = Some(it);
+ return ControlFlow::Break(());
+ }
+ ControlFlow::Continue(())
+ },
+ );
+ slot
+}
+
+/// Looks up the impl method that actually runs for the trait method `func`.
+///
+/// Returns `func` if it's not a method defined in a trait or the lookup failed.
+pub fn lookup_impl_method(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ func: FunctionId,
+ fn_subst: Substitution,
+) -> FunctionId {
+ let trait_id = match func.lookup(db.upcast()).container {
+ ItemContainerId::TraitId(id) => id,
+ _ => return func,
+ };
+ let trait_params = db.generic_params(trait_id.into()).type_or_consts.len();
+ let fn_params = fn_subst.len(Interner) - trait_params;
+ let trait_ref = TraitRef {
+ trait_id: to_chalk_trait_id(trait_id),
+ substitution: Substitution::from_iter(Interner, fn_subst.iter(Interner).skip(fn_params)),
+ };
+
+ let name = &db.function_data(func).name;
+ lookup_impl_method_for_trait_ref(trait_ref, db, env, name).unwrap_or(func)
+}
+
+fn lookup_impl_method_for_trait_ref(
+ trait_ref: TraitRef,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ name: &Name,
+) -> Option<FunctionId> {
+ let self_ty = trait_ref.self_type_parameter(Interner);
+ let self_ty_fp = TyFingerprint::for_trait_impl(&self_ty)?;
+ let impls = db.trait_impls_in_deps(env.krate);
+ let impls = impls.for_trait_and_self_ty(trait_ref.hir_trait_id(), self_ty_fp);
+
+ let table = InferenceTable::new(db, env);
+
+ let impl_data = find_matching_impl(impls, table, trait_ref)?;
+ impl_data.items.iter().find_map(|it| match it {
+ AssocItemId::FunctionId(f) => (db.function_data(*f).name == *name).then(|| *f),
+ _ => None,
+ })
+}
+
+fn find_matching_impl(
+ mut impls: impl Iterator<Item = ImplId>,
+ mut table: InferenceTable<'_>,
+ actual_trait_ref: TraitRef,
+) -> Option<Arc<ImplData>> {
+ let db = table.db;
+ loop {
+ let impl_ = impls.next()?;
+ let r = table.run_in_snapshot(|table| {
+ let impl_data = db.impl_data(impl_);
+ let impl_substs =
+ TyBuilder::subst_for_def(db, impl_, None).fill_with_inference_vars(table).build();
+ let trait_ref = db
+ .impl_trait(impl_)
+ .expect("non-trait method in find_matching_impl")
+ .substitute(Interner, &impl_substs);
+
+ if !table.unify(&trait_ref, &actual_trait_ref) {
+ return None;
+ }
+
+ let wcs = crate::chalk_db::convert_where_clauses(db, impl_.into(), &impl_substs)
+ .into_iter()
+ .map(|b| b.cast(Interner));
+ let goal = crate::Goal::all(Interner, wcs);
+ table.try_obligation(goal).map(|_| impl_data)
+ });
+ if r.is_some() {
+ break r;
+ }
+ }
+}
+
+pub fn iterate_path_candidates(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ visible_from_module: VisibleFromModule,
+ name: Option<&Name>,
+ callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+ iterate_method_candidates_dyn(
+ ty,
+ db,
+ env,
+ traits_in_scope,
+ visible_from_module,
+ name,
+ LookupMode::Path,
+ // the adjustments are not relevant for path lookup
+ &mut |_, id| callback(id),
+ )
+}
+
+pub fn iterate_method_candidates_dyn(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ visible_from_module: VisibleFromModule,
+ name: Option<&Name>,
+ mode: LookupMode,
+ callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+ match mode {
+ LookupMode::MethodCall => {
+ // For method calls, rust first does any number of autoderef, and
+ // then one autoref (i.e. when the method takes &self or &mut self).
+ // Note that when we've got a receiver like &S, even if the method
+ // we find in the end takes &self, we still do the autoderef step
+ // (just as rustc does an autoderef and then autoref again).
+
+ // We have to be careful about the order we're looking at candidates
+ // in here. Consider the case where we're resolving `x.clone()`
+ // where `x: &Vec<_>`. This resolves to the clone method with self
+ // type `Vec<_>`, *not* `&_`. I.e. we need to consider methods where
+ // the receiver type exactly matches before cases where we have to
+ // do autoref. But in the autoderef steps, the `&_` self type comes
+ // up *before* the `Vec<_>` self type.
+ //
+ // On the other hand, we don't want to just pick any by-value method
+ // before any by-autoref method; it's just that we need to consider
+ // the methods by autoderef order of *receiver types*, not *self
+ // types*.
+
+ let mut table = InferenceTable::new(db, env.clone());
+ let ty = table.instantiate_canonical(ty.clone());
+ let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty);
+
+ let result = deref_chain.into_iter().zip(adj).try_for_each(|(receiver_ty, adj)| {
+ iterate_method_candidates_with_autoref(
+ &receiver_ty,
+ adj,
+ db,
+ env.clone(),
+ traits_in_scope,
+ visible_from_module,
+ name,
+ callback,
+ )
+ });
+ result
+ }
+ LookupMode::Path => {
+ // No autoderef for path lookups
+ iterate_method_candidates_for_self_ty(
+ ty,
+ db,
+ env,
+ traits_in_scope,
+ visible_from_module,
+ name,
+ callback,
+ )
+ }
+ }
+}
+
+fn iterate_method_candidates_with_autoref(
+ receiver_ty: &Canonical<Ty>,
+ first_adjustment: ReceiverAdjustments,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ visible_from_module: VisibleFromModule,
+ name: Option<&Name>,
+ mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+ if receiver_ty.value.is_general_var(Interner, &receiver_ty.binders) {
+ // don't try to resolve methods on unknown types
+ return ControlFlow::Continue(());
+ }
+
+ iterate_method_candidates_by_receiver(
+ receiver_ty,
+ first_adjustment.clone(),
+ db,
+ env.clone(),
+ traits_in_scope,
+ visible_from_module,
+ name,
+ &mut callback,
+ )?;
+
+ let refed = Canonical {
+ value: TyKind::Ref(Mutability::Not, static_lifetime(), receiver_ty.value.clone())
+ .intern(Interner),
+ binders: receiver_ty.binders.clone(),
+ };
+
+ iterate_method_candidates_by_receiver(
+ &refed,
+ first_adjustment.with_autoref(Mutability::Not),
+ db,
+ env.clone(),
+ traits_in_scope,
+ visible_from_module,
+ name,
+ &mut callback,
+ )?;
+
+ let ref_muted = Canonical {
+ value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value.clone())
+ .intern(Interner),
+ binders: receiver_ty.binders.clone(),
+ };
+
+ iterate_method_candidates_by_receiver(
+ &ref_muted,
+ first_adjustment.with_autoref(Mutability::Mut),
+ db,
+ env,
+ traits_in_scope,
+ visible_from_module,
+ name,
+ &mut callback,
+ )
+}
+
+fn iterate_method_candidates_by_receiver(
+ receiver_ty: &Canonical<Ty>,
+ receiver_adjustments: ReceiverAdjustments,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ visible_from_module: VisibleFromModule,
+ name: Option<&Name>,
+ mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+ let mut table = InferenceTable::new(db, env);
+ let receiver_ty = table.instantiate_canonical(receiver_ty.clone());
+ let snapshot = table.snapshot();
+ // We're looking for methods with *receiver* type receiver_ty. These could
+ // be found in any of the derefs of receiver_ty, so we have to go through
+ // that.
+ let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone());
+ while let Some((self_ty, _)) = autoderef.next() {
+ iterate_inherent_methods(
+ &self_ty,
+ &mut autoderef.table,
+ name,
+ Some(&receiver_ty),
+ Some(receiver_adjustments.clone()),
+ visible_from_module,
+ &mut callback,
+ )?
+ }
+
+ table.rollback_to(snapshot);
+
+ let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone());
+ while let Some((self_ty, _)) = autoderef.next() {
+ iterate_trait_method_candidates(
+ &self_ty,
+ &mut autoderef.table,
+ traits_in_scope,
+ name,
+ Some(&receiver_ty),
+ Some(receiver_adjustments.clone()),
+ &mut callback,
+ )?
+ }
+
+ ControlFlow::Continue(())
+}
+
+fn iterate_method_candidates_for_self_ty(
+ self_ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ visible_from_module: VisibleFromModule,
+ name: Option<&Name>,
+ mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+ let mut table = InferenceTable::new(db, env);
+ let self_ty = table.instantiate_canonical(self_ty.clone());
+ iterate_inherent_methods(
+ &self_ty,
+ &mut table,
+ name,
+ None,
+ None,
+ visible_from_module,
+ &mut callback,
+ )?;
+ iterate_trait_method_candidates(
+ &self_ty,
+ &mut table,
+ traits_in_scope,
+ name,
+ None,
+ None,
+ callback,
+ )
+}
+
+fn iterate_trait_method_candidates(
+ self_ty: &Ty,
+ table: &mut InferenceTable<'_>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ name: Option<&Name>,
+ receiver_ty: Option<&Ty>,
+ receiver_adjustments: Option<ReceiverAdjustments>,
+ callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+ let db = table.db;
+ let env = table.trait_env.clone();
+ let self_is_array = matches!(self_ty.kind(Interner), chalk_ir::TyKind::Array(..));
+
+ let canonical_self_ty = table.canonicalize(self_ty.clone()).value;
+
+ 'traits: for &t in traits_in_scope {
+ let data = db.trait_data(t);
+
+ // Traits annotated with `#[rustc_skip_array_during_method_dispatch]` are skipped during
+ // method resolution, if the receiver is an array, and we're compiling for editions before
+ // 2021.
+ // This is to make `[a].into_iter()` not break code with the new `IntoIterator` impl for
+ // arrays.
+ if data.skip_array_during_method_dispatch && self_is_array {
+ // FIXME: this should really be using the edition of the method name's span, in case it
+ // comes from a macro
+ if db.crate_graph()[env.krate].edition < Edition::Edition2021 {
+ continue;
+ }
+ }
+
+ // we'll be lazy about checking whether the type implements the
+ // trait, but if we find out it doesn't, we'll skip the rest of the
+ // iteration
+ let mut known_implemented = false;
+ for &(_, item) in data.items.iter() {
+ // Don't pass a `visible_from_module` down to `is_valid_candidate`,
+ // since only inherent methods should be included into visibility checking.
+ if !is_valid_candidate(table, name, receiver_ty, item, self_ty, None) {
+ continue;
+ }
+ if !known_implemented {
+ let goal = generic_implements_goal(db, env.clone(), t, &canonical_self_ty);
+ if db.trait_solve(env.krate, goal.cast(Interner)).is_none() {
+ continue 'traits;
+ }
+ }
+ known_implemented = true;
+ callback(receiver_adjustments.clone().unwrap_or_default(), item)?;
+ }
+ }
+ ControlFlow::Continue(())
+}
+
+fn iterate_inherent_methods(
+ self_ty: &Ty,
+ table: &mut InferenceTable<'_>,
+ name: Option<&Name>,
+ receiver_ty: Option<&Ty>,
+ receiver_adjustments: Option<ReceiverAdjustments>,
+ visible_from_module: VisibleFromModule,
+ callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+ let db = table.db;
+ let env = table.trait_env.clone();
+
+ // For trait object types and placeholder types with trait bounds, the methods of the trait and
+ // its super traits are considered inherent methods. This matters because these methods have
+ // higher priority than the other traits' methods, which would be considered in
+ // `iterate_trait_method_candidates()` only after this function.
+ match self_ty.kind(Interner) {
+ TyKind::Placeholder(_) => {
+ let env = table.trait_env.clone();
+ let traits = env
+ .traits_in_scope_from_clauses(self_ty.clone())
+ .flat_map(|t| all_super_traits(db.upcast(), t));
+ iterate_inherent_trait_methods(
+ self_ty,
+ table,
+ name,
+ receiver_ty,
+ receiver_adjustments.clone(),
+ callback,
+ traits,
+ )?;
+ }
+ TyKind::Dyn(_) => {
+ if let Some(principal_trait) = self_ty.dyn_trait() {
+ let traits = all_super_traits(db.upcast(), principal_trait);
+ iterate_inherent_trait_methods(
+ self_ty,
+ table,
+ name,
+ receiver_ty,
+ receiver_adjustments.clone(),
+ callback,
+ traits.into_iter(),
+ )?;
+ }
+ }
+ _ => {}
+ }
+
+ let def_crates = match def_crates(db, self_ty, env.krate) {
+ Some(k) => k,
+ None => return ControlFlow::Continue(()),
+ };
+
+ let (module, block) = match visible_from_module {
+ VisibleFromModule::Filter(module) => (Some(module), module.containing_block()),
+ VisibleFromModule::IncludeBlock(block) => (None, Some(block)),
+ VisibleFromModule::None => (None, None),
+ };
+
+ if let Some(block_id) = block {
+ if let Some(impls) = db.inherent_impls_in_block(block_id) {
+ impls_for_self_ty(
+ &impls,
+ self_ty,
+ table,
+ name,
+ receiver_ty,
+ receiver_adjustments.clone(),
+ module,
+ callback,
+ )?;
+ }
+ }
+
+ for krate in def_crates {
+ let impls = db.inherent_impls_in_crate(krate);
+ impls_for_self_ty(
+ &impls,
+ self_ty,
+ table,
+ name,
+ receiver_ty,
+ receiver_adjustments.clone(),
+ module,
+ callback,
+ )?;
+ }
+ return ControlFlow::Continue(());
+
+ fn iterate_inherent_trait_methods(
+ self_ty: &Ty,
+ table: &mut InferenceTable<'_>,
+ name: Option<&Name>,
+ receiver_ty: Option<&Ty>,
+ receiver_adjustments: Option<ReceiverAdjustments>,
+ callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+ traits: impl Iterator<Item = TraitId>,
+ ) -> ControlFlow<()> {
+ let db = table.db;
+ for t in traits {
+ let data = db.trait_data(t);
+ for &(_, item) in data.items.iter() {
+ // We don't pass `visible_from_module` as all trait items should be visible.
+ if is_valid_candidate(table, name, receiver_ty, item, self_ty, None) {
+ callback(receiver_adjustments.clone().unwrap_or_default(), item)?;
+ }
+ }
+ }
+ ControlFlow::Continue(())
+ }
+
+ fn impls_for_self_ty(
+ impls: &InherentImpls,
+ self_ty: &Ty,
+ table: &mut InferenceTable<'_>,
+ name: Option<&Name>,
+ receiver_ty: Option<&Ty>,
+ receiver_adjustments: Option<ReceiverAdjustments>,
+ visible_from_module: Option<ModuleId>,
+ callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+ ) -> ControlFlow<()> {
+ let db = table.db;
+ let impls_for_self_ty = impls.for_self_ty(self_ty);
+ for &impl_def in impls_for_self_ty {
+ for &item in &db.impl_data(impl_def).items {
+ if !is_valid_candidate(table, name, receiver_ty, item, self_ty, visible_from_module)
+ {
+ continue;
+ }
+ callback(receiver_adjustments.clone().unwrap_or_default(), item)?;
+ }
+ }
+ ControlFlow::Continue(())
+ }
+}
+
+/// Returns the receiver type for the index trait call.
+pub fn resolve_indexing_op(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ ty: Canonical<Ty>,
+ index_trait: TraitId,
+) -> Option<ReceiverAdjustments> {
+ let mut table = InferenceTable::new(db, env.clone());
+ let ty = table.instantiate_canonical(ty);
+ let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty);
+ for (ty, adj) in deref_chain.into_iter().zip(adj) {
+ let goal = generic_implements_goal(db, env.clone(), index_trait, &ty);
+ if db.trait_solve(env.krate, goal.cast(Interner)).is_some() {
+ return Some(adj);
+ }
+ }
+ None
+}
+
+macro_rules! check_that {
+ ($cond:expr) => {
+ if !$cond {
+ return false;
+ }
+ };
+}
+
+fn is_valid_candidate(
+ table: &mut InferenceTable<'_>,
+ name: Option<&Name>,
+ receiver_ty: Option<&Ty>,
+ item: AssocItemId,
+ self_ty: &Ty,
+ visible_from_module: Option<ModuleId>,
+) -> bool {
+ let db = table.db;
+ match item {
+ AssocItemId::FunctionId(m) => {
+ is_valid_fn_candidate(table, m, name, receiver_ty, self_ty, visible_from_module)
+ }
+ AssocItemId::ConstId(c) => {
+ let data = db.const_data(c);
+ check_that!(receiver_ty.is_none());
+
+ check_that!(name.map_or(true, |n| data.name.as_ref() == Some(n)));
+ check_that!(visible_from_module.map_or(true, |from_module| {
+ let v = db.const_visibility(c).is_visible_from(db.upcast(), from_module);
+ if !v {
+ cov_mark::hit!(const_candidate_not_visible);
+ }
+ v
+ }));
+ if let ItemContainerId::ImplId(impl_id) = c.lookup(db.upcast()).container {
+ let self_ty_matches = table.run_in_snapshot(|table| {
+ let expected_self_ty = TyBuilder::impl_self_ty(db, impl_id)
+ .fill_with_inference_vars(table)
+ .build();
+ table.unify(&expected_self_ty, &self_ty)
+ });
+ if !self_ty_matches {
+ cov_mark::hit!(const_candidate_self_type_mismatch);
+ return false;
+ }
+ }
+ true
+ }
+ _ => false,
+ }
+}
+
+fn is_valid_fn_candidate(
+ table: &mut InferenceTable<'_>,
+ fn_id: FunctionId,
+ name: Option<&Name>,
+ receiver_ty: Option<&Ty>,
+ self_ty: &Ty,
+ visible_from_module: Option<ModuleId>,
+) -> bool {
+ let db = table.db;
+ let data = db.function_data(fn_id);
+
+ check_that!(name.map_or(true, |n| n == &data.name));
+ check_that!(visible_from_module.map_or(true, |from_module| {
+ let v = db.function_visibility(fn_id).is_visible_from(db.upcast(), from_module);
+ if !v {
+ cov_mark::hit!(autoderef_candidate_not_visible);
+ }
+ v
+ }));
+
+ table.run_in_snapshot(|table| {
+ let container = fn_id.lookup(db.upcast()).container;
+ let (impl_subst, expect_self_ty) = match container {
+ ItemContainerId::ImplId(it) => {
+ let subst =
+ TyBuilder::subst_for_def(db, it, None).fill_with_inference_vars(table).build();
+ let self_ty = db.impl_self_ty(it).substitute(Interner, &subst);
+ (subst, self_ty)
+ }
+ ItemContainerId::TraitId(it) => {
+ let subst =
+ TyBuilder::subst_for_def(db, it, None).fill_with_inference_vars(table).build();
+ let self_ty = subst.at(Interner, 0).assert_ty_ref(Interner).clone();
+ (subst, self_ty)
+ }
+ _ => unreachable!(),
+ };
+
+ let fn_subst = TyBuilder::subst_for_def(db, fn_id, Some(impl_subst.clone()))
+ .fill_with_inference_vars(table)
+ .build();
+
+ check_that!(table.unify(&expect_self_ty, self_ty));
+
+ if let Some(receiver_ty) = receiver_ty {
+ check_that!(data.has_self_param());
+
+ let sig = db.callable_item_signature(fn_id.into());
+ let expected_receiver =
+ sig.map(|s| s.params()[0].clone()).substitute(Interner, &fn_subst);
+
+ check_that!(table.unify(receiver_ty, &expected_receiver));
+ }
+
+ if let ItemContainerId::ImplId(impl_id) = container {
+ // We need to consider the bounds on the impl to distinguish functions of the same name
+ // for a type.
+ let predicates = db.generic_predicates(impl_id.into());
+ predicates
+ .iter()
+ .map(|predicate| {
+ let (p, b) = predicate
+ .clone()
+ .substitute(Interner, &impl_subst)
+ // Skipping the inner binders is ok, as we don't handle quantified where
+ // clauses yet.
+ .into_value_and_skipped_binders();
+ stdx::always!(b.len(Interner) == 0);
+ p
+ })
+ // It's ok to get ambiguity here, as we may not have enough information to prove
+ // obligations. We'll check if the user is calling the selected method properly
+ // later anyway.
+ .all(|p| table.try_obligation(p.cast(Interner)).is_some())
+ } else {
+ // For `ItemContainerId::TraitId`, we check if `self_ty` implements the trait in
+ // `iterate_trait_method_candidates()`.
+ // For others, this function shouldn't be called.
+ true
+ }
+ })
+}
+
+pub fn implements_trait(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ trait_: TraitId,
+) -> bool {
+ let goal = generic_implements_goal(db, env.clone(), trait_, ty);
+ let solution = db.trait_solve(env.krate, goal.cast(Interner));
+
+ solution.is_some()
+}
+
+pub fn implements_trait_unique(
+ ty: &Canonical<Ty>,
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ trait_: TraitId,
+) -> bool {
+ let goal = generic_implements_goal(db, env.clone(), trait_, ty);
+ let solution = db.trait_solve(env.krate, goal.cast(Interner));
+
+ matches!(solution, Some(crate::Solution::Unique(_)))
+}
+
+/// This creates Substs for a trait with the given Self type and type variables
+/// for all other parameters, to query Chalk with it.
+fn generic_implements_goal(
+ db: &dyn HirDatabase,
+ env: Arc<TraitEnvironment>,
+ trait_: TraitId,
+ self_ty: &Canonical<Ty>,
+) -> Canonical<InEnvironment<super::DomainGoal>> {
+ let mut kinds = self_ty.binders.interned().to_vec();
+ let trait_ref = TyBuilder::trait_ref(db, trait_)
+ .push(self_ty.value.clone())
+ .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len())
+ .build();
+ kinds.extend(trait_ref.substitution.iter(Interner).skip(1).map(|x| {
+ let vk = match x.data(Interner) {
+ chalk_ir::GenericArgData::Ty(_) => {
+ chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
+ }
+ chalk_ir::GenericArgData::Lifetime(_) => chalk_ir::VariableKind::Lifetime,
+ chalk_ir::GenericArgData::Const(c) => {
+ chalk_ir::VariableKind::Const(c.data(Interner).ty.clone())
+ }
+ };
+ chalk_ir::WithKind::new(vk, UniverseIndex::ROOT)
+ }));
+ let obligation = trait_ref.cast(Interner);
+ Canonical {
+ binders: CanonicalVarKinds::from_iter(Interner, kinds),
+ value: InEnvironment::new(&env.env, obligation),
+ }
+}
+
+fn autoderef_method_receiver(
+ table: &mut InferenceTable<'_>,
+ ty: Ty,
+) -> (Vec<Canonical<Ty>>, Vec<ReceiverAdjustments>) {
+ let (mut deref_chain, mut adjustments): (Vec<_>, Vec<_>) = (Vec::new(), Vec::new());
+ let mut autoderef = autoderef::Autoderef::new(table, ty);
+ while let Some((ty, derefs)) = autoderef.next() {
+ deref_chain.push(autoderef.table.canonicalize(ty).value);
+ adjustments.push(ReceiverAdjustments {
+ autoref: None,
+ autoderefs: derefs,
+ unsize_array: false,
+ });
+ }
+ // As a last step, we can do array unsizing (that's the only unsizing that rustc does for method receivers!)
+ if let (Some((TyKind::Array(parameters, _), binders)), Some(adj)) = (
+ deref_chain.last().map(|ty| (ty.value.kind(Interner), ty.binders.clone())),
+ adjustments.last().cloned(),
+ ) {
+ let unsized_ty = TyKind::Slice(parameters.clone()).intern(Interner);
+ deref_chain.push(Canonical { value: unsized_ty, binders });
+ adjustments.push(ReceiverAdjustments { unsize_array: true, ..adj });
+ }
+ (deref_chain, adjustments)
+}
--- /dev/null
- struct A;
+use expect_test::expect;
+
+use crate::tests::check;
+
+use super::{check_infer, check_no_mismatches, check_types};
+
+#[test]
+fn infer_slice_method() {
+ check_types(
+ r#"
+impl<T> [T] {
+ fn foo(&self) -> T {
+ loop {}
+ }
+}
+
+fn test(x: &[u8]) {
+ <[_]>::foo(x);
+ //^^^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn cross_crate_primitive_method() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:other_crate
+fn test() {
+ let x = 1f32;
+ x.foo();
+} //^^^^^^^ f32
+
+//- /lib.rs crate:other_crate
+mod foo {
+ impl f32 {
+ pub fn foo(self) -> f32 { 0. }
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_array_inherent_impl() {
+ check_types(
+ r#"
+impl<T, const N: usize> [T; N] {
+ fn foo(&self) -> T {
+ loop {}
+ }
+}
+fn test(x: &[u8; 0]) {
+ <[_; 0]>::foo(x);
+ //^^^^^^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_associated_method_struct() {
+ check_infer(
+ r#"
+ struct A { x: u32 }
+
+ impl A {
+ fn new() -> A {
+ A { x: 0 }
+ }
+ }
+ fn test() {
+ let a = A::new();
+ a.x;
+ }
+ "#,
+ expect![[r#"
+ 48..74 '{ ... }': A
+ 58..68 'A { x: 0 }': A
+ 65..66 '0': u32
+ 87..121 '{ ...a.x; }': ()
+ 97..98 'a': A
+ 101..107 'A::new': fn new() -> A
+ 101..109 'A::new()': A
+ 115..116 'a': A
+ 115..118 'a.x': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_struct_in_local_scope() {
+ check_infer(
+ r#"
+ fn mismatch() {
+ struct A;
+
+ impl A {
+ fn from(_: i32, _: i32) -> Self {
+ A
+ }
+ }
+
+ let _a = A::from(1, 2);
+ }
+ "#,
+ expect![[r#"
+ 14..146 '{ ... 2); }': ()
+ 125..127 '_a': A
+ 130..137 'A::from': fn from(i32, i32) -> A
+ 130..143 'A::from(1, 2)': A
+ 138..139 '1': i32
+ 141..142 '2': i32
+ 60..61 '_': i32
+ 68..69 '_': i32
+ 84..109 '{ ... }': A
+ 98..99 'A': A
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_enum() {
+ check_infer(
+ r#"
+ enum A { B, C }
+
+ impl A {
+ pub fn b() -> A {
+ A::B
+ }
+ pub fn c() -> A {
+ A::C
+ }
+ }
+ fn test() {
+ let a = A::b();
+ a;
+ let c = A::c();
+ c;
+ }
+ "#,
+ expect![[r#"
+ 46..66 '{ ... }': A
+ 56..60 'A::B': A
+ 87..107 '{ ... }': A
+ 97..101 'A::C': A
+ 120..177 '{ ... c; }': ()
+ 130..131 'a': A
+ 134..138 'A::b': fn b() -> A
+ 134..140 'A::b()': A
+ 146..147 'a': A
+ 157..158 'c': A
+ 161..165 'A::c': fn c() -> A
+ 161..167 'A::c()': A
+ 173..174 'c': A
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_with_modules() {
+ check_infer(
+ r#"
+ mod a {
- struct B;
++ pub struct A;
+ impl A { pub fn thing() -> A { A {} }}
+ }
+
+ mod b {
- mod c {
- struct C;
++ pub struct B;
+ impl B { pub fn thing() -> u32 { 99 }}
+
- 55..63 '{ A {} }': A
- 57..61 'A {}': A
- 125..131 '{ 99 }': u32
- 127..129 '99': u32
- 201..209 '{ C {} }': C
- 203..207 'C {}': C
- 240..324 '{ ...g(); }': ()
- 250..251 'x': A
- 254..265 'a::A::thing': fn thing() -> A
- 254..267 'a::A::thing()': A
- 277..278 'y': u32
- 281..292 'b::B::thing': fn thing() -> u32
- 281..294 'b::B::thing()': u32
- 304..305 'z': C
- 308..319 'c::C::thing': fn thing() -> C
- 308..321 'c::C::thing()': C
++ pub mod c {
++ pub struct C;
+ impl C { pub fn thing() -> C { C {} }}
+ }
+ }
+ use b::c;
+
+ fn test() {
+ let x = a::A::thing();
+ let y = b::B::thing();
+ let z = c::C::thing();
+ }
+ "#,
+ expect![[r#"
++ 59..67 '{ A {} }': A
++ 61..65 'A {}': A
++ 133..139 '{ 99 }': u32
++ 135..137 '99': u32
++ 217..225 '{ C {} }': C
++ 219..223 'C {}': C
++ 256..340 '{ ...g(); }': ()
++ 266..267 'x': A
++ 270..281 'a::A::thing': fn thing() -> A
++ 270..283 'a::A::thing()': A
++ 293..294 'y': u32
++ 297..308 'b::B::thing': fn thing() -> u32
++ 297..310 'b::B::thing()': u32
++ 320..321 'z': C
++ 324..335 'c::C::thing': fn thing() -> C
++ 324..337 'c::C::thing()': C
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_generics() {
+ check_infer(
+ r#"
+ struct Gen<T> {
+ val: T
+ }
+
+ impl<T> Gen<T> {
+ pub fn make(val: T) -> Gen<T> {
+ Gen { val }
+ }
+ }
+
+ fn test() {
+ let a = Gen::make(0u32);
+ }
+ "#,
+ expect![[r#"
+ 63..66 'val': T
+ 81..108 '{ ... }': Gen<T>
+ 91..102 'Gen { val }': Gen<T>
+ 97..100 'val': T
+ 122..154 '{ ...32); }': ()
+ 132..133 'a': Gen<u32>
+ 136..145 'Gen::make': fn make<u32>(u32) -> Gen<u32>
+ 136..151 'Gen::make(0u32)': Gen<u32>
+ 146..150 '0u32': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_generics_without_args() {
+ check_infer(
+ r#"
+ struct Gen<T> {
+ val: T
+ }
+
+ impl<T> Gen<T> {
+ pub fn make() -> Gen<T> {
+ loop { }
+ }
+ }
+
+ fn test() {
+ let a = Gen::<u32>::make();
+ }
+ "#,
+ expect![[r#"
+ 75..99 '{ ... }': Gen<T>
+ 85..93 'loop { }': !
+ 90..93 '{ }': ()
+ 113..148 '{ ...e(); }': ()
+ 123..124 'a': Gen<u32>
+ 127..143 'Gen::<...::make': fn make<u32>() -> Gen<u32>
+ 127..145 'Gen::<...make()': Gen<u32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_method_generics_2_type_params_without_args() {
+ check_infer(
+ r#"
+ struct Gen<T, U> {
+ val: T,
+ val2: U,
+ }
+
+ impl<T> Gen<u32, T> {
+ pub fn make() -> Gen<u32,T> {
+ loop { }
+ }
+ }
+
+ fn test() {
+ let a = Gen::<u32, u64>::make();
+ }
+ "#,
+ expect![[r#"
+ 101..125 '{ ... }': Gen<u32, T>
+ 111..119 'loop { }': !
+ 116..119 '{ }': ()
+ 139..179 '{ ...e(); }': ()
+ 149..150 'a': Gen<u32, u64>
+ 153..174 'Gen::<...::make': fn make<u64>() -> Gen<u32, u64>
+ 153..176 'Gen::<...make()': Gen<u32, u64>
+ "#]],
+ );
+}
+
+#[test]
+fn cross_crate_associated_method_call() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:other_crate
+fn test() {
+ let x = other_crate::foo::S::thing();
+ x;
+} //^ i128
+
+//- /lib.rs crate:other_crate
+pub mod foo {
+ pub struct S;
+ impl S {
+ pub fn thing() -> i128 { 0 }
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_trait_method_simple() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+trait Trait1 {
+ fn method(&self) -> u32;
+}
+struct S1;
+impl Trait1 for S1 {}
+trait Trait2 {
+ fn method(&self) -> i128;
+}
+struct S2;
+impl Trait2 for S2 {}
+fn test() {
+ S1.method();
+ //^^^^^^^^^^^ u32
+ S2.method(); // -> i128
+ //^^^^^^^^^^^ i128
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_method_scoped() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+struct S;
+mod foo {
+ pub trait Trait1 {
+ fn method(&self) -> u32;
+ }
+ impl Trait1 for super::S {}
+}
+mod bar {
+ pub trait Trait2 {
+ fn method(&self) -> i128;
+ }
+ impl Trait2 for super::S {}
+}
+
+mod foo_test {
+ use super::S;
+ use super::foo::Trait1;
+ fn test() {
+ S.method();
+ //^^^^^^^^^^ u32
+ }
+}
+
+mod bar_test {
+ use super::S;
+ use super::bar::Trait2;
+ fn test() {
+ S.method();
+ //^^^^^^^^^^ i128
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_method_generic_1() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+trait Trait<T> {
+ fn method(&self) -> T;
+}
+struct S;
+impl Trait<u32> for S {}
+fn test() {
+ S.method();
+ //^^^^^^^^^^ u32
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_method_generic_more_params() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+trait Trait<T1, T2, T3> {
+ fn method1(&self) -> (T1, T2, T3);
+ fn method2(&self) -> (T3, T2, T1);
+}
+struct S1;
+impl Trait<u8, u16, u32> for S1 {}
+struct S2;
+impl<T> Trait<i8, i16, T> for S2 {}
+fn test() {
+ S1.method1();
+ //^^^^^^^^^^^^ (u8, u16, u32)
+ S1.method2();
+ //^^^^^^^^^^^^ (u32, u16, u8)
+ S2.method1();
+ //^^^^^^^^^^^^ (i8, i16, {unknown})
+ S2.method2();
+ //^^^^^^^^^^^^ ({unknown}, i16, i8)
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_method_generic_2() {
+ // the trait implementation is intentionally incomplete -- it shouldn't matter
+ check_types(
+ r#"
+trait Trait<T> {
+ fn method(&self) -> T;
+}
+struct S<T>(T);
+impl<U> Trait<U> for S<U> {}
+fn test() {
+ S(1u32).method();
+ //^^^^^^^^^^^^^^^^ u32
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method() {
+ check_infer(
+ r#"
+ trait Default {
+ fn default() -> Self;
+ }
+ struct S;
+ impl Default for S {}
+ fn test() {
+ let s1: S = Default::default();
+ let s2 = S::default();
+ let s3 = <S as Default>::default();
+ }
+ "#,
+ expect![[r#"
+ 86..192 '{ ...t(); }': ()
+ 96..98 's1': S
+ 104..120 'Defaul...efault': fn default<S>() -> S
+ 104..122 'Defaul...ault()': S
+ 132..134 's2': S
+ 137..147 'S::default': fn default<S>() -> S
+ 137..149 'S::default()': S
+ 159..161 's3': S
+ 164..187 '<S as ...efault': fn default<S>() -> S
+ 164..189 '<S as ...ault()': S
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_1() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make() -> T;
+ }
+ struct S;
+ impl Trait<u32> for S {}
+ struct G<T>;
+ impl<T> Trait<T> for G<T> {}
+ fn test() {
+ let a = S::make();
+ let b = G::<u64>::make();
+ let c: f64 = G::make();
+ }
+ "#,
+ expect![[r#"
+ 126..210 '{ ...e(); }': ()
+ 136..137 'a': u32
+ 140..147 'S::make': fn make<S, u32>() -> u32
+ 140..149 'S::make()': u32
+ 159..160 'b': u64
+ 163..177 'G::<u64>::make': fn make<G<u64>, u64>() -> u64
+ 163..179 'G::<u6...make()': u64
+ 189..190 'c': f64
+ 198..205 'G::make': fn make<G<f64>, f64>() -> f64
+ 198..207 'G::make()': f64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_2() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make<U>() -> (T, U);
+ }
+ struct S;
+ impl Trait<u32> for S {}
+ struct G<T>;
+ impl<T> Trait<T> for G<T> {}
+ fn test() {
+ let a = S::make::<i64>();
+ let b: (_, i64) = S::make();
+ let c = G::<u32>::make::<i64>();
+ let d: (u32, _) = G::make::<i64>();
+ let e: (u32, i64) = G::make();
+ }
+ "#,
+ expect![[r#"
+ 134..312 '{ ...e(); }': ()
+ 144..145 'a': (u32, i64)
+ 148..162 'S::make::<i64>': fn make<S, u32, i64>() -> (u32, i64)
+ 148..164 'S::mak...i64>()': (u32, i64)
+ 174..175 'b': (u32, i64)
+ 188..195 'S::make': fn make<S, u32, i64>() -> (u32, i64)
+ 188..197 'S::make()': (u32, i64)
+ 207..208 'c': (u32, i64)
+ 211..232 'G::<u3...:<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64)
+ 211..234 'G::<u3...i64>()': (u32, i64)
+ 244..245 'd': (u32, i64)
+ 258..272 'G::make::<i64>': fn make<G<u32>, u32, i64>() -> (u32, i64)
+ 258..274 'G::mak...i64>()': (u32, i64)
+ 284..285 'e': (u32, i64)
+ 300..307 'G::make': fn make<G<u32>, u32, i64>() -> (u32, i64)
+ 300..309 'G::make()': (u32, i64)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_3() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make() -> (Self, T);
+ }
+ struct S<T>;
+ impl Trait<i64> for S<i32> {}
+ fn test() {
+ let a = S::make();
+ }
+ "#,
+ expect![[r#"
+ 100..126 '{ ...e(); }': ()
+ 110..111 'a': (S<i32>, i64)
+ 114..121 'S::make': fn make<S<i32>, i64>() -> (S<i32>, i64)
+ 114..123 'S::make()': (S<i32>, i64)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_4() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make() -> (Self, T);
+ }
+ struct S<T>;
+ impl Trait<i64> for S<u64> {}
+ impl Trait<i32> for S<u32> {}
+ fn test() {
+ let a: (S<u64>, _) = S::make();
+ let b: (_, i32) = S::make();
+ }
+ "#,
+ expect![[r#"
+ 130..202 '{ ...e(); }': ()
+ 140..141 'a': (S<u64>, i64)
+ 157..164 'S::make': fn make<S<u64>, i64>() -> (S<u64>, i64)
+ 157..166 'S::make()': (S<u64>, i64)
+ 176..177 'b': (S<u32>, i32)
+ 190..197 'S::make': fn make<S<u32>, i32>() -> (S<u32>, i32)
+ 190..199 'S::make()': (S<u32>, i32)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_trait_assoc_method_generics_5() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn make<U>() -> (Self, T, U);
+ }
+ struct S<T>;
+ impl Trait<i64> for S<u64> {}
+ fn test() {
+ let a = <S as Trait<i64>>::make::<u8>();
+ let b: (S<u64>, _, _) = Trait::<i64>::make::<u8>();
+ }
+ "#,
+ expect![[r#"
+ 106..210 '{ ...>(); }': ()
+ 116..117 'a': (S<u64>, i64, u8)
+ 120..149 '<S as ...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
+ 120..151 '<S as ...<u8>()': (S<u64>, i64, u8)
+ 161..162 'b': (S<u64>, i64, u8)
+ 181..205 'Trait:...::<u8>': fn make<S<u64>, i64, u8>() -> (S<u64>, i64, u8)
+ 181..207 'Trait:...<u8>()': (S<u64>, i64, u8)
+ "#]],
+ );
+}
+
+#[test]
+fn infer_call_trait_method_on_generic_param_1() {
+ check_infer(
+ r#"
+ trait Trait {
+ fn method(&self) -> u32;
+ }
+ fn test<T: Trait>(t: T) {
+ t.method();
+ }
+ "#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 63..64 't': T
+ 69..88 '{ ...d(); }': ()
+ 75..76 't': T
+ 75..85 't.method()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_call_trait_method_on_generic_param_2() {
+ check_infer(
+ r#"
+ trait Trait<T> {
+ fn method(&self) -> T;
+ }
+ fn test<U, T: Trait<U>>(t: T) {
+ t.method();
+ }
+ "#,
+ expect![[r#"
+ 32..36 'self': &Self
+ 70..71 't': T
+ 76..95 '{ ...d(); }': ()
+ 82..83 't': T
+ 82..92 't.method()': U
+ "#]],
+ );
+}
+
+#[test]
+fn infer_with_multiple_trait_impls() {
+ check_infer(
+ r#"
+ trait Into<T> {
+ fn into(self) -> T;
+ }
+ struct S;
+ impl Into<u32> for S {}
+ impl Into<u64> for S {}
+ fn test() {
+ let x: u32 = S.into();
+ let y: u64 = S.into();
+ let z = Into::<u64>::into(S);
+ }
+ "#,
+ expect![[r#"
+ 28..32 'self': Self
+ 110..201 '{ ...(S); }': ()
+ 120..121 'x': u32
+ 129..130 'S': S
+ 129..137 'S.into()': u32
+ 147..148 'y': u64
+ 156..157 'S': S
+ 156..164 'S.into()': u64
+ 174..175 'z': u64
+ 178..195 'Into::...::into': fn into<S, u64>(S) -> u64
+ 178..198 'Into::...nto(S)': u64
+ 196..197 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn method_resolution_unify_impl_self_type() {
+ check_types(
+ r#"
+struct S<T>;
+impl S<u32> { fn foo(&self) -> u8 { 0 } }
+impl S<i32> { fn foo(&self) -> i8 { 0 } }
+fn test() { (S::<u32>.foo(), S::<i32>.foo()); }
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ (u8, i8)
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_before_autoref() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(&self) -> i8 { 0 } }
+impl Trait for S { fn foo(self) -> u128 { 0 } }
+fn test() { S.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_by_value_before_autoref() {
+ check_types(
+ r#"
+trait Clone { fn clone(&self) -> Self; }
+struct S;
+impl Clone for S {}
+impl Clone for &S {}
+fn test() { (S.clone(), (&S).clone(), (&&S).clone()); }
+ //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ (S, S, &S)
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_before_autoderef() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(self) -> i8 { 0 } }
+impl Trait for &S { fn foo(self) -> u128 { 0 } }
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_impl_before_trait() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(self) -> i8 { 0 } }
+impl Trait for S { fn foo(self) -> u128 { 0 } }
+fn test() { S.foo(); }
+ //^^^^^^^ i8
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_impl_ref_before_trait() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl S { fn foo(&self) -> i8 { 0 } }
+impl Trait for &S { fn foo(self) -> u128 { 0 } }
+fn test() { S.foo(); }
+ //^^^^^^^ i8
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_autoderef() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Trait for S { fn foo(self) -> u128 { 0 } }
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_unsize_array() {
+ check_types(
+ r#"
+//- minicore: slice
+fn test() {
+ let a = [1, 2, 3];
+ a.len();
+} //^^^^^^^ usize
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_trait_from_prelude() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+struct S;
+impl Clone for S {}
+
+fn test() {
+ S.clone();
+ //^^^^^^^^^ S
+}
+
+//- /lib.rs crate:core
+pub mod prelude {
+ pub mod rust_2018 {
+ pub trait Clone {
+ fn clone(&self) -> Self;
+ }
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_for_unknown_trait() {
+ // The blanket impl currently applies because we ignore the unresolved where clause
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl<T> Trait for T where T: UnknownTrait {}
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_not_met() {
+ // The blanket impl shouldn't apply because we can't prove S: Clone
+ // This is also to make sure that we don't resolve to the foo method just
+ // because that's the only method named foo we can find, which would make
+ // the below tests not work
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl<T> Trait for T where T: Clone {}
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_inline_not_met() {
+ // The blanket impl shouldn't apply because we can't prove S: Clone
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl<T: Clone> Trait for T {}
+fn test() { (&S).foo(); }
+ //^^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_1() {
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Clone for S {}
+impl<T> Trait for T where T: Clone {}
+fn test() { S.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_2() {
+ check_types(
+ r#"
+trait Into<T> { fn into(self) -> T; }
+trait From<T> { fn from(other: T) -> Self; }
+struct S1;
+struct S2;
+impl From<S2> for S1 {}
+impl<T, U> Into<U> for T where U: From<T> {}
+fn test() { S2.into(); }
+ //^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_where_clause_inline() {
+ check_types(
+ r#"
+trait Into<T> { fn into(self) -> T; }
+trait From<T> { fn from(other: T) -> Self; }
+struct S1;
+struct S2;
+impl From<S2> for S1 {}
+impl<T, U: From<T>> Into<U> for T {}
+fn test() { S2.into(); }
+ //^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_overloaded_method() {
+ check_types(
+ r#"
+struct Wrapper<T>(T);
+struct Foo<T>(T);
+struct Bar<T>(T);
+
+impl<T> Wrapper<Foo<T>> {
+ pub fn new(foo_: T) -> Self {
+ Wrapper(Foo(foo_))
+ }
+}
+
+impl<T> Wrapper<Bar<T>> {
+ pub fn new(bar_: T) -> Self {
+ Wrapper(Bar(bar_))
+ }
+}
+
+fn main() {
+ let a = Wrapper::<Foo<f32>>::new(1.0);
+ let b = Wrapper::<Bar<f32>>::new(1.0);
+ (a, b);
+ //^^^^^^ (Wrapper<Foo<f32>>, Wrapper<Bar<f32>>)
+}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_overloaded_const() {
+ cov_mark::check!(const_candidate_self_type_mismatch);
+ check_types(
+ r#"
+struct Wrapper<T>(T);
+struct Foo<T>(T);
+struct Bar<T>(T);
+
+impl<T> Wrapper<Foo<T>> {
+ pub const VALUE: Foo<T>;
+}
+
+impl<T> Wrapper<Bar<T>> {
+ pub const VALUE: Bar<T>;
+}
+
+fn main() {
+ let a = Wrapper::<Foo<f32>>::VALUE;
+ let b = Wrapper::<Bar<f32>>::VALUE;
+ (a, b);
+ //^^^^^^ (Foo<f32>, Bar<f32>)
+}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_encountering_fn_type() {
+ check_types(
+ r#"
+//- /main.rs
+fn foo() {}
+trait FnOnce { fn call(self); }
+fn test() { foo.call(); }
+ //^^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn super_trait_impl_return_trait_method_resolution() {
+ check_infer(
+ r#"
+ //- minicore: sized
+ trait Base {
+ fn foo(self) -> usize;
+ }
+
+ trait Super : Base {}
+
+ fn base1() -> impl Base { loop {} }
+ fn super1() -> impl Super { loop {} }
+
+ fn test(base2: impl Base, super2: impl Super) {
+ base1().foo();
+ super1().foo();
+ base2.foo();
+ super2.foo();
+ }
+ "#,
+ expect![[r#"
+ 24..28 'self': Self
+ 90..101 '{ loop {} }': !
+ 92..99 'loop {}': !
+ 97..99 '{}': ()
+ 128..139 '{ loop {} }': !
+ 130..137 'loop {}': !
+ 135..137 '{}': ()
+ 149..154 'base2': impl Base
+ 167..173 'super2': impl Super
+ 187..264 '{ ...o(); }': ()
+ 193..198 'base1': fn base1() -> impl Base
+ 193..200 'base1()': impl Base
+ 193..206 'base1().foo()': usize
+ 212..218 'super1': fn super1() -> impl Super
+ 212..220 'super1()': impl Super
+ 212..226 'super1().foo()': usize
+ 232..237 'base2': impl Base
+ 232..243 'base2.foo()': usize
+ 249..255 'super2': impl Super
+ 249..261 'super2.foo()': usize
+ "#]],
+ );
+}
+
+#[test]
+fn method_resolution_non_parameter_type() {
+ check_types(
+ r#"
+mod a {
+ pub trait Foo {
+ fn foo(&self);
+ }
+}
+
+struct Wrapper<T>(T);
+fn foo<T>(t: Wrapper<T>)
+where
+ Wrapper<T>: a::Foo,
+{
+ t.foo();
+} //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_3373() {
+ check_types(
+ r#"
+struct A<T>(T);
+
+impl A<i32> {
+ fn from(v: i32) -> A<i32> { A(v) }
+}
+
+fn main() {
+ A::from(3);
+} //^^^^^^^^^^ A<i32>
+"#,
+ );
+}
+
+#[test]
+fn method_resolution_slow() {
+ // this can get quite slow if we set the solver size limit too high
+ check_types(
+ r#"
+trait SendX {}
+
+struct S1; impl SendX for S1 {}
+struct S2; impl SendX for S2 {}
+struct U1;
+
+trait Trait { fn method(self); }
+
+struct X1<A, B> {}
+impl<A, B> SendX for X1<A, B> where A: SendX, B: SendX {}
+
+struct S<B, C> {}
+
+trait FnX {}
+
+impl<B, C> Trait for S<B, C> where C: FnX, B: SendX {}
+
+fn test() { (S {}).method(); }
+ //^^^^^^^^^^^^^^^ ()
+"#,
+ );
+}
+
+#[test]
+fn dyn_trait_super_trait_not_in_scope() {
+ check_infer(
+ r#"
+ mod m {
+ pub trait SuperTrait {
+ fn foo(&self) -> u32 { 0 }
+ }
+ }
+ trait Trait: m::SuperTrait {}
+
+ struct S;
+ impl m::SuperTrait for S {}
+ impl Trait for S {}
+
+ fn test(d: &dyn Trait) {
+ d.foo();
+ }
+ "#,
+ expect![[r#"
+ 51..55 'self': &Self
+ 64..69 '{ 0 }': u32
+ 66..67 '0': u32
+ 176..177 'd': &dyn Trait
+ 191..207 '{ ...o(); }': ()
+ 197..198 'd': &dyn Trait
+ 197..204 'd.foo()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn method_resolution_foreign_opaque_type() {
+ check_infer(
+ r#"
+extern "C" {
+ type S;
+ fn f() -> &'static S;
+}
+
+impl S {
+ fn foo(&self) -> bool {
+ true
+ }
+}
+
+fn test() {
+ let s = unsafe { f() };
+ s.foo();
+}
+"#,
+ expect![[r#"
+ 75..79 'self': &S
+ 89..109 '{ ... }': bool
+ 99..103 'true': bool
+ 123..167 '{ ...o(); }': ()
+ 133..134 's': &S
+ 137..151 'unsafe { f() }': &S
+ 137..151 'unsafe { f() }': &S
+ 146..147 'f': fn f() -> &S
+ 146..149 'f()': &S
+ 157..158 's': &S
+ 157..164 's.foo()': bool
+ "#]],
+ );
+}
+
+#[test]
+fn method_with_allocator_box_self_type() {
+ check_types(
+ r#"
+struct Slice<T> {}
+struct Box<T, A> {}
+
+impl<T> Slice<T> {
+ pub fn into_vec<A>(self: Box<Self, A>) { }
+}
+
+fn main() {
+ let foo: Slice<u32>;
+ foo.into_vec(); // we shouldn't crash on this at least
+} //^^^^^^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn method_on_dyn_impl() {
+ check_types(
+ r#"
+trait Foo {}
+
+impl Foo for u32 {}
+impl dyn Foo + '_ {
+ pub fn dyn_foo(&self) -> u32 {
+ 0
+ }
+}
+
+fn main() {
+ let f = &42u32 as &dyn Foo;
+ f.dyn_foo();
+ // ^^^^^^^^^^^ u32
+}
+"#,
+ );
+}
+
+#[test]
+fn dyn_trait_method_priority() {
+ check_types(
+ r#"
+//- minicore: from
+trait Trait {
+ fn into(&self) -> usize { 0 }
+}
+
+fn foo(a: &dyn Trait) {
+ let _ = a.into();
+ //^usize
+}
+ "#,
+ );
+}
+
+#[test]
+fn trait_method_priority_for_placeholder_type() {
+ check_types(
+ r#"
+//- minicore: from
+trait Trait {
+ fn into(&self) -> usize { 0 }
+}
+
+fn foo<T: Trait>(a: &T) {
+ let _ = a.into();
+ //^usize
+}
+ "#,
+ );
+}
+
+#[test]
+fn autoderef_visibility_field() {
+ check(
+ r#"
+//- minicore: deref
+mod a {
+ pub struct Foo(pub char);
+ pub struct Bar(i32);
+ impl Bar {
+ pub fn new() -> Self {
+ Self(0)
+ }
+ }
+ impl core::ops::Deref for Bar {
+ type Target = Foo;
+ fn deref(&self) -> &Foo {
+ &Foo('z')
+ }
+ }
+}
+mod b {
+ fn foo() {
+ let x = super::a::Bar::new().0;
+ // ^^^^^^^^^^^^^^^^^^^^ adjustments: Deref(Some(OverloadedDeref(Not)))
+ // ^^^^^^^^^^^^^^^^^^^^^^ type: char
+ }
+}
+"#,
+ )
+}
+
+#[test]
+fn autoderef_visibility_method() {
+ cov_mark::check!(autoderef_candidate_not_visible);
+ check(
+ r#"
+//- minicore: deref
+mod a {
+ pub struct Foo(pub char);
+ impl Foo {
+ pub fn mango(&self) -> char {
+ self.0
+ }
+ }
+ pub struct Bar(i32);
+ impl Bar {
+ pub fn new() -> Self {
+ Self(0)
+ }
+ fn mango(&self) -> i32 {
+ self.0
+ }
+ }
+ impl core::ops::Deref for Bar {
+ type Target = Foo;
+ fn deref(&self) -> &Foo {
+ &Foo('z')
+ }
+ }
+}
+mod b {
+ fn foo() {
+ let x = super::a::Bar::new().mango();
+ // ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ type: char
+ }
+}
+"#,
+ )
+}
+
+#[test]
+fn trait_vs_private_inherent_const() {
+ cov_mark::check!(const_candidate_not_visible);
+ check(
+ r#"
+mod a {
+ pub struct Foo;
+ impl Foo {
+ const VALUE: u32 = 2;
+ }
+ pub trait Trait {
+ const VALUE: usize;
+ }
+ impl Trait for Foo {
+ const VALUE: usize = 3;
+ }
+
+ fn foo() {
+ let x = Foo::VALUE;
+ // ^^^^^^^^^^ type: u32
+ }
+}
+use a::Trait;
+fn foo() {
+ let x = a::Foo::VALUE;
+ // ^^^^^^^^^^^^^ type: usize
+}
+"#,
+ )
+}
+
+#[test]
+fn trait_impl_in_unnamed_const() {
+ check_types(
+ r#"
+struct S;
+
+trait Tr {
+ fn method(&self) -> u16;
+}
+
+const _: () = {
+ impl Tr for S {}
+};
+
+fn f() {
+ S.method();
+ //^^^^^^^^^^ u16
+}
+ "#,
+ );
+}
+
+#[test]
+fn trait_impl_in_synstructure_const() {
+ check_types(
+ r#"
+struct S;
+
+trait Tr {
+ fn method(&self) -> u16;
+}
+
+const _DERIVE_Tr_: () = {
+ impl Tr for S {}
+};
+
+fn f() {
+ S.method();
+ //^^^^^^^^^^ u16
+}
+ "#,
+ );
+}
+
+#[test]
+fn inherent_impl_in_unnamed_const() {
+ check_types(
+ r#"
+struct S;
+
+const _: () = {
+ impl S {
+ fn method(&self) -> u16 { 0 }
+
+ pub(super) fn super_method(&self) -> u16 { 0 }
+
+ pub(crate) fn crate_method(&self) -> u16 { 0 }
+
+ pub fn pub_method(&self) -> u16 { 0 }
+ }
+};
+
+fn f() {
+ S.method();
+ //^^^^^^^^^^ u16
+
+ S.super_method();
+ //^^^^^^^^^^^^^^^^ u16
+
+ S.crate_method();
+ //^^^^^^^^^^^^^^^^ u16
+
+ S.pub_method();
+ //^^^^^^^^^^^^^^ u16
+}
+ "#,
+ );
+}
+
+#[test]
+fn resolve_const_generic_array_methods() {
+ check_types(
+ r#"
+#[lang = "array"]
+impl<T, const N: usize> [T; N] {
+ pub fn map<F, U>(self, f: F) -> [U; N]
+ where
+ F: FnMut(T) -> U,
+ { loop {} }
+}
+
+#[lang = "slice"]
+impl<T> [T] {
+ pub fn map<F, U>(self, f: F) -> &[U]
+ where
+ F: FnMut(T) -> U,
+ { loop {} }
+}
+
+fn f() {
+ let v = [1, 2].map::<_, usize>(|x| -> x * 2);
+ v;
+ //^ [usize; 2]
+}
+ "#,
+ );
+}
+
+#[test]
+fn resolve_const_generic_method() {
+ check_types(
+ r#"
+struct Const<const N: usize>;
+
+#[lang = "array"]
+impl<T, const N: usize> [T; N] {
+ pub fn my_map<F, U, const X: usize>(self, f: F, c: Const<X>) -> [U; X]
+ where
+ F: FnMut(T) -> U,
+ { loop {} }
+}
+
+#[lang = "slice"]
+impl<T> [T] {
+ pub fn my_map<F, const X: usize, U>(self, f: F, c: Const<X>) -> &[U]
+ where
+ F: FnMut(T) -> U,
+ { loop {} }
+}
+
+fn f<const C: usize, P>() {
+ let v = [1, 2].my_map::<_, (), 12>(|x| -> x * 2, Const::<12>);
+ v;
+ //^ [(); 12]
+ let v = [1, 2].my_map::<_, P, C>(|x| -> x * 2, Const::<C>);
+ v;
+ //^ [P; C]
+}
+ "#,
+ );
+}
+
+#[test]
+fn const_generic_type_alias() {
+ check_types(
+ r#"
+struct Const<const N: usize>;
+type U2 = Const<2>;
+type U5 = Const<5>;
+
+impl U2 {
+ fn f(self) -> Const<12> {
+ loop {}
+ }
+}
+
+impl U5 {
+ fn f(self) -> Const<15> {
+ loop {}
+ }
+}
+
+fn f(x: U2) {
+ let y = x.f();
+ //^ Const<12>
+}
+ "#,
+ );
+}
+
+#[test]
+fn skip_array_during_method_dispatch() {
+ check_types(
+ r#"
+//- /main2018.rs crate:main2018 deps:core
+use core::IntoIterator;
+
+fn f() {
+ let v = [4].into_iter();
+ v;
+ //^ &i32
+
+ let a = [0, 1].into_iter();
+ a;
+ //^ &i32
+}
+
+//- /main2021.rs crate:main2021 deps:core edition:2021
+use core::IntoIterator;
+
+fn f() {
+ let v = [4].into_iter();
+ v;
+ //^ i32
+
+ let a = [0, 1].into_iter();
+ a;
+ //^ &i32
+}
+
+//- /core.rs crate:core
+#[rustc_skip_array_during_method_dispatch]
+pub trait IntoIterator {
+ type Out;
+ fn into_iter(self) -> Self::Out;
+}
+
+impl<T> IntoIterator for [T; 1] {
+ type Out = T;
+ fn into_iter(self) -> Self::Out { loop {} }
+}
+impl<'a, T> IntoIterator for &'a [T] {
+ type Out = &'a T;
+ fn into_iter(self) -> Self::Out { loop {} }
+}
+ "#,
+ );
+}
+
+#[test]
+fn sized_blanket_impl() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Foo { fn foo() -> u8; }
+impl<T: Sized> Foo for T {}
+fn f<S: Sized, T, U: ?Sized>() {
+ u32::foo;
+ S::foo;
+ T::foo;
+ U::foo;
+ <[u32]>::foo;
+}
+"#,
+ expect![[r#"
+ 89..160 '{ ...foo; }': ()
+ 95..103 'u32::foo': fn foo<u32>() -> u8
+ 109..115 'S::foo': fn foo<S>() -> u8
+ 121..127 'T::foo': fn foo<T>() -> u8
+ 133..139 'U::foo': {unknown}
+ 145..157 '<[u32]>::foo': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn local_impl() {
+ check_types(
+ r#"
+fn main() {
+ struct SomeStruct(i32);
+
+ impl SomeStruct {
+ fn is_even(&self) -> bool {
+ self.0 % 2 == 0
+ }
+ }
+
+ let o = SomeStruct(3);
+ let is_even = o.is_even();
+ // ^^^^^^^ bool
+}
+ "#,
+ );
+}
+
+#[test]
+fn deref_fun_1() {
+ check_types(
+ r#"
+//- minicore: deref
+
+struct A<T, U>(T, U);
+struct B<T>(T);
+struct C<T>(T);
+
+impl<T> core::ops::Deref for A<B<T>, u32> {
+ type Target = B<T>;
+ fn deref(&self) -> &B<T> { &self.0 }
+}
+impl core::ops::Deref for B<isize> {
+ type Target = C<isize>;
+ fn deref(&self) -> &C<isize> { loop {} }
+}
+
+impl<T: Copy> C<T> {
+ fn thing(&self) -> T { self.0 }
+}
+
+fn make<T>() -> T { loop {} }
+
+fn test() {
+ let a1 = A(make(), make());
+ let _: usize = (*a1).0;
+ a1;
+ //^^ A<B<usize>, u32>
+
+ let a2 = A(make(), make());
+ a2.thing();
+ //^^^^^^^^^^ isize
+ a2;
+ //^^ A<B<isize>, u32>
+}
+"#,
+ );
+}
+
+#[test]
+fn deref_fun_2() {
+ check_types(
+ r#"
+//- minicore: deref
+
+struct A<T, U>(T, U);
+struct B<T>(T);
+struct C<T>(T);
+
+impl<T> core::ops::Deref for A<B<T>, u32> {
+ type Target = B<T>;
+ fn deref(&self) -> &B<T> { &self.0 }
+}
+impl core::ops::Deref for B<isize> {
+ type Target = C<isize>;
+ fn deref(&self) -> &C<isize> { loop {} }
+}
+
+impl<T> core::ops::Deref for A<C<T>, i32> {
+ type Target = C<T>;
+ fn deref(&self) -> &C<T> { &self.0 }
+}
+
+impl<T: Copy> C<T> {
+ fn thing(&self) -> T { self.0 }
+}
+
+fn make<T>() -> T { loop {} }
+
+fn test() {
+ let a1 = A(make(), 1u32);
+ a1.thing();
+ a1;
+ //^^ A<B<isize>, u32>
+
+ let a2 = A(make(), 1i32);
+ let _: &str = a2.thing();
+ a2;
+ //^^ A<C<&str>, i32>
+}
+"#,
+ );
+}
+
+#[test]
+fn receiver_adjustment_autoref() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+fn test() {
+ Foo.foo();
+ //^^^ adjustments: Borrow(Ref(Not))
+ (&Foo).foo();
+ // ^^^^ adjustments: ,
+}
+"#,
+ );
+}
+
+#[test]
+fn receiver_adjustment_unsize_array() {
+ // FIXME not quite correct
+ check(
+ r#"
+//- minicore: slice
+fn test() {
+ let a = [1, 2, 3];
+ a.len();
+} //^ adjustments: Pointer(Unsize), Borrow(Ref(Not))
+"#,
+ );
+}
+
+#[test]
+fn bad_inferred_reference_1() {
+ check_no_mismatches(
+ r#"
+//- minicore: sized
+pub trait Into<T>: Sized {
+ fn into(self) -> T;
+}
+impl<T> Into<T> for T {
+ fn into(self) -> T { self }
+}
+
+trait ExactSizeIterator {
+ fn len(&self) -> usize;
+}
+
+pub struct Foo;
+impl Foo {
+ fn len(&self) -> usize { 0 }
+}
+
+pub fn test(generic_args: impl Into<Foo>) {
+ let generic_args = generic_args.into();
+ generic_args.len();
+ let _: Foo = generic_args;
+}
+"#,
+ );
+}
+
+#[test]
+fn bad_inferred_reference_2() {
+ check_no_mismatches(
+ r#"
+//- minicore: deref
+trait ExactSizeIterator {
+ fn len(&self) -> usize;
+}
+
+pub struct Foo;
+impl Foo {
+ fn len(&self) -> usize { 0 }
+}
+
+pub fn test() {
+ let generic_args;
+ generic_args.len();
+ let _: Foo = generic_args;
+}
+"#,
+ );
+}
+
+#[test]
+fn resolve_minicore_iterator() {
+ check_types(
+ r#"
+//- minicore: iterators, sized
+fn foo() {
+ let m = core::iter::repeat(()).filter_map(|()| Some(92)).next();
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Option<i32>
+"#,
+ );
+}
+
+#[test]
+fn primitive_assoc_fn_shadowed_by_use() {
+ check_types(
+ r#"
+//- /lib.rs crate:lib deps:core
+use core::u16;
+
+fn f() -> u16 {
+ let x = u16::from_le_bytes();
+ x
+ //^ u16
+}
+
+//- /core.rs crate:core
+pub mod u16 {}
+
+impl u16 {
+ pub fn from_le_bytes() -> Self { 0 }
+}
+ "#,
+ )
+}
+
+#[test]
+fn with_impl_bounds() {
+ check_types(
+ r#"
+trait Trait {}
+struct Foo<T>(T);
+impl Trait for isize {}
+
+impl<T: Trait> Foo<T> {
+ fn foo() -> isize { 0 }
+ fn bar(&self) -> isize { 0 }
+}
+
+impl Foo<()> {
+ fn foo() {}
+ fn bar(&self) {}
+}
+
+fn f() {
+ let _ = Foo::<isize>::foo();
+ //^isize
+ let _ = Foo(0isize).bar();
+ //^isize
+ let _ = Foo::<()>::foo();
+ //^()
+ let _ = Foo(()).bar();
+ //^()
+ let _ = Foo::<usize>::foo();
+ //^{unknown}
+ let _ = Foo(0usize).bar();
+ //^{unknown}
+}
+
+fn g<T: Trait>(a: T) {
+ let _ = Foo::<T>::foo();
+ //^isize
+ let _ = Foo(a).bar();
+ //^isize
+}
+ "#,
+ );
+}
--- /dev/null
+use expect_test::expect;
+
+use super::{check_infer, check_no_mismatches, check_types};
+
+#[test]
+fn bug_484() {
+ check_infer(
+ r#"
+ fn test() {
+ let x = if true {};
+ }
+ "#,
+ expect![[r#"
+ 10..37 '{ ... {}; }': ()
+ 20..21 'x': ()
+ 24..34 'if true {}': ()
+ 27..31 'true': bool
+ 32..34 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn no_panic_on_field_of_enum() {
+ check_infer(
+ r#"
+ enum X {}
+
+ fn test(x: X) {
+ x.some_field;
+ }
+ "#,
+ expect![[r#"
+ 19..20 'x': X
+ 25..46 '{ ...eld; }': ()
+ 31..32 'x': X
+ 31..43 'x.some_field': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn bug_585() {
+ check_infer(
+ r#"
+ fn test() {
+ X {};
+ match x {
+ A::B {} => (),
+ A::Y() => (),
+ }
+ }
+ "#,
+ expect![[r#"
+ 10..88 '{ ... } }': ()
+ 16..20 'X {}': {unknown}
+ 26..86 'match ... }': ()
+ 32..33 'x': {unknown}
+ 44..51 'A::B {}': {unknown}
+ 55..57 '()': ()
+ 67..73 'A::Y()': {unknown}
+ 77..79 '()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn bug_651() {
+ check_infer(
+ r#"
+ fn quux() {
+ let y = 92;
+ 1 + y;
+ }
+ "#,
+ expect![[r#"
+ 10..40 '{ ...+ y; }': ()
+ 20..21 'y': i32
+ 24..26 '92': i32
+ 32..33 '1': i32
+ 32..37 '1 + y': i32
+ 36..37 'y': i32
+ "#]],
+ );
+}
+
+#[test]
+fn recursive_vars() {
+ check_infer(
+ r#"
+ fn test() {
+ let y = unknown;
+ [y, &y];
+ }
+ "#,
+ expect![[r#"
+ 10..47 '{ ...&y]; }': ()
+ 20..21 'y': {unknown}
+ 24..31 'unknown': {unknown}
+ 37..44 '[y, &y]': [{unknown}; 2]
+ 38..39 'y': {unknown}
+ 41..43 '&y': &{unknown}
+ 42..43 'y': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn recursive_vars_2() {
+ check_infer(
+ r#"
+ fn test() {
+ let x = unknown;
+ let y = unknown;
+ [(x, y), (&y, &x)];
+ }
+ "#,
+ expect![[r#"
+ 10..79 '{ ...x)]; }': ()
+ 20..21 'x': &{unknown}
+ 24..31 'unknown': &{unknown}
+ 41..42 'y': {unknown}
+ 45..52 'unknown': {unknown}
+ 58..76 '[(x, y..., &x)]': [(&{unknown}, {unknown}); 2]
+ 59..65 '(x, y)': (&{unknown}, {unknown})
+ 60..61 'x': &{unknown}
+ 63..64 'y': {unknown}
+ 67..75 '(&y, &x)': (&{unknown}, {unknown})
+ 68..70 '&y': &{unknown}
+ 69..70 'y': {unknown}
+ 72..74 '&x': &&{unknown}
+ 73..74 'x': &{unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn array_elements_expected_type() {
+ check_no_mismatches(
+ r#"
+ fn test() {
+ let x: [[u32; 2]; 2] = [[1, 2], [3, 4]];
+ }
+ "#,
+ );
+}
+
+#[test]
+fn infer_std_crash_1() {
+ // caused stack overflow, taken from std
+ check_infer(
+ r#"
+ enum Maybe<T> {
+ Real(T),
+ Fake,
+ }
+
+ fn write() {
+ match something_unknown {
+ Maybe::Real(ref mut something) => (),
+ }
+ }
+ "#,
+ expect![[r#"
+ 53..138 '{ ... } }': ()
+ 59..136 'match ... }': ()
+ 65..82 'someth...nknown': Maybe<{unknown}>
+ 93..123 'Maybe:...thing)': Maybe<{unknown}>
+ 105..122 'ref mu...ething': &mut {unknown}
+ 127..129 '()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_2() {
+ // caused "equating two type variables, ...", taken from std
+ check_infer(
+ r#"
+ fn test_line_buffer() {
+ &[0, b'\n', 1, b'\n'];
+ }
+ "#,
+ expect![[r#"
+ 22..52 '{ ...n']; }': ()
+ 28..49 '&[0, b...b'\n']': &[u8; 4]
+ 29..49 '[0, b'...b'\n']': [u8; 4]
+ 30..31 '0': u8
+ 33..38 'b'\n'': u8
+ 40..41 '1': u8
+ 43..48 'b'\n'': u8
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_3() {
+ // taken from rustc
+ check_infer(
+ r#"
+ pub fn compute() {
+ match nope!() {
+ SizeSkeleton::Pointer { non_zero: true, tail } => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 17..107 '{ ... } }': ()
+ 23..105 'match ... }': ()
+ 29..36 'nope!()': {unknown}
+ 47..93 'SizeSk...tail }': {unknown}
+ 81..85 'true': bool
+ 81..85 'true': bool
+ 87..91 'tail': {unknown}
+ 97..99 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_4() {
+ // taken from rustc
+ check_infer(
+ r#"
+ pub fn primitive_type() {
+ match *self {
+ BorrowedRef { type_: Primitive(p), ..} => {},
+ }
+ }
+ "#,
+ expect![[r#"
+ 24..105 '{ ... } }': ()
+ 30..103 'match ... }': ()
+ 36..41 '*self': {unknown}
+ 37..41 'self': {unknown}
+ 52..90 'Borrow...), ..}': {unknown}
+ 73..85 'Primitive(p)': {unknown}
+ 83..84 'p': {unknown}
+ 94..96 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn infer_std_crash_5() {
+ // taken from rustc
+ check_infer(
+ r#"
+ fn extra_compiler_flags() {
+ for content in doesnt_matter {
+ let name = if doesnt_matter {
+ first
+ } else {
+ &content
+ };
+
+ let content = if ICE_REPORT_COMPILER_FLAGS_STRIP_VALUE.contains(&name) {
+ name
+ } else {
+ content
+ };
+ }
+ }
+ "#,
+ expect![[r#"
+ 26..322 '{ ... } }': ()
+ 32..320 'for co... }': ()
+ 36..43 'content': {unknown}
+ 47..60 'doesnt_matter': {unknown}
+ 61..320 '{ ... }': ()
+ 75..79 'name': &{unknown}
+ 82..166 'if doe... }': &{unknown}
+ 85..98 'doesnt_matter': bool
+ 99..128 '{ ... }': &{unknown}
+ 113..118 'first': &{unknown}
+ 134..166 '{ ... }': &{unknown}
+ 148..156 '&content': &{unknown}
+ 149..156 'content': {unknown}
+ 181..188 'content': &{unknown}
+ 191..313 'if ICE... }': &{unknown}
+ 194..231 'ICE_RE..._VALUE': {unknown}
+ 194..247 'ICE_RE...&name)': bool
+ 241..246 '&name': &&{unknown}
+ 242..246 'name': &{unknown}
+ 248..276 '{ ... }': &{unknown}
+ 262..266 'name': &{unknown}
+ 282..313 '{ ... }': {unknown}
+ 296..303 'content': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn infer_nested_generics_crash() {
+ // another crash found typechecking rustc
+ check_infer(
+ r#"
+ struct Canonical<V> {
+ value: V,
+ }
+ struct QueryResponse<V> {
+ value: V,
+ }
+ fn test<R>(query_response: Canonical<QueryResponse<R>>) {
+ &query_response.value;
+ }
+ "#,
+ expect![[r#"
+ 91..105 'query_response': Canonical<QueryResponse<R>>
+ 136..166 '{ ...lue; }': ()
+ 142..163 '&query....value': &QueryResponse<R>
+ 143..157 'query_response': Canonical<QueryResponse<R>>
+ 143..163 'query_....value': QueryResponse<R>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_paren_macro_call() {
+ check_infer(
+ r#"
+ macro_rules! bar { () => {0u32} }
+ fn test() {
+ let a = (bar!());
+ }
+ "#,
+ expect![[r#"
+ !0..4 '0u32': u32
+ 44..69 '{ ...()); }': ()
+ 54..55 'a': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_array_macro_call() {
+ check_infer(
+ r#"
+ macro_rules! bar { () => {0u32} }
+ fn test() {
+ let a = [bar!()];
+ }
+ "#,
+ expect![[r#"
+ !0..4 '0u32': u32
+ 44..69 '{ ...()]; }': ()
+ 54..55 'a': [u32; 1]
+ 58..66 '[bar!()]': [u32; 1]
+ "#]],
+ );
+}
+
+#[test]
+fn bug_1030() {
+ check_infer(
+ r#"
+ struct HashSet<T, H>;
+ struct FxHasher;
+ type FxHashSet<T> = HashSet<T, FxHasher>;
+
+ impl<T, H> HashSet<T, H> {
+ fn default() -> HashSet<T, H> {}
+ }
+
+ pub fn main_loop() {
+ FxHashSet::default();
+ }
+ "#,
+ expect![[r#"
+ 143..145 '{}': HashSet<T, H>
+ 168..197 '{ ...t(); }': ()
+ 174..192 'FxHash...efault': fn default<{unknown}, FxHasher>() -> HashSet<{unknown}, FxHasher>
+ 174..194 'FxHash...ault()': HashSet<{unknown}, FxHasher>
+ "#]],
+ );
+}
+
+#[test]
+fn issue_2669() {
+ check_infer(
+ r#"
+ trait A {}
+ trait Write {}
+ struct Response<T> {}
+
+ trait D {
+ fn foo();
+ }
+
+ impl<T:A> D for Response<T> {
+ fn foo() {
+ end();
+ fn end<W: Write>() {
+ let _x: T = loop {};
+ }
+ }
+ }
+ "#,
+ expect![[r#"
+ 119..214 '{ ... }': ()
+ 129..132 'end': fn end<{unknown}>()
+ 129..134 'end()': ()
+ 163..208 '{ ... }': ()
+ 181..183 '_x': !
+ 190..197 'loop {}': !
+ 195..197 '{}': ()
+ "#]],
+ )
+}
+
+#[test]
+fn issue_2705() {
+ check_infer(
+ r#"
+ trait Trait {}
+ fn test() {
+ <Trait<u32>>::foo()
+ }
+ "#,
+ expect![[r#"
+ 25..52 '{ ...oo() }': ()
+ 31..48 '<Trait...>::foo': {unknown}
+ 31..50 '<Trait...:foo()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_2683_chars_impl() {
+ check_types(
+ r#"
+//- minicore: iterator
+pub struct Chars<'a> {}
+impl<'a> Iterator for Chars<'a> {
+ type Item = char;
+ fn next(&mut self) -> Option<char> { loop {} }
+}
+
+fn test() {
+ let chars: Chars<'_>;
+ (chars.next(), chars.nth(1));
+} //^^^^^^^^^^^^^^^^^^^^^^^^^^^^ (Option<char>, Option<char>)
+"#,
+ );
+}
+
+#[test]
+fn issue_3999_slice() {
+ check_infer(
+ r#"
+ fn foo(params: &[usize]) {
+ match params {
+ [ps @ .., _] => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 7..13 'params': &[usize]
+ 25..80 '{ ... } }': ()
+ 31..78 'match ... }': ()
+ 37..43 'params': &[usize]
+ 54..66 '[ps @ .., _]': [usize]
+ 55..62 'ps @ ..': &[usize]
+ 60..62 '..': [usize]
+ 64..65 '_': usize
+ 70..72 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_3999_struct() {
+ // rust-analyzer should not panic on seeing this malformed
+ // record pattern.
+ check_infer(
+ r#"
+ struct Bar {
+ a: bool,
+ }
+ fn foo(b: Bar) {
+ match b {
+ Bar { a: .. } => {},
+ }
+ }
+ "#,
+ expect![[r#"
+ 35..36 'b': Bar
+ 43..95 '{ ... } }': ()
+ 49..93 'match ... }': ()
+ 55..56 'b': Bar
+ 67..80 'Bar { a: .. }': Bar
+ 76..78 '..': bool
+ 84..86 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4235_name_conflicts() {
+ check_infer(
+ r#"
+ struct FOO {}
+ static FOO:FOO = FOO {};
+
+ impl FOO {
+ fn foo(&self) {}
+ }
+
+ fn main() {
+ let a = &FOO;
+ a.foo();
+ }
+ "#,
+ expect![[r#"
+ 31..37 'FOO {}': FOO
+ 63..67 'self': &FOO
+ 69..71 '{}': ()
+ 85..119 '{ ...o(); }': ()
+ 95..96 'a': &FOO
+ 99..103 '&FOO': &FOO
+ 100..103 'FOO': FOO
+ 109..110 'a': &FOO
+ 109..116 'a.foo()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4465_dollar_crate_at_type() {
+ check_infer(
+ r#"
+ pub struct Foo {}
+ pub fn anything<T>() -> T {
+ loop {}
+ }
+ macro_rules! foo {
+ () => {{
+ let r: $crate::Foo = anything();
+ r
+ }};
+ }
+ fn main() {
+ let _a = foo!();
+ }
+ "#,
+ expect![[r#"
+ 44..59 '{ loop {} }': T
+ 50..57 'loop {}': !
+ 55..57 '{}': ()
+ !0..31 '{letr:...g();r}': Foo
+ !4..5 'r': Foo
+ !18..26 'anything': fn anything<Foo>() -> Foo
+ !18..28 'anything()': Foo
+ !29..30 'r': Foo
+ 163..187 '{ ...!(); }': ()
+ 173..175 '_a': Foo
+ "#]],
+ );
+}
+
+#[test]
+fn issue_6811() {
+ check_infer(
+ r#"
+ macro_rules! profile_function {
+ () => {
+ let _a = 1;
+ let _b = 1;
+ };
+ }
+ fn main() {
+ profile_function!();
+ }
+ "#,
+ expect![[r#"
+ !3..5 '_a': i32
+ !6..7 '1': i32
+ !11..13 '_b': i32
+ !14..15 '1': i32
+ 103..131 '{ ...!(); }': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4053_diesel_where_clauses() {
+ check_infer(
+ r#"
+ trait BoxedDsl<DB> {
+ type Output;
+ fn internal_into_boxed(self) -> Self::Output;
+ }
+
+ struct SelectStatement<From, Select, Distinct, Where, Order, LimitOffset, GroupBy, Locking> {
+ order: Order,
+ }
+
+ trait QueryFragment<DB: Backend> {}
+
+ trait Into<T> { fn into(self) -> T; }
+
+ impl<F, S, D, W, O, LOf, DB> BoxedDsl<DB>
+ for SelectStatement<F, S, D, W, O, LOf, G>
+ where
+ O: Into<dyn QueryFragment<DB>>,
+ {
+ type Output = XXX;
+
+ fn internal_into_boxed(self) -> Self::Output {
+ self.order.into();
+ }
+ }
+ "#,
+ expect![[r#"
+ 65..69 'self': Self
+ 267..271 'self': Self
+ 466..470 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
+ 488..522 '{ ... }': ()
+ 498..502 'self': SelectStatement<F, S, D, W, O, LOf, {unknown}, {unknown}>
+ 498..508 'self.order': O
+ 498..515 'self.o...into()': dyn QueryFragment<DB>
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4953() {
+ check_infer(
+ r#"
+ pub struct Foo(pub i64);
+ impl Foo {
+ fn test() -> Self { Self(0i64) }
+ }
+ "#,
+ expect![[r#"
+ 58..72 '{ Self(0i64) }': Foo
+ 60..64 'Self': Foo(i64) -> Foo
+ 60..70 'Self(0i64)': Foo
+ 65..69 '0i64': i64
+ "#]],
+ );
+ check_infer(
+ r#"
+ pub struct Foo<T>(pub T);
+ impl Foo<i64> {
+ fn test() -> Self { Self(0i64) }
+ }
+ "#,
+ expect![[r#"
+ 64..78 '{ Self(0i64) }': Foo<i64>
+ 66..70 'Self': Foo<i64>(i64) -> Foo<i64>
+ 66..76 'Self(0i64)': Foo<i64>
+ 71..75 '0i64': i64
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4931() {
+ check_infer(
+ r#"
+ trait Div<T> {
+ type Output;
+ }
+
+ trait CheckedDiv: Div<()> {}
+
+ trait PrimInt: CheckedDiv<Output = ()> {
+ fn pow(self);
+ }
+
+ fn check<T: PrimInt>(i: T) {
+ i.pow();
+ }
+ "#,
+ expect![[r#"
+ 117..121 'self': Self
+ 148..149 'i': T
+ 154..170 '{ ...w(); }': ()
+ 160..161 'i': T
+ 160..167 'i.pow()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4885() {
+ check_infer(
+ r#"
+ //- minicore: coerce_unsized, future
+ use core::future::Future;
+ trait Foo<R> {
+ type Bar;
+ }
+ fn foo<R, K>(key: &K) -> impl Future<Output = K::Bar>
+ where
+ K: Foo<R>,
+ {
+ bar(key)
+ }
+ fn bar<R, K>(key: &K) -> impl Future<Output = K::Bar>
+ where
+ K: Foo<R>,
+ {
+ }
+ "#,
+ expect![[r#"
+ 70..73 'key': &K
+ 132..148 '{ ...key) }': impl Future<Output = <K as Foo<R>>::Bar>
+ 138..141 'bar': fn bar<R, K>(&K) -> impl Future<Output = <K as Foo<R>>::Bar>
+ 138..146 'bar(key)': impl Future<Output = <K as Foo<R>>::Bar>
+ 142..145 'key': &K
+ 162..165 'key': &K
+ 224..227 '{ }': ()
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4800() {
+ check_infer(
+ r#"
+ trait Debug {}
+
+ struct Foo<T>;
+
+ type E1<T> = (T, T, T);
+ type E2<T> = E1<E1<E1<(T, T, T)>>>;
+
+ impl Debug for Foo<E2<()>> {}
+
+ struct Request;
+
+ pub trait Future {
+ type Output;
+ }
+
+ pub struct PeerSet<D>;
+
+ impl<D> Service<Request> for PeerSet<D>
+ where
+ D: Discover,
+ D::Key: Debug,
+ {
+ type Error = ();
+ type Future = dyn Future<Output = Self::Error>;
+
+ fn call(&mut self) -> Self::Future {
+ loop {}
+ }
+ }
+
+ pub trait Discover {
+ type Key;
+ }
+
+ pub trait Service<Request> {
+ type Error;
+ type Future: Future<Output = Self::Error>;
+ fn call(&mut self) -> Self::Future;
+ }
+ "#,
+ expect![[r#"
+ 379..383 'self': &mut PeerSet<D>
+ 401..424 '{ ... }': dyn Future<Output = ()>
+ 411..418 'loop {}': !
+ 416..418 '{}': ()
+ 575..579 'self': &mut Self
+ "#]],
+ );
+}
+
+#[test]
+fn issue_4966() {
+ check_infer(
+ r#"
+ //- minicore: deref
+ pub trait IntoIterator {
+ type Item;
+ }
+
+ struct Repeat<A> { element: A }
+
+ struct Map<F> { f: F }
+
+ struct Vec<T> {}
+
+ impl<T> core::ops::Deref for Vec<T> {
+ type Target = [T];
+ }
+
+ fn from_iter<A, T: IntoIterator<Item = A>>(iter: T) -> Vec<A> {}
+
+ fn main() {
+ let inner = Map { f: |_: &f64| 0.0 };
+
+ let repeat = Repeat { element: inner };
+
+ let vec = from_iter(repeat);
+
+ vec.foo_bar();
+ }
+ "#,
+ expect![[r#"
+ 225..229 'iter': T
+ 244..246 '{}': Vec<A>
+ 258..402 '{ ...r(); }': ()
+ 268..273 'inner': Map<|&f64| -> f64>
+ 276..300 'Map { ... 0.0 }': Map<|&f64| -> f64>
+ 285..298 '|_: &f64| 0.0': |&f64| -> f64
+ 286..287 '_': &f64
+ 295..298 '0.0': f64
+ 311..317 'repeat': Repeat<Map<|&f64| -> f64>>
+ 320..345 'Repeat...nner }': Repeat<Map<|&f64| -> f64>>
+ 338..343 'inner': Map<|&f64| -> f64>
+ 356..359 'vec': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 362..371 'from_iter': fn from_iter<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>, Repeat<Map<|&f64| -> f64>>>(Repeat<Map<|&f64| -> f64>>) -> Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 362..379 'from_i...epeat)': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 372..378 'repeat': Repeat<Map<|&f64| -> f64>>
+ 386..389 'vec': Vec<IntoIterator::Item<Repeat<Map<|&f64| -> f64>>>>
+ 386..399 'vec.foo_bar()': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn issue_6628() {
+ check_infer(
+ r#"
+//- minicore: fn
+struct S<T>();
+impl<T> S<T> {
+ fn f(&self, _t: T) {}
+ fn g<F: FnOnce(&T)>(&self, _f: F) {}
+}
+fn main() {
+ let s = S();
+ s.g(|_x| {});
+ s.f(10);
+}
+"#,
+ expect![[r#"
+ 40..44 'self': &S<T>
+ 46..48 '_t': T
+ 53..55 '{}': ()
+ 81..85 'self': &S<T>
+ 87..89 '_f': F
+ 94..96 '{}': ()
+ 109..160 '{ ...10); }': ()
+ 119..120 's': S<i32>
+ 123..124 'S': S<i32>() -> S<i32>
+ 123..126 'S()': S<i32>
+ 132..133 's': S<i32>
+ 132..144 's.g(|_x| {})': ()
+ 136..143 '|_x| {}': |&i32| -> ()
+ 137..139 '_x': &i32
+ 141..143 '{}': ()
+ 150..151 's': S<i32>
+ 150..157 's.f(10)': ()
+ 154..156 '10': i32
+ "#]],
+ );
+}
+
+#[test]
+fn issue_6852() {
+ check_infer(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+
+struct BufWriter {}
+
+struct Mutex<T> {}
+struct MutexGuard<'a, T> {}
+impl<T> Mutex<T> {
+ fn lock(&self) -> MutexGuard<'_, T> {}
+}
+impl<'a, T: 'a> Deref for MutexGuard<'a, T> {
+ type Target = T;
+}
+fn flush(&self) {
+ let w: &Mutex<BufWriter>;
+ *(w.lock());
+}
+"#,
+ expect![[r#"
+ 123..127 'self': &Mutex<T>
+ 150..152 '{}': MutexGuard<T>
+ 234..238 'self': &{unknown}
+ 240..290 '{ ...()); }': ()
+ 250..251 'w': &Mutex<BufWriter>
+ 276..287 '*(w.lock())': BufWriter
+ 278..279 'w': &Mutex<BufWriter>
+ 278..286 'w.lock()': MutexGuard<BufWriter>
+ "#]],
+ );
+}
+
+#[test]
+fn param_overrides_fn() {
+ check_types(
+ r#"
+ fn example(example: i32) {
+ fn f() {}
+ example;
+ //^^^^^^^ i32
+ }
+ "#,
+ )
+}
+
+#[test]
+fn lifetime_from_chalk_during_deref() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Box<T: ?Sized> {}
+impl<T: ?Sized> core::ops::Deref for Box<T> {
+ type Target = T;
+
+ fn deref(&self) -> &Self::Target {
+ loop {}
+ }
+}
+
+trait Iterator {
+ type Item;
+}
+
+pub struct Iter<'a, T: 'a> {
+ inner: Box<dyn IterTrait<'a, T, Item = &'a T> + 'a>,
+}
+
+trait IterTrait<'a, T: 'a>: Iterator<Item = &'a T> {
+ fn clone_box(&self);
+}
+
+fn clone_iter<T>(s: Iter<T>) {
+ s.inner.clone_box();
+ //^^^^^^^^^^^^^^^^^^^ ()
+}
+"#,
+ )
+}
+
+#[test]
+fn issue_8686() {
+ check_infer(
+ r#"
+pub trait Try: FromResidual {
+ type Output;
+ type Residual;
+}
+pub trait FromResidual<R = <Self as Try>::Residual> {
+ fn from_residual(residual: R) -> Self;
+}
+
+struct ControlFlow<B, C>;
+impl<B, C> Try for ControlFlow<B, C> {
+ type Output = C;
+ type Residual = ControlFlow<B, !>;
+}
+impl<B, C> FromResidual for ControlFlow<B, C> {
+ fn from_residual(r: ControlFlow<B, !>) -> Self { ControlFlow }
+}
+
+fn test() {
+ ControlFlow::from_residual(ControlFlow::<u32, !>);
+}
+ "#,
+ expect![[r#"
+ 144..152 'residual': R
+ 365..366 'r': ControlFlow<B, !>
+ 395..410 '{ ControlFlow }': ControlFlow<B, C>
+ 397..408 'ControlFlow': ControlFlow<B, C>
+ 424..482 '{ ...!>); }': ()
+ 430..456 'Contro...sidual': fn from_residual<ControlFlow<u32, {unknown}>, ControlFlow<u32, !>>(ControlFlow<u32, !>) -> ControlFlow<u32, {unknown}>
+ 430..479 'Contro...2, !>)': ControlFlow<u32, {unknown}>
+ 457..478 'Contro...32, !>': ControlFlow<u32, !>
+ "#]],
+ );
+}
+
+#[test]
+fn cfg_tail() {
+ // https://github.com/rust-lang/rust-analyzer/issues/8378
+ check_infer(
+ r#"
+ fn fake_tail(){
+ { "first" }
+ #[cfg(never)] 9
+ }
+ fn multiple_fake(){
+ { "fake" }
+ { "fake" }
+ { "second" }
+ #[cfg(never)] { 11 }
+ #[cfg(never)] 12;
+ #[cfg(never)] 13
+ }
+ fn no_normal_tail(){
+ { "third" }
+ #[cfg(never)] 14;
+ #[cfg(never)] 15;
+ }
+ fn no_actual_tail(){
+ { "fourth" };
+ #[cfg(never)] 14;
+ #[cfg(never)] 15
+ }
+ "#,
+ expect![[r#"
+ 14..53 '{ ...)] 9 }': ()
+ 20..31 '{ "first" }': ()
+ 22..29 '"first"': &str
+ 72..190 '{ ...] 13 }': ()
+ 78..88 '{ "fake" }': &str
+ 80..86 '"fake"': &str
+ 93..103 '{ "fake" }': &str
+ 95..101 '"fake"': &str
+ 108..120 '{ "second" }': ()
+ 110..118 '"second"': &str
+ 210..273 '{ ... 15; }': ()
+ 216..227 '{ "third" }': ()
+ 218..225 '"third"': &str
+ 293..357 '{ ...] 15 }': ()
+ 299..311 '{ "fourth" }': &str
+ 301..309 '"fourth"': &str
+ "#]],
+ )
+}
+
+#[test]
+fn impl_trait_in_option_9530() {
+ check_types(
+ r#"
+//- minicore: sized
+struct Option<T>;
+impl<T> Option<T> {
+ fn unwrap(self) -> T { loop {} }
+}
+fn make() -> Option<impl Copy> { Option }
+trait Copy {}
+fn test() {
+ let o = make();
+ o.unwrap();
+ //^^^^^^^^^^ impl Copy
+}
+ "#,
+ )
+}
+
+#[test]
+fn bare_dyn_trait_binders_9639() {
+ check_no_mismatches(
+ r#"
+//- minicore: fn, coerce_unsized
+fn infix_parse<T, S>(_state: S, _level_code: &Fn(S)) -> T {
+ loop {}
+}
+
+fn parse_arule() {
+ infix_parse((), &(|_recurse| ()))
+}
+ "#,
+ )
+}
+
+#[test]
+fn call_expected_type_closure() {
+ check_types(
+ r#"
+//- minicore: fn, option
+
+fn map<T, U>(o: Option<T>, f: impl FnOnce(T) -> U) -> Option<U> { loop {} }
+struct S {
+ field: u32
+}
+
+fn test() {
+ let o = Some(S { field: 2 });
+ let _: Option<()> = map(o, |s| { s.field; });
+ // ^^^^^^^ u32
+}
+ "#,
+ );
+}
+
+#[test]
+fn coerce_diesel_panic() {
+ check_no_mismatches(
+ r#"
+//- minicore: option
+
+trait TypeMetadata {
+ type MetadataLookup;
+}
+
+pub struct Output<'a, T, DB>
+where
+ DB: TypeMetadata,
+ DB::MetadataLookup: 'a,
+{
+ out: T,
+ metadata_lookup: Option<&'a DB::MetadataLookup>,
+}
+
+impl<'a, T, DB: TypeMetadata> Output<'a, T, DB> {
+ pub fn new(out: T, metadata_lookup: &'a DB::MetadataLookup) -> Self {
+ Output {
+ out,
+ metadata_lookup: Some(metadata_lookup),
+ }
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn bitslice_panic() {
+ check_no_mismatches(
+ r#"
+//- minicore: option, deref
+
+pub trait BitView {
+ type Store;
+}
+
+pub struct Lsb0;
+
+pub struct BitArray<V: BitView> { }
+
+pub struct BitSlice<T> { }
+
+impl<V: BitView> core::ops::Deref for BitArray<V> {
+ type Target = BitSlice<V::Store>;
+}
+
+impl<T> BitSlice<T> {
+ pub fn split_first(&self) -> Option<(T, &Self)> { loop {} }
+}
+
+fn multiexp_inner() {
+ let exp: &BitArray<Foo>;
+ exp.split_first();
+}
+ "#,
+ );
+}
+
+#[test]
+fn macro_expands_to_impl_trait() {
+ check_no_mismatches(
+ r#"
+trait Foo {}
+
+macro_rules! ty {
+ () => {
+ impl Foo
+ }
+}
+
+fn foo(_: ty!()) {}
+
+fn bar() {
+ foo(());
+}
+ "#,
+ )
+}
+
+#[test]
+fn nested_macro_in_fn_params() {
+ check_no_mismatches(
+ r#"
+macro_rules! U32Inner {
+ () => {
+ u32
+ };
+}
+
+macro_rules! U32 {
+ () => {
+ U32Inner!()
+ };
+}
+
+fn mamba(a: U32!(), p: u32) -> u32 {
+ a
+}
+ "#,
+ )
+}
+
+#[test]
+fn for_loop_block_expr_iterable() {
+ check_infer(
+ r#"
+fn test() {
+ for _ in { let x = 0; } {
+ let y = 0;
+ }
+}
+ "#,
+ expect![[r#"
+ 10..68 '{ ... } }': ()
+ 16..66 'for _ ... }': ()
+ 20..21 '_': {unknown}
+ 25..39 '{ let x = 0; }': ()
+ 31..32 'x': i32
+ 35..36 '0': i32
+ 40..66 '{ ... }': ()
+ 54..55 'y': i32
+ 58..59 '0': i32
+ "#]],
+ );
+}
+
+#[test]
+fn while_loop_block_expr_iterable() {
+ check_infer(
+ r#"
+fn test() {
+ while { true } {
+ let y = 0;
+ }
+}
+ "#,
+ expect![[r#"
+ 10..59 '{ ... } }': ()
+ 16..57 'while ... }': ()
+ 22..30 '{ true }': bool
+ 24..28 'true': bool
+ 31..57 '{ ... }': ()
+ 45..46 'y': i32
+ 49..50 '0': i32
+ "#]],
+ );
+}
+
+#[test]
+fn bug_11242() {
+ // FIXME: wrong, should be u32
+ check_types(
+ r#"
+fn foo<A, B>()
+where
+ A: IntoIterator<Item = u32>,
+ B: IntoIterator<Item = usize>,
+{
+ let _x: <A as IntoIterator>::Item;
+ // ^^ {unknown}
+}
+
+pub trait Iterator {
+ type Item;
+}
+
+pub trait IntoIterator {
+ type Item;
+ type IntoIter: Iterator<Item = Self::Item>;
+}
+
+impl<I: Iterator> IntoIterator for I {
+ type Item = I::Item;
+ type IntoIter = I;
+}
+"#,
+ );
+}
+
+#[test]
+fn bug_11659() {
+ check_no_mismatches(
+ r#"
+struct LinkArray<const N: usize, LD>(LD);
+fn f<const N: usize, LD>(x: LD) -> LinkArray<N, LD> {
+ let r = LinkArray::<N, LD>(x);
+ r
+}
+
+fn test() {
+ let x = f::<2, i32>(5);
+ let y = LinkArray::<52, LinkArray<2, i32>>(x);
+}
+ "#,
+ );
+ check_no_mismatches(
+ r#"
+struct LinkArray<LD, const N: usize>(LD);
+fn f<const N: usize, LD>(x: LD) -> LinkArray<LD, N> {
+ let r = LinkArray::<LD, N>(x);
+ r
+}
+
+fn test() {
+ let x = f::<i32, 2>(5);
+ let y = LinkArray::<LinkArray<i32, 2>, 52>(x);
+}
+ "#,
+ );
+}
+
+#[test]
+fn const_generic_error_tolerance() {
+ check_no_mismatches(
+ r#"
+#[lang = "sized"]
+pub trait Sized {}
+
+struct CT<const N: usize, T>(T);
+struct TC<T, const N: usize>(T);
+fn f<const N: usize, T>(x: T) -> (CT<N, T>, TC<T, N>) {
+ let l = CT::<N, T>(x);
+ let r = TC::<N, T>(x);
+ (l, r)
+}
+
+trait TR1<const N: usize>;
+trait TR2<const N: usize>;
+
+impl<const N: usize, T> TR1<N> for CT<N, T>;
+impl<const N: usize, T> TR1<5> for TC<T, N>;
+impl<const N: usize, T> TR2<N> for CT<T, N>;
+
+trait TR3<const N: usize> {
+ fn tr3(&self) -> &Self;
+}
+
+impl<const N: usize, T> TR3<5> for TC<T, N> {
+ fn tr3(&self) -> &Self {
+ self
+ }
+}
+
+impl<const N: usize, T> TR3<Item = 5> for TC<T, N> {}
+impl<const N: usize, T> TR3<T> for TC<T, N> {}
+
+fn impl_trait<const N: usize>(inp: impl TR1<N>) {}
+fn dyn_trait<const N: usize>(inp: &dyn TR2<N>) {}
+fn impl_trait_bad<'a, const N: usize>(inp: impl TR1<i32>) -> impl TR1<'a, i32> {}
+fn impl_trait_very_bad<const N: usize>(inp: impl TR1<Item = i32>) -> impl TR1<'a, Item = i32, 5, Foo = N> {}
+
+fn test() {
+ f::<2, i32>(5);
+ f::<2, 2>(5);
+ f(5);
+ f::<i32>(5);
+ CT::<52, CT<2, i32>>(x);
+ CT::<CT<2, i32>>(x);
+ impl_trait_bad(5);
+ impl_trait_bad(12);
+ TR3<5>::tr3();
+ TR3<{ 2+3 }>::tr3();
+ TC::<i32, 10>(5).tr3();
+ TC::<i32, 20>(5).tr3();
+ TC::<i32, i32>(5).tr3();
+ TC::<i32, { 7 + 3 }>(5).tr3();
+}
+ "#,
+ );
+}
+
+#[test]
+fn const_generic_impl_trait() {
+ check_no_mismatches(
+ r#"
+ //- minicore: from
+
+ struct Foo<T, const M: usize>;
+
+ trait Tr<T> {
+ fn f(T) -> Self;
+ }
+
+ impl<T, const M: usize> Tr<[T; M]> for Foo<T, M> {
+ fn f(_: [T; M]) -> Self {
+ Self
+ }
+ }
+
+ fn test() {
+ Foo::f([1, 2, 7, 10]);
+ }
+ "#,
+ );
+}
+
+#[test]
+fn nalgebra_factorial() {
+ check_no_mismatches(
+ r#"
+ const FACTORIAL: [u128; 4] = [1, 1, 2, 6];
+
+ fn factorial(n: usize) -> u128 {
+ match FACTORIAL.get(n) {
+ Some(f) => *f,
+ None => panic!("{}! is greater than u128::MAX", n),
+ }
+ }
+ "#,
+ )
+}
+
+#[test]
+fn regression_11688_1() {
+ check_no_mismatches(
+ r#"
+ pub struct Buffer<T>(T);
+ type Writer = Buffer<u8>;
+ impl<T> Buffer<T> {
+ fn extend_from_array<const N: usize>(&mut self, xs: &[T; N]) {
+ loop {}
+ }
+ }
+ trait Encode<S> {
+ fn encode(self, w: &mut Writer, s: &mut S);
+ }
+ impl<S> Encode<S> for u8 {
+ fn encode(self, w: &mut Writer, _: &mut S) {
+ w.extend_from_array(&self.to_le_bytes());
+ }
+ }
+ "#,
+ );
+}
+
+#[test]
+fn regression_11688_2() {
+ check_types(
+ r#"
+ union MaybeUninit<T> {
+ uninit: (),
+ value: T,
+ }
+
+ impl<T> MaybeUninit<T> {
+ fn uninit_array<const LEN: usize>() -> [Self; LEN] {
+ loop {}
+ }
+ }
+
+ fn main() {
+ let x = MaybeUninit::<i32>::uninit_array::<1>();
+ //^ [MaybeUninit<i32>; 1]
+ }
+ "#,
+ );
+}
+
+#[test]
+fn regression_11688_3() {
+ check_types(
+ r#"
+ //- minicore: iterator
+ struct Ar<T, const N: u8>(T);
+ fn f<const LEN: usize, T, const BASE: u8>(
+ num_zeros: usize,
+ ) -> dyn Iterator<Item = [Ar<T, BASE>; LEN]> {
+ loop {}
+ }
+ fn dynamic_programming() {
+ for board in f::<9, u8, 7>(1) {
+ //^^^^^ [Ar<u8, 7>; 9]
+ }
+ }
+ "#,
+ );
+}
+
+#[test]
+fn regression_11688_4() {
+ check_types(
+ r#"
+ trait Bar<const C: usize> {
+ fn baz(&self) -> [i32; C];
+ }
+
+ fn foo(x: &dyn Bar<2>) {
+ x.baz();
+ //^^^^^^^ [i32; 2]
+ }
+ "#,
+ )
+}
+
+#[test]
+fn gat_crash_1() {
+ check_no_mismatches(
+ r#"
+trait ATrait {}
+
+trait Crash {
+ type Member<const N: usize>: ATrait;
+ fn new<const N: usize>() -> Self::Member<N>;
+}
+
+fn test<T: Crash>() {
+ T::new();
+}
+"#,
+ );
+}
+
+#[test]
+fn gat_crash_2() {
+ check_no_mismatches(
+ r#"
+pub struct InlineStorage {}
+
+pub struct InlineStorageHandle<T: ?Sized> {}
+
+pub unsafe trait Storage {
+ type Handle<T: ?Sized>;
+ fn create<T: ?Sized>() -> Self::Handle<T>;
+}
+
+unsafe impl Storage for InlineStorage {
+ type Handle<T: ?Sized> = InlineStorageHandle<T>;
+}
+"#,
+ );
+}
+
+#[test]
+fn gat_crash_3() {
+ check_no_mismatches(
+ r#"
+trait Collection {
+type Item;
+type Member<T>: Collection<Item = T>;
+fn add(&mut self, value: Self::Item) -> Result<(), Self::Error>;
+}
+struct ConstGen<T, const N: usize> {
+data: [T; N],
+}
+impl<T, const N: usize> Collection for ConstGen<T, N> {
+type Item = T;
+type Member<U> = ConstGen<U, N>;
+}
+ "#,
+ );
+}
+
+#[test]
+fn cfgd_out_self_param() {
+ cov_mark::check!(cfgd_out_self_param);
+ check_no_mismatches(
+ r#"
+struct S;
+impl S {
+ fn f(#[cfg(never)] &self) {}
+}
+
+fn f(s: S) {
+ s.f();
+}
+"#,
+ );
+}
+
+#[test]
+fn rust_161_option_clone() {
+ check_types(
+ r#"
+//- minicore: option, drop
+
+fn test(o: &Option<i32>) {
+ o.my_clone();
+ //^^^^^^^^^^^^ Option<i32>
+}
+
+pub trait MyClone: Sized {
+ fn my_clone(&self) -> Self;
+}
+
+impl<T> const MyClone for Option<T>
+where
+ T: ~const MyClone + ~const Drop + ~const Destruct,
+{
+ fn my_clone(&self) -> Self {
+ match self {
+ Some(x) => Some(x.my_clone()),
+ None => None,
+ }
+ }
+}
+
+impl const MyClone for i32 {
+ fn my_clone(&self) -> Self {
+ *self
+ }
+}
+
+pub trait Destruct {}
+
+impl<T: ?Sized> const Destruct for T {}
+"#,
+ );
+}
+
+#[test]
+fn rust_162_option_clone() {
+ check_types(
+ r#"
+//- minicore: option, drop
+
+fn test(o: &Option<i32>) {
+ o.my_clone();
+ //^^^^^^^^^^^^ Option<i32>
+}
+
+pub trait MyClone: Sized {
+ fn my_clone(&self) -> Self;
+}
+
+impl<T> const MyClone for Option<T>
+where
+ T: ~const MyClone + ~const Destruct,
+{
+ fn my_clone(&self) -> Self {
+ match self {
+ Some(x) => Some(x.my_clone()),
+ None => None,
+ }
+ }
+}
+
+impl const MyClone for i32 {
+ fn my_clone(&self) -> Self {
+ *self
+ }
+}
+
+#[lang = "destruct"]
+pub trait Destruct {}
+"#,
+ );
+}
+
+#[test]
+fn tuple_struct_pattern_with_unmatched_args_crash() {
+ check_infer(
+ r#"
+struct S(usize);
+fn main() {
+ let S(.., a, b) = S(1);
+ let (.., a, b) = (1,);
+}
+ "#,
+ expect![[r#"
+ 27..85 '{ ...1,); }': ()
+ 37..48 'S(.., a, b)': S
+ 43..44 'a': usize
+ 46..47 'b': {unknown}
+ 51..52 'S': S(usize) -> S
+ 51..55 'S(1)': S
+ 53..54 '1': usize
+ 65..75 '(.., a, b)': (i32, {unknown})
+ 70..71 'a': i32
+ 73..74 'b': {unknown}
+ 78..82 '(1,)': (i32,)
+ 79..80 '1': i32
+ "#]],
+ );
+}
+
+#[test]
+fn trailing_empty_macro() {
+ check_no_mismatches(
+ r#"
+macro_rules! m2 {
+ ($($t:tt)*) => {$($t)*};
+}
+
+fn macrostmts() -> u8 {
+ m2! { 0 }
+ m2! {}
+}
+ "#,
+ );
+}
+
+#[test]
+fn dyn_with_unresolved_trait() {
+ check_types(
+ r#"
+fn foo(a: &dyn DoesNotExist) {
+ a.bar();
+ //^&{unknown}
+}
+ "#,
+ );
+}
+
+#[test]
+fn self_assoc_with_const_generics_crash() {
+ check_no_mismatches(
+ r#"
+trait Trait { type Item; }
+impl<T, const N: usize> Trait for [T; N] {
+ type Item = ();
+ fn f<U>(_: Self::Item) {}
+}
+ "#,
+ );
+}
++
++#[test]
++fn unsize_array_with_inference_variable() {
++ check_types(
++ r#"
++//- minicore: try, slice
++use core::ops::ControlFlow;
++fn foo() -> ControlFlow<(), [usize; 1]> { loop {} }
++fn bar() -> ControlFlow<(), ()> {
++ let a = foo()?.len();
++ //^ usize
++ ControlFlow::Continue(())
++}
++"#,
++ );
++}
--- /dev/null
- fn c() -> u32 { 1 }
+use expect_test::expect;
+
+use super::{check, check_infer, check_no_mismatches, check_types};
+
+#[test]
+fn infer_box() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+fn test() {
+ let x = box 1;
+ let t = (x, box x, box &1, box [1]);
+ t;
+} //^ (Box<i32>, Box<Box<i32>>, Box<&i32>, Box<[i32; 1]>)
+
+//- /std.rs crate:std
+#[prelude_import] use prelude::*;
+mod prelude {}
+
+mod boxed {
+ #[lang = "owned_box"]
+ pub struct Box<T: ?Sized> {
+ inner: *mut T,
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_box_with_allocator() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+fn test() {
+ let x = box 1;
+ let t = (x, box x, box &1, box [1]);
+ t;
+} //^ (Box<i32, {unknown}>, Box<Box<i32, {unknown}>, {unknown}>, Box<&i32, {unknown}>, Box<[i32; 1], {unknown}>)
+
+//- /std.rs crate:std
+#[prelude_import] use prelude::*;
+mod boxed {
+ #[lang = "owned_box"]
+ pub struct Box<T: ?Sized, A: Allocator> {
+ inner: *mut T,
+ allocator: A,
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_adt_self() {
+ check_types(
+ r#"
+enum Nat { Succ(Self), Demo(Nat), Zero }
+
+fn test() {
+ let foo: Nat = Nat::Zero;
+ if let Nat::Succ(x) = foo {
+ x;
+ } //^ Nat
+}
+"#,
+ );
+}
+
+#[test]
+fn self_in_struct_lit() {
+ check_infer(
+ r#"
+ //- /main.rs
+ struct S<T> { x: T }
+
+ impl S<u32> {
+ fn foo() {
+ Self { x: 1 };
+ }
+ }
+ "#,
+ expect![[r#"
+ 49..79 '{ ... }': ()
+ 59..72 'Self { x: 1 }': S<u32>
+ 69..70 '1': u32
+ "#]],
+ );
+}
+
+#[test]
+fn type_alias_in_struct_lit() {
+ check_infer(
+ r#"
+ //- /main.rs
+ struct S<T> { x: T }
+
+ type SS = S<u32>;
+
+ fn foo() {
+ SS { x: 1 };
+ }
+ "#,
+ expect![[r#"
+ 50..70 '{ ...1 }; }': ()
+ 56..67 'SS { x: 1 }': S<u32>
+ 64..65 '1': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_ranges() {
+ check_types(
+ r#"
+//- minicore: range
+fn test() {
+ let a = ..;
+ let b = 1..;
+ let c = ..2u32;
+ let d = 1..2usize;
+ let e = ..=10;
+ let f = 'a'..='z';
+
+ let t = (a, b, c, d, e, f);
+ t;
+} //^ (RangeFull, RangeFrom<i32>, RangeTo<u32>, Range<usize>, RangeToInclusive<i32>, RangeInclusive<char>)
+"#,
+ );
+}
+
+#[test]
+fn infer_while_let() {
+ check_types(
+ r#"
+enum Option<T> { Some(T), None }
+
+fn test() {
+ let foo: Option<f32> = None;
+ while let Option::Some(x) = foo {
+ x;
+ } //^ f32
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_basics() {
+ check_infer(
+ r#"
+fn test(a: u32, b: isize, c: !, d: &str) {
+ a;
+ b;
+ c;
+ d;
+ 1usize;
+ 1isize;
+ "test";
+ 1.0f32;
+}
+"#,
+ expect![[r#"
+ 8..9 'a': u32
+ 16..17 'b': isize
+ 26..27 'c': !
+ 32..33 'd': &str
+ 41..120 '{ ...f32; }': ()
+ 47..48 'a': u32
+ 54..55 'b': isize
+ 61..62 'c': !
+ 68..69 'd': &str
+ 75..81 '1usize': usize
+ 87..93 '1isize': isize
+ 99..105 '"test"': &str
+ 111..117 '1.0f32': f32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_let() {
+ check_infer(
+ r#"
+fn test() {
+ let a = 1isize;
+ let b: usize = 1;
+ let c = b;
+ let d: u32;
+ let e;
+ let f: i32 = e;
+}
+"#,
+ expect![[r#"
+ 10..117 '{ ...= e; }': ()
+ 20..21 'a': isize
+ 24..30 '1isize': isize
+ 40..41 'b': usize
+ 51..52 '1': usize
+ 62..63 'c': usize
+ 66..67 'b': usize
+ 77..78 'd': u32
+ 93..94 'e': i32
+ 104..105 'f': i32
+ 113..114 'e': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_paths() {
+ check_infer(
+ r#"
+fn a() -> u32 { 1 }
+
+mod b {
- 47..52 '{ 1 }': u32
- 49..50 '1': u32
- 66..90 '{ ...c(); }': ()
- 72..73 'a': fn a() -> u32
- 72..75 'a()': u32
- 81..85 'b::c': fn c() -> u32
- 81..87 'b::c()': u32
++ pub fn c() -> u32 { 1 }
+}
+
+fn test() {
+ a();
+ b::c();
+}
+"#,
+ expect![[r#"
+ 14..19 '{ 1 }': u32
+ 16..17 '1': u32
- fn foo() -> u32 {0}
++ 51..56 '{ 1 }': u32
++ 53..54 '1': u32
++ 70..94 '{ ...c(); }': ()
++ 76..77 'a': fn a() -> u32
++ 76..79 'a()': u32
++ 85..89 'b::c': fn c() -> u32
++ 85..91 'b::c()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_path_type() {
+ check_infer(
+ r#"
+struct S;
+
+impl S {
+ fn foo() -> i32 { 1 }
+}
+
+fn test() {
+ S::foo();
+ <S>::foo();
+}
+"#,
+ expect![[r#"
+ 40..45 '{ 1 }': i32
+ 42..43 '1': i32
+ 59..92 '{ ...o(); }': ()
+ 65..71 'S::foo': fn foo() -> i32
+ 65..73 'S::foo()': i32
+ 79..87 '<S>::foo': fn foo() -> i32
+ 79..89 '<S>::foo()': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_struct() {
+ check_infer(
+ r#"
+struct A {
+ b: B,
+ c: C,
+}
+struct B;
+struct C(usize);
+
+fn test() {
+ let c = C(1);
+ B;
+ let a: A = A { b: B, c: C(1) };
+ a.b;
+ a.c;
+}
+"#,
+ expect![[r#"
+ 71..153 '{ ...a.c; }': ()
+ 81..82 'c': C
+ 85..86 'C': C(usize) -> C
+ 85..89 'C(1)': C
+ 87..88 '1': usize
+ 95..96 'B': B
+ 106..107 'a': A
+ 113..132 'A { b:...C(1) }': A
+ 120..121 'B': B
+ 126..127 'C': C(usize) -> C
+ 126..130 'C(1)': C
+ 128..129 '1': usize
+ 138..139 'a': A
+ 138..141 'a.b': B
+ 147..148 'a': A
+ 147..150 'a.c': C
+ "#]],
+ );
+}
+
+#[test]
+fn infer_enum() {
+ check_infer(
+ r#"
+enum E {
+ V1 { field: u32 },
+ V2
+}
+fn test() {
+ E::V1 { field: 1 };
+ E::V2;
+}
+"#,
+ expect![[r#"
+ 51..89 '{ ...:V2; }': ()
+ 57..75 'E::V1 ...d: 1 }': E
+ 72..73 '1': u32
+ 81..86 'E::V2': E
+ "#]],
+ );
+}
+
+#[test]
+fn infer_union() {
+ check_infer(
+ r#"
+union MyUnion {
+ foo: u32,
+ bar: f32,
+}
+
+fn test() {
+ let u = MyUnion { foo: 0 };
+ unsafe { baz(u); }
+ let u = MyUnion { bar: 0.0 };
+ unsafe { baz(u); }
+}
+
+unsafe fn baz(u: MyUnion) {
+ let inner = u.foo;
+ let inner = u.bar;
+}
+"#,
+ expect![[r#"
+ 57..172 '{ ...); } }': ()
+ 67..68 'u': MyUnion
+ 71..89 'MyUnio...o: 0 }': MyUnion
+ 86..87 '0': u32
+ 95..113 'unsafe...(u); }': ()
+ 95..113 'unsafe...(u); }': ()
+ 104..107 'baz': fn baz(MyUnion)
+ 104..110 'baz(u)': ()
+ 108..109 'u': MyUnion
+ 122..123 'u': MyUnion
+ 126..146 'MyUnio... 0.0 }': MyUnion
+ 141..144 '0.0': f32
+ 152..170 'unsafe...(u); }': ()
+ 152..170 'unsafe...(u); }': ()
+ 161..164 'baz': fn baz(MyUnion)
+ 161..167 'baz(u)': ()
+ 165..166 'u': MyUnion
+ 188..189 'u': MyUnion
+ 200..249 '{ ...bar; }': ()
+ 210..215 'inner': u32
+ 218..219 'u': MyUnion
+ 218..223 'u.foo': u32
+ 233..238 'inner': f32
+ 241..242 'u': MyUnion
+ 241..246 'u.bar': f32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_refs() {
+ check_infer(
+ r#"
+fn test(a: &u32, b: &mut u32, c: *const u32, d: *mut u32) {
+ a;
+ *a;
+ &a;
+ &mut a;
+ b;
+ *b;
+ &b;
+ c;
+ *c;
+ d;
+ *d;
+}
+ "#,
+ expect![[r#"
+ 8..9 'a': &u32
+ 17..18 'b': &mut u32
+ 30..31 'c': *const u32
+ 45..46 'd': *mut u32
+ 58..149 '{ ... *d; }': ()
+ 64..65 'a': &u32
+ 71..73 '*a': u32
+ 72..73 'a': &u32
+ 79..81 '&a': &&u32
+ 80..81 'a': &u32
+ 87..93 '&mut a': &mut &u32
+ 92..93 'a': &u32
+ 99..100 'b': &mut u32
+ 106..108 '*b': u32
+ 107..108 'b': &mut u32
+ 114..116 '&b': &&mut u32
+ 115..116 'b': &mut u32
+ 122..123 'c': *const u32
+ 129..131 '*c': u32
+ 130..131 'c': *const u32
+ 137..138 'd': *mut u32
+ 144..146 '*d': u32
+ 145..146 'd': *mut u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_raw_ref() {
+ check_infer(
+ r#"
+fn test(a: i32) {
+ &raw mut a;
+ &raw const a;
+}
+"#,
+ expect![[r#"
+ 8..9 'a': i32
+ 16..53 '{ ...t a; }': ()
+ 22..32 '&raw mut a': *mut i32
+ 31..32 'a': i32
+ 38..50 '&raw const a': *const i32
+ 49..50 'a': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_literals() {
+ check_infer(
+ r##"
+ fn test() {
+ 5i32;
+ 5f32;
+ 5f64;
+ "hello";
+ b"bytes";
+ 'c';
+ b'b';
+ 3.14;
+ 5000;
+ false;
+ true;
+ r#"
+ //! doc
+ // non-doc
+ mod foo {}
+ "#;
+ br#"yolo"#;
+ let a = b"a\x20b\
+ c";
+ let b = br"g\
+h";
+ let c = br#"x"\"yb"#;
+ }
+ "##,
+ expect![[r##"
+ 18..478 '{ ... }': ()
+ 32..36 '5i32': i32
+ 50..54 '5f32': f32
+ 68..72 '5f64': f64
+ 86..93 '"hello"': &str
+ 107..115 'b"bytes"': &[u8; 5]
+ 129..132 ''c'': char
+ 146..150 'b'b'': u8
+ 164..168 '3.14': f64
+ 182..186 '5000': i32
+ 200..205 'false': bool
+ 219..223 'true': bool
+ 237..333 'r#" ... "#': &str
+ 347..357 'br#"yolo"#': &[u8; 4]
+ 375..376 'a': &[u8; 4]
+ 379..403 'b"a\x2... c"': &[u8; 4]
+ 421..422 'b': &[u8; 4]
+ 425..433 'br"g\ h"': &[u8; 4]
+ 451..452 'c': &[u8; 6]
+ 455..467 'br#"x"\"yb"#': &[u8; 6]
+ "##]],
+ );
+}
+
+#[test]
+fn infer_unary_op() {
+ check_infer(
+ r#"
+enum SomeType {}
+
+fn test(x: SomeType) {
+ let b = false;
+ let c = !b;
+ let a = 100;
+ let d: i128 = -a;
+ let e = -100;
+ let f = !!!true;
+ let g = !42;
+ let h = !10u32;
+ let j = !a;
+ -3.14;
+ !3;
+ -x;
+ !x;
+ -"hello";
+ !"hello";
+}
+"#,
+ expect![[r#"
+ 26..27 'x': SomeType
+ 39..271 '{ ...lo"; }': ()
+ 49..50 'b': bool
+ 53..58 'false': bool
+ 68..69 'c': bool
+ 72..74 '!b': bool
+ 73..74 'b': bool
+ 84..85 'a': i128
+ 88..91 '100': i128
+ 101..102 'd': i128
+ 111..113 '-a': i128
+ 112..113 'a': i128
+ 123..124 'e': i32
+ 127..131 '-100': i32
+ 128..131 '100': i32
+ 141..142 'f': bool
+ 145..152 '!!!true': bool
+ 146..152 '!!true': bool
+ 147..152 '!true': bool
+ 148..152 'true': bool
+ 162..163 'g': i32
+ 166..169 '!42': i32
+ 167..169 '42': i32
+ 179..180 'h': u32
+ 183..189 '!10u32': u32
+ 184..189 '10u32': u32
+ 199..200 'j': i128
+ 203..205 '!a': i128
+ 204..205 'a': i128
+ 211..216 '-3.14': f64
+ 212..216 '3.14': f64
+ 222..224 '!3': i32
+ 223..224 '3': i32
+ 230..232 '-x': {unknown}
+ 231..232 'x': SomeType
+ 238..240 '!x': {unknown}
+ 239..240 'x': SomeType
+ 246..254 '-"hello"': {unknown}
+ 247..254 '"hello"': &str
+ 260..268 '!"hello"': {unknown}
+ 261..268 '"hello"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_backwards() {
+ check_infer(
+ r#"
+fn takes_u32(x: u32) {}
+
+struct S { i32_field: i32 }
+
+fn test() -> &mut &f64 {
+ let a = unknown_function();
+ takes_u32(a);
+ let b = unknown_function();
+ S { i32_field: b };
+ let c = unknown_function();
+ &mut &c
+}
+"#,
+ expect![[r#"
+ 13..14 'x': u32
+ 21..23 '{}': ()
+ 77..230 '{ ...t &c }': &mut &f64
+ 87..88 'a': u32
+ 91..107 'unknow...nction': {unknown}
+ 91..109 'unknow...tion()': u32
+ 115..124 'takes_u32': fn takes_u32(u32)
+ 115..127 'takes_u32(a)': ()
+ 125..126 'a': u32
+ 137..138 'b': i32
+ 141..157 'unknow...nction': {unknown}
+ 141..159 'unknow...tion()': i32
+ 165..183 'S { i3...d: b }': S
+ 180..181 'b': i32
+ 193..194 'c': f64
+ 197..213 'unknow...nction': {unknown}
+ 197..215 'unknow...tion()': f64
+ 221..228 '&mut &c': &mut &f64
+ 226..228 '&c': &f64
+ 227..228 'c': f64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_self() {
+ check_infer(
+ r#"
+struct S;
+
+impl S {
+ fn test(&self) {
+ self;
+ }
+ fn test2(self: &Self) {
+ self;
+ }
+ fn test3() -> Self {
+ S {}
+ }
+ fn test4() -> Self {
+ Self {}
+ }
+}
+"#,
+ expect![[r#"
+ 33..37 'self': &S
+ 39..60 '{ ... }': ()
+ 49..53 'self': &S
+ 74..78 'self': &S
+ 87..108 '{ ... }': ()
+ 97..101 'self': &S
+ 132..152 '{ ... }': S
+ 142..146 'S {}': S
+ 176..199 '{ ... }': S
+ 186..193 'Self {}': S
+ "#]],
+ );
+}
+
+#[test]
+fn infer_self_as_path() {
+ check_infer(
+ r#"
+struct S1;
+struct S2(isize);
+enum E {
+ V1,
+ V2(u32),
+}
+
+impl S1 {
+ fn test() {
+ Self;
+ }
+}
+impl S2 {
+ fn test() {
+ Self(1);
+ }
+}
+impl E {
+ fn test() {
+ Self::V1;
+ Self::V2(1);
+ }
+}
+"#,
+ expect![[r#"
+ 86..107 '{ ... }': ()
+ 96..100 'Self': S1
+ 134..158 '{ ... }': ()
+ 144..148 'Self': S2(isize) -> S2
+ 144..151 'Self(1)': S2
+ 149..150 '1': isize
+ 184..230 '{ ... }': ()
+ 194..202 'Self::V1': E
+ 212..220 'Self::V2': V2(u32) -> E
+ 212..223 'Self::V2(1)': E
+ 221..222 '1': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_binary_op() {
+ check_infer(
+ r#"
+fn f(x: bool) -> i32 {
+ 0i32
+}
+
+fn test() -> bool {
+ let x = a && b;
+ let y = true || false;
+ let z = x == y;
+ let t = x != y;
+ let minus_forty: isize = -40isize;
+ let h = minus_forty <= CONST_2;
+ let c = f(z || y) + 5;
+ let d = b;
+ let g = minus_forty ^= i;
+ let ten: usize = 10;
+ let ten_is_eleven = ten == some_num;
+
+ ten < 3
+}
+"#,
+ expect![[r#"
+ 5..6 'x': bool
+ 21..33 '{ 0i32 }': i32
+ 27..31 '0i32': i32
+ 53..369 '{ ... < 3 }': bool
+ 63..64 'x': bool
+ 67..68 'a': bool
+ 67..73 'a && b': bool
+ 72..73 'b': bool
+ 83..84 'y': bool
+ 87..91 'true': bool
+ 87..100 'true || false': bool
+ 95..100 'false': bool
+ 110..111 'z': bool
+ 114..115 'x': bool
+ 114..120 'x == y': bool
+ 119..120 'y': bool
+ 130..131 't': bool
+ 134..135 'x': bool
+ 134..140 'x != y': bool
+ 139..140 'y': bool
+ 150..161 'minus_forty': isize
+ 171..179 '-40isize': isize
+ 172..179 '40isize': isize
+ 189..190 'h': bool
+ 193..204 'minus_forty': isize
+ 193..215 'minus_...ONST_2': bool
+ 208..215 'CONST_2': isize
+ 225..226 'c': i32
+ 229..230 'f': fn f(bool) -> i32
+ 229..238 'f(z || y)': i32
+ 229..242 'f(z || y) + 5': i32
+ 231..232 'z': bool
+ 231..237 'z || y': bool
+ 236..237 'y': bool
+ 241..242 '5': i32
+ 252..253 'd': {unknown}
+ 256..257 'b': {unknown}
+ 267..268 'g': ()
+ 271..282 'minus_forty': isize
+ 271..287 'minus_...y ^= i': ()
+ 286..287 'i': isize
+ 297..300 'ten': usize
+ 310..312 '10': usize
+ 322..335 'ten_is_eleven': bool
+ 338..341 'ten': usize
+ 338..353 'ten == some_num': bool
+ 345..353 'some_num': usize
+ 360..363 'ten': usize
+ 360..367 'ten < 3': bool
+ 366..367 '3': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_shift_op() {
+ check_infer(
+ r#"
+fn test() {
+ 1u32 << 5u8;
+ 1u32 >> 5u8;
+}
+"#,
+ expect![[r#"
+ 10..47 '{ ...5u8; }': ()
+ 16..20 '1u32': u32
+ 16..27 '1u32 << 5u8': u32
+ 24..27 '5u8': u8
+ 33..37 '1u32': u32
+ 33..44 '1u32 >> 5u8': u32
+ 41..44 '5u8': u8
+ "#]],
+ );
+}
+
+#[test]
+fn infer_field_autoderef() {
+ check_infer(
+ r#"
+struct A {
+ b: B,
+}
+struct B;
+
+fn test1(a: A) {
+ let a1 = a;
+ a1.b;
+ let a2 = &a;
+ a2.b;
+ let a3 = &mut a;
+ a3.b;
+ let a4 = &&&&&&&a;
+ a4.b;
+ let a5 = &mut &&mut &&mut a;
+ a5.b;
+}
+
+fn test2(a1: *const A, a2: *mut A) {
+ a1.b;
+ a2.b;
+}
+"#,
+ expect![[r#"
+ 43..44 'a': A
+ 49..212 '{ ...5.b; }': ()
+ 59..61 'a1': A
+ 64..65 'a': A
+ 71..73 'a1': A
+ 71..75 'a1.b': B
+ 85..87 'a2': &A
+ 90..92 '&a': &A
+ 91..92 'a': A
+ 98..100 'a2': &A
+ 98..102 'a2.b': B
+ 112..114 'a3': &mut A
+ 117..123 '&mut a': &mut A
+ 122..123 'a': A
+ 129..131 'a3': &mut A
+ 129..133 'a3.b': B
+ 143..145 'a4': &&&&&&&A
+ 148..156 '&&&&&&&a': &&&&&&&A
+ 149..156 '&&&&&&a': &&&&&&A
+ 150..156 '&&&&&a': &&&&&A
+ 151..156 '&&&&a': &&&&A
+ 152..156 '&&&a': &&&A
+ 153..156 '&&a': &&A
+ 154..156 '&a': &A
+ 155..156 'a': A
+ 162..164 'a4': &&&&&&&A
+ 162..166 'a4.b': B
+ 176..178 'a5': &mut &&mut &&mut A
+ 181..199 '&mut &...&mut a': &mut &&mut &&mut A
+ 186..199 '&&mut &&mut a': &&mut &&mut A
+ 187..199 '&mut &&mut a': &mut &&mut A
+ 192..199 '&&mut a': &&mut A
+ 193..199 '&mut a': &mut A
+ 198..199 'a': A
+ 205..207 'a5': &mut &&mut &&mut A
+ 205..209 'a5.b': B
+ 223..225 'a1': *const A
+ 237..239 'a2': *mut A
+ 249..272 '{ ...2.b; }': ()
+ 255..257 'a1': *const A
+ 255..259 'a1.b': B
+ 265..267 'a2': *mut A
+ 265..269 'a2.b': B
+ "#]],
+ );
+}
+
+#[test]
+fn infer_argument_autoderef() {
+ check_infer(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+struct A<T>(T);
+
+impl<T> A<T> {
+ fn foo(&self) -> &T {
+ &self.0
+ }
+}
+
+struct B<T>(T);
+
+impl<T> Deref for B<T> {
+ type Target = T;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+fn test() {
+ let t = A::foo(&&B(B(A(42))));
+}
+"#,
+ expect![[r#"
+ 66..70 'self': &A<T>
+ 78..101 '{ ... }': &T
+ 88..95 '&self.0': &T
+ 89..93 'self': &A<T>
+ 89..95 'self.0': T
+ 182..186 'self': &B<T>
+ 205..228 '{ ... }': &T
+ 215..222 '&self.0': &T
+ 216..220 'self': &B<T>
+ 216..222 'self.0': T
+ 242..280 '{ ...))); }': ()
+ 252..253 't': &i32
+ 256..262 'A::foo': fn foo<i32>(&A<i32>) -> &i32
+ 256..277 'A::foo...42))))': &i32
+ 263..276 '&&B(B(A(42)))': &&B<B<A<i32>>>
+ 264..276 '&B(B(A(42)))': &B<B<A<i32>>>
+ 265..266 'B': B<B<A<i32>>>(B<A<i32>>) -> B<B<A<i32>>>
+ 265..276 'B(B(A(42)))': B<B<A<i32>>>
+ 267..268 'B': B<A<i32>>(A<i32>) -> B<A<i32>>
+ 267..275 'B(A(42))': B<A<i32>>
+ 269..270 'A': A<i32>(i32) -> A<i32>
+ 269..274 'A(42)': A<i32>
+ 271..273 '42': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_method_argument_autoderef() {
+ check_infer(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+struct A<T>(*mut T);
+
+impl<T> A<T> {
+ fn foo(&self, x: &A<T>) -> &T {
+ &*x.0
+ }
+}
+
+struct B<T>(T);
+
+impl<T> Deref for B<T> {
+ type Target = T;
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+fn test(a: A<i32>) {
+ let t = A(0 as *mut _).foo(&&B(B(a)));
+}
+"#,
+ expect![[r#"
+ 71..75 'self': &A<T>
+ 77..78 'x': &A<T>
+ 93..114 '{ ... }': &T
+ 103..108 '&*x.0': &T
+ 104..108 '*x.0': T
+ 105..106 'x': &A<T>
+ 105..108 'x.0': *mut T
+ 195..199 'self': &B<T>
+ 218..241 '{ ... }': &T
+ 228..235 '&self.0': &T
+ 229..233 'self': &B<T>
+ 229..235 'self.0': T
+ 253..254 'a': A<i32>
+ 264..310 '{ ...))); }': ()
+ 274..275 't': &i32
+ 278..279 'A': A<i32>(*mut i32) -> A<i32>
+ 278..292 'A(0 as *mut _)': A<i32>
+ 278..307 'A(0 as...B(a)))': &i32
+ 280..281 '0': i32
+ 280..291 '0 as *mut _': *mut i32
+ 297..306 '&&B(B(a))': &&B<B<A<i32>>>
+ 298..306 '&B(B(a))': &B<B<A<i32>>>
+ 299..300 'B': B<B<A<i32>>>(B<A<i32>>) -> B<B<A<i32>>>
+ 299..306 'B(B(a))': B<B<A<i32>>>
+ 301..302 'B': B<A<i32>>(A<i32>) -> B<A<i32>>
+ 301..305 'B(a)': B<A<i32>>
+ 303..304 'a': A<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_in_elseif() {
+ check_infer(
+ r#"
+struct Foo { field: i32 }
+fn main(foo: Foo) {
+ if true {
+
+ } else if false {
+ foo.field
+ }
+}
+"#,
+ expect![[r#"
+ 34..37 'foo': Foo
+ 44..108 '{ ... } }': ()
+ 50..106 'if tru... }': ()
+ 53..57 'true': bool
+ 58..66 '{ }': ()
+ 72..106 'if fal... }': ()
+ 75..80 'false': bool
+ 81..106 '{ ... }': ()
+ 91..94 'foo': Foo
+ 91..100 'foo.field': i32
+ "#]],
+ )
+}
+
+#[test]
+fn infer_if_match_with_return() {
+ check_infer(
+ r#"
+fn foo() {
+ let _x1 = if true {
+ 1
+ } else {
+ return;
+ };
+ let _x2 = if true {
+ 2
+ } else {
+ return
+ };
+ let _x3 = match true {
+ true => 3,
+ _ => {
+ return;
+ }
+ };
+ let _x4 = match true {
+ true => 4,
+ _ => return
+ };
+}
+"#,
+ expect![[r#"
+ 9..322 '{ ... }; }': ()
+ 19..22 '_x1': i32
+ 25..79 'if tru... }': i32
+ 28..32 'true': bool
+ 33..50 '{ ... }': i32
+ 43..44 '1': i32
+ 56..79 '{ ... }': i32
+ 66..72 'return': !
+ 89..92 '_x2': i32
+ 95..148 'if tru... }': i32
+ 98..102 'true': bool
+ 103..120 '{ ... }': i32
+ 113..114 '2': i32
+ 126..148 '{ ... }': !
+ 136..142 'return': !
+ 158..161 '_x3': i32
+ 164..246 'match ... }': i32
+ 170..174 'true': bool
+ 185..189 'true': bool
+ 185..189 'true': bool
+ 193..194 '3': i32
+ 204..205 '_': bool
+ 209..240 '{ ... }': i32
+ 223..229 'return': !
+ 256..259 '_x4': i32
+ 262..319 'match ... }': i32
+ 268..272 'true': bool
+ 283..287 'true': bool
+ 283..287 'true': bool
+ 291..292 '4': i32
+ 302..303 '_': bool
+ 307..313 'return': !
+ "#]],
+ )
+}
+
+#[test]
+fn infer_inherent_method() {
+ check_infer(
+ r#"
+ struct A;
+
+ impl A {
+ fn foo(self, x: u32) -> i32 {}
+ }
+
+ mod b {
+ impl super::A {
+ pub fn bar(&self, x: u64) -> i64 {}
+ }
+ }
+
+ fn test(a: A) {
+ a.foo(1);
+ (&a).bar(1);
+ a.bar(1);
+ }
+ "#,
+ expect![[r#"
+ 31..35 'self': A
+ 37..38 'x': u32
+ 52..54 '{}': i32
+ 106..110 'self': &A
+ 112..113 'x': u64
+ 127..129 '{}': i64
+ 147..148 'a': A
+ 153..201 '{ ...(1); }': ()
+ 159..160 'a': A
+ 159..167 'a.foo(1)': i32
+ 165..166 '1': u32
+ 173..184 '(&a).bar(1)': i64
+ 174..176 '&a': &A
+ 175..176 'a': A
+ 182..183 '1': u64
+ 190..191 'a': A
+ 190..198 'a.bar(1)': i64
+ 196..197 '1': u64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_inherent_method_str() {
+ check_infer(
+ r#"
+ #[lang = "str"]
+ impl str {
+ fn foo(&self) -> i32 {}
+ }
+
+ fn test() {
+ "foo".foo();
+ }
+ "#,
+ expect![[r#"
+ 39..43 'self': &str
+ 52..54 '{}': i32
+ 68..88 '{ ...o(); }': ()
+ 74..79 '"foo"': &str
+ 74..85 '"foo".foo()': i32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_tuple() {
+ check_infer(
+ r#"
+ fn test(x: &str, y: isize) {
+ let a: (u32, &str) = (1, "a");
+ let b = (a, x);
+ let c = (y, x);
+ let d = (c, x);
+ let e = (1, "e");
+ let f = (e, "d");
+ }
+ "#,
+ expect![[r#"
+ 8..9 'x': &str
+ 17..18 'y': isize
+ 27..169 '{ ...d"); }': ()
+ 37..38 'a': (u32, &str)
+ 54..62 '(1, "a")': (u32, &str)
+ 55..56 '1': u32
+ 58..61 '"a"': &str
+ 72..73 'b': ((u32, &str), &str)
+ 76..82 '(a, x)': ((u32, &str), &str)
+ 77..78 'a': (u32, &str)
+ 80..81 'x': &str
+ 92..93 'c': (isize, &str)
+ 96..102 '(y, x)': (isize, &str)
+ 97..98 'y': isize
+ 100..101 'x': &str
+ 112..113 'd': ((isize, &str), &str)
+ 116..122 '(c, x)': ((isize, &str), &str)
+ 117..118 'c': (isize, &str)
+ 120..121 'x': &str
+ 132..133 'e': (i32, &str)
+ 136..144 '(1, "e")': (i32, &str)
+ 137..138 '1': i32
+ 140..143 '"e"': &str
+ 154..155 'f': ((i32, &str), &str)
+ 158..166 '(e, "d")': ((i32, &str), &str)
+ 159..160 'e': (i32, &str)
+ 162..165 '"d"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn infer_array() {
+ check_infer(
+ r#"
+ fn test(x: &str, y: isize) {
+ let a = [x];
+ let b = [a, a];
+ let c = [b, b];
+
+ let d = [y, 1, 2, 3];
+ let d = [1, y, 2, 3];
+ let e = [y];
+ let f = [d, d];
+ let g = [e, e];
+
+ let h = [1, 2];
+ let i = ["a", "b"];
+
+ let b = [a, ["b"]];
+ let x: [u8; 0] = [];
+ let y: [u8; 2+2] = [1,2,3,4];
+ }
+ "#,
+ expect![[r#"
+ 8..9 'x': &str
+ 17..18 'y': isize
+ 27..326 '{ ...,4]; }': ()
+ 37..38 'a': [&str; 1]
+ 41..44 '[x]': [&str; 1]
+ 42..43 'x': &str
+ 54..55 'b': [[&str; 1]; 2]
+ 58..64 '[a, a]': [[&str; 1]; 2]
+ 59..60 'a': [&str; 1]
+ 62..63 'a': [&str; 1]
+ 74..75 'c': [[[&str; 1]; 2]; 2]
+ 78..84 '[b, b]': [[[&str; 1]; 2]; 2]
+ 79..80 'b': [[&str; 1]; 2]
+ 82..83 'b': [[&str; 1]; 2]
+ 95..96 'd': [isize; 4]
+ 99..111 '[y, 1, 2, 3]': [isize; 4]
+ 100..101 'y': isize
+ 103..104 '1': isize
+ 106..107 '2': isize
+ 109..110 '3': isize
+ 121..122 'd': [isize; 4]
+ 125..137 '[1, y, 2, 3]': [isize; 4]
+ 126..127 '1': isize
+ 129..130 'y': isize
+ 132..133 '2': isize
+ 135..136 '3': isize
+ 147..148 'e': [isize; 1]
+ 151..154 '[y]': [isize; 1]
+ 152..153 'y': isize
+ 164..165 'f': [[isize; 4]; 2]
+ 168..174 '[d, d]': [[isize; 4]; 2]
+ 169..170 'd': [isize; 4]
+ 172..173 'd': [isize; 4]
+ 184..185 'g': [[isize; 1]; 2]
+ 188..194 '[e, e]': [[isize; 1]; 2]
+ 189..190 'e': [isize; 1]
+ 192..193 'e': [isize; 1]
+ 205..206 'h': [i32; 2]
+ 209..215 '[1, 2]': [i32; 2]
+ 210..211 '1': i32
+ 213..214 '2': i32
+ 225..226 'i': [&str; 2]
+ 229..239 '["a", "b"]': [&str; 2]
+ 230..233 '"a"': &str
+ 235..238 '"b"': &str
+ 250..251 'b': [[&str; 1]; 2]
+ 254..264 '[a, ["b"]]': [[&str; 1]; 2]
+ 255..256 'a': [&str; 1]
+ 258..263 '["b"]': [&str; 1]
+ 259..262 '"b"': &str
+ 274..275 'x': [u8; 0]
+ 287..289 '[]': [u8; 0]
+ 299..300 'y': [u8; 4]
+ 314..323 '[1,2,3,4]': [u8; 4]
+ 315..316 '1': u8
+ 317..318 '2': u8
+ 319..320 '3': u8
+ 321..322 '4': u8
+ "#]],
+ );
+}
+
+#[test]
+fn infer_struct_generics() {
+ check_infer(
+ r#"
+ struct A<T> {
+ x: T,
+ }
+
+ fn test(a1: A<u32>, i: i32) {
+ a1.x;
+ let a2 = A { x: i };
+ a2.x;
+ let a3 = A::<i128> { x: 1 };
+ a3.x;
+ }
+ "#,
+ expect![[r#"
+ 35..37 'a1': A<u32>
+ 47..48 'i': i32
+ 55..146 '{ ...3.x; }': ()
+ 61..63 'a1': A<u32>
+ 61..65 'a1.x': u32
+ 75..77 'a2': A<i32>
+ 80..90 'A { x: i }': A<i32>
+ 87..88 'i': i32
+ 96..98 'a2': A<i32>
+ 96..100 'a2.x': i32
+ 110..112 'a3': A<i128>
+ 115..133 'A::<i1...x: 1 }': A<i128>
+ 130..131 '1': i128
+ 139..141 'a3': A<i128>
+ 139..143 'a3.x': i128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_tuple_struct_generics() {
+ check_infer(
+ r#"
+ struct A<T>(T);
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ A(42);
+ A(42u128);
+ Some("x");
+ Option::Some("x");
+ None;
+ let x: Option<i64> = None;
+ }
+ "#,
+ expect![[r#"
+ 75..183 '{ ...one; }': ()
+ 81..82 'A': A<i32>(i32) -> A<i32>
+ 81..86 'A(42)': A<i32>
+ 83..85 '42': i32
+ 92..93 'A': A<u128>(u128) -> A<u128>
+ 92..101 'A(42u128)': A<u128>
+ 94..100 '42u128': u128
+ 107..111 'Some': Some<&str>(&str) -> Option<&str>
+ 107..116 'Some("x")': Option<&str>
+ 112..115 '"x"': &str
+ 122..134 'Option::Some': Some<&str>(&str) -> Option<&str>
+ 122..139 'Option...e("x")': Option<&str>
+ 135..138 '"x"': &str
+ 145..149 'None': Option<{unknown}>
+ 159..160 'x': Option<i64>
+ 176..180 'None': Option<i64>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_function_generics() {
+ check_infer(
+ r#"
+ fn id<T>(t: T) -> T { t }
+
+ fn test() {
+ id(1u32);
+ id::<i128>(1);
+ let x: u64 = id(1);
+ }
+ "#,
+ expect![[r#"
+ 9..10 't': T
+ 20..25 '{ t }': T
+ 22..23 't': T
+ 37..97 '{ ...(1); }': ()
+ 43..45 'id': fn id<u32>(u32) -> u32
+ 43..51 'id(1u32)': u32
+ 46..50 '1u32': u32
+ 57..67 'id::<i128>': fn id<i128>(i128) -> i128
+ 57..70 'id::<i128>(1)': i128
+ 68..69 '1': i128
+ 80..81 'x': u64
+ 89..91 'id': fn id<u64>(u64) -> u64
+ 89..94 'id(1)': u64
+ 92..93 '1': u64
+ "#]],
+ );
+}
+
+#[test]
+fn infer_impl_generics_basic() {
+ check_infer(
+ r#"
+ struct A<T1, T2> {
+ x: T1,
+ y: T2,
+ }
+ impl<Y, X> A<X, Y> {
+ fn x(self) -> X {
+ self.x
+ }
+ fn y(self) -> Y {
+ self.y
+ }
+ fn z<T>(self, t: T) -> (X, Y, T) {
+ (self.x, self.y, t)
+ }
+ }
+
+ fn test() -> i128 {
+ let a = A { x: 1u64, y: 1i64 };
+ a.x();
+ a.y();
+ a.z(1i128);
+ a.z::<u128>(1);
+ }
+ "#,
+ expect![[r#"
+ 73..77 'self': A<X, Y>
+ 84..106 '{ ... }': X
+ 94..98 'self': A<X, Y>
+ 94..100 'self.x': X
+ 116..120 'self': A<X, Y>
+ 127..149 '{ ... }': Y
+ 137..141 'self': A<X, Y>
+ 137..143 'self.y': Y
+ 162..166 'self': A<X, Y>
+ 168..169 't': T
+ 187..222 '{ ... }': (X, Y, T)
+ 197..216 '(self.....y, t)': (X, Y, T)
+ 198..202 'self': A<X, Y>
+ 198..204 'self.x': X
+ 206..210 'self': A<X, Y>
+ 206..212 'self.y': Y
+ 214..215 't': T
+ 244..341 '{ ...(1); }': i128
+ 254..255 'a': A<u64, i64>
+ 258..280 'A { x:...1i64 }': A<u64, i64>
+ 265..269 '1u64': u64
+ 274..278 '1i64': i64
+ 286..287 'a': A<u64, i64>
+ 286..291 'a.x()': u64
+ 297..298 'a': A<u64, i64>
+ 297..302 'a.y()': i64
+ 308..309 'a': A<u64, i64>
+ 308..318 'a.z(1i128)': (u64, i64, i128)
+ 312..317 '1i128': i128
+ 324..325 'a': A<u64, i64>
+ 324..338 'a.z::<u128>(1)': (u64, i64, u128)
+ 336..337 '1': u128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_impl_generics_with_autoderef() {
+ check_infer(
+ r#"
+ enum Option<T> {
+ Some(T),
+ None,
+ }
+ impl<T> Option<T> {
+ fn as_ref(&self) -> Option<&T> {}
+ }
+ fn test(o: Option<u32>) {
+ (&o).as_ref();
+ o.as_ref();
+ }
+ "#,
+ expect![[r#"
+ 77..81 'self': &Option<T>
+ 97..99 '{}': Option<&T>
+ 110..111 'o': Option<u32>
+ 126..164 '{ ...f(); }': ()
+ 132..145 '(&o).as_ref()': Option<&u32>
+ 133..135 '&o': &Option<u32>
+ 134..135 'o': Option<u32>
+ 151..152 'o': Option<u32>
+ 151..161 'o.as_ref()': Option<&u32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_generic_chain() {
+ check_infer(
+ r#"
+ struct A<T> {
+ x: T,
+ }
+ impl<T2> A<T2> {
+ fn x(self) -> T2 {
+ self.x
+ }
+ }
+ fn id<T>(t: T) -> T { t }
+
+ fn test() -> i128 {
+ let x = 1;
+ let y = id(x);
+ let a = A { x: id(y) };
+ let z = id(a.x);
+ let b = A { x: z };
+ b.x()
+ }
+ "#,
+ expect![[r#"
+ 52..56 'self': A<T2>
+ 64..86 '{ ... }': T2
+ 74..78 'self': A<T2>
+ 74..80 'self.x': T2
+ 98..99 't': T
+ 109..114 '{ t }': T
+ 111..112 't': T
+ 134..254 '{ ....x() }': i128
+ 144..145 'x': i128
+ 148..149 '1': i128
+ 159..160 'y': i128
+ 163..165 'id': fn id<i128>(i128) -> i128
+ 163..168 'id(x)': i128
+ 166..167 'x': i128
+ 178..179 'a': A<i128>
+ 182..196 'A { x: id(y) }': A<i128>
+ 189..191 'id': fn id<i128>(i128) -> i128
+ 189..194 'id(y)': i128
+ 192..193 'y': i128
+ 206..207 'z': i128
+ 210..212 'id': fn id<i128>(i128) -> i128
+ 210..217 'id(a.x)': i128
+ 213..214 'a': A<i128>
+ 213..216 'a.x': i128
+ 227..228 'b': A<i128>
+ 231..241 'A { x: z }': A<i128>
+ 238..239 'z': i128
+ 247..248 'b': A<i128>
+ 247..252 'b.x()': i128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_associated_const() {
+ check_infer(
+ r#"
+ struct Struct;
+
+ impl Struct {
+ const FOO: u32 = 1;
+ }
+
+ enum Enum {}
+
+ impl Enum {
+ const BAR: u32 = 2;
+ }
+
+ trait Trait {
+ const ID: u32;
+ }
+
+ struct TraitTest;
+
+ impl Trait for TraitTest {
+ const ID: u32 = 5;
+ }
+
+ fn test() {
+ let x = Struct::FOO;
+ let y = Enum::BAR;
+ let z = TraitTest::ID;
+ }
+ "#,
+ expect![[r#"
+ 51..52 '1': u32
+ 104..105 '2': u32
+ 212..213 '5': u32
+ 228..306 '{ ...:ID; }': ()
+ 238..239 'x': u32
+ 242..253 'Struct::FOO': u32
+ 263..264 'y': u32
+ 267..276 'Enum::BAR': u32
+ 286..287 'z': u32
+ 290..303 'TraitTest::ID': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_type_alias() {
+ check_infer(
+ r#"
+ struct A<X, Y> { x: X, y: Y }
+ type Foo = A<u32, i128>;
+ type Bar<T> = A<T, u128>;
+ type Baz<U, V> = A<V, U>;
+ fn test(x: Foo, y: Bar<&str>, z: Baz<i8, u8>) {
+ x.x;
+ x.y;
+ y.x;
+ y.y;
+ z.x;
+ z.y;
+ }
+ mod m {
+ pub enum Enum {
+ Foo(u8),
+ }
+ pub type Alias = Enum;
+ }
+ fn f() {
+ let e = m::Alias::Foo(0);
+ let m::Alias::Foo(x) = &e;
+ }
+ "#,
+ expect![[r#"
+ 115..116 'x': A<u32, i128>
+ 123..124 'y': A<&str, u128>
+ 137..138 'z': A<u8, i8>
+ 153..210 '{ ...z.y; }': ()
+ 159..160 'x': A<u32, i128>
+ 159..162 'x.x': u32
+ 168..169 'x': A<u32, i128>
+ 168..171 'x.y': i128
+ 177..178 'y': A<&str, u128>
+ 177..180 'y.x': &str
+ 186..187 'y': A<&str, u128>
+ 186..189 'y.y': u128
+ 195..196 'z': A<u8, i8>
+ 195..198 'z.x': u8
+ 204..205 'z': A<u8, i8>
+ 204..207 'z.y': i8
+ 298..362 '{ ... &e; }': ()
+ 308..309 'e': Enum
+ 312..325 'm::Alias::Foo': Foo(u8) -> Enum
+ 312..328 'm::Ali...Foo(0)': Enum
+ 326..327 '0': u8
+ 338..354 'm::Ali...Foo(x)': Enum
+ 352..353 'x': &u8
+ 357..359 '&e': &Enum
+ 358..359 'e': Enum
+ "#]],
+ )
+}
+
+#[test]
+fn recursive_type_alias() {
+ check_infer(
+ r#"
+ struct A<X> {}
+ type Foo = Foo;
+ type Bar = A<Bar>;
+ fn test(x: Foo) {}
+ "#,
+ expect![[r#"
+ 58..59 'x': {unknown}
+ 66..68 '{}': ()
+ "#]],
+ )
+}
+
+#[test]
+fn infer_type_param() {
+ check_infer(
+ r#"
+ fn id<T>(x: T) -> T {
+ x
+ }
+
+ fn clone<T>(x: &T) -> T {
+ *x
+ }
+
+ fn test() {
+ let y = 10u32;
+ id(y);
+ let x: bool = clone(z);
+ id::<i128>(1);
+ }
+ "#,
+ expect![[r#"
+ 9..10 'x': T
+ 20..29 '{ x }': T
+ 26..27 'x': T
+ 43..44 'x': &T
+ 55..65 '{ *x }': T
+ 61..63 '*x': T
+ 62..63 'x': &T
+ 77..157 '{ ...(1); }': ()
+ 87..88 'y': u32
+ 91..96 '10u32': u32
+ 102..104 'id': fn id<u32>(u32) -> u32
+ 102..107 'id(y)': u32
+ 105..106 'y': u32
+ 117..118 'x': bool
+ 127..132 'clone': fn clone<bool>(&bool) -> bool
+ 127..135 'clone(z)': bool
+ 133..134 'z': &bool
+ 141..151 'id::<i128>': fn id<i128>(i128) -> i128
+ 141..154 'id::<i128>(1)': i128
+ 152..153 '1': i128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_const() {
+ check_infer(
+ r#"
+struct Foo;
+impl Foo { const ASSOC_CONST: u32 = 0; }
+const GLOBAL_CONST: u32 = 101;
+fn test() {
+ const LOCAL_CONST: u32 = 99;
+ let x = LOCAL_CONST;
+ let z = GLOBAL_CONST;
+ let id = Foo::ASSOC_CONST;
+}
+"#,
+ expect![[r#"
+ 48..49 '0': u32
+ 79..82 '101': u32
+ 94..212 '{ ...NST; }': ()
+ 137..138 'x': u32
+ 141..152 'LOCAL_CONST': u32
+ 162..163 'z': u32
+ 166..178 'GLOBAL_CONST': u32
+ 188..190 'id': u32
+ 193..209 'Foo::A..._CONST': u32
+ 125..127 '99': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_static() {
+ check_infer(
+ r#"
+static GLOBAL_STATIC: u32 = 101;
+static mut GLOBAL_STATIC_MUT: u32 = 101;
+fn test() {
+ static LOCAL_STATIC: u32 = 99;
+ static mut LOCAL_STATIC_MUT: u32 = 99;
+ let x = LOCAL_STATIC;
+ let y = LOCAL_STATIC_MUT;
+ let z = GLOBAL_STATIC;
+ let w = GLOBAL_STATIC_MUT;
+}
+"#,
+ expect![[r#"
+ 28..31 '101': u32
+ 69..72 '101': u32
+ 84..279 '{ ...MUT; }': ()
+ 172..173 'x': u32
+ 176..188 'LOCAL_STATIC': u32
+ 198..199 'y': u32
+ 202..218 'LOCAL_...IC_MUT': u32
+ 228..229 'z': u32
+ 232..245 'GLOBAL_STATIC': u32
+ 255..256 'w': u32
+ 259..276 'GLOBAL...IC_MUT': u32
+ 117..119 '99': u32
+ 160..162 '99': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_enum_variant() {
+ check_infer(
+ r#"
+enum Foo {
+ A = 15,
+ B = Foo::A as isize + 1
+}
+"#,
+ expect![[r#"
+ 19..21 '15': isize
+ 31..37 'Foo::A': Foo
+ 31..46 'Foo::A as isize': isize
+ 31..50 'Foo::A...ze + 1': isize
+ 49..50 '1': isize
+ "#]],
+ );
+ check_infer(
+ r#"
+#[repr(u32)]
+enum Foo {
+ A = 15,
+ B = Foo::A as u32 + 1
+}
+"#,
+ expect![[r#"
+ 32..34 '15': u32
+ 44..50 'Foo::A': Foo
+ 44..57 'Foo::A as u32': u32
+ 44..61 'Foo::A...32 + 1': u32
+ 60..61 '1': u32
+ "#]],
+ );
+}
+
+#[test]
+fn shadowing_primitive() {
+ check_types(
+ r#"
+struct i32;
+struct Foo;
+
+impl i32 { fn foo(&self) -> Foo { Foo } }
+
+fn main() {
+ let x: i32 = i32;
+ x.foo();
+ //^^^^^^^ Foo
+}"#,
+ );
+}
+
+#[test]
+fn const_eval_array_repeat_expr() {
+ check_types(
+ r#"
+fn main() {
+ const X: usize = 6 - 1;
+ let t = [(); X + 2];
+ //^ [(); 7]
+}"#,
+ );
+}
+
+#[test]
+fn shadowing_primitive_with_inner_items() {
+ check_types(
+ r#"
+struct i32;
+struct Foo;
+
+impl i32 { fn foo(&self) -> Foo { Foo } }
+
+fn main() {
+ fn inner() {}
+ let x: i32 = i32;
+ x.foo();
+ //^^^^^^^ Foo
+}"#,
+ );
+}
+
+#[test]
+fn not_shadowing_primitive_by_module() {
+ check_types(
+ r#"
+//- /str.rs
+fn foo() {}
+
+//- /main.rs
+mod str;
+fn foo() -> &'static str { "" }
+
+fn main() {
+ foo();
+ //^^^^^ &str
+}"#,
+ );
+}
+
+#[test]
+fn not_shadowing_module_by_primitive() {
+ check_types(
+ r#"
+//- /str.rs
+pub fn foo() -> u32 {0}
+
+//- /main.rs
+mod str;
+fn foo() -> &'static str { "" }
+
+fn main() {
+ str::foo();
+ //^^^^^^^^^^ u32
+}"#,
+ );
+}
+
+// This test is actually testing the shadowing behavior within hir_def. It
+// lives here because the testing infrastructure in hir_def isn't currently
+// capable of asserting the necessary conditions.
+#[test]
+fn should_be_shadowing_imports() {
+ check_types(
+ r#"
+mod a {
+ pub fn foo() -> i8 {0}
+ pub struct foo { a: i8 }
+}
+mod b { pub fn foo () -> u8 {0} }
+mod c { pub struct foo { a: u8 } }
+mod d {
+ pub use super::a::*;
+ pub use super::c::foo;
+ pub use super::b::foo;
+}
+
+fn main() {
+ d::foo();
+ //^^^^^^^^ u8
+ d::foo{a:0};
+ //^^^^^^^^^^^ foo
+}"#,
+ );
+}
+
+#[test]
+fn closure_return() {
+ check_infer(
+ r#"
+ fn foo() -> u32 {
+ let x = || -> usize { return 1; };
+ }
+ "#,
+ expect![[r#"
+ 16..58 '{ ...; }; }': u32
+ 26..27 'x': || -> usize
+ 30..55 '|| -> ...n 1; }': || -> usize
+ 42..55 '{ return 1; }': usize
+ 44..52 'return 1': !
+ 51..52 '1': usize
+ "#]],
+ );
+}
+
+#[test]
+fn closure_return_unit() {
+ check_infer(
+ r#"
+ fn foo() -> u32 {
+ let x = || { return; };
+ }
+ "#,
+ expect![[r#"
+ 16..47 '{ ...; }; }': u32
+ 26..27 'x': || -> ()
+ 30..44 '|| { return; }': || -> ()
+ 33..44 '{ return; }': ()
+ 35..41 'return': !
+ "#]],
+ );
+}
+
+#[test]
+fn closure_return_inferred() {
+ check_infer(
+ r#"
+ fn foo() -> u32 {
+ let x = || { "test" };
+ }
+ "#,
+ expect![[r#"
+ 16..46 '{ ..." }; }': u32
+ 26..27 'x': || -> &str
+ 30..43 '|| { "test" }': || -> &str
+ 33..43 '{ "test" }': &str
+ 35..41 '"test"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn generator_types_inferred() {
+ check_infer(
+ r#"
+//- minicore: generator, deref
+use core::ops::{Generator, GeneratorState};
+use core::pin::Pin;
+
+fn f(v: i64) {}
+fn test() {
+ let mut g = |r| {
+ let a = yield 0;
+ let a = yield 1;
+ let a = yield 2;
+ "return value"
+ };
+
+ match Pin::new(&mut g).resume(0usize) {
+ GeneratorState::Yielded(y) => { f(y); }
+ GeneratorState::Complete(r) => {}
+ }
+}
+ "#,
+ expect![[r#"
+ 70..71 'v': i64
+ 78..80 '{}': ()
+ 91..362 '{ ... } }': ()
+ 101..106 'mut g': |usize| yields i64 -> &str
+ 109..218 '|r| { ... }': |usize| yields i64 -> &str
+ 110..111 'r': usize
+ 113..218 '{ ... }': &str
+ 127..128 'a': usize
+ 131..138 'yield 0': usize
+ 137..138 '0': i64
+ 152..153 'a': usize
+ 156..163 'yield 1': usize
+ 162..163 '1': i64
+ 177..178 'a': usize
+ 181..188 'yield 2': usize
+ 187..188 '2': i64
+ 198..212 '"return value"': &str
+ 225..360 'match ... }': ()
+ 231..239 'Pin::new': fn new<&mut |usize| yields i64 -> &str>(&mut |usize| yields i64 -> &str) -> Pin<&mut |usize| yields i64 -> &str>
+ 231..247 'Pin::n...mut g)': Pin<&mut |usize| yields i64 -> &str>
+ 231..262 'Pin::n...usize)': GeneratorState<i64, &str>
+ 240..246 '&mut g': &mut |usize| yields i64 -> &str
+ 245..246 'g': |usize| yields i64 -> &str
+ 255..261 '0usize': usize
+ 273..299 'Genera...ded(y)': GeneratorState<i64, &str>
+ 297..298 'y': i64
+ 303..312 '{ f(y); }': ()
+ 305..306 'f': fn f(i64)
+ 305..309 'f(y)': ()
+ 307..308 'y': i64
+ 321..348 'Genera...ete(r)': GeneratorState<i64, &str>
+ 346..347 'r': &str
+ 352..354 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn generator_resume_yield_return_unit() {
+ check_no_mismatches(
+ r#"
+//- minicore: generator, deref
+use core::ops::{Generator, GeneratorState};
+use core::pin::Pin;
+fn test() {
+ let mut g = || {
+ let () = yield;
+ };
+
+ match Pin::new(&mut g).resume(()) {
+ GeneratorState::Yielded(()) => {}
+ GeneratorState::Complete(()) => {}
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn fn_pointer_return() {
+ check_infer(
+ r#"
+ struct Vtable {
+ method: fn(),
+ }
+
+ fn main() {
+ let vtable = Vtable { method: || {} };
+ let m = vtable.method;
+ }
+ "#,
+ expect![[r#"
+ 47..120 '{ ...hod; }': ()
+ 57..63 'vtable': Vtable
+ 66..90 'Vtable...| {} }': Vtable
+ 83..88 '|| {}': || -> ()
+ 86..88 '{}': ()
+ 100..101 'm': fn()
+ 104..110 'vtable': Vtable
+ 104..117 'vtable.method': fn()
+ "#]],
+ );
+}
+
+#[test]
+fn block_modifiers_smoke_test() {
+ check_infer(
+ r#"
+//- minicore: future
+async fn main() {
+ let x = unsafe { 92 };
+ let y = async { async { () }.await };
+ let z = try { () };
+ let w = const { 92 };
+ let t = 'a: { 92 };
+}
+ "#,
+ expect![[r#"
+ 16..162 '{ ...2 }; }': ()
+ 26..27 'x': i32
+ 30..43 'unsafe { 92 }': i32
+ 30..43 'unsafe { 92 }': i32
+ 39..41 '92': i32
+ 53..54 'y': impl Future<Output = ()>
+ 57..85 'async ...wait }': ()
+ 57..85 'async ...wait }': impl Future<Output = ()>
+ 65..77 'async { () }': ()
+ 65..77 'async { () }': impl Future<Output = ()>
+ 65..83 'async ....await': ()
+ 73..75 '()': ()
+ 95..96 'z': {unknown}
+ 99..109 'try { () }': ()
+ 99..109 'try { () }': {unknown}
+ 105..107 '()': ()
+ 119..120 'w': i32
+ 123..135 'const { 92 }': i32
+ 123..135 'const { 92 }': i32
+ 131..133 '92': i32
+ 145..146 't': i32
+ 149..159 ''a: { 92 }': i32
+ 155..157 '92': i32
+ "#]],
+ )
+}
+#[test]
+fn async_block_early_return() {
+ check_infer(
+ r#"
+//- minicore: future, result, fn
+fn test<I, E, F: FnMut() -> Fut, Fut: core::future::Future<Output = Result<I, E>>>(f: F) {}
+
+fn main() {
+ async {
+ return Err(());
+ Ok(())
+ };
+ test(|| async {
+ return Err(());
+ Ok(())
+ });
+}
+ "#,
+ expect![[r#"
+ 83..84 'f': F
+ 89..91 '{}': ()
+ 103..231 '{ ... }); }': ()
+ 109..161 'async ... }': Result<(), ()>
+ 109..161 'async ... }': impl Future<Output = Result<(), ()>>
+ 125..139 'return Err(())': !
+ 132..135 'Err': Err<(), ()>(()) -> Result<(), ()>
+ 132..139 'Err(())': Result<(), ()>
+ 136..138 '()': ()
+ 149..151 'Ok': Ok<(), ()>(()) -> Result<(), ()>
+ 149..155 'Ok(())': Result<(), ()>
+ 152..154 '()': ()
+ 167..171 'test': fn test<(), (), || -> impl Future<Output = Result<(), ()>>, impl Future<Output = Result<(), ()>>>(|| -> impl Future<Output = Result<(), ()>>)
+ 167..228 'test(|... })': ()
+ 172..227 '|| asy... }': || -> impl Future<Output = Result<(), ()>>
+ 175..227 'async ... }': Result<(), ()>
+ 175..227 'async ... }': impl Future<Output = Result<(), ()>>
+ 191..205 'return Err(())': !
+ 198..201 'Err': Err<(), ()>(()) -> Result<(), ()>
+ 198..205 'Err(())': Result<(), ()>
+ 202..204 '()': ()
+ 215..217 'Ok': Ok<(), ()>(()) -> Result<(), ()>
+ 215..221 'Ok(())': Result<(), ()>
+ 218..220 '()': ()
+ "#]],
+ )
+}
+
+#[test]
+fn infer_generic_from_later_assignment() {
+ check_infer(
+ r#"
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ let mut end = None;
+ loop {
+ end = Some(true);
+ }
+ }
+ "#,
+ expect![[r#"
+ 59..129 '{ ... } }': ()
+ 69..76 'mut end': Option<bool>
+ 79..83 'None': Option<bool>
+ 89..127 'loop {... }': !
+ 94..127 '{ ... }': ()
+ 104..107 'end': Option<bool>
+ 104..120 'end = ...(true)': ()
+ 110..114 'Some': Some<bool>(bool) -> Option<bool>
+ 110..120 'Some(true)': Option<bool>
+ 115..119 'true': bool
+ "#]],
+ );
+}
+
+#[test]
+fn infer_loop_break_with_val() {
+ check_infer(
+ r#"
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ let x = loop {
+ if false {
+ break None;
+ }
+
+ break Some(true);
+ };
+ }
+ "#,
+ expect![[r#"
+ 59..168 '{ ... }; }': ()
+ 69..70 'x': Option<bool>
+ 73..165 'loop {... }': Option<bool>
+ 78..165 '{ ... }': ()
+ 88..132 'if fal... }': ()
+ 91..96 'false': bool
+ 97..132 '{ ... }': ()
+ 111..121 'break None': !
+ 117..121 'None': Option<bool>
+ 142..158 'break ...(true)': !
+ 148..152 'Some': Some<bool>(bool) -> Option<bool>
+ 148..158 'Some(true)': Option<bool>
+ 153..157 'true': bool
+ "#]],
+ );
+}
+
+#[test]
+fn infer_loop_break_without_val() {
+ check_infer(
+ r#"
+ enum Option<T> { Some(T), None }
+ use Option::*;
+
+ fn test() {
+ let x = loop {
+ if false {
+ break;
+ }
+ };
+ }
+ "#,
+ expect![[r#"
+ 59..136 '{ ... }; }': ()
+ 69..70 'x': ()
+ 73..133 'loop {... }': ()
+ 78..133 '{ ... }': ()
+ 88..127 'if fal... }': ()
+ 91..96 'false': bool
+ 97..127 '{ ... }': ()
+ 111..116 'break': !
+ "#]],
+ );
+}
+
+#[test]
+fn infer_labelled_break_with_val() {
+ check_infer(
+ r#"
+ fn foo() {
+ let _x = || 'outer: loop {
+ let inner = 'inner: loop {
+ let i = Default::default();
+ if (break 'outer i) {
+ loop { break 'inner 5i8; };
+ } else if true {
+ break 'inner 6;
+ }
+ break 7;
+ };
+ break inner < 8;
+ };
+ }
+ "#,
+ expect![[r#"
+ 9..335 '{ ... }; }': ()
+ 19..21 '_x': || -> bool
+ 24..332 '|| 'ou... }': || -> bool
+ 27..332 ''outer... }': bool
+ 40..332 '{ ... }': ()
+ 54..59 'inner': i8
+ 62..300 ''inner... }': i8
+ 75..300 '{ ... }': ()
+ 93..94 'i': bool
+ 97..113 'Defaul...efault': {unknown}
+ 97..115 'Defaul...ault()': bool
+ 129..269 'if (br... }': ()
+ 133..147 'break 'outer i': !
+ 146..147 'i': bool
+ 149..208 '{ ... }': ()
+ 167..193 'loop {...5i8; }': !
+ 172..193 '{ brea...5i8; }': ()
+ 174..190 'break ...er 5i8': !
+ 187..190 '5i8': i8
+ 214..269 'if tru... }': ()
+ 217..221 'true': bool
+ 222..269 '{ ... }': ()
+ 240..254 'break 'inner 6': !
+ 253..254 '6': i8
+ 282..289 'break 7': !
+ 288..289 '7': i8
+ 310..325 'break inner < 8': !
+ 316..321 'inner': i8
+ 316..325 'inner < 8': bool
+ 324..325 '8': i8
+ "#]],
+ );
+}
+
+#[test]
+fn infer_labelled_block_break_with_val() {
+ check_infer(
+ r#"
+fn default<T>() -> T { loop {} }
+fn foo() {
+ let _x = 'outer: {
+ let inner = 'inner: {
+ let i = default();
+ if (break 'outer i) {
+ break 'inner 5i8;
+ } else if true {
+ break 'inner 6;
+ }
+ break 'inner 'innermost: { 0 };
+ 42
+ };
+ break 'outer inner < 8;
+ };
+}
+"#,
+ expect![[r#"
+ 21..32 '{ loop {} }': T
+ 23..30 'loop {}': !
+ 28..30 '{}': ()
+ 42..381 '{ ... }; }': ()
+ 52..54 '_x': bool
+ 57..378 ''outer... }': bool
+ 79..84 'inner': i8
+ 87..339 ''inner... }': i8
+ 113..114 'i': bool
+ 117..124 'default': fn default<bool>() -> bool
+ 117..126 'default()': bool
+ 140..270 'if (br... }': ()
+ 144..158 'break 'outer i': !
+ 157..158 'i': bool
+ 160..209 '{ ... }': ()
+ 178..194 'break ...er 5i8': !
+ 191..194 '5i8': i8
+ 215..270 'if tru... }': ()
+ 218..222 'true': bool
+ 223..270 '{ ... }': ()
+ 241..255 'break 'inner 6': !
+ 254..255 '6': i8
+ 283..313 'break ... { 0 }': !
+ 296..313 ''inner... { 0 }': i8
+ 310..311 '0': i8
+ 327..329 '42': i8
+ 349..371 'break ...er < 8': !
+ 362..367 'inner': i8
+ 362..371 'inner < 8': bool
+ 370..371 '8': i8
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default() {
+ check_infer(
+ r#"
+ struct Thing<T = ()> { t: T }
+ enum OtherThing<T = ()> {
+ One { t: T },
+ Two(T),
+ }
+
+ fn test(t1: Thing, t2: OtherThing, t3: Thing<i32>, t4: OtherThing<i32>) {
+ t1.t;
+ t3.t;
+ match t2 {
+ OtherThing::One { t } => { t; },
+ OtherThing::Two(t) => { t; },
+ }
+ match t4 {
+ OtherThing::One { t } => { t; },
+ OtherThing::Two(t) => { t; },
+ }
+ }
+ "#,
+ expect![[r#"
+ 97..99 't1': Thing<()>
+ 108..110 't2': OtherThing<()>
+ 124..126 't3': Thing<i32>
+ 140..142 't4': OtherThing<i32>
+ 161..384 '{ ... } }': ()
+ 167..169 't1': Thing<()>
+ 167..171 't1.t': ()
+ 177..179 't3': Thing<i32>
+ 177..181 't3.t': i32
+ 187..282 'match ... }': ()
+ 193..195 't2': OtherThing<()>
+ 206..227 'OtherT... { t }': OtherThing<()>
+ 224..225 't': ()
+ 231..237 '{ t; }': ()
+ 233..234 't': ()
+ 247..265 'OtherT...Two(t)': OtherThing<()>
+ 263..264 't': ()
+ 269..275 '{ t; }': ()
+ 271..272 't': ()
+ 287..382 'match ... }': ()
+ 293..295 't4': OtherThing<i32>
+ 306..327 'OtherT... { t }': OtherThing<i32>
+ 324..325 't': i32
+ 331..337 '{ t; }': ()
+ 333..334 't': i32
+ 347..365 'OtherT...Two(t)': OtherThing<i32>
+ 363..364 't': i32
+ 369..375 '{ t; }': ()
+ 371..372 't': i32
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default_in_struct_literal() {
+ check_infer(
+ r#"
+ struct Thing<T = ()> { t: T }
+ enum OtherThing<T = ()> {
+ One { t: T },
+ Two(T),
+ }
+
+ fn test() {
+ let x = Thing { t: loop {} };
+ let y = Thing { t: () };
+ let z = Thing { t: 1i32 };
+ if let Thing { t } = z {
+ t;
+ }
+
+ let a = OtherThing::One { t: 1i32 };
+ let b = OtherThing::Two(1i32);
+ }
+ "#,
+ expect![[r#"
+ 99..319 '{ ...32); }': ()
+ 109..110 'x': Thing<!>
+ 113..133 'Thing ...p {} }': Thing<!>
+ 124..131 'loop {}': !
+ 129..131 '{}': ()
+ 143..144 'y': Thing<()>
+ 147..162 'Thing { t: () }': Thing<()>
+ 158..160 '()': ()
+ 172..173 'z': Thing<i32>
+ 176..193 'Thing ...1i32 }': Thing<i32>
+ 187..191 '1i32': i32
+ 199..240 'if let... }': ()
+ 202..221 'let Th... } = z': bool
+ 206..217 'Thing { t }': Thing<i32>
+ 214..215 't': i32
+ 220..221 'z': Thing<i32>
+ 222..240 '{ ... }': ()
+ 232..233 't': i32
+ 250..251 'a': OtherThing<i32>
+ 254..281 'OtherT...1i32 }': OtherThing<i32>
+ 275..279 '1i32': i32
+ 291..292 'b': OtherThing<i32>
+ 295..310 'OtherThing::Two': Two<i32>(i32) -> OtherThing<i32>
+ 295..316 'OtherT...(1i32)': OtherThing<i32>
+ 311..315 '1i32': i32
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default_depending_on_other_type_arg() {
+ // FIXME: the {unknown} is a bug
+ check_infer(
+ r#"
+ struct Thing<T = u128, F = fn() -> T> { t: T }
+
+ fn test(t1: Thing<u32>, t2: Thing) {
+ t1;
+ t2;
+ Thing::<_> { t: 1u32 };
+ }
+ "#,
+ expect![[r#"
+ 56..58 't1': Thing<u32, fn() -> u32>
+ 72..74 't2': Thing<u128, fn() -> u128>
+ 83..130 '{ ...2 }; }': ()
+ 89..91 't1': Thing<u32, fn() -> u32>
+ 97..99 't2': Thing<u128, fn() -> u128>
+ 105..127 'Thing:...1u32 }': Thing<u32, fn() -> {unknown}>
+ 121..125 '1u32': u32
+ "#]],
+ );
+}
+
+#[test]
+fn generic_default_depending_on_other_type_arg_forward() {
+ // the {unknown} here is intentional, as defaults are not allowed to
+ // refer to type parameters coming later
+ check_infer(
+ r#"
+ struct Thing<F = fn() -> T, T = u128> { t: T }
+
+ fn test(t1: Thing) {
+ t1;
+ }
+ "#,
+ expect![[r#"
+ 56..58 't1': Thing<fn() -> {unknown}, u128>
+ 67..78 '{ t1; }': ()
+ 73..75 't1': Thing<fn() -> {unknown}, u128>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_operator_overload() {
+ check_types(
+ r#"
+//- minicore: add
+struct V2([f32; 2]);
+
+impl core::ops::Add<V2> for V2 {
+ type Output = V2;
+}
+
+fn test() {
+ let va = V2([0.0, 1.0]);
+ let vb = V2([0.0, 1.0]);
+
+ let r = va + vb;
+ // ^^^^^^^ V2
+}
+
+ "#,
+ );
+}
+
+#[test]
+fn infer_const_params() {
+ check_infer(
+ r#"
+ fn foo<const FOO: usize>() {
+ let bar = FOO;
+ }
+ "#,
+ expect![[r#"
+ 27..49 '{ ...FOO; }': ()
+ 37..40 'bar': usize
+ 43..46 'FOO': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_inner_type() {
+ check_infer(
+ r#"
+ fn foo() {
+ struct S { field: u32 }
+ let s = S { field: 0 };
+ let f = s.field;
+ }
+ "#,
+ expect![[r#"
+ 9..89 '{ ...eld; }': ()
+ 47..48 's': S
+ 51..65 'S { field: 0 }': S
+ 62..63 '0': u32
+ 75..76 'f': u32
+ 79..80 's': S
+ 79..86 's.field': u32
+ "#]],
+ );
+}
+
+#[test]
+fn infer_nested_inner_type() {
+ check_infer(
+ r#"
+ fn foo() {
+ {
+ let s = S { field: 0 };
+ let f = s.field;
+ }
+ struct S { field: u32 }
+ }
+ "#,
+ expect![[r#"
+ 9..109 '{ ...32 } }': ()
+ 15..79 '{ ... }': ()
+ 29..30 's': S
+ 33..47 'S { field: 0 }': S
+ 44..45 '0': u32
+ 61..62 'f': u32
+ 65..66 's': S
+ 65..72 's.field': u32
+ "#]],
+ );
+}
+
+#[test]
+fn inner_use_enum_rename() {
+ check_infer(
+ r#"
+ enum Request {
+ Info
+ }
+
+ fn f() {
+ use Request as R;
+
+ let r = R::Info;
+ match r {
+ R::Info => {}
+ }
+ }
+ "#,
+ expect![[r#"
+ 34..123 '{ ... } }': ()
+ 67..68 'r': Request
+ 71..78 'R::Info': Request
+ 84..121 'match ... }': ()
+ 90..91 'r': Request
+ 102..109 'R::Info': Request
+ 113..115 '{}': ()
+ "#]],
+ )
+}
+
+#[test]
+fn box_into_vec() {
+ check_infer(
+ r#"
+#[lang = "sized"]
+pub trait Sized {}
+
+#[lang = "unsize"]
+pub trait Unsize<T: ?Sized> {}
+
+#[lang = "coerce_unsized"]
+pub trait CoerceUnsized<T> {}
+
+pub unsafe trait Allocator {}
+
+pub struct Global;
+unsafe impl Allocator for Global {}
+
+#[lang = "owned_box"]
+#[fundamental]
+pub struct Box<T: ?Sized, A: Allocator = Global>;
+
+impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Box<U, A>> for Box<T, A> {}
+
+pub struct Vec<T, A: Allocator = Global> {}
+
+#[lang = "slice"]
+impl<T> [T] {}
+
+#[lang = "slice_alloc"]
+impl<T> [T] {
+ pub fn into_vec<A: Allocator>(self: Box<Self, A>) -> Vec<T, A> {
+ unimplemented!()
+ }
+}
+
+fn test() {
+ let vec = <[_]>::into_vec(box [1i32]);
+ let v: Vec<Box<dyn B>> = <[_]> :: into_vec(box [box Astruct]);
+}
+
+trait B{}
+struct Astruct;
+impl B for Astruct {}
+"#,
+ expect![[r#"
+ 569..573 'self': Box<[T], A>
+ 602..634 '{ ... }': Vec<T, A>
+ 648..761 '{ ...t]); }': ()
+ 658..661 'vec': Vec<i32, Global>
+ 664..679 '<[_]>::into_vec': fn into_vec<i32, Global>(Box<[i32], Global>) -> Vec<i32, Global>
+ 664..691 '<[_]>:...1i32])': Vec<i32, Global>
+ 680..690 'box [1i32]': Box<[i32; 1], Global>
+ 684..690 '[1i32]': [i32; 1]
+ 685..689 '1i32': i32
+ 701..702 'v': Vec<Box<dyn B, Global>, Global>
+ 722..739 '<[_]> ...to_vec': fn into_vec<Box<dyn B, Global>, Global>(Box<[Box<dyn B, Global>], Global>) -> Vec<Box<dyn B, Global>, Global>
+ 722..758 '<[_]> ...ruct])': Vec<Box<dyn B, Global>, Global>
+ 740..757 'box [b...truct]': Box<[Box<dyn B, Global>; 1], Global>
+ 744..757 '[box Astruct]': [Box<dyn B, Global>; 1]
+ 745..756 'box Astruct': Box<Astruct, Global>
+ 749..756 'Astruct': Astruct
+ "#]],
+ )
+}
+
+#[test]
+fn cfgd_out_assoc_items() {
+ // An associated const behind `#[cfg(FALSE)]` is compiled out, so resolving
+ // `S::C` fails and its type is reported as `{unknown}` — that is the
+ // expected result here, not a bug in the fixture.
+ check_types(
+ r#"
+struct S;
+
+impl S {
+ #[cfg(FALSE)]
+ const C: S = S;
+}
+
+fn f() {
+ S::C;
+ //^^^^ {unknown}
+}
+ "#,
+ )
+}
+
+#[test]
+fn infer_missing_type() {
+ // NOTE: `let s: = S;` is deliberately malformed (the ascription after `:`
+ // is missing); inference must recover from the parse error and still
+ // assign type `S` to the binding.
+ check_types(
+ r#"
+struct S;
+
+fn f() {
+ let s: = S;
+ //^ S
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_type_alias_variant() {
+ check_infer(
+ r#"
+type Qux = Foo;
+enum Foo {
+ Bar(i32),
+ Baz { baz: f32 }
+}
+
+fn f() {
+ match Foo::Bar(3) {
+ Qux::Bar(bar) => (),
+ Qux::Baz { baz } => (),
+ }
+}
+ "#,
+ expect![[r#"
+ 72..166 '{ ... } }': ()
+ 78..164 'match ... }': ()
+ 84..92 'Foo::Bar': Bar(i32) -> Foo
+ 84..95 'Foo::Bar(3)': Foo
+ 93..94 '3': i32
+ 106..119 'Qux::Bar(bar)': Foo
+ 115..118 'bar': i32
+ 123..125 '()': ()
+ 135..151 'Qux::B... baz }': Foo
+ 146..149 'baz': f32
+ 155..157 '()': ()
+ "#]],
+ )
+}
+
+#[test]
+fn infer_boxed_self_receiver() {
+ check_infer(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+
+struct Box<T>(T);
+
+impl<T> Deref for Box<T> {
+ type Target = T;
+ fn deref(&self) -> &Self::Target;
+}
+
+struct Foo<T>(T);
+
+impl<T> Foo<T> {
+ fn get_inner<'a>(self: &'a Box<Self>) -> &'a T {}
+
+ fn get_self<'a>(self: &'a Box<Self>) -> &'a Self {}
+
+ fn into_inner(self: Box<Self>) -> Self {}
+}
+
+fn main() {
+ let boxed = Box(Foo(0_i32));
+
+ let bad1 = boxed.get_inner();
+ let good1 = Foo::get_inner(&boxed);
+
+ let bad2 = boxed.get_self();
+ let good2 = Foo::get_self(&boxed);
+
+ let inner = boxed.into_inner();
+}
+ "#,
+ expect![[r#"
+ 104..108 'self': &Box<T>
+ 188..192 'self': &Box<Foo<T>>
+ 218..220 '{}': &T
+ 242..246 'self': &Box<Foo<T>>
+ 275..277 '{}': &Foo<T>
+ 297..301 'self': Box<Foo<T>>
+ 322..324 '{}': Foo<T>
+ 338..559 '{ ...r(); }': ()
+ 348..353 'boxed': Box<Foo<i32>>
+ 356..359 'Box': Box<Foo<i32>>(Foo<i32>) -> Box<Foo<i32>>
+ 356..371 'Box(Foo(0_i32))': Box<Foo<i32>>
+ 360..363 'Foo': Foo<i32>(i32) -> Foo<i32>
+ 360..370 'Foo(0_i32)': Foo<i32>
+ 364..369 '0_i32': i32
+ 382..386 'bad1': &i32
+ 389..394 'boxed': Box<Foo<i32>>
+ 389..406 'boxed....nner()': &i32
+ 416..421 'good1': &i32
+ 424..438 'Foo::get_inner': fn get_inner<i32>(&Box<Foo<i32>>) -> &i32
+ 424..446 'Foo::g...boxed)': &i32
+ 439..445 '&boxed': &Box<Foo<i32>>
+ 440..445 'boxed': Box<Foo<i32>>
+ 457..461 'bad2': &Foo<i32>
+ 464..469 'boxed': Box<Foo<i32>>
+ 464..480 'boxed....self()': &Foo<i32>
+ 490..495 'good2': &Foo<i32>
+ 498..511 'Foo::get_self': fn get_self<i32>(&Box<Foo<i32>>) -> &Foo<i32>
+ 498..519 'Foo::g...boxed)': &Foo<i32>
+ 512..518 '&boxed': &Box<Foo<i32>>
+ 513..518 'boxed': Box<Foo<i32>>
+ 530..535 'inner': Foo<i32>
+ 538..543 'boxed': Box<Foo<i32>>
+ 538..556 'boxed....nner()': Foo<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn prelude_2015() {
+ check_types(
+ r#"
+//- /main.rs edition:2015 crate:main deps:core
+fn f() {
+ Rust;
+ //^^^^ Rust
+}
+
+//- /core.rs crate:core
+pub mod prelude {
+ pub mod rust_2015 {
+ pub struct Rust;
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn legacy_const_generics() {
+ check_no_mismatches(
+ r#"
+#[rustc_legacy_const_generics(1, 3)]
+fn mixed<const N1: &'static str, const N2: bool>(
+ a: u8,
+ b: i8,
+) {}
+
+fn f() {
+ mixed(0, "", -1, true);
+ mixed::<"", true>(0, -1);
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_slice() {
+ check_types(
+ r#"
+fn main() {
+ let a;
+ //^usize
+ [a,] = [0usize];
+
+ let a;
+ //^usize
+ [a, ..] = [0usize; 5];
+
+ let a;
+ //^usize
+ [.., a] = [0usize; 5];
+
+ let a;
+ //^usize
+ [.., a, _] = [0usize; 5];
+
+ let a;
+ //^usize
+ [_, a, ..] = [0usize; 5];
+
+ let a: &mut i64 = &mut 0;
+ [*a, ..] = [1, 2, 3];
+
+ let a: usize;
+ let b;
+ //^usize
+ [a, _, b] = [3, 4, 5];
+ //^usize
+
+ let a;
+ //^i64
+ let b;
+ //^i64
+ [[a, ..], .., [.., b]] = [[1, 2], [3i64, 4], [5, 6], [7, 8]];
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_tuple() {
+ check_types(
+ r#"
+fn main() {
+ let a;
+ //^char
+ let b;
+ //^i64
+ (a, b) = ('c', 0i64);
+
+ let a;
+ //^char
+ (a, ..) = ('c', 0i64);
+
+ let a;
+ //^i64
+ (.., a) = ('c', 0i64);
+
+ let a;
+ //^char
+ let b;
+ //^i64
+ (a, .., b) = ('c', 0i64);
+
+ let a;
+ //^char
+ let b;
+ //^bool
+ (a, .., b) = ('c', 0i64, true);
+
+ let a;
+ //^i64
+ let b;
+ //^bool
+ (_, a, .., b) = ('c', 0i64, true);
+
+ let a;
+ //^i64
+ let b;
+ //^usize
+ (_, a, .., b) = ('c', 0i64, true, 0usize);
+
+ let mut a = 1;
+ //^^^^^i64
+ let mut b: i64 = 0;
+ (a, b) = (b, a);
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_tuple_struct() {
+ check_types(
+ r#"
+struct S2(char, i64);
+struct S3(char, i64, bool);
+struct S4(char, i64, bool usize);
+fn main() {
+ let a;
+ //^char
+ let b;
+ //^i64
+ S2(a, b) = S2('c', 0i64);
+
+ let a;
+ //^char
+ let b;
+ //^i64
+ S2(a, .., b) = S2('c', 0i64);
+
+ let a;
+ //^char
+ let b;
+ //^bool
+ S3(a, .., b) = S3('c', 0i64, true);
+
+ let a;
+ //^i64
+ let b;
+ //^bool
+ S3(_, a, .., b) = S3('c', 0i64, true);
+
+ let a;
+ //^i64
+ let b;
+ //^usize
+ S4(_, a, .., b) = S4('c', 0i64, true, 0usize);
+
+ struct Swap(i64, i64);
+
+ let mut a = 1;
+ //^^^^^i64
+ let mut b = 0;
+ //^^^^^i64
+ Swap(a, b) = Swap(b, a);
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_struct() {
+ check_types(
+ r#"
+struct S {
+ a: usize,
+ b: char,
+}
+struct T {
+ s: S,
+ t: i64,
+}
+
+fn main() {
+ let a;
+ //^usize
+ let c;
+ //^char
+ S { a, b: c } = S { a: 3, b: 'b' };
+
+ let a;
+ //^char
+ S { b: a, .. } = S { a: 3, b: 'b' };
+
+ let a;
+ //^char
+ S { b: a, _ } = S { a: 3, b: 'b' };
+
+ let a;
+ //^usize
+ let c;
+ //^char
+ let t;
+ //^i64
+ T { s: S { a, b: c }, t } = T { s: S { a: 3, b: 'b' }, t: 0 };
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_nested() {
+ check_types(
+ r#"
+struct S {
+ a: TS,
+ b: [char; 3],
+}
+struct TS(usize, i64);
+
+fn main() {
+ let a;
+ //^i32
+ let b;
+ //^bool
+ ([.., a], .., b, _) = ([0, 1, 2], true, 'c');
+
+ let a;
+ //^i32
+ let b;
+ //^i32
+ [(.., a, _), .., (b, ..)] = [(1, 2); 5];
+
+ let a;
+ //^usize
+ let b;
+ //^char
+ S { a: TS(a, ..), b: [_, b, ..] } = S { a: TS(0, 0), b: ['a'; 3] };
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_unit_struct() {
+ // taken from rustc; see https://github.com/rust-lang/rust/pull/95380
+ check_no_mismatches(
+ r#"
+struct S;
+enum E { V, }
+type A = E;
+
+fn main() {
+ let mut a;
+
+ (S, a) = (S, ());
+
+ (E::V, a) = (E::V, ());
+
+ (<E>::V, a) = (E::V, ());
+ (A::V, a) = (E::V, ());
+}
+
+impl S {
+ fn check() {
+ let a;
+ (Self, a) = (S, ());
+ }
+}
+
+impl E {
+ fn check() {
+ let a;
+ (Self::V, a) = (E::V, ());
+ }
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_no_default_binding_mode() {
+ check(
+ r#"
+struct S { a: usize }
+struct TS(usize);
+fn main() {
+ let x;
+ [x,] = &[1,];
+ //^^^^expected &[i32; 1], got [{unknown}; _]
+
+ // FIXME we only want the outermost error, but this matches the current
+ // behavior of slice patterns
+ let x;
+ [(x,),] = &[(1,),];
+ // ^^^^expected {unknown}, got ({unknown},)
+ //^^^^^^^expected &[(i32,); 1], got [{unknown}; _]
+
+ let x;
+ ((x,),) = &((1,),);
+ //^^^^^^^expected &((i32,),), got (({unknown},),)
+
+ let x;
+ (x,) = &(1,);
+ //^^^^expected &(i32,), got ({unknown},)
+
+ let x;
+ (S { a: x },) = &(S { a: 42 },);
+ //^^^^^^^^^^^^^expected &(S,), got (S,)
+
+ let x;
+ S { a: x } = &S { a: 42 };
+ //^^^^^^^^^^expected &S, got S
+
+ let x;
+ TS(x) = &TS(42);
+ //^^^^^expected &TS, got TS
+}
+ "#,
+ );
+}
+
+#[test]
+fn destructuring_assignment_type_mismatch_on_identifier() {
+ check(
+ r#"
+struct S { v: i64 }
+struct TS(i64);
+fn main() {
+ let mut a: usize = 0;
+ (a,) = (0i64,);
+ //^expected i64, got usize
+
+ let mut a: usize = 0;
+ [a,] = [0i64,];
+ //^expected i64, got usize
+
+ let mut a: usize = 0;
+ S { v: a } = S { v: 0 };
+ //^expected i64, got usize
+
+ let mut a: usize = 0;
+ TS(a) = TS(0);
+ //^expected i64, got usize
+}
+ "#,
+ );
+}
+
+#[test]
+fn nested_break() {
+ // Both `break`s target the same `loop`, so the inner `break 0` and the
+ // outer `break (break 0)` must unify on the loop's result type without
+ // producing a type mismatch.
+ check_no_mismatches(
+ r#"
+fn func() {
+ let int = loop {
+ break 0;
+ break (break 0);
+ };
+}
+ "#,
+ );
+}
--- /dev/null
- trait Trait {
+use cov_mark::check;
+use expect_test::expect;
+
+use super::{check, check_infer, check_infer_with_mismatches, check_no_mismatches, check_types};
+
+#[test]
+fn infer_await() {
+ check_types(
+ r#"
+//- minicore: future
+struct IntFuture;
+
+impl core::future::Future for IntFuture {
+ type Output = u64;
+}
+
+fn test() {
+ let r = IntFuture;
+ let v = r.await;
+ v;
+} //^ u64
+"#,
+ );
+}
+
+#[test]
+fn infer_async() {
+ check_types(
+ r#"
+//- minicore: future
+async fn foo() -> u64 { 128 }
+
+fn test() {
+ let r = foo();
+ let v = r.await;
+ v;
+} //^ u64
+"#,
+ );
+}
+
+#[test]
+fn infer_desugar_async() {
+ check_types(
+ r#"
+//- minicore: future, sized
+async fn foo() -> u64 { 128 }
+
+fn test() {
+ let r = foo();
+ r;
+} //^ impl Future<Output = u64>
+"#,
+ );
+}
+
+#[test]
+fn infer_async_block() {
+ check_types(
+ r#"
+//- minicore: future, option
+async fn test() {
+ let a = async { 42 };
+ a;
+// ^ impl Future<Output = i32>
+ let x = a.await;
+ x;
+// ^ i32
+ let b = async {}.await;
+ b;
+// ^ ()
+ let c = async {
+ let y = None;
+ y
+ // ^ Option<u64>
+ };
+ let _: Option<u64> = c.await;
+ c;
+// ^ impl Future<Output = Option<u64>>
+}
+"#,
+ );
+}
+
+#[test]
+fn auto_sized_async_block() {
+ check_no_mismatches(
+ r#"
+//- minicore: future, sized
+
+use core::future::Future;
+struct MyFut<Fut>(Fut);
+
+impl<Fut> Future for MyFut<Fut>
+where Fut: Future
+{
+ type Output = Fut::Output;
+}
+async fn reproduction() -> usize {
+ let f = async {999usize};
+ MyFut(f).await
+}
+ "#,
+ );
+ check_no_mismatches(
+ r#"
+//- minicore: future
+//#11815
+#[lang = "sized"]
+pub trait Sized {}
+
+#[lang = "unsize"]
+pub trait Unsize<T: ?Sized> {}
+
+#[lang = "coerce_unsized"]
+pub trait CoerceUnsized<T> {}
+
+pub unsafe trait Allocator {}
+
+pub struct Global;
+unsafe impl Allocator for Global {}
+
+#[lang = "owned_box"]
+#[fundamental]
+pub struct Box<T: ?Sized, A: Allocator = Global>;
+
+impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Box<U, A>> for Box<T, A> {}
+
+fn send() -> Box<dyn Future<Output = ()> + Send + 'static>{
+ box async move {}
+}
+
+fn not_send() -> Box<dyn Future<Output = ()> + 'static> {
+ box async move {}
+}
+ "#,
+ );
+}
+
+#[test]
+fn into_future_trait() {
+ check_types(
+ r#"
+//- minicore: future
+struct Futurable;
+impl core::future::IntoFuture for Futurable {
+ type Output = u64;
+ type IntoFuture = IntFuture;
+}
+
+struct IntFuture;
+impl core::future::Future for IntFuture {
+ type Output = u64;
+}
+
+fn test() {
+ let r = Futurable;
+ let v = r.await;
+ v;
+} //^ u64
+"#,
+ );
+}
+
+#[test]
+fn infer_try() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+fn test() {
+ let r: Result<i32, u64> = Result::Ok(1);
+ let v = r?;
+ v;
+} //^ i32
+
+//- /core.rs crate:core
+pub mod ops {
+ pub trait Try {
+ type Ok;
+ type Error;
+ }
+}
+
+pub mod result {
+ pub enum Result<O, E> {
+ Ok(O),
+ Err(E)
+ }
+
+ impl<O, E> crate::ops::Try for Result<O, E> {
+ type Ok = O;
+ type Error = E;
+ }
+}
+
+pub mod prelude {
+ pub mod rust_2018 {
+ pub use crate::{result::*, ops::*};
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_try_trait_v2() {
+ // `?` desugars via the v2 `Try`/`FromResidual` traits: `r?` yields
+ // `<Result<i32, u64> as Try>::Output`, i.e. `i32`.
+ // Fix: `Try` declares `type Output; type Residual;` — the impl previously
+ // defined a nonexistent `type Error`; it must be `Residual`, which the
+ // `FromResidual<Result<Infallible, E>>` impl below depends on.
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+fn test() {
+ let r: Result<i32, u64> = Result::Ok(1);
+ let v = r?;
+ v;
+} //^ i32
+
+//- /core.rs crate:core
+mod ops {
+ mod try_trait {
+ pub trait Try: FromResidual {
+ type Output;
+ type Residual;
+ }
+ pub trait FromResidual<R = <Self as Try>::Residual> {}
+ }
+
+ pub use self::try_trait::FromResidual;
+ pub use self::try_trait::Try;
+}
+
+mod convert {
+ pub trait From<T> {}
+ impl<T> From<T> for T {}
+}
+
+pub mod result {
+ use crate::convert::From;
+ use crate::ops::{Try, FromResidual};
+
+ pub enum Infallible {}
+ pub enum Result<O, E> {
+ Ok(O),
+ Err(E)
+ }
+
+ impl<O, E> Try for Result<O, E> {
+ type Output = O;
+ type Residual = Result<Infallible, E>;
+ }
+
+ impl<T, E, F: From<E>> FromResidual<Result<Infallible, E>> for Result<T, F> {}
+}
+
+pub mod prelude {
+ pub mod rust_2018 {
+ pub use crate::result::*;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_for_loop() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core,alloc
+#![no_std]
+use alloc::collections::Vec;
+
+fn test() {
+ let v = Vec::new();
+ v.push("foo");
+ for x in v {
+ x;
+ } //^ &str
+}
+
+//- /core.rs crate:core
+pub mod iter {
+ pub trait IntoIterator {
+ type Item;
+ type IntoIter: Iterator<Item = Self::Item>;
+ }
+ pub trait Iterator {
+ type Item;
+ }
+}
+pub mod prelude {
+ pub mod rust_2018 {
+ pub use crate::iter::*;
+ }
+}
+
+//- /alloc.rs crate:alloc deps:core
+#![no_std]
+pub mod collections {
+ pub struct Vec<T> {}
+ impl<T> Vec<T> {
+ pub fn new() -> Self { Vec {} }
+ pub fn push(&mut self, t: T) { }
+ }
+
+ impl<T> IntoIterator for Vec<T> {
+ type Item = T;
+ type IntoIter = IntoIter<T>;
+ }
+
+ struct IntoIter<T> {}
+ impl<T> Iterator for IntoIter<T> {
+ type Item = T;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_neg() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+struct Bar;
+struct Foo;
+
+impl std::ops::Neg for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = -a;
+ b;
+} //^ Foo
+
+//- /std.rs crate:std
+#[prelude_import] use ops::*;
+mod ops {
+ #[lang = "neg"]
+ pub trait Neg {
+ type Output;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_not() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:std
+struct Bar;
+struct Foo;
+
+impl std::ops::Not for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = !a;
+ b;
+} //^ Foo
+
+//- /std.rs crate:std
+#[prelude_import] use ops::*;
+mod ops {
+ #[lang = "not"]
+ pub trait Not {
+ type Output;
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_from_bound_1() {
+ check_types(
+ r#"
+trait Trait<T> {}
+struct S<T>(T);
+impl<U> Trait<U> for S<U> {}
+fn foo<T: Trait<u32>>(t: T) {}
+fn test() {
+ let s = S(unknown);
+ // ^^^^^^^ u32
+ foo(s);
+}"#,
+ );
+}
+
+#[test]
+fn infer_from_bound_2() {
+ check_types(
+ r#"
+trait Trait<T> {}
+struct S<T>(T);
+impl<U> Trait<U> for S<U> {}
+fn foo<U, T: Trait<U>>(t: T) -> U { loop {} }
+fn test() {
+ let s = S(unknown);
+ // ^^^^^^^ u32
+ let x: u32 = foo(s);
+}"#,
+ );
+}
+
+#[test]
+fn trait_default_method_self_bound_implements_trait() {
+ cov_mark::check!(trait_self_implements_self);
+ check(
+ r#"
+trait Trait {
+ fn foo(&self) -> i64;
+ fn bar(&self) -> () {
+ self.foo();
+ // ^^^^^^^^^^ type: i64
+ }
+}"#,
+ );
+}
+
+#[test]
+fn trait_default_method_self_bound_implements_super_trait() {
+ check(
+ r#"
+trait SuperTrait {
+ fn foo(&self) -> i64;
+}
+trait Trait: SuperTrait {
+ fn bar(&self) -> () {
+ self.foo();
+ // ^^^^^^^^^^ type: i64
+ }
+}"#,
+ );
+}
+
+#[test]
+fn infer_project_associated_type() {
+ check_types(
+ r#"
+trait Iterable {
+ type Item;
+}
+struct S;
+impl Iterable for S { type Item = u32; }
+fn test<T: Iterable>() {
+ let x: <S as Iterable>::Item = 1;
+ // ^ u32
+ let y: <T as Iterable>::Item = u;
+ // ^ Iterable::Item<T>
+ let z: T::Item = u;
+ // ^ Iterable::Item<T>
+ let a: <T>::Item = u;
+ // ^ Iterable::Item<T>
+}"#,
+ );
+}
+
+#[test]
+fn infer_return_associated_type() {
+ check_types(
+ r#"
+trait Iterable {
+ type Item;
+}
+struct S;
+impl Iterable for S { type Item = u32; }
+fn foo1<T: Iterable>(t: T) -> T::Item { loop {} }
+fn foo2<T: Iterable>(t: T) -> <T as Iterable>::Item { loop {} }
+fn foo3<T: Iterable>(t: T) -> <T>::Item { loop {} }
+fn test() {
+ foo1(S);
+ // ^^^^^^^ u32
+ foo2(S);
+ // ^^^^^^^ u32
+ foo3(S);
+ // ^^^^^^^ u32
+}"#,
+ );
+}
+
+#[test]
+fn associated_type_shorthand_from_method_bound() {
+ check_types(
+ r#"
+trait Iterable {
+ type Item;
+}
+struct S<T>;
+impl<T> S<T> {
+ fn foo(self) -> T::Item where T: Iterable { loop {} }
+}
+fn test<T: Iterable>() {
+ let s: S<T>;
+ s.foo();
+ // ^^^^^^^ Iterable::Item<T>
+}"#,
+ );
+}
+
+#[test]
+fn associated_type_shorthand_from_self_issue_12484() {
+ check_types(
+ r#"
+trait Bar {
+ type A;
+}
+trait Foo {
+ type A;
+ fn test(a: Self::A, _: impl Bar) {
+ a;
+ //^ Foo::A<Self>
+ }
+}"#,
+ );
+}
+
+#[test]
+fn infer_associated_type_bound() {
+ check_types(
+ r#"
+trait Iterable {
+ type Item;
+}
+fn test<T: Iterable<Item=u32>>() {
+ let y: T::Item = unknown;
+ // ^^^^^^^ u32
+}"#,
+ );
+}
+
+#[test]
+fn infer_const_body() {
+ // FIXME make check_types work with other bodies
+ check_infer(
+ r#"
+const A: u32 = 1 + 1;
+static B: u64 = { let x = 1; x };
+"#,
+ expect![[r#"
+ 15..16 '1': u32
+ 15..20 '1 + 1': u32
+ 19..20 '1': u32
+ 38..54 '{ let ...1; x }': u64
+ 44..45 'x': u64
+ 48..49 '1': u64
+ 51..52 'x': u64
+ "#]],
+ );
+}
+
+#[test]
+fn tuple_struct_fields() {
+ check_infer(
+ r#"
+struct S(i32, u64);
+fn test() -> u64 {
+ let a = S(4, 6);
+ let b = a.0;
+ a.1
+}"#,
+ expect![[r#"
+ 37..86 '{ ... a.1 }': u64
+ 47..48 'a': S
+ 51..52 'S': S(i32, u64) -> S
+ 51..58 'S(4, 6)': S
+ 53..54 '4': i32
+ 56..57 '6': u64
+ 68..69 'b': i32
+ 72..73 'a': S
+ 72..75 'a.0': i32
+ 81..82 'a': S
+ 81..84 'a.1': u64
+ "#]],
+ );
+}
+
+#[test]
+fn tuple_struct_with_fn() {
+ check_infer(
+ r#"
+struct S(fn(u32) -> u64);
+fn test() -> u64 {
+ let a = S(|i| 2*i);
+ let b = a.0(4);
+ a.0(2)
+}"#,
+ expect![[r#"
+ 43..101 '{ ...0(2) }': u64
+ 53..54 'a': S
+ 57..58 'S': S(fn(u32) -> u64) -> S
+ 57..67 'S(|i| 2*i)': S
+ 59..66 '|i| 2*i': |u32| -> u64
+ 60..61 'i': u32
+ 63..64 '2': u32
+ 63..66 '2*i': u32
+ 65..66 'i': u32
+ 77..78 'b': u64
+ 81..82 'a': S
+ 81..84 'a.0': fn(u32) -> u64
+ 81..87 'a.0(4)': u64
+ 85..86 '4': u32
+ 93..94 'a': S
+ 93..96 'a.0': fn(u32) -> u64
+ 93..99 'a.0(2)': u64
+ 97..98 '2': u32
+ "#]],
+ );
+}
+
+#[test]
+fn indexing_arrays() {
+ check_infer(
+ "fn main() { &mut [9][2]; }",
+ expect![[r#"
+ 10..26 '{ &mut...[2]; }': ()
+ 12..23 '&mut [9][2]': &mut {unknown}
+ 17..20 '[9]': [i32; 1]
+ 17..23 '[9][2]': {unknown}
+ 18..19 '9': i32
+ 21..22 '2': i32
+ "#]],
+ )
+}
+
+#[test]
+fn infer_ops_index() {
+ check_types(
+ r#"
+//- minicore: index
+struct Bar;
+struct Foo;
+
+impl core::ops::Index<u32> for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = a[1u32];
+ b;
+} //^ Foo
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_index_field() {
+ // Field access through an overloaded `Index`: `a[1u32]` resolves to
+ // `<Bar as Index<u32>>::Output = Foo`, so `.field` is `u32`.
+ // Fix: Rust struct fields are comma-separated; `field: u32;` was a typo.
+ check_types(
+ r#"
+//- minicore: index
+struct Bar;
+struct Foo {
+ field: u32,
+}
+
+impl core::ops::Index<u32> for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = a[1u32].field;
+ b;
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_index_field_autoderef() {
+ // Same as `infer_ops_index_field`, but the field is reached through an
+ // extra `&`/autoderef step: `(&a[1u32]).field` is still `u32`.
+ // Fix: Rust struct fields are comma-separated; `field: u32;` was a typo.
+ check_types(
+ r#"
+//- minicore: index
+struct Bar;
+struct Foo {
+ field: u32,
+}
+
+impl core::ops::Index<u32> for Bar {
+ type Output = Foo;
+}
+
+fn test() {
+ let a = Bar;
+ let b = (&a[1u32]).field;
+ b;
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_index_int() {
+ check_types(
+ r#"
+//- minicore: index
+struct Bar;
+struct Foo;
+
+impl core::ops::Index<u32> for Bar {
+ type Output = Foo;
+}
+
+struct Range;
+impl core::ops::Index<Range> for Bar {
+ type Output = Bar;
+}
+
+fn test() {
+ let a = Bar;
+ let b = a[1];
+ b;
+ //^ Foo
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_ops_index_autoderef() {
+ check_types(
+ r#"
+//- minicore: index, slice
+fn test() {
+ let a = &[1u32, 2, 3];
+ let b = a[1];
+ b;
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn deref_trait() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Arc<T: ?Sized>;
+impl<T: ?Sized> core::ops::Deref for Arc<T> {
+ type Target = T;
+}
+
+struct S;
+impl S {
+ fn foo(&self) -> u128 { 0 }
+}
+
+fn test(s: Arc<S>) {
+ (*s, s.foo());
+} //^^^^^^^^^^^^^ (S, u128)
+"#,
+ );
+}
+
+#[test]
+fn deref_trait_with_inference_var() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Arc<T: ?Sized>;
+fn new_arc<T: ?Sized>() -> Arc<T> { Arc }
+impl<T: ?Sized> core::ops::Deref for Arc<T> {
+ type Target = T;
+}
+
+struct S;
+fn foo(a: Arc<S>) {}
+
+fn test() {
+ let a = new_arc();
+ let b = *a;
+ //^^ S
+ foo(a);
+}
+"#,
+ );
+}
+
+#[test]
+fn deref_trait_infinite_recursion() {
+ check_types(
+ r#"
+//- minicore: deref
+struct S;
+
+impl core::ops::Deref for S {
+ type Target = S;
+}
+
+fn test(s: S) {
+ s.foo();
+} //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn deref_trait_with_question_mark_size() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Arc<T: ?Sized>;
+impl<T: ?Sized> core::ops::Deref for Arc<T> {
+ type Target = T;
+}
+
+struct S;
+impl S {
+ fn foo(&self) -> u128 { 0 }
+}
+
+fn test(s: Arc<S>) {
+ (*s, s.foo());
+} //^^^^^^^^^^^^^ (S, u128)
+"#,
+ );
+}
+
+#[test]
+fn deref_trait_with_implicit_sized_requirement_on_inference_var() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Foo<T>;
+impl<T> core::ops::Deref for Foo<T> {
+ type Target = ();
+}
+fn test() {
+ let foo = Foo;
+ *foo;
+ //^^^^ ()
+ let _: Foo<u8> = foo;
+}
+"#,
+ )
+}
+
+#[test]
+fn obligation_from_function_clause() {
+ check_types(
+ r#"
+struct S;
+
+trait Trait<T> {}
+impl Trait<u32> for S {}
+
+fn foo<T: Trait<U>, U>(t: T) -> U { loop {} }
+
+fn test(s: S) {
+ foo(s);
+} //^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn obligation_from_method_clause() {
+ check_types(
+ r#"
+//- /main.rs
+struct S;
+
+trait Trait<T> {}
+impl Trait<isize> for S {}
+
+struct O;
+impl O {
+ fn foo<T: Trait<U>, U>(&self, t: T) -> U { loop {} }
+}
+
+fn test() {
+ O.foo(S);
+} //^^^^^^^^ isize
+"#,
+ );
+}
+
+#[test]
+fn obligation_from_self_method_clause() {
+ check_types(
+ r#"
+struct S;
+
+trait Trait<T> {}
+impl Trait<i64> for S {}
+
+impl S {
+ fn foo<U>(&self) -> U where Self: Trait<U> { loop {} }
+}
+
+fn test() {
+ S.foo();
+} //^^^^^^^ i64
+"#,
+ );
+}
+
+#[test]
+fn obligation_from_impl_clause() {
+ check_types(
+ r#"
+struct S;
+
+trait Trait<T> {}
+impl Trait<&str> for S {}
+
+struct O<T>;
+impl<U, T: Trait<U>> O<T> {
+ fn foo(&self) -> U { loop {} }
+}
+
+fn test(o: O<S>) {
+ o.foo();
+} //^^^^^^^ &str
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_1() {
+ check_types(
+ r#"
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Clone for S {}
+impl<T> Trait for T where T: Clone {}
+fn test<T: Clone>(t: T) { t.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_1_not_met() {
+ check_types(
+ r#"
+//- /main.rs
+trait Clone {}
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Clone for S {}
+impl<T> Trait for T where T: Clone {}
+fn test<T>(t: T) { t.foo(); }
+ //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_2() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Trait for S {}
+fn test<T: Trait>(t: T) { t.foo(); }
+ //^^^^^^^ u128
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_2_not_met() {
+ check_types(
+ r#"
+trait Trait { fn foo(self) -> u128; }
+struct S;
+impl Trait for S {}
+fn test<T>(t: T) { t.foo(); }
+ //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn generic_param_env_deref() {
+ check_types(
+ r#"
+//- minicore: deref
+trait Trait {}
+impl<T> core::ops::Deref for T where T: Trait {
+ type Target = i128;
+}
+fn test<T: Trait>(t: T) { *t; }
+ //^^ i128
+"#,
+ );
+}
+
+#[test]
+fn associated_type_placeholder() {
+ // inside the generic function, the associated type gets normalized to a placeholder `ApplL::Out<T>` [https://rust-lang.github.io/rustc-guide/traits/associated-types.html#placeholder-associated-types].
+ check_types(
+ r#"
+pub trait ApplyL {
+ type Out;
+}
+
+pub struct RefMutL<T>;
+
+impl<T> ApplyL for RefMutL<T> {
+ type Out = <T as ApplyL>::Out;
+}
+
+fn test<T: ApplyL>() {
+ let y: <RefMutL<T> as ApplyL>::Out = no_matter;
+ y;
+} //^ ApplyL::Out<T>
+"#,
+ );
+}
+
+#[test]
+fn associated_type_placeholder_2() {
+ check_types(
+ r#"
+pub trait ApplyL {
+ type Out;
+}
+fn foo<T: ApplyL>(t: T) -> <T as ApplyL>::Out;
+
+fn test<T: ApplyL>(t: T) {
+ let y = foo(t);
+ y;
+} //^ ApplyL::Out<T>
+"#,
+ );
+}
+
+#[test]
+fn argument_impl_trait() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait<T> {
+ fn foo(&self) -> T;
+ fn foo2(&self) -> i64;
+}
+fn bar(x: impl Trait<u16>) {}
+struct S<T>(T);
+impl<T> Trait<T> for S<T> {}
+
+fn test(x: impl Trait<u64>, y: &impl Trait<u32>) {
+ x;
+ y;
+ let z = S(1);
+ bar(z);
+ x.foo();
+ y.foo();
+ z.foo();
+ x.foo2();
+ y.foo2();
+ z.foo2();
+}"#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 54..58 'self': &Self
+ 77..78 'x': impl Trait<u16>
+ 97..99 '{}': ()
+ 154..155 'x': impl Trait<u64>
+ 174..175 'y': &impl Trait<u32>
+ 195..323 '{ ...2(); }': ()
+ 201..202 'x': impl Trait<u64>
+ 208..209 'y': &impl Trait<u32>
+ 219..220 'z': S<u16>
+ 223..224 'S': S<u16>(u16) -> S<u16>
+ 223..227 'S(1)': S<u16>
+ 225..226 '1': u16
+ 233..236 'bar': fn bar(S<u16>)
+ 233..239 'bar(z)': ()
+ 237..238 'z': S<u16>
+ 245..246 'x': impl Trait<u64>
+ 245..252 'x.foo()': u64
+ 258..259 'y': &impl Trait<u32>
+ 258..265 'y.foo()': u32
+ 271..272 'z': S<u16>
+ 271..278 'z.foo()': u16
+ 284..285 'x': impl Trait<u64>
+ 284..292 'x.foo2()': i64
+ 298..299 'y': &impl Trait<u32>
+ 298..306 'y.foo2()': i64
+ 312..313 'z': S<u16>
+ 312..320 'z.foo2()': i64
+ "#]],
+ );
+}
+
+#[test]
+fn argument_impl_trait_type_args_1() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait {}
+trait Foo {
+ // this function has an implicit Self param, an explicit type param,
+ // and an implicit impl Trait param!
+ fn bar<T>(x: impl Trait) -> T { loop {} }
+}
+fn foo<T>(x: impl Trait) -> T { loop {} }
+struct S;
+impl Trait for S {}
+struct F;
+impl Foo for F {}
+
+fn test() {
+ Foo::bar(S);
+ <F as Foo>::bar(S);
+ F::bar(S);
+ Foo::bar::<u32>(S);
+ <F as Foo>::bar::<u32>(S);
+
+ foo(S);
+ foo::<u32>(S);
+ foo::<u32, i32>(S); // we should ignore the extraneous i32
+}"#,
+ expect![[r#"
+ 155..156 'x': impl Trait
+ 175..186 '{ loop {} }': T
+ 177..184 'loop {}': !
+ 182..184 '{}': ()
+ 199..200 'x': impl Trait
+ 219..230 '{ loop {} }': T
+ 221..228 'loop {}': !
+ 226..228 '{}': ()
+ 300..509 '{ ... i32 }': ()
+ 306..314 'Foo::bar': fn bar<{unknown}, {unknown}>(S) -> {unknown}
+ 306..317 'Foo::bar(S)': {unknown}
+ 315..316 'S': S
+ 323..338 '<F as Foo>::bar': fn bar<F, {unknown}>(S) -> {unknown}
+ 323..341 '<F as ...bar(S)': {unknown}
+ 339..340 'S': S
+ 347..353 'F::bar': fn bar<F, {unknown}>(S) -> {unknown}
+ 347..356 'F::bar(S)': {unknown}
+ 354..355 'S': S
+ 362..377 'Foo::bar::<u32>': fn bar<{unknown}, u32>(S) -> u32
+ 362..380 'Foo::b...32>(S)': u32
+ 378..379 'S': S
+ 386..408 '<F as ...:<u32>': fn bar<F, u32>(S) -> u32
+ 386..411 '<F as ...32>(S)': u32
+ 409..410 'S': S
+ 418..421 'foo': fn foo<{unknown}>(S) -> {unknown}
+ 418..424 'foo(S)': {unknown}
+ 422..423 'S': S
+ 430..440 'foo::<u32>': fn foo<u32>(S) -> u32
+ 430..443 'foo::<u32>(S)': u32
+ 441..442 'S': S
+ 449..464 'foo::<u32, i32>': fn foo<u32>(S) -> u32
+ 449..467 'foo::<...32>(S)': u32
+ 465..466 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn argument_impl_trait_type_args_2() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait {}
+struct S;
+impl Trait for S {}
+struct F<T>;
+impl<T> F<T> {
+ fn foo<U>(self, x: impl Trait) -> (T, U) { loop {} }
+}
+
+fn test() {
+ F.foo(S);
+ F::<u32>.foo(S);
+ F::<u32>.foo::<i32>(S);
+ F::<u32>.foo::<i32, u32>(S); // extraneous argument should be ignored
+}"#,
+ expect![[r#"
+ 87..91 'self': F<T>
+ 93..94 'x': impl Trait
+ 118..129 '{ loop {} }': (T, U)
+ 120..127 'loop {}': !
+ 125..127 '{}': ()
+ 143..283 '{ ...ored }': ()
+ 149..150 'F': F<{unknown}>
+ 149..157 'F.foo(S)': ({unknown}, {unknown})
+ 155..156 'S': S
+ 163..171 'F::<u32>': F<u32>
+ 163..178 'F::<u32>.foo(S)': (u32, {unknown})
+ 176..177 'S': S
+ 184..192 'F::<u32>': F<u32>
+ 184..206 'F::<u3...32>(S)': (u32, i32)
+ 204..205 'S': S
+ 212..220 'F::<u32>': F<u32>
+ 212..239 'F::<u3...32>(S)': (u32, i32)
+ 237..238 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn argument_impl_trait_to_fn_pointer() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn foo(x: impl Trait) { loop {} }
+struct S;
+impl Trait for S {}
+
+fn test() {
+ let f: fn(S) -> () = foo;
+}"#,
+ expect![[r#"
+ 22..23 'x': impl Trait
+ 37..48 '{ loop {} }': ()
+ 39..46 'loop {}': !
+ 44..46 '{}': ()
+ 90..123 '{ ...foo; }': ()
+ 100..101 'f': fn(S)
+ 117..120 'foo': fn foo(S)
+ "#]],
+ );
+}
+
+#[test]
+fn impl_trait() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait<T> {
+ fn foo(&self) -> T;
+ fn foo2(&self) -> i64;
+}
+fn bar() -> impl Trait<u64> {}
+
+fn test(x: impl Trait<u64>, y: &impl Trait<u64>) {
+ x;
+ y;
+ let z = bar();
+ x.foo();
+ y.foo();
+ z.foo();
+ x.foo2();
+ y.foo2();
+ z.foo2();
+}"#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 54..58 'self': &Self
+ 98..100 '{}': ()
+ 110..111 'x': impl Trait<u64>
+ 130..131 'y': &impl Trait<u64>
+ 151..268 '{ ...2(); }': ()
+ 157..158 'x': impl Trait<u64>
+ 164..165 'y': &impl Trait<u64>
+ 175..176 'z': impl Trait<u64>
+ 179..182 'bar': fn bar() -> impl Trait<u64>
+ 179..184 'bar()': impl Trait<u64>
+ 190..191 'x': impl Trait<u64>
+ 190..197 'x.foo()': u64
+ 203..204 'y': &impl Trait<u64>
+ 203..210 'y.foo()': u64
+ 216..217 'z': impl Trait<u64>
+ 216..223 'z.foo()': u64
+ 229..230 'x': impl Trait<u64>
+ 229..237 'x.foo2()': i64
+ 243..244 'y': &impl Trait<u64>
+ 243..251 'y.foo2()': i64
+ 257..258 'z': impl Trait<u64>
+ 257..265 'z.foo2()': i64
+ "#]],
+ );
+}
+
+#[test]
+fn simple_return_pos_impl_trait() {
+ cov_mark::check!(lower_rpit);
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait<T> {
+ fn foo(&self) -> T;
+}
+fn bar() -> impl Trait<u64> { loop {} }
+
+fn test() {
+ let a = bar();
+ a.foo();
+}"#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 71..82 '{ loop {} }': !
+ 73..80 'loop {}': !
+ 78..80 '{}': ()
+ 94..129 '{ ...o(); }': ()
+ 104..105 'a': impl Trait<u64>
+ 108..111 'bar': fn bar() -> impl Trait<u64>
+ 108..113 'bar()': impl Trait<u64>
+ 119..120 'a': impl Trait<u64>
+ 119..126 'a.foo()': u64
+ "#]],
+ );
+}
+
+#[test]
+fn more_return_pos_impl_trait() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Iterator {
+ type Item;
+ fn next(&mut self) -> Self::Item;
+}
+trait Trait<T> {
+ fn foo(&self) -> T;
+}
+fn bar() -> (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>) { loop {} }
+fn baz<T>(t: T) -> (impl Iterator<Item = impl Trait<T>>, impl Trait<T>) { loop {} }
+
+fn test() {
+ let (a, b) = bar();
+ a.next().foo();
+ b.foo();
+ let (c, d) = baz(1u128);
+ c.next().foo();
+ d.foo();
+}"#,
+ expect![[r#"
+ 49..53 'self': &mut Self
+ 101..105 'self': &Self
+ 184..195 '{ loop {} }': ({unknown}, {unknown})
+ 186..193 'loop {}': !
+ 191..193 '{}': ()
+ 206..207 't': T
+ 268..279 '{ loop {} }': ({unknown}, {unknown})
+ 270..277 'loop {}': !
+ 275..277 '{}': ()
+ 291..413 '{ ...o(); }': ()
+ 301..307 '(a, b)': (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>)
+ 302..303 'a': impl Iterator<Item = impl Trait<u32>>
+ 305..306 'b': impl Trait<u64>
+ 310..313 'bar': fn bar() -> (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>)
+ 310..315 'bar()': (impl Iterator<Item = impl Trait<u32>>, impl Trait<u64>)
+ 321..322 'a': impl Iterator<Item = impl Trait<u32>>
+ 321..329 'a.next()': impl Trait<u32>
+ 321..335 'a.next().foo()': u32
+ 341..342 'b': impl Trait<u64>
+ 341..348 'b.foo()': u64
+ 358..364 '(c, d)': (impl Iterator<Item = impl Trait<u128>>, impl Trait<u128>)
+ 359..360 'c': impl Iterator<Item = impl Trait<u128>>
+ 362..363 'd': impl Trait<u128>
+ 367..370 'baz': fn baz<u128>(u128) -> (impl Iterator<Item = impl Trait<u128>>, impl Trait<u128>)
+ 367..377 'baz(1u128)': (impl Iterator<Item = impl Trait<u128>>, impl Trait<u128>)
+ 371..376 '1u128': u128
+ 383..384 'c': impl Iterator<Item = impl Trait<u128>>
+ 383..391 'c.next()': impl Trait<u128>
+ 383..397 'c.next().foo()': u128
+ 403..404 'd': impl Trait<u128>
+ 403..410 'd.foo()': u128
+ "#]],
+ );
+}
+
+#[test]
+fn infer_from_return_pos_impl_trait() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn, sized
+trait Trait<T> {}
+struct Bar<T>(T);
+impl<T> Trait<T> for Bar<T> {}
+fn foo<const C: u8, T>() -> (impl FnOnce(&str, T), impl Trait<u8>) {
+ (|input, t| {}, Bar(C))
+}
+"#,
+ expect![[r#"
+ 134..165 '{ ...(C)) }': (|&str, T| -> (), Bar<u8>)
+ 140..163 '(|inpu...ar(C))': (|&str, T| -> (), Bar<u8>)
+ 141..154 '|input, t| {}': |&str, T| -> ()
+ 142..147 'input': &str
+ 149..150 't': T
+ 152..154 '{}': ()
+ 156..159 'Bar': Bar<u8>(u8) -> Bar<u8>
+ 156..162 'Bar(C)': Bar<u8>
+ 160..161 'C': u8
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_trait() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait<T> {
+ fn foo(&self) -> T;
+ fn foo2(&self) -> i64;
+}
+fn bar() -> dyn Trait<u64> {}
+
+fn test(x: dyn Trait<u64>, y: &dyn Trait<u64>) {
+ x;
+ y;
+ let z = bar();
+ x.foo();
+ y.foo();
+ z.foo();
+ x.foo2();
+ y.foo2();
+ z.foo2();
+}"#,
+ expect![[r#"
+ 29..33 'self': &Self
+ 54..58 'self': &Self
+ 97..99 '{}': dyn Trait<u64>
+ 109..110 'x': dyn Trait<u64>
+ 128..129 'y': &dyn Trait<u64>
+ 148..265 '{ ...2(); }': ()
+ 154..155 'x': dyn Trait<u64>
+ 161..162 'y': &dyn Trait<u64>
+ 172..173 'z': dyn Trait<u64>
+ 176..179 'bar': fn bar() -> dyn Trait<u64>
+ 176..181 'bar()': dyn Trait<u64>
+ 187..188 'x': dyn Trait<u64>
+ 187..194 'x.foo()': u64
+ 200..201 'y': &dyn Trait<u64>
+ 200..207 'y.foo()': u64
+ 213..214 'z': dyn Trait<u64>
+ 213..220 'z.foo()': u64
+ 226..227 'x': dyn Trait<u64>
+ 226..234 'x.foo2()': i64
+ 240..241 'y': &dyn Trait<u64>
+ 240..248 'y.foo2()': i64
+ 254..255 'z': dyn Trait<u64>
+ 254..262 'z.foo2()': i64
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_trait_in_impl() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait<T, U> {
+ fn foo(&self) -> (T, U);
+}
+struct S<T, U> {}
+impl<T, U> S<T, U> {
+ fn bar(&self) -> &dyn Trait<T, U> { loop {} }
+}
+trait Trait2<T, U> {
+ fn baz(&self) -> (T, U);
+}
+impl<T, U> Trait2<T, U> for dyn Trait<T, U> { }
+
+fn test(s: S<u32, i32>) {
+ s.bar().baz();
+}"#,
+ expect![[r#"
+ 32..36 'self': &Self
+ 102..106 'self': &S<T, U>
+ 128..139 '{ loop {} }': &dyn Trait<T, U>
+ 130..137 'loop {}': !
+ 135..137 '{}': ()
+ 175..179 'self': &Self
+ 251..252 's': S<u32, i32>
+ 267..289 '{ ...z(); }': ()
+ 273..274 's': S<u32, i32>
+ 273..280 's.bar()': &dyn Trait<u32, i32>
+ 273..286 's.bar().baz()': (u32, i32)
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_trait_bare() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait {
+ fn foo(&self) -> u64;
+}
+fn bar() -> Trait {}
+
+fn test(x: Trait, y: &Trait) -> u64 {
+ x;
+ y;
+ let z = bar();
+ x.foo();
+ y.foo();
+ z.foo();
+}"#,
+ expect![[r#"
+ 26..30 'self': &Self
+ 60..62 '{}': dyn Trait
+ 72..73 'x': dyn Trait
+ 82..83 'y': &dyn Trait
+ 100..175 '{ ...o(); }': u64
+ 106..107 'x': dyn Trait
+ 113..114 'y': &dyn Trait
+ 124..125 'z': dyn Trait
+ 128..131 'bar': fn bar() -> dyn Trait
+ 128..133 'bar()': dyn Trait
+ 139..140 'x': dyn Trait
+ 139..146 'x.foo()': u64
+ 152..153 'y': &dyn Trait
+ 152..159 'y.foo()': u64
+ 165..166 'z': dyn Trait
+ 165..172 'z.foo()': u64
+ "#]],
+ );
+
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn, coerce_unsized
+struct S;
+impl S {
+ fn foo(&self) {}
+}
+fn f(_: &Fn(S)) {}
+fn main() {
+ f(&|number| number.foo());
+}
+ "#,
+ expect![[r#"
+ 31..35 'self': &S
+ 37..39 '{}': ()
+ 47..48 '_': &dyn Fn(S)
+ 58..60 '{}': ()
+ 71..105 '{ ...()); }': ()
+ 77..78 'f': fn f(&dyn Fn(S))
+ 77..102 'f(&|nu...foo())': ()
+ 79..101 '&|numb....foo()': &|S| -> ()
+ 80..101 '|numbe....foo()': |S| -> ()
+ 81..87 'number': S
+ 89..95 'number': S
+ 89..101 'number.foo()': ()
+ "#]],
+ )
+}
+
+#[test]
+fn weird_bounds() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn test(
+ a: impl Trait + 'lifetime,
+ b: impl 'lifetime,
+ c: impl (Trait),
+ d: impl ('lifetime),
+ e: impl ?Sized,
+ f: impl Trait + ?Sized
+) {}
+"#,
+ expect![[r#"
+ 28..29 'a': impl Trait
+ 59..60 'b': impl Sized
+ 82..83 'c': impl Trait
+ 103..104 'd': impl Sized
+ 128..129 'e': impl ?Sized
+ 148..149 'f': impl Trait + ?Sized
+ 173..175 '{}': ()
+ "#]],
+ );
+}
+
+#[test]
+fn error_bound_chalk() {
+ check_types(
+ r#"
+trait Trait {
+ fn foo(&self) -> u32 { 0 }
+}
+
+fn test(x: (impl Trait + UnknownTrait)) {
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn assoc_type_bindings() {
+ check_infer(
+ r#"
+//- minicore: sized
+trait Trait {
+ type Type;
+}
+
+fn get<T: Trait>(t: T) -> <T as Trait>::Type {}
+fn get2<U, T: Trait<Type = U>>(t: T) -> U {}
+fn set<T: Trait<Type = u64>>(t: T) -> T {t}
+
+struct S<T>;
+impl<T> Trait for S<T> { type Type = T; }
+
+fn test<T: Trait<Type = u32>>(x: T, y: impl Trait<Type = i64>) {
+ get(x);
+ get2(x);
+ get(y);
+ get2(y);
+ get(set(S));
+ get2(set(S));
+ get2(S::<str>);
+}"#,
+ expect![[r#"
+ 49..50 't': T
+ 77..79 '{}': Trait::Type<T>
+ 111..112 't': T
+ 122..124 '{}': U
+ 154..155 't': T
+ 165..168 '{t}': T
+ 166..167 't': T
+ 256..257 'x': T
+ 262..263 'y': impl Trait<Type = i64>
+ 289..397 '{ ...r>); }': ()
+ 295..298 'get': fn get<T>(T) -> <T as Trait>::Type
+ 295..301 'get(x)': u32
+ 299..300 'x': T
+ 307..311 'get2': fn get2<u32, T>(T) -> u32
+ 307..314 'get2(x)': u32
+ 312..313 'x': T
+ 320..323 'get': fn get<impl Trait<Type = i64>>(impl Trait<Type = i64>) -> <impl Trait<Type = i64> as Trait>::Type
+ 320..326 'get(y)': i64
+ 324..325 'y': impl Trait<Type = i64>
+ 332..336 'get2': fn get2<i64, impl Trait<Type = i64>>(impl Trait<Type = i64>) -> i64
+ 332..339 'get2(y)': i64
+ 337..338 'y': impl Trait<Type = i64>
+ 345..348 'get': fn get<S<u64>>(S<u64>) -> <S<u64> as Trait>::Type
+ 345..356 'get(set(S))': u64
+ 349..352 'set': fn set<S<u64>>(S<u64>) -> S<u64>
+ 349..355 'set(S)': S<u64>
+ 353..354 'S': S<u64>
+ 362..366 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64
+ 362..374 'get2(set(S))': u64
+ 367..370 'set': fn set<S<u64>>(S<u64>) -> S<u64>
+ 367..373 'set(S)': S<u64>
+ 371..372 'S': S<u64>
+ 380..384 'get2': fn get2<str, S<str>>(S<str>) -> str
+ 380..394 'get2(S::<str>)': str
+ 385..393 'S::<str>': S<str>
+ "#]],
+ );
+}
+
+#[test]
+fn impl_trait_assoc_binding_projection_bug() {
+ check_types(
+ r#"
+//- minicore: iterator
+pub trait Language {
+ type Kind;
+}
+pub enum RustLanguage {}
+impl Language for RustLanguage {
+ type Kind = SyntaxKind;
+}
+struct SyntaxNode<L> {}
+fn foo() -> impl Iterator<Item = SyntaxNode<RustLanguage>> {}
+
+trait Clone {
+ fn clone(&self) -> Self;
+}
+
+fn api_walkthrough() {
+ for node in foo() {
+ node.clone();
+ } //^^^^^^^^^^^^ {unknown}
+}
+"#,
+ );
+}
+
+#[test]
+fn projection_eq_within_chalk() {
+ check_infer(
+ r#"
+trait Trait1 {
+ type Type;
+}
+trait Trait2<T> {
+ fn foo(self) -> T;
+}
+impl<T, U> Trait2<T> for U where U: Trait1<Type = T> {}
+
+fn test<T: Trait1<Type = u32>>(x: T) {
+ x.foo();
+}"#,
+ expect![[r#"
+ 61..65 'self': Self
+ 163..164 'x': T
+ 169..185 '{ ...o(); }': ()
+ 175..176 'x': T
+ 175..182 'x.foo()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn where_clause_trait_in_scope_for_method_resolution() {
+ check_types(
+ r#"
+mod foo {
- trait SuperTrait {
++ pub trait Trait {
+ fn foo(&self) -> u32 { 0 }
+ }
+}
+
+fn test<T: foo::Trait>(x: T) {
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn super_trait_method_resolution() {
+ check_infer(
+ r#"
+mod foo {
- 49..53 'self': &Self
- 62..64 '{}': u32
- 181..182 'x': T
- 187..188 'y': U
- 193..222 '{ ...o(); }': ()
- 199..200 'x': T
- 199..206 'x.foo()': u32
- 212..213 'y': U
- 212..219 'y.foo()': u32
++ pub trait SuperTrait {
+ fn foo(&self) -> u32 {}
+ }
+}
+trait Trait1: foo::SuperTrait {}
+trait Trait2 where Self: foo::SuperTrait {}
+
+fn test<T: Trait1, U: Trait2>(x: T, y: U) {
+ x.foo();
+ y.foo();
+}"#,
+ expect![[r#"
- trait SuperTrait {
++ 53..57 'self': &Self
++ 66..68 '{}': u32
++ 185..186 'x': T
++ 191..192 'y': U
++ 197..226 '{ ...o(); }': ()
++ 203..204 'x': T
++ 203..210 'x.foo()': u32
++ 216..217 'y': U
++ 216..223 'y.foo()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn super_trait_impl_trait_method_resolution() {
+ check_infer(
+ r#"
+//- minicore: sized
+mod foo {
- 49..53 'self': &Self
- 62..64 '{}': u32
- 115..116 'x': &impl Trait1
- 132..148 '{ ...o(); }': ()
- 138..139 'x': &impl Trait1
- 138..145 'x.foo()': u32
++ pub trait SuperTrait {
+ fn foo(&self) -> u32 {}
+ }
+}
+trait Trait1: foo::SuperTrait {}
+
+fn test(x: &impl Trait1) {
+ x.foo();
+}"#,
+ expect![[r#"
++ 53..57 'self': &Self
++ 66..68 '{}': u32
++ 119..120 'x': &impl Trait1
++ 136..152 '{ ...o(); }': ()
++ 142..143 'x': &impl Trait1
++ 142..149 'x.foo()': u32
+ "#]],
+ );
+}
+
+#[test]
+fn super_trait_cycle() {
+ // This just needs to not crash
+ check_infer(
+ r#"
+ trait A: B {}
+ trait B: A {}
+
+ fn test<T: A>(x: T) {
+ x.foo();
+ }
+ "#,
+ expect![[r#"
+ 43..44 'x': T
+ 49..65 '{ ...o(); }': ()
+ 55..56 'x': T
+ 55..62 'x.foo()': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn super_trait_assoc_type_bounds() {
+ check_infer(
+ r#"
+trait SuperTrait { type Type; }
+trait Trait where Self: SuperTrait {}
+
+fn get2<U, T: Trait<Type = U>>(t: T) -> U {}
+fn set<T: Trait<Type = u64>>(t: T) -> T {t}
+
+struct S<T>;
+impl<T> SuperTrait for S<T> { type Type = T; }
+impl<T> Trait for S<T> {}
+
+fn test() {
+ get2(set(S));
+}"#,
+ expect![[r#"
+ 102..103 't': T
+ 113..115 '{}': U
+ 145..146 't': T
+ 156..159 '{t}': T
+ 157..158 't': T
+ 258..279 '{ ...S)); }': ()
+ 264..268 'get2': fn get2<u64, S<u64>>(S<u64>) -> u64
+ 264..276 'get2(set(S))': u64
+ 269..272 'set': fn set<S<u64>>(S<u64>) -> S<u64>
+ 269..275 'set(S)': S<u64>
+ 273..274 'S': S<u64>
+ "#]],
+ );
+}
+
+#[test]
+fn fn_trait() {
+ check_infer_with_mismatches(
+ r#"
+trait FnOnce<Args> {
+ type Output;
+
+ fn call_once(self, args: Args) -> <Self as FnOnce<Args>>::Output;
+}
+
+fn test<F: FnOnce(u32, u64) -> u128>(f: F) {
+ f.call_once((1, 2));
+}"#,
+ expect![[r#"
+ 56..60 'self': Self
+ 62..66 'args': Args
+ 149..150 'f': F
+ 155..183 '{ ...2)); }': ()
+ 161..162 'f': F
+ 161..180 'f.call...1, 2))': u128
+ 173..179 '(1, 2)': (u32, u64)
+ 174..175 '1': u32
+ 177..178 '2': u64
+ "#]],
+ );
+}
+
+#[test]
+fn fn_ptr_and_item() {
+ check_infer_with_mismatches(
+ r#"
+#[lang="fn_once"]
+trait FnOnce<Args> {
+ type Output;
+
+ fn call_once(self, args: Args) -> Self::Output;
+}
+
+trait Foo<T> {
+ fn foo(&self) -> T;
+}
+
+struct Bar<T>(T);
+
+impl<A1, R, F: FnOnce(A1) -> R> Foo<(A1, R)> for Bar<F> {
+ fn foo(&self) -> (A1, R) { loop {} }
+}
+
+enum Opt<T> { None, Some(T) }
+impl<T> Opt<T> {
+ fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Opt<U> { loop {} }
+}
+
+fn test() {
+ let bar: Bar<fn(u8) -> u32>;
+ bar.foo();
+
+ let opt: Opt<u8>;
+ let f: fn(u8) -> u32;
+ opt.map(f);
+}"#,
+ expect![[r#"
+ 74..78 'self': Self
+ 80..84 'args': Args
+ 139..143 'self': &Self
+ 243..247 'self': &Bar<F>
+ 260..271 '{ loop {} }': (A1, R)
+ 262..269 'loop {}': !
+ 267..269 '{}': ()
+ 355..359 'self': Opt<T>
+ 361..362 'f': F
+ 377..388 '{ loop {} }': Opt<U>
+ 379..386 'loop {}': !
+ 384..386 '{}': ()
+ 402..518 '{ ...(f); }': ()
+ 412..415 'bar': Bar<fn(u8) -> u32>
+ 441..444 'bar': Bar<fn(u8) -> u32>
+ 441..450 'bar.foo()': (u8, u32)
+ 461..464 'opt': Opt<u8>
+ 483..484 'f': fn(u8) -> u32
+ 505..508 'opt': Opt<u8>
+ 505..515 'opt.map(f)': Opt<u32>
+ 513..514 'f': fn(u8) -> u32
+ "#]],
+ );
+}
+
+#[test]
+fn fn_trait_deref_with_ty_default() {
+ check_infer(
+ r#"
+//- minicore: deref, fn
+struct Foo;
+
+impl Foo {
+ fn foo(&self) -> usize {}
+}
+
+struct Lazy<T, F = fn() -> T>(F);
+
+impl<T, F> Lazy<T, F> {
+ pub fn new(f: F) -> Lazy<T, F> {}
+}
+
+impl<T, F: FnOnce() -> T> core::ops::Deref for Lazy<T, F> {
+ type Target = T;
+}
+
+fn test() {
+ let lazy1: Lazy<Foo, _> = Lazy::new(|| Foo);
+ let r1 = lazy1.foo();
+
+ fn make_foo_fn() -> Foo {}
+ let make_foo_fn_ptr: fn() -> Foo = make_foo_fn;
+ let lazy2: Lazy<Foo, _> = Lazy::new(make_foo_fn_ptr);
+ let r2 = lazy2.foo();
+}"#,
+ expect![[r#"
+ 36..40 'self': &Foo
+ 51..53 '{}': usize
+ 131..132 'f': F
+ 151..153 '{}': Lazy<T, F>
+ 251..497 '{ ...o(); }': ()
+ 261..266 'lazy1': Lazy<Foo, || -> Foo>
+ 283..292 'Lazy::new': fn new<Foo, || -> Foo>(|| -> Foo) -> Lazy<Foo, || -> Foo>
+ 283..300 'Lazy::...| Foo)': Lazy<Foo, || -> Foo>
+ 293..299 '|| Foo': || -> Foo
+ 296..299 'Foo': Foo
+ 310..312 'r1': usize
+ 315..320 'lazy1': Lazy<Foo, || -> Foo>
+ 315..326 'lazy1.foo()': usize
+ 368..383 'make_foo_fn_ptr': fn() -> Foo
+ 399..410 'make_foo_fn': fn make_foo_fn() -> Foo
+ 420..425 'lazy2': Lazy<Foo, fn() -> Foo>
+ 442..451 'Lazy::new': fn new<Foo, fn() -> Foo>(fn() -> Foo) -> Lazy<Foo, fn() -> Foo>
+ 442..468 'Lazy::...n_ptr)': Lazy<Foo, fn() -> Foo>
+ 452..467 'make_foo_fn_ptr': fn() -> Foo
+ 478..480 'r2': usize
+ 483..488 'lazy2': Lazy<Foo, fn() -> Foo>
+ 483..494 'lazy2.foo()': usize
+ 357..359 '{}': Foo
+ "#]],
+ );
+}
+
+#[test]
+fn closure_1() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn
+enum Option<T> { Some(T), None }
+impl<T> Option<T> {
+ fn map<U, F: FnOnce(T) -> U>(self, f: F) -> Option<U> { loop {} }
+}
+
+fn test() {
+ let x = Option::Some(1u32);
+ x.map(|v| v + 1);
+ x.map(|_v| 1u64);
+ let y: Option<i64> = x.map(|_v| 1);
+}"#,
+ expect![[r#"
+ 86..90 'self': Option<T>
+ 92..93 'f': F
+ 111..122 '{ loop {} }': Option<U>
+ 113..120 'loop {}': !
+ 118..120 '{}': ()
+ 136..255 '{ ... 1); }': ()
+ 146..147 'x': Option<u32>
+ 150..162 'Option::Some': Some<u32>(u32) -> Option<u32>
+ 150..168 'Option...(1u32)': Option<u32>
+ 163..167 '1u32': u32
+ 174..175 'x': Option<u32>
+ 174..190 'x.map(...v + 1)': Option<u32>
+ 180..189 '|v| v + 1': |u32| -> u32
+ 181..182 'v': u32
+ 184..185 'v': u32
+ 184..189 'v + 1': u32
+ 188..189 '1': u32
+ 196..197 'x': Option<u32>
+ 196..212 'x.map(... 1u64)': Option<u64>
+ 202..211 '|_v| 1u64': |u32| -> u64
+ 203..205 '_v': u32
+ 207..211 '1u64': u64
+ 222..223 'y': Option<i64>
+ 239..240 'x': Option<u32>
+ 239..252 'x.map(|_v| 1)': Option<i64>
+ 245..251 '|_v| 1': |u32| -> i64
+ 246..248 '_v': u32
+ 250..251 '1': i64
+ "#]],
+ );
+}
+
+#[test]
+fn closure_2() {
+ check_types(
+ r#"
+//- minicore: add, fn
+
+impl core::ops::Add for u64 {
+ type Output = Self;
+ fn add(self, rhs: u64) -> Self::Output {0}
+}
+
+impl core::ops::Add for u128 {
+ type Output = Self;
+ fn add(self, rhs: u128) -> Self::Output {0}
+}
+
+fn test<F: FnOnce(u32) -> u64>(f: F) {
+ f(1);
+ // ^ u32
+ //^^^^ u64
+ let g = |v| v + 1;
+ //^^^^^ u64
+ //^^^^^^^^^ |u64| -> u64
+ g(1u64);
+ //^^^^^^^ u64
+ let h = |v| 1u128 + v;
+ //^^^^^^^^^^^^^ |u128| -> u128
+}"#,
+ );
+}
+
+#[test]
+fn closure_as_argument_inference_order() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn
+fn foo1<T, U, F: FnOnce(T) -> U>(x: T, f: F) -> U { loop {} }
+fn foo2<T, U, F: FnOnce(T) -> U>(f: F, x: T) -> U { loop {} }
+
+struct S;
+impl S {
+ fn method(self) -> u64;
+
+ fn foo1<T, U, F: FnOnce(T) -> U>(self, x: T, f: F) -> U { loop {} }
+ fn foo2<T, U, F: FnOnce(T) -> U>(self, f: F, x: T) -> U { loop {} }
+}
+
+fn test() {
+ let x1 = foo1(S, |s| s.method());
+ let x2 = foo2(|s| s.method(), S);
+ let x3 = S.foo1(S, |s| s.method());
+ let x4 = S.foo2(|s| s.method(), S);
+}"#,
+ expect![[r#"
+ 33..34 'x': T
+ 39..40 'f': F
+ 50..61 '{ loop {} }': U
+ 52..59 'loop {}': !
+ 57..59 '{}': ()
+ 95..96 'f': F
+ 101..102 'x': T
+ 112..123 '{ loop {} }': U
+ 114..121 'loop {}': !
+ 119..121 '{}': ()
+ 158..162 'self': S
+ 210..214 'self': S
+ 216..217 'x': T
+ 222..223 'f': F
+ 233..244 '{ loop {} }': U
+ 235..242 'loop {}': !
+ 240..242 '{}': ()
+ 282..286 'self': S
+ 288..289 'f': F
+ 294..295 'x': T
+ 305..316 '{ loop {} }': U
+ 307..314 'loop {}': !
+ 312..314 '{}': ()
+ 330..489 '{ ... S); }': ()
+ 340..342 'x1': u64
+ 345..349 'foo1': fn foo1<S, u64, |S| -> u64>(S, |S| -> u64) -> u64
+ 345..368 'foo1(S...hod())': u64
+ 350..351 'S': S
+ 353..367 '|s| s.method()': |S| -> u64
+ 354..355 's': S
+ 357..358 's': S
+ 357..367 's.method()': u64
+ 378..380 'x2': u64
+ 383..387 'foo2': fn foo2<S, u64, |S| -> u64>(|S| -> u64, S) -> u64
+ 383..406 'foo2(|...(), S)': u64
+ 388..402 '|s| s.method()': |S| -> u64
+ 389..390 's': S
+ 392..393 's': S
+ 392..402 's.method()': u64
+ 404..405 'S': S
+ 416..418 'x3': u64
+ 421..422 'S': S
+ 421..446 'S.foo1...hod())': u64
+ 428..429 'S': S
+ 431..445 '|s| s.method()': |S| -> u64
+ 432..433 's': S
+ 435..436 's': S
+ 435..445 's.method()': u64
+ 456..458 'x4': u64
+ 461..462 'S': S
+ 461..486 'S.foo2...(), S)': u64
+ 468..482 '|s| s.method()': |S| -> u64
+ 469..470 's': S
+ 472..473 's': S
+ 472..482 's.method()': u64
+ 484..485 'S': S
+ "#]],
+ );
+}
+
+#[test]
+fn fn_item_fn_trait() {
+ check_types(
+ r#"
+//- minicore: fn
+struct S;
+
+fn foo() -> S { S }
+
+fn takes_closure<U, F: FnOnce() -> U>(f: F) -> U { f() }
+
+fn test() {
+ takes_closure(foo);
+} //^^^^^^^^^^^^^^^^^^ S
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_1() {
+ check_types(
+ r#"
+//- /main.rs
+trait Trait {
+ type Item;
+}
+
+trait Trait2 {
+ fn foo(&self) -> u32;
+}
+
+fn test<T: Trait>() where T::Item: Trait2 {
+ let x: T::Item = no_matter;
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_2() {
+ check_types(
+ r#"
+trait Trait<T> {
+ type Item;
+}
+
+trait Trait2 {
+ fn foo(&self) -> u32;
+}
+
+fn test<T, U>() where T::Item: Trait2, T: Trait<U::Item>, U: Trait<()> {
+ let x: T::Item = no_matter;
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_on_impl_self() {
+ check_infer(
+ r#"
+//- /main.rs
+trait Trait {
+ type Item;
+
+ fn f(&self, x: Self::Item);
+}
+
+struct S;
+
+impl Trait for S {
+ type Item = u32;
+ fn f(&self, x: Self::Item) { let y = x; }
+}
+
+struct S2;
+
+impl Trait for S2 {
+ type Item = i32;
+ fn f(&self, x: <Self>::Item) { let y = x; }
+}"#,
+ expect![[r#"
+ 40..44 'self': &Self
+ 46..47 'x': Trait::Item<Self>
+ 126..130 'self': &S
+ 132..133 'x': u32
+ 147..161 '{ let y = x; }': ()
+ 153..154 'y': u32
+ 157..158 'x': u32
+ 228..232 'self': &S2
+ 234..235 'x': i32
+ 251..265 '{ let y = x; }': ()
+ 257..258 'y': i32
+ 261..262 'x': i32
+ "#]],
+ );
+}
+
+#[test]
+fn unselected_projection_on_trait_self() {
+ check_types(
+ r#"
+trait Trait {
+ type Item;
+
+ fn f(&self) -> Self::Item { loop {} }
+}
+
+struct S;
+impl Trait for S {
+ type Item = u32;
+}
+
+fn test() {
+ S.f();
+} //^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_chalk_fold() {
+ check_types(
+ r#"
+trait Interner {}
+trait Fold<I: Interner, TI = I> {
+ type Result;
+}
+
+struct Ty<I: Interner> {}
+impl<I: Interner, TI: Interner> Fold<I, TI> for Ty<I> {
+ type Result = Ty<TI>;
+}
+
+fn fold<I: Interner, T>(interner: &I, t: T) -> T::Result
+where
+ T: Fold<I, I>,
+{
+ loop {}
+}
+
+fn foo<I: Interner>(interner: &I, t: Ty<I>) {
+ fold(interner, t);
+} //^^^^^^^^^^^^^^^^^ Ty<I>
+"#,
+ );
+}
+
+#[test]
+fn trait_impl_self_ty() {
+ check_types(
+ r#"
+trait Trait<T> {
+ fn foo(&self);
+}
+
+struct S;
+
+impl Trait<Self> for S {}
+
+fn test() {
+ S.foo();
+} //^^^^^^^ ()
+"#,
+ );
+}
+
+#[test]
+fn trait_impl_self_ty_cycle() {
+ check_types(
+ r#"
+trait Trait {
+ fn foo(&self);
+}
+
+struct S<T>;
+
+impl Trait for S<Self> {}
+
+fn test() {
+ S.foo();
+} //^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_cycle_1() {
+ // This is not a cycle, because the `T: Trait2<T::Item>` bound depends only on the `T: Trait`
+ // bound, not on itself (since only `Trait` can define `Item`).
+ check_types(
+ r#"
+trait Trait {
+ type Item;
+}
+
+trait Trait2<T> {}
+
+fn test<T: Trait>() where T: Trait2<T::Item> {
+ let x: T::Item = no_matter;
+} //^^^^^^^^^ Trait::Item<T>
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_cycle_2() {
+ // this is a legitimate cycle
+ check_types(
+ r#"
+//- /main.rs
+trait Trait<T> {
+ type Item;
+}
+
+fn test<T, U>() where T: Trait<U::Item>, U: Trait<T::Item> {
+ let x: T::Item = no_matter;
+} //^^^^^^^^^ {unknown}
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_cycle_3() {
+ // this is a cycle for rustc; we currently accept it
+ check_types(
+ r#"
+//- /main.rs
+trait Trait {
+ type Item;
+ type OtherItem;
+}
+
+fn test<T>() where T: Trait<OtherItem = T::Item> {
+ let x: T::Item = no_matter;
+} //^^^^^^^^^ Trait::Item<T>
+"#,
+ );
+}
+
+#[test]
+fn unselected_projection_in_trait_env_no_cycle() {
+ // this is not a cycle
+ check_types(
+ r#"
+//- /main.rs
+trait Index {
+ type Output;
+}
+
+type Key<S: UnificationStoreBase> = <S as UnificationStoreBase>::Key;
+
+pub trait UnificationStoreBase: Index<Output = Key<Self>> {
+ type Key;
+
+ fn len(&self) -> usize;
+}
+
+pub trait UnificationStoreMut: UnificationStoreBase {
+ fn push(&mut self, value: Self::Key);
+}
+
+fn test<T>(t: T) where T: UnificationStoreMut {
+ let x;
+ t.push(x);
+ let y: Key<T>;
+ (x, y);
+} //^^^^^^ (UnificationStoreBase::Key<T>, UnificationStoreBase::Key<T>)
+"#,
+ );
+}
+
+#[test]
+fn inline_assoc_type_bounds_1() {
+ check_types(
+ r#"
+trait Iterator {
+ type Item;
+}
+trait OtherTrait<T> {
+ fn foo(&self) -> T;
+}
+
+// workaround for Chalk assoc type normalization problems
+pub struct S<T>;
+impl<T: Iterator> Iterator for S<T> {
+ type Item = <T as Iterator>::Item;
+}
+
+fn test<I: Iterator<Item: OtherTrait<u32>>>() {
+ let x: <S<I> as Iterator>::Item;
+ x.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn inline_assoc_type_bounds_2() {
+ check_types(
+ r#"
+trait Iterator {
+ type Item;
+}
+
+fn test<I: Iterator<Item: Iterator<Item = u32>>>() {
+ let x: <<I as Iterator>::Item as Iterator>::Item;
+ x;
+} //^ u32
+"#,
+ );
+}
+
+#[test]
+fn proc_macro_server_types() {
+ check_infer(
+ r#"
+macro_rules! with_api {
+ ($S:ident, $self:ident, $m:ident) => {
+ $m! {
+ TokenStream {
+ fn new() -> $S::TokenStream;
+ },
+ Group {
+ },
+ }
+ };
+}
+macro_rules! associated_item {
+ (type TokenStream) =>
+ (type TokenStream: 'static;);
+ (type Group) =>
+ (type Group: 'static;);
+ ($($item:tt)*) => ($($item)*;)
+}
+macro_rules! declare_server_traits {
+ ($($name:ident {
+ $(fn $method:ident($($arg:ident: $arg_ty:ty),* $(,)?) $(-> $ret_ty:ty)?;)*
+ }),* $(,)?) => {
+ pub trait Types {
+ $(associated_item!(type $name);)*
+ }
+
+ $(pub trait $name: Types {
+ $(associated_item!(fn $method($($arg: $arg_ty),*) $(-> $ret_ty)?);)*
+ })*
+
+ pub trait Server: Types $(+ $name)* {}
+ impl<S: Types $(+ $name)*> Server for S {}
+ }
+}
+
+with_api!(Self, self_, declare_server_traits);
+struct G {}
+struct T {}
+struct RustAnalyzer;
+impl Types for RustAnalyzer {
+ type TokenStream = T;
+ type Group = G;
+}
+
+fn make<T>() -> T { loop {} }
+impl TokenStream for RustAnalyzer {
+ fn new() -> Self::TokenStream {
+ let group: Self::Group = make();
+ make()
+ }
+}"#,
+ expect![[r#"
+ 1075..1086 '{ loop {} }': T
+ 1077..1084 'loop {}': !
+ 1082..1084 '{}': ()
+ 1157..1220 '{ ... }': T
+ 1171..1176 'group': G
+ 1192..1196 'make': fn make<G>() -> G
+ 1192..1198 'make()': G
+ 1208..1212 'make': fn make<T>() -> T
+ 1208..1214 'make()': T
+ "#]],
+ );
+}
+
+#[test]
+fn unify_impl_trait() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Trait<T> {}
+
+fn foo(x: impl Trait<u32>) { loop {} }
+fn bar<T>(x: impl Trait<T>) -> T { loop {} }
+
+struct S<T>(T);
+impl<T> Trait<T> for S<T> {}
+
+fn default<T>() -> T { loop {} }
+
+fn test() -> impl Trait<i32> {
+ let s1 = S(default());
+ foo(s1);
+ let x: i32 = bar(S(default()));
+ S(default())
+}"#,
+ expect![[r#"
+ 26..27 'x': impl Trait<u32>
+ 46..57 '{ loop {} }': ()
+ 48..55 'loop {}': !
+ 53..55 '{}': ()
+ 68..69 'x': impl Trait<T>
+ 91..102 '{ loop {} }': T
+ 93..100 'loop {}': !
+ 98..100 '{}': ()
+ 171..182 '{ loop {} }': T
+ 173..180 'loop {}': !
+ 178..180 '{}': ()
+ 213..309 '{ ...t()) }': S<i32>
+ 223..225 's1': S<u32>
+ 228..229 'S': S<u32>(u32) -> S<u32>
+ 228..240 'S(default())': S<u32>
+ 230..237 'default': fn default<u32>() -> u32
+ 230..239 'default()': u32
+ 246..249 'foo': fn foo(S<u32>)
+ 246..253 'foo(s1)': ()
+ 250..252 's1': S<u32>
+ 263..264 'x': i32
+ 272..275 'bar': fn bar<i32>(S<i32>) -> i32
+ 272..289 'bar(S(...lt()))': i32
+ 276..277 'S': S<i32>(i32) -> S<i32>
+ 276..288 'S(default())': S<i32>
+ 278..285 'default': fn default<i32>() -> i32
+ 278..287 'default()': i32
+ 295..296 'S': S<i32>(i32) -> S<i32>
+ 295..307 'S(default())': S<i32>
+ 297..304 'default': fn default<i32>() -> i32
+ 297..306 'default()': i32
+ "#]],
+ );
+}
+
+#[test]
+fn assoc_types_from_bounds() {
+ check_infer(
+ r#"
+//- minicore: fn
+trait T {
+ type O;
+}
+
+impl T for () {
+ type O = ();
+}
+
+fn f<X, F>(_v: F)
+where
+ X: T,
+ F: FnOnce(&X::O),
+{ }
+
+fn main() {
+ f::<(), _>(|z| { z; });
+}"#,
+ expect![[r#"
+ 72..74 '_v': F
+ 117..120 '{ }': ()
+ 132..163 '{ ... }); }': ()
+ 138..148 'f::<(), _>': fn f<(), |&()| -> ()>(|&()| -> ())
+ 138..160 'f::<()... z; })': ()
+ 149..159 '|z| { z; }': |&()| -> ()
+ 150..151 'z': &()
+ 153..159 '{ z; }': ()
+ 155..156 'z': &()
+ "#]],
+ );
+}
+
+#[test]
+fn associated_type_bound() {
+ check_types(
+ r#"
+pub trait Trait {
+ type Item: OtherTrait<u32>;
+}
+pub trait OtherTrait<T> {
+ fn foo(&self) -> T;
+}
+
+// this is just a workaround for chalk#234
+pub struct S<T>;
+impl<T: Trait> Trait for S<T> {
+ type Item = <T as Trait>::Item;
+}
+
+fn test<T: Trait>() {
+ let y: <S<T> as Trait>::Item = no_matter;
+ y.foo();
+} //^^^^^^^ u32
+"#,
+ );
+}
+
+#[test]
+fn dyn_trait_through_chalk() {
+ check_types(
+ r#"
+//- minicore: deref
+struct Box<T: ?Sized> {}
+impl<T: ?Sized> core::ops::Deref for Box<T> {
+ type Target = T;
+}
+trait Trait {
+ fn foo(&self);
+}
+
+fn test(x: Box<dyn Trait>) {
+ x.foo();
+} //^^^^^^^ ()
+"#,
+ );
+}
+
+#[test]
+fn string_to_owned() {
+ check_types(
+ r#"
+struct String {}
+pub trait ToOwned {
+ type Owned;
+ fn to_owned(&self) -> Self::Owned;
+}
+impl ToOwned for str {
+ type Owned = String;
+}
+fn test() {
+ "foo".to_owned();
+} //^^^^^^^^^^^^^^^^ String
+"#,
+ );
+}
+
+#[test]
+fn iterator_chain() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn, option
+pub trait Iterator {
+ type Item;
+
+ fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
+ where
+ F: FnMut(Self::Item) -> Option<B>,
+ { loop {} }
+
+ fn for_each<F>(self, f: F)
+ where
+ F: FnMut(Self::Item),
+ { loop {} }
+}
+
+pub trait IntoIterator {
+ type Item;
+ type IntoIter: Iterator<Item = Self::Item>;
+ fn into_iter(self) -> Self::IntoIter;
+}
+
+pub struct FilterMap<I, F> { }
+impl<B, I: Iterator, F> Iterator for FilterMap<I, F>
+where
+ F: FnMut(I::Item) -> Option<B>,
+{
+ type Item = B;
+}
+
+#[stable(feature = "rust1", since = "1.0.0")]
+impl<I: Iterator> IntoIterator for I {
+ type Item = I::Item;
+ type IntoIter = I;
+
+ fn into_iter(self) -> I {
+ self
+ }
+}
+
+struct Vec<T> {}
+impl<T> Vec<T> {
+ fn new() -> Self { loop {} }
+}
+
+impl<T> IntoIterator for Vec<T> {
+ type Item = T;
+ type IntoIter = IntoIter<T>;
+}
+
+pub struct IntoIter<T> { }
+impl<T> Iterator for IntoIter<T> {
+ type Item = T;
+}
+
+fn main() {
+ Vec::<i32>::new().into_iter()
+ .filter_map(|x| if x > 0 { Some(x as u32) } else { None })
+ .for_each(|y| { y; });
+}"#,
+ expect![[r#"
+ 61..65 'self': Self
+ 67..68 'f': F
+ 152..163 '{ loop {} }': FilterMap<Self, F>
+ 154..161 'loop {}': !
+ 159..161 '{}': ()
+ 184..188 'self': Self
+ 190..191 'f': F
+ 240..251 '{ loop {} }': ()
+ 242..249 'loop {}': !
+ 247..249 '{}': ()
+ 360..364 'self': Self
+ 689..693 'self': I
+ 700..720 '{ ... }': I
+ 710..714 'self': I
+ 779..790 '{ loop {} }': Vec<T>
+ 781..788 'loop {}': !
+ 786..788 '{}': ()
+ 977..1104 '{ ... }); }': ()
+ 983..998 'Vec::<i32>::new': fn new<i32>() -> Vec<i32>
+ 983..1000 'Vec::<...:new()': Vec<i32>
+ 983..1012 'Vec::<...iter()': IntoIter<i32>
+ 983..1075 'Vec::<...one })': FilterMap<IntoIter<i32>, |i32| -> Option<u32>>
+ 983..1101 'Vec::<... y; })': ()
+ 1029..1074 '|x| if...None }': |i32| -> Option<u32>
+ 1030..1031 'x': i32
+ 1033..1074 'if x >...None }': Option<u32>
+ 1036..1037 'x': i32
+ 1036..1041 'x > 0': bool
+ 1040..1041 '0': i32
+ 1042..1060 '{ Some...u32) }': Option<u32>
+ 1044..1048 'Some': Some<u32>(u32) -> Option<u32>
+ 1044..1058 'Some(x as u32)': Option<u32>
+ 1049..1050 'x': i32
+ 1049..1057 'x as u32': u32
+ 1066..1074 '{ None }': Option<u32>
+ 1068..1072 'None': Option<u32>
+ 1090..1100 '|y| { y; }': |u32| -> ()
+ 1091..1092 'y': u32
+ 1094..1100 '{ y; }': ()
+ 1096..1097 'y': u32
+ "#]],
+ );
+}
+
+#[test]
+fn nested_assoc() {
+ check_types(
+ r#"
+struct Bar;
+struct Foo;
+
+trait A {
+ type OutputA;
+}
+
+impl A for Bar {
+ type OutputA = Foo;
+}
+
+trait B {
+ type Output;
+ fn foo() -> Self::Output;
+}
+
+impl<T:A> B for T {
+ type Output = T::OutputA;
+ fn foo() -> Self::Output { loop {} }
+}
+
+fn main() {
+ Bar::foo();
+} //^^^^^^^^^^ Foo
+"#,
+ );
+}
+
+#[test]
+fn trait_object_no_coercion() {
+ check_infer_with_mismatches(
+ r#"
+trait Foo {}
+
+fn foo(x: &dyn Foo) {}
+
+fn test(x: &dyn Foo) {
+ foo(x);
+}"#,
+ expect![[r#"
+ 21..22 'x': &dyn Foo
+ 34..36 '{}': ()
+ 46..47 'x': &dyn Foo
+ 59..74 '{ foo(x); }': ()
+ 65..68 'foo': fn foo(&dyn Foo)
+ 65..71 'foo(x)': ()
+ 69..70 'x': &dyn Foo
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_copy() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: copy
+struct IsCopy;
+impl Copy for IsCopy {}
+struct NotCopy;
+
+trait Test { fn test(&self) -> bool; }
+impl<T: Copy> Test for T {}
+
+fn test() {
+ IsCopy.test();
+ NotCopy.test();
+ (IsCopy, IsCopy).test();
+ (IsCopy, NotCopy).test();
+}"#,
+ expect![[r#"
+ 78..82 'self': &Self
+ 134..235 '{ ...t(); }': ()
+ 140..146 'IsCopy': IsCopy
+ 140..153 'IsCopy.test()': bool
+ 159..166 'NotCopy': NotCopy
+ 159..173 'NotCopy.test()': {unknown}
+ 179..195 '(IsCop...sCopy)': (IsCopy, IsCopy)
+ 179..202 '(IsCop...test()': bool
+ 180..186 'IsCopy': IsCopy
+ 188..194 'IsCopy': IsCopy
+ 208..225 '(IsCop...tCopy)': (IsCopy, NotCopy)
+ 208..232 '(IsCop...test()': {unknown}
+ 209..215 'IsCopy': IsCopy
+ 217..224 'NotCopy': NotCopy
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_fn_def_copy() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: copy
+fn foo() {}
+fn bar<T: Copy>(T) -> T {}
+struct Struct(usize);
+enum Enum { Variant(usize) }
+
+trait Test { fn test(&self) -> bool; }
+impl<T: Copy> Test for T {}
+
+fn test() {
+ foo.test();
+ bar.test();
+ Struct.test();
+ Enum::Variant.test();
+}"#,
+ expect![[r#"
+ 9..11 '{}': ()
+ 28..29 'T': {unknown}
+ 36..38 '{}': T
+ 36..38: expected T, got ()
+ 113..117 'self': &Self
+ 169..249 '{ ...t(); }': ()
+ 175..178 'foo': fn foo()
+ 175..185 'foo.test()': bool
+ 191..194 'bar': fn bar<{unknown}>({unknown}) -> {unknown}
+ 191..201 'bar.test()': bool
+ 207..213 'Struct': Struct(usize) -> Struct
+ 207..220 'Struct.test()': bool
+ 226..239 'Enum::Variant': Variant(usize) -> Enum
+ 226..246 'Enum::...test()': bool
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_fn_ptr_copy() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: copy
+trait Test { fn test(&self) -> bool; }
+impl<T: Copy> Test for T {}
+
+fn test(f1: fn(), f2: fn(usize) -> u8, f3: fn(u8, u8) -> &u8) {
+ f1.test();
+ f2.test();
+ f3.test();
+}"#,
+ expect![[r#"
+ 22..26 'self': &Self
+ 76..78 'f1': fn()
+ 86..88 'f2': fn(usize) -> u8
+ 107..109 'f3': fn(u8, u8) -> &u8
+ 130..178 '{ ...t(); }': ()
+ 136..138 'f1': fn()
+ 136..145 'f1.test()': bool
+ 151..153 'f2': fn(usize) -> u8
+ 151..160 'f2.test()': bool
+ 166..168 'f3': fn(u8, u8) -> &u8
+ 166..175 'f3.test()': bool
+ "#]],
+ );
+}
+
+#[test]
+fn builtin_sized() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: sized
+trait Test { fn test(&self) -> bool; }
+impl<T: Sized> Test for T {}
+
+fn test() {
+ 1u8.test();
+ (*"foo").test(); // not Sized
+ (1u8, 1u8).test();
+ (1u8, *"foo").test(); // not Sized
+}"#,
+ expect![[r#"
+ 22..26 'self': &Self
+ 79..194 '{ ...ized }': ()
+ 85..88 '1u8': u8
+ 85..95 '1u8.test()': bool
+ 101..116 '(*"foo").test()': {unknown}
+ 102..108 '*"foo"': str
+ 103..108 '"foo"': &str
+ 135..145 '(1u8, 1u8)': (u8, u8)
+ 135..152 '(1u8, ...test()': bool
+ 136..139 '1u8': u8
+ 141..144 '1u8': u8
+ 158..171 '(1u8, *"foo")': (u8, str)
+ 158..178 '(1u8, ...test()': {unknown}
+ 159..162 '1u8': u8
+ 164..170 '*"foo"': str
+ 165..170 '"foo"': &str
+ "#]],
+ );
+}
+
+#[test]
+fn integer_range_iterate() {
+ check_types(
+ r#"
+//- /main.rs crate:main deps:core
+fn test() {
+ for x in 0..100 { x; }
+} //^ i32
+
+//- /core.rs crate:core
+pub mod ops {
+ pub struct Range<Idx> {
+ pub start: Idx,
+ pub end: Idx,
+ }
+}
+
+pub mod iter {
+ pub trait Iterator {
+ type Item;
+ }
+
+ pub trait IntoIterator {
+ type Item;
+ type IntoIter: Iterator<Item = Self::Item>;
+ }
+
+ impl<T> IntoIterator for T where T: Iterator {
+ type Item = <T as Iterator>::Item;
+ type IntoIter = Self;
+ }
+}
+
+trait Step {}
+impl Step for i32 {}
+impl Step for i64 {}
+
+impl<A: Step> iter::Iterator for ops::Range<A> {
+ type Item = A;
+}
+"#,
+ );
+}
+
+#[test]
+fn infer_closure_arg() {
+ check_infer(
+ r#"
+//- /lib.rs
+
+enum Option<T> {
+ None,
+ Some(T)
+}
+
+fn foo() {
+ let s = Option::None;
+ let f = |x: Option<i32>| {};
+ (&f)(s)
+}"#,
+ expect![[r#"
+ 52..126 '{ ...)(s) }': ()
+ 62..63 's': Option<i32>
+ 66..78 'Option::None': Option<i32>
+ 88..89 'f': |Option<i32>| -> ()
+ 92..111 '|x: Op...2>| {}': |Option<i32>| -> ()
+ 93..94 'x': Option<i32>
+ 109..111 '{}': ()
+ 117..124 '(&f)(s)': ()
+ 118..120 '&f': &|Option<i32>| -> ()
+ 119..120 'f': |Option<i32>| -> ()
+ 122..123 's': Option<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_fn_param_informs_call_site_closure_signature() {
+ cov_mark::check!(dyn_fn_param_informs_call_site_closure_signature);
+ check_types(
+ r#"
+//- minicore: fn, coerce_unsized
+struct S;
+impl S {
+ fn inherent(&self) -> u8 { 0 }
+}
+fn take_dyn_fn(f: &dyn Fn(S)) {}
+
+fn f() {
+ take_dyn_fn(&|x| { x.inherent(); });
+ //^^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn infer_fn_trait_arg() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn, option
+fn foo<F, T>(f: F) -> T
+where
+ F: Fn(Option<i32>) -> T,
+{
+ let s = None;
+ f(s)
+}
+"#,
+ expect![[r#"
+ 13..14 'f': F
+ 59..89 '{ ...f(s) }': T
+ 69..70 's': Option<i32>
+ 73..77 'None': Option<i32>
+ 83..84 'f': F
+ 83..87 'f(s)': T
+ 85..86 's': Option<i32>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_box_fn_arg() {
+ // The type mismatch is because we don't define Unsize and CoerceUnsized
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn, deref, option
+#[lang = "owned_box"]
+pub struct Box<T: ?Sized> {
+ inner: *mut T,
+}
+
+impl<T: ?Sized> core::ops::Deref for Box<T> {
+ type Target = T;
+
+ fn deref(&self) -> &T {
+ &self.inner
+ }
+}
+
+fn foo() {
+ let s = None;
+ let f: Box<dyn FnOnce(&Option<i32>)> = box (|ps| {});
+ f(&s);
+}"#,
+ expect![[r#"
+ 154..158 'self': &Box<T>
+ 166..193 '{ ... }': &T
+ 176..187 '&self.inner': &*mut T
+ 177..181 'self': &Box<T>
+ 177..187 'self.inner': *mut T
+ 206..296 '{ ...&s); }': ()
+ 216..217 's': Option<i32>
+ 220..224 'None': Option<i32>
+ 234..235 'f': Box<dyn FnOnce(&Option<i32>)>
+ 269..282 'box (|ps| {})': Box<|&Option<i32>| -> ()>
+ 274..281 '|ps| {}': |&Option<i32>| -> ()
+ 275..277 'ps': &Option<i32>
+ 279..281 '{}': ()
+ 288..289 'f': Box<dyn FnOnce(&Option<i32>)>
+ 288..293 'f(&s)': ()
+ 290..292 '&s': &Option<i32>
+ 291..292 's': Option<i32>
+ 269..282: expected Box<dyn FnOnce(&Option<i32>)>, got Box<|&Option<i32>| -> ()>
+ "#]],
+ );
+}
+
+#[test]
+fn infer_dyn_fn_output() {
+ check_types(
+ r#"
+//- minicore: fn
+fn foo() {
+ let f: &dyn Fn() -> i32;
+ f();
+ //^^^ i32
+}"#,
+ );
+}
+
+#[test]
+fn infer_dyn_fn_once_output() {
+ check_types(
+ r#"
+//- minicore: fn
+fn foo() {
+ let f: dyn FnOnce() -> i32;
+ f();
+ //^^^ i32
+}"#,
+ );
+}
+
+#[test]
+fn variable_kinds_1() {
+ check_types(
+ r#"
+trait Trait<T> { fn get(self, t: T) -> T; }
+struct S;
+impl Trait<u128> for S {}
+impl Trait<f32> for S {}
+fn test() {
+ S.get(1);
+ //^^^^^^^^ u128
+ S.get(1.);
+ //^^^^^^^^^ f32
+}
+ "#,
+ );
+}
+
+#[test]
+fn variable_kinds_2() {
+ check_types(
+ r#"
+trait Trait { fn get(self) -> Self; }
+impl Trait for u128 {}
+impl Trait for f32 {}
+fn test() {
+ 1.get();
+ //^^^^^^^ u128
+ (1.).get();
+ //^^^^^^^^^^ f32
+}
+ "#,
+ );
+}
+
+#[test]
+fn underscore_import() {
+ check_types(
+ r#"
+mod tr {
+ pub trait Tr {
+ fn method(&self) -> u8 { 0 }
+ }
+}
+
+struct Tr;
+impl crate::tr::Tr for Tr {}
+
+use crate::tr::Tr as _;
+fn test() {
+ Tr.method();
+ //^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn inner_use() {
+ check_types(
+ r#"
+mod m {
+ pub trait Tr {
+ fn method(&self) -> u8 { 0 }
+ }
+
+ impl Tr for () {}
+}
+
+fn f() {
+ use m::Tr;
+
+ ().method();
+ //^^^^^^^^^^^ u8
+}
+ "#,
+ );
+}
+
+#[test]
+fn trait_in_scope_with_inner_item() {
+ check_infer(
+ r#"
+mod m {
+ pub trait Tr {
+ fn method(&self) -> u8 { 0 }
+ }
+
+ impl Tr for () {}
+}
+
+use m::Tr;
+
+fn f() {
+ fn inner() {
+ ().method();
+ //^^^^^^^^^^^ u8
+ }
+}"#,
+ expect![[r#"
+ 46..50 'self': &Self
+ 58..63 '{ 0 }': u8
+ 60..61 '0': u8
+ 115..185 '{ ... } }': ()
+ 132..183 '{ ... }': ()
+ 142..144 '()': ()
+ 142..153 '().method()': u8
+ "#]],
+ );
+}
+
+#[test]
+fn inner_use_in_block() {
+ check_types(
+ r#"
+mod m {
+ pub trait Tr {
+ fn method(&self) -> u8 { 0 }
+ }
+
+ impl Tr for () {}
+}
+
+fn f() {
+ {
+ use m::Tr;
+
+ ().method();
+ //^^^^^^^^^^^ u8
+ }
+
+ {
+ ().method();
+ //^^^^^^^^^^^ {unknown}
+ }
+
+ ().method();
+ //^^^^^^^^^^^ {unknown}
+}
+ "#,
+ );
+}
+
+#[test]
+fn nested_inner_function_calling_self() {
+ check_infer(
+ r#"
+struct S;
+fn f() {
+ fn inner() -> S {
+ let s = inner();
+ }
+}"#,
+ expect![[r#"
+ 17..73 '{ ... } }': ()
+ 39..71 '{ ... }': S
+ 53..54 's': S
+ 57..62 'inner': fn inner() -> S
+ 57..64 'inner()': S
+ "#]],
+ )
+}
+
+#[test]
+fn infer_default_trait_type_parameter() {
+ check_infer(
+ r#"
+struct A;
+
+trait Op<RHS=Self> {
+ type Output;
+
+ fn do_op(self, rhs: RHS) -> Self::Output;
+}
+
+impl Op for A {
+ type Output = bool;
+
+ fn do_op(self, rhs: Self) -> Self::Output {
+ true
+ }
+}
+
+fn test() {
+ let x = A;
+ let y = A;
+ let r = x.do_op(y);
+}"#,
+ expect![[r#"
+ 63..67 'self': Self
+ 69..72 'rhs': RHS
+ 153..157 'self': A
+ 159..162 'rhs': A
+ 186..206 '{ ... }': bool
+ 196..200 'true': bool
+ 220..277 '{ ...(y); }': ()
+ 230..231 'x': A
+ 234..235 'A': A
+ 245..246 'y': A
+ 249..250 'A': A
+ 260..261 'r': bool
+ 264..265 'x': A
+ 264..274 'x.do_op(y)': bool
+ 272..273 'y': A
+ "#]],
+ )
+}
+
+#[test]
+fn qualified_path_as_qualified_trait() {
+ check_infer(
+ r#"
+mod foo {
+
+ pub trait Foo {
+ type Target;
+ }
+ pub trait Bar {
+ type Output;
+ fn boo() -> Self::Output {
+ loop {}
+ }
+ }
+}
+
+struct F;
+impl foo::Foo for F {
+ type Target = ();
+}
+impl foo::Bar for F {
+ type Output = <F as foo::Foo>::Target;
+}
+
+fn foo() {
+ use foo::Bar;
+ let x = <F as Bar>::boo();
+}"#,
+ expect![[r#"
+ 132..163 '{ ... }': Bar::Output<Self>
+ 146..153 'loop {}': !
+ 151..153 '{}': ()
+ 306..358 '{ ...o(); }': ()
+ 334..335 'x': ()
+ 338..353 '<F as Bar>::boo': fn boo<F>() -> <F as Bar>::Output
+ 338..355 '<F as ...:boo()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn renamed_extern_crate_in_block() {
+ check_types(
+ r#"
+//- /lib.rs crate:lib deps:serde
+use serde::Deserialize;
+
+struct Foo {}
+
+const _ : () = {
+ extern crate serde as _serde;
+ impl _serde::Deserialize for Foo {
+ fn deserialize() -> u8 { 0 }
+ }
+};
+
+fn foo() {
+ Foo::deserialize();
+ //^^^^^^^^^^^^^^^^^^ u8
+}
+
+//- /serde.rs crate:serde
+
+pub trait Deserialize {
+ fn deserialize() -> u8;
+}"#,
+ );
+}
+
+#[test]
+fn bin_op_with_rhs_is_self_for_assoc_bound() {
+ check_no_mismatches(
+ r#"//- minicore: eq
+ fn repro<T>(t: T) -> bool
+where
+ T: Request,
+ T::Output: Convertable,
+{
+ let a = execute(&t).convert();
+ let b = execute(&t).convert();
+ a.eq(&b);
+ let a = execute(&t).convert2();
+ let b = execute(&t).convert2();
+ a.eq(&b)
+}
+fn execute<T>(t: &T) -> T::Output
+where
+ T: Request,
+{
+ <T as Request>::output()
+}
+trait Convertable {
+ type TraitSelf: PartialEq<Self::TraitSelf>;
+ type AssocAsDefaultSelf: PartialEq;
+ fn convert(self) -> Self::AssocAsDefaultSelf;
+ fn convert2(self) -> Self::TraitSelf;
+}
+trait Request {
+ type Output;
+ fn output() -> Self::Output;
+}
+ "#,
+ );
+}
+
+#[test]
+fn bin_op_adt_with_rhs_primitive() {
+ check_infer_with_mismatches(
+ r#"
+#[lang = "add"]
+pub trait Add<Rhs = Self> {
+ type Output;
+ fn add(self, rhs: Rhs) -> Self::Output;
+}
+
+struct Wrapper(u32);
+impl Add<u32> for Wrapper {
+ type Output = Self;
+ fn add(self, rhs: u32) -> Wrapper {
+ Wrapper(rhs)
+ }
+}
+fn main(){
+ let wrapped = Wrapper(10);
+ let num: u32 = 2;
+ let res = wrapped + num;
+
+}"#,
+ expect![[r#"
+ 72..76 'self': Self
+ 78..81 'rhs': Rhs
+ 192..196 'self': Wrapper
+ 198..201 'rhs': u32
+ 219..247 '{ ... }': Wrapper
+ 229..236 'Wrapper': Wrapper(u32) -> Wrapper
+ 229..241 'Wrapper(rhs)': Wrapper
+ 237..240 'rhs': u32
+ 259..345 '{ ...um; }': ()
+ 269..276 'wrapped': Wrapper
+ 279..286 'Wrapper': Wrapper(u32) -> Wrapper
+ 279..290 'Wrapper(10)': Wrapper
+ 287..289 '10': u32
+ 300..303 'num': u32
+ 311..312 '2': u32
+ 322..325 'res': Wrapper
+ 328..335 'wrapped': Wrapper
+ 328..341 'wrapped + num': Wrapper
+ 338..341 'num': u32
+ "#]],
+ )
+}
+
+#[test]
+fn array_length() {
+ check_infer(
+ r#"
+trait T {
+ type Output;
+ fn do_thing(&self) -> Self::Output;
+}
+
+impl T for [u8; 4] {
+ type Output = usize;
+ fn do_thing(&self) -> Self::Output {
+ 2
+ }
+}
+
+impl T for [u8; 2] {
+ type Output = u8;
+ fn do_thing(&self) -> Self::Output {
+ 2
+ }
+}
+
+fn main() {
+ let v = [0u8; 2];
+ let v2 = v.do_thing();
+ let v3 = [0u8; 4];
+ let v4 = v3.do_thing();
+}
+"#,
+ expect![[r#"
+ 44..48 'self': &Self
+ 133..137 'self': &[u8; 4]
+ 155..172 '{ ... }': usize
+ 165..166 '2': usize
+ 236..240 'self': &[u8; 2]
+ 258..275 '{ ... }': u8
+ 268..269 '2': u8
+ 289..392 '{ ...g(); }': ()
+ 299..300 'v': [u8; 2]
+ 303..311 '[0u8; 2]': [u8; 2]
+ 304..307 '0u8': u8
+ 309..310 '2': usize
+ 321..323 'v2': u8
+ 326..327 'v': [u8; 2]
+ 326..338 'v.do_thing()': u8
+ 348..350 'v3': [u8; 4]
+ 353..361 '[0u8; 4]': [u8; 4]
+ 354..357 '0u8': u8
+ 359..360 '4': usize
+ 371..373 'v4': usize
+ 376..378 'v3': [u8; 4]
+ 376..389 'v3.do_thing()': usize
+ "#]],
+ )
+}
+
+#[test]
+fn const_generics() {
+ check_infer(
+ r#"
+trait T {
+ type Output;
+ fn do_thing(&self) -> Self::Output;
+}
+
+impl<const L: usize> T for [u8; L] {
+ type Output = [u8; L];
+ fn do_thing(&self) -> Self::Output {
+ *self
+ }
+}
+
+fn main() {
+ let v = [0u8; 2];
+ let v2 = v.do_thing();
+}
+"#,
+ expect![[r#"
+ 44..48 'self': &Self
+ 151..155 'self': &[u8; L]
+ 173..194 '{ ... }': [u8; L]
+ 183..188 '*self': [u8; L]
+ 184..188 'self': &[u8; L]
+ 208..260 '{ ...g(); }': ()
+ 218..219 'v': [u8; 2]
+ 222..230 '[0u8; 2]': [u8; 2]
+ 223..226 '0u8': u8
+ 228..229 '2': usize
+ 240..242 'v2': [u8; 2]
+ 245..246 'v': [u8; 2]
+ 245..257 'v.do_thing()': [u8; 2]
+ "#]],
+ )
+}
+
+#[test]
+fn fn_returning_unit() {
+ check_infer_with_mismatches(
+ r#"
+//- minicore: fn
+fn test<F: FnOnce()>(f: F) {
+ let _: () = f();
+}"#,
+ expect![[r#"
+ 21..22 'f': F
+ 27..51 '{ ...f(); }': ()
+ 37..38 '_': ()
+ 45..46 'f': F
+ 45..48 'f()': ()
+ "#]],
+ );
+}
+
+#[test]
+fn trait_in_scope_of_trait_impl() {
+ check_infer(
+ r#"
+mod foo {
+ pub trait Foo {
+ fn foo(self);
+ fn bar(self) -> usize { 0 }
+ }
+}
+impl foo::Foo for u32 {
+ fn foo(self) {
+ let _x = self.bar();
+ }
+}
+ "#,
+ expect![[r#"
+ 45..49 'self': Self
+ 67..71 'self': Self
+ 82..87 '{ 0 }': usize
+ 84..85 '0': usize
+ 131..135 'self': u32
+ 137..173 '{ ... }': ()
+ 151..153 '_x': usize
+ 156..160 'self': u32
+ 156..166 'self.bar()': usize
+ "#]],
+ );
+}
+
+#[test]
+fn infer_async_ret_type() {
+ check_types(
+ r#"
+//- minicore: future, result
+struct Fooey;
+
+impl Fooey {
+ fn collect<B: Convert>(self) -> B {
+ B::new()
+ }
+}
+
+trait Convert {
+ fn new() -> Self;
+}
+impl Convert for u32 {
+ fn new() -> Self { 0 }
+}
+
+async fn get_accounts() -> Result<u32, ()> {
+ let ret = Fooey.collect();
+ // ^^^^^^^^^^^^^^^ u32
+ Ok(ret)
+}
+"#,
+ );
+}
+
+#[test]
+fn local_impl_1() {
+ check!(block_local_impls);
+ check_types(
+ r#"
+trait Trait<T> {
+ fn foo(&self) -> T;
+}
+
+fn test() {
+ struct S;
+ impl Trait<u32> for S {
+ fn foo(&self) -> u32 { 0 }
+ }
+
+ S.foo();
+ // ^^^^^^^ u32
+}
+"#,
+ );
+}
+
+#[test]
+fn local_impl_2() {
+ check!(block_local_impls);
+ check_types(
+ r#"
+struct S;
+
+fn test() {
+ trait Trait<T> {
+ fn foo(&self) -> T;
+ }
+ impl Trait<u32> for S {
+ fn foo(&self) -> u32 { 0 }
+ }
+
+ S.foo();
+ // ^^^^^^^ u32
+}
+"#,
+ );
+}
+
+#[test]
+fn local_impl_3() {
+ check!(block_local_impls);
+ check_types(
+ r#"
+trait Trait<T> {
+ fn foo(&self) -> T;
+}
+
+fn test() {
+ struct S1;
+ {
+ struct S2;
+
+ impl Trait<S1> for S2 {
+ fn foo(&self) -> S1 { S1 }
+ }
+
+ S2.foo();
+ // ^^^^^^^^ S1
+ }
+}
+"#,
+ );
+}
+
+#[test]
+fn associated_type_sized_bounds() {
+ check_infer(
+ r#"
+//- minicore: sized
+struct Yes;
+trait IsSized { const IS_SIZED: Yes; }
+impl<T: Sized> IsSized for T { const IS_SIZED: Yes = Yes; }
+
+trait Foo {
+ type Explicit: Sized;
+ type Implicit;
+ type Relaxed: ?Sized;
+}
+fn f<F: Foo>() {
+ F::Explicit::IS_SIZED;
+ F::Implicit::IS_SIZED;
+ F::Relaxed::IS_SIZED;
+}
+"#,
+ expect![[r#"
+ 104..107 'Yes': Yes
+ 212..295 '{ ...ZED; }': ()
+ 218..239 'F::Exp..._SIZED': Yes
+ 245..266 'F::Imp..._SIZED': Yes
+ 272..292 'F::Rel..._SIZED': {unknown}
+ "#]],
+ );
+}
+
+#[test]
+fn dyn_map() {
+ check_types(
+ r#"
+pub struct Key<K, V, P = (K, V)> {}
+
+pub trait Policy {
+ type K;
+ type V;
+}
+
+impl<K, V> Policy for (K, V) {
+ type K = K;
+ type V = V;
+}
+
+pub struct KeyMap<KEY> {}
+
+impl<P: Policy> KeyMap<Key<P::K, P::V, P>> {
+ pub fn get(&self, key: &P::K) -> P::V {
+ loop {}
+ }
+}
+
+struct Fn {}
+struct FunctionId {}
+
+fn test() {
+ let key_map: &KeyMap<Key<Fn, FunctionId>> = loop {};
+ let key;
+ let result = key_map.get(key);
+ //^^^^^^ FunctionId
+}
+"#,
+ )
+}
+
+#[test]
+fn dyn_multiple_auto_traits_in_different_order() {
+ check_no_mismatches(
+ r#"
+auto trait Send {}
+auto trait Sync {}
+
+fn f(t: &(dyn Sync + Send)) {}
+fn g(t: &(dyn Send + Sync)) {
+ f(t);
+}
+ "#,
+ );
+
+ check_no_mismatches(
+ r#"
+auto trait Send {}
+auto trait Sync {}
+trait T {}
+
+fn f(t: &(dyn T + Send + Sync)) {}
+fn g(t: &(dyn Sync + T + Send)) {
+ f(t);
+}
+ "#,
+ );
+
+ check_infer_with_mismatches(
+ r#"
+auto trait Send {}
+auto trait Sync {}
+trait T1 {}
+trait T2 {}
+
+fn f(t: &(dyn T1 + T2 + Send + Sync)) {}
+fn g(t: &(dyn Sync + T2 + T1 + Send)) {
+ f(t);
+}
+ "#,
+ expect![[r#"
+ 68..69 't': &{unknown}
+ 101..103 '{}': ()
+ 109..110 't': &{unknown}
+ 142..155 '{ f(t); }': ()
+ 148..149 'f': fn f(&{unknown})
+ 148..152 'f(t)': ()
+ 150..151 't': &{unknown}
+ "#]],
+ );
+
+ check_no_mismatches(
+ r#"
+auto trait Send {}
+auto trait Sync {}
+trait T {
+ type Proj: Send + Sync;
+}
+
+fn f(t: &(dyn T<Proj = ()> + Send + Sync)) {}
+fn g(t: &(dyn Sync + T<Proj = ()> + Send)) {
+ f(t);
+}
+ "#,
+ );
+}
+
+#[test]
+fn dyn_multiple_projection_bounds() {
+ check_no_mismatches(
+ r#"
+trait Trait {
+ type T;
+ type U;
+}
+
+fn f(t: &dyn Trait<T = (), U = ()>) {}
+fn g(t: &dyn Trait<U = (), T = ()>) {
+ f(t);
+}
+ "#,
+ );
+
+ check_types(
+ r#"
+trait Trait {
+ type T;
+}
+
+fn f(t: &dyn Trait<T = (), T = ()>) {}
+ //^&{unknown}
+ "#,
+ );
+}
+
+#[test]
+fn dyn_duplicate_auto_trait() {
+ check_no_mismatches(
+ r#"
+auto trait Send {}
+
+fn f(t: &(dyn Send + Send)) {}
+fn g(t: &(dyn Send)) {
+ f(t);
+}
+ "#,
+ );
+
+ check_no_mismatches(
+ r#"
+auto trait Send {}
+trait T {}
+
+fn f(t: &(dyn T + Send + Send)) {}
+fn g(t: &(dyn T + Send)) {
+ f(t);
+}
+ "#,
+ );
+}
+
+#[test]
+fn gats_in_path() {
+ check_types(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+trait PointerFamily {
+ type Pointer<T>: Deref<Target = T>;
+}
+
+fn f<P: PointerFamily>(p: P::Pointer<i32>) {
+ let a = *p;
+ //^ i32
+}
+fn g<P: PointerFamily>(p: <P as PointerFamily>::Pointer<i32>) {
+ let a = *p;
+ //^ i32
+}
+ "#,
+ );
+}
+
+#[test]
+fn gats_with_impl_trait() {
+ // FIXME: the last function (`fn i()`) is not valid Rust as of this writing because you cannot
+ // specify the same associated type multiple times even if their arguments are different (c.f.
+ // `fn h()`, which is valid). Reconsider how to treat these invalid types.
+ check_types(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+
+trait Trait {
+ type Assoc<T>: Deref<Target = T>;
+ fn get<U>(&self) -> Self::Assoc<U>;
+}
+
+fn f<T>(v: impl Trait) {
+ let a = v.get::<i32>().deref();
+ //^ &i32
+ let a = v.get::<T>().deref();
+ //^ &T
+}
+fn g<'a, T: 'a>(v: impl Trait<Assoc<T> = &'a T>) {
+ let a = v.get::<T>();
+ //^ &T
+ let a = v.get::<()>();
+ //^ Trait::Assoc<(), impl Trait<Assoc<T> = &T>>
+}
+fn h<'a>(v: impl Trait<Assoc<i32> = &'a i32> + Trait<Assoc<i64> = &'a i64>) {
+ let a = v.get::<i32>();
+ //^ &i32
+ let a = v.get::<i64>();
+ //^ &i64
+}
+fn i<'a>(v: impl Trait<Assoc<i32> = &'a i32, Assoc<i64> = &'a i64>) {
+ let a = v.get::<i32>();
+ //^ &i32
+ let a = v.get::<i64>();
+ //^ &i64
+}
+ "#,
+ );
+}
+
+#[test]
+fn gats_with_dyn() {
+ // This test is here to keep track of how we infer things despite traits with GATs being not
+ // object-safe currently.
+ // FIXME: reconsider how to treat these invalid types.
+ check_infer_with_mismatches(
+ r#"
+//- minicore: deref
+use core::ops::Deref;
+
+trait Trait {
+ type Assoc<T>: Deref<Target = T>;
+ fn get<U>(&self) -> Self::Assoc<U>;
+}
+
+fn f<'a>(v: &dyn Trait<Assoc<i32> = &'a i32>) {
+ v.get::<i32>().deref();
+}
+ "#,
+ expect![[r#"
+ 90..94 'self': &Self
+ 127..128 'v': &(dyn Trait<Assoc<i32> = &i32>)
+ 164..195 '{ ...f(); }': ()
+ 170..171 'v': &(dyn Trait<Assoc<i32> = &i32>)
+ 170..184 'v.get::<i32>()': &i32
+ 170..192 'v.get:...eref()': &i32
+ "#]],
+ );
+}
+
+#[test]
+fn gats_in_associated_type_binding() {
+ check_types(
+ r#"
+trait Trait {
+ type Assoc<T>;
+ fn get<U>(&self) -> Self::Assoc<U>;
+}
+
+fn f<T>(t: T)
+where
+ T: Trait<Assoc<i32> = u32>,
+ T: Trait<Assoc<isize> = usize>,
+{
+ let a = t.get::<i32>();
+ //^ u32
+ let a = t.get::<isize>();
+ //^ usize
+ let a = t.get::<()>();
+ //^ Trait::Assoc<(), T>
+}
+
+ "#,
+ );
+}
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "hir"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+rustc-hash = "1.1.0"
+either = "1.7.0"
+arrayvec = "0.7.2"
+itertools = "0.10.5"
+smallvec = "1.10.0"
+once_cell = "1.15.0"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+hir-expand = { path = "../hir-expand", version = "0.0.0" }
+hir-def = { path = "../hir-def", version = "0.0.0" }
+hir-ty = { path = "../hir-ty", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
--- /dev/null
- hir_ty::display::HirDisplay,
+//! HIR (previously known as descriptors) provides a high-level object oriented
+//! access to Rust code.
+//!
+//! The principal difference between HIR and syntax trees is that HIR is bound
+//! to a particular crate instance. That is, it has cfg flags and features
+//! applied. So, the relation between syntax and HIR is many-to-one.
+//!
+//! HIR is the public API of all of the compiler logic above syntax trees.
+//! It is written in "OO" style. Each type is self contained (as in, it knows its
+//! parents and full context). It should be "clean code".
+//!
+//! `hir_*` crates are the implementation of the compiler logic.
+//! They are written in "ECS" style, with relatively little abstractions.
+//! Many types are not self-contained, and explicitly use local indexes, arenas, etc.
+//!
+//! `hir` is what insulates the "we don't know how to actually write an incremental compiler"
+//! from the ide with completions, hovers, etc. It is a (soft, internal) boundary:
+//! <https://www.tedinski.com/2018/02/06/system-boundaries.html>.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![recursion_limit = "512"]
+
+mod semantics;
+mod source_analyzer;
+
+mod from_id;
+mod attrs;
+mod has_source;
+
+pub mod diagnostics;
+pub mod db;
+pub mod symbols;
+
+mod display;
+
+use std::{iter, ops::ControlFlow, sync::Arc};
+
+use arrayvec::ArrayVec;
+use base_db::{CrateDisplayName, CrateId, CrateOrigin, Edition, FileId, ProcMacroKind};
+use either::Either;
+use hir_def::{
+ adt::{ReprData, VariantData},
+ body::{BodyDiagnostic, SyntheticSyntax},
+ expr::{BindingAnnotation, LabelId, Pat, PatId},
+ generics::{TypeOrConstParamData, TypeParamProvenance},
+ item_tree::ItemTreeNode,
+ lang_item::LangItemTarget,
+ nameres::{self, diagnostics::DefDiagnostic},
+ per_ns::PerNs,
+ resolver::{HasResolver, Resolver},
+ src::HasSource as _,
+ AdtId, AssocItemId, AssocItemLoc, AttrDefId, ConstId, ConstParamId, DefWithBodyId, EnumId,
+ EnumVariantId, FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, LifetimeParamId,
+ LocalEnumVariantId, LocalFieldId, Lookup, MacroExpander, MacroId, ModuleId, StaticId, StructId,
+ TraitId, TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId,
+};
+use hir_expand::{name::name, MacroCallKind};
+use hir_ty::{
+ all_super_traits, autoderef,
+ consteval::{unknown_const_as_generic, ComputedExpr, ConstEvalError, ConstExt},
+ diagnostics::BodyValidationDiagnostic,
+ method_resolution::{self, TyFingerprint},
+ primitive::UintTy,
+ traits::FnTrait,
+ AliasTy, CallableDefId, CallableSig, Canonical, CanonicalVarKinds, Cast, ClosureId,
+ GenericArgData, Interner, ParamKind, QuantifiedWhereClause, Scalar, Substitution,
+ TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyDefId, TyExt, TyKind, WhereClause,
+};
+use itertools::Itertools;
+use nameres::diagnostics::DefDiagnosticKind;
+use once_cell::unsync::Lazy;
+use rustc_hash::FxHashSet;
+use stdx::{impl_from, never};
+use syntax::{
+ ast::{self, Expr, HasAttrs as _, HasDocComments, HasName},
+ AstNode, AstPtr, SmolStr, SyntaxNodePtr, TextRange, T,
+};
+
+use crate::db::{DefDatabase, HirDatabase};
+
+pub use crate::{
+ attrs::{HasAttrs, Namespace},
+ diagnostics::{
+ AnyDiagnostic, BreakOutsideOfLoop, InactiveCode, IncorrectCase, InvalidDeriveTarget,
+ MacroError, MalformedDerive, MismatchedArgCount, MissingFields, MissingMatchArms,
+ MissingUnsafe, NoSuchField, ReplaceFilterMapNextWithFindMap, TypeMismatch,
+ UnimplementedBuiltinMacro, UnresolvedExternCrate, UnresolvedImport, UnresolvedMacroCall,
+ UnresolvedModule, UnresolvedProcMacro,
+ },
+ has_source::HasSource,
+ semantics::{PathResolution, Semantics, SemanticsScope, TypeInfo, VisibleTraits},
+};
+
+// Be careful with these re-exports.
+//
+// `hir` is the boundary between the compiler and the IDE. It should try hard to
+// isolate the compiler from the ide, to allow the two to be refactored
+// independently. Re-exporting something from the compiler is the sure way to
+// breach the boundary.
+//
+// Generally, a refactoring which *removes* a name from this list is a good
+// idea!
+pub use {
+ cfg::{CfgAtom, CfgExpr, CfgOptions},
+ hir_def::{
+ adt::StructKind,
+ attr::{Attr, Attrs, AttrsWithOwner, Documentation},
+ builtin_attr::AttributeTemplate,
+ find_path::PrefixKind,
+ import_map,
+ nameres::ModuleSource,
+ path::{ModPath, PathKind},
+ type_ref::{Mutability, TypeRef},
+ visibility::Visibility,
+ },
+ hir_expand::{
+ name::{known, Name},
+ ExpandResult, HirFileId, InFile, MacroFile, Origin,
+ },
- let ty = hir_ty::replace_errors_with_variables(&self.ty);
- let sig = hir_ty::callable_sig_from_fnonce(&ty, self.env.clone(), db)?;
+ hir_ty::{display::HirDisplay, PointerCast, Safety},
+};
+
+// These are negative re-exports: pub using these names is forbidden, they
+// should remain private to hir internals.
+#[allow(unused)]
+use {
+ hir_def::path::Path,
+ hir_expand::{hygiene::Hygiene, name::AsName},
+};
+
+/// hir::Crate describes a single crate. It's the main interface with which
+/// a crate's dependencies interact. Mostly, it should be just a proxy for the
+/// root module.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Crate {
+ pub(crate) id: CrateId,
+}
+
+#[derive(Debug)]
+pub struct CrateDependency {
+ pub krate: Crate,
+ pub name: Name,
+}
+
+impl Crate {
+ pub fn origin(self, db: &dyn HirDatabase) -> CrateOrigin {
+ db.crate_graph()[self.id].origin.clone()
+ }
+
+ pub fn is_builtin(self, db: &dyn HirDatabase) -> bool {
+ matches!(self.origin(db), CrateOrigin::Lang(_))
+ }
+
+ pub fn dependencies(self, db: &dyn HirDatabase) -> Vec<CrateDependency> {
+ db.crate_graph()[self.id]
+ .dependencies
+ .iter()
+ .map(|dep| {
+ let krate = Crate { id: dep.crate_id };
+ let name = dep.as_name();
+ CrateDependency { krate, name }
+ })
+ .collect()
+ }
+
+ pub fn reverse_dependencies(self, db: &dyn HirDatabase) -> Vec<Crate> {
+ let crate_graph = db.crate_graph();
+ crate_graph
+ .iter()
+ .filter(|&krate| {
+ crate_graph[krate].dependencies.iter().any(|it| it.crate_id == self.id)
+ })
+ .map(|id| Crate { id })
+ .collect()
+ }
+
+ pub fn transitive_reverse_dependencies(
+ self,
+ db: &dyn HirDatabase,
+ ) -> impl Iterator<Item = Crate> {
+ db.crate_graph().transitive_rev_deps(self.id).map(|id| Crate { id })
+ }
+
+ pub fn root_module(self, db: &dyn HirDatabase) -> Module {
+ let def_map = db.crate_def_map(self.id);
+ Module { id: def_map.module_id(def_map.root()) }
+ }
+
+ pub fn modules(self, db: &dyn HirDatabase) -> Vec<Module> {
+ let def_map = db.crate_def_map(self.id);
+ def_map.modules().map(|(id, _)| def_map.module_id(id).into()).collect()
+ }
+
+ pub fn root_file(self, db: &dyn HirDatabase) -> FileId {
+ db.crate_graph()[self.id].root_file_id
+ }
+
+ pub fn edition(self, db: &dyn HirDatabase) -> Edition {
+ db.crate_graph()[self.id].edition
+ }
+
+ pub fn version(self, db: &dyn HirDatabase) -> Option<String> {
+ db.crate_graph()[self.id].version.clone()
+ }
+
+ pub fn display_name(self, db: &dyn HirDatabase) -> Option<CrateDisplayName> {
+ db.crate_graph()[self.id].display_name.clone()
+ }
+
+ pub fn query_external_importables(
+ self,
+ db: &dyn DefDatabase,
+ query: import_map::Query,
+ ) -> impl Iterator<Item = Either<ModuleDef, Macro>> {
+ let _p = profile::span("query_external_importables");
+ import_map::search_dependencies(db, self.into(), query).into_iter().map(|item| {
+ match ItemInNs::from(item) {
+ ItemInNs::Types(mod_id) | ItemInNs::Values(mod_id) => Either::Left(mod_id),
+ ItemInNs::Macros(mac_id) => Either::Right(mac_id),
+ }
+ })
+ }
+
+ pub fn all(db: &dyn HirDatabase) -> Vec<Crate> {
+ db.crate_graph().iter().map(|id| Crate { id }).collect()
+ }
+
+ /// Try to get the root URL of the documentation of a crate.
+ pub fn get_html_root_url(self: &Crate, db: &dyn HirDatabase) -> Option<String> {
+ // Look for #![doc(html_root_url = "...")]
+ let attrs = db.attrs(AttrDefId::ModuleId(self.root_module(db).into()));
+ let doc_url = attrs.by_key("doc").find_string_value_in_tt("html_root_url");
+ doc_url.map(|s| s.trim_matches('"').trim_end_matches('/').to_owned() + "/")
+ }
+
+ pub fn cfg(&self, db: &dyn HirDatabase) -> CfgOptions {
+ db.crate_graph()[self.id].cfg_options.clone()
+ }
+
+ pub fn potential_cfg(&self, db: &dyn HirDatabase) -> CfgOptions {
+ db.crate_graph()[self.id].potential_cfg_options.clone()
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Module {
+ pub(crate) id: ModuleId,
+}
+
+/// The defs which can be visible in the module.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ModuleDef {
+ Module(Module),
+ Function(Function),
+ Adt(Adt),
+ // Can't be directly declared, but can be imported.
+ Variant(Variant),
+ Const(Const),
+ Static(Static),
+ Trait(Trait),
+ TypeAlias(TypeAlias),
+ BuiltinType(BuiltinType),
+ Macro(Macro),
+}
+impl_from!(
+ Module,
+ Function,
+ Adt(Struct, Enum, Union),
+ Variant,
+ Const,
+ Static,
+ Trait,
+ TypeAlias,
+ BuiltinType,
+ Macro
+ for ModuleDef
+);
+
+impl From<VariantDef> for ModuleDef {
+ fn from(var: VariantDef) -> Self {
+ match var {
+ VariantDef::Struct(t) => Adt::from(t).into(),
+ VariantDef::Union(t) => Adt::from(t).into(),
+ VariantDef::Variant(t) => t.into(),
+ }
+ }
+}
+
+impl ModuleDef {
+ /// The module containing this def, or `None` for builtin types and the
+ /// crate root.
+ pub fn module(self, db: &dyn HirDatabase) -> Option<Module> {
+ match self {
+ // A module's container is its parent (`None` for the crate root).
+ ModuleDef::Module(m) => m.parent(db),
+ // Builtin types don't live in any module.
+ ModuleDef::BuiltinType(_) => None,
+ ModuleDef::Function(f) => Some(f.module(db)),
+ ModuleDef::Adt(a) => Some(a.module(db)),
+ ModuleDef::Variant(v) => Some(v.module(db)),
+ ModuleDef::Const(c) => Some(c.module(db)),
+ ModuleDef::Static(s) => Some(s.module(db)),
+ ModuleDef::Trait(t) => Some(t.module(db)),
+ ModuleDef::TypeAlias(t) => Some(t.module(db)),
+ ModuleDef::Macro(m) => Some(m.module(db)),
+ }
+ }
+
+ /// Crate-relative path of this item, e.g. `collections::map::Entry`.
+ pub fn canonical_path(&self, db: &dyn HirDatabase) -> Option<String> {
+ let mut segments = vec![self.name(db)?];
+ // `path_to_root` yields innermost-first; unnamed modules (the crate
+ // root) contribute no segment.
+ segments
+ .extend(self.module(db)?.path_to_root(db).into_iter().filter_map(|m| m.name(db)));
+ segments.reverse();
+ Some(segments.into_iter().join("::"))
+ }
+
+ /// Modules from the crate root down to the one containing this item.
+ pub fn canonical_module_path(
+ &self,
+ db: &dyn HirDatabase,
+ ) -> Option<impl Iterator<Item = Module>> {
+ let containing = self.module(db)?;
+ Some(containing.path_to_root(db).into_iter().rev())
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+ match self {
+ // Modules and consts may legitimately be unnamed.
+ ModuleDef::Module(m) => m.name(db),
+ ModuleDef::Const(c) => c.name(db),
+ ModuleDef::Adt(a) => Some(a.name(db)),
+ ModuleDef::Trait(t) => Some(t.name(db)),
+ ModuleDef::Function(f) => Some(f.name(db)),
+ ModuleDef::Variant(v) => Some(v.name(db)),
+ ModuleDef::TypeAlias(t) => Some(t.name(db)),
+ ModuleDef::Static(s) => Some(s.name(db)),
+ ModuleDef::Macro(m) => Some(m.name(db)),
+ ModuleDef::BuiltinType(b) => Some(b.name()),
+ }
+ }
+
+ /// Collects this item's own diagnostics (body diagnostics for bodied defs,
+ /// otherwise case-convention lints).
+ pub fn diagnostics(self, db: &dyn HirDatabase) -> Vec<AnyDiagnostic> {
+ let id = match self {
+ // Builtin types and macros produce no diagnostics of their own.
+ ModuleDef::BuiltinType(_) | ModuleDef::Macro(_) => return Vec::new(),
+ ModuleDef::Adt(Adt::Struct(s)) => s.id.into(),
+ ModuleDef::Adt(Adt::Enum(e)) => e.id.into(),
+ ModuleDef::Adt(Adt::Union(u)) => u.id.into(),
+ ModuleDef::Trait(t) => t.id.into(),
+ ModuleDef::Function(f) => f.id.into(),
+ ModuleDef::TypeAlias(t) => t.id.into(),
+ ModuleDef::Module(m) => m.id.into(),
+ ModuleDef::Const(c) => c.id.into(),
+ ModuleDef::Static(s) => s.id.into(),
+ ModuleDef::Variant(v) => {
+ EnumVariantId { parent: v.parent.into(), local_id: v.id }.into()
+ }
+ };
+
+ // Items outside any module (detached files) have nothing to report.
+ let module = match self.module(db) {
+ Some(m) => m,
+ None => return Vec::new(),
+ };
+
+ let mut acc = Vec::new();
+ if let Some(def) = self.as_def_with_body() {
+ def.diagnostics(db, &mut acc);
+ } else {
+ for diag in hir_ty::diagnostics::incorrect_case(db, module.id.krate(), id) {
+ acc.push(diag.into())
+ }
+ }
+ acc
+ }
+
+ /// Narrows to [`DefWithBody`] for items that have a body to type-check.
+ pub fn as_def_with_body(self) -> Option<DefWithBody> {
+ match self {
+ ModuleDef::Function(f) => Some(DefWithBody::from(f)),
+ ModuleDef::Const(c) => Some(DefWithBody::from(c)),
+ ModuleDef::Static(s) => Some(DefWithBody::from(s)),
+ ModuleDef::Variant(v) => Some(DefWithBody::from(v)),
+ ModuleDef::Module(_)
+ | ModuleDef::Adt(_)
+ | ModuleDef::Trait(_)
+ | ModuleDef::TypeAlias(_)
+ | ModuleDef::Macro(_)
+ | ModuleDef::BuiltinType(_) => None,
+ }
+ }
+
+ /// Attributes on this item; `None` for builtin types, which have none.
+ pub fn attrs(&self, db: &dyn HirDatabase) -> Option<AttrsWithOwner> {
+ let attrs = match self {
+ ModuleDef::BuiltinType(_) => return None,
+ ModuleDef::Module(it) => it.attrs(db),
+ ModuleDef::Function(it) => it.attrs(db),
+ ModuleDef::Adt(it) => it.attrs(db),
+ ModuleDef::Variant(it) => it.attrs(db),
+ ModuleDef::Const(it) => it.attrs(db),
+ ModuleDef::Static(it) => it.attrs(db),
+ ModuleDef::Trait(it) => it.attrs(db),
+ ModuleDef::TypeAlias(it) => it.attrs(db),
+ ModuleDef::Macro(it) => it.attrs(db),
+ };
+ Some(attrs)
+ }
+}
+
+impl HasVisibility for ModuleDef {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ // Builtin types are always nameable; everything else defers to its def.
+ match *self {
+ ModuleDef::BuiltinType(_) => Visibility::Public,
+ ModuleDef::Module(m) => m.visibility(db),
+ ModuleDef::Function(f) => f.visibility(db),
+ ModuleDef::Adt(a) => a.visibility(db),
+ ModuleDef::Variant(v) => v.visibility(db),
+ ModuleDef::Const(c) => c.visibility(db),
+ ModuleDef::Static(s) => s.visibility(db),
+ ModuleDef::Trait(t) => t.visibility(db),
+ ModuleDef::TypeAlias(t) => t.visibility(db),
+ ModuleDef::Macro(m) => m.visibility(db),
+ }
+ }
+}
+
+impl Module {
+ /// Name of this module.
+ ///
+ /// The crate root has no `parent` and therefore no name; for every other
+ /// module the declared name is recovered from the parent's `children` map.
+ pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+ let def_map = self.id.def_map(db.upcast());
+ let parent = def_map[self.id.local_id].parent?;
+ def_map[parent].children.iter().find_map(|(name, module_id)| {
+ if *module_id == self.id.local_id {
+ Some(name.clone())
+ } else {
+ None
+ }
+ })
+ }
+
+ /// Returns the crate this module is part of.
+ pub fn krate(self) -> Crate {
+ Crate { id: self.id.krate() }
+ }
+
+ /// Topmost parent of this module. Every module has a `crate_root`, but some
+ /// might be missing `krate`. This can happen if a module's file is not included
+ /// in the module tree of any target in `Cargo.toml`.
+ pub fn crate_root(self, db: &dyn HirDatabase) -> Module {
+ let def_map = db.crate_def_map(self.id.krate());
+ Module { id: def_map.module_id(def_map.root()) }
+ }
+
+ /// Whether this module is the root (`lib.rs`/`main.rs`) of its crate.
+ pub fn is_crate_root(self, db: &dyn HirDatabase) -> bool {
+ let def_map = db.crate_def_map(self.id.krate());
+ def_map.root() == self.id.local_id
+ }
+
+ /// Iterates over all child modules.
+ pub fn children(self, db: &dyn HirDatabase) -> impl Iterator<Item = Module> {
+ let def_map = self.id.def_map(db.upcast());
+ // Collected into a `Vec` so the returned iterator doesn't borrow the
+ // `DefMap`.
+ let children = def_map[self.id.local_id]
+ .children
+ .iter()
+ .map(|(_, module_id)| Module { id: def_map.module_id(*module_id) })
+ .collect::<Vec<_>>();
+ children.into_iter()
+ }
+
+ /// Finds a parent module.
+ pub fn parent(self, db: &dyn HirDatabase) -> Option<Module> {
+ // FIXME: handle block expressions as modules (their parent is in a different DefMap)
+ let def_map = self.id.def_map(db.upcast());
+ let parent_id = def_map[self.id.local_id].parent?;
+ Some(Module { id: def_map.module_id(parent_id) })
+ }
+
+ /// Chain of modules starting with `self` and ending with the crate root.
+ pub fn path_to_root(self, db: &dyn HirDatabase) -> Vec<Module> {
+ let mut res = vec![self];
+ let mut curr = self;
+ while let Some(next) = curr.parent(db) {
+ res.push(next);
+ curr = next
+ }
+ res
+ }
+
+ /// Returns a `ModuleScope`: a set of items, visible in this module.
+ ///
+ /// When `visible_from` is given, entries not visible from that module are
+ /// filtered out (entries that were already empty are kept as-is).
+ pub fn scope(
+ self,
+ db: &dyn HirDatabase,
+ visible_from: Option<Module>,
+ ) -> Vec<(Name, ScopeDef)> {
+ self.id.def_map(db.upcast())[self.id.local_id]
+ .scope
+ .entries()
+ .filter_map(|(name, def)| {
+ if let Some(m) = visible_from {
+ let filtered =
+ def.filter_visibility(|vis| vis.is_visible_from(db.upcast(), m.id));
+ if filtered.is_none() && !def.is_none() {
+ // Everything in this entry is hidden from `m`; drop it.
+ None
+ } else {
+ Some((name, filtered))
+ }
+ } else {
+ Some((name, def))
+ }
+ })
+ .flat_map(|(name, def)| {
+ // One scope entry can hold several items (type/value/macro
+ // namespaces); flatten them into separate pairs.
+ ScopeDef::all_items(def).into_iter().map(move |item| (name.clone(), item))
+ })
+ .collect()
+ }
+
+ /// Fills `acc` with the module's diagnostics.
+ ///
+ /// Covers nameres diagnostics for this module, diagnostics of every item
+ /// declared in it (recursing into inline submodules), and diagnostics of
+ /// its impl blocks and their associated items.
+ pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
+ let _p = profile::span("Module::diagnostics").detail(|| {
+ format!("{:?}", self.name(db).map_or("<unknown>".into(), |name| name.to_string()))
+ });
+ let def_map = self.id.def_map(db.upcast());
+ for diag in def_map.diagnostics() {
+ if diag.in_module != self.id.local_id {
+ // FIXME: This is accidentally quadratic.
+ continue;
+ }
+ emit_def_diagnostic(db, acc, diag);
+ }
+ for decl in self.declarations(db) {
+ match decl {
+ ModuleDef::Module(m) => {
+ // Only add diagnostics from inline modules
+ if def_map[m.id.local_id].origin.is_inline() {
+ m.diagnostics(db, acc)
+ }
+ }
+ ModuleDef::Trait(t) => {
+ for diag in db.trait_data_with_diagnostics(t.id).1.iter() {
+ emit_def_diagnostic(db, acc, diag);
+ }
+ acc.extend(decl.diagnostics(db))
+ }
+ ModuleDef::Adt(adt) => {
+ match adt {
+ Adt::Struct(s) => {
+ for diag in db.struct_data_with_diagnostics(s.id).1.iter() {
+ emit_def_diagnostic(db, acc, diag);
+ }
+ }
+ Adt::Union(u) => {
+ for diag in db.union_data_with_diagnostics(u.id).1.iter() {
+ emit_def_diagnostic(db, acc, diag);
+ }
+ }
+ Adt::Enum(e) => {
+ // Variants are not part of `declarations`, so their
+ // diagnostics are collected while visiting the enum.
+ for v in e.variants(db) {
+ acc.extend(ModuleDef::Variant(v).diagnostics(db));
+ }
+
+ for diag in db.enum_data_with_diagnostics(e.id).1.iter() {
+ emit_def_diagnostic(db, acc, diag);
+ }
+ }
+ }
+ acc.extend(decl.diagnostics(db))
+ }
+ _ => acc.extend(decl.diagnostics(db)),
+ }
+ }
+
+ // Impl blocks are not `declarations`; handle them separately.
+ for impl_def in self.impl_defs(db) {
+ for diag in db.impl_data_with_diagnostics(impl_def.id).1.iter() {
+ emit_def_diagnostic(db, acc, diag);
+ }
+
+ for item in impl_def.items(db) {
+ let def: DefWithBody = match item {
+ AssocItem::Function(it) => it.into(),
+ AssocItem::Const(it) => it.into(),
+ // Type aliases have no body to check.
+ AssocItem::TypeAlias(_) => continue,
+ };
+
+ def.diagnostics(db, acc);
+ }
+ }
+ }
+
+ /// Items declared directly in this module, including unnamed consts.
+ pub fn declarations(self, db: &dyn HirDatabase) -> Vec<ModuleDef> {
+ let def_map = self.id.def_map(db.upcast());
+ let scope = &def_map[self.id.local_id].scope;
+ scope
+ .declarations()
+ .map(ModuleDef::from)
+ .chain(scope.unnamed_consts().map(|id| ModuleDef::Const(Const::from(id))))
+ .collect()
+ }
+
+ /// `macro_rules!` macros reachable here via textual (legacy) scoping.
+ pub fn legacy_macros(self, db: &dyn HirDatabase) -> Vec<Macro> {
+ let def_map = self.id.def_map(db.upcast());
+ let scope = &def_map[self.id.local_id].scope;
+ scope.legacy_macros().flat_map(|(_, it)| it).map(|&it| MacroId::from(it).into()).collect()
+ }
+
+ /// All `impl` blocks defined in this module.
+ pub fn impl_defs(self, db: &dyn HirDatabase) -> Vec<Impl> {
+ let def_map = self.id.def_map(db.upcast());
+ def_map[self.id.local_id].scope.impls().map(Impl::from).collect()
+ }
+
+ /// Finds a path that can be used to refer to the given item from within
+ /// this module, if possible.
+ pub fn find_use_path(
+ self,
+ db: &dyn DefDatabase,
+ item: impl Into<ItemInNs>,
+ prefer_no_std: bool,
+ ) -> Option<ModPath> {
+ hir_def::find_path::find_path(db, item.into().into(), self.into(), prefer_no_std)
+ }
+
+ /// Finds a path that can be used to refer to the given item from within
+ /// this module, if possible. This is used for returning import paths for use-statements.
+ pub fn find_use_path_prefixed(
+ self,
+ db: &dyn DefDatabase,
+ item: impl Into<ItemInNs>,
+ prefix_kind: PrefixKind,
+ prefer_no_std: bool,
+ ) -> Option<ModPath> {
+ hir_def::find_path::find_path_prefixed(
+ db,
+ item.into().into(),
+ self.into(),
+ prefix_kind,
+ prefer_no_std,
+ )
+ }
+}
+
+/// Lowers a single name-resolution-level [`DefDiagnostic`] into the
+/// corresponding IDE-facing diagnostic and pushes it onto `acc`.
+fn emit_def_diagnostic(db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>, diag: &DefDiagnostic) {
+ match &diag.kind {
+ DefDiagnosticKind::UnresolvedModule { ast: declaration, candidates } => {
+ let decl = declaration.to_node(db.upcast());
+ acc.push(
+ UnresolvedModule {
+ decl: InFile::new(declaration.file_id, AstPtr::new(&decl)),
+ candidates: candidates.clone(),
+ }
+ .into(),
+ )
+ }
+ DefDiagnosticKind::UnresolvedExternCrate { ast } => {
+ let item = ast.to_node(db.upcast());
+ acc.push(
+ UnresolvedExternCrate { decl: InFile::new(ast.file_id, AstPtr::new(&item)) }.into(),
+ );
+ }
+
+ DefDiagnosticKind::UnresolvedImport { id, index } => {
+ let file_id = id.file_id();
+ let item_tree = id.item_tree(db.upcast());
+ let import = &item_tree[id.value];
+
+ // A single `use` item can expand to several use-trees; `index`
+ // selects the specific one that failed to resolve.
+ let use_tree = import.use_tree_to_ast(db.upcast(), file_id, *index);
+ acc.push(
+ UnresolvedImport { decl: InFile::new(file_id, AstPtr::new(&use_tree)) }.into(),
+ );
+ }
+
+ DefDiagnosticKind::UnconfiguredCode { ast, cfg, opts } => {
+ // Item disabled by an unsatisfied `#[cfg(...)]` predicate.
+ let item = ast.to_node(db.upcast());
+ acc.push(
+ InactiveCode {
+ node: ast.with_value(AstPtr::new(&item).into()),
+ cfg: cfg.clone(),
+ opts: opts.clone(),
+ }
+ .into(),
+ );
+ }
+
+ DefDiagnosticKind::UnresolvedProcMacro { ast, krate } => {
+ let (node, precise_location, macro_name, kind) = precise_macro_call_location(ast, db);
+ acc.push(
+ UnresolvedProcMacro { node, precise_location, macro_name, kind, krate: *krate }
+ .into(),
+ );
+ }
+
+ DefDiagnosticKind::UnresolvedMacroCall { ast, path } => {
+ let (node, precise_location, _, _) = precise_macro_call_location(ast, db);
+ acc.push(
+ UnresolvedMacroCall {
+ macro_call: node,
+ precise_location,
+ path: path.clone(),
+ is_bang: matches!(ast, MacroCallKind::FnLike { .. }),
+ }
+ .into(),
+ );
+ }
+
+ DefDiagnosticKind::MacroError { ast, message } => {
+ let (node, precise_location, _, _) = precise_macro_call_location(ast, db);
+ acc.push(MacroError { node, precise_location, message: message.clone() }.into());
+ }
+
+ DefDiagnosticKind::UnimplementedBuiltinMacro { ast } => {
+ let node = ast.to_node(db.upcast());
+ // Must have a name, otherwise we wouldn't emit it.
+ let name = node.name().expect("unimplemented builtin macro with no name");
+ acc.push(
+ UnimplementedBuiltinMacro {
+ node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&name))),
+ }
+ .into(),
+ );
+ }
+ DefDiagnosticKind::InvalidDeriveTarget { ast, id } => {
+ let node = ast.to_node(db.upcast());
+ // `id` indexes the offending attribute on the item.
+ let derive = node.attrs().nth(*id as usize);
+ match derive {
+ Some(derive) => {
+ acc.push(
+ InvalidDeriveTarget {
+ node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
+ }
+ .into(),
+ );
+ }
+ None => stdx::never!("derive diagnostic on item without derive attribute"),
+ }
+ }
+ DefDiagnosticKind::MalformedDerive { ast, id } => {
+ let node = ast.to_node(db.upcast());
+ let derive = node.attrs().nth(*id as usize);
+ match derive {
+ Some(derive) => {
+ acc.push(
+ MalformedDerive {
+ node: ast.with_value(SyntaxNodePtr::from(AstPtr::new(&derive))),
+ }
+ .into(),
+ );
+ }
+ None => stdx::never!("derive diagnostic on item without derive attribute"),
+ }
+ }
+ }
+}
+
+/// For a macro call, computes the best location to attach a diagnostic to:
+/// the whole call node, an optional narrower text range (typically just the
+/// macro's name token), the macro name when recoverable, and the macro kind.
+fn precise_macro_call_location(
+ ast: &MacroCallKind,
+ db: &dyn HirDatabase,
+) -> (InFile<SyntaxNodePtr>, Option<TextRange>, Option<String>, MacroKind) {
+ // FIXME: maybe we actually want slightly different ranges for the different macro diagnostics
+ // - e.g. the full attribute for macro errors, but only the name for name resolution
+ match ast {
+ MacroCallKind::FnLike { ast_id, .. } => {
+ let node = ast_id.to_node(db.upcast());
+ (
+ ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
+ // Narrow to the last path segment's name, if the path is intact.
+ node.path()
+ .and_then(|it| it.segment())
+ .and_then(|it| it.name_ref())
+ .map(|it| it.syntax().text_range()),
+ node.path().and_then(|it| it.segment()).map(|it| it.to_string()),
+ MacroKind::ProcMacro,
+ )
+ }
+ MacroCallKind::Derive { ast_id, derive_attr_index, derive_index } => {
+ let node = ast_id.to_node(db.upcast());
+ // Compute the precise location of the macro name's token in the derive
+ // list.
+ let token = (|| {
+ // `derive_attr_index` counts doc comments and attributes
+ // together; only an actual attribute (Either::Left) qualifies.
+ let derive_attr = node
+ .doc_comments_and_attrs()
+ .nth(*derive_attr_index as usize)
+ .and_then(Either::left)?;
+ let token_tree = derive_attr.meta()?.token_tree()?;
+ // Split `#[derive(A, B, C)]` tokens into comma-separated runs,
+ // take the `derive_index`-th run, then its identifier token.
+ let group_by = token_tree
+ .syntax()
+ .children_with_tokens()
+ .filter_map(|elem| match elem {
+ syntax::NodeOrToken::Token(tok) => Some(tok),
+ _ => None,
+ })
+ .group_by(|t| t.kind() == T![,]);
+ let (_, mut group) = group_by
+ .into_iter()
+ .filter(|&(comma, _)| !comma)
+ .nth(*derive_index as usize)?;
+ group.find(|t| t.kind() == T![ident])
+ })();
+ (
+ ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&node))),
+ token.as_ref().map(|tok| tok.text_range()),
+ token.as_ref().map(ToString::to_string),
+ MacroKind::Derive,
+ )
+ }
+ MacroCallKind::Attr { ast_id, invoc_attr_index, .. } => {
+ let node = ast_id.to_node(db.upcast());
+ // The invoking attribute must exist, otherwise this call could not
+ // have been recorded; hence the panic on failure.
+ let attr = node
+ .doc_comments_and_attrs()
+ .nth((*invoc_attr_index) as usize)
+ .and_then(Either::left)
+ .unwrap_or_else(|| panic!("cannot find attribute #{}", invoc_attr_index));
+
+ (
+ ast_id.with_value(SyntaxNodePtr::from(AstPtr::new(&attr))),
+ Some(attr.syntax().text_range()),
+ attr.path()
+ .and_then(|path| path.segment())
+ .and_then(|seg| seg.name_ref())
+ .as_ref()
+ .map(ToString::to_string),
+ MacroKind::Attr,
+ )
+ }
+ }
+}
+
+impl HasVisibility for Module {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ // A module's visibility is stored directly in its `DefMap` entry.
+ self.id.def_map(db.upcast())[self.id.local_id].visibility
+ }
+}
+
+/// A field of a struct, union, or enum variant.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Field {
+ // The def (struct/union/variant) that owns this field.
+ pub(crate) parent: VariantDef,
+ // Index of the field within the parent's field list.
+ pub(crate) id: LocalFieldId,
+}
+
+/// Syntax a field originates from: named (`x: u32`) or positional (`u32`).
+#[derive(Debug, PartialEq, Eq)]
+pub enum FieldSource {
+ Named(ast::RecordField),
+ Pos(ast::TupleField),
+}
+
+impl Field {
+ pub fn name(&self, db: &dyn HirDatabase) -> Name {
+ let data = self.parent.variant_data(db);
+ data.fields()[self.id].name.clone()
+ }
+
+ /// Returns the type as in the signature of the struct (i.e., with
+ /// placeholder types for type parameters). Only use this in the context of
+ /// the field definition.
+ pub fn ty(&self, db: &dyn HirDatabase) -> Type {
+ let var_id = self.parent.into();
+ // The generics in scope belong to the owning ADT; for an enum variant
+ // that is the enum itself.
+ let generic_def_id: GenericDefId = match self.parent {
+ VariantDef::Struct(s) => s.id.into(),
+ VariantDef::Union(u) => u.id.into(),
+ VariantDef::Variant(v) => v.parent.id.into(),
+ };
+ let substs = TyBuilder::placeholder_subst(db, generic_def_id);
+ let ty = db.field_types(var_id)[self.id].clone().substitute(Interner, &substs);
+ Type::new(db, var_id, ty)
+ }
+
+ pub fn parent_def(&self, _db: &dyn HirDatabase) -> VariantDef {
+ self.parent
+ }
+}
+
+impl HasVisibility for Field {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ // Raw visibility is resolved relative to the parent def's scope.
+ let parent_id: hir_def::VariantId = self.parent.into();
+ let resolver = parent_id.resolver(db.upcast());
+ self.parent.variant_data(db).fields()[self.id].visibility.resolve(db.upcast(), &resolver)
+ }
+}
+
+/// A `struct` definition.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Struct {
+ pub(crate) id: StructId,
+}
+
+impl Struct {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.lookup(db.upcast()).container }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.struct_data(self.id).name.clone()
+ }
+
+ /// All fields of this struct, in declaration order.
+ pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
+ let parent: VariantDef = self.into();
+ db.struct_data(self.id)
+ .variant_data
+ .fields()
+ .iter()
+ .map(|(id, _)| Field { parent, id })
+ .collect()
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ Type::from_def(db, self.id)
+ }
+
+ /// The struct's `#[repr(...)]` data, if any.
+ pub fn repr(self, db: &dyn HirDatabase) -> Option<ReprData> {
+ db.struct_data(self.id).repr.clone()
+ }
+
+ /// Whether this is a record, tuple, or unit struct.
+ pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
+ self.variant_data(db).kind()
+ }
+
+ fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
+ Arc::clone(&db.struct_data(self.id).variant_data)
+ }
+}
+
+impl HasVisibility for Struct {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ let resolver = self.id.resolver(db.upcast());
+ db.struct_data(self.id).visibility.resolve(db.upcast(), &resolver)
+ }
+}
+
+/// A `union` definition.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Union {
+ pub(crate) id: UnionId,
+}
+
+impl Union {
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.union_data(self.id).name.clone()
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.lookup(db.upcast()).container }
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ Type::from_def(db, self.id)
+ }
+
+ /// All fields of this union, in declaration order.
+ pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
+ let parent: VariantDef = self.into();
+ db.union_data(self.id)
+ .variant_data
+ .fields()
+ .iter()
+ .map(|(id, _)| Field { parent, id })
+ .collect()
+ }
+
+ fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
+ Arc::clone(&db.union_data(self.id).variant_data)
+ }
+}
+
+impl HasVisibility for Union {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ let resolver = self.id.resolver(db.upcast());
+ db.union_data(self.id).visibility.resolve(db.upcast(), &resolver)
+ }
+}
+
+/// An `enum` definition.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Enum {
+ pub(crate) id: EnumId,
+}
+
+impl Enum {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ Module { id: self.id.lookup(db.upcast()).container }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.enum_data(self.id).name.clone()
+ }
+
+ /// All variants of this enum, in declaration order.
+ pub fn variants(self, db: &dyn HirDatabase) -> Vec<Variant> {
+ let data = db.enum_data(self.id);
+ data.variants.iter().map(|(id, _)| Variant { parent: self, id }).collect()
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ Type::from_def(db, self.id)
+ }
+
+ /// The type of the enum variant bodies.
+ pub fn variant_body_ty(self, db: &dyn HirDatabase) -> Type {
+ let krate = self.id.lookup(db.upcast()).container.krate();
+ // `variant_body_type` reports signed vs. unsigned as an `Either`.
+ let builtin = match db.enum_data(self.id).variant_body_type() {
+ Either::Left(signed) => hir_def::builtin_type::BuiltinType::Int(signed),
+ Either::Right(unsigned) => hir_def::builtin_type::BuiltinType::Uint(unsigned),
+ };
+ Type::new_for_crate(krate, TyBuilder::builtin(builtin))
+ }
+
+ /// `true` if any variant carries data (tuple or record fields).
+ pub fn is_data_carrying(self, db: &dyn HirDatabase) -> bool {
+ self.variants(db).into_iter().any(|v| !matches!(v.kind(db), StructKind::Unit))
+ }
+}
+
+impl HasVisibility for Enum {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ let resolver = self.id.resolver(db.upcast());
+ db.enum_data(self.id).visibility.resolve(db.upcast(), &resolver)
+ }
+}
+
+// A variant's body (its discriminant expression) is a bodied def for
+// const-evaluation purposes.
+impl From<&Variant> for DefWithBodyId {
+ fn from(&v: &Variant) -> Self {
+ DefWithBodyId::VariantId(v.into())
+ }
+}
+
+/// A single variant of an [`Enum`].
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Variant {
+ // The enum this variant belongs to.
+ pub(crate) parent: Enum,
+ // Index of the variant within the parent enum.
+ pub(crate) id: LocalEnumVariantId,
+}
+
+impl Variant {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ // A variant lives in the same module as its enum.
+ self.parent.module(db)
+ }
+
+ pub fn parent_enum(self, _db: &dyn HirDatabase) -> Enum {
+ self.parent
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ let data = db.enum_data(self.parent.id);
+ data.variants[self.id].name.clone()
+ }
+
+ /// All fields of this variant, in declaration order.
+ pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
+ let parent = VariantDef::Variant(self);
+ self.variant_data(db).fields().iter().map(|(id, _)| Field { parent, id }).collect()
+ }
+
+ /// Whether this is a record, tuple, or unit variant.
+ pub fn kind(self, db: &dyn HirDatabase) -> StructKind {
+ self.variant_data(db).kind()
+ }
+
+ pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
+ Arc::clone(&db.enum_data(self.parent.id).variants[self.id].variant_data)
+ }
+
+ /// The explicit discriminant expression, if written in the source.
+ pub fn value(self, db: &dyn HirDatabase) -> Option<Expr> {
+ self.source(db)?.value.expr()
+ }
+
+ /// Const-evaluates this variant's discriminant.
+ pub fn eval(self, db: &dyn HirDatabase) -> Result<ComputedExpr, ConstEvalError> {
+ db.const_eval_variant(self.into())
+ }
+}
+
+/// Variants inherit visibility from the parent enum.
+impl HasVisibility for Variant {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ self.parent_enum(db).visibility(db)
+ }
+}
+
+/// A Data Type: a `struct`, `union`, or `enum` definition.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum Adt {
+ Struct(Struct),
+ Union(Union),
+ Enum(Enum),
+}
+// `From` impls for each ADT kind into `Adt`.
+impl_from!(Struct, Union, Enum for Adt);
+
+impl Adt {
+ /// `true` if any generic parameter of this ADT lacks a default.
+ pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
+ // A parameter with no default shows up as an "unknown" type in the
+ // defaults list.
+ db.generic_defaults(self.into()).iter().any(|default| {
+ matches!(
+ default.skip_binders().data(Interner),
+ GenericArgData::Ty(it) if it.is_unknown()
+ )
+ })
+ }
+
+ /// Turns this ADT into a type. Any type parameters of the ADT will be
+ /// turned into unknown types, which is good for e.g. finding the most
+ /// general set of completions, but will not look very nice when printed.
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ Type::from_def(db, AdtId::from(self))
+ }
+
+ /// Turns this ADT into a type with the given type parameters. This isn't
+ /// the greatest API, FIXME find a better one.
+ pub fn ty_with_args(self, db: &dyn HirDatabase, args: &[Type]) -> Type {
+ let id = AdtId::from(self);
+ let mut arg_tys = args.iter().map(|t| t.ty.clone());
+ let ty = TyBuilder::def_ty(db, id.into(), None)
+ .fill(|param| {
+ // NOTE: an argument is consumed for every parameter, const
+ // parameters included (which are then filled with unknowns);
+ // missing arguments fall back to the error type.
+ let arg = arg_tys.next().unwrap_or_else(|| TyKind::Error.intern(Interner));
+ match param {
+ ParamKind::Type => GenericArgData::Ty(arg).intern(Interner),
+ ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
+ }
+ })
+ .build();
+ Type::new(db, id, ty)
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ match self {
+ Adt::Struct(it) => it.module(db),
+ Adt::Union(it) => it.module(db),
+ Adt::Enum(it) => it.module(db),
+ }
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ match self {
+ Adt::Struct(it) => it.name(db),
+ Adt::Union(it) => it.name(db),
+ Adt::Enum(it) => it.name(db),
+ }
+ }
+
+ /// Narrows to the enum case, if this ADT is one.
+ pub fn as_enum(&self) -> Option<Enum> {
+ match self {
+ Adt::Enum(e) => Some(*e),
+ _ => None,
+ }
+ }
+}
+
+impl HasVisibility for Adt {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+ match *self {
+ Adt::Struct(it) => it.visibility(db),
+ Adt::Union(it) => it.visibility(db),
+ Adt::Enum(it) => it.visibility(db),
+ }
+ }
+}
+
+/// Anything that directly owns fields: a struct, a union, or an enum variant.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum VariantDef {
+ Struct(Struct),
+ Union(Union),
+ Variant(Variant),
+}
+impl_from!(Struct, Union, Variant for VariantDef);
+
+impl VariantDef {
+ /// All fields of this def, in declaration order.
+ pub fn fields(self, db: &dyn HirDatabase) -> Vec<Field> {
+ match self {
+ VariantDef::Struct(s) => s.fields(db),
+ VariantDef::Union(u) => u.fields(db),
+ VariantDef::Variant(v) => v.fields(db),
+ }
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ match self {
+ VariantDef::Struct(s) => s.module(db),
+ VariantDef::Union(u) => u.module(db),
+ VariantDef::Variant(v) => v.module(db),
+ }
+ }
+
+ pub fn name(&self, db: &dyn HirDatabase) -> Name {
+ match self {
+ VariantDef::Struct(s) => s.name(db),
+ VariantDef::Union(u) => u.name(db),
+ VariantDef::Variant(v) => v.name(db),
+ }
+ }
+
+ pub(crate) fn variant_data(self, db: &dyn HirDatabase) -> Arc<VariantData> {
+ match self {
+ VariantDef::Struct(s) => s.variant_data(db),
+ VariantDef::Union(u) => u.variant_data(db),
+ VariantDef::Variant(v) => v.variant_data(db),
+ }
+ }
+}
+
+/// The defs which have a body.
+///
+/// A variant counts as bodied because its discriminant is a const-evaluable
+/// expression.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum DefWithBody {
+ Function(Function),
+ Static(Static),
+ Const(Const),
+ Variant(Variant),
+}
+// `From` impls for each bodied def kind into `DefWithBody`.
+impl_from!(Function, Const, Static, Variant for DefWithBody);
+
+impl DefWithBody {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ match self {
+ DefWithBody::Const(c) => c.module(db),
+ DefWithBody::Function(f) => f.module(db),
+ DefWithBody::Static(s) => s.module(db),
+ DefWithBody::Variant(v) => v.module(db),
+ }
+ }
+
+ // `Const` is the only bodied def that may be unnamed (`const _: () = ..;`).
+ pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+ match self {
+ DefWithBody::Function(f) => Some(f.name(db)),
+ DefWithBody::Static(s) => Some(s.name(db)),
+ DefWithBody::Const(c) => c.name(db),
+ DefWithBody::Variant(v) => Some(v.name(db)),
+ }
+ }
+
+ /// Returns the type this def's body has to evaluate to.
+ pub fn body_type(self, db: &dyn HirDatabase) -> Type {
+ match self {
+ DefWithBody::Function(it) => it.ret_type(db),
+ DefWithBody::Static(it) => it.ty(db),
+ DefWithBody::Const(it) => it.ty(db),
+ // A variant body evaluates to the enum's discriminant type.
+ DefWithBody::Variant(it) => it.parent.variant_body_ty(db),
+ }
+ }
+
+ fn id(&self) -> DefWithBodyId {
+ match self {
+ DefWithBody::Function(it) => it.id.into(),
+ DefWithBody::Static(it) => it.id.into(),
+ DefWithBody::Const(it) => it.id.into(),
+ DefWithBody::Variant(it) => it.into(),
+ }
+ }
+
+ /// A textual representation of the HIR of this def's body for debugging purposes.
+ pub fn debug_hir(self, db: &dyn HirDatabase) -> String {
+ let body = db.body(self.id());
+ body.pretty_print(db.upcast(), self.id())
+ }
+
+ /// Fills `acc` with all diagnostics for this body, in stages: nameres of
+ /// inner block scopes, body-lowering diagnostics, inference diagnostics,
+ /// type mismatches, unsafety checks, body validation, and finally
+ /// case-convention lints.
+ pub fn diagnostics(self, db: &dyn HirDatabase, acc: &mut Vec<AnyDiagnostic>) {
+ let krate = self.module(db).id.krate();
+
+ let (body, source_map) = db.body_with_source_map(self.into());
+
+ // Nameres diagnostics of block expressions nested inside this body.
+ for (_, def_map) in body.blocks(db.upcast()) {
+ for diag in def_map.diagnostics() {
+ emit_def_diagnostic(db, acc, diag);
+ }
+ }
+
+ // Diagnostics recorded while lowering the body to HIR.
+ for diag in source_map.diagnostics() {
+ match diag {
+ BodyDiagnostic::InactiveCode { node, cfg, opts } => acc.push(
+ InactiveCode { node: node.clone(), cfg: cfg.clone(), opts: opts.clone() }
+ .into(),
+ ),
+ BodyDiagnostic::MacroError { node, message } => acc.push(
+ MacroError {
+ node: node.clone().map(|it| it.into()),
+ precise_location: None,
+ message: message.to_string(),
+ }
+ .into(),
+ ),
+ BodyDiagnostic::UnresolvedProcMacro { node, krate } => acc.push(
+ UnresolvedProcMacro {
+ node: node.clone().map(|it| it.into()),
+ precise_location: None,
+ macro_name: None,
+ kind: MacroKind::ProcMacro,
+ krate: *krate,
+ }
+ .into(),
+ ),
+ BodyDiagnostic::UnresolvedMacroCall { node, path } => acc.push(
+ UnresolvedMacroCall {
+ macro_call: node.clone().map(|ast_ptr| ast_ptr.into()),
+ precise_location: None,
+ path: path.clone(),
+ is_bang: true,
+ }
+ .into(),
+ ),
+ }
+ }
+
+ // Type-inference diagnostics. Note: this `source_map` shadows the one
+ // above with a lazily re-fetched copy.
+ let infer = db.infer(self.into());
+ let source_map = Lazy::new(|| db.body_with_source_map(self.into()).1);
+ for d in &infer.diagnostics {
+ match d {
+ hir_ty::InferenceDiagnostic::NoSuchField { expr } => {
+ let field = source_map.field_syntax(*expr);
+ acc.push(NoSuchField { field }.into())
+ }
+ &hir_ty::InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break } => {
+ let expr = source_map
+ .expr_syntax(expr)
+ .expect("break outside of loop in synthetic syntax");
+ acc.push(BreakOutsideOfLoop { expr, is_break }.into())
+ }
+ hir_ty::InferenceDiagnostic::MismatchedArgCount { call_expr, expected, found } => {
+ match source_map.expr_syntax(*call_expr) {
+ Ok(source_ptr) => acc.push(
+ MismatchedArgCount {
+ call_expr: source_ptr,
+ expected: *expected,
+ found: *found,
+ }
+ .into(),
+ ),
+ // Desugared expressions have no syntax to attach to.
+ Err(SyntheticSyntax) => (),
+ }
+ }
+ }
+ }
+ // Expression-level type mismatches found during inference.
+ for (expr, mismatch) in infer.expr_type_mismatches() {
+ let expr = match source_map.expr_syntax(expr) {
+ Ok(expr) => expr,
+ Err(SyntheticSyntax) => continue,
+ };
+ acc.push(
+ TypeMismatch {
+ expr,
+ expected: Type::new(db, DefWithBodyId::from(self), mismatch.expected.clone()),
+ actual: Type::new(db, DefWithBodyId::from(self), mismatch.actual.clone()),
+ }
+ .into(),
+ );
+ }
+
+ // Unsafe operations outside an `unsafe` block/fn.
+ for expr in hir_ty::diagnostics::missing_unsafe(db, self.into()) {
+ match source_map.expr_syntax(expr) {
+ Ok(expr) => acc.push(MissingUnsafe { expr }.into()),
+ Err(SyntheticSyntax) => {
+ // FIXME: Here and elsewhere in this file, the `expr` was
+ // desugared, report or assert that this doesn't happen.
+ }
+ }
+ }
+
+ // Body validation: missing fields, missing match arms, lints.
+ for diagnostic in BodyValidationDiagnostic::collect(db, self.into()) {
+ match diagnostic {
+ BodyValidationDiagnostic::RecordMissingFields {
+ record,
+ variant,
+ missed_fields,
+ } => {
+ let variant_data = variant.variant_data(db.upcast());
+ let missed_fields = missed_fields
+ .into_iter()
+ .map(|idx| variant_data.fields()[idx].name.clone())
+ .collect();
+
+ // `record` is a record *expression* (Left) or *pattern*
+ // (Right); either way, only report when the field list
+ // syntax is actually present.
+ match record {
+ Either::Left(record_expr) => match source_map.expr_syntax(record_expr) {
+ Ok(source_ptr) => {
+ let root = source_ptr.file_syntax(db.upcast());
+ if let ast::Expr::RecordExpr(record_expr) =
+ &source_ptr.value.to_node(&root)
+ {
+ if record_expr.record_expr_field_list().is_some() {
+ acc.push(
+ MissingFields {
+ file: source_ptr.file_id,
+ field_list_parent: Either::Left(AstPtr::new(
+ record_expr,
+ )),
+ field_list_parent_path: record_expr
+ .path()
+ .map(|path| AstPtr::new(&path)),
+ missed_fields,
+ }
+ .into(),
+ )
+ }
+ }
+ }
+ Err(SyntheticSyntax) => (),
+ },
+ Either::Right(record_pat) => match source_map.pat_syntax(record_pat) {
+ Ok(source_ptr) => {
+ if let Some(expr) = source_ptr.value.as_ref().left() {
+ let root = source_ptr.file_syntax(db.upcast());
+ if let ast::Pat::RecordPat(record_pat) = expr.to_node(&root) {
+ if record_pat.record_pat_field_list().is_some() {
+ acc.push(
+ MissingFields {
+ file: source_ptr.file_id,
+ field_list_parent: Either::Right(AstPtr::new(
+ &record_pat,
+ )),
+ field_list_parent_path: record_pat
+ .path()
+ .map(|path| AstPtr::new(&path)),
+ missed_fields,
+ }
+ .into(),
+ )
+ }
+ }
+ }
+ }
+ Err(SyntheticSyntax) => (),
+ },
+ }
+ }
+ BodyValidationDiagnostic::ReplaceFilterMapNextWithFindMap { method_call_expr } => {
+ if let Ok(next_source_ptr) = source_map.expr_syntax(method_call_expr) {
+ acc.push(
+ ReplaceFilterMapNextWithFindMap {
+ file: next_source_ptr.file_id,
+ next_expr: next_source_ptr.value,
+ }
+ .into(),
+ );
+ }
+ }
+ BodyValidationDiagnostic::MissingMatchArms { match_expr, uncovered_patterns } => {
+ match source_map.expr_syntax(match_expr) {
+ Ok(source_ptr) => {
+ let root = source_ptr.file_syntax(db.upcast());
+ if let ast::Expr::MatchExpr(match_expr) =
+ &source_ptr.value.to_node(&root)
+ {
+ if let Some(match_expr) = match_expr.expr() {
+ acc.push(
+ MissingMatchArms {
+ file: source_ptr.file_id,
+ match_expr: AstPtr::new(&match_expr),
+ uncovered_patterns,
+ }
+ .into(),
+ );
+ }
+ }
+ }
+ Err(SyntheticSyntax) => (),
+ }
+ }
+ }
+ }
+
+ // Finally, case-convention lints for the def itself.
+ let def: ModuleDef = match self {
+ DefWithBody::Function(it) => it.into(),
+ DefWithBody::Static(it) => it.into(),
+ DefWithBody::Const(it) => it.into(),
+ DefWithBody::Variant(it) => it.into(),
+ };
+ for diag in hir_ty::diagnostics::incorrect_case(db, krate, def.into()) {
+ acc.push(diag.into())
+ }
+ }
+}
+
+/// A free-standing or associated function (`fn`).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Function {
+ pub(crate) id: FunctionId,
+}
+
+impl Function {
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.id.lookup(db.upcast()).module(db.upcast()).into()
+ }
+
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ db.function_data(self.id).name.clone()
+ }
+
+ /// Get this function's return type
+ pub fn ret_type(self, db: &dyn HirDatabase) -> Type {
+ let resolver = self.id.resolver(db.upcast());
+ let substs = TyBuilder::placeholder_subst(db, self.id);
+ let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
+ let ty = callable_sig.ret().clone();
+ Type::new_with_resolver_inner(db, &resolver, ty)
+ }
+
+ pub fn async_ret_type(self, db: &dyn HirDatabase) -> Option<Type> {
+ if !self.is_async(db) {
+ return None;
+ }
+ let resolver = self.id.resolver(db.upcast());
+ let substs = TyBuilder::placeholder_subst(db, self.id);
+ let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
+ let ret_ty = callable_sig.ret().clone();
+ for pred in ret_ty.impl_trait_bounds(db).into_iter().flatten() {
+ if let WhereClause::AliasEq(output_eq) = pred.into_value_and_skipped_binders().0 {
+ return Type::new_with_resolver_inner(db, &resolver, output_eq.ty).into();
+ }
+ }
+ never!("Async fn ret_type should be impl Future");
+ None
+ }
+
+ pub fn has_self_param(self, db: &dyn HirDatabase) -> bool {
+ db.function_data(self.id).has_self_param()
+ }
+
+ pub fn self_param(self, db: &dyn HirDatabase) -> Option<SelfParam> {
+ self.has_self_param(db).then(|| SelfParam { func: self.id })
+ }
+
+ pub fn assoc_fn_params(self, db: &dyn HirDatabase) -> Vec<Param> {
+ let environment = db.trait_environment(self.id.into());
+ let substs = TyBuilder::placeholder_subst(db, self.id);
+ let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
+ callable_sig
+ .params()
+ .iter()
+ .enumerate()
+ .map(|(idx, ty)| {
+ let ty = Type { env: environment.clone(), ty: ty.clone() };
+ Param { func: self, ty, idx }
+ })
+ .collect()
+ }
+
+ pub fn method_params(self, db: &dyn HirDatabase) -> Option<Vec<Param>> {
+ if self.self_param(db).is_none() {
+ return None;
+ }
+ Some(self.params_without_self(db))
+ }
+
+ pub fn params_without_self(self, db: &dyn HirDatabase) -> Vec<Param> {
+ let environment = db.trait_environment(self.id.into());
+ let substs = TyBuilder::placeholder_subst(db, self.id);
+ let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs);
+ let skip = if db.function_data(self.id).has_self_param() { 1 } else { 0 };
+ callable_sig
+ .params()
+ .iter()
+ .enumerate()
+ .skip(skip)
+ .map(|(idx, ty)| {
+ let ty = Type { env: environment.clone(), ty: ty.clone() };
+ Param { func: self, ty, idx }
+ })
+ .collect()
+ }
+
+ pub fn is_const(self, db: &dyn HirDatabase) -> bool {
+ db.function_data(self.id).has_const_kw()
+ }
+
+ pub fn is_async(self, db: &dyn HirDatabase) -> bool {
+ db.function_data(self.id).has_async_kw()
+ }
+
+ pub fn is_unsafe_to_call(self, db: &dyn HirDatabase) -> bool {
+ hir_ty::is_fn_unsafe_to_call(db, self.id)
+ }
+
+ /// Whether this function declaration has a definition.
+ ///
+ /// This is false in the case of required (not provided) trait methods.
+ pub fn has_body(self, db: &dyn HirDatabase) -> bool {
+ db.function_data(self.id).has_body()
+ }
+
+ pub fn as_proc_macro(self, db: &dyn HirDatabase) -> Option<Macro> {
+ let function_data = db.function_data(self.id);
+ let attrs = &function_data.attrs;
+ // FIXME: Store this in FunctionData flags?
+ if !(attrs.is_proc_macro()
+ || attrs.is_proc_macro_attribute()
+ || attrs.is_proc_macro_derive())
+ {
+ return None;
+ }
+ let loc = self.id.lookup(db.upcast());
+ let def_map = db.crate_def_map(loc.krate(db).into());
+ def_map.fn_as_proc_macro(self.id).map(|id| Macro { id: id.into() })
+ }
+}
+
+// Note: logically, this belongs to `hir_ty`, but we are not using it there yet.
+/// How a value is accessed: by shared reference, by exclusive (mutable)
+/// reference, or by value.
+#[derive(Clone, Copy, PartialEq, Eq)]
+pub enum Access {
+    Shared,
+    Exclusive,
+    Owned,
+}
+
+impl From<hir_ty::Mutability> for Access {
+    fn from(mutability: hir_ty::Mutability) -> Access {
+        match mutability {
+            hir_ty::Mutability::Not => Access::Shared,
+            hir_ty::Mutability::Mut => Access::Exclusive,
+        }
+    }
+}
+
+/// A single value parameter of a function.
+#[derive(Clone, Debug)]
+pub struct Param {
+    func: Function,
+    /// The index in parameter list, including self parameter.
+    idx: usize,
+    ty: Type,
+}
+
+impl Param {
+    /// The parameter's type.
+    pub fn ty(&self) -> &Type {
+        &self.ty
+    }
+
+    /// The parameter's name from the signature, if it has one.
+    pub fn name(&self, db: &dyn HirDatabase) -> Option<Name> {
+        db.function_data(self.func.id).params[self.idx].0.clone()
+    }
+
+    /// The `Local` introduced by this parameter, if its pattern is a plain
+    /// binding (`Pat::Bind`); `None` for e.g. tuple or wildcard patterns.
+    pub fn as_local(&self, db: &dyn HirDatabase) -> Option<Local> {
+        let parent = DefWithBodyId::FunctionId(self.func.into());
+        let body = db.body(parent);
+        let pat_id = body.params[self.idx];
+        if let Pat::Bind { .. } = &body[pat_id] {
+            Some(Local { parent, pat_id: body.params[self.idx] })
+        } else {
+            None
+        }
+    }
+
+    /// The pattern part of this parameter's syntax, if available.
+    pub fn pattern_source(&self, db: &dyn HirDatabase) -> Option<ast::Pat> {
+        self.source(db).and_then(|p| p.value.pat())
+    }
+
+    /// The `ast::Param` node for this parameter. In the AST the `self`
+    /// parameter is stored separately from the ordinary parameter list, so
+    /// `idx` is shifted down by one when a `self` parameter exists.
+    pub fn source(&self, db: &dyn HirDatabase) -> Option<InFile<ast::Param>> {
+        let InFile { file_id, value } = self.func.source(db)?;
+        let params = value.param_list()?;
+        if params.self_param().is_some() {
+            params.params().nth(self.idx.checked_sub(1)?)
+        } else {
+            params.params().nth(self.idx)
+        }
+        .map(|value| InFile { file_id, value })
+    }
+}
+
+/// The `self` parameter of a function.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct SelfParam {
+    func: FunctionId,
+}
+
+impl SelfParam {
+    /// Whether `self` is taken by shared reference, exclusive reference, or
+    /// by value, judging from the written type of the first parameter.
+    pub fn access(self, db: &dyn HirDatabase) -> Access {
+        let func_data = db.function_data(self.func);
+        func_data
+            .params
+            .first()
+            .map(|(_, param)| match &**param {
+                TypeRef::Reference(.., mutability) => match mutability {
+                    hir_def::type_ref::Mutability::Shared => Access::Shared,
+                    hir_def::type_ref::Mutability::Mut => Access::Exclusive,
+                },
+                _ => Access::Owned,
+            })
+            .unwrap_or(Access::Owned)
+    }
+
+    /// A human-readable rendering of the `self` parameter.
+    pub fn display(self, db: &dyn HirDatabase) -> &'static str {
+        match self.access(db) {
+            Access::Shared => "&self",
+            Access::Exclusive => "&mut self",
+            Access::Owned => "self",
+        }
+    }
+
+    /// The `ast::SelfParam` syntax node, if available.
+    pub fn source(&self, db: &dyn HirDatabase) -> Option<InFile<ast::SelfParam>> {
+        let InFile { file_id, value } = Function::from(self.func).source(db)?;
+        value
+            .param_list()
+            .and_then(|params| params.self_param())
+            .map(|value| InFile { file_id, value })
+    }
+
+    /// The (substituted) type of `self`, taken from the first slot of the
+    /// callable signature.
+    pub fn ty(&self, db: &dyn HirDatabase) -> Type {
+        let substs = TyBuilder::placeholder_subst(db, self.func);
+        let callable_sig =
+            db.callable_item_signature(self.func.into()).substitute(Interner, &substs);
+        let environment = db.trait_environment(self.func.into());
+        let ty = callable_sig.params()[0].clone();
+        Type { env: environment, ty }
+    }
+}
+
+impl HasVisibility for Function {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        db.function_visibility(self.id)
+    }
+}
+
+/// A `const` item.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Const {
+    pub(crate) id: ConstId,
+}
+
+impl Const {
+    /// The module this `const` is defined in.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        Module { id: self.id.lookup(db.upcast()).module(db.upcast()) }
+    }
+
+    /// The item's name; `None` for unnamed constants (`const _: ...`).
+    pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+        db.const_data(self.id).name.clone()
+    }
+
+    /// The initializer expression, if its syntax is available.
+    pub fn value(self, db: &dyn HirDatabase) -> Option<ast::Expr> {
+        self.source(db)?.value.body()
+    }
+
+    /// The declared type of the constant.
+    pub fn ty(self, db: &dyn HirDatabase) -> Type {
+        let data = db.const_data(self.id);
+        let resolver = self.id.resolver(db.upcast());
+        let ctx = hir_ty::TyLoweringContext::new(db, &resolver);
+        let ty = ctx.lower_ty(&data.type_ref);
+        Type::new_with_resolver_inner(db, &resolver, ty)
+    }
+
+    /// Const-evaluates the initializer.
+    pub fn eval(self, db: &dyn HirDatabase) -> Result<ComputedExpr, ConstEvalError> {
+        db.const_eval(self.id)
+    }
+}
+
+impl HasVisibility for Const {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        db.const_visibility(self.id)
+    }
+}
+
+/// A `static` item.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Static {
+    pub(crate) id: StaticId,
+}
+
+impl Static {
+    /// The module this `static` is defined in.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        Module { id: self.id.lookup(db.upcast()).module(db.upcast()) }
+    }
+
+    /// The item's name.
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        db.static_data(self.id).name.clone()
+    }
+
+    /// Whether this is a `static mut`.
+    pub fn is_mut(self, db: &dyn HirDatabase) -> bool {
+        db.static_data(self.id).mutable
+    }
+
+    /// The initializer expression, if its syntax is available.
+    pub fn value(self, db: &dyn HirDatabase) -> Option<ast::Expr> {
+        self.source(db)?.value.body()
+    }
+
+    /// The declared type of the static.
+    pub fn ty(self, db: &dyn HirDatabase) -> Type {
+        let data = db.static_data(self.id);
+        let resolver = self.id.resolver(db.upcast());
+        let ctx = hir_ty::TyLoweringContext::new(db, &resolver);
+        let ty = ctx.lower_ty(&data.type_ref);
+        Type::new_with_resolver_inner(db, &resolver, ty)
+    }
+}
+
+impl HasVisibility for Static {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        db.static_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+    }
+}
+
+/// A `trait` definition.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Trait {
+    pub(crate) id: TraitId,
+}
+
+impl Trait {
+    /// Resolves a lang-item trait by name within `krate`.
+    pub fn lang(db: &dyn HirDatabase, krate: Crate, name: &Name) -> Option<Trait> {
+        db.lang_item(krate.into(), name.to_smol_str())
+            .and_then(LangItemTarget::as_trait)
+            .map(Into::into)
+    }
+
+    /// The module this trait is defined in.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        Module { id: self.id.lookup(db.upcast()).container }
+    }
+
+    /// The trait's name.
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        db.trait_data(self.id).name.clone()
+    }
+
+    /// The associated items declared directly on this trait.
+    pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
+        db.trait_data(self.id).items.iter().map(|(_name, it)| (*it).into()).collect()
+    }
+
+    /// The associated items of this trait and of all of its supertraits.
+    pub fn items_with_supertraits(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
+        let traits = all_super_traits(db.upcast(), self.into());
+        traits.iter().flat_map(|tr| Trait::from(*tr).items(db)).collect()
+    }
+
+    /// Whether this is an `auto trait`.
+    pub fn is_auto(self, db: &dyn HirDatabase) -> bool {
+        db.trait_data(self.id).is_auto
+    }
+
+    /// Whether this is an `unsafe trait`.
+    pub fn is_unsafe(&self, db: &dyn HirDatabase) -> bool {
+        db.trait_data(self.id).is_unsafe
+    }
+
+    /// Counts the type and const parameters written in the trait's parameter
+    /// list (implicitly introduced parameters are excluded). If
+    /// `count_required_only` is set, parameters that have a default are also
+    /// excluded.
+    pub fn type_or_const_param_count(
+        &self,
+        db: &dyn HirDatabase,
+        count_required_only: bool,
+    ) -> usize {
+        db.generic_params(GenericDefId::from(self.id))
+            .type_or_consts
+            .iter()
+            // Keep only parameters that come from the written parameter list.
+            .filter(|(_, ty)| match ty {
+                TypeOrConstParamData::TypeParamData(ty)
+                    if ty.provenance != TypeParamProvenance::TypeParamList =>
+                {
+                    false
+                }
+                _ => true,
+            })
+            .filter(|(_, ty)| !count_required_only || !ty.has_default())
+            .count()
+    }
+}
+
+impl HasVisibility for Trait {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        db.trait_data(self.id).visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+    }
+}
+
+/// A `type` alias item.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TypeAlias {
+    pub(crate) id: TypeAliasId,
+}
+
+impl TypeAlias {
+    /// Whether any of the alias' type parameters lacks a usable default
+    /// (a default that lowers to an unknown type counts as missing).
+    pub fn has_non_default_type_params(self, db: &dyn HirDatabase) -> bool {
+        let subst = db.generic_defaults(self.id.into());
+        subst.iter().any(|ty| match ty.skip_binders().data(Interner) {
+            GenericArgData::Ty(x) => x.is_unknown(),
+            _ => false,
+        })
+    }
+
+    /// The module this alias is defined in.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        Module { id: self.id.lookup(db.upcast()).module(db.upcast()) }
+    }
+
+    /// The aliased type as written, if any.
+    pub fn type_ref(self, db: &dyn HirDatabase) -> Option<TypeRef> {
+        db.type_alias_data(self.id).type_ref.as_deref().cloned()
+    }
+
+    /// The type this alias stands for.
+    pub fn ty(self, db: &dyn HirDatabase) -> Type {
+        Type::from_def(db, self.id)
+    }
+
+    /// The alias' name.
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        db.type_alias_data(self.id).name.clone()
+    }
+}
+
+impl HasVisibility for TypeAlias {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        let function_data = db.type_alias_data(self.id);
+        let visibility = &function_data.visibility;
+        visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+    }
+}
+
+/// A primitive (builtin) type such as an integer, `bool`, `char` or `str`.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct BuiltinType {
+    pub(crate) inner: hir_def::builtin_type::BuiltinType,
+}
+
+impl BuiltinType {
+    /// The `str` builtin type.
+    pub fn str() -> BuiltinType {
+        BuiltinType { inner: hir_def::builtin_type::BuiltinType::Str }
+    }
+
+    /// The corresponding `Type`; uses the first crate in the crate graph as
+    /// context (NOTE(review): assumes builtin types are crate-independent —
+    /// confirm).
+    pub fn ty(self, db: &dyn HirDatabase) -> Type {
+        Type::new_for_crate(db.crate_graph().iter().next().unwrap(), TyBuilder::builtin(self.inner))
+    }
+
+    /// The type's name (e.g. `u32`).
+    pub fn name(self) -> Name {
+        self.inner.as_name()
+    }
+
+    /// Whether this is a signed integer type.
+    pub fn is_int(&self) -> bool {
+        matches!(self.inner, hir_def::builtin_type::BuiltinType::Int(_))
+    }
+
+    /// Whether this is an unsigned integer type.
+    pub fn is_uint(&self) -> bool {
+        matches!(self.inner, hir_def::builtin_type::BuiltinType::Uint(_))
+    }
+
+    /// Whether this is a floating-point type.
+    pub fn is_float(&self) -> bool {
+        matches!(self.inner, hir_def::builtin_type::BuiltinType::Float(_))
+    }
+
+    /// Whether this is `char`.
+    pub fn is_char(&self) -> bool {
+        matches!(self.inner, hir_def::builtin_type::BuiltinType::Char)
+    }
+
+    /// Whether this is `bool`.
+    pub fn is_bool(&self) -> bool {
+        matches!(self.inner, hir_def::builtin_type::BuiltinType::Bool)
+    }
+
+    /// Whether this is `str`.
+    pub fn is_str(&self) -> bool {
+        matches!(self.inner, hir_def::builtin_type::BuiltinType::Str)
+    }
+}
+
+/// The kind of a macro definition.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum MacroKind {
+    /// `macro_rules!` or Macros 2.0 macro.
+    Declarative,
+    /// A built-in or custom derive.
+    Derive,
+    /// A built-in function-like macro.
+    BuiltIn,
+    /// A procedural attribute macro.
+    Attr,
+    /// A function-like procedural macro.
+    ProcMacro,
+}
+
+/// A macro definition (declarative or procedural).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Macro {
+    pub(crate) id: MacroId,
+}
+
+impl Macro {
+    /// The module this macro is defined in.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        Module { id: self.id.module(db.upcast()) }
+    }
+
+    /// The macro's name.
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        match self.id {
+            MacroId::Macro2Id(id) => db.macro2_data(id).name.clone(),
+            MacroId::MacroRulesId(id) => db.macro_rules_data(id).name.clone(),
+            MacroId::ProcMacroId(id) => db.proc_macro_data(id).name.clone(),
+        }
+    }
+
+    /// Whether this is a `macro_rules!` macro annotated with `#[macro_export]`.
+    pub fn is_macro_export(self, db: &dyn HirDatabase) -> bool {
+        matches!(self.id, MacroId::MacroRulesId(id) if db.macro_rules_data(id).macro_export)
+    }
+
+    /// Classifies the macro by its definition site and expander.
+    pub fn kind(&self, db: &dyn HirDatabase) -> MacroKind {
+        match self.id {
+            MacroId::Macro2Id(it) => match it.lookup(db.upcast()).expander {
+                MacroExpander::Declarative => MacroKind::Declarative,
+                MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => MacroKind::BuiltIn,
+                MacroExpander::BuiltInAttr(_) => MacroKind::Attr,
+                MacroExpander::BuiltInDerive(_) => MacroKind::Derive,
+            },
+            MacroId::MacroRulesId(it) => match it.lookup(db.upcast()).expander {
+                MacroExpander::Declarative => MacroKind::Declarative,
+                MacroExpander::BuiltIn(_) | MacroExpander::BuiltInEager(_) => MacroKind::BuiltIn,
+                MacroExpander::BuiltInAttr(_) => MacroKind::Attr,
+                MacroExpander::BuiltInDerive(_) => MacroKind::Derive,
+            },
+            MacroId::ProcMacroId(it) => match it.lookup(db.upcast()).kind {
+                ProcMacroKind::CustomDerive => MacroKind::Derive,
+                ProcMacroKind::FuncLike => MacroKind::ProcMacro,
+                ProcMacroKind::Attr => MacroKind::Attr,
+            },
+        }
+    }
+
+    /// Whether the macro is callable in function-like position (`m!()`).
+    pub fn is_fn_like(&self, db: &dyn HirDatabase) -> bool {
+        match self.kind(db) {
+            MacroKind::Declarative | MacroKind::BuiltIn | MacroKind::ProcMacro => true,
+            MacroKind::Attr | MacroKind::Derive => false,
+        }
+    }
+
+    /// Whether this is a compiler built-in derive (never true for proc macros).
+    pub fn is_builtin_derive(&self, db: &dyn HirDatabase) -> bool {
+        match self.id {
+            MacroId::Macro2Id(it) => {
+                matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInDerive(_))
+            }
+            MacroId::MacroRulesId(it) => {
+                matches!(it.lookup(db.upcast()).expander, MacroExpander::BuiltInDerive(_))
+            }
+            MacroId::ProcMacroId(_) => false,
+        }
+    }
+
+    /// Whether the macro is used as an attribute (`#[m]`).
+    pub fn is_attr(&self, db: &dyn HirDatabase) -> bool {
+        matches!(self.kind(db), MacroKind::Attr)
+    }
+
+    /// Whether the macro is used as a derive (`#[derive(M)]`).
+    pub fn is_derive(&self, db: &dyn HirDatabase) -> bool {
+        matches!(self.kind(db), MacroKind::Derive)
+    }
+}
+
+impl HasVisibility for Macro {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        match self.id {
+            // Only Macros 2.0 definitions carry a written visibility; the
+            // other kinds are treated as public here.
+            MacroId::Macro2Id(id) => {
+                let data = db.macro2_data(id);
+                let visibility = &data.visibility;
+                visibility.resolve(db.upcast(), &self.id.resolver(db.upcast()))
+            }
+            MacroId::MacroRulesId(_) => Visibility::Public,
+            MacroId::ProcMacroId(_) => Visibility::Public,
+        }
+    }
+}
+
+/// A reference to an item in one of the three namespaces: types, values, or
+/// macros.
+#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
+pub enum ItemInNs {
+    Types(ModuleDef),
+    Values(ModuleDef),
+    Macros(Macro),
+}
+
+impl From<Macro> for ItemInNs {
+    fn from(it: Macro) -> Self {
+        Self::Macros(it)
+    }
+}
+
+impl From<ModuleDef> for ItemInNs {
+    fn from(module_def: ModuleDef) -> Self {
+        match module_def {
+            // Statics, consts and functions live in the value namespace;
+            // everything else goes into the type namespace.
+            ModuleDef::Static(_) | ModuleDef::Const(_) | ModuleDef::Function(_) => {
+                ItemInNs::Values(module_def)
+            }
+            _ => ItemInNs::Types(module_def),
+        }
+    }
+}
+
+impl ItemInNs {
+    /// The underlying `ModuleDef` for type/value items; `None` for macros.
+    pub fn as_module_def(self) -> Option<ModuleDef> {
+        match self {
+            ItemInNs::Types(id) | ItemInNs::Values(id) => Some(id),
+            ItemInNs::Macros(_) => None,
+        }
+    }
+
+    /// Returns the crate defining this item (or `None` if `self` is built-in).
+    pub fn krate(&self, db: &dyn HirDatabase) -> Option<Crate> {
+        match self {
+            ItemInNs::Types(did) | ItemInNs::Values(did) => did.module(db).map(|m| m.krate()),
+            ItemInNs::Macros(id) => Some(id.module(db).krate()),
+        }
+    }
+
+    /// The item's attributes, if the item kind carries any.
+    pub fn attrs(&self, db: &dyn HirDatabase) -> Option<AttrsWithOwner> {
+        match self {
+            ItemInNs::Types(it) | ItemInNs::Values(it) => it.attrs(db),
+            ItemInNs::Macros(it) => Some(it.attrs(db)),
+        }
+    }
+}
+
+/// Invariant: `inner.as_assoc_item(db).is_some()`
+/// We do not actively enforce this invariant.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum AssocItem {
+    Function(Function),
+    Const(Const),
+    TypeAlias(TypeAlias),
+}
+/// The container an associated item belongs to: a trait or an impl block.
+#[derive(Debug)]
+pub enum AssocItemContainer {
+    Trait(Trait),
+    Impl(Impl),
+}
+/// Conversion into an `AssocItem`, succeeding only for trait/impl members.
+pub trait AsAssocItem {
+    fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem>;
+}
+
+impl AsAssocItem for Function {
+    fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
+        as_assoc_item(db, AssocItem::Function, self.id)
+    }
+}
+impl AsAssocItem for Const {
+    fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
+        as_assoc_item(db, AssocItem::Const, self.id)
+    }
+}
+impl AsAssocItem for TypeAlias {
+    fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
+        as_assoc_item(db, AssocItem::TypeAlias, self.id)
+    }
+}
+impl AsAssocItem for ModuleDef {
+    fn as_assoc_item(self, db: &dyn HirDatabase) -> Option<AssocItem> {
+        match self {
+            ModuleDef::Function(it) => it.as_assoc_item(db),
+            ModuleDef::Const(it) => it.as_assoc_item(db),
+            ModuleDef::TypeAlias(it) => it.as_assoc_item(db),
+            _ => None,
+        }
+    }
+}
+/// Shared helper for the impls above: wraps `id` via `ctor` when the item is
+/// defined inside a trait or impl; module- and extern-block items yield `None`.
+fn as_assoc_item<ID, DEF, CTOR, AST>(db: &dyn HirDatabase, ctor: CTOR, id: ID) -> Option<AssocItem>
+where
+    ID: Lookup<Data = AssocItemLoc<AST>>,
+    DEF: From<ID>,
+    CTOR: FnOnce(DEF) -> AssocItem,
+    AST: ItemTreeNode,
+{
+    match id.lookup(db.upcast()).container {
+        ItemContainerId::TraitId(_) | ItemContainerId::ImplId(_) => Some(ctor(DEF::from(id))),
+        ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None,
+    }
+}
+
+impl AssocItem {
+    /// The item's name; `None` for unnamed associated consts.
+    pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
+        match self {
+            AssocItem::Function(it) => Some(it.name(db)),
+            AssocItem::Const(it) => it.name(db),
+            AssocItem::TypeAlias(it) => Some(it.name(db)),
+        }
+    }
+    /// The module the item is defined in.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        match self {
+            AssocItem::Function(f) => f.module(db),
+            AssocItem::Const(c) => c.module(db),
+            AssocItem::TypeAlias(t) => t.module(db),
+        }
+    }
+    /// The trait or impl containing this item.
+    ///
+    /// Panics if the item is not actually associated (see the invariant
+    /// documented on `AssocItem`).
+    pub fn container(self, db: &dyn HirDatabase) -> AssocItemContainer {
+        let container = match self {
+            AssocItem::Function(it) => it.id.lookup(db.upcast()).container,
+            AssocItem::Const(it) => it.id.lookup(db.upcast()).container,
+            AssocItem::TypeAlias(it) => it.id.lookup(db.upcast()).container,
+        };
+        match container {
+            ItemContainerId::TraitId(id) => AssocItemContainer::Trait(id.into()),
+            ItemContainerId::ImplId(id) => AssocItemContainer::Impl(id.into()),
+            ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
+                panic!("invalid AssocItem")
+            }
+        }
+    }
+
+    /// The containing trait, if this item is a trait member.
+    pub fn containing_trait(self, db: &dyn HirDatabase) -> Option<Trait> {
+        match self.container(db) {
+            AssocItemContainer::Trait(t) => Some(t),
+            _ => None,
+        }
+    }
+
+    /// The implemented trait, if this item is a member of a trait impl.
+    pub fn containing_trait_impl(self, db: &dyn HirDatabase) -> Option<Trait> {
+        match self.container(db) {
+            AssocItemContainer::Impl(i) => i.trait_(db),
+            _ => None,
+        }
+    }
+
+    /// The related trait, whether the item sits in the trait itself or in an
+    /// impl of it.
+    pub fn containing_trait_or_trait_impl(self, db: &dyn HirDatabase) -> Option<Trait> {
+        match self.container(db) {
+            AssocItemContainer::Trait(t) => Some(t),
+            AssocItemContainer::Impl(i) => i.trait_(db),
+        }
+    }
+}
+
+impl HasVisibility for AssocItem {
+    fn visibility(&self, db: &dyn HirDatabase) -> Visibility {
+        match self {
+            AssocItem::Function(f) => f.visibility(db),
+            AssocItem::Const(c) => c.visibility(db),
+            AssocItem::TypeAlias(t) => t.visibility(db),
+        }
+    }
+}
+
+impl From<AssocItem> for ModuleDef {
+    fn from(assoc: AssocItem) -> Self {
+        match assoc {
+            AssocItem::Function(it) => ModuleDef::Function(it),
+            AssocItem::Const(it) => ModuleDef::Const(it),
+            AssocItem::TypeAlias(it) => ModuleDef::TypeAlias(it),
+        }
+    }
+}
+
+/// Any definition that can carry generic parameters.
+#[derive(Clone, Copy, PartialEq, Eq, Debug, Hash)]
+pub enum GenericDef {
+    Function(Function),
+    Adt(Adt),
+    Trait(Trait),
+    TypeAlias(TypeAlias),
+    Impl(Impl),
+    // enum variants cannot have generics themselves, but their parent enums
+    // can, and this makes some code easier to write
+    Variant(Variant),
+    // consts can have type parameters from their parents (i.e. associated consts of traits)
+    Const(Const),
+}
+impl_from!(
+    Function,
+    Adt(Struct, Enum, Union),
+    Trait,
+    TypeAlias,
+    Impl,
+    Variant,
+    Const
+    for GenericDef
+);
+
+impl GenericDef {
+    /// All generic parameters of this definition: lifetime parameters first,
+    /// then type and const parameters.
+    pub fn params(self, db: &dyn HirDatabase) -> Vec<GenericParam> {
+        let generics = db.generic_params(self.into());
+        let ty_params = generics.type_or_consts.iter().map(|(local_id, _)| {
+            let toc = TypeOrConstParam { id: TypeOrConstParamId { parent: self.into(), local_id } };
+            match toc.split(db) {
+                Either::Left(x) => GenericParam::ConstParam(x),
+                Either::Right(x) => GenericParam::TypeParam(x),
+            }
+        });
+        let lt_params = generics
+            .lifetimes
+            .iter()
+            .map(|(local_id, _)| LifetimeParam {
+                id: LifetimeParamId { parent: self.into(), local_id },
+            })
+            .map(GenericParam::LifetimeParam);
+        lt_params.chain(ty_params).collect()
+    }
+
+    /// Only the type and const parameters (lifetimes excluded).
+    pub fn type_params(self, db: &dyn HirDatabase) -> Vec<TypeOrConstParam> {
+        let generics = db.generic_params(self.into());
+        generics
+            .type_or_consts
+            .iter()
+            .map(|(local_id, _)| TypeOrConstParam {
+                id: TypeOrConstParamId { parent: self.into(), local_id },
+            })
+            .collect()
+    }
+}
+
+/// A single local definition.
+///
+/// If the definition of this is part of a "MultiLocal", that is a local that has multiple declarations due to or-patterns
+/// then this only references a single one of those.
+/// To retrieve the other locals you should use [`Local::associated_locals`]
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct Local {
+    pub(crate) parent: DefWithBodyId,
+    pub(crate) pat_id: PatId,
+}
+
+impl Local {
+    /// Whether this local is introduced by a function parameter's pattern.
+    pub fn is_param(self, db: &dyn HirDatabase) -> bool {
+        let src = self.source(db);
+        match src.value {
+            // Walk up from the binding; if an `ast::Param` is reached without
+            // leaving pattern syntax, the binding belongs to a parameter.
+            Either::Left(pat) => pat
+                .syntax()
+                .ancestors()
+                .map(|it| it.kind())
+                .take_while(|&kind| ast::Pat::can_cast(kind) || ast::Param::can_cast(kind))
+                .any(ast::Param::can_cast),
+            // A `self` parameter is always a parameter.
+            Either::Right(_) => true,
+        }
+    }
+
+    /// Reinterprets this local as the function's `self` parameter, if it is one.
+    pub fn as_self_param(self, db: &dyn HirDatabase) -> Option<SelfParam> {
+        match self.parent {
+            DefWithBodyId::FunctionId(func) if self.is_self(db) => Some(SelfParam { func }),
+            _ => None,
+        }
+    }
+
+    /// The binding's name.
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        let body = db.body(self.parent);
+        match &body[self.pat_id] {
+            Pat::Bind { name, .. } => name.clone(),
+            _ => {
+                stdx::never!("hir::Local is missing a name!");
+                Name::missing()
+            }
+        }
+    }
+
+    /// Whether this local is a `self` binding.
+    pub fn is_self(self, db: &dyn HirDatabase) -> bool {
+        self.name(db) == name![self]
+    }
+
+    /// Whether the binding is declared `mut`.
+    pub fn is_mut(self, db: &dyn HirDatabase) -> bool {
+        let body = db.body(self.parent);
+        matches!(&body[self.pat_id], Pat::Bind { mode: BindingAnnotation::Mutable, .. })
+    }
+
+    /// Whether the binding is by reference (`ref` or `ref mut`).
+    pub fn is_ref(self, db: &dyn HirDatabase) -> bool {
+        let body = db.body(self.parent);
+        matches!(
+            &body[self.pat_id],
+            Pat::Bind { mode: BindingAnnotation::Ref | BindingAnnotation::RefMut, .. }
+        )
+    }
+
+    /// The body (function/const/static/variant) this local belongs to.
+    pub fn parent(self, _db: &dyn HirDatabase) -> DefWithBody {
+        self.parent.into()
+    }
+
+    /// The module containing the parent body.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        self.parent(db).module(db)
+    }
+
+    /// The inferred type of this local.
+    pub fn ty(self, db: &dyn HirDatabase) -> Type {
+        let def = self.parent;
+        let infer = db.infer(def);
+        let ty = infer[self.pat_id].clone();
+        Type::new(db, def, ty)
+    }
+
+    /// All locals belonging to the same multi-local (the same binding repeated
+    /// across or-pattern alternatives).
+    pub fn associated_locals(self, db: &dyn HirDatabase) -> Box<[Local]> {
+        let body = db.body(self.parent);
+        body.ident_patterns_for(&self.pat_id)
+            .iter()
+            .map(|&pat_id| Local { parent: self.parent, pat_id })
+            .collect()
+    }
+
+    /// If this local is part of a multi-local, retrieve the representative local.
+    /// That is the local that references are being resolved to.
+    pub fn representative(self, db: &dyn HirDatabase) -> Local {
+        let body = db.body(self.parent);
+        Local { pat_id: body.pattern_representative(self.pat_id), ..self }
+    }
+
+    /// The defining syntax: an `IdentPat` for ordinary bindings, or a
+    /// `SelfParam` for `self`.
+    pub fn source(self, db: &dyn HirDatabase) -> InFile<Either<ast::IdentPat, ast::SelfParam>> {
+        let (_body, source_map) = db.body_with_source_map(self.parent);
+        // NOTE(review): this unwrap assumes a local's pattern always maps back
+        // to real (non-synthetic) syntax — confirm.
+        let src = source_map.pat_syntax(self.pat_id).unwrap(); // Hmm...
+        let root = src.file_syntax(db.upcast());
+        src.map(|ast| match ast {
+            // Suspicious unwrap
+            Either::Left(it) => Either::Left(it.cast().unwrap().to_node(&root)),
+            Either::Right(it) => Either::Right(it.to_node(&root)),
+        })
+    }
+}
+
+/// A helper attribute of a derive macro, identified by its index into the
+/// derive's declared helper list.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct DeriveHelper {
+    pub(crate) derive: MacroId,
+    pub(crate) idx: usize,
+}
+
+impl DeriveHelper {
+    /// The derive macro this helper belongs to.
+    pub fn derive(&self) -> Macro {
+        Macro { id: self.derive.into() }
+    }
+
+    /// The helper's name; `Name::missing()` if it cannot be resolved.
+    pub fn name(&self, db: &dyn HirDatabase) -> Name {
+        match self.derive {
+            // Only proc-macro derives declare helper attributes.
+            MacroId::Macro2Id(_) => None,
+            MacroId::MacroRulesId(_) => None,
+            MacroId::ProcMacroId(proc_macro) => db
+                .proc_macro_data(proc_macro)
+                .helpers
+                .as_ref()
+                .and_then(|it| it.get(self.idx))
+                .cloned(),
+        }
+        .unwrap_or_else(|| Name::missing())
+    }
+}
+
+// FIXME: Wrong name? This is could also be a registered attribute
+/// A builtin inert attribute (`krate == None`), or an attribute registered by
+/// a crate (indexed into that crate's registered-attribute list).
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct BuiltinAttr {
+    krate: Option<CrateId>,
+    idx: usize,
+}
+
+impl BuiltinAttr {
+    // FIXME: consider crates\hir_def\src\nameres\attr_resolution.rs?
+    /// Looks up `name` among the builtin inert attributes first, then among
+    /// the attributes registered by `krate`.
+    pub(crate) fn by_name(db: &dyn HirDatabase, krate: Crate, name: &str) -> Option<Self> {
+        if let builtin @ Some(_) = Self::builtin(name) {
+            return builtin;
+        }
+        let idx = db.crate_def_map(krate.id).registered_attrs().iter().position(|it| it == name)?;
+        Some(BuiltinAttr { krate: Some(krate.id), idx })
+    }
+
+    /// Looks up `name` in the table of builtin inert attributes.
+    fn builtin(name: &str) -> Option<Self> {
+        hir_def::builtin_attr::INERT_ATTRIBUTES
+            .iter()
+            .position(|tool| tool.name == name)
+            .map(|idx| BuiltinAttr { krate: None, idx })
+    }
+
+    /// The attribute's name.
+    pub fn name(&self, db: &dyn HirDatabase) -> SmolStr {
+        // FIXME: Return a `Name` here
+        match self.krate {
+            Some(krate) => db.crate_def_map(krate).registered_attrs()[self.idx].clone(),
+            None => SmolStr::new(hir_def::builtin_attr::INERT_ATTRIBUTES[self.idx].name),
+        }
+    }
+
+    /// The attribute's syntax template — only available for builtin
+    /// attributes; registered attributes carry none.
+    pub fn template(&self, _: &dyn HirDatabase) -> Option<AttributeTemplate> {
+        match self.krate {
+            Some(_) => None,
+            None => Some(hir_def::builtin_attr::INERT_ATTRIBUTES[self.idx].template),
+        }
+    }
+}
+
+/// A tool module: either builtin (`krate == None`) or registered by a crate,
+/// identified by an index into the respective list.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct ToolModule {
+    krate: Option<CrateId>,
+    idx: usize,
+}
+
+impl ToolModule {
+    // FIXME: consider crates\hir_def\src\nameres\attr_resolution.rs?
+    /// Looks up `name` among the builtin tool modules first, then among the
+    /// tools registered by `krate`.
+    pub(crate) fn by_name(db: &dyn HirDatabase, krate: Crate, name: &str) -> Option<Self> {
+        if let builtin @ Some(_) = Self::builtin(name) {
+            return builtin;
+        }
+        let idx = db.crate_def_map(krate.id).registered_tools().iter().position(|it| it == name)?;
+        Some(ToolModule { krate: Some(krate.id), idx })
+    }
+
+    /// Looks up `name` in the table of builtin tool modules.
+    fn builtin(name: &str) -> Option<Self> {
+        hir_def::builtin_attr::TOOL_MODULES
+            .iter()
+            .position(|&tool| tool == name)
+            .map(|idx| ToolModule { krate: None, idx })
+    }
+
+    /// The tool module's name.
+    pub fn name(&self, db: &dyn HirDatabase) -> SmolStr {
+        // FIXME: Return a `Name` here
+        match self.krate {
+            Some(krate) => db.crate_def_map(krate).registered_tools()[self.idx].clone(),
+            None => SmolStr::new(hir_def::builtin_attr::TOOL_MODULES[self.idx]),
+        }
+    }
+}
+
+/// A label (`'label:`) inside a body.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct Label {
+    pub(crate) parent: DefWithBodyId,
+    pub(crate) label_id: LabelId,
+}
+
+impl Label {
+    /// The module containing the parent body.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        self.parent(db).module(db)
+    }
+
+    /// The body this label belongs to.
+    pub fn parent(self, _db: &dyn HirDatabase) -> DefWithBody {
+        self.parent.into()
+    }
+
+    /// The label's name.
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        let body = db.body(self.parent);
+        body[self.label_id].name.clone()
+    }
+
+    /// The `ast::Label` syntax node this label was lowered from.
+    pub fn source(self, db: &dyn HirDatabase) -> InFile<ast::Label> {
+        let (_body, source_map) = db.body_with_source_map(self.parent);
+        let src = source_map.label_syntax(self.label_id);
+        let root = src.file_syntax(db.upcast());
+        src.map(|ast| ast.to_node(&root))
+    }
+}
+
+/// Any single generic parameter: a type, const, or lifetime parameter.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum GenericParam {
+    TypeParam(TypeParam),
+    ConstParam(ConstParam),
+    LifetimeParam(LifetimeParam),
+}
+impl_from!(TypeParam, ConstParam, LifetimeParam for GenericParam);
+
+impl GenericParam {
+    /// The module of the definition that declares this parameter.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        match self {
+            GenericParam::TypeParam(it) => it.module(db),
+            GenericParam::ConstParam(it) => it.module(db),
+            GenericParam::LifetimeParam(it) => it.module(db),
+        }
+    }
+
+    /// The parameter's name.
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        match self {
+            GenericParam::TypeParam(it) => it.name(db),
+            GenericParam::ConstParam(it) => it.name(db),
+            GenericParam::LifetimeParam(it) => it.name(db),
+        }
+    }
+}
+
+/// A type parameter of a generic definition.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct TypeParam {
+    pub(crate) id: TypeParamId,
+}
+
+impl TypeParam {
+    /// Widens this into the combined type-or-const parameter representation.
+    pub fn merge(self) -> TypeOrConstParam {
+        TypeOrConstParam { id: self.id.into() }
+    }
+
+    /// The parameter's name.
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        self.merge().name(db)
+    }
+
+    /// The module of the definition that declares this parameter.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        self.id.parent().module(db.upcast()).into()
+    }
+
+    /// Is this type parameter implicitly introduced (eg. `Self` in a trait or an `impl Trait`
+    /// argument)?
+    pub fn is_implicit(self, db: &dyn HirDatabase) -> bool {
+        let params = db.generic_params(self.id.parent());
+        let data = &params.type_or_consts[self.id.local_id()];
+        match data.type_param().unwrap().provenance {
+            hir_def::generics::TypeParamProvenance::TypeParamList => false,
+            hir_def::generics::TypeParamProvenance::TraitSelf
+            | hir_def::generics::TypeParamProvenance::ArgumentImplTrait => true,
+        }
+    }
+
+    /// The placeholder type representing this parameter.
+    pub fn ty(self, db: &dyn HirDatabase) -> Type {
+        let resolver = self.id.parent().resolver(db.upcast());
+        let ty =
+            TyKind::Placeholder(hir_ty::to_placeholder_idx(db, self.id.into())).intern(Interner);
+        Type::new_with_resolver_inner(db, &resolver, ty)
+    }
+
+    /// FIXME: this only lists trait bounds from the item defining the type
+    /// parameter, not additional bounds that might be added e.g. by a method if
+    /// the parameter comes from an impl!
+    pub fn trait_bounds(self, db: &dyn HirDatabase) -> Vec<Trait> {
+        db.generic_predicates_for_param(self.id.parent(), self.id.into(), None)
+            .iter()
+            .filter_map(|pred| match &pred.skip_binders().skip_binders() {
+                hir_ty::WhereClause::Implemented(trait_ref) => {
+                    Some(Trait::from(trait_ref.hir_trait_id()))
+                }
+                _ => None,
+            })
+            .collect()
+    }
+
+    /// The parameter's default type, if one is declared and resolvable.
+    pub fn default(self, db: &dyn HirDatabase) -> Option<Type> {
+        let params = db.generic_defaults(self.id.parent());
+        let local_idx = hir_ty::param_idx(db, self.id.into())?;
+        let resolver = self.id.parent().resolver(db.upcast());
+        let ty = params.get(local_idx)?.clone();
+        // Substitute placeholders for the parent's parameters — presumably the
+        // default may refer to them (e.g. `U = T`).
+        let subst = TyBuilder::placeholder_subst(db, self.id.parent());
+        let ty = ty.substitute(Interner, &subst);
+        match ty.data(Interner) {
+            GenericArgData::Ty(x) => Some(Type::new_with_resolver_inner(db, &resolver, x.clone())),
+            _ => None,
+        }
+    }
+}
+
+/// A lifetime parameter.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct LifetimeParam {
+    pub(crate) id: LifetimeParamId,
+}
+
+impl LifetimeParam {
+    /// The lifetime's name (e.g. `'a`).
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        let params = db.generic_params(self.id.parent);
+        params.lifetimes[self.id.local_id].name.clone()
+    }
+
+    /// The module of the declaring definition.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        self.id.parent.module(db.upcast()).into()
+    }
+
+    /// The generic definition that declares this parameter.
+    pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
+        self.id.parent.into()
+    }
+}
+
+/// A const generic parameter (`const N: usize`).
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct ConstParam {
+    pub(crate) id: ConstParamId,
+}
+
+impl ConstParam {
+    /// Widens this into the combined type-or-const parameter representation.
+    pub fn merge(self) -> TypeOrConstParam {
+        TypeOrConstParam { id: self.id.into() }
+    }
+
+    /// The parameter's name; the `None` arm is not expected to be reached
+    /// (hence the `never!`).
+    pub fn name(self, db: &dyn HirDatabase) -> Name {
+        let params = db.generic_params(self.id.parent());
+        match params.type_or_consts[self.id.local_id()].name() {
+            Some(x) => x.clone(),
+            None => {
+                never!();
+                Name::missing()
+            }
+        }
+    }
+
+    /// The module of the declaring definition.
+    pub fn module(self, db: &dyn HirDatabase) -> Module {
+        self.id.parent().module(db.upcast()).into()
+    }
+
+    /// The generic definition that declares this parameter.
+    pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
+        self.id.parent().into()
+    }
+
+    /// The declared type of the const parameter (the `usize` in `const N: usize`).
+    pub fn ty(self, db: &dyn HirDatabase) -> Type {
+        Type::new(db, self.id.parent(), db.const_param_ty(self.id))
+    }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct TypeOrConstParam {
+ pub(crate) id: TypeOrConstParamId,
+}
+
+impl TypeOrConstParam {
+ pub fn name(self, db: &dyn HirDatabase) -> Name {
+ let params = db.generic_params(self.id.parent);
+ match params.type_or_consts[self.id.local_id].name() {
+ Some(n) => n.clone(),
+ _ => Name::missing(),
+ }
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.id.parent.module(db.upcast()).into()
+ }
+
+ pub fn parent(self, _db: &dyn HirDatabase) -> GenericDef {
+ self.id.parent.into()
+ }
+
+ pub fn split(self, db: &dyn HirDatabase) -> Either<ConstParam, TypeParam> {
+ let params = db.generic_params(self.id.parent);
+ match &params.type_or_consts[self.id.local_id] {
+ hir_def::generics::TypeOrConstParamData::TypeParamData(_) => {
+ Either::Right(TypeParam { id: TypeParamId::from_unchecked(self.id) })
+ }
+ hir_def::generics::TypeOrConstParamData::ConstParamData(_) => {
+ Either::Left(ConstParam { id: ConstParamId::from_unchecked(self.id) })
+ }
+ }
+ }
+
+ pub fn ty(self, db: &dyn HirDatabase) -> Type {
+ match self.split(db) {
+ Either::Left(x) => x.ty(db),
+ Either::Right(x) => x.ty(db),
+ }
+ }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct Impl {
+ pub(crate) id: ImplId,
+}
+
+impl Impl {
+ pub fn all_in_crate(db: &dyn HirDatabase, krate: Crate) -> Vec<Impl> {
+ let inherent = db.inherent_impls_in_crate(krate.id);
+ let trait_ = db.trait_impls_in_crate(krate.id);
+
+ inherent.all_impls().chain(trait_.all_impls()).map(Self::from).collect()
+ }
+
+ pub fn all_for_type(db: &dyn HirDatabase, Type { ty, env }: Type) -> Vec<Impl> {
+ let def_crates = match method_resolution::def_crates(db, &ty, env.krate) {
+ Some(def_crates) => def_crates,
+ None => return Vec::new(),
+ };
+
+ let filter = |impl_def: &Impl| {
+ let self_ty = impl_def.self_ty(db);
+ let rref = self_ty.remove_ref();
+ ty.equals_ctor(rref.as_ref().map_or(&self_ty.ty, |it| &it.ty))
+ };
+
+ let fp = TyFingerprint::for_inherent_impl(&ty);
+ let fp = match fp {
+ Some(fp) => fp,
+ None => return Vec::new(),
+ };
+
+ let mut all = Vec::new();
+ def_crates.iter().for_each(|&id| {
+ all.extend(
+ db.inherent_impls_in_crate(id)
+ .for_self_ty(&ty)
+ .iter()
+ .cloned()
+ .map(Self::from)
+ .filter(filter),
+ )
+ });
+ for id in def_crates
+ .iter()
+ .flat_map(|&id| Crate { id }.transitive_reverse_dependencies(db))
+ .map(|Crate { id }| id)
+ .chain(def_crates.iter().copied())
+ .unique()
+ {
+ all.extend(
+ db.trait_impls_in_crate(id)
+ .for_self_ty_without_blanket_impls(fp)
+ .map(Self::from)
+ .filter(filter),
+ );
+ }
+ all
+ }
+
+ pub fn all_for_trait(db: &dyn HirDatabase, trait_: Trait) -> Vec<Impl> {
+ let krate = trait_.module(db).krate();
+ let mut all = Vec::new();
+ for Crate { id } in krate.transitive_reverse_dependencies(db).into_iter() {
+ let impls = db.trait_impls_in_crate(id);
+ all.extend(impls.for_trait(trait_.id).map(Self::from))
+ }
+ all
+ }
+
+ // FIXME: the return type is wrong. This should be a hir version of
+ // `TraitRef` (to account for parameters and qualifiers)
+ pub fn trait_(self, db: &dyn HirDatabase) -> Option<Trait> {
+ let trait_ref = db.impl_trait(self.id)?.skip_binders().clone();
+ let id = hir_ty::from_chalk_trait_id(trait_ref.trait_id);
+ Some(Trait { id })
+ }
+
+ pub fn self_ty(self, db: &dyn HirDatabase) -> Type {
+ let resolver = self.id.resolver(db.upcast());
+ let substs = TyBuilder::placeholder_subst(db, self.id);
+ let ty = db.impl_self_ty(self.id).substitute(Interner, &substs);
+ Type::new_with_resolver_inner(db, &resolver, ty)
+ }
+
+ pub fn items(self, db: &dyn HirDatabase) -> Vec<AssocItem> {
+ db.impl_data(self.id).items.iter().map(|it| (*it).into()).collect()
+ }
+
+ pub fn is_negative(self, db: &dyn HirDatabase) -> bool {
+ db.impl_data(self.id).is_negative
+ }
+
+ pub fn module(self, db: &dyn HirDatabase) -> Module {
+ self.id.lookup(db.upcast()).container.into()
+ }
+
+ pub fn is_builtin_derive(self, db: &dyn HirDatabase) -> Option<InFile<ast::Attr>> {
+ let src = self.source(db)?;
+ src.file_id.is_builtin_derive(db.upcast())
+ }
+}
+
+#[derive(Clone, PartialEq, Eq, Debug)]
+pub struct Type {
+ env: Arc<TraitEnvironment>,
+ ty: Ty,
+}
+
+impl Type {
+ pub(crate) fn new_with_resolver(db: &dyn HirDatabase, resolver: &Resolver, ty: Ty) -> Type {
+ Type::new_with_resolver_inner(db, resolver, ty)
+ }
+
+ pub(crate) fn new_with_resolver_inner(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ ty: Ty,
+ ) -> Type {
+ let environment = resolver.generic_def().map_or_else(
+ || Arc::new(TraitEnvironment::empty(resolver.krate())),
+ |d| db.trait_environment(d),
+ );
+ Type { env: environment, ty }
+ }
+
+ pub(crate) fn new_for_crate(krate: CrateId, ty: Ty) -> Type {
+ Type { env: Arc::new(TraitEnvironment::empty(krate)), ty }
+ }
+
+ pub fn reference(inner: &Type, m: Mutability) -> Type {
+ inner.derived(
+ TyKind::Ref(
+ if m.is_mut() { hir_ty::Mutability::Mut } else { hir_ty::Mutability::Not },
+ hir_ty::static_lifetime(),
+ inner.ty.clone(),
+ )
+ .intern(Interner),
+ )
+ }
+
+ fn new(db: &dyn HirDatabase, lexical_env: impl HasResolver, ty: Ty) -> Type {
+ let resolver = lexical_env.resolver(db.upcast());
+ let environment = resolver.generic_def().map_or_else(
+ || Arc::new(TraitEnvironment::empty(resolver.krate())),
+ |d| db.trait_environment(d),
+ );
+ Type { env: environment, ty }
+ }
+
+ fn from_def(db: &dyn HirDatabase, def: impl HasResolver + Into<TyDefId>) -> Type {
+ let ty_def = def.into();
+ let parent_subst = match ty_def {
+ TyDefId::TypeAliasId(id) => match id.lookup(db.upcast()).container {
+ ItemContainerId::TraitId(id) => {
+ let subst = TyBuilder::subst_for_def(db, id, None).fill_with_unknown().build();
+ Some(subst)
+ }
+ ItemContainerId::ImplId(id) => {
+ let subst = TyBuilder::subst_for_def(db, id, None).fill_with_unknown().build();
+ Some(subst)
+ }
+ _ => None,
+ },
+ _ => None,
+ };
+ let ty = TyBuilder::def_ty(db, ty_def, parent_subst).fill_with_unknown().build();
+ Type::new(db, def, ty)
+ }
+
+ pub fn new_slice(ty: Type) -> Type {
+ Type { env: ty.env, ty: TyBuilder::slice(ty.ty) }
+ }
+
+ pub fn is_unit(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Tuple(0, ..))
+ }
+
+ pub fn is_bool(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Bool))
+ }
+
+ pub fn is_never(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Never)
+ }
+
+ pub fn is_mutable_reference(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Ref(hir_ty::Mutability::Mut, ..))
+ }
+
+ pub fn is_reference(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Ref(..))
+ }
+
+ pub fn as_reference(&self) -> Option<(Type, Mutability)> {
+ let (ty, _lt, m) = self.ty.as_reference()?;
+ let m = Mutability::from_mutable(matches!(m, hir_ty::Mutability::Mut));
+ Some((self.derived(ty.clone()), m))
+ }
+
+ pub fn is_slice(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Slice(..))
+ }
+
+ pub fn is_usize(&self) -> bool {
+ matches!(self.ty.kind(Interner), TyKind::Scalar(Scalar::Uint(UintTy::Usize)))
+ }
+
+ pub fn remove_ref(&self) -> Option<Type> {
+ match &self.ty.kind(Interner) {
+ TyKind::Ref(.., ty) => Some(self.derived(ty.clone())),
+ _ => None,
+ }
+ }
+
+ pub fn strip_references(&self) -> Type {
+ self.derived(self.ty.strip_references().clone())
+ }
+
+ pub fn strip_reference(&self) -> Type {
+ self.derived(self.ty.strip_reference().clone())
+ }
+
+ pub fn is_unknown(&self) -> bool {
+ self.ty.is_unknown()
+ }
+
+ /// Checks that particular type `ty` implements `std::future::IntoFuture` or
+ /// `std::future::Future`.
+ /// This function is used in `.await` syntax completion.
+ pub fn impls_into_future(&self, db: &dyn HirDatabase) -> bool {
+ let trait_ = db
+ .lang_item(self.env.krate, SmolStr::new_inline("into_future"))
+ .and_then(|it| {
+ let into_future_fn = it.as_function()?;
+ let assoc_item = as_assoc_item(db, AssocItem::Function, into_future_fn)?;
+ let into_future_trait = assoc_item.containing_trait_or_trait_impl(db)?;
+ Some(into_future_trait.id)
+ })
+ .or_else(|| {
+ let future_trait =
+ db.lang_item(self.env.krate, SmolStr::new_inline("future_trait"))?;
+ future_trait.as_trait()
+ });
+
+ let trait_ = match trait_ {
+ Some(it) => it,
+ None => return false,
+ };
+
+ let canonical_ty =
+ Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) };
+ method_resolution::implements_trait(&canonical_ty, db, self.env.clone(), trait_)
+ }
+
+ /// Checks that particular type `ty` implements `std::ops::FnOnce`.
+ ///
+ /// This function can be used to check if a particular type is callable, since FnOnce is a
+ /// supertrait of Fn and FnMut, so all callable types implements at least FnOnce.
+ pub fn impls_fnonce(&self, db: &dyn HirDatabase) -> bool {
+ let fnonce_trait = match FnTrait::FnOnce.get_id(db, self.env.krate) {
+ Some(it) => it,
+ None => return false,
+ };
+
+ let canonical_ty =
+ Canonical { value: self.ty.clone(), binders: CanonicalVarKinds::empty(Interner) };
+ method_resolution::implements_trait_unique(
+ &canonical_ty,
+ db,
+ self.env.clone(),
+ fnonce_trait,
+ )
+ }
+
+ pub fn impls_trait(&self, db: &dyn HirDatabase, trait_: Trait, args: &[Type]) -> bool {
+ let mut it = args.iter().map(|t| t.ty.clone());
+ let trait_ref = TyBuilder::trait_ref(db, trait_.id)
+ .push(self.ty.clone())
+ .fill(|x| {
+ let r = it.next().unwrap();
+ match x {
+ ParamKind::Type => GenericArgData::Ty(r).intern(Interner),
+ ParamKind::Const(ty) => {
+ // FIXME: this code is not covered in tests.
+ unknown_const_as_generic(ty.clone())
+ }
+ }
+ })
+ .build();
+
+ let goal = Canonical {
+ value: hir_ty::InEnvironment::new(&self.env.env, trait_ref.cast(Interner)),
+ binders: CanonicalVarKinds::empty(Interner),
+ };
+
+ db.trait_solve(self.env.krate, goal).is_some()
+ }
+
+ pub fn normalize_trait_assoc_type(
+ &self,
+ db: &dyn HirDatabase,
+ args: &[Type],
+ alias: TypeAlias,
+ ) -> Option<Type> {
+ let mut args = args.iter();
+ let trait_id = match alias.id.lookup(db.upcast()).container {
+ ItemContainerId::TraitId(id) => id,
+ _ => unreachable!("non assoc type alias reached in normalize_trait_assoc_type()"),
+ };
+ let parent_subst = TyBuilder::subst_for_def(db, trait_id, None)
+ .push(self.ty.clone())
+ .fill(|x| {
+ // FIXME: this code is not covered in tests.
+ match x {
+ ParamKind::Type => {
+ GenericArgData::Ty(args.next().unwrap().ty.clone()).intern(Interner)
+ }
+ ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()),
+ }
+ })
+ .build();
+ // FIXME: We don't handle GATs yet.
+ let projection = TyBuilder::assoc_type_projection(db, alias.id, Some(parent_subst)).build();
+
+ let ty = db.normalize_projection(projection, self.env.clone());
+ if ty.is_unknown() {
+ None
+ } else {
+ Some(self.derived(ty))
+ }
+ }
+
+ pub fn is_copy(&self, db: &dyn HirDatabase) -> bool {
+ let lang_item = db.lang_item(self.env.krate, SmolStr::new_inline("copy"));
+ let copy_trait = match lang_item {
+ Some(LangItemTarget::TraitId(it)) => it,
+ _ => return false,
+ };
+ self.impls_trait(db, copy_trait.into(), &[])
+ }
+
+ pub fn as_callable(&self, db: &dyn HirDatabase) -> Option<Callable> {
+ let callee = match self.ty.kind(Interner) {
+ TyKind::Closure(id, _) => Callee::Closure(*id),
+ TyKind::Function(_) => Callee::FnPtr,
+ TyKind::FnDef(..) => Callee::Def(self.ty.callable_def(db)?),
+ _ => {
++ let sig = hir_ty::callable_sig_from_fnonce(&self.ty, self.env.clone(), db)?;
+ return Some(Callable {
+ ty: self.clone(),
+ sig,
+ callee: Callee::Other,
+ is_bound_method: false,
+ });
+ }
+ };
+
+ let sig = self.ty.callable_sig(db)?;
+ Some(Callable { ty: self.clone(), sig, callee, is_bound_method: false })
+ }
+
+ pub fn is_closure(&self) -> bool {
+ matches!(&self.ty.kind(Interner), TyKind::Closure { .. })
+ }
+
+ pub fn is_fn(&self) -> bool {
+ matches!(&self.ty.kind(Interner), TyKind::FnDef(..) | TyKind::Function { .. })
+ }
+
+ pub fn is_array(&self) -> bool {
+ matches!(&self.ty.kind(Interner), TyKind::Array(..))
+ }
+
+ pub fn is_packed(&self, db: &dyn HirDatabase) -> bool {
+ let adt_id = match *self.ty.kind(Interner) {
+ TyKind::Adt(hir_ty::AdtId(adt_id), ..) => adt_id,
+ _ => return false,
+ };
+
+ let adt = adt_id.into();
+ match adt {
+ Adt::Struct(s) => matches!(s.repr(db), Some(ReprData { packed: true, .. })),
+ _ => false,
+ }
+ }
+
+ pub fn is_raw_ptr(&self) -> bool {
+ matches!(&self.ty.kind(Interner), TyKind::Raw(..))
+ }
+
+ pub fn contains_unknown(&self) -> bool {
+ return go(&self.ty);
+
+ fn go(ty: &Ty) -> bool {
+ match ty.kind(Interner) {
+ TyKind::Error => true,
+
+ TyKind::Adt(_, substs)
+ | TyKind::AssociatedType(_, substs)
+ | TyKind::Tuple(_, substs)
+ | TyKind::OpaqueType(_, substs)
+ | TyKind::FnDef(_, substs)
+ | TyKind::Closure(_, substs) => {
+ substs.iter(Interner).filter_map(|a| a.ty(Interner)).any(go)
+ }
+
+ TyKind::Array(_ty, len) if len.is_unknown() => true,
+ TyKind::Array(ty, _)
+ | TyKind::Slice(ty)
+ | TyKind::Raw(_, ty)
+ | TyKind::Ref(_, _, ty) => go(ty),
+
+ TyKind::Scalar(_)
+ | TyKind::Str
+ | TyKind::Never
+ | TyKind::Placeholder(_)
+ | TyKind::BoundVar(_)
+ | TyKind::InferenceVar(_, _)
+ | TyKind::Dyn(_)
+ | TyKind::Function(_)
+ | TyKind::Alias(_)
+ | TyKind::Foreign(_)
+ | TyKind::Generator(..)
+ | TyKind::GeneratorWitness(..) => false,
+ }
+ }
+ }
+
+ pub fn fields(&self, db: &dyn HirDatabase) -> Vec<(Field, Type)> {
+ let (variant_id, substs) = match self.ty.kind(Interner) {
+ TyKind::Adt(hir_ty::AdtId(AdtId::StructId(s)), substs) => ((*s).into(), substs),
+ TyKind::Adt(hir_ty::AdtId(AdtId::UnionId(u)), substs) => ((*u).into(), substs),
+ _ => return Vec::new(),
+ };
+
+ db.field_types(variant_id)
+ .iter()
+ .map(|(local_id, ty)| {
+ let def = Field { parent: variant_id.into(), id: local_id };
+ let ty = ty.clone().substitute(Interner, substs);
+ (def, self.derived(ty))
+ })
+ .collect()
+ }
+
+ pub fn tuple_fields(&self, _db: &dyn HirDatabase) -> Vec<Type> {
+ if let TyKind::Tuple(_, substs) = &self.ty.kind(Interner) {
+ substs
+ .iter(Interner)
+ .map(|ty| self.derived(ty.assert_ty_ref(Interner).clone()))
+ .collect()
+ } else {
+ Vec::new()
+ }
+ }
+
+ pub fn autoderef<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Type> + 'a {
+ self.autoderef_(db).map(move |ty| self.derived(ty))
+ }
+
+ fn autoderef_<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Ty> + 'a {
+ // There should be no inference vars in types passed here
+ let canonical = hir_ty::replace_errors_with_variables(&self.ty);
+ let environment = self.env.clone();
+ autoderef(db, environment, canonical).map(|canonical| canonical.value)
+ }
+
+ // This would be nicer if it just returned an iterator, but that runs into
+ // lifetime problems, because we need to borrow temp `CrateImplDefs`.
+ pub fn iterate_assoc_items<T>(
+ &self,
+ db: &dyn HirDatabase,
+ krate: Crate,
+ mut callback: impl FnMut(AssocItem) -> Option<T>,
+ ) -> Option<T> {
+ let mut slot = None;
+ self.iterate_assoc_items_dyn(db, krate, &mut |assoc_item_id| {
+ slot = callback(assoc_item_id.into());
+ slot.is_some()
+ });
+ slot
+ }
+
+ fn iterate_assoc_items_dyn(
+ &self,
+ db: &dyn HirDatabase,
+ krate: Crate,
+ callback: &mut dyn FnMut(AssocItemId) -> bool,
+ ) {
+ let def_crates = match method_resolution::def_crates(db, &self.ty, krate.id) {
+ Some(it) => it,
+ None => return,
+ };
+ for krate in def_crates {
+ let impls = db.inherent_impls_in_crate(krate);
+
+ for impl_def in impls.for_self_ty(&self.ty) {
+ for &item in db.impl_data(*impl_def).items.iter() {
+ if callback(item) {
+ return;
+ }
+ }
+ }
+ }
+ }
+
+ pub fn type_arguments(&self) -> impl Iterator<Item = Type> + '_ {
+ self.ty
+ .strip_references()
+ .as_adt()
+ .into_iter()
+ .flat_map(|(_, substs)| substs.iter(Interner))
+ .filter_map(|arg| arg.ty(Interner).cloned())
+ .map(move |ty| self.derived(ty))
+ }
+
+ pub fn iterate_method_candidates<T>(
+ &self,
+ db: &dyn HirDatabase,
+ scope: &SemanticsScope<'_>,
+ // FIXME this can be retrieved from `scope`, except autoimport uses this
+ // to specify a different set, so the method needs to be split
+ traits_in_scope: &FxHashSet<TraitId>,
+ with_local_impls: Option<Module>,
+ name: Option<&Name>,
+ mut callback: impl FnMut(Function) -> Option<T>,
+ ) -> Option<T> {
+ let _p = profile::span("iterate_method_candidates");
+ let mut slot = None;
+
+ self.iterate_method_candidates_dyn(
+ db,
+ scope,
+ traits_in_scope,
+ with_local_impls,
+ name,
+ &mut |assoc_item_id| {
+ if let AssocItemId::FunctionId(func) = assoc_item_id {
+ if let Some(res) = callback(func.into()) {
+ slot = Some(res);
+ return ControlFlow::Break(());
+ }
+ }
+ ControlFlow::Continue(())
+ },
+ );
+ slot
+ }
+
+ fn iterate_method_candidates_dyn(
+ &self,
+ db: &dyn HirDatabase,
+ scope: &SemanticsScope<'_>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ with_local_impls: Option<Module>,
+ name: Option<&Name>,
+ callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>,
+ ) {
+ // There should be no inference vars in types passed here
+ let canonical = hir_ty::replace_errors_with_variables(&self.ty);
+
+ let krate = scope.krate();
+ let environment = scope.resolver().generic_def().map_or_else(
+ || Arc::new(TraitEnvironment::empty(krate.id)),
+ |d| db.trait_environment(d),
+ );
+
+ method_resolution::iterate_method_candidates_dyn(
+ &canonical,
+ db,
+ environment,
+ traits_in_scope,
+ with_local_impls.and_then(|b| b.id.containing_block()).into(),
+ name,
+ method_resolution::LookupMode::MethodCall,
+ &mut |_adj, id| callback(id),
+ );
+ }
+
+ pub fn iterate_path_candidates<T>(
+ &self,
+ db: &dyn HirDatabase,
+ scope: &SemanticsScope<'_>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ with_local_impls: Option<Module>,
+ name: Option<&Name>,
+ mut callback: impl FnMut(AssocItem) -> Option<T>,
+ ) -> Option<T> {
+ let _p = profile::span("iterate_path_candidates");
+ let mut slot = None;
+ self.iterate_path_candidates_dyn(
+ db,
+ scope,
+ traits_in_scope,
+ with_local_impls,
+ name,
+ &mut |assoc_item_id| {
+ if let Some(res) = callback(assoc_item_id.into()) {
+ slot = Some(res);
+ return ControlFlow::Break(());
+ }
+ ControlFlow::Continue(())
+ },
+ );
+ slot
+ }
+
+ fn iterate_path_candidates_dyn(
+ &self,
+ db: &dyn HirDatabase,
+ scope: &SemanticsScope<'_>,
+ traits_in_scope: &FxHashSet<TraitId>,
+ with_local_impls: Option<Module>,
+ name: Option<&Name>,
+ callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>,
+ ) {
+ let canonical = hir_ty::replace_errors_with_variables(&self.ty);
+
+ let krate = scope.krate();
+ let environment = scope.resolver().generic_def().map_or_else(
+ || Arc::new(TraitEnvironment::empty(krate.id)),
+ |d| db.trait_environment(d),
+ );
+
+ method_resolution::iterate_path_candidates(
+ &canonical,
+ db,
+ environment,
+ traits_in_scope,
+ with_local_impls.and_then(|b| b.id.containing_block()).into(),
+ name,
+ &mut |id| callback(id),
+ );
+ }
+
+ pub fn as_adt(&self) -> Option<Adt> {
+ let (adt, _subst) = self.ty.as_adt()?;
+ Some(adt.into())
+ }
+
+ pub fn as_builtin(&self) -> Option<BuiltinType> {
+ self.ty.as_builtin().map(|inner| BuiltinType { inner })
+ }
+
+ pub fn as_dyn_trait(&self) -> Option<Trait> {
+ self.ty.dyn_trait().map(Into::into)
+ }
+
+ /// If a type can be represented as `dyn Trait`, returns all traits accessible via this type,
+ /// or an empty iterator otherwise.
+ pub fn applicable_inherent_traits<'a>(
+ &'a self,
+ db: &'a dyn HirDatabase,
+ ) -> impl Iterator<Item = Trait> + 'a {
+ let _p = profile::span("applicable_inherent_traits");
+ self.autoderef_(db)
+ .filter_map(|ty| ty.dyn_trait())
+ .flat_map(move |dyn_trait_id| hir_ty::all_super_traits(db.upcast(), dyn_trait_id))
+ .map(Trait::from)
+ }
+
+ pub fn env_traits<'a>(&'a self, db: &'a dyn HirDatabase) -> impl Iterator<Item = Trait> + 'a {
+ let _p = profile::span("env_traits");
+ self.autoderef_(db)
+ .filter(|ty| matches!(ty.kind(Interner), TyKind::Placeholder(_)))
+ .flat_map(|ty| {
+ self.env
+ .traits_in_scope_from_clauses(ty)
+ .flat_map(|t| hir_ty::all_super_traits(db.upcast(), t))
+ })
+ .map(Trait::from)
+ }
+
+ pub fn as_impl_traits(&self, db: &dyn HirDatabase) -> Option<impl Iterator<Item = Trait>> {
+ self.ty.impl_trait_bounds(db).map(|it| {
+ it.into_iter().filter_map(|pred| match pred.skip_binders() {
+ hir_ty::WhereClause::Implemented(trait_ref) => {
+ Some(Trait::from(trait_ref.hir_trait_id()))
+ }
+ _ => None,
+ })
+ })
+ }
+
+ pub fn as_associated_type_parent_trait(&self, db: &dyn HirDatabase) -> Option<Trait> {
+ self.ty.associated_type_parent_trait(db).map(Into::into)
+ }
+
+ fn derived(&self, ty: Ty) -> Type {
+ Type { env: self.env.clone(), ty }
+ }
+
+ pub fn walk(&self, db: &dyn HirDatabase, mut cb: impl FnMut(Type)) {
+ // TypeWalk::walk for a Ty at first visits parameters and only after that the Ty itself.
+ // We need a different order here.
+
+ fn walk_substs(
+ db: &dyn HirDatabase,
+ type_: &Type,
+ substs: &Substitution,
+ cb: &mut impl FnMut(Type),
+ ) {
+ for ty in substs.iter(Interner).filter_map(|a| a.ty(Interner)) {
+ walk_type(db, &type_.derived(ty.clone()), cb);
+ }
+ }
+
+ fn walk_bounds(
+ db: &dyn HirDatabase,
+ type_: &Type,
+ bounds: &[QuantifiedWhereClause],
+ cb: &mut impl FnMut(Type),
+ ) {
+ for pred in bounds {
+ if let WhereClause::Implemented(trait_ref) = pred.skip_binders() {
+ cb(type_.clone());
+ // skip the self type. it's likely the type we just got the bounds from
+ for ty in
+ trait_ref.substitution.iter(Interner).skip(1).filter_map(|a| a.ty(Interner))
+ {
+ walk_type(db, &type_.derived(ty.clone()), cb);
+ }
+ }
+ }
+ }
+
+ fn walk_type(db: &dyn HirDatabase, type_: &Type, cb: &mut impl FnMut(Type)) {
+ let ty = type_.ty.strip_references();
+ match ty.kind(Interner) {
+ TyKind::Adt(_, substs) => {
+ cb(type_.derived(ty.clone()));
+ walk_substs(db, type_, substs, cb);
+ }
+ TyKind::AssociatedType(_, substs) => {
+ if ty.associated_type_parent_trait(db).is_some() {
+ cb(type_.derived(ty.clone()));
+ }
+ walk_substs(db, type_, substs, cb);
+ }
+ TyKind::OpaqueType(_, subst) => {
+ if let Some(bounds) = ty.impl_trait_bounds(db) {
+ walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb);
+ }
+
+ walk_substs(db, type_, subst, cb);
+ }
+ TyKind::Alias(AliasTy::Opaque(opaque_ty)) => {
+ if let Some(bounds) = ty.impl_trait_bounds(db) {
+ walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb);
+ }
+
+ walk_substs(db, type_, &opaque_ty.substitution, cb);
+ }
+ TyKind::Placeholder(_) => {
+ if let Some(bounds) = ty.impl_trait_bounds(db) {
+ walk_bounds(db, &type_.derived(ty.clone()), &bounds, cb);
+ }
+ }
+ TyKind::Dyn(bounds) => {
+ walk_bounds(
+ db,
+ &type_.derived(ty.clone()),
+ bounds.bounds.skip_binders().interned(),
+ cb,
+ );
+ }
+
+ TyKind::Ref(_, _, ty)
+ | TyKind::Raw(_, ty)
+ | TyKind::Array(ty, _)
+ | TyKind::Slice(ty) => {
+ walk_type(db, &type_.derived(ty.clone()), cb);
+ }
+
+ TyKind::FnDef(_, substs)
+ | TyKind::Tuple(_, substs)
+ | TyKind::Closure(.., substs) => {
+ walk_substs(db, type_, substs, cb);
+ }
+ TyKind::Function(hir_ty::FnPointer { substitution, .. }) => {
+ walk_substs(db, type_, &substitution.0, cb);
+ }
+
+ _ => {}
+ }
+ }
+
+ walk_type(db, self, &mut cb);
+ }
+
+ pub fn could_unify_with(&self, db: &dyn HirDatabase, other: &Type) -> bool {
+ let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone()));
+ hir_ty::could_unify(db, self.env.clone(), &tys)
+ }
+
+ pub fn could_coerce_to(&self, db: &dyn HirDatabase, to: &Type) -> bool {
+ let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), to.ty.clone()));
+ hir_ty::could_coerce(db, self.env.clone(), &tys)
+ }
+
+ pub fn as_type_param(&self, db: &dyn HirDatabase) -> Option<TypeParam> {
+ match self.ty.kind(Interner) {
+ TyKind::Placeholder(p) => Some(TypeParam {
+ id: TypeParamId::from_unchecked(hir_ty::from_placeholder_idx(db, *p)),
+ }),
+ _ => None,
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct Callable {
+ ty: Type,
+ sig: CallableSig,
+ callee: Callee,
+ pub(crate) is_bound_method: bool,
+}
+
+#[derive(Debug)]
+enum Callee {
+ Def(CallableDefId),
+ Closure(ClosureId),
+ FnPtr,
+ Other,
+}
+
+pub enum CallableKind {
+ Function(Function),
+ TupleStruct(Struct),
+ TupleEnumVariant(Variant),
+ Closure,
+ FnPtr,
+ /// Some other type that implements `FnOnce`.
+ Other,
+}
+
+impl Callable {
+ pub fn kind(&self) -> CallableKind {
+ use Callee::*;
+ match self.callee {
+ Def(CallableDefId::FunctionId(it)) => CallableKind::Function(it.into()),
+ Def(CallableDefId::StructId(it)) => CallableKind::TupleStruct(it.into()),
+ Def(CallableDefId::EnumVariantId(it)) => CallableKind::TupleEnumVariant(it.into()),
+ Closure(_) => CallableKind::Closure,
+ FnPtr => CallableKind::FnPtr,
+ Other => CallableKind::Other,
+ }
+ }
+ pub fn receiver_param(&self, db: &dyn HirDatabase) -> Option<ast::SelfParam> {
+ let func = match self.callee {
+ Callee::Def(CallableDefId::FunctionId(it)) if self.is_bound_method => it,
+ _ => return None,
+ };
+ let src = func.lookup(db.upcast()).source(db.upcast());
+ let param_list = src.value.param_list()?;
+ param_list.self_param()
+ }
+ pub fn n_params(&self) -> usize {
+ self.sig.params().len() - if self.is_bound_method { 1 } else { 0 }
+ }
+ pub fn params(
+ &self,
+ db: &dyn HirDatabase,
+ ) -> Vec<(Option<Either<ast::SelfParam, ast::Pat>>, Type)> {
+ let types = self
+ .sig
+ .params()
+ .iter()
+ .skip(if self.is_bound_method { 1 } else { 0 })
+ .map(|ty| self.ty.derived(ty.clone()));
+ let map_param = |it: ast::Param| it.pat().map(Either::Right);
+ let patterns = match self.callee {
+ Callee::Def(CallableDefId::FunctionId(func)) => {
+ let src = func.lookup(db.upcast()).source(db.upcast());
+ src.value.param_list().map(|param_list| {
+ param_list
+ .self_param()
+ .map(|it| Some(Either::Left(it)))
+ .filter(|_| !self.is_bound_method)
+ .into_iter()
+ .chain(param_list.params().map(map_param))
+ })
+ }
+ Callee::Closure(closure_id) => match closure_source(db, closure_id) {
+ Some(src) => src.param_list().map(|param_list| {
+ param_list
+ .self_param()
+ .map(|it| Some(Either::Left(it)))
+ .filter(|_| !self.is_bound_method)
+ .into_iter()
+ .chain(param_list.params().map(map_param))
+ }),
+ None => None,
+ },
+ _ => None,
+ };
+ patterns.into_iter().flatten().chain(iter::repeat(None)).zip(types).collect()
+ }
+ pub fn return_type(&self) -> Type {
+ self.ty.derived(self.sig.ret().clone())
+ }
+}
+
+fn closure_source(db: &dyn HirDatabase, closure: ClosureId) -> Option<ast::ClosureExpr> {
+ let (owner, expr_id) = db.lookup_intern_closure(closure.into());
+ let (_, source_map) = db.body_with_source_map(owner);
+ let ast = source_map.expr_syntax(expr_id).ok()?;
+ let root = ast.file_syntax(db.upcast());
+ let expr = ast.value.to_node(&root);
+ match expr {
+ ast::Expr::ClosureExpr(it) => Some(it),
+ _ => None,
+ }
+}
+
+#[derive(Copy, Clone, Debug, Eq, PartialEq)]
+pub enum BindingMode {
+ Move,
+ Ref(Mutability),
+}
+
+/// For IDE only
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub enum ScopeDef {
+ ModuleDef(ModuleDef),
+ GenericParam(GenericParam),
+ ImplSelfType(Impl),
+ AdtSelfType(Adt),
+ Local(Local),
+ Label(Label),
+ Unknown,
+}
+
+impl ScopeDef {
+ pub fn all_items(def: PerNs) -> ArrayVec<Self, 3> {
+ let mut items = ArrayVec::new();
+
+ match (def.take_types(), def.take_values()) {
+ (Some(m1), None) => items.push(ScopeDef::ModuleDef(m1.into())),
+ (None, Some(m2)) => items.push(ScopeDef::ModuleDef(m2.into())),
+ (Some(m1), Some(m2)) => {
+ // Some items, like unit structs and enum variants, are
+ // returned as both a type and a value. Here we want
+ // to de-duplicate them.
+ if m1 != m2 {
+ items.push(ScopeDef::ModuleDef(m1.into()));
+ items.push(ScopeDef::ModuleDef(m2.into()));
+ } else {
+ items.push(ScopeDef::ModuleDef(m1.into()));
+ }
+ }
+ (None, None) => {}
+ };
+
+ if let Some(macro_def_id) = def.take_macros() {
+ items.push(ScopeDef::ModuleDef(ModuleDef::Macro(macro_def_id.into())));
+ }
+
+ if items.is_empty() {
+ items.push(ScopeDef::Unknown);
+ }
+
+ items
+ }
+
+ pub fn attrs(&self, db: &dyn HirDatabase) -> Option<AttrsWithOwner> {
+ match self {
+ ScopeDef::ModuleDef(it) => it.attrs(db),
+ ScopeDef::GenericParam(it) => Some(it.attrs(db)),
+ ScopeDef::ImplSelfType(_)
+ | ScopeDef::AdtSelfType(_)
+ | ScopeDef::Local(_)
+ | ScopeDef::Label(_)
+ | ScopeDef::Unknown => None,
+ }
+ }
+
+ pub fn krate(&self, db: &dyn HirDatabase) -> Option<Crate> {
+ match self {
+ ScopeDef::ModuleDef(it) => it.module(db).map(|m| m.krate()),
+ ScopeDef::GenericParam(it) => Some(it.module(db).krate()),
+ ScopeDef::ImplSelfType(_) => None,
+ ScopeDef::AdtSelfType(it) => Some(it.module(db).krate()),
+ ScopeDef::Local(it) => Some(it.module(db).krate()),
+ ScopeDef::Label(it) => Some(it.module(db).krate()),
+ ScopeDef::Unknown => None,
+ }
+ }
+}
+
+impl From<ItemInNs> for ScopeDef {
+ fn from(item: ItemInNs) -> Self {
+ match item {
+ ItemInNs::Types(id) => ScopeDef::ModuleDef(id),
+ ItemInNs::Values(id) => ScopeDef::ModuleDef(id),
+ ItemInNs::Macros(id) => ScopeDef::ModuleDef(ModuleDef::Macro(id)),
+ }
+ }
+}
+
++#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
++pub enum Adjust {
++ /// Go from ! to any type.
++ NeverToAny,
++ /// Dereference once, producing a place.
++ Deref(Option<OverloadedDeref>),
++ /// Take the address and produce either a `&` or `*` pointer.
++ Borrow(AutoBorrow),
++ Pointer(PointerCast),
++}
++
++#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
++pub enum AutoBorrow {
++ /// Converts from T to &T.
++ Ref(Mutability),
++ /// Converts from T to *T.
++ RawPtr(Mutability),
++}
++
++#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
++pub struct OverloadedDeref(pub Mutability);
++
+pub trait HasVisibility {
+ fn visibility(&self, db: &dyn HirDatabase) -> Visibility;
+ fn is_visible_from(&self, db: &dyn HirDatabase, module: Module) -> bool {
+ let vis = self.visibility(db);
+ vis.is_visible_from(db.upcast(), module.id)
+ }
+}
+
+/// Trait for obtaining the defining crate of an item.
+pub trait HasCrate {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate;
+}
+
+impl<T: hir_def::HasModule> HasCrate for T {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db.upcast()).krate().into()
+ }
+}
+
+impl HasCrate for AssocItem {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Struct {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Union {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Field {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.parent_def(db).module(db).krate()
+ }
+}
+
+impl HasCrate for Variant {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Function {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Const {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for TypeAlias {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Type {
+ fn krate(&self, _db: &dyn HirDatabase) -> Crate {
+ self.env.krate.into()
+ }
+}
+
+impl HasCrate for Macro {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Trait {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Static {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Adt {
+ fn krate(&self, db: &dyn HirDatabase) -> Crate {
+ self.module(db).krate()
+ }
+}
+
+impl HasCrate for Module {
+ fn krate(&self, _: &dyn HirDatabase) -> Crate {
+ Module::krate(*self)
+ }
+}
--- /dev/null
- Access, BindingMode, BuiltinAttr, Callable, ConstParam, Crate, DeriveHelper, Field, Function,
- HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local, Macro, Module, ModuleDef,
- Name, Path, ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef,
+//! See `Semantics`.
+
+mod source_to_def;
+
+use std::{cell::RefCell, fmt, iter, ops};
+
+use base_db::{FileId, FileRange};
+use hir_def::{
+ body, macro_id_to_def_id,
+ resolver::{self, HasResolver, Resolver, TypeNs},
+ type_ref::Mutability,
+ AsMacroCall, FunctionId, MacroId, TraitId, VariantId,
+};
+use hir_expand::{
+ db::AstDatabase,
+ name::{known, AsName},
+ ExpansionInfo, MacroCallId,
+};
+use itertools::Itertools;
+use rustc_hash::{FxHashMap, FxHashSet};
+use smallvec::{smallvec, SmallVec};
+use syntax::{
+ algo::skip_trivia_token,
+ ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody},
+ match_ast, AstNode, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
+};
+
+use crate::{
+ db::HirDatabase,
+ semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
+ source_analyzer::{resolve_hir_path, SourceAnalyzer},
- // FIXME: Figure out a nice interface to inspect adjustments
- pub fn is_implicit_reborrow(&self, expr: &ast::Expr) -> Option<Mutability> {
- self.imp.is_implicit_reborrow(expr)
++ Access, Adjust, AutoBorrow, BindingMode, BuiltinAttr, Callable, ConstParam, Crate,
++ DeriveHelper, Field, Function, HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local,
++ Macro, Module, ModuleDef, Name, OverloadedDeref, Path, ScopeDef, ToolModule, Trait, Type,
++ TypeAlias, TypeParam, VariantDef,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum PathResolution {
+ /// An item
+ Def(ModuleDef),
+ /// A local binding (only value namespace)
+ Local(Local),
+ /// A type parameter
+ TypeParam(TypeParam),
+ /// A const parameter
+ ConstParam(ConstParam),
+ SelfType(Impl),
+ BuiltinAttr(BuiltinAttr),
+ ToolModule(ToolModule),
+ DeriveHelper(DeriveHelper),
+}
+
+impl PathResolution {
+ pub(crate) fn in_type_ns(&self) -> Option<TypeNs> {
+ match self {
+ PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())),
+ PathResolution::Def(ModuleDef::BuiltinType(builtin)) => {
+ Some(TypeNs::BuiltinType((*builtin).into()))
+ }
+ PathResolution::Def(
+ ModuleDef::Const(_)
+ | ModuleDef::Variant(_)
+ | ModuleDef::Macro(_)
+ | ModuleDef::Function(_)
+ | ModuleDef::Module(_)
+ | ModuleDef::Static(_)
+ | ModuleDef::Trait(_),
+ ) => None,
+ PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
+ Some(TypeNs::TypeAliasId((*alias).into()))
+ }
+ PathResolution::BuiltinAttr(_)
+ | PathResolution::ToolModule(_)
+ | PathResolution::Local(_)
+ | PathResolution::DeriveHelper(_)
+ | PathResolution::ConstParam(_) => None,
+ PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
+ PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
+ }
+ }
+}
+
+#[derive(Debug)]
+pub struct TypeInfo {
+ /// The original type of the expression or pattern.
+ pub original: Type,
+ /// The adjusted type, if an adjustment happened.
+ pub adjusted: Option<Type>,
+}
+
+impl TypeInfo {
+ pub fn original(self) -> Type {
+ self.original
+ }
+
+ pub fn has_adjustment(&self) -> bool {
+ self.adjusted.is_some()
+ }
+
+ /// The adjusted type, or the original in case no adjustments occurred.
+ pub fn adjusted(self) -> Type {
+ self.adjusted.unwrap_or(self.original)
+ }
+}
+
+/// Primary API to get semantic information, like types, from syntax trees.
+pub struct Semantics<'db, DB> {
+ pub db: &'db DB,
+ imp: SemanticsImpl<'db>,
+}
+
+pub struct SemanticsImpl<'db> {
+ pub db: &'db dyn HirDatabase,
+ s2d_cache: RefCell<SourceToDefCache>,
+ expansion_info_cache: RefCell<FxHashMap<HirFileId, Option<ExpansionInfo>>>,
+ // Rootnode to HirFileId cache
+ cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
+ // MacroCall to its expansion's HirFileId cache
+ macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, HirFileId>>,
+}
+
+impl<DB> fmt::Debug for Semantics<'_, DB> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "Semantics {{ ... }}")
+ }
+}
+
+impl<'db, DB: HirDatabase> Semantics<'db, DB> {
+ pub fn new(db: &DB) -> Semantics<'_, DB> {
+ let impl_ = SemanticsImpl::new(db);
+ Semantics { db, imp: impl_ }
+ }
+
+ pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
+ self.imp.parse(file_id)
+ }
+
+ pub fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+ self.imp.parse_or_expand(file_id)
+ }
+
+ pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+ self.imp.expand(macro_call)
+ }
+
+ /// If `item` has an attribute macro attached to it, expands it.
+ pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
+ self.imp.expand_attr_macro(item)
+ }
+
+ pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
+ self.imp.expand_derive_as_pseudo_attr_macro(attr)
+ }
+
+ pub fn resolve_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<Option<Macro>>> {
+ self.imp.resolve_derive_macro(derive)
+ }
+
+ pub fn expand_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<SyntaxNode>> {
+ self.imp.expand_derive_macro(derive)
+ }
+
+ pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
+ self.imp.is_attr_macro_call(item)
+ }
+
+ pub fn is_derive_annotated(&self, item: &ast::Adt) -> bool {
+ self.imp.is_derive_annotated(item)
+ }
+
+ pub fn speculative_expand(
+ &self,
+ actual_macro_call: &ast::MacroCall,
+ speculative_args: &ast::TokenTree,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map)
+ }
+
+ pub fn speculative_expand_attr_macro(
+ &self,
+ actual_macro_call: &ast::Item,
+ speculative_args: &ast::Item,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
+ }
+
+ pub fn speculative_expand_derive_as_pseudo_attr_macro(
+ &self,
+ actual_macro_call: &ast::Attr,
+ speculative_args: &ast::Attr,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ self.imp.speculative_expand_derive_as_pseudo_attr_macro(
+ actual_macro_call,
+ speculative_args,
+ token_to_map,
+ )
+ }
+
+ /// Descend the token into macrocalls to its first mapped counterpart.
+ pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+ self.imp.descend_into_macros_single(token)
+ }
+
+ /// Descend the token into macrocalls to all its mapped counterparts.
+ pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+ self.imp.descend_into_macros(token)
+ }
+
+ /// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token.
+ ///
+ /// Returns the original non descended token if none of the mapped counterparts have the same text.
+ pub fn descend_into_macros_with_same_text(
+ &self,
+ token: SyntaxToken,
+ ) -> SmallVec<[SyntaxToken; 1]> {
+ self.imp.descend_into_macros_with_same_text(token)
+ }
+
+ pub fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
+ self.imp.descend_into_macros_with_kind_preference(token)
+ }
+
+ /// Maps a node down by mapping its first and last token down.
+ pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
+ self.imp.descend_node_into_attributes(node)
+ }
+
+ /// Search for a definition's source and cache its syntax tree
+ pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
+ where
+ Def::Ast: AstNode,
+ {
+ self.imp.source(def)
+ }
+
+ pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
+ self.imp.find_file(syntax_node).file_id
+ }
+
+ /// Attempts to map the node out of macro expanded files returning the original file range.
+ /// If upmapping is not possible, this will fall back to the range of the macro call of the
+ /// macro file the node resides in.
+ pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
+ self.imp.original_range(node)
+ }
+
+ /// Attempts to map the node out of macro expanded files returning the original file range.
+ pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
+ self.imp.original_range_opt(node)
+ }
+
+ /// Attempts to map the node out of macro expanded files.
+ /// This only work for attribute expansions, as other ones do not have nodes as input.
+ pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
+ self.imp.original_ast_node(node)
+ }
+ /// Attempts to map the node out of macro expanded files.
+ /// This only work for attribute expansions, as other ones do not have nodes as input.
+ pub fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
+ self.imp.original_syntax_node(node)
+ }
+
+ pub fn diagnostics_display_range(&self, diagnostics: InFile<SyntaxNodePtr>) -> FileRange {
+ self.imp.diagnostics_display_range(diagnostics)
+ }
+
+ pub fn token_ancestors_with_macros(
+ &self,
+ token: SyntaxToken,
+ ) -> impl Iterator<Item = SyntaxNode> + '_ {
+ token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
+ }
+
+ /// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
+ pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
+ self.imp.ancestors_with_macros(node)
+ }
+
+ pub fn ancestors_at_offset_with_macros(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> impl Iterator<Item = SyntaxNode> + '_ {
+ self.imp.ancestors_at_offset_with_macros(node, offset)
+ }
+
+ /// Find an AstNode by offset inside SyntaxNode, if it is inside *Macrofile*,
+ /// search up until it is of the target AstNode type
+ pub fn find_node_at_offset_with_macros<N: AstNode>(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> Option<N> {
+ self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
+ }
+
+ /// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
+ /// descend it and find again
+ pub fn find_node_at_offset_with_descend<N: AstNode>(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> Option<N> {
+ self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
+ }
+
+ /// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
+ /// descend it and find again
+ pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>(
+ &'slf self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> impl Iterator<Item = N> + 'slf {
+ self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
+ }
+
+ pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
+ self.imp.resolve_lifetime_param(lifetime)
+ }
+
+ pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
+ self.imp.resolve_label(lifetime)
+ }
+
+ pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
+ self.imp.resolve_type(ty)
+ }
+
+ pub fn resolve_trait(&self, trait_: &ast::Path) -> Option<Trait> {
+ self.imp.resolve_trait(trait_)
+ }
+
- fn is_implicit_reborrow(&self, expr: &ast::Expr) -> Option<Mutability> {
- self.analyze(expr.syntax())?.is_implicit_reborrow(self.db, expr)
++ pub fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjust>> {
++ self.imp.expr_adjustments(expr)
+ }
+
+ pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
+ self.imp.type_of_expr(expr)
+ }
+
+ pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
+ self.imp.type_of_pat(pat)
+ }
+
+ pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
+ self.imp.type_of_self(param)
+ }
+
+ pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
+ self.imp.pattern_adjustments(pat)
+ }
+
+ pub fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
+ self.imp.binding_mode_of_pat(pat)
+ }
+
+ pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
+ self.imp.resolve_method_call(call).map(Function::from)
+ }
+
+ pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
+ self.imp.resolve_await_to_poll(await_expr).map(Function::from)
+ }
+
+ pub fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<Function> {
+ self.imp.resolve_prefix_expr(prefix_expr).map(Function::from)
+ }
+
+ pub fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<Function> {
+ self.imp.resolve_index_expr(index_expr).map(Function::from)
+ }
+
+ pub fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<Function> {
+ self.imp.resolve_bin_expr(bin_expr).map(Function::from)
+ }
+
+ pub fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<Function> {
+ self.imp.resolve_try_expr(try_expr).map(Function::from)
+ }
+
+ pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
+ self.imp.resolve_method_call_as_callable(call)
+ }
+
+ pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
+ self.imp.resolve_field(field)
+ }
+
+ pub fn resolve_record_field(
+ &self,
+ field: &ast::RecordExprField,
+ ) -> Option<(Field, Option<Local>, Type)> {
+ self.imp.resolve_record_field(field)
+ }
+
+ pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
+ self.imp.resolve_record_pat_field(field)
+ }
+
+ pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
+ self.imp.resolve_macro_call(macro_call)
+ }
+
+ pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
+ self.imp.is_unsafe_macro_call(macro_call)
+ }
+
+ pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
+ self.imp.resolve_attr_macro_call(item)
+ }
+
+ pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
+ self.imp.resolve_path(path)
+ }
+
+ pub fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
+ self.imp.resolve_extern_crate(extern_crate)
+ }
+
+ pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
+ self.imp.resolve_variant(record_lit).map(VariantDef::from)
+ }
+
+ pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
+ self.imp.resolve_bind_pat_to_const(pat)
+ }
+
+ pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
+ self.imp.record_literal_missing_fields(literal)
+ }
+
+ pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
+ self.imp.record_pattern_missing_fields(pattern)
+ }
+
+ pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
+ let src = self.imp.find_file(src.syntax()).with_value(src).cloned();
+ T::to_def(&self.imp, src)
+ }
+
+ pub fn to_module_def(&self, file: FileId) -> Option<Module> {
+ self.imp.to_module_def(file).next()
+ }
+
+ pub fn to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
+ self.imp.to_module_def(file)
+ }
+
+ pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
+ self.imp.scope(node)
+ }
+
+ pub fn scope_at_offset(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> Option<SemanticsScope<'db>> {
+ self.imp.scope_at_offset(node, offset)
+ }
+
+ pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
+ self.imp.scope_for_def(def)
+ }
+
+ pub fn assert_contains_node(&self, node: &SyntaxNode) {
+ self.imp.assert_contains_node(node)
+ }
+
+ pub fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
+ self.imp.is_unsafe_method_call(method_call_expr)
+ }
+
+ pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
+ self.imp.is_unsafe_ref_expr(ref_expr)
+ }
+
+ pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
+ self.imp.is_unsafe_ident_pat(ident_pat)
+ }
+}
+
+impl<'db> SemanticsImpl<'db> {
+ fn new(db: &'db dyn HirDatabase) -> Self {
+ SemanticsImpl {
+ db,
+ s2d_cache: Default::default(),
+ cache: Default::default(),
+ expansion_info_cache: Default::default(),
+ macro_call_cache: Default::default(),
+ }
+ }
+
+ fn parse(&self, file_id: FileId) -> ast::SourceFile {
+ let tree = self.db.parse(file_id).tree();
+ self.cache(tree.syntax().clone(), file_id.into());
+ tree
+ }
+
+ fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+ let node = self.db.parse_or_expand(file_id)?;
+ self.cache(node.clone(), file_id);
+ Some(node)
+ }
+
+ fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+ let sa = self.analyze_no_infer(macro_call.syntax())?;
+ let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
+ let node = self.parse_or_expand(file_id)?;
+ Some(node)
+ }
+
+ fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
+ let src = self.wrap_node_infile(item.clone());
+ let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?;
+ self.parse_or_expand(macro_call_id.as_file())
+ }
+
+ fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
+ let src = self.wrap_node_infile(attr.clone());
+ let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
+ let call_id = self.with_ctx(|ctx| {
+ ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
+ })?;
+ self.parse_or_expand(call_id.as_file())
+ }
+
+ fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
+ let calls = self.derive_macro_calls(attr)?;
+ self.with_ctx(|ctx| {
+ Some(
+ calls
+ .into_iter()
+ .map(|call| {
+ macro_call_to_macro_id(ctx, self.db.upcast(), call?).map(|id| Macro { id })
+ })
+ .collect(),
+ )
+ })
+ }
+
+ fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<SyntaxNode>> {
+ let res: Vec<_> = self
+ .derive_macro_calls(attr)?
+ .into_iter()
+ .flat_map(|call| {
+ let file_id = call?.as_file();
+ let node = self.db.parse_or_expand(file_id)?;
+ self.cache(node.clone(), file_id);
+ Some(node)
+ })
+ .collect();
+ Some(res)
+ }
+
+ fn derive_macro_calls(&self, attr: &ast::Attr) -> Option<Vec<Option<MacroCallId>>> {
+ let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
+ let file_id = self.find_file(adt.syntax()).file_id;
+ let adt = InFile::new(file_id, &adt);
+ let src = InFile::new(file_id, attr.clone());
+ self.with_ctx(|ctx| {
+ let (.., res) = ctx.attr_to_derive_macro_call(adt, src)?;
+ Some(res.to_vec())
+ })
+ }
+
+ fn is_derive_annotated(&self, adt: &ast::Adt) -> bool {
+ let file_id = self.find_file(adt.syntax()).file_id;
+ let adt = InFile::new(file_id, adt);
+ self.with_ctx(|ctx| ctx.has_derives(adt))
+ }
+
+ fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
+ let file_id = self.find_file(item.syntax()).file_id;
+ let src = InFile::new(file_id, item.clone());
+ self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some())
+ }
+
+ fn speculative_expand(
+ &self,
+ actual_macro_call: &ast::MacroCall,
+ speculative_args: &ast::TokenTree,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ let SourceAnalyzer { file_id, resolver, .. } =
+ self.analyze_no_infer(actual_macro_call.syntax())?;
+ let macro_call = InFile::new(file_id, actual_macro_call);
+ let krate = resolver.krate();
+ let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
+ resolver
+ .resolve_path_as_macro(self.db.upcast(), &path)
+ .map(|it| macro_id_to_def_id(self.db.upcast(), it))
+ })?;
+ hir_expand::db::expand_speculative(
+ self.db.upcast(),
+ macro_call_id,
+ speculative_args.syntax(),
+ token_to_map,
+ )
+ }
+
+ fn speculative_expand_attr(
+ &self,
+ actual_macro_call: &ast::Item,
+ speculative_args: &ast::Item,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ let macro_call = self.wrap_node_infile(actual_macro_call.clone());
+ let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call))?;
+ hir_expand::db::expand_speculative(
+ self.db.upcast(),
+ macro_call_id,
+ speculative_args.syntax(),
+ token_to_map,
+ )
+ }
+
+ fn speculative_expand_derive_as_pseudo_attr_macro(
+ &self,
+ actual_macro_call: &ast::Attr,
+ speculative_args: &ast::Attr,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ let attr = self.wrap_node_infile(actual_macro_call.clone());
+ let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?;
+ let macro_call_id = self.with_ctx(|ctx| {
+ ctx.attr_to_derive_macro_call(attr.with_value(&adt), attr).map(|(_, it, _)| it)
+ })?;
+ hir_expand::db::expand_speculative(
+ self.db.upcast(),
+ macro_call_id,
+ speculative_args.syntax(),
+ token_to_map,
+ )
+ }
+
+ // This might not be the correct way to do this, but it works for now
+ fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
+ let mut res = smallvec![];
+ let tokens = (|| {
+ let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
+ let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?;
+ Some((first, last))
+ })();
+ let (first, last) = match tokens {
+ Some(it) => it,
+ None => return res,
+ };
+
+ if first == last {
+ self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
+ if let Some(node) = value.parent_ancestors().find_map(N::cast) {
+ res.push(node)
+ }
+ false
+ });
+ } else {
+ // Descend first and last token, then zip them to look for the node they belong to
+ let mut scratch: SmallVec<[_; 1]> = smallvec![];
+ self.descend_into_macros_impl(first, &mut |token| {
+ scratch.push(token);
+ false
+ });
+
+ let mut scratch = scratch.into_iter();
+ self.descend_into_macros_impl(
+ last,
+ &mut |InFile { value: last, file_id: last_fid }| {
+ if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
+ if first_fid == last_fid {
+ if let Some(p) = first.parent() {
+ let range = first.text_range().cover(last.text_range());
+ let node = find_root(&p)
+ .covering_element(range)
+ .ancestors()
+ .take_while(|it| it.text_range() == range)
+ .find_map(N::cast);
+ if let Some(node) = node {
+ res.push(node);
+ }
+ }
+ }
+ }
+ false
+ },
+ );
+ }
+ res
+ }
+
+ fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+ let mut res = smallvec![];
+ self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+ res.push(value);
+ false
+ });
+ res
+ }
+
+ fn descend_into_macros_with_same_text(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+ let text = token.text();
+ let mut res = smallvec![];
+ self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ if value.text() == text {
+ res.push(value);
+ }
+ false
+ });
+ if res.is_empty() {
+ res.push(token);
+ }
+ res
+ }
+
+ fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
+ let fetch_kind = |token: &SyntaxToken| match token.parent() {
+ Some(node) => match node.kind() {
+ kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => {
+ node.parent().map_or(kind, |it| it.kind())
+ }
+ _ => token.kind(),
+ },
+ None => token.kind(),
+ };
+ let preferred_kind = fetch_kind(&token);
+ let mut res = None;
+ self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ if fetch_kind(&value) == preferred_kind {
+ res = Some(value);
+ true
+ } else {
+ if let None = res {
+ res = Some(value)
+ }
+ false
+ }
+ });
+ res.unwrap_or(token)
+ }
+
+ fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+ let mut res = token.clone();
+ self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+ res = value;
+ true
+ });
+ res
+ }
+
+ fn descend_into_macros_impl(
+ &self,
+ token: SyntaxToken,
+ f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
+ ) {
+ let _p = profile::span("descend_into_macros");
+ let parent = match token.parent() {
+ Some(it) => it,
+ None => return,
+ };
+ let sa = match self.analyze_no_infer(&parent) {
+ Some(it) => it,
+ None => return,
+ };
+ let def_map = sa.resolver.def_map();
+
+ let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
+ let mut cache = self.expansion_info_cache.borrow_mut();
+ let mut mcache = self.macro_call_cache.borrow_mut();
+
+ let mut process_expansion_for_token =
+ |stack: &mut SmallVec<_>, macro_file, item, token: InFile<&_>| {
+ let expansion_info = cache
+ .entry(macro_file)
+ .or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
+ .as_ref()?;
+
+ {
+ let InFile { file_id, value } = expansion_info.expanded();
+ self.cache(value, file_id);
+ }
+
+ let mapped_tokens = expansion_info.map_token_down(self.db.upcast(), item, token)?;
+ let len = stack.len();
+
+ // requeue the tokens we got from mapping our current token down
+ stack.extend(mapped_tokens);
+ // if the length changed we have found a mapping for the token
+ (stack.len() != len).then(|| ())
+ };
+
+ // Remap the next token in the queue into a macro call its in, if it is not being remapped
+ // either due to not being in a macro-call or because its unused push it into the result vec,
+ // otherwise push the remapped tokens back into the queue as they can potentially be remapped again.
+ while let Some(token) = stack.pop() {
+ self.db.unwind_if_cancelled();
+ let was_not_remapped = (|| {
+ // First expand into attribute invocations
+ let containing_attribute_macro_call = self.with_ctx(|ctx| {
+ token.value.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
+ if item.attrs().next().is_none() {
+ // Don't force populate the dyn cache for items that don't have an attribute anyways
+ return None;
+ }
+ Some((ctx.item_to_macro_call(token.with_value(item.clone()))?, item))
+ })
+ });
+ if let Some((call_id, item)) = containing_attribute_macro_call {
+ let file_id = call_id.as_file();
+ return process_expansion_for_token(
+ &mut stack,
+ file_id,
+ Some(item),
+ token.as_ref(),
+ );
+ }
+
+ // Then check for token trees, that means we are either in a function-like macro or
+ // secondary attribute inputs
+ let tt = token.value.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
+ let parent = tt.syntax().parent()?;
+
+ if tt.left_delimiter_token().map_or(false, |it| it == token.value) {
+ return None;
+ }
+ if tt.right_delimiter_token().map_or(false, |it| it == token.value) {
+ return None;
+ }
+
+ if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
+ let mcall = token.with_value(macro_call);
+ let file_id = match mcache.get(&mcall) {
+ Some(&it) => it,
+ None => {
+ let it = sa.expand(self.db, mcall.as_ref())?;
+ mcache.insert(mcall, it);
+ it
+ }
+ };
+ process_expansion_for_token(&mut stack, file_id, None, token.as_ref())
+ } else if let Some(meta) = ast::Meta::cast(parent.clone()) {
+ // attribute we failed expansion for earlier, this might be a derive invocation
+ // or derive helper attribute
+ let attr = meta.parent_attr()?;
+
+ let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast) {
+ // this might be a derive, or a derive helper on an ADT
+ let derive_call = self.with_ctx(|ctx| {
+ // so try downmapping the token into the pseudo derive expansion
+ // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
+ ctx.attr_to_derive_macro_call(
+ token.with_value(&adt),
+ token.with_value(attr.clone()),
+ )
+ .map(|(_, call_id, _)| call_id)
+ });
+
+ match derive_call {
+ Some(call_id) => {
+ // resolved to a derive
+ let file_id = call_id.as_file();
+ return process_expansion_for_token(
+ &mut stack,
+ file_id,
+ Some(adt.into()),
+ token.as_ref(),
+ );
+ }
+ None => Some(adt),
+ }
+ } else {
+ // Otherwise this could be a derive helper on a variant or field
+ if let Some(field) = attr.syntax().parent().and_then(ast::RecordField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(field) =
+ attr.syntax().parent().and_then(ast::TupleField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(variant) =
+ attr.syntax().parent().and_then(ast::Variant::cast)
+ {
+ variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
+ } else {
+ None
+ }
+ }?;
+ if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(token.file_id, &adt))) {
+ return None;
+ }
+ // Not an attribute, nor a derive, so it's either a builtin or a derive helper
+ // Try to resolve to a derive helper and downmap
+ let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
+ let id = self.db.ast_id_map(token.file_id).ast_id(&adt);
+ let helpers =
+ def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?;
+ let item = Some(adt.into());
+ let mut res = None;
+ for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) {
+ res = res.or(process_expansion_for_token(
+ &mut stack,
+ derive.as_file(),
+ item.clone(),
+ token.as_ref(),
+ ));
+ }
+ res
+ } else {
+ None
+ }
+ })()
+ .is_none();
+
+ if was_not_remapped && f(token) {
+ break;
+ }
+ }
+ }
+
+ // Note this return type is deliberate as [`find_nodes_at_offset_with_descend`] wants to stop
+ // traversing the inner iterator when it finds a node.
+ // The outer iterator is over the tokens descendants
+ // The inner iterator is the ancestors of a descendant
+ fn descend_node_at_offset(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
+ node.token_at_offset(offset)
+ .map(move |token| self.descend_into_macros(token))
+ .map(|descendants| {
+ descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
+ })
+ // re-order the tokens from token_at_offset by returning the ancestors with the smaller first nodes first
+ // See algo::ancestors_at_offset, which uses the same approach
+ .kmerge_by(|left, right| {
+ left.clone()
+ .map(|node| node.text_range().len())
+ .lt(right.clone().map(|node| node.text_range().len()))
+ })
+ }
+
+ fn original_range(&self, node: &SyntaxNode) -> FileRange {
+ let node = self.find_file(node);
+ node.original_file_range(self.db.upcast())
+ }
+
+ fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
+ let node = self.find_file(node);
+ node.original_file_range_opt(self.db.upcast())
+ }
+
+ fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
+ self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(
+ |InFile { file_id, value }| {
+ self.cache(find_root(value.syntax()), file_id);
+ value
+ },
+ )
+ }
+
+ fn original_syntax_node(&self, node: &SyntaxNode) -> Option<SyntaxNode> {
+ let InFile { file_id, .. } = self.find_file(node);
+ InFile::new(file_id, node).original_syntax_node(self.db.upcast()).map(
+ |InFile { file_id, value }| {
+ self.cache(find_root(&value), file_id);
+ value
+ },
+ )
+ }
+
+ fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
+ let root = self.parse_or_expand(src.file_id).unwrap();
+ let node = src.map(|it| it.to_node(&root));
+ node.as_ref().original_file_range(self.db.upcast())
+ }
+
+ fn token_ancestors_with_macros(
+ &self,
+ token: SyntaxToken,
+ ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
+ token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent))
+ }
+
+ fn ancestors_with_macros(
+ &self,
+ node: SyntaxNode,
+ ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
+ let node = self.find_file(&node);
+ let db = self.db.upcast();
+ iter::successors(Some(node.cloned()), move |&InFile { file_id, ref value }| {
+ match value.parent() {
+ Some(parent) => Some(InFile::new(file_id, parent)),
+ None => {
+ self.cache(value.clone(), file_id);
+ file_id.call_node(db)
+ }
+ }
+ })
+ .map(|it| it.value)
+ }
+
+ fn ancestors_at_offset_with_macros(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> impl Iterator<Item = SyntaxNode> + '_ {
+ node.token_at_offset(offset)
+ .map(|token| self.token_ancestors_with_macros(token))
+ .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
+ }
+
+ fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
+ let text = lifetime.text();
+ let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| {
+ let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?;
+ gpl.lifetime_params()
+ .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text))
+ })?;
+ let src = self.wrap_node_infile(lifetime_param);
+ ToDef::to_def(self, src)
+ }
+
+ fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
+ let text = lifetime.text();
+ let label = lifetime.syntax().ancestors().find_map(|syn| {
+ let label = match_ast! {
+ match syn {
+ ast::ForExpr(it) => it.label(),
+ ast::WhileExpr(it) => it.label(),
+ ast::LoopExpr(it) => it.label(),
+ ast::BlockExpr(it) => it.label(),
+ _ => None,
+ }
+ };
+ label.filter(|l| {
+ l.lifetime()
+ .and_then(|lt| lt.lifetime_ident_token())
+ .map_or(false, |lt| lt.text() == text)
+ })
+ })?;
+ let src = self.wrap_node_infile(label);
+ ToDef::to_def(self, src)
+ }
+
+ fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
+ let analyze = self.analyze(ty.syntax())?;
+ let ctx = body::LowerCtx::new(self.db.upcast(), analyze.file_id);
+ let ty = hir_ty::TyLoweringContext::new(self.db, &analyze.resolver)
+ .lower_ty(&crate::TypeRef::from_ast(&ctx, ty.clone()));
+ Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
+ }
+
+ fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
+ let analyze = self.analyze(path.syntax())?;
+ let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id);
+ let ctx = body::LowerCtx::with_hygiene(self.db.upcast(), &hygiene);
+ let hir_path = Path::from_src(path.clone(), &ctx)?;
+ match analyze
+ .resolver
+ .resolve_path_in_type_ns_fully(self.db.upcast(), hir_path.mod_path())?
+ {
+ TypeNs::TraitId(id) => Some(Trait { id }),
+ _ => None,
+ }
+ }
+
++ fn expr_adjustments(&self, expr: &ast::Expr) -> Option<Vec<Adjust>> {
++ let mutability = |m| match m {
++ hir_ty::Mutability::Not => Mutability::Shared,
++ hir_ty::Mutability::Mut => Mutability::Mut,
++ };
++ self.analyze(expr.syntax())?.expr_adjustments(self.db, expr).map(|it| {
++ it.iter()
++ .map(|adjust| match adjust.kind {
++ hir_ty::Adjust::NeverToAny => Adjust::NeverToAny,
++ hir_ty::Adjust::Deref(Some(hir_ty::OverloadedDeref(m))) => {
++ Adjust::Deref(Some(OverloadedDeref(mutability(m))))
++ }
++ hir_ty::Adjust::Deref(None) => Adjust::Deref(None),
++ hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::RawPtr(m)) => {
++ Adjust::Borrow(AutoBorrow::RawPtr(mutability(m)))
++ }
++ hir_ty::Adjust::Borrow(hir_ty::AutoBorrow::Ref(m)) => {
++ Adjust::Borrow(AutoBorrow::Ref(mutability(m)))
++ }
++ hir_ty::Adjust::Pointer(pc) => Adjust::Pointer(pc),
++ })
++ .collect()
++ })
+ }
+
+ fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
+ self.analyze(expr.syntax())?
+ .type_of_expr(self.db, expr)
+ .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
+ }
+
+ fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
+ self.analyze(pat.syntax())?
+ .type_of_pat(self.db, pat)
+ .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
+ }
+
+ fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
+ self.analyze(param.syntax())?.type_of_self(self.db, param)
+ }
+
+ fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
+ self.analyze(pat.syntax())
+ .and_then(|it| it.pattern_adjustments(self.db, pat))
+ .unwrap_or_default()
+ }
+
+ fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
+ self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
+ }
+
+ fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<FunctionId> {
+ self.analyze(call.syntax())?.resolve_method_call(self.db, call)
+ }
+
+ fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<FunctionId> {
+ self.analyze(await_expr.syntax())?.resolve_await_to_poll(self.db, await_expr)
+ }
+
+ fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<FunctionId> {
+ self.analyze(prefix_expr.syntax())?.resolve_prefix_expr(self.db, prefix_expr)
+ }
+
+ fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<FunctionId> {
+ self.analyze(index_expr.syntax())?.resolve_index_expr(self.db, index_expr)
+ }
+
+ fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<FunctionId> {
+ self.analyze(bin_expr.syntax())?.resolve_bin_expr(self.db, bin_expr)
+ }
+
+ fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<FunctionId> {
+ self.analyze(try_expr.syntax())?.resolve_try_expr(self.db, try_expr)
+ }
+
+ fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
+ self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
+ }
+
+ fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
+ self.analyze(field.syntax())?.resolve_field(self.db, field)
+ }
+
+ fn resolve_record_field(
+ &self,
+ field: &ast::RecordExprField,
+ ) -> Option<(Field, Option<Local>, Type)> {
+ self.analyze(field.syntax())?.resolve_record_field(self.db, field)
+ }
+
+ fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
+ self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
+ }
+
+ fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
+ let sa = self.analyze(macro_call.syntax())?;
+ let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
+ sa.resolve_macro_call(self.db, macro_call)
+ }
+
+ fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
+ let sa = match self.analyze(macro_call.syntax()) {
+ Some(it) => it,
+ None => return false,
+ };
+ let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
+ sa.is_unsafe_macro_call(self.db, macro_call)
+ }
+
+ fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
+ let item_in_file = self.wrap_node_infile(item.clone());
+ let id = self.with_ctx(|ctx| {
+ let macro_call_id = ctx.item_to_macro_call(item_in_file)?;
+ macro_call_to_macro_id(ctx, self.db.upcast(), macro_call_id)
+ })?;
+ Some(Macro { id })
+ }
+
+ fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
+ self.analyze(path.syntax())?.resolve_path(self.db, path)
+ }
+
+ fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
+ let krate = self.scope(extern_crate.syntax())?.krate();
+ let name = extern_crate.name_ref()?.as_name();
+ if name == known::SELF_PARAM {
+ return Some(krate);
+ }
+ krate
+ .dependencies(self.db)
+ .into_iter()
+ .find_map(|dep| (dep.name == name).then(|| dep.krate))
+ }
+
+ fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
+ self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit)
+ }
+
+ fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
+ self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
+ }
+
+ fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
+ self.analyze(literal.syntax())
+ .and_then(|it| it.record_literal_missing_fields(self.db, literal))
+ .unwrap_or_default()
+ }
+
+ fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
+ self.analyze(pattern.syntax())
+ .and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
+ .unwrap_or_default()
+ }
+
+ fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
+ let mut cache = self.s2d_cache.borrow_mut();
+ let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache };
+ f(&mut ctx)
+ }
+
+ fn to_module_def(&self, file: FileId) -> impl Iterator<Item = Module> {
+ self.with_ctx(|ctx| ctx.file_to_def(file)).into_iter().map(Module::from)
+ }
+
+ fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
+ self.analyze_no_infer(node).map(|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
+ db: self.db,
+ file_id,
+ resolver,
+ })
+ }
+
+ fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> Option<SemanticsScope<'db>> {
+ self.analyze_with_offset_no_infer(node, offset).map(
+ |SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
+ db: self.db,
+ file_id,
+ resolver,
+ },
+ )
+ }
+
+ fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
+ let file_id = self.db.lookup_intern_trait(def.id).id.file_id();
+ let resolver = def.id.resolver(self.db.upcast());
+ SemanticsScope { db: self.db, file_id, resolver }
+ }
+
+ fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
+ where
+ Def::Ast: AstNode,
+ {
+ let res = def.source(self.db)?;
+ self.cache(find_root(res.value.syntax()), res.file_id);
+ Some(res)
+ }
+
+ /// Returns none if the file of the node is not part of a crate.
+ fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
+ self.analyze_impl(node, None, true)
+ }
+
+ /// Returns none if the file of the node is not part of a crate.
+ fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
+ self.analyze_impl(node, None, false)
+ }
+
+ fn analyze_with_offset_no_infer(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> Option<SourceAnalyzer> {
+ self.analyze_impl(node, Some(offset), false)
+ }
+
+ fn analyze_impl(
+ &self,
+ node: &SyntaxNode,
+ offset: Option<TextSize>,
+ infer_body: bool,
+ ) -> Option<SourceAnalyzer> {
+ let _p = profile::span("Semantics::analyze_impl");
+ let node = self.find_file(node);
+
+ let container = match self.with_ctx(|ctx| ctx.find_container(node)) {
+ Some(it) => it,
+ None => return None,
+ };
+
+ let resolver = match container {
+ ChildContainer::DefWithBodyId(def) => {
+ return Some(if infer_body {
+ SourceAnalyzer::new_for_body(self.db, def, node, offset)
+ } else {
+ SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
+ })
+ }
+ ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::EnumId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::VariantId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
+ };
+ Some(SourceAnalyzer::new_for_resolver(resolver, node))
+ }
+
+ fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
+ assert!(root_node.parent().is_none());
+ let mut cache = self.cache.borrow_mut();
+ let prev = cache.insert(root_node, file_id);
+ assert!(prev == None || prev == Some(file_id))
+ }
+
+ fn assert_contains_node(&self, node: &SyntaxNode) {
+ self.find_file(node);
+ }
+
+ fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
+ let cache = self.cache.borrow();
+ cache.get(root_node).copied()
+ }
+
+ fn wrap_node_infile<N: AstNode>(&self, node: N) -> InFile<N> {
+ let InFile { file_id, .. } = self.find_file(node.syntax());
+ InFile::new(file_id, node)
+ }
+
+ /// Wraps the node in a [`InFile`] with the file id it belongs to.
+ fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> {
+ let root_node = find_root(node);
+ let file_id = self.lookup(&root_node).unwrap_or_else(|| {
+ panic!(
+ "\n\nFailed to lookup {:?} in this Semantics.\n\
+ Make sure to use only query nodes, derived from this instance of Semantics.\n\
+ root node: {:?}\n\
+ known nodes: {}\n\n",
+ node,
+ root_node,
+ self.cache
+ .borrow()
+ .keys()
+ .map(|it| format!("{:?}", it))
+ .collect::<Vec<_>>()
+ .join(", ")
+ )
+ });
+ InFile::new(file_id, node)
+ }
+
+ fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
+ method_call_expr
+ .receiver()
+ .and_then(|expr| {
+ let field_expr = match expr {
+ ast::Expr::FieldExpr(field_expr) => field_expr,
+ _ => return None,
+ };
+ let ty = self.type_of_expr(&field_expr.expr()?)?.original;
+ if !ty.is_packed(self.db) {
+ return None;
+ }
+
+ let func = self.resolve_method_call(method_call_expr).map(Function::from)?;
+ let res = match func.self_param(self.db)?.access(self.db) {
+ Access::Shared | Access::Exclusive => true,
+ Access::Owned => false,
+ };
+ Some(res)
+ })
+ .unwrap_or(false)
+ }
+
+ fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
+ ref_expr
+ .expr()
+ .and_then(|expr| {
+ let field_expr = match expr {
+ ast::Expr::FieldExpr(field_expr) => field_expr,
+ _ => return None,
+ };
+ let expr = field_expr.expr()?;
+ self.type_of_expr(&expr)
+ })
+ // Binding a reference to a packed type is possibly unsafe.
+ .map(|ty| ty.original.is_packed(self.db))
+ .unwrap_or(false)
+
+ // FIXME This needs layout computation to be correct. It will highlight
+ // more than it should with the current implementation.
+ }
+
+ fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
+ if ident_pat.ref_token().is_none() {
+ return false;
+ }
+
+ ident_pat
+ .syntax()
+ .parent()
+ .and_then(|parent| {
+ // `IdentPat` can live under `RecordPat` directly under `RecordPatField` or
+ // `RecordPatFieldList`. `RecordPatField` also lives under `RecordPatFieldList`,
+ // so this tries to lookup the `IdentPat` anywhere along that structure to the
+ // `RecordPat` so we can get the containing type.
+ let record_pat = ast::RecordPatField::cast(parent.clone())
+ .and_then(|record_pat| record_pat.syntax().parent())
+ .or_else(|| Some(parent.clone()))
+ .and_then(|parent| {
+ ast::RecordPatFieldList::cast(parent)?
+ .syntax()
+ .parent()
+ .and_then(ast::RecordPat::cast)
+ });
+
+ // If this doesn't match a `RecordPat`, fallback to a `LetStmt` to see if
+ // this is initialized from a `FieldExpr`.
+ if let Some(record_pat) = record_pat {
+ self.type_of_pat(&ast::Pat::RecordPat(record_pat))
+ } else if let Some(let_stmt) = ast::LetStmt::cast(parent) {
+ let field_expr = match let_stmt.initializer()? {
+ ast::Expr::FieldExpr(field_expr) => field_expr,
+ _ => return None,
+ };
+
+ self.type_of_expr(&field_expr.expr()?)
+ } else {
+ None
+ }
+ })
+ // Binding a reference to a packed type is possibly unsafe.
+ .map(|ty| ty.original.is_packed(self.db))
+ .unwrap_or(false)
+ }
+}
+
+fn macro_call_to_macro_id(
+ ctx: &mut SourceToDefCtx<'_, '_>,
+ db: &dyn AstDatabase,
+ macro_call_id: MacroCallId,
+) -> Option<MacroId> {
+ let loc = db.lookup_intern_macro_call(macro_call_id);
+ match loc.def.kind {
+ hir_expand::MacroDefKind::Declarative(it)
+ | hir_expand::MacroDefKind::BuiltIn(_, it)
+ | hir_expand::MacroDefKind::BuiltInAttr(_, it)
+ | hir_expand::MacroDefKind::BuiltInDerive(_, it)
+ | hir_expand::MacroDefKind::BuiltInEager(_, it) => {
+ ctx.macro_to_def(InFile::new(it.file_id, it.to_node(db)))
+ }
+ hir_expand::MacroDefKind::ProcMacro(_, _, it) => {
+ ctx.proc_macro_to_def(InFile::new(it.file_id, it.to_node(db)))
+ }
+ }
+}
+
+pub trait ToDef: AstNode + Clone {
+ type Def;
+
+ fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def>;
+}
+
+macro_rules! to_def_impls {
+ ($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
+ impl ToDef for $ast {
+ type Def = $def;
+ fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def> {
+ sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
+ }
+ }
+ )*}
+}
+
+to_def_impls![
+ (crate::Module, ast::Module, module_to_def),
+ (crate::Module, ast::SourceFile, source_file_to_def),
+ (crate::Struct, ast::Struct, struct_to_def),
+ (crate::Enum, ast::Enum, enum_to_def),
+ (crate::Union, ast::Union, union_to_def),
+ (crate::Trait, ast::Trait, trait_to_def),
+ (crate::Impl, ast::Impl, impl_to_def),
+ (crate::TypeAlias, ast::TypeAlias, type_alias_to_def),
+ (crate::Const, ast::Const, const_to_def),
+ (crate::Static, ast::Static, static_to_def),
+ (crate::Function, ast::Fn, fn_to_def),
+ (crate::Field, ast::RecordField, record_field_to_def),
+ (crate::Field, ast::TupleField, tuple_field_to_def),
+ (crate::Variant, ast::Variant, enum_variant_to_def),
+ (crate::TypeParam, ast::TypeParam, type_param_to_def),
+ (crate::LifetimeParam, ast::LifetimeParam, lifetime_param_to_def),
+ (crate::ConstParam, ast::ConstParam, const_param_to_def),
+ (crate::GenericParam, ast::GenericParam, generic_param_to_def),
+ (crate::Macro, ast::Macro, macro_to_def),
+ (crate::Local, ast::IdentPat, bind_pat_to_def),
+ (crate::Local, ast::SelfParam, self_param_to_def),
+ (crate::Label, ast::Label, label_to_def),
+ (crate::Adt, ast::Adt, adt_to_def),
+];
+
+fn find_root(node: &SyntaxNode) -> SyntaxNode {
+ node.ancestors().last().unwrap()
+}
+
+/// `SemanticScope` encapsulates the notion of a scope (the set of visible
+/// names) at a particular program point.
+///
+/// It is a bit tricky, as scopes do not really exist inside the compiler.
+/// Rather, the compiler directly computes for each reference the definition it
+/// refers to. It might transiently compute the explicit scope map while doing
+/// so, but, generally, this is not something left after the analysis.
+///
+/// However, we do very much need explicit scopes for IDE purposes --
+/// completion, at its core, lists the contents of the current scope. The notion
+/// of scope is also useful to answer questions like "what would be the meaning
+/// of this piece of code if we inserted it into this position?".
+///
+/// So `SemanticsScope` is constructed from a specific program point (a syntax
+/// node or just a raw offset) and provides access to the set of visible names
+/// on a somewhat best-effort basis.
+///
+/// Note that if you are wondering "what does this specific existing name mean?",
+/// you'd better use the `resolve_` family of methods.
+#[derive(Debug)]
+pub struct SemanticsScope<'a> {
+ pub db: &'a dyn HirDatabase,
+ file_id: HirFileId,
+ resolver: Resolver,
+}
+
+impl<'a> SemanticsScope<'a> {
+ pub fn module(&self) -> Module {
+ Module { id: self.resolver.module() }
+ }
+
+ pub fn krate(&self) -> Crate {
+ Crate { id: self.resolver.krate() }
+ }
+
+ pub(crate) fn resolver(&self) -> &Resolver {
+ &self.resolver
+ }
+
+ /// Note: `VisibleTraits` should be treated as an opaque type, passed into `Type
+ pub fn visible_traits(&self) -> VisibleTraits {
+ let resolver = &self.resolver;
+ VisibleTraits(resolver.traits_in_scope(self.db.upcast()))
+ }
+
+ pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
+ let scope = self.resolver.names_in_scope(self.db.upcast());
+ for (name, entries) in scope {
+ for entry in entries {
+ let def = match entry {
+ resolver::ScopeDef::ModuleDef(it) => ScopeDef::ModuleDef(it.into()),
+ resolver::ScopeDef::Unknown => ScopeDef::Unknown,
+ resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
+ resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
+ resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
+ resolver::ScopeDef::Local(pat_id) => match self.resolver.body_owner() {
+ Some(parent) => ScopeDef::Local(Local { parent, pat_id }),
+ None => continue,
+ },
+ resolver::ScopeDef::Label(label_id) => match self.resolver.body_owner() {
+ Some(parent) => ScopeDef::Label(Label { parent, label_id }),
+ None => continue,
+ },
+ };
+ f(name.clone(), def)
+ }
+ }
+ }
+
+ /// Resolve a path as-if it was written at the given scope. This is
+ /// necessary a heuristic, as it doesn't take hygiene into account.
+ pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> {
+ let ctx = body::LowerCtx::new(self.db.upcast(), self.file_id);
+ let path = Path::from_src(path.clone(), &ctx)?;
+ resolve_hir_path(self.db, &self.resolver, &path)
+ }
+
+ /// Iterates over associated types that may be specified after the given path (using
+ /// `Ty::Assoc` syntax).
+ pub fn assoc_type_shorthand_candidates<R>(
+ &self,
+ resolution: &PathResolution,
+ mut cb: impl FnMut(&Name, TypeAlias) -> Option<R>,
+ ) -> Option<R> {
+ let def = self.resolver.generic_def()?;
+ hir_ty::associated_type_shorthand_candidates(
+ self.db,
+ def,
+ resolution.in_type_ns()?,
+ |name, _, id| cb(name, id.into()),
+ )
+ }
+}
+
+pub struct VisibleTraits(pub FxHashSet<TraitId>);
+
+impl ops::Deref for VisibleTraits {
+ type Target = FxHashSet<TraitId>;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
--- /dev/null
- Adjust, Adjustment, AutoBorrow, InferenceResult, Interner, Substitution, Ty, TyExt, TyKind,
- TyLoweringContext,
+//! Lookup hir elements using positions in the source code. This is a lossy
+//! transformation: in general, a single source might correspond to several
+//! modules, functions, etc, due to macros, cfgs and `#[path=]` attributes on
+//! modules.
+//!
+//! So, this modules should not be used during hir construction, it exists
+//! purely for "IDE needs".
+use std::{
+ iter::{self, once},
+ sync::Arc,
+};
+
+use hir_def::{
+ body::{
+ self,
+ scope::{ExprScopes, ScopeId},
+ Body, BodySourceMap,
+ },
+ expr::{ExprId, Pat, PatId},
+ macro_id_to_def_id,
+ path::{ModPath, Path, PathKind},
+ resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs},
+ type_ref::Mutability,
+ AsMacroCall, AssocItemId, DefWithBodyId, FieldId, FunctionId, ItemContainerId, LocalFieldId,
+ Lookup, ModuleDefId, TraitId, VariantId,
+};
+use hir_expand::{
+ builtin_fn_macro::BuiltinFnLikeExpander,
+ hygiene::Hygiene,
+ mod_path::path,
+ name,
+ name::{AsName, Name},
+ HirFileId, InFile,
+};
+use hir_ty::{
+ diagnostics::{
+ record_literal_missing_fields, record_pattern_missing_fields, unsafe_expressions,
+ UnsafeExpr,
+ },
+ method_resolution::{self, lang_names_for_bin_op},
- pub(crate) fn is_implicit_reborrow(
++ Adjustment, InferenceResult, Interner, Substitution, Ty, TyExt, TyKind, TyLoweringContext,
+};
+use itertools::Itertools;
+use smallvec::SmallVec;
+use syntax::{
+ ast::{self, AstNode},
+ SyntaxKind, SyntaxNode, TextRange, TextSize,
+};
+
+use crate::{
+ db::HirDatabase, semantics::PathResolution, Adt, AssocItem, BindingMode, BuiltinAttr,
+ BuiltinType, Callable, Const, DeriveHelper, Field, Function, Local, Macro, ModuleDef, Static,
+ Struct, ToolModule, Trait, Type, TypeAlias, Variant,
+};
+
+/// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
+/// original source files. It should not be used inside the HIR itself.
+#[derive(Debug)]
+pub(crate) struct SourceAnalyzer {
+ pub(crate) file_id: HirFileId,
+ pub(crate) resolver: Resolver,
+ def: Option<(DefWithBodyId, Arc<Body>, Arc<BodySourceMap>)>,
+ infer: Option<Arc<InferenceResult>>,
+}
+
+impl SourceAnalyzer {
+ pub(crate) fn new_for_body(
+ db: &dyn HirDatabase,
+ def: DefWithBodyId,
+ node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
+ offset: Option<TextSize>,
+ ) -> SourceAnalyzer {
+ let (body, source_map) = db.body_with_source_map(def);
+ let scopes = db.expr_scopes(def);
+ let scope = match offset {
+ None => scope_for(&scopes, &source_map, node),
+ Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset),
+ };
+ let resolver = resolver_for_scope(db.upcast(), def, scope);
+ SourceAnalyzer {
+ resolver,
+ def: Some((def, body, source_map)),
+ infer: Some(db.infer(def)),
+ file_id,
+ }
+ }
+
+ pub(crate) fn new_for_body_no_infer(
+ db: &dyn HirDatabase,
+ def: DefWithBodyId,
+ node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
+ offset: Option<TextSize>,
+ ) -> SourceAnalyzer {
+ let (body, source_map) = db.body_with_source_map(def);
+ let scopes = db.expr_scopes(def);
+ let scope = match offset {
+ None => scope_for(&scopes, &source_map, node),
+ Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset),
+ };
+ let resolver = resolver_for_scope(db.upcast(), def, scope);
+ SourceAnalyzer { resolver, def: Some((def, body, source_map)), infer: None, file_id }
+ }
+
+ pub(crate) fn new_for_resolver(
+ resolver: Resolver,
+ node: InFile<&SyntaxNode>,
+ ) -> SourceAnalyzer {
+ SourceAnalyzer { resolver, def: None, infer: None, file_id: node.file_id }
+ }
+
+ fn body_source_map(&self) -> Option<&BodySourceMap> {
+ self.def.as_ref().map(|(.., source_map)| &**source_map)
+ }
+ fn body(&self) -> Option<&Body> {
+ self.def.as_ref().map(|(_, body, _)| &**body)
+ }
+
+ fn expr_id(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<ExprId> {
+ let src = match expr {
+ ast::Expr::MacroExpr(expr) => {
+ self.expand_expr(db, InFile::new(self.file_id, expr.macro_call()?.clone()))?
+ }
+ _ => InFile::new(self.file_id, expr.clone()),
+ };
+ let sm = self.body_source_map()?;
+ sm.node_expr(src.as_ref())
+ }
+
+ fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> {
+ // FIXME: macros, see `expr_id`
+ let src = InFile { file_id: self.file_id, value: pat };
+ self.body_source_map()?.node_pat(src)
+ }
+
+ fn expand_expr(
+ &self,
+ db: &dyn HirDatabase,
+ expr: InFile<ast::MacroCall>,
+ ) -> Option<InFile<ast::Expr>> {
+ let macro_file = self.body_source_map()?.node_macro_file(expr.as_ref())?;
+ let expanded = db.parse_or_expand(macro_file)?;
+ let res = if let Some(stmts) = ast::MacroStmts::cast(expanded.clone()) {
+ match stmts.expr()? {
+ ast::Expr::MacroExpr(mac) => {
+ self.expand_expr(db, InFile::new(macro_file, mac.macro_call()?))?
+ }
+ expr => InFile::new(macro_file, expr),
+ }
+ } else if let Some(call) = ast::MacroCall::cast(expanded.clone()) {
+ self.expand_expr(db, InFile::new(macro_file, call))?
+ } else {
+ InFile::new(macro_file, ast::Expr::cast(expanded)?)
+ };
+
+ Some(res)
+ }
+
- ) -> Option<Mutability> {
++ pub(crate) fn expr_adjustments(
+ &self,
+ db: &dyn HirDatabase,
+ expr: &ast::Expr,
- let adjustments = infer.expr_adjustments.get(&expr_id)?;
- adjustments.windows(2).find_map(|slice| match slice {
- &[Adjustment {kind: Adjust::Deref(None), ..}, Adjustment {kind: Adjust::Borrow(AutoBorrow::Ref(m)), ..}] => Some(match m {
- hir_ty::Mutability::Mut => Mutability::Mut,
- hir_ty::Mutability::Not => Mutability::Shared,
- }),
- _ => None,
- })
++ ) -> Option<&[Adjustment]> {
+ let expr_id = self.expr_id(db, expr)?;
+ let infer = self.infer.as_ref()?;
++ infer.expr_adjustments.get(&expr_id).map(|v| &**v)
+ }
+
+ pub(crate) fn type_of_expr(
+ &self,
+ db: &dyn HirDatabase,
+ expr: &ast::Expr,
+ ) -> Option<(Type, Option<Type>)> {
+ let expr_id = self.expr_id(db, expr)?;
+ let infer = self.infer.as_ref()?;
+ let coerced = infer
+ .expr_adjustments
+ .get(&expr_id)
+ .and_then(|adjusts| adjusts.last().map(|adjust| adjust.target.clone()));
+ let ty = infer[expr_id].clone();
+ let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
+ Some((mk_ty(ty), coerced.map(mk_ty)))
+ }
+
+ pub(crate) fn type_of_pat(
+ &self,
+ db: &dyn HirDatabase,
+ pat: &ast::Pat,
+ ) -> Option<(Type, Option<Type>)> {
+ let pat_id = self.pat_id(pat)?;
+ let infer = self.infer.as_ref()?;
+ let coerced = infer
+ .pat_adjustments
+ .get(&pat_id)
+ .and_then(|adjusts| adjusts.last().map(|adjust| adjust.clone()));
+ let ty = infer[pat_id].clone();
+ let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
+ Some((mk_ty(ty), coerced.map(mk_ty)))
+ }
+
+ pub(crate) fn type_of_self(
+ &self,
+ db: &dyn HirDatabase,
+ param: &ast::SelfParam,
+ ) -> Option<Type> {
+ let src = InFile { file_id: self.file_id, value: param };
+ let pat_id = self.body_source_map()?.node_self_param(src)?;
+ let ty = self.infer.as_ref()?[pat_id].clone();
+ Some(Type::new_with_resolver(db, &self.resolver, ty))
+ }
+
+ pub(crate) fn binding_mode_of_pat(
+ &self,
+ _db: &dyn HirDatabase,
+ pat: &ast::IdentPat,
+ ) -> Option<BindingMode> {
+ let pat_id = self.pat_id(&pat.clone().into())?;
+ let infer = self.infer.as_ref()?;
+ infer.pat_binding_modes.get(&pat_id).map(|bm| match bm {
+ hir_ty::BindingMode::Move => BindingMode::Move,
+ hir_ty::BindingMode::Ref(hir_ty::Mutability::Mut) => BindingMode::Ref(Mutability::Mut),
+ hir_ty::BindingMode::Ref(hir_ty::Mutability::Not) => {
+ BindingMode::Ref(Mutability::Shared)
+ }
+ })
+ }
+ pub(crate) fn pattern_adjustments(
+ &self,
+ db: &dyn HirDatabase,
+ pat: &ast::Pat,
+ ) -> Option<SmallVec<[Type; 1]>> {
+ let pat_id = self.pat_id(&pat)?;
+ let infer = self.infer.as_ref()?;
+ Some(
+ infer
+ .pat_adjustments
+ .get(&pat_id)?
+ .iter()
+ .map(|ty| Type::new_with_resolver(db, &self.resolver, ty.clone()))
+ .collect(),
+ )
+ }
+
+ pub(crate) fn resolve_method_call_as_callable(
+ &self,
+ db: &dyn HirDatabase,
+ call: &ast::MethodCallExpr,
+ ) -> Option<Callable> {
+ let expr_id = self.expr_id(db, &call.clone().into())?;
+ let (func, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
+ let ty = db.value_ty(func.into()).substitute(Interner, &substs);
+ let ty = Type::new_with_resolver(db, &self.resolver, ty);
+ let mut res = ty.as_callable(db)?;
+ res.is_bound_method = true;
+ Some(res)
+ }
+
+ pub(crate) fn resolve_method_call(
+ &self,
+ db: &dyn HirDatabase,
+ call: &ast::MethodCallExpr,
+ ) -> Option<FunctionId> {
+ let expr_id = self.expr_id(db, &call.clone().into())?;
+ let (f_in_trait, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
+
+ Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, substs))
+ }
+
+ pub(crate) fn resolve_await_to_poll(
+ &self,
+ db: &dyn HirDatabase,
+ await_expr: &ast::AwaitExpr,
+ ) -> Option<FunctionId> {
+ let mut ty = self.ty_of_expr(db, &await_expr.expr()?.into())?.clone();
+
+ let into_future_trait = self
+ .resolver
+ .resolve_known_trait(db.upcast(), &path![core::future::IntoFuture])
+ .map(Trait::from);
+
+ if let Some(into_future_trait) = into_future_trait {
+ let type_ = Type::new_with_resolver(db, &self.resolver, ty.clone());
+ if type_.impls_trait(db, into_future_trait, &[]) {
+ let items = into_future_trait.items(db);
+ let into_future_type = items.into_iter().find_map(|item| match item {
+ AssocItem::TypeAlias(alias)
+ if alias.name(db) == hir_expand::name![IntoFuture] =>
+ {
+ Some(alias)
+ }
+ _ => None,
+ })?;
+ let future_trait = type_.normalize_trait_assoc_type(db, &[], into_future_type)?;
+ ty = future_trait.ty;
+ }
+ }
+
+ let future_trait = db
+ .lang_item(self.resolver.krate(), hir_expand::name![future_trait].to_smol_str())?
+ .as_trait()?;
+ let poll_fn = db
+ .lang_item(self.resolver.krate(), hir_expand::name![poll].to_smol_str())?
+ .as_function()?;
+ // HACK: subst for `poll()` coincides with that for `Future` because `poll()` itself
+ // doesn't have any generic parameters, so we skip building another subst for `poll()`.
+ let substs = hir_ty::TyBuilder::subst_for_def(db, future_trait, None).push(ty).build();
+ Some(self.resolve_impl_method_or_trait_def(db, poll_fn, substs))
+ }
+
+ pub(crate) fn resolve_prefix_expr(
+ &self,
+ db: &dyn HirDatabase,
+ prefix_expr: &ast::PrefixExpr,
+ ) -> Option<FunctionId> {
+ let lang_item_name = match prefix_expr.op_kind()? {
+ ast::UnaryOp::Deref => name![deref],
+ ast::UnaryOp::Not => name![not],
+ ast::UnaryOp::Neg => name![neg],
+ };
+ let ty = self.ty_of_expr(db, &prefix_expr.expr()?.into())?;
+
+ let (op_trait, op_fn) = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?;
+ // HACK: subst for all methods coincides with that for their trait because the methods
+ // don't have any generic parameters, so we skip building another subst for the methods.
+ let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None).push(ty.clone()).build();
+
+ Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
+ }
+
+ pub(crate) fn resolve_index_expr(
+ &self,
+ db: &dyn HirDatabase,
+ index_expr: &ast::IndexExpr,
+ ) -> Option<FunctionId> {
+ let base_ty = self.ty_of_expr(db, &index_expr.base()?.into())?;
+ let index_ty = self.ty_of_expr(db, &index_expr.index()?.into())?;
+
+ let lang_item_name = name![index];
+
+ let (op_trait, op_fn) = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?;
+ // HACK: subst for all methods coincides with that for their trait because the methods
+ // don't have any generic parameters, so we skip building another subst for the methods.
+ let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None)
+ .push(base_ty.clone())
+ .push(index_ty.clone())
+ .build();
+ Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
+ }
+
+ pub(crate) fn resolve_bin_expr(
+ &self,
+ db: &dyn HirDatabase,
+ binop_expr: &ast::BinExpr,
+ ) -> Option<FunctionId> {
+ let op = binop_expr.op_kind()?;
+ let lhs = self.ty_of_expr(db, &binop_expr.lhs()?.into())?;
+ let rhs = self.ty_of_expr(db, &binop_expr.rhs()?.into())?;
+
+ let (op_trait, op_fn) = lang_names_for_bin_op(op)
+ .and_then(|(name, lang_item)| self.lang_trait_fn(db, &lang_item, &name))?;
+ // HACK: subst for `index()` coincides with that for `Index` because `index()` itself
+ // doesn't have any generic parameters, so we skip building another subst for `index()`.
+ let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None)
+ .push(lhs.clone())
+ .push(rhs.clone())
+ .build();
+
+ Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
+ }
+
+ pub(crate) fn resolve_try_expr(
+ &self,
+ db: &dyn HirDatabase,
+ try_expr: &ast::TryExpr,
+ ) -> Option<FunctionId> {
+ let ty = self.ty_of_expr(db, &try_expr.expr()?.into())?;
+
+ let op_fn =
+ db.lang_item(self.resolver.krate(), name![branch].to_smol_str())?.as_function()?;
+ let op_trait = match op_fn.lookup(db.upcast()).container {
+ ItemContainerId::TraitId(id) => id,
+ _ => return None,
+ };
+ // HACK: subst for `branch()` coincides with that for `Try` because `branch()` itself
+ // doesn't have any generic parameters, so we skip building another subst for `branch()`.
+ let substs = hir_ty::TyBuilder::subst_for_def(db, op_trait, None).push(ty.clone()).build();
+
+ Some(self.resolve_impl_method_or_trait_def(db, op_fn, substs))
+ }
+
+ pub(crate) fn resolve_field(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::FieldExpr,
+ ) -> Option<Field> {
+ let expr_id = self.expr_id(db, &field.clone().into())?;
+ self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
+ }
+
+ pub(crate) fn resolve_record_field(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::RecordExprField,
+ ) -> Option<(Field, Option<Local>, Type)> {
+ let record_expr = ast::RecordExpr::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
+ let expr = ast::Expr::from(record_expr);
+ let expr_id = self.body_source_map()?.node_expr(InFile::new(self.file_id, &expr))?;
+
+ let local_name = field.field_name()?.as_name();
+ let local = if field.name_ref().is_some() {
+ None
+ } else {
+ // Shorthand syntax, resolve to the local
+ let path = ModPath::from_segments(PathKind::Plain, once(local_name.clone()));
+ match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) {
+ Some(ValueNs::LocalBinding(pat_id)) => {
+ Some(Local { pat_id, parent: self.resolver.body_owner()? })
+ }
+ _ => None,
+ }
+ };
+ let (_, subst) = self.infer.as_ref()?.type_of_expr.get(expr_id)?.as_adt()?;
+ let variant = self.infer.as_ref()?.variant_resolution_for_expr(expr_id)?;
+ let variant_data = variant.variant_data(db.upcast());
+ let field = FieldId { parent: variant, local_id: variant_data.field(&local_name)? };
+ let field_ty =
+ db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
+ Some((field.into(), local, Type::new_with_resolver(db, &self.resolver, field_ty)))
+ }
+
+ pub(crate) fn resolve_record_pat_field(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::RecordPatField,
+ ) -> Option<Field> {
+ let field_name = field.field_name()?.as_name();
+ let record_pat = ast::RecordPat::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
+ let pat_id = self.pat_id(&record_pat.into())?;
+ let variant = self.infer.as_ref()?.variant_resolution_for_pat(pat_id)?;
+ let variant_data = variant.variant_data(db.upcast());
+ let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? };
+ Some(field.into())
+ }
+
+ pub(crate) fn resolve_macro_call(
+ &self,
+ db: &dyn HirDatabase,
+ macro_call: InFile<&ast::MacroCall>,
+ ) -> Option<Macro> {
+ let ctx = body::LowerCtx::new(db.upcast(), macro_call.file_id);
+ let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?;
+ self.resolver.resolve_path_as_macro(db.upcast(), path.mod_path()).map(|it| it.into())
+ }
+
+ pub(crate) fn resolve_bind_pat_to_const(
+ &self,
+ db: &dyn HirDatabase,
+ pat: &ast::IdentPat,
+ ) -> Option<ModuleDef> {
+ let pat_id = self.pat_id(&pat.clone().into())?;
+ let body = self.body()?;
+ let path = match &body[pat_id] {
+ Pat::Path(path) => path,
+ _ => return None,
+ };
+ let res = resolve_hir_path(db, &self.resolver, path)?;
+ match res {
+ PathResolution::Def(def) => Some(def),
+ _ => None,
+ }
+ }
+
+ pub(crate) fn resolve_path(
+ &self,
+ db: &dyn HirDatabase,
+ path: &ast::Path,
+ ) -> Option<PathResolution> {
+ let parent = path.syntax().parent();
+ let parent = || parent.clone();
+
+ let mut prefer_value_ns = false;
+ let resolved = (|| {
+ let infer = self.infer.as_deref()?;
+ if let Some(path_expr) = parent().and_then(ast::PathExpr::cast) {
+ let expr_id = self.expr_id(db, &path_expr.into())?;
+ if let Some(assoc) = infer.assoc_resolutions_for_expr(expr_id) {
+ let assoc = match assoc {
+ AssocItemId::FunctionId(f_in_trait) => {
+ match infer.type_of_expr.get(expr_id) {
+ None => assoc,
+ Some(func_ty) => {
+ if let TyKind::FnDef(_fn_def, subs) = func_ty.kind(Interner) {
+ self.resolve_impl_method_or_trait_def(
+ db,
+ f_in_trait,
+ subs.clone(),
+ )
+ .into()
+ } else {
+ assoc
+ }
+ }
+ }
+ }
+
+ _ => assoc,
+ };
+
+ return Some(PathResolution::Def(AssocItem::from(assoc).into()));
+ }
+ if let Some(VariantId::EnumVariantId(variant)) =
+ infer.variant_resolution_for_expr(expr_id)
+ {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
+ prefer_value_ns = true;
+ } else if let Some(path_pat) = parent().and_then(ast::PathPat::cast) {
+ let pat_id = self.pat_id(&path_pat.into())?;
+ if let Some(assoc) = infer.assoc_resolutions_for_pat(pat_id) {
+ return Some(PathResolution::Def(AssocItem::from(assoc).into()));
+ }
+ if let Some(VariantId::EnumVariantId(variant)) =
+ infer.variant_resolution_for_pat(pat_id)
+ {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
+ } else if let Some(rec_lit) = parent().and_then(ast::RecordExpr::cast) {
+ let expr_id = self.expr_id(db, &rec_lit.into())?;
+ if let Some(VariantId::EnumVariantId(variant)) =
+ infer.variant_resolution_for_expr(expr_id)
+ {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
+ } else {
+ let record_pat = parent().and_then(ast::RecordPat::cast).map(ast::Pat::from);
+ let tuple_struct_pat =
+ || parent().and_then(ast::TupleStructPat::cast).map(ast::Pat::from);
+ if let Some(pat) = record_pat.or_else(tuple_struct_pat) {
+ let pat_id = self.pat_id(&pat)?;
+ let variant_res_for_pat = infer.variant_resolution_for_pat(pat_id);
+ if let Some(VariantId::EnumVariantId(variant)) = variant_res_for_pat {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
+ }
+ }
+ None
+ })();
+ if let Some(_) = resolved {
+ return resolved;
+ }
+
+ // This must be a normal source file rather than macro file.
+ let hygiene = Hygiene::new(db.upcast(), self.file_id);
+ let ctx = body::LowerCtx::with_hygiene(db.upcast(), &hygiene);
+ let hir_path = Path::from_src(path.clone(), &ctx)?;
+
+ // Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
+ // trying to resolve foo::bar.
+ if let Some(use_tree) = parent().and_then(ast::UseTree::cast) {
+ if use_tree.coloncolon_token().is_some() {
+ return resolve_hir_path_qualifier(db, &self.resolver, &hir_path);
+ }
+ }
+
+ let meta_path = path
+ .syntax()
+ .ancestors()
+ .take_while(|it| {
+ let kind = it.kind();
+ ast::Path::can_cast(kind) || ast::Meta::can_cast(kind)
+ })
+ .last()
+ .and_then(ast::Meta::cast);
+
+ // Case where path is a qualifier of another path, e.g. foo::bar::Baz where we are
+ // trying to resolve foo::bar.
+ if path.parent_path().is_some() {
+ return match resolve_hir_path_qualifier(db, &self.resolver, &hir_path) {
+ None if meta_path.is_some() => {
+ path.first_segment().and_then(|it| it.name_ref()).and_then(|name_ref| {
+ ToolModule::by_name(db, self.resolver.krate().into(), &name_ref.text())
+ .map(PathResolution::ToolModule)
+ })
+ }
+ res => res,
+ };
+ } else if let Some(meta_path) = meta_path {
+ // Case where we are resolving the final path segment of a path in an attribute
+ // in this case we have to check for inert/builtin attributes and tools and prioritize
+ // resolution of attributes over other namespaces
+ if let Some(name_ref) = path.as_single_name_ref() {
+ let builtin =
+ BuiltinAttr::by_name(db, self.resolver.krate().into(), &name_ref.text());
+ if let Some(_) = builtin {
+ return builtin.map(PathResolution::BuiltinAttr);
+ }
+
+ if let Some(attr) = meta_path.parent_attr() {
+ let adt = if let Some(field) =
+ attr.syntax().parent().and_then(ast::RecordField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(field) =
+ attr.syntax().parent().and_then(ast::TupleField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(variant) =
+ attr.syntax().parent().and_then(ast::Variant::cast)
+ {
+ variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
+ } else {
+ None
+ };
+ if let Some(adt) = adt {
+ let ast_id = db.ast_id_map(self.file_id).ast_id(&adt);
+ if let Some(helpers) = self
+ .resolver
+ .def_map()
+ .derive_helpers_in_scope(InFile::new(self.file_id, ast_id))
+ {
+ // FIXME: Multiple derives can have the same helper
+ let name_ref = name_ref.as_name();
+ for (macro_id, mut helpers) in
+ helpers.iter().group_by(|(_, macro_id, ..)| macro_id).into_iter()
+ {
+ if let Some(idx) = helpers.position(|(name, ..)| *name == name_ref)
+ {
+ return Some(PathResolution::DeriveHelper(DeriveHelper {
+ derive: *macro_id,
+ idx,
+ }));
+ }
+ }
+ }
+ }
+ }
+ }
+ return match resolve_hir_path_as_macro(db, &self.resolver, &hir_path) {
+ Some(m) => Some(PathResolution::Def(ModuleDef::Macro(m))),
+ // this labels any path that starts with a tool module as the tool itself, this is technically wrong
+ // but there is no benefit in differentiating these two cases for the time being
+ None => path.first_segment().and_then(|it| it.name_ref()).and_then(|name_ref| {
+ ToolModule::by_name(db, self.resolver.krate().into(), &name_ref.text())
+ .map(PathResolution::ToolModule)
+ }),
+ };
+ }
+ if parent().map_or(false, |it| ast::Visibility::can_cast(it.kind())) {
+ resolve_hir_path_qualifier(db, &self.resolver, &hir_path)
+ } else {
+ resolve_hir_path_(db, &self.resolver, &hir_path, prefer_value_ns)
+ }
+ }
+
+ pub(crate) fn record_literal_missing_fields(
+ &self,
+ db: &dyn HirDatabase,
+ literal: &ast::RecordExpr,
+ ) -> Option<Vec<(Field, Type)>> {
+ let body = self.body()?;
+ let infer = self.infer.as_ref()?;
+
+ let expr_id = self.expr_id(db, &literal.clone().into())?;
+ let substs = infer.type_of_expr[expr_id].as_adt()?.1;
+
+ let (variant, missing_fields, _exhaustive) =
+ record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?;
+ let res = self.missing_fields(db, substs, variant, missing_fields);
+ Some(res)
+ }
+
+ pub(crate) fn record_pattern_missing_fields(
+ &self,
+ db: &dyn HirDatabase,
+ pattern: &ast::RecordPat,
+ ) -> Option<Vec<(Field, Type)>> {
+ let body = self.body()?;
+ let infer = self.infer.as_ref()?;
+
+ let pat_id = self.pat_id(&pattern.clone().into())?;
+ let substs = infer.type_of_pat[pat_id].as_adt()?.1;
+
+ let (variant, missing_fields, _exhaustive) =
+ record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
+ let res = self.missing_fields(db, substs, variant, missing_fields);
+ Some(res)
+ }
+
+ fn missing_fields(
+ &self,
+ db: &dyn HirDatabase,
+ substs: &Substitution,
+ variant: VariantId,
+ missing_fields: Vec<LocalFieldId>,
+ ) -> Vec<(Field, Type)> {
+ let field_types = db.field_types(variant);
+
+ missing_fields
+ .into_iter()
+ .map(|local_id| {
+ let field = FieldId { parent: variant, local_id };
+ let ty = field_types[local_id].clone().substitute(Interner, substs);
+ (field.into(), Type::new_with_resolver_inner(db, &self.resolver, ty))
+ })
+ .collect()
+ }
+
+ pub(crate) fn expand(
+ &self,
+ db: &dyn HirDatabase,
+ macro_call: InFile<&ast::MacroCall>,
+ ) -> Option<HirFileId> {
+ let krate = self.resolver.krate();
+ let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
+ self.resolver
+ .resolve_path_as_macro(db.upcast(), &path)
+ .map(|it| macro_id_to_def_id(db.upcast(), it))
+ })?;
+ Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
+ }
+
+ pub(crate) fn resolve_variant(
+ &self,
+ db: &dyn HirDatabase,
+ record_lit: ast::RecordExpr,
+ ) -> Option<VariantId> {
+ let infer = self.infer.as_ref()?;
+ let expr_id = self.expr_id(db, &record_lit.into())?;
+ infer.variant_resolution_for_expr(expr_id)
+ }
+
+ pub(crate) fn is_unsafe_macro_call(
+ &self,
+ db: &dyn HirDatabase,
+ macro_call: InFile<&ast::MacroCall>,
+ ) -> bool {
+ // check for asm/global_asm
+ if let Some(mac) = self.resolve_macro_call(db, macro_call) {
+ let ex = match mac.id {
+ hir_def::MacroId::Macro2Id(it) => it.lookup(db.upcast()).expander,
+ hir_def::MacroId::MacroRulesId(it) => it.lookup(db.upcast()).expander,
+ _ => hir_def::MacroExpander::Declarative,
+ };
+ match ex {
+ hir_def::MacroExpander::BuiltIn(e)
+ if e == BuiltinFnLikeExpander::Asm || e == BuiltinFnLikeExpander::GlobalAsm =>
+ {
+ return true
+ }
+ _ => (),
+ }
+ }
+ let macro_expr = match macro_call
+ .map(|it| it.syntax().parent().and_then(ast::MacroExpr::cast))
+ .transpose()
+ {
+ Some(it) => it,
+ None => return false,
+ };
+
+ if let (Some((def, body, sm)), Some(infer)) = (&self.def, &self.infer) {
+ if let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr.as_ref()) {
+ let mut is_unsafe = false;
+ unsafe_expressions(
+ db,
+ infer,
+ *def,
+ body,
+ expanded_expr,
+ &mut |UnsafeExpr { inside_unsafe_block, .. }| is_unsafe |= !inside_unsafe_block,
+ );
+ return is_unsafe;
+ }
+ }
+ false
+ }
+
+ fn resolve_impl_method_or_trait_def(
+ &self,
+ db: &dyn HirDatabase,
+ func: FunctionId,
+ substs: Substitution,
+ ) -> FunctionId {
+ let krate = self.resolver.krate();
+ let owner = match self.resolver.body_owner() {
+ Some(it) => it,
+ None => return func,
+ };
+ let env = owner.as_generic_def_id().map_or_else(
+ || Arc::new(hir_ty::TraitEnvironment::empty(krate)),
+ |d| db.trait_environment(d),
+ );
+ method_resolution::lookup_impl_method(db, env, func, substs)
+ }
+
+ fn lang_trait_fn(
+ &self,
+ db: &dyn HirDatabase,
+ lang_trait: &Name,
+ method_name: &Name,
+ ) -> Option<(TraitId, FunctionId)> {
+ let trait_id = db.lang_item(self.resolver.krate(), lang_trait.to_smol_str())?.as_trait()?;
+ let fn_id = db.trait_data(trait_id).method_by_name(method_name)?;
+ Some((trait_id, fn_id))
+ }
+
+ fn ty_of_expr(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<&Ty> {
+ self.infer.as_ref()?.type_of_expr.get(self.expr_id(db, &expr)?)
+ }
+}
+
+fn scope_for(
+ scopes: &ExprScopes,
+ source_map: &BodySourceMap,
+ node: InFile<&SyntaxNode>,
+) -> Option<ScopeId> {
+ node.value
+ .ancestors()
+ .filter_map(ast::Expr::cast)
+ .filter_map(|it| source_map.node_expr(InFile::new(node.file_id, &it)))
+ .find_map(|it| scopes.scope_for(it))
+}
+
+fn scope_for_offset(
+ db: &dyn HirDatabase,
+ scopes: &ExprScopes,
+ source_map: &BodySourceMap,
+ from_file: HirFileId,
+ offset: TextSize,
+) -> Option<ScopeId> {
+ scopes
+ .scope_by_expr()
+ .iter()
+ .filter_map(|(id, scope)| {
+ let InFile { file_id, value } = source_map.expr_syntax(*id).ok()?;
+ if from_file == file_id {
+ return Some((value.text_range(), scope));
+ }
+
+ // FIXME handle attribute expansion
+ let source = iter::successors(file_id.call_node(db.upcast()), |it| {
+ it.file_id.call_node(db.upcast())
+ })
+ .find(|it| it.file_id == from_file)
+ .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
+ Some((source.value.text_range(), scope))
+ })
+ .filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end())
+ // find containing scope
+ .min_by_key(|(expr_range, _scope)| expr_range.len())
+ .map(|(expr_range, scope)| {
+ adjust(db, scopes, source_map, expr_range, from_file, offset).unwrap_or(*scope)
+ })
+}
+
+// XXX: during completion, cursor might be outside of any particular
+// expression. Try to figure out the correct scope...
+fn adjust(
+ db: &dyn HirDatabase,
+ scopes: &ExprScopes,
+ source_map: &BodySourceMap,
+ expr_range: TextRange,
+ from_file: HirFileId,
+ offset: TextSize,
+) -> Option<ScopeId> {
+ let child_scopes = scopes
+ .scope_by_expr()
+ .iter()
+ .filter_map(|(id, scope)| {
+ let source = source_map.expr_syntax(*id).ok()?;
+ // FIXME: correctly handle macro expansion
+ if source.file_id != from_file {
+ return None;
+ }
+ let root = source.file_syntax(db.upcast());
+ let node = source.value.to_node(&root);
+ Some((node.syntax().text_range(), scope))
+ })
+ .filter(|&(range, _)| {
+ range.start() <= offset && expr_range.contains_range(range) && range != expr_range
+ });
+
+ child_scopes
+ .max_by(|&(r1, _), &(r2, _)| {
+ if r1.contains_range(r2) {
+ std::cmp::Ordering::Greater
+ } else if r2.contains_range(r1) {
+ std::cmp::Ordering::Less
+ } else {
+ r1.start().cmp(&r2.start())
+ }
+ })
+ .map(|(_ptr, scope)| *scope)
+}
+
+#[inline]
+pub(crate) fn resolve_hir_path(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+) -> Option<PathResolution> {
+ resolve_hir_path_(db, resolver, path, false)
+}
+
+#[inline]
+pub(crate) fn resolve_hir_path_as_macro(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+) -> Option<Macro> {
+ resolver.resolve_path_as_macro(db.upcast(), path.mod_path()).map(Into::into)
+}
+
+fn resolve_hir_path_(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+ prefer_value_ns: bool,
+) -> Option<PathResolution> {
+ let types = || {
+ let (ty, unresolved) = match path.type_anchor() {
+ Some(type_ref) => {
+ let (_, res) = TyLoweringContext::new(db, resolver).lower_ty_ext(type_ref);
+ res.map(|ty_ns| (ty_ns, path.segments().first()))
+ }
+ None => {
+ let (ty, remaining) =
+ resolver.resolve_path_in_type_ns(db.upcast(), path.mod_path())?;
+ match remaining {
+ Some(remaining) if remaining > 1 => {
+ if remaining + 1 == path.segments().len() {
+ Some((ty, path.segments().last()))
+ } else {
+ None
+ }
+ }
+ _ => Some((ty, path.segments().get(1))),
+ }
+ }
+ }?;
+
+ // If we are in a TypeNs for a Trait, and we have an unresolved name, try to resolve it as a type
+ // within the trait's associated types.
+ if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) {
+ if let Some(type_alias_id) =
+ db.trait_data(trait_id).associated_type_by_name(unresolved.name)
+ {
+ return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into()));
+ }
+ }
+
+ let res = match ty {
+ TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
+ TypeNs::GenericParam(id) => PathResolution::TypeParam(id.into()),
+ TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
+ PathResolution::Def(Adt::from(it).into())
+ }
+ TypeNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
+ TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
+ TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
+ TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
+ };
+ match unresolved {
+ Some(unresolved) => resolver
+ .generic_def()
+ .and_then(|def| {
+ hir_ty::associated_type_shorthand_candidates(
+ db,
+ def,
+ res.in_type_ns()?,
+ |name, _, id| (name == unresolved.name).then(|| id),
+ )
+ })
+ .map(TypeAlias::from)
+ .map(Into::into)
+ .map(PathResolution::Def),
+ None => Some(res),
+ }
+ };
+
+ let body_owner = resolver.body_owner();
+ let values = || {
+ resolver.resolve_path_in_value_ns_fully(db.upcast(), path.mod_path()).and_then(|val| {
+ let res = match val {
+ ValueNs::LocalBinding(pat_id) => {
+ let var = Local { parent: body_owner?, pat_id };
+ PathResolution::Local(var)
+ }
+ ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
+ ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
+ ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
+ ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
+ ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
+ ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()),
+ ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()),
+ };
+ Some(res)
+ })
+ };
+
+ let items = || {
+ resolver
+ .resolve_module_path_in_items(db.upcast(), path.mod_path())
+ .take_types()
+ .map(|it| PathResolution::Def(it.into()))
+ };
+
+ let macros = || {
+ resolver
+ .resolve_path_as_macro(db.upcast(), path.mod_path())
+ .map(|def| PathResolution::Def(ModuleDef::Macro(def.into())))
+ };
+
+ if prefer_value_ns { values().or_else(types) } else { types().or_else(values) }
+ .or_else(items)
+ .or_else(macros)
+}
+
+/// Resolves a path where we know it is a qualifier of another path.
+///
+/// For example, if we have:
+/// ```
+/// mod my {
+/// pub mod foo {
+/// struct Bar;
+/// }
+///
+/// pub fn foo() {}
+/// }
+/// ```
+/// then we know that `foo` in `my::foo::Bar` refers to the module, not the function.
+fn resolve_hir_path_qualifier(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+) -> Option<PathResolution> {
+ resolver
+ .resolve_path_in_type_ns_fully(db.upcast(), path.mod_path())
+ .map(|ty| match ty {
+ TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
+ TypeNs::GenericParam(id) => PathResolution::TypeParam(id.into()),
+ TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
+ PathResolution::Def(Adt::from(it).into())
+ }
+ TypeNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
+ TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
+ TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
+ TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
+ })
+ .or_else(|| {
+ resolver
+ .resolve_module_path_in_items(db.upcast(), path.mod_path())
+ .take_types()
+ .map(|it| PathResolution::Def(it.into()))
+ })
+}
--- /dev/null
+[package]
+name = "ide-assists"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+
+itertools = "0.10.5"
+either = "1.7.0"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+text-edit = { path = "../text-edit", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+ide-db = { path = "../ide-db", version = "0.0.0" }
+hir = { path = "../hir", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
+sourcegen = { path = "../sourcegen" }
+expect-test = "1.4.0"
+
+[features]
+in-rust-tree = []
--- /dev/null
+use hir::HasSource;
+use ide_db::{
+ syntax_helpers::insert_whitespace_into_node::insert_ws_into, traits::resolve_target_trait,
+};
+use syntax::ast::{self, make, AstNode};
+
+use crate::{
+ assist_context::{AssistContext, Assists},
+ utils::{
+ add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body, render_snippet,
+ Cursor, DefaultMethods,
+ },
+ AssistId, AssistKind,
+};
+
+// Assist: add_impl_missing_members
+//
+// Adds scaffold for required impl members.
+//
+// ```
+// trait Trait<T> {
+// type X;
+// fn foo(&self) -> T;
+// fn bar(&self) {}
+// }
+//
+// impl Trait<u32> for () {$0
+//
+// }
+// ```
+// ->
+// ```
+// trait Trait<T> {
+// type X;
+// fn foo(&self) -> T;
+// fn bar(&self) {}
+// }
+//
+// impl Trait<u32> for () {
+// $0type X;
+//
+// fn foo(&self) -> u32 {
+// todo!()
+// }
+// }
+// ```
+pub(crate) fn add_missing_impl_members(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ add_missing_impl_members_inner(
+ acc,
+ ctx,
+ DefaultMethods::No,
+ "add_impl_missing_members",
+ "Implement missing members",
+ )
+}
+
+// Assist: add_impl_default_members
+//
+// Adds scaffold for overriding default impl members.
+//
+// ```
+// trait Trait {
+// type X;
+// fn foo(&self);
+// fn bar(&self) {}
+// }
+//
+// impl Trait for () {
+// type X = ();
+// fn foo(&self) {}$0
+// }
+// ```
+// ->
+// ```
+// trait Trait {
+// type X;
+// fn foo(&self);
+// fn bar(&self) {}
+// }
+//
+// impl Trait for () {
+// type X = ();
+// fn foo(&self) {}
+//
+// $0fn bar(&self) {}
+// }
+// ```
+pub(crate) fn add_missing_default_members(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ add_missing_impl_members_inner(
+ acc,
+ ctx,
+ DefaultMethods::Only,
+ "add_impl_default_members",
+ "Implement default members",
+ )
+}
+
+fn add_missing_impl_members_inner(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+ mode: DefaultMethods,
+ assist_id: &'static str,
+ label: &'static str,
+) -> Option<()> {
+ let _p = profile::span("add_missing_impl_members_inner");
+ let impl_def = ctx.find_node_at_offset::<ast::Impl>()?;
+ let target_scope = ctx.sema.scope(impl_def.syntax())?;
+ let trait_ = resolve_target_trait(&ctx.sema, &impl_def)?;
+
+ let missing_items = filter_assoc_items(
+ &ctx.sema,
+ &ide_db::traits::get_missing_assoc_items(&ctx.sema, &impl_def),
+ mode,
+ );
+
+ if missing_items.is_empty() {
+ return None;
+ }
+
+ let target = impl_def.syntax().text_range();
+ acc.add(AssistId(assist_id, AssistKind::QuickFix), label, target, |builder| {
+ let missing_items = missing_items
+ .into_iter()
+ .map(|it| {
+ if ctx.sema.hir_file_for(it.syntax()).is_macro() {
+ if let Some(it) = ast::AssocItem::cast(insert_ws_into(it.syntax().clone())) {
+ return it;
+ }
+ }
+ it.clone_for_update()
+ })
+ .collect();
+ let (new_impl_def, first_new_item) = add_trait_assoc_items_to_impl(
+ &ctx.sema,
+ missing_items,
+ trait_,
+ impl_def.clone(),
+ target_scope,
+ );
+ match ctx.config.snippet_cap {
+ None => builder.replace(target, new_impl_def.to_string()),
+ Some(cap) => {
+ let mut cursor = Cursor::Before(first_new_item.syntax());
+ let placeholder;
+ if let DefaultMethods::No = mode {
+ if let ast::AssocItem::Fn(func) = &first_new_item {
+ if try_gen_trait_body(ctx, func, &trait_, &impl_def).is_none() {
+ if let Some(m) =
+ func.syntax().descendants().find_map(ast::MacroCall::cast)
+ {
+ if m.syntax().text() == "todo!()" {
+ placeholder = m;
+ cursor = Cursor::Replace(placeholder.syntax());
+ }
+ }
+ }
+ }
+ }
+ builder.replace_snippet(
+ cap,
+ target,
+ render_snippet(cap, new_impl_def.syntax(), cursor),
+ )
+ }
+ };
+ })
+}
+
+fn try_gen_trait_body(
+ ctx: &AssistContext<'_>,
+ func: &ast::Fn,
+ trait_: &hir::Trait,
+ impl_def: &ast::Impl,
+) -> Option<()> {
+ let trait_path = make::ext::ident_path(&trait_.name(ctx.db()).to_string());
+ let hir_ty = ctx.sema.resolve_type(&impl_def.self_ty()?)?;
+ let adt = hir_ty.as_adt()?.source(ctx.db())?;
+ gen_trait_fn_body(func, &trait_path, &adt.value)
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn test_add_missing_impl_members() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo {
+ type Output;
+
+ const CONST: usize = 42;
+ const CONST_2: i32;
+
+ fn foo(&self);
+ fn bar(&self);
+ fn baz(&self);
+}
+
+struct S;
+
+impl Foo for S {
+ fn bar(&self) {}
+$0
+}"#,
+ r#"
+trait Foo {
+ type Output;
+
+ const CONST: usize = 42;
+ const CONST_2: i32;
+
+ fn foo(&self);
+ fn bar(&self);
+ fn baz(&self);
+}
+
+struct S;
+
+impl Foo for S {
+ fn bar(&self) {}
+
+ $0type Output;
+
+ const CONST_2: i32;
+
+ fn foo(&self) {
+ todo!()
+ }
+
+ fn baz(&self) {
+ todo!()
+ }
+
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_copied_overriden_members() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo {
+ fn foo(&self);
+ fn bar(&self) -> bool { true }
+ fn baz(&self) -> u32 { 42 }
+}
+
+struct S;
+
+impl Foo for S {
+ fn bar(&self) {}
+$0
+}"#,
+ r#"
+trait Foo {
+ fn foo(&self);
+ fn bar(&self) -> bool { true }
+ fn baz(&self) -> u32 { 42 }
+}
+
+struct S;
+
+impl Foo for S {
+ fn bar(&self) {}
+
+ fn foo(&self) {
+ ${0:todo!()}
+ }
+
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_empty_impl_def() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo { fn foo(&self); }
+struct S;
+impl Foo for S { $0 }"#,
+ r#"
+trait Foo { fn foo(&self); }
+struct S;
+impl Foo for S {
+ fn foo(&self) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_impl_def_without_braces() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo { fn foo(&self); }
+struct S;
+impl Foo for S$0"#,
+ r#"
+trait Foo { fn foo(&self); }
+struct S;
+impl Foo for S {
+ fn foo(&self) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn fill_in_type_params_1() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo<T> { fn foo(&self, t: T) -> &T; }
+struct S;
+impl Foo<u32> for S { $0 }"#,
+ r#"
+trait Foo<T> { fn foo(&self, t: T) -> &T; }
+struct S;
+impl Foo<u32> for S {
+ fn foo(&self, t: u32) -> &u32 {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn fill_in_type_params_2() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo<T> { fn foo(&self, t: T) -> &T; }
+struct S;
+impl<U> Foo<U> for S { $0 }"#,
+ r#"
+trait Foo<T> { fn foo(&self, t: T) -> &T; }
+struct S;
+impl<U> Foo<U> for S {
+ fn foo(&self, t: U) -> &U {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_cursor_after_empty_impl_def() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo { fn foo(&self); }
+struct S;
+impl Foo for S {}$0"#,
+ r#"
+trait Foo { fn foo(&self); }
+struct S;
+impl Foo for S {
+ fn foo(&self) {
+ ${0:todo!()}
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_qualify_path_1() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub struct Bar;
+ pub trait Foo { fn foo(&self, bar: Bar); }
+}
+struct S;
+impl foo::Foo for S { $0 }"#,
+ r#"
+mod foo {
+ pub struct Bar;
+ pub trait Foo { fn foo(&self, bar: Bar); }
+}
+struct S;
+impl foo::Foo for S {
+ fn foo(&self, bar: foo::Bar) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_qualify_path_2() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub mod bar {
+ pub struct Bar;
+ pub trait Foo { fn foo(&self, bar: Bar); }
+ }
+}
+
+use foo::bar;
+
+struct S;
+impl bar::Foo for S { $0 }"#,
+ r#"
+mod foo {
+ pub mod bar {
+ pub struct Bar;
+ pub trait Foo { fn foo(&self, bar: Bar); }
+ }
+}
+
+use foo::bar;
+
+struct S;
+impl bar::Foo for S {
+ fn foo(&self, bar: bar::Bar) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_qualify_path_generic() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ pub trait Foo { fn foo(&self, bar: Bar<u32>); }
+}
+struct S;
+impl foo::Foo for S { $0 }"#,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ pub trait Foo { fn foo(&self, bar: Bar<u32>); }
+}
+struct S;
+impl foo::Foo for S {
+ fn foo(&self, bar: foo::Bar<u32>) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_qualify_path_and_substitute_param() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ pub trait Foo<T> { fn foo(&self, bar: Bar<T>); }
+}
+struct S;
+impl foo::Foo<u32> for S { $0 }"#,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ pub trait Foo<T> { fn foo(&self, bar: Bar<T>); }
+}
+struct S;
+impl foo::Foo<u32> for S {
+ fn foo(&self, bar: foo::Bar<u32>) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_substitute_param_no_qualify() {
+ // when substituting params, the substituted param should not be qualified!
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub trait Foo<T> { fn foo(&self, bar: T); }
+ pub struct Param;
+}
+struct Param;
+struct S;
+impl foo::Foo<Param> for S { $0 }"#,
+ r#"
+mod foo {
+ pub trait Foo<T> { fn foo(&self, bar: T); }
+ pub struct Param;
+}
+struct Param;
+struct S;
+impl foo::Foo<Param> for S {
+ fn foo(&self, bar: Param) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_qualify_path_associated_item() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ impl Bar<T> { type Assoc = u32; }
- trait Foo { fn foo(&self, bar: Bar<u32>::Assoc); }
++ pub trait Foo { fn foo(&self, bar: Bar<u32>::Assoc); }
+}
+struct S;
+impl foo::Foo for S { $0 }"#,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ impl Bar<T> { type Assoc = u32; }
- trait Foo { fn foo(&self, bar: Bar<u32>::Assoc); }
++ pub trait Foo { fn foo(&self, bar: Bar<u32>::Assoc); }
+}
+struct S;
+impl foo::Foo for S {
+ fn foo(&self, bar: foo::Bar<u32>::Assoc) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_qualify_path_nested() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ pub struct Baz;
- trait Foo { fn foo(&self, bar: Bar<Baz>); }
++ pub trait Foo { fn foo(&self, bar: Bar<Baz>); }
+}
+struct S;
+impl foo::Foo for S { $0 }"#,
+ r#"
+mod foo {
+ pub struct Bar<T>;
+ pub struct Baz;
- trait Foo { fn foo(&self, bar: Bar<Baz>); }
++ pub trait Foo { fn foo(&self, bar: Bar<Baz>); }
+}
+struct S;
+impl foo::Foo for S {
+ fn foo(&self, bar: foo::Bar<foo::Baz>) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_qualify_path_fn_trait_notation() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod foo {
+ pub trait Fn<Args> { type Output; }
- trait Foo { fn foo(&self, bar: dyn Fn(u32) -> i32); }
++ pub trait Foo { fn foo(&self, bar: dyn Fn(u32) -> i32); }
+}
+struct S;
+impl foo::Foo for S { $0 }"#,
+ r#"
+mod foo {
+ pub trait Fn<Args> { type Output; }
- trait Foo { fn foo(&self, bar: dyn Fn(u32) -> i32); }
++ pub trait Foo { fn foo(&self, bar: dyn Fn(u32) -> i32); }
+}
+struct S;
+impl foo::Foo for S {
+ fn foo(&self, bar: dyn Fn(u32) -> i32) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_empty_trait() {
+ check_assist_not_applicable(
+ add_missing_impl_members,
+ r#"
+trait Foo;
+struct S;
+impl Foo for S { $0 }"#,
+ )
+ }
+
+ #[test]
+ fn test_ignore_unnamed_trait_members_and_default_methods() {
+ check_assist_not_applicable(
+ add_missing_impl_members,
+ r#"
+trait Foo {
+ fn (arg: u32);
+ fn valid(some: u32) -> bool { false }
+}
+struct S;
+impl Foo for S { $0 }"#,
+ )
+ }
+
+ #[test]
+ fn test_with_docstring_and_attrs() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+#[doc(alias = "test alias")]
+trait Foo {
+ /// doc string
+ type Output;
+
+ #[must_use]
+ fn foo(&self);
+}
+struct S;
+impl Foo for S {}$0"#,
+ r#"
+#[doc(alias = "test alias")]
+trait Foo {
+ /// doc string
+ type Output;
+
+ #[must_use]
+ fn foo(&self);
+}
+struct S;
+impl Foo for S {
+ $0type Output;
+
+ fn foo(&self) {
+ todo!()
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_default_methods() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+trait Foo {
+ type Output;
+
+ const CONST: usize = 42;
++ const CONST_2: i32;
+
+ fn valid(some: u32) -> bool { false }
+ fn foo(some: u32) -> bool;
+}
+struct S;
+impl Foo for S { $0 }"#,
+ r#"
+trait Foo {
+ type Output;
+
+ const CONST: usize = 42;
++ const CONST_2: i32;
+
+ fn valid(some: u32) -> bool { false }
+ fn foo(some: u32) -> bool;
+}
+struct S;
+impl Foo for S {
++ $0const CONST: usize = 42;
++
++ fn valid(some: u32) -> bool { false }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_generic_single_default_parameter() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo<T = Self> {
+ fn bar(&self, other: &T);
+}
+
+struct S;
+impl Foo for S { $0 }"#,
+ r#"
+trait Foo<T = Self> {
+ fn bar(&self, other: &T);
+}
+
+struct S;
+impl Foo for S {
+ fn bar(&self, other: &Self) {
+ ${0:todo!()}
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_generic_default_parameter_is_second() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo<T1, T2 = Self> {
+ fn bar(&self, this: &T1, that: &T2);
+}
+
+struct S<T>;
+impl Foo<T> for S<T> { $0 }"#,
+ r#"
+trait Foo<T1, T2 = Self> {
+ fn bar(&self, this: &T1, that: &T2);
+}
+
+struct S<T>;
+impl Foo<T> for S<T> {
+ fn bar(&self, this: &T, that: &Self) {
+ ${0:todo!()}
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_assoc_type_bounds_are_removed() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Tr {
+ type Ty: Copy + 'static;
+}
+
+impl Tr for ()$0 {
+}"#,
+ r#"
+trait Tr {
+ type Ty: Copy + 'static;
+}
+
+impl Tr for () {
+ $0type Ty;
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_whitespace_fixup_preserves_bad_tokens() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Tr {
+ fn foo();
+}
+
+impl Tr for ()$0 {
+ +++
+}"#,
+ r#"
+trait Tr {
+ fn foo();
+}
+
+impl Tr for () {
+ fn foo() {
+ ${0:todo!()}
+ }
+ +++
+}"#,
+ )
+ }
+
+ #[test]
+ fn test_whitespace_fixup_preserves_comments() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Tr {
+ fn foo();
+}
+
+impl Tr for ()$0 {
+ // very important
+}"#,
+ r#"
+trait Tr {
+ fn foo();
+}
+
+impl Tr for () {
+ fn foo() {
+ ${0:todo!()}
+ }
+ // very important
+}"#,
+ )
+ }
+
+ #[test]
+ fn weird_path() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Test {
+ fn foo(&self, x: crate)
+}
+impl Test for () {
+ $0
+}
+"#,
+ r#"
+trait Test {
+ fn foo(&self, x: crate)
+}
+impl Test for () {
+ fn foo(&self, x: crate) {
+ ${0:todo!()}
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn missing_generic_type() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+trait Foo<BAR> {
+ fn foo(&self, bar: BAR);
+}
+impl Foo for () {
+ $0
+}
+"#,
+ r#"
+trait Foo<BAR> {
+ fn foo(&self, bar: BAR);
+}
+impl Foo for () {
+ fn foo(&self, bar: BAR) {
+ ${0:todo!()}
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn does_not_requalify_self_as_crate() {
+ check_assist(
+ add_missing_default_members,
+ r"
+struct Wrapper<T>(T);
+
+trait T {
+ fn f(self) -> Wrapper<Self> {
+ Wrapper(self)
+ }
+}
+
+impl T for () {
+ $0
+}
+",
+ r"
+struct Wrapper<T>(T);
+
+trait T {
+ fn f(self) -> Wrapper<Self> {
+ Wrapper(self)
+ }
+}
+
+impl T for () {
+ $0fn f(self) -> Wrapper<Self> {
+ Wrapper(self)
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_default_body_generation() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+//- minicore: default
+struct Foo(usize);
+
+impl Default for Foo {
+ $0
+}
+"#,
+ r#"
+struct Foo(usize);
+
+impl Default for Foo {
+ $0fn default() -> Self {
+ Self(Default::default())
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_from_macro() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+macro_rules! foo {
+ () => {
+ trait FooB {
+ fn foo<'lt>(&'lt self) {}
+ }
+ }
+}
+foo!();
+struct Foo(usize);
+
+impl FooB for Foo {
+ $0
+}
+"#,
+ r#"
+macro_rules! foo {
+ () => {
+ trait FooB {
+ fn foo<'lt>(&'lt self) {}
+ }
+ }
+}
+foo!();
+struct Foo(usize);
+
+impl FooB for Foo {
+ $0fn foo<'lt>(&'lt self){}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_assoc_type_when_trait_with_same_name_in_scope() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Foo {}
+
+pub trait Types {
+ type Foo;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl<T: Types> Behavior<T> for Impl { $0 }"#,
+ r#"
+pub trait Foo {}
+
+pub trait Types {
+ type Foo;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl<T: Types> Behavior<T> for Impl {
+ fn reproduce(&self, foo: <T as Types>::Foo) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl { $0 }"#,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl {
+ fn reproduce(&self, foo: <u32 as Types>::Foo) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type_qualified() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+impl Types for std::string::String {
+ type Foo = bool;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<std::string::String> for Impl { $0 }"#,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+impl Types for std::string::String {
+ type Foo = bool;
+}
+
+pub trait Behavior<T: Types> {
+ fn reproduce(&self, foo: T::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<std::string::String> for Impl {
+ fn reproduce(&self, foo: <std::string::String as Types>::Foo) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type_multi_option_ambiguous() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+pub trait Types2 {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl Types2 for u32 {
+ type Foo = String;
+}
+
+pub trait Behavior<T: Types + Types2> {
+ fn reproduce(&self, foo: <T as Types2>::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl { $0 }"#,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+pub trait Types2 {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl Types2 for u32 {
+ type Foo = String;
+}
+
+pub trait Behavior<T: Types + Types2> {
+ fn reproduce(&self, foo: <T as Types2>::Foo);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl {
+ fn reproduce(&self, foo: <u32 as Types2>::Foo) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type_multi_option() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+pub trait Types2 {
+ type Bar;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl Types2 for u32 {
+ type Bar = String;
+}
+
+pub trait Behavior<T: Types + Types2> {
+ fn reproduce(&self, foo: T::Bar);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl { $0 }"#,
+ r#"
+pub trait Types {
+ type Foo;
+}
+
+pub trait Types2 {
+ type Bar;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl Types2 for u32 {
+ type Bar = String;
+}
+
+pub trait Behavior<T: Types + Types2> {
+ fn reproduce(&self, foo: T::Bar);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl {
+ fn reproduce(&self, foo: <u32 as Types2>::Bar) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_assoc_type_on_concrete_type_multi_option_foreign() {
+ check_assist(
+ add_missing_impl_members,
+ r#"
+mod bar {
+ pub trait Types2 {
+ type Bar;
+ }
+}
+
+pub trait Types {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl bar::Types2 for u32 {
+ type Bar = String;
+}
+
+pub trait Behavior<T: Types + bar::Types2> {
+ fn reproduce(&self, foo: T::Bar);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl { $0 }"#,
+ r#"
+mod bar {
+ pub trait Types2 {
+ type Bar;
+ }
+}
+
+pub trait Types {
+ type Foo;
+}
+
+impl Types for u32 {
+ type Foo = bool;
+}
+
+impl bar::Types2 for u32 {
+ type Bar = String;
+}
+
+pub trait Behavior<T: Types + bar::Types2> {
+ fn reproduce(&self, foo: T::Bar);
+}
+
+pub struct Impl;
+
+impl Behavior<u32> for Impl {
+ fn reproduce(&self, foo: <u32 as bar::Types2>::Bar) {
+ ${0:todo!()}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_transform_path_in_path_expr() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+pub trait Const {
+ const FOO: u32;
+}
+
+pub trait Trait<T: Const> {
+ fn foo() -> bool {
+ match T::FOO {
+ 0 => true,
+ _ => false,
+ }
+ }
+}
+
+impl Const for u32 {
+ const FOO: u32 = 1;
+}
+
+struct Impl;
+
+impl Trait<u32> for Impl { $0 }"#,
+ r#"
+pub trait Const {
+ const FOO: u32;
+}
+
+pub trait Trait<T: Const> {
+ fn foo() -> bool {
+ match T::FOO {
+ 0 => true,
+ _ => false,
+ }
+ }
+}
+
+impl Const for u32 {
+ const FOO: u32 = 1;
+}
+
+struct Impl;
+
+impl Trait<u32> for Impl {
+ $0fn foo() -> bool {
+ match <u32 as Const>::FOO {
+ 0 => true,
+ _ => false,
+ }
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_default_partial_eq() {
+ check_assist(
+ add_missing_default_members,
+ r#"
+//- minicore: eq
+struct SomeStruct {
+ data: usize,
+ field: (usize, usize),
+}
+impl PartialEq for SomeStruct {$0}
+"#,
+ r#"
+struct SomeStruct {
+ data: usize,
+ field: (usize, usize),
+}
+impl PartialEq for SomeStruct {
+ $0fn ne(&self, other: &Self) -> bool {
+ !self.eq(other)
+ }
+}
+"#,
+ );
+ }
+}
--- /dev/null
- let extracted_from_trait_impl = body.extracted_from_trait_impl();
-
+use std::iter;
+
+use ast::make;
+use either::Either;
+use hir::{
+ HasSource, HirDisplay, InFile, Local, ModuleDef, PathResolution, Semantics, TypeInfo, TypeParam,
+};
+use ide_db::{
+ defs::{Definition, NameRefClass},
+ famous_defs::FamousDefs,
+ helpers::mod_path_to_ast,
+ imports::insert_use::{insert_use, ImportScope},
+ search::{FileReference, ReferenceCategory, SearchScope},
+ syntax_helpers::node_ext::{preorder_expr, walk_expr, walk_pat, walk_patterns_in_expr},
+ FxIndexSet, RootDatabase,
+};
+use itertools::Itertools;
+use stdx::format_to;
+use syntax::{
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ AstNode, HasGenericParams,
+ },
+ match_ast, ted, SyntaxElement,
+ SyntaxKind::{self, COMMENT},
+ SyntaxNode, SyntaxToken, TextRange, TextSize, TokenAtOffset, WalkEvent, T,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists, TreeMutator},
+ utils::generate_impl_text,
+ AssistId,
+};
+
+// Assist: extract_function
+//
+// Extracts selected statements and comments into new function.
+//
+// ```
+// fn main() {
+// let n = 1;
+// $0let m = n + 2;
+// // calculate
+// let k = m + n;$0
+// let g = 3;
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// let n = 1;
+// fun_name(n);
+// let g = 3;
+// }
+//
+// fn $0fun_name(n: i32) {
+// let m = n + 2;
+// // calculate
+// let k = m + n;
+// }
+// ```
+pub(crate) fn extract_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let range = ctx.selection_trimmed();
+ if range.is_empty() {
+ return None;
+ }
+
+ let node = ctx.covering_element();
+ if node.kind() == COMMENT {
+ cov_mark::hit!(extract_function_in_comment_is_not_applicable);
+ return None;
+ }
+
+ let node = match node {
+ syntax::NodeOrToken::Node(n) => n,
+ syntax::NodeOrToken::Token(t) => t.parent()?,
+ };
+
+ let body = extraction_target(&node, range)?;
+ let container_info = body.analyze_container(&ctx.sema)?;
+
+ let (locals_used, self_param) = body.analyze(&ctx.sema);
+
+ let anchor = if self_param.is_some() { Anchor::Method } else { Anchor::Freestanding };
+ let insert_after = node_to_insert_after(&body, anchor)?;
+ let semantics_scope = ctx.sema.scope(&insert_after)?;
+ let module = semantics_scope.module();
+
+ let ret_ty = body.return_ty(ctx)?;
+ let control_flow = body.external_control_flow(ctx, &container_info)?;
+ let ret_values = body.ret_values(ctx, node.parent().as_ref().unwrap_or(&node));
+
+ let target_range = body.text_range();
+
+ let scope = ImportScope::find_insert_use_container(&node, &ctx.sema)?;
+
+ acc.add(
+ AssistId("extract_function", crate::AssistKind::RefactorExtract),
+ "Extract into function",
+ target_range,
+ move |builder| {
+ let outliving_locals: Vec<_> = ret_values.collect();
+ if stdx::never!(!outliving_locals.is_empty() && !ret_ty.is_unit()) {
+ // We should not have variables that outlive body if we have expression block
+ return;
+ }
+
+ let params =
+ body.extracted_function_params(ctx, &container_info, locals_used.iter().copied());
+
+ let name = make_function_name(&semantics_scope);
+
+ let fun = Function {
+ name,
+ self_param,
+ params,
+ control_flow,
+ ret_ty,
+ body,
+ outliving_locals,
+ mods: container_info,
+ };
+
+ let new_indent = IndentLevel::from_node(&insert_after);
+ let old_indent = fun.body.indent_level();
+
+ builder.replace(target_range, make_call(ctx, &fun, old_indent));
+
++ let has_impl_wrapper =
++ insert_after.ancestors().any(|a| a.kind() == SyntaxKind::IMPL && a != insert_after);
++
+ let fn_def = match fun.self_param_adt(ctx) {
- #[derive(Debug)]
- Some(adt) if extracted_from_trait_impl => {
++ Some(adt) if anchor == Anchor::Method && !has_impl_wrapper => {
+ let fn_def = format_function(ctx, module, &fun, old_indent, new_indent + 1);
+ generate_impl_text(&adt, &fn_def).replace("{\n\n", "{")
+ }
+ _ => format_function(ctx, module, &fun, old_indent, new_indent),
+ };
+
+ if fn_def.contains("ControlFlow") {
+ let scope = match scope {
+ ImportScope::File(it) => ImportScope::File(builder.make_mut(it)),
+ ImportScope::Module(it) => ImportScope::Module(builder.make_mut(it)),
+ ImportScope::Block(it) => ImportScope::Block(builder.make_mut(it)),
+ };
+
+ let control_flow_enum =
+ FamousDefs(&ctx.sema, module.krate()).core_ops_ControlFlow();
+
+ if let Some(control_flow_enum) = control_flow_enum {
+ let mod_path = module.find_use_path_prefixed(
+ ctx.sema.db,
+ ModuleDef::from(control_flow_enum),
+ ctx.config.insert_use.prefix_kind,
+ ctx.config.prefer_no_std,
+ );
+
+ if let Some(mod_path) = mod_path {
+ insert_use(&scope, mod_path_to_ast(&mod_path), &ctx.config.insert_use);
+ }
+ }
+ }
+
+ let insert_offset = insert_after.text_range().end();
+
+ match ctx.config.snippet_cap {
+ Some(cap) => builder.insert_snippet(cap, insert_offset, fn_def),
+ None => builder.insert(insert_offset, fn_def),
+ };
+ },
+ )
+}
+
+fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef {
+ let mut names_in_scope = vec![];
+ semantics_scope.process_all_names(&mut |name, _| names_in_scope.push(name.to_string()));
+
+ let default_name = "fun_name";
+
+ let mut name = default_name.to_string();
+ let mut counter = 0;
+ while names_in_scope.contains(&name) {
+ counter += 1;
+ name = format!("{default_name}{counter}")
+ }
+ make::name_ref(&name)
+}
+
+/// Try to guess what user wants to extract
+///
+/// We have basically have two cases:
+/// * We want whole node, like `loop {}`, `2 + 2`, `{ let n = 1; }` exprs.
+/// Then we can use `ast::Expr`
+/// * We want a few statements for a block. E.g.
+/// ```rust,no_run
+/// fn foo() -> i32 {
+/// let m = 1;
+/// $0
+/// let n = 2;
+/// let k = 3;
+/// k + n
+/// $0
+/// }
+/// ```
+///
+fn extraction_target(node: &SyntaxNode, selection_range: TextRange) -> Option<FunctionBody> {
+ if let Some(stmt) = ast::Stmt::cast(node.clone()) {
+ return match stmt {
+ ast::Stmt::Item(_) => None,
+ ast::Stmt::ExprStmt(_) | ast::Stmt::LetStmt(_) => Some(FunctionBody::from_range(
+ node.parent().and_then(ast::StmtList::cast)?,
+ node.text_range(),
+ )),
+ };
+ }
+
+ // Covering element returned the parent block of one or multiple statements that have been selected
+ if let Some(stmt_list) = ast::StmtList::cast(node.clone()) {
+ if let Some(block_expr) = stmt_list.syntax().parent().and_then(ast::BlockExpr::cast) {
+ if block_expr.syntax().text_range() == selection_range {
+ return FunctionBody::from_expr(block_expr.into());
+ }
+ }
+
+ // Extract the full statements.
+ return Some(FunctionBody::from_range(stmt_list, selection_range));
+ }
+
+ let expr = ast::Expr::cast(node.clone())?;
+ // A node got selected fully
+ if node.text_range() == selection_range {
+ return FunctionBody::from_expr(expr);
+ }
+
+ node.ancestors().find_map(ast::Expr::cast).and_then(FunctionBody::from_expr)
+}
+
+#[derive(Debug)]
+struct Function {
+ name: ast::NameRef,
+ self_param: Option<ast::SelfParam>,
+ params: Vec<Param>,
+ control_flow: ControlFlow,
+ ret_ty: RetType,
+ body: FunctionBody,
+ outliving_locals: Vec<OutlivedLocal>,
+ mods: ContainerInfo,
+}
+
+#[derive(Debug)]
+struct Param {
+ var: Local,
+ ty: hir::Type,
+ move_local: bool,
+ requires_mut: bool,
+ is_copy: bool,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+enum ParamKind {
+ Value,
+ MutValue,
+ SharedRef,
+ MutRef,
+}
+
+#[derive(Debug, Eq, PartialEq)]
+enum FunType {
+ Unit,
+ Single(hir::Type),
+ Tuple(Vec<hir::Type>),
+}
+
+/// Where to put extracted function definition
++#[derive(Debug, Eq, PartialEq, Clone, Copy)]
+enum Anchor {
+ /// Extract free function and put right after current top-level function
+ Freestanding,
+ /// Extract method and put right after current function in the impl-block
+ Method,
+}
+
+// FIXME: ControlFlow and ContainerInfo both track some function modifiers, feels like these two should
+// probably be merged somehow.
+#[derive(Debug)]
+struct ControlFlow {
+ kind: Option<FlowKind>,
+ is_async: bool,
+ is_unsafe: bool,
+}
+
+/// The thing whose expression we are extracting from. Can be a function, const, static, const arg, ...
+#[derive(Clone, Debug)]
+struct ContainerInfo {
+ is_const: bool,
+ is_in_tail: bool,
+ parent_loop: Option<SyntaxNode>,
+ /// The function's return type, const's type etc.
+ ret_type: Option<hir::Type>,
+ generic_param_lists: Vec<ast::GenericParamList>,
+ where_clauses: Vec<ast::WhereClause>,
+}
+
+/// Control flow that is exported from extracted function
+///
+/// E.g.:
+/// ```rust,no_run
+/// loop {
+/// $0
+/// if 42 == 42 {
+/// break;
+/// }
+/// $0
+/// }
+/// ```
+#[derive(Debug, Clone)]
+enum FlowKind {
+ /// Return with value (`return $expr;`)
+ Return(Option<ast::Expr>),
+ Try {
+ kind: TryKind,
+ },
+ /// Break with label and value (`break 'label $expr;`)
+ Break(Option<ast::Lifetime>, Option<ast::Expr>),
+ /// Continue with label (`continue 'label;`)
+ Continue(Option<ast::Lifetime>),
+}
+
+#[derive(Debug, Clone)]
+enum TryKind {
+ Option,
+ Result { ty: hir::Type },
+}
+
+#[derive(Debug)]
+enum RetType {
+ Expr(hir::Type),
+ Stmt,
+}
+
+impl RetType {
+ fn is_unit(&self) -> bool {
+ match self {
+ RetType::Expr(ty) => ty.is_unit(),
+ RetType::Stmt => true,
+ }
+ }
+}
+
+/// Semantically same as `ast::Expr`, but preserves identity when using only part of the Block
+/// This is the future function body, the part that is being extracted.
+#[derive(Debug)]
+enum FunctionBody {
+ Expr(ast::Expr),
+ Span { parent: ast::StmtList, text_range: TextRange },
+}
+
+#[derive(Debug)]
+struct OutlivedLocal {
+ local: Local,
+ mut_usage_outside_body: bool,
+}
+
+/// Container of local variable usages
+///
+/// Semanticall same as `UsageSearchResult`, but provides more convenient interface
+struct LocalUsages(ide_db::search::UsageSearchResult);
+
+impl LocalUsages {
+ fn find_local_usages(ctx: &AssistContext<'_>, var: Local) -> Self {
+ Self(
+ Definition::Local(var)
+ .usages(&ctx.sema)
+ .in_scope(SearchScope::single_file(ctx.file_id()))
+ .all(),
+ )
+ }
+
+ fn iter(&self) -> impl Iterator<Item = &FileReference> + '_ {
+ self.0.iter().flat_map(|(_, rs)| rs)
+ }
+}
+
+impl Function {
+ fn return_type(&self, ctx: &AssistContext<'_>) -> FunType {
+ match &self.ret_ty {
+ RetType::Expr(ty) if ty.is_unit() => FunType::Unit,
+ RetType::Expr(ty) => FunType::Single(ty.clone()),
+ RetType::Stmt => match self.outliving_locals.as_slice() {
+ [] => FunType::Unit,
+ [var] => FunType::Single(var.local.ty(ctx.db())),
+ vars => {
+ let types = vars.iter().map(|v| v.local.ty(ctx.db())).collect();
+ FunType::Tuple(types)
+ }
+ },
+ }
+ }
+
+ fn self_param_adt(&self, ctx: &AssistContext<'_>) -> Option<ast::Adt> {
+ let self_param = self.self_param.as_ref()?;
+ let def = ctx.sema.to_def(self_param)?;
+ let adt = def.ty(ctx.db()).strip_references().as_adt()?;
+ let InFile { file_id: _, value } = adt.source(ctx.db())?;
+ Some(value)
+ }
+}
+
+impl ParamKind {
+ fn is_ref(&self) -> bool {
+ matches!(self, ParamKind::SharedRef | ParamKind::MutRef)
+ }
+}
+
+impl Param {
+ fn kind(&self) -> ParamKind {
+ match (self.move_local, self.requires_mut, self.is_copy) {
+ (false, true, _) => ParamKind::MutRef,
+ (false, false, false) => ParamKind::SharedRef,
+ (true, true, _) => ParamKind::MutValue,
+ (_, false, _) => ParamKind::Value,
+ }
+ }
+
+ fn to_arg(&self, ctx: &AssistContext<'_>) -> ast::Expr {
+ let var = path_expr_from_local(ctx, self.var);
+ match self.kind() {
+ ParamKind::Value | ParamKind::MutValue => var,
+ ParamKind::SharedRef => make::expr_ref(var, false),
+ ParamKind::MutRef => make::expr_ref(var, true),
+ }
+ }
+
+ fn to_param(&self, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Param {
+ let var = self.var.name(ctx.db()).to_string();
+ let var_name = make::name(&var);
+ let pat = match self.kind() {
+ ParamKind::MutValue => make::ident_pat(false, true, var_name),
+ ParamKind::Value | ParamKind::SharedRef | ParamKind::MutRef => {
+ make::ext::simple_ident_pat(var_name)
+ }
+ };
+
+ let ty = make_ty(&self.ty, ctx, module);
+ let ty = match self.kind() {
+ ParamKind::Value | ParamKind::MutValue => ty,
+ ParamKind::SharedRef => make::ty_ref(ty, false),
+ ParamKind::MutRef => make::ty_ref(ty, true),
+ };
+
+ make::param(pat.into(), ty)
+ }
+}
+
+impl TryKind {
+ fn of_ty(ty: hir::Type, ctx: &AssistContext<'_>) -> Option<TryKind> {
+ if ty.is_unknown() {
+ // We favour Result for `expr?`
+ return Some(TryKind::Result { ty });
+ }
+ let adt = ty.as_adt()?;
+ let name = adt.name(ctx.db());
+ // FIXME: use lang items to determine if it is std type or user defined
+ // E.g. if user happens to define type named `Option`, we would have false positive
+ match name.to_string().as_str() {
+ "Option" => Some(TryKind::Option),
+ "Result" => Some(TryKind::Result { ty }),
+ _ => None,
+ }
+ }
+}
+
+impl FlowKind {
+ fn make_result_handler(&self, expr: Option<ast::Expr>) -> ast::Expr {
+ match self {
+ FlowKind::Return(_) => make::expr_return(expr),
+ FlowKind::Break(label, _) => make::expr_break(label.clone(), expr),
+ FlowKind::Try { .. } => {
+ stdx::never!("cannot have result handler with try");
+ expr.unwrap_or_else(|| make::expr_return(None))
+ }
+ FlowKind::Continue(label) => {
+ stdx::always!(expr.is_none(), "continue with value is not possible");
+ make::expr_continue(label.clone())
+ }
+ }
+ }
+
+ fn expr_ty(&self, ctx: &AssistContext<'_>) -> Option<hir::Type> {
+ match self {
+ FlowKind::Return(Some(expr)) | FlowKind::Break(_, Some(expr)) => {
+ ctx.sema.type_of_expr(expr).map(TypeInfo::adjusted)
+ }
+ FlowKind::Try { .. } => {
+ stdx::never!("try does not have defined expr_ty");
+ None
+ }
+ _ => None,
+ }
+ }
+}
+
+impl FunctionBody {
+ fn parent(&self) -> Option<SyntaxNode> {
+ match self {
+ FunctionBody::Expr(expr) => expr.syntax().parent(),
+ FunctionBody::Span { parent, .. } => Some(parent.syntax().clone()),
+ }
+ }
+
+ fn node(&self) -> &SyntaxNode {
+ match self {
+ FunctionBody::Expr(e) => e.syntax(),
+ FunctionBody::Span { parent, .. } => parent.syntax(),
+ }
+ }
+
+ fn extracted_from_trait_impl(&self) -> bool {
+ match self.node().ancestors().find_map(ast::Impl::cast) {
+ Some(c) => return c.trait_().is_some(),
+ None => false,
+ }
+ }
+
+ fn descendants(&self) -> impl Iterator<Item = SyntaxNode> {
+ match self {
+ FunctionBody::Expr(expr) => expr.syntax().descendants(),
+ FunctionBody::Span { parent, .. } => parent.syntax().descendants(),
+ }
+ }
+
+ fn descendant_paths(&self) -> impl Iterator<Item = ast::Path> {
+ self.descendants().filter_map(|node| {
+ match_ast! {
+ match node {
+ ast::Path(it) => Some(it),
+ _ => None
+ }
+ }
+ })
+ }
+
+ fn from_expr(expr: ast::Expr) -> Option<Self> {
+ match expr {
+ ast::Expr::BreakExpr(it) => it.expr().map(Self::Expr),
+ ast::Expr::ReturnExpr(it) => it.expr().map(Self::Expr),
+ ast::Expr::BlockExpr(it) if !it.is_standalone() => None,
+ expr => Some(Self::Expr(expr)),
+ }
+ }
+
+ fn from_range(parent: ast::StmtList, selected: TextRange) -> FunctionBody {
+ let full_body = parent.syntax().children_with_tokens();
+
+ let mut text_range = full_body
+ .filter(|it| ast::Stmt::can_cast(it.kind()) || it.kind() == COMMENT)
+ .map(|element| element.text_range())
+ .filter(|&range| selected.intersect(range).filter(|it| !it.is_empty()).is_some())
+ .reduce(|acc, stmt| acc.cover(stmt));
+
+ if let Some(tail_range) = parent
+ .tail_expr()
+ .map(|it| it.syntax().text_range())
+ .filter(|&it| selected.intersect(it).is_some())
+ {
+ text_range = Some(match text_range {
+ Some(text_range) => text_range.cover(tail_range),
+ None => tail_range,
+ });
+ }
+ Self::Span { parent, text_range: text_range.unwrap_or(selected) }
+ }
+
+ fn indent_level(&self) -> IndentLevel {
+ match &self {
+ FunctionBody::Expr(expr) => IndentLevel::from_node(expr.syntax()),
+ FunctionBody::Span { parent, .. } => IndentLevel::from_node(parent.syntax()) + 1,
+ }
+ }
+
+ fn tail_expr(&self) -> Option<ast::Expr> {
+ match &self {
+ FunctionBody::Expr(expr) => Some(expr.clone()),
+ FunctionBody::Span { parent, text_range } => {
+ let tail_expr = parent.tail_expr()?;
+ text_range.contains_range(tail_expr.syntax().text_range()).then(|| tail_expr)
+ }
+ }
+ }
+
+ fn walk_expr(&self, cb: &mut dyn FnMut(ast::Expr)) {
+ match self {
+ FunctionBody::Expr(expr) => walk_expr(expr, cb),
+ FunctionBody::Span { parent, text_range } => {
+ parent
+ .statements()
+ .filter(|stmt| text_range.contains_range(stmt.syntax().text_range()))
+ .filter_map(|stmt| match stmt {
+ ast::Stmt::ExprStmt(expr_stmt) => expr_stmt.expr(),
+ ast::Stmt::Item(_) => None,
+ ast::Stmt::LetStmt(stmt) => stmt.initializer(),
+ })
+ .for_each(|expr| walk_expr(&expr, cb));
+ if let Some(expr) = parent
+ .tail_expr()
+ .filter(|it| text_range.contains_range(it.syntax().text_range()))
+ {
+ walk_expr(&expr, cb);
+ }
+ }
+ }
+ }
+
+ fn preorder_expr(&self, cb: &mut dyn FnMut(WalkEvent<ast::Expr>) -> bool) {
+ match self {
+ FunctionBody::Expr(expr) => preorder_expr(expr, cb),
+ FunctionBody::Span { parent, text_range } => {
+ parent
+ .statements()
+ .filter(|stmt| text_range.contains_range(stmt.syntax().text_range()))
+ .filter_map(|stmt| match stmt {
+ ast::Stmt::ExprStmt(expr_stmt) => expr_stmt.expr(),
+ ast::Stmt::Item(_) => None,
+ ast::Stmt::LetStmt(stmt) => stmt.initializer(),
+ })
+ .for_each(|expr| preorder_expr(&expr, cb));
+ if let Some(expr) = parent
+ .tail_expr()
+ .filter(|it| text_range.contains_range(it.syntax().text_range()))
+ {
+ preorder_expr(&expr, cb);
+ }
+ }
+ }
+ }
+
+ fn walk_pat(&self, cb: &mut dyn FnMut(ast::Pat)) {
+ match self {
+ FunctionBody::Expr(expr) => walk_patterns_in_expr(expr, cb),
+ FunctionBody::Span { parent, text_range } => {
+ parent
+ .statements()
+ .filter(|stmt| text_range.contains_range(stmt.syntax().text_range()))
+ .for_each(|stmt| match stmt {
+ ast::Stmt::ExprStmt(expr_stmt) => {
+ if let Some(expr) = expr_stmt.expr() {
+ walk_patterns_in_expr(&expr, cb)
+ }
+ }
+ ast::Stmt::Item(_) => (),
+ ast::Stmt::LetStmt(stmt) => {
+ if let Some(pat) = stmt.pat() {
+ walk_pat(&pat, cb);
+ }
+ if let Some(expr) = stmt.initializer() {
+ walk_patterns_in_expr(&expr, cb);
+ }
+ }
+ });
+ if let Some(expr) = parent
+ .tail_expr()
+ .filter(|it| text_range.contains_range(it.syntax().text_range()))
+ {
+ walk_patterns_in_expr(&expr, cb);
+ }
+ }
+ }
+ }
+
+ fn text_range(&self) -> TextRange {
+ match self {
+ FunctionBody::Expr(expr) => expr.syntax().text_range(),
+ &FunctionBody::Span { text_range, .. } => text_range,
+ }
+ }
+
+ fn contains_range(&self, range: TextRange) -> bool {
+ self.text_range().contains_range(range)
+ }
+
+ fn precedes_range(&self, range: TextRange) -> bool {
+ self.text_range().end() <= range.start()
+ }
+
+ fn contains_node(&self, node: &SyntaxNode) -> bool {
+ self.contains_range(node.text_range())
+ }
+}
+
+impl FunctionBody {
+ /// Analyzes a function body, returning the used local variables that are referenced in it as well as
+ /// the `self` param, if it is referenced.
+ fn analyze(
+ &self,
+ sema: &Semantics<'_, RootDatabase>,
+ ) -> (FxIndexSet<Local>, Option<ast::SelfParam>) {
+ let mut self_param = None;
+ let mut res = FxIndexSet::default();
+ // Classify a name reference; record it when it resolves to a local (or `self`).
+ let mut cb = |name_ref: Option<_>| {
+ let local_ref =
+ match name_ref.and_then(|name_ref| NameRefClass::classify(sema, &name_ref)) {
+ Some(
+ NameRefClass::Definition(Definition::Local(local_ref))
+ | NameRefClass::FieldShorthand { local_ref, field_ref: _ },
+ ) => local_ref,
+ _ => return,
+ };
+ let InFile { file_id, value } = local_ref.source(sema.db);
+ // locals defined inside macros are not relevant to us
+ if !file_id.is_macro() {
+ match value {
+ Either::Right(it) => {
+ self_param.replace(it);
+ }
+ Either::Left(_) => {
+ res.insert(local_ref);
+ }
+ }
+ }
+ };
+ self.walk_expr(&mut |expr| match expr {
+ ast::Expr::PathExpr(path_expr) => {
+ cb(path_expr.path().and_then(|it| it.as_single_name_ref()))
+ }
+ // Closure bodies are not visited by `walk_expr`, so scan their descendants here.
+ ast::Expr::ClosureExpr(closure_expr) => {
+ if let Some(body) = closure_expr.body() {
+ body.syntax().descendants().map(ast::NameRef::cast).for_each(|it| cb(it));
+ }
+ }
+ // For macro calls, descend each identifier token into the expansion to find references.
+ ast::Expr::MacroExpr(expr) => {
+ if let Some(tt) = expr.macro_call().and_then(|call| call.token_tree()) {
+ tt.syntax()
+ .children_with_tokens()
+ .flat_map(SyntaxElement::into_token)
+ .filter(|it| it.kind() == SyntaxKind::IDENT)
+ .flat_map(|t| sema.descend_into_macros(t))
+ .for_each(|t| cb(t.parent().and_then(ast::NameRef::cast)));
+ }
+ }
+ _ => (),
+ });
+ (res, self_param)
+ }
+
+ /// Walks the body's ancestors to find its container, collecting its constness,
+ /// expression/return type, enclosing loop (if any) and applicable generics.
+ fn analyze_container(&self, sema: &Semantics<'_, RootDatabase>) -> Option<ContainerInfo> {
+ let mut ancestors = self.parent()?.ancestors();
+ let infer_expr_opt = |expr| sema.type_of_expr(&expr?).map(TypeInfo::adjusted);
+ let mut parent_loop = None;
+ // Remember the innermost loop whose body contains the extracted range.
+ let mut set_parent_loop = |loop_: &dyn ast::HasLoopBody| {
+ if loop_
+ .loop_body()
+ .map_or(false, |it| it.syntax().text_range().contains_range(self.text_range()))
+ {
+ parent_loop.get_or_insert(loop_.syntax().clone());
+ }
+ };
+
+ // Walk outwards until an ancestor determines constness and the container's type.
+ let (is_const, expr, ty) = loop {
+ let anc = ancestors.next()?;
+ break match_ast! {
+ match anc {
+ ast::ClosureExpr(closure) => (false, closure.body(), infer_expr_opt(closure.body())),
+ ast::BlockExpr(block_expr) => {
+ let (constness, block) = match block_expr.modifier() {
+ Some(ast::BlockModifier::Const(_)) => (true, block_expr),
+ Some(ast::BlockModifier::Try(_)) => (false, block_expr),
+ Some(ast::BlockModifier::Label(label)) if label.lifetime().is_some() => (false, block_expr),
+ _ => continue,
+ };
+ let expr = Some(ast::Expr::BlockExpr(block));
+ (constness, expr.clone(), infer_expr_opt(expr))
+ },
+ ast::Fn(fn_) => {
+ let func = sema.to_def(&fn_)?;
+ let mut ret_ty = func.ret_type(sema.db);
+ // For async fns, use the future's output type as the effective return type.
+ if func.is_async(sema.db) {
+ if let Some(async_ret) = func.async_ret_type(sema.db) {
+ ret_ty = async_ret;
+ }
+ }
+ (fn_.const_token().is_some(), fn_.body().map(ast::Expr::BlockExpr), Some(ret_ty))
+ },
+ ast::Static(statik) => {
+ (true, statik.body(), Some(sema.to_def(&statik)?.ty(sema.db)))
+ },
+ ast::ConstArg(ca) => {
+ (true, ca.expr(), infer_expr_opt(ca.expr()))
+ },
+ ast::Const(konst) => {
+ (true, konst.body(), Some(sema.to_def(&konst)?.ty(sema.db)))
+ },
+ ast::ConstParam(cp) => {
+ (true, cp.default_val(), Some(sema.to_def(&cp)?.ty(sema.db)))
+ },
+ ast::ConstBlockPat(cbp) => {
+ let expr = cbp.block_expr().map(ast::Expr::BlockExpr);
+ (true, expr.clone(), infer_expr_opt(expr))
+ },
+ ast::Variant(__) => return None,
+ ast::Meta(__) => return None,
+ ast::LoopExpr(it) => {
+ set_parent_loop(&it);
+ continue;
+ },
+ ast::ForExpr(it) => {
+ set_parent_loop(&it);
+ continue;
+ },
+ ast::WhileExpr(it) => {
+ set_parent_loop(&it);
+ continue;
+ },
+ _ => continue,
+ }
+ };
+ };
+ let container_tail = match expr? {
+ ast::Expr::BlockExpr(block) => block.tail_expr(),
+ expr => Some(expr),
+ };
+ // The body sits in tail position when its tail expression is (part of) the container's tail.
+ let is_in_tail =
+ container_tail.zip(self.tail_expr()).map_or(false, |(container_tail, body_tail)| {
+ container_tail.syntax().text_range().contains_range(body_tail.syntax().text_range())
+ });
+
+ let parent = self.parent()?;
+ let parents = generic_parents(&parent);
+ let generic_param_lists = parents.iter().filter_map(|it| it.generic_param_list()).collect();
+ let where_clauses = parents.iter().filter_map(|it| it.where_clause()).collect();
+
+ Some(ContainerInfo {
+ is_in_tail,
+ is_const,
+ parent_loop,
+ ret_type: ty,
+ generic_param_lists,
+ where_clauses,
+ })
+ }
+
+ /// Return type of the extracted body: the tail expression's type if there is one,
+ /// `RetType::Stmt` when the body ends in a statement.
+ fn return_ty(&self, ctx: &AssistContext<'_>) -> Option<RetType> {
+ match self.tail_expr() {
+ Some(expr) => ctx.sema.type_of_expr(&expr).map(TypeInfo::original).map(RetType::Expr),
+ None => Some(RetType::Stmt),
+ }
+ }
+
+ /// Local variables defined inside `body` that are accessed outside of it
+ fn ret_values<'a>(
+ &self,
+ ctx: &'a AssistContext<'_>,
+ parent: &SyntaxNode,
+ ) -> impl Iterator<Item = OutlivedLocal> + 'a {
+ let parent = parent.clone();
+ let range = self.text_range();
+ locals_defined_in_body(&ctx.sema, self)
+ .into_iter()
+ .filter_map(move |local| local_outlives_body(ctx, range, local, &parent))
+ }
+
+ /// Analyses the function body for external control flow.
+ fn external_control_flow(
+ &self,
+ ctx: &AssistContext<'_>,
+ container_info: &ContainerInfo,
+ ) -> Option<ControlFlow> {
+ let mut ret_expr = None;
+ let mut try_expr = None;
+ let mut break_expr = None;
+ let mut continue_expr = None;
+ let mut is_async = false;
+ let mut _is_unsafe = false;
+
+ // Depth counters so that break/continue inside nested loops, and code inside
+ // nested unsafe blocks, are not counted as control flow leaving the body.
+ let mut unsafe_depth = 0;
+ let mut loop_depth = 0;
+
+ self.preorder_expr(&mut |expr| {
+ let expr = match expr {
+ WalkEvent::Enter(e) => e,
+ WalkEvent::Leave(expr) => {
+ match expr {
+ ast::Expr::LoopExpr(_)
+ | ast::Expr::ForExpr(_)
+ | ast::Expr::WhileExpr(_) => loop_depth -= 1,
+ ast::Expr::BlockExpr(block_expr) if block_expr.unsafe_token().is_some() => {
+ unsafe_depth -= 1
+ }
+ _ => (),
+ }
+ return false;
+ }
+ };
+ match expr {
+ ast::Expr::LoopExpr(_) | ast::Expr::ForExpr(_) | ast::Expr::WhileExpr(_) => {
+ loop_depth += 1;
+ }
+ ast::Expr::BlockExpr(block_expr) if block_expr.unsafe_token().is_some() => {
+ unsafe_depth += 1
+ }
+ ast::Expr::ReturnExpr(it) => {
+ ret_expr = Some(it);
+ }
+ ast::Expr::TryExpr(it) => {
+ try_expr = Some(it);
+ }
+ ast::Expr::BreakExpr(it) if loop_depth == 0 => {
+ break_expr = Some(it);
+ }
+ ast::Expr::ContinueExpr(it) if loop_depth == 0 => {
+ continue_expr = Some(it);
+ }
+ ast::Expr::AwaitExpr(_) => is_async = true,
+ // FIXME: Do unsafe analysis on expression, sem highlighting knows this so we should be able
+ // to just lift that out of there
+ // expr if unsafe_depth ==0 && expr.is_unsafe => is_unsafe = true,
+ _ => {}
+ }
+ false
+ });
+
+ // Combinations of `?`, `return` and `break`/`continue` cannot be handled by a
+ // single flow handler, so bail out (return None) on conflicting combinations.
+ let kind = match (try_expr, ret_expr, break_expr, continue_expr) {
+ (Some(_), _, None, None) => {
+ let ret_ty = container_info.ret_type.clone()?;
+ let kind = TryKind::of_ty(ret_ty, ctx)?;
+
+ Some(FlowKind::Try { kind })
+ }
+ (Some(_), _, _, _) => {
+ cov_mark::hit!(external_control_flow_try_and_bc);
+ return None;
+ }
+ (None, Some(r), None, None) => Some(FlowKind::Return(r.expr())),
+ (None, Some(_), _, _) => {
+ cov_mark::hit!(external_control_flow_return_and_bc);
+ return None;
+ }
+ (None, None, Some(_), Some(_)) => {
+ cov_mark::hit!(external_control_flow_break_and_continue);
+ return None;
+ }
+ (None, None, Some(b), None) => Some(FlowKind::Break(b.lifetime(), b.expr())),
+ (None, None, None, Some(c)) => Some(FlowKind::Continue(c.lifetime())),
+ (None, None, None, None) => None,
+ };
+
+ Some(ControlFlow { kind, is_async, is_unsafe: _is_unsafe })
+ }
+
+ /// find variables that should be extracted as params
+ ///
+ /// Computes additional info that affects param type and mutability
+ fn extracted_function_params(
+ &self,
+ ctx: &AssistContext<'_>,
+ container_info: &ContainerInfo,
+ locals: impl Iterator<Item = Local>,
+ ) -> Vec<Param> {
+ locals
+ .map(|local| (local, local.source(ctx.db())))
+ .filter(|(_, src)| is_defined_outside_of_body(ctx, self, src))
+ .filter_map(|(local, src)| match src.value {
+ Either::Left(src) => Some((local, src)),
+ Either::Right(_) => {
+ stdx::never!(false, "Local::is_self returned false, but source is SelfParam");
+ None
+ }
+ })
+ .map(|(var, src)| {
+ let usages = LocalUsages::find_local_usages(ctx, var);
+ let ty = var.ty(ctx.db());
+
+ let defined_outside_parent_loop = container_info
+ .parent_loop
+ .as_ref()
+ .map_or(true, |it| it.text_range().contains_range(src.syntax().text_range()));
+
+ let is_copy = ty.is_copy(ctx.db());
+ let has_usages = self.has_usages_after_body(&usages);
+ let requires_mut =
+ !ty.is_mutable_reference() && has_exclusive_usages(ctx, &usages, self);
+ // We can move the value into the function call if it's not used after the call,
+ // if the var is not used but defined outside a loop we are extracting from we can't move it either
+ // as the function will reuse it in the next iteration.
+ let move_local = (!has_usages && defined_outside_parent_loop) || ty.is_reference();
+ Param { var, ty, move_local, requires_mut, is_copy }
+ })
+ .collect()
+ }
+
+ /// Whether any of `usages` occurs after the end of this body.
+ fn has_usages_after_body(&self, usages: &LocalUsages) -> bool {
+ usages.iter().any(|reference| self.precedes_range(reference.range))
+ }
+}
+
+/// An ancestor item kind that may carry generic parameters applicable to the extracted function.
+enum GenericParent {
+ Fn(ast::Fn),
+ Impl(ast::Impl),
+ Trait(ast::Trait),
+}
+
+impl GenericParent {
+ /// The generic parameter list declared on this parent, if any.
+ fn generic_param_list(&self) -> Option<ast::GenericParamList> {
+ match self {
+ GenericParent::Fn(fn_) => fn_.generic_param_list(),
+ GenericParent::Impl(impl_) => impl_.generic_param_list(),
+ GenericParent::Trait(trait_) => trait_.generic_param_list(),
+ }
+ }
+
+ /// The `where` clause declared on this parent, if any.
+ fn where_clause(&self) -> Option<ast::WhereClause> {
+ match self {
+ GenericParent::Fn(fn_) => fn_.where_clause(),
+ GenericParent::Impl(impl_) => impl_.where_clause(),
+ GenericParent::Trait(trait_) => trait_.where_clause(),
+ }
+ }
+}
+
+/// Search `parent`'s ancestors for items with potentially applicable generic parameters
+fn generic_parents(parent: &SyntaxNode) -> Vec<GenericParent> {
+ let mut list = Vec::new();
+ if let Some(parent_item) = parent.ancestors().find_map(ast::Item::cast) {
+ match parent_item {
+ ast::Item::Fn(ref fn_) => {
+ // Skip the assoc-item list node to reach the enclosing impl/trait, if any.
+ if let Some(parent_parent) = parent_item
+ .syntax()
+ .parent()
+ .and_then(|it| it.parent())
+ .and_then(ast::Item::cast)
+ {
+ match parent_parent {
+ ast::Item::Impl(impl_) => list.push(GenericParent::Impl(impl_)),
+ ast::Item::Trait(trait_) => list.push(GenericParent::Trait(trait_)),
+ _ => (),
+ }
+ }
+ // Push the function last so outer params precede its own in the result.
+ list.push(GenericParent::Fn(fn_.clone()));
+ }
+ _ => (),
+ }
+ }
+ list
+}
+
+/// checks if relevant var is used with `&mut` access inside body
+fn has_exclusive_usages(
+ ctx: &AssistContext<'_>,
+ usages: &LocalUsages,
+ body: &FunctionBody,
+) -> bool {
+ // Only usages that fall inside the extracted body are considered.
+ usages
+ .iter()
+ .filter(|reference| body.contains_range(reference.range))
+ .any(|reference| reference_is_exclusive(reference, body, ctx))
+}
+
+/// checks if this reference requires `&mut` access inside node
+fn reference_is_exclusive(
+ reference: &FileReference,
+ node: &dyn HasTokenAtOffset,
+ ctx: &AssistContext<'_>,
+) -> bool {
+ // we directly modify variable with set: `n = 0`, `n += 1`
+ if reference.category == Some(ReferenceCategory::Write) {
+ return true;
+ }
+
+ // we take `&mut` reference to variable: `&mut v`
+ let path = match path_element_of_reference(node, reference) {
+ Some(path) => path,
+ None => return false,
+ };
+
+ // Default to non-exclusive when the analysis cannot decide.
+ expr_require_exclusive_access(ctx, &path).unwrap_or(false)
+}
+
+/// checks if this expr requires `&mut` access, recurses on field access
+fn expr_require_exclusive_access(ctx: &AssistContext<'_>, expr: &ast::Expr) -> Option<bool> {
+ if let ast::Expr::MacroExpr(_) = expr {
+ // FIXME: expand macro and check output for mutable usages of the variable?
+ return None;
+ }
+
+ // Decide based on how the parent node uses this expression.
+ let parent = expr.syntax().parent()?;
+
+ // Assignment: exclusive only when the expression is the left-hand side.
+ if let Some(bin_expr) = ast::BinExpr::cast(parent.clone()) {
+ if matches!(bin_expr.op_kind()?, ast::BinaryOp::Assignment { .. }) {
+ return Some(bin_expr.lhs()?.syntax() == expr.syntax());
+ }
+ return Some(false);
+ }
+
+ // `&mut expr` borrow.
+ if let Some(ref_expr) = ast::RefExpr::cast(parent.clone()) {
+ return Some(ref_expr.mut_token().is_some());
+ }
+
+ // Method call: exclusive when the resolved method takes `&mut self`.
+ if let Some(method_call) = ast::MethodCallExpr::cast(parent.clone()) {
+ let func = ctx.sema.resolve_method_call(&method_call)?;
+ let self_param = func.self_param(ctx.db())?;
+ let access = self_param.access(ctx.db());
+
+ return Some(matches!(access, hir::Access::Exclusive));
+ }
+
+ // Field access: recurse — mutating `a.b` requires exclusive access to `a`.
+ if let Some(field) = ast::FieldExpr::cast(parent) {
+ return expr_require_exclusive_access(ctx, &field.into());
+ }
+
+ Some(false)
+}
+
+/// Abstraction over things that can resolve a text offset to a token
+/// (a whole node, or a `FunctionBody` restricted to its text range).
+trait HasTokenAtOffset {
+ fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken>;
+}
+
+impl HasTokenAtOffset for SyntaxNode {
+ fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken> {
+ SyntaxNode::token_at_offset(self, offset)
+ }
+}
+
+impl HasTokenAtOffset for FunctionBody {
+ fn token_at_offset(&self, offset: TextSize) -> TokenAtOffset<SyntaxToken> {
+ match self {
+ FunctionBody::Expr(expr) => expr.syntax().token_at_offset(offset),
+ // For a span body, delegate to the parent node but drop any token
+ // that falls outside the selected text range.
+ FunctionBody::Span { parent, text_range } => {
+ match parent.syntax().token_at_offset(offset) {
+ TokenAtOffset::None => TokenAtOffset::None,
+ TokenAtOffset::Single(t) => {
+ if text_range.contains_range(t.text_range()) {
+ TokenAtOffset::Single(t)
+ } else {
+ TokenAtOffset::None
+ }
+ }
+ TokenAtOffset::Between(a, b) => {
+ match (
+ text_range.contains_range(a.text_range()),
+ text_range.contains_range(b.text_range()),
+ ) {
+ (true, true) => TokenAtOffset::Between(a, b),
+ (true, false) => TokenAtOffset::Single(a),
+ (false, true) => TokenAtOffset::Single(b),
+ (false, false) => TokenAtOffset::None,
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+/// find relevant `ast::Expr` for reference
+///
+/// # Preconditions
+///
+/// `node` must cover `reference`, that is `node.text_range().contains_range(reference.range)`
+fn path_element_of_reference(
+ node: &dyn HasTokenAtOffset,
+ reference: &FileReference,
+) -> Option<ast::Expr> {
+ // `right_biased` picks the token starting at the offset when between two tokens.
+ let token = node.token_at_offset(reference.range.start()).right_biased().or_else(|| {
+ stdx::never!(false, "cannot find token at variable usage: {:?}", reference);
+ None
+ })?;
+ let path = token.parent_ancestors().find_map(ast::Expr::cast).or_else(|| {
+ stdx::never!(false, "cannot find path parent of variable usage: {:?}", token);
+ None
+ })?;
+ // A variable usage is expected to be a path expression or sit inside a macro.
+ stdx::always!(
+ matches!(path, ast::Expr::PathExpr(_) | ast::Expr::MacroExpr(_)),
+ "unexpected expression type for variable usage: {:?}",
+ path
+ );
+ Some(path)
+}
+
+/// list local variables defined inside `body`
+fn locals_defined_in_body(
+ sema: &Semantics<'_, RootDatabase>,
+ body: &FunctionBody,
+) -> FxIndexSet<Local> {
+ // FIXME: this doesn't work well with macros
+ // see https://github.com/rust-lang/rust-analyzer/pull/7535#discussion_r570048550
+ let mut res = FxIndexSet::default();
+ // Every `IdentPat` inside the body that resolves to a local is a definition site.
+ body.walk_pat(&mut |pat| {
+ if let ast::Pat::IdentPat(pat) = pat {
+ if let Some(local) = sema.to_def(&pat) {
+ res.insert(local);
+ }
+ }
+ });
+ res
+}
+
+/// Returns usage details if local variable is used after(outside of) body
+fn local_outlives_body(
+ ctx: &AssistContext<'_>,
+ body_range: TextRange,
+ local: Local,
+ parent: &SyntaxNode,
+) -> Option<OutlivedLocal> {
+ let usages = LocalUsages::find_local_usages(ctx, local);
+ let mut has_mut_usages = false;
+ let mut any_outlives = false;
+ for usage in usages.iter() {
+ // Only usages starting after the body's end count as "outside".
+ if body_range.end() <= usage.range.start() {
+ has_mut_usages |= reference_is_exclusive(usage, parent, ctx);
+ any_outlives |= true;
+ if has_mut_usages {
+ break; // no need to check more elements we have all the info we wanted
+ }
+ }
+ }
+ if !any_outlives {
+ return None;
+ }
+ Some(OutlivedLocal { local, mut_usage_outside_body: has_mut_usages })
+}
+
+/// checks if the relevant local was defined before(outside of) body
+fn is_defined_outside_of_body(
+ ctx: &AssistContext<'_>,
+ body: &FunctionBody,
+ src: &hir::InFile<Either<ast::IdentPat, ast::SelfParam>>,
+) -> bool {
+ // The definition must be in the current file and not inside the extracted range.
+ src.file_id.original_file(ctx.db()) == ctx.file_id()
+ && !body.contains_node(either_syntax(&src.value))
+}
+
+/// Syntax node of a local's definition site, whether it is a pattern or a `self` param.
+fn either_syntax(value: &Either<ast::IdentPat, ast::SelfParam>) -> &SyntaxNode {
+ match value {
+ Either::Left(pat) => pat.syntax(),
+ Either::Right(it) => it.syntax(),
+ }
+}
+
+/// find where to put extracted function definition
+///
+/// Function should be put right after returned node
+fn node_to_insert_after(body: &FunctionBody, anchor: Anchor) -> Option<SyntaxNode> {
+ let node = body.node();
+ let mut ancestors = node.ancestors().peekable();
+ let mut last_ancestor = None;
+ // Walk upwards; the last ancestor visited before a stop condition is the insertion point.
+ while let Some(next_ancestor) = ancestors.next() {
+ match next_ancestor.kind() {
+ SyntaxKind::SOURCE_FILE => break,
++ // When extracting out of a trait impl, prefer anchoring after the last
++ // member of the matching inherent (non-trait) impl, if one exists.
++ SyntaxKind::IMPL => {
++ if body.extracted_from_trait_impl() && matches!(anchor, Anchor::Method) {
++ let impl_node = find_non_trait_impl(&next_ancestor);
++ if let target_node @ Some(_) = impl_node.as_ref().and_then(last_impl_member) {
++ return target_node;
++ }
++ }
++ }
+ SyntaxKind::ITEM_LIST if !matches!(anchor, Anchor::Freestanding) => continue,
+ SyntaxKind::ITEM_LIST => {
+ if ancestors.peek().map(SyntaxNode::kind) == Some(SyntaxKind::MODULE) {
+ break;
+ }
+ }
+ SyntaxKind::ASSOC_ITEM_LIST if !matches!(anchor, Anchor::Method) => continue,
+ SyntaxKind::ASSOC_ITEM_LIST if body.extracted_from_trait_impl() => continue,
+ SyntaxKind::ASSOC_ITEM_LIST => {
+ if ancestors.peek().map(SyntaxNode::kind) == Some(SyntaxKind::IMPL) {
+ break;
+ }
+ }
+ _ => (),
+ }
+ last_ancestor = Some(next_ancestor);
+ }
+ last_ancestor
+}
+
++/// Given a trait impl node, finds a sibling inherent (non-trait) impl for the same self type.
++fn find_non_trait_impl(trait_impl: &SyntaxNode) -> Option<ast::Impl> {
++ let as_impl = ast::Impl::cast(trait_impl.clone())?;
++ let impl_type = Some(impl_type_name(&as_impl)?);
++
++ // Note: self types are compared textually via `impl_type_name`.
++ let sibblings = trait_impl.parent()?.children();
++ sibblings
++ .filter_map(ast::Impl::cast)
++ .find(|s| impl_type_name(s) == impl_type && !is_trait_impl(s))
++}
++
++/// The syntax node of the last associated item in `impl_node`, if any.
++fn last_impl_member(impl_node: &ast::Impl) -> Option<SyntaxNode> {
++ let last_child = impl_node.assoc_item_list()?.assoc_items().last()?;
++ Some(last_child.syntax().clone())
++}
++
++/// Whether `node` is a trait impl (`impl Trait for Ty`) rather than an inherent impl.
++fn is_trait_impl(node: &ast::Impl) -> bool {
++ node.trait_().is_some()
++}
++
++/// Textual form of the impl's self type, used to match impls of the same type.
++fn impl_type_name(impl_node: &ast::Impl) -> Option<String> {
++ Some(impl_node.self_ty()?.to_string())
++}
++
+/// Renders the call to the extracted function, including any `let` binding for
+/// outliving locals, `.await`, flow-handler wrapping, and trailing `,`/`;`.
+fn make_call(ctx: &AssistContext<'_>, fun: &Function, indent: IndentLevel) -> String {
+ let ret_ty = fun.return_type(ctx);
+
+ let args = make::arg_list(fun.params.iter().map(|param| param.to_arg(ctx)));
+ let name = fun.name.clone();
+ // Method call when the extracted code used `self`, plain call otherwise.
+ let mut call_expr = if fun.self_param.is_some() {
+ let self_arg = make::expr_path(make::ext::ident_path("self"));
+ make::expr_method_call(self_arg, name, args)
+ } else {
+ let func = make::expr_path(make::path_unqualified(make::path_segment(name)));
+ make::expr_call(func, args)
+ };
+
+ let handler = FlowHandler::from_ret_ty(fun, &ret_ty);
+
+ if fun.control_flow.is_async {
+ call_expr = make::expr_await(call_expr);
+ }
+ let expr = handler.make_call_expr(call_expr).indent(indent);
+
+ let mut_modifier = |var: &OutlivedLocal| if var.mut_usage_outside_body { "mut " } else { "" };
+
+ let mut buf = String::new();
+ // Bind outliving locals: none, a single `let x = ...`, or a tuple destructuring.
+ match fun.outliving_locals.as_slice() {
+ [] => {}
+ [var] => {
+ let modifier = mut_modifier(var);
+ let name = var.local.name(ctx.db());
+ format_to!(buf, "let {modifier}{name} = ")
+ }
+ vars => {
+ buf.push_str("let (");
+ let bindings = vars.iter().format_with(", ", |local, f| {
+ let modifier = mut_modifier(local);
+ let name = local.local.name(ctx.db());
+ f(&format_args!("{modifier}{name}"))
+ });
+ format_to!(buf, "{bindings}");
+ buf.push_str(") = ");
+ }
+ }
+
+ format_to!(buf, "{expr}");
+ // Inside a match arm lacking a trailing comma, emit `,`; otherwise a `;`
+ // when the call is a unit-valued statement that needs one.
+ let insert_comma = fun
+ .body
+ .parent()
+ .and_then(ast::MatchArm::cast)
+ .map_or(false, |it| it.comma_token().is_none());
+ if insert_comma {
+ buf.push(',');
+ } else if fun.ret_ty.is_unit() && (!fun.outliving_locals.is_empty() || !expr.is_block_like()) {
+ buf.push(';');
+ }
+ buf
+}
+
+/// Strategy for propagating external control flow (return/break/continue/`?`)
+/// from the extracted function back to the call site.
+enum FlowHandler {
+ None,
+ Try { kind: TryKind },
+ If { action: FlowKind },
+ IfOption { action: FlowKind },
+ MatchOption { none: FlowKind },
+ MatchResult { err: FlowKind },
+}
+
+impl FlowHandler {
+ /// Picks a handler from the body's control-flow kind and whether the
+ /// extracted function's value type is unit.
+ fn from_ret_ty(fun: &Function, ret_ty: &FunType) -> FlowHandler {
+ match &fun.control_flow.kind {
+ None => FlowHandler::None,
+ Some(flow_kind) => {
+ let action = flow_kind.clone();
+ if *ret_ty == FunType::Unit {
+ match flow_kind {
+ FlowKind::Return(None)
+ | FlowKind::Break(_, None)
+ | FlowKind::Continue(_) => FlowHandler::If { action },
+ FlowKind::Return(_) | FlowKind::Break(_, _) => {
+ FlowHandler::IfOption { action }
+ }
+ FlowKind::Try { kind } => FlowHandler::Try { kind: kind.clone() },
+ }
+ } else {
+ match flow_kind {
+ FlowKind::Return(None)
+ | FlowKind::Break(_, None)
+ | FlowKind::Continue(_) => FlowHandler::MatchOption { none: action },
+ FlowKind::Return(_) | FlowKind::Break(_, _) => {
+ FlowHandler::MatchResult { err: action }
+ }
+ FlowKind::Try { kind } => FlowHandler::Try { kind: kind.clone() },
+ }
+ }
+ }
+ }
+ }
+
+ /// Wraps the call expression so that the extracted control flow is
+ /// re-performed at the call site.
+ fn make_call_expr(&self, call_expr: ast::Expr) -> ast::Expr {
+ match self {
+ FlowHandler::None => call_expr,
+ FlowHandler::Try { kind: _ } => make::expr_try(call_expr),
+ // if let ControlFlow::Break(_) = call() { <action> }
+ FlowHandler::If { action } => {
+ let action = action.make_result_handler(None);
+ let stmt = make::expr_stmt(action);
+ let block = make::block_expr(iter::once(stmt.into()), None);
+ let controlflow_break_path = make::path_from_text("ControlFlow::Break");
+ let condition = make::expr_let(
+ make::tuple_struct_pat(
+ controlflow_break_path,
+ iter::once(make::wildcard_pat().into()),
+ )
+ .into(),
+ call_expr,
+ );
+ make::expr_if(condition.into(), block, None)
+ }
+ // if let Some(value) = call() { <action with value> }
+ FlowHandler::IfOption { action } => {
+ let path = make::ext::ident_path("Some");
+ let value_pat = make::ext::simple_ident_pat(make::name("value"));
+ let pattern = make::tuple_struct_pat(path, iter::once(value_pat.into()));
+ let cond = make::expr_let(pattern.into(), call_expr);
+ let value = make::expr_path(make::ext::ident_path("value"));
+ let action_expr = action.make_result_handler(Some(value));
+ let action_stmt = make::expr_stmt(action_expr);
+ let then = make::block_expr(iter::once(action_stmt.into()), None);
+ make::expr_if(cond.into(), then, None)
+ }
+ // match call() { Some(value) => value, None => <action> }
+ FlowHandler::MatchOption { none } => {
+ let some_name = "value";
+
+ let some_arm = {
+ let path = make::ext::ident_path("Some");
+ let value_pat = make::ext::simple_ident_pat(make::name(some_name));
+ let pat = make::tuple_struct_pat(path, iter::once(value_pat.into()));
+ let value = make::expr_path(make::ext::ident_path(some_name));
+ make::match_arm(iter::once(pat.into()), None, value)
+ };
+ let none_arm = {
+ let path = make::ext::ident_path("None");
+ let pat = make::path_pat(path);
+ make::match_arm(iter::once(pat), None, none.make_result_handler(None))
+ };
+ let arms = make::match_arm_list(vec![some_arm, none_arm]);
+ make::expr_match(call_expr, arms)
+ }
+ // match call() { Ok(value) => value, Err(value) => <action with value> }
+ FlowHandler::MatchResult { err } => {
+ let ok_name = "value";
+ let err_name = "value";
+
+ let ok_arm = {
+ let path = make::ext::ident_path("Ok");
+ let value_pat = make::ext::simple_ident_pat(make::name(ok_name));
+ let pat = make::tuple_struct_pat(path, iter::once(value_pat.into()));
+ let value = make::expr_path(make::ext::ident_path(ok_name));
+ make::match_arm(iter::once(pat.into()), None, value)
+ };
+ let err_arm = {
+ let path = make::ext::ident_path("Err");
+ let value_pat = make::ext::simple_ident_pat(make::name(err_name));
+ let pat = make::tuple_struct_pat(path, iter::once(value_pat.into()));
+ let value = make::expr_path(make::ext::ident_path(err_name));
+ make::match_arm(
+ iter::once(pat.into()),
+ None,
+ err.make_result_handler(Some(value)),
+ )
+ };
+ let arms = make::match_arm_list(vec![ok_arm, err_arm]);
+ make::expr_match(call_expr, arms)
+ }
+ }
+ }
+}
+
+/// Builds a path expression referring to `var` by name.
+fn path_expr_from_local(ctx: &AssistContext<'_>, var: Local) -> ast::Expr {
+ let name = var.name(ctx.db()).to_string();
+ make::expr_path(make::ext::ident_path(&name))
+}
+
+/// Renders the full extracted function definition (modifiers, name, generics,
+/// params, return type, where clause, body) as source text.
+fn format_function(
+ ctx: &AssistContext<'_>,
+ module: hir::Module,
+ fun: &Function,
+ old_indent: IndentLevel,
+ new_indent: IndentLevel,
+) -> String {
+ let mut fn_def = String::new();
+
+ let fun_name = &fun.name;
+ let params = fun.make_param_list(ctx, module);
+ let ret_ty = fun.make_ret_ty(ctx, module);
+ let body = make_body(ctx, old_indent, new_indent, fun);
+ let const_kw = if fun.mods.is_const { "const " } else { "" };
+ let async_kw = if fun.control_flow.is_async { "async " } else { "" };
+ let unsafe_kw = if fun.control_flow.is_unsafe { "unsafe " } else { "" };
+ let (generic_params, where_clause) = make_generic_params_and_where_clause(ctx, fun);
+
+ format_to!(fn_def, "\n\n{new_indent}{const_kw}{async_kw}{unsafe_kw}");
+ // `$0` places the rename snippet cursor on the function name when supported.
+ match ctx.config.snippet_cap {
+ Some(_) => format_to!(fn_def, "fn $0{fun_name}"),
+ None => format_to!(fn_def, "fn {fun_name}"),
+ }
+
+ if let Some(generic_params) = generic_params {
+ format_to!(fn_def, "{generic_params}");
+ }
+
+ format_to!(fn_def, "{params}");
+
+ if let Some(ret_ty) = ret_ty {
+ format_to!(fn_def, " {ret_ty}");
+ }
+
+ if let Some(where_clause) = where_clause {
+ format_to!(fn_def, " {where_clause}");
+ }
+
+ format_to!(fn_def, " {body}");
+
+ fn_def
+}
+
+/// Builds the generic parameter list and where clause for the extracted function,
+/// keeping only items referencing type params the body or params actually use.
+fn make_generic_params_and_where_clause(
+ ctx: &AssistContext<'_>,
+ fun: &Function,
+) -> (Option<ast::GenericParamList>, Option<ast::WhereClause>) {
+ let used_type_params = fun.type_params(ctx);
+
+ let generic_param_list = make_generic_param_list(ctx, fun, &used_type_params);
+ let where_clause = make_where_clause(ctx, fun, &used_type_params);
+
+ (generic_param_list, where_clause)
+}
+
+/// Filters the parents' generic params down to those the extracted function needs;
+/// returns `None` when the resulting list would be empty.
+fn make_generic_param_list(
+ ctx: &AssistContext<'_>,
+ fun: &Function,
+ used_type_params: &[TypeParam],
+) -> Option<ast::GenericParamList> {
+ let mut generic_params = fun
+ .mods
+ .generic_param_lists
+ .iter()
+ .flat_map(|parent_params| {
+ parent_params
+ .generic_params()
+ .filter(|param| param_is_required(ctx, param, used_type_params))
+ })
+ .peekable();
+
+ // Avoid emitting an empty `<>`.
+ if generic_params.peek().is_some() {
+ Some(make::generic_param_list(generic_params))
+ } else {
+ None
+ }
+}
+
+/// Whether `param` is a type param the extracted function actually uses.
+/// Const and lifetime params are never copied over.
+fn param_is_required(
+ ctx: &AssistContext<'_>,
+ param: &ast::GenericParam,
+ used_type_params: &[TypeParam],
+) -> bool {
+ match param {
+ ast::GenericParam::ConstParam(_) | ast::GenericParam::LifetimeParam(_) => false,
+ ast::GenericParam::TypeParam(type_param) => match &ctx.sema.to_def(type_param) {
+ Some(def) => used_type_params.contains(def),
+ _ => false,
+ },
+ }
+}
+
+/// Filters the parents' where predicates down to those constraining used type params;
+/// returns `None` when no predicate survives.
+fn make_where_clause(
+ ctx: &AssistContext<'_>,
+ fun: &Function,
+ used_type_params: &[TypeParam],
+) -> Option<ast::WhereClause> {
+ let mut predicates = fun
+ .mods
+ .where_clauses
+ .iter()
+ .flat_map(|parent_where_clause| {
+ parent_where_clause
+ .predicates()
+ .filter(|pred| pred_is_required(ctx, pred, used_type_params))
+ })
+ .peekable();
+
+ // Avoid emitting an empty `where`.
+ if predicates.peek().is_some() {
+ Some(make::where_clause(predicates))
+ } else {
+ None
+ }
+}
+
+/// Whether `pred` constrains one of the type params the extracted function uses.
+fn pred_is_required(
+ ctx: &AssistContext<'_>,
+ pred: &ast::WherePred,
+ used_type_params: &[TypeParam],
+) -> bool {
+ match resolved_type_param(ctx, pred) {
+ Some(it) => used_type_params.contains(&it),
+ None => false,
+ }
+}
+
+/// Resolves the constrained type of a where predicate to a type param, if it is one.
+fn resolved_type_param(ctx: &AssistContext<'_>, pred: &ast::WherePred) -> Option<TypeParam> {
+ let path = match pred.ty()? {
+ ast::Type::PathType(path_type) => path_type.path(),
+ _ => None,
+ }?;
+
+ match ctx.sema.resolve_path(&path)? {
+ PathResolution::TypeParam(type_param) => Some(type_param),
+ _ => None,
+ }
+}
+
+impl Function {
+ /// Collect all the `TypeParam`s used in the `body` and `params`.
+ fn type_params(&self, ctx: &AssistContext<'_>) -> Vec<TypeParam> {
+ let type_params_in_descendant_paths =
+ self.body.descendant_paths().filter_map(|it| match ctx.sema.resolve_path(&it) {
+ Some(PathResolution::TypeParam(type_param)) => Some(type_param),
+ _ => None,
+ });
+ let type_params_in_params = self.params.iter().filter_map(|p| p.ty.as_type_param(ctx.db()));
+ type_params_in_descendant_paths.chain(type_params_in_params).collect()
+ }
+
+ /// Builds the parameter list for the extracted function (including `self` if needed).
+ fn make_param_list(&self, ctx: &AssistContext<'_>, module: hir::Module) -> ast::ParamList {
+ let self_param = self.self_param.clone();
+ let params = self.params.iter().map(|param| param.to_param(ctx, module));
+ make::param_list(self_param, params)
+ }
+
+ /// Builds the return type annotation, wrapping the value type per the flow handler
+ /// (e.g. `Option<T>`, `Result<T, E>`, `ControlFlow<()>`); `None` for plain unit.
+ fn make_ret_ty(&self, ctx: &AssistContext<'_>, module: hir::Module) -> Option<ast::RetType> {
+ let fun_ty = self.return_type(ctx);
+ let handler = if self.mods.is_in_tail {
+ FlowHandler::None
+ } else {
+ FlowHandler::from_ret_ty(self, &fun_ty)
+ };
+ let ret_ty = match &handler {
+ FlowHandler::None => {
+ if matches!(fun_ty, FunType::Unit) {
+ return None;
+ }
+ fun_ty.make_ty(ctx, module)
+ }
+ FlowHandler::Try { kind: TryKind::Option } => {
+ make::ext::ty_option(fun_ty.make_ty(ctx, module))
+ }
+ FlowHandler::Try { kind: TryKind::Result { ty: parent_ret_ty } } => {
+ // Reuse the container's error type (second type argument) when available.
+ let handler_ty = parent_ret_ty
+ .type_arguments()
+ .nth(1)
+ .map(|ty| make_ty(&ty, ctx, module))
+ .unwrap_or_else(make::ty_placeholder);
+ make::ext::ty_result(fun_ty.make_ty(ctx, module), handler_ty)
+ }
+ FlowHandler::If { .. } => make::ty("ControlFlow<()>"),
+ FlowHandler::IfOption { action } => {
+ let handler_ty = action
+ .expr_ty(ctx)
+ .map(|ty| make_ty(&ty, ctx, module))
+ .unwrap_or_else(make::ty_placeholder);
+ make::ext::ty_option(handler_ty)
+ }
+ FlowHandler::MatchOption { .. } => make::ext::ty_option(fun_ty.make_ty(ctx, module)),
+ FlowHandler::MatchResult { err } => {
+ let handler_ty = err
+ .expr_ty(ctx)
+ .map(|ty| make_ty(&ty, ctx, module))
+ .unwrap_or_else(make::ty_placeholder);
+ make::ext::ty_result(fun_ty.make_ty(ctx, module), handler_ty)
+ }
+ };
+ Some(make::ret_type(ret_ty))
+ }
+}
+
+impl FunType {
+ /// Renders this function value type as an AST type node
+ /// (unit, a single type, or a tuple of types).
+ fn make_ty(&self, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type {
+ match self {
+ FunType::Unit => make::ty_unit(),
+ FunType::Single(ty) => make_ty(ty, ctx, module),
+ FunType::Tuple(types) => match types.as_slice() {
+ // Degenerate tuples should not occur; fall back gracefully if they do.
+ [] => {
+ stdx::never!("tuple type with 0 elements");
+ make::ty_unit()
+ }
+ [ty] => {
+ stdx::never!("tuple type with 1 element");
+ make_ty(ty, ctx, module)
+ }
+ types => {
+ let types = types.iter().map(|ty| make_ty(ty, ctx, module));
+ make::ty_tuple(types)
+ }
+ },
+ }
+ }
+}
+
+/// Builds the extracted function's block body: re-indents the selected code,
+/// rewrites control-flow/param usages, adds a tail expression for outliving
+/// locals, and wraps the tail per the flow handler.
+fn make_body(
+ ctx: &AssistContext<'_>,
+ old_indent: IndentLevel,
+ new_indent: IndentLevel,
+ fun: &Function,
+) -> ast::BlockExpr {
+ let ret_ty = fun.return_type(ctx);
+ let handler = if fun.mods.is_in_tail {
+ FlowHandler::None
+ } else {
+ FlowHandler::from_ret_ty(fun, &ret_ty)
+ };
+
+ let block = match &fun.body {
+ FunctionBody::Expr(expr) => {
+ let expr = rewrite_body_segment(ctx, &fun.params, &handler, expr.syntax());
+ let expr = ast::Expr::cast(expr).unwrap();
+ match expr {
+ ast::Expr::BlockExpr(block) => {
+ // If the extracted expression is itself a block, there is no need to wrap it inside another block.
+ let block = block.dedent(old_indent);
+ // Recreate the block for formatting consistency with other extracted functions.
+ make::block_expr(block.statements(), block.tail_expr())
+ }
+ _ => {
+ let expr = expr.dedent(old_indent).indent(IndentLevel(1));
+
+ make::block_expr(Vec::new(), Some(expr))
+ }
+ }
+ }
+ FunctionBody::Span { parent, text_range } => {
+ // Collect the selected children (nodes rewritten, tokens kept as-is).
+ let mut elements: Vec<_> = parent
+ .syntax()
+ .children_with_tokens()
+ .filter(|it| text_range.contains_range(it.text_range()))
+ .map(|it| match &it {
+ syntax::NodeOrToken::Node(n) => syntax::NodeOrToken::Node(
+ rewrite_body_segment(ctx, &fun.params, &handler, n),
+ ),
+ _ => it,
+ })
+ .collect();
+
+ // The last selected element becomes the tail expression if it is one.
+ let mut tail_expr = match &elements.last() {
+ Some(syntax::NodeOrToken::Node(node)) if ast::Expr::can_cast(node.kind()) => {
+ ast::Expr::cast(node.clone())
+ }
+ _ => None,
+ };
+
+ // Otherwise, synthesize a tail expression returning the outliving locals.
+ match tail_expr {
+ Some(_) => {
+ elements.pop();
+ }
+ None => match fun.outliving_locals.as_slice() {
+ [] => {}
+ [var] => {
+ tail_expr = Some(path_expr_from_local(ctx, var.local));
+ }
+ vars => {
+ let exprs = vars.iter().map(|var| path_expr_from_local(ctx, var.local));
+ let expr = make::expr_tuple(exprs);
+ tail_expr = Some(expr);
+ }
+ },
+ };
+
+ // Re-indent each statement from the old level to one level inside the new fn.
+ let body_indent = IndentLevel(1);
+ let elements = elements
+ .into_iter()
+ .map(|node_or_token| match &node_or_token {
+ syntax::NodeOrToken::Node(node) => match ast::Stmt::cast(node.clone()) {
+ Some(stmt) => {
+ let indented = stmt.dedent(old_indent).indent(body_indent);
+ let ast_node = indented.syntax().clone_subtree();
+ syntax::NodeOrToken::Node(ast_node)
+ }
+ _ => node_or_token,
+ },
+ _ => node_or_token,
+ })
+ .collect::<Vec<SyntaxElement>>();
+ let tail_expr = tail_expr.map(|expr| expr.dedent(old_indent).indent(body_indent));
+
+ make::hacky_block_expr_with_comments(elements, tail_expr)
+ }
+ };
+
+ // Wrap the tail expression according to the chosen flow handler.
+ let block = match &handler {
+ FlowHandler::None => block,
+ FlowHandler::Try { kind } => {
+ let block = with_default_tail_expr(block, make::expr_unit());
+ map_tail_expr(block, |tail_expr| {
+ let constructor = match kind {
+ TryKind::Option => "Some",
+ TryKind::Result { .. } => "Ok",
+ };
+ let func = make::expr_path(make::ext::ident_path(constructor));
+ let args = make::arg_list(iter::once(tail_expr));
+ make::expr_call(func, args)
+ })
+ }
+ FlowHandler::If { .. } => {
+ let controlflow_continue = make::expr_call(
+ make::expr_path(make::path_from_text("ControlFlow::Continue")),
+ make::arg_list(iter::once(make::expr_unit())),
+ );
+ with_tail_expr(block, controlflow_continue)
+ }
+ FlowHandler::IfOption { .. } => {
+ let none = make::expr_path(make::ext::ident_path("None"));
+ with_tail_expr(block, none)
+ }
+ FlowHandler::MatchOption { .. } => map_tail_expr(block, |tail_expr| {
+ let some = make::expr_path(make::ext::ident_path("Some"));
+ let args = make::arg_list(iter::once(tail_expr));
+ make::expr_call(some, args)
+ }),
+ FlowHandler::MatchResult { .. } => map_tail_expr(block, |tail_expr| {
+ let ok = make::expr_path(make::ext::ident_path("Ok"));
+ let args = make::arg_list(iter::once(tail_expr));
+ make::expr_call(ok, args)
+ }),
+ };
+
+ block.indent(new_indent)
+}
+
+fn map_tail_expr(block: ast::BlockExpr, f: impl FnOnce(ast::Expr) -> ast::Expr) -> ast::BlockExpr {
+ let tail_expr = match block.tail_expr() {
+ Some(tail_expr) => tail_expr,
+ None => return block,
+ };
+ make::block_expr(block.statements(), Some(f(tail_expr)))
+}
+
+fn with_default_tail_expr(block: ast::BlockExpr, tail_expr: ast::Expr) -> ast::BlockExpr {
+ match block.tail_expr() {
+ Some(_) => block,
+ None => make::block_expr(block.statements(), Some(tail_expr)),
+ }
+}
+
+fn with_tail_expr(block: ast::BlockExpr, tail_expr: ast::Expr) -> ast::BlockExpr {
+ let stmt_tail = block.tail_expr().map(|expr| make::expr_stmt(expr).into());
+ let stmts = block.statements().chain(stmt_tail);
+ make::block_expr(stmts, Some(tail_expr))
+}
+
+fn format_type(ty: &hir::Type, ctx: &AssistContext<'_>, module: hir::Module) -> String {
+ ty.display_source_code(ctx.db(), module.into()).ok().unwrap_or_else(|| "_".to_string())
+}
+
+fn make_ty(ty: &hir::Type, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type {
+ let ty_str = format_type(ty, ctx, module);
+ make::ty(&ty_str)
+}
+
+fn rewrite_body_segment(
+ ctx: &AssistContext<'_>,
+ params: &[Param],
+ handler: &FlowHandler,
+ syntax: &SyntaxNode,
+) -> SyntaxNode {
+ let syntax = fix_param_usages(ctx, params, syntax);
+ update_external_control_flow(handler, &syntax);
+ syntax
+}
+
/// change all usages to account for added `&`/`&mut` for some params
///
/// For every by-reference param, usages inside `syntax` are rewritten so the
/// extracted body still type-checks: redundant `&`/`&mut` on usages are
/// stripped, and bare usages outside method-call/field positions gain a `*`.
/// Returns a mutable clone of `syntax` with the edits applied.
fn fix_param_usages(ctx: &AssistContext<'_>, params: &[Param], syntax: &SyntaxNode) -> SyntaxNode {
    // (by-ref param, its usage expressions inside `syntax`), collected before editing.
    let mut usages_for_param: Vec<(&Param, Vec<ast::Expr>)> = Vec::new();

    // TreeMutator produces a mutable clone of `syntax`; usages must be mapped
    // into that clone (via `make_mut`) before any `ted` edits happen below.
    let tm = TreeMutator::new(syntax);

    for param in params {
        // Only params passed by `&`/`&mut` need their usages adjusted.
        if !param.kind().is_ref() {
            continue;
        }

        let usages = LocalUsages::find_local_usages(ctx, param.var);
        let usages = usages
            .iter()
            // Restrict to references inside the extracted range.
            .filter(|reference| syntax.text_range().contains_range(reference.range))
            .filter_map(|reference| path_element_of_reference(syntax, reference))
            .map(|expr| tm.make_mut(&expr));

        usages_for_param.push((param, usages.collect()));
    }

    let res = tm.make_syntax_mut(syntax);

    for (param, usages) in usages_for_param {
        for usage in usages {
            // Classify the usage by its nearest enclosing expression.
            match usage.syntax().ancestors().skip(1).find_map(ast::Expr::cast) {
                Some(ast::Expr::MethodCallExpr(_) | ast::Expr::FieldExpr(_)) => {
                    // do nothing: `n.foo()` / `n.f` work unchanged through auto-ref/deref
                }
                Some(ast::Expr::RefExpr(node))
                    if param.kind() == ParamKind::MutRef && node.mut_token().is_some() =>
                {
                    // `&mut n` -> `n`: the param is already `&mut`.
                    ted::replace(node.syntax(), node.expr().unwrap().syntax());
                }
                Some(ast::Expr::RefExpr(node))
                    if param.kind() == ParamKind::SharedRef && node.mut_token().is_none() =>
                {
                    // `&n` -> `n`: the param is already a shared reference.
                    ted::replace(node.syntax(), node.expr().unwrap().syntax());
                }
                Some(_) | None => {
                    // Any other context needs an explicit deref: `n` -> `*n`.
                    let p = &make::expr_prefix(T![*], usage.clone()).clone_for_update();
                    ted::replace(usage.syntax(), p.syntax())
                }
            }
        }
    }

    res
}
+
+fn update_external_control_flow(handler: &FlowHandler, syntax: &SyntaxNode) {
+ let mut nested_loop = None;
+ let mut nested_scope = None;
+ for event in syntax.preorder() {
+ match event {
+ WalkEvent::Enter(e) => match e.kind() {
+ SyntaxKind::LOOP_EXPR | SyntaxKind::WHILE_EXPR | SyntaxKind::FOR_EXPR => {
+ if nested_loop.is_none() {
+ nested_loop = Some(e.clone());
+ }
+ }
+ SyntaxKind::FN
+ | SyntaxKind::CONST
+ | SyntaxKind::STATIC
+ | SyntaxKind::IMPL
+ | SyntaxKind::MODULE => {
+ if nested_scope.is_none() {
+ nested_scope = Some(e.clone());
+ }
+ }
+ _ => {}
+ },
+ WalkEvent::Leave(e) => {
+ if nested_scope.is_none() {
+ if let Some(expr) = ast::Expr::cast(e.clone()) {
+ match expr {
+ ast::Expr::ReturnExpr(return_expr) if nested_scope.is_none() => {
+ let expr = return_expr.expr();
+ if let Some(replacement) = make_rewritten_flow(handler, expr) {
+ ted::replace(return_expr.syntax(), replacement.syntax())
+ }
+ }
+ ast::Expr::BreakExpr(break_expr) if nested_loop.is_none() => {
+ let expr = break_expr.expr();
+ if let Some(replacement) = make_rewritten_flow(handler, expr) {
+ ted::replace(break_expr.syntax(), replacement.syntax())
+ }
+ }
+ ast::Expr::ContinueExpr(continue_expr) if nested_loop.is_none() => {
+ if let Some(replacement) = make_rewritten_flow(handler, None) {
+ ted::replace(continue_expr.syntax(), replacement.syntax())
+ }
+ }
+ _ => {
+ // do nothing
+ }
+ }
+ }
+ }
+
+ if nested_loop.as_ref() == Some(&e) {
+ nested_loop = None;
+ }
+ if nested_scope.as_ref() == Some(&e) {
+ nested_scope = None;
+ }
+ }
+ };
+ }
+}
+
+fn make_rewritten_flow(handler: &FlowHandler, arg_expr: Option<ast::Expr>) -> Option<ast::Expr> {
+ let value = match handler {
+ FlowHandler::None | FlowHandler::Try { .. } => return None,
+ FlowHandler::If { .. } => make::expr_call(
+ make::expr_path(make::path_from_text("ControlFlow::Break")),
+ make::arg_list(iter::once(make::expr_unit())),
+ ),
+ FlowHandler::IfOption { .. } => {
+ let expr = arg_expr.unwrap_or_else(|| make::expr_tuple(Vec::new()));
+ let args = make::arg_list(iter::once(expr));
+ make::expr_call(make::expr_path(make::ext::ident_path("Some")), args)
+ }
+ FlowHandler::MatchOption { .. } => make::expr_path(make::ext::ident_path("None")),
+ FlowHandler::MatchResult { .. } => {
+ let expr = arg_expr.unwrap_or_else(|| make::expr_tuple(Vec::new()));
+ let args = make::arg_list(iter::once(expr));
+ make::expr_call(make::expr_path(make::ext::ident_path("Err")), args)
+ }
+ };
+ Some(make::expr_return(Some(value)).clone_for_update())
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn no_args_from_binary_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ foo($01 + 1$0);
+}
+"#,
+ r#"
+fn foo() {
+ foo(fun_name());
+}
+
+fn $0fun_name() -> i32 {
+ 1 + 1
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_binary_expr_in_module() {
+ check_assist(
+ extract_function,
+ r#"
+mod bar {
+ fn foo() {
+ foo($01 + 1$0);
+ }
+}
+"#,
+ r#"
+mod bar {
+ fn foo() {
+ foo(fun_name());
+ }
+
+ fn $0fun_name() -> i32 {
+ 1 + 1
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_binary_expr_indented() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0{ 1 + 1 }$0;
+}
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() -> i32 {
+ 1 + 1
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_stmt_with_last_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i32 {
+ let k = 1;
+ $0let m = 1;
+ m + 1$0
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ let k = 1;
+ fun_name()
+}
+
+fn $0fun_name() -> i32 {
+ let m = 1;
+ m + 1
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_stmt_unit() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let k = 3;
+ $0let m = 1;
+ let n = m + 1;$0
+ let g = 5;
+}
+"#,
+ r#"
+fn foo() {
+ let k = 3;
+ fun_name();
+ let g = 5;
+}
+
+fn $0fun_name() {
+ let m = 1;
+ let n = m + 1;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_if() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0if true { }$0
+}
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ if true { }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_if_else() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i32 {
+ $0if true { 1 } else { 2 }$0
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ fun_name()
+}
+
+fn $0fun_name() -> i32 {
+ if true { 1 } else { 2 }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_if_let_else() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i32 {
+ $0if let true = false { 1 } else { 2 }$0
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ fun_name()
+}
+
+fn $0fun_name() -> i32 {
+ if let true = false { 1 } else { 2 }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_match() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i32 {
+ $0match true {
+ true => 1,
+ false => 2,
+ }$0
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ fun_name()
+}
+
+fn $0fun_name() -> i32 {
+ match true {
+ true => 1,
+ false => 2,
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_while() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0while true { }$0
+}
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ while true { }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_for() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0for v in &[0, 1] { }$0
+}
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ for v in &[0, 1] { }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_loop_unit() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0loop {
+ let m = 1;
+ }$0
+}
+"#,
+ r#"
+fn foo() {
+ fun_name()
+}
+
+fn $0fun_name() -> ! {
+ loop {
+ let m = 1;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_loop_with_return() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let v = $0loop {
+ let m = 1;
+ break m;
+ }$0;
+}
+"#,
+ r#"
+fn foo() {
+ let v = fun_name();
+}
+
+fn $0fun_name() -> i32 {
+ loop {
+ let m = 1;
+ break m;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_args_from_match() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let v: i32 = $0match Some(1) {
+ Some(x) => x,
+ None => 0,
+ }$0;
+}
+"#,
+ r#"
+fn foo() {
+ let v: i32 = fun_name();
+}
+
+fn $0fun_name() -> i32 {
+ match Some(1) {
+ Some(x) => x,
+ None => 0,
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_partial_block_single_line() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let n = 1;
+ let mut v = $0n * n;$0
+ v += 1;
+}
+"#,
+ r#"
+fn foo() {
+ let n = 1;
+ let mut v = fun_name(n);
+ v += 1;
+}
+
+fn $0fun_name(n: i32) -> i32 {
+ let mut v = n * n;
+ v
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_partial_block() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let m = 2;
+ let n = 1;
+ let mut v = m $0* n;
+ let mut w = 3;$0
+ v += 1;
+ w += 1;
+}
+"#,
+ r#"
+fn foo() {
+ let m = 2;
+ let n = 1;
+ let (mut v, mut w) = fun_name(m, n);
+ v += 1;
+ w += 1;
+}
+
+fn $0fun_name(m: i32, n: i32) -> (i32, i32) {
+ let mut v = m * n;
+ let mut w = 3;
+ (v, w)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn argument_form_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ $0n+2$0
+}
+"#,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ fun_name(n)
+}
+
+fn $0fun_name(n: u32) -> u32 {
+ n+2
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn argument_used_twice_form_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ $0n+n$0
+}
+"#,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ fun_name(n)
+}
+
+fn $0fun_name(n: u32) -> u32 {
+ n+n
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn two_arguments_form_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ let m = 3;
+ $0n+n*m$0
+}
+"#,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ let m = 3;
+ fun_name(n, m)
+}
+
+fn $0fun_name(n: u32, m: u32) -> u32 {
+ n+n*m
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn argument_and_locals() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ $0let m = 1;
+ n + m$0
+}
+"#,
+ r#"
+fn foo() -> u32 {
+ let n = 2;
+ fun_name(n)
+}
+
+fn $0fun_name(n: u32) -> u32 {
+ let m = 1;
+ n + m
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn in_comment_is_not_applicable() {
+ cov_mark::check!(extract_function_in_comment_is_not_applicable);
+ check_assist_not_applicable(extract_function, r"fn main() { 1 + /* $0comment$0 */ 1; }");
+ }
+
+ #[test]
+ fn part_of_expr_stmt() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $01$0 + 1;
+}
+"#,
+ r#"
+fn foo() {
+ fun_name() + 1;
+}
+
+fn $0fun_name() -> i32 {
+ 1
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn function_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0bar(1 + 1)$0
+}
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ bar(1 + 1)
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn extract_from_nested() {
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ let x = true;
+ let tuple = match x {
+ true => ($02 + 2$0, true)
+ _ => (0, false)
+ };
+}
+"#,
+ r#"
+fn main() {
+ let x = true;
+ let tuple = match x {
+ true => (fun_name(), true)
+ _ => (0, false)
+ };
+}
+
+fn $0fun_name() -> i32 {
+ 2 + 2
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn param_from_closure() {
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ let lambda = |x: u32| $0x * 2$0;
+}
+"#,
+ r#"
+fn main() {
+ let lambda = |x: u32| fun_name(x);
+}
+
+fn $0fun_name(x: u32) -> u32 {
+ x * 2
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_return_stmt() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> u32 {
+ $0return 2 + 2$0;
+}
+"#,
+ r#"
+fn foo() -> u32 {
+ return fun_name();
+}
+
+fn $0fun_name() -> u32 {
+ 2 + 2
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn does_not_add_extra_whitespace() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> u32 {
+
+
+ $0return 2 + 2$0;
+}
+"#,
+ r#"
+fn foo() -> u32 {
+
+
+ return fun_name();
+}
+
+fn $0fun_name() -> u32 {
+ 2 + 2
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_stmt() {
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ let result = loop {
+ $0break 2 + 2$0;
+ };
+}
+"#,
+ r#"
+fn main() {
+ let result = loop {
+ break fun_name();
+ };
+}
+
+fn $0fun_name() -> i32 {
+ 2 + 2
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_cast() {
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ let v = $00f32 as u32$0;
+}
+"#,
+ r#"
+fn main() {
+ let v = fun_name();
+}
+
+fn $0fun_name() -> u32 {
+ 0f32 as u32
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn return_not_applicable() {
+ check_assist_not_applicable(extract_function, r"fn foo() { $0return$0; } ");
+ }
+
+ #[test]
+ fn method_to_freestanding() {
+ check_assist(
+ extract_function,
+ r#"
+struct S;
+
+impl S {
+ fn foo(&self) -> i32 {
+ $01+1$0
+ }
+}
+"#,
+ r#"
+struct S;
+
+impl S {
+ fn foo(&self) -> i32 {
+ fun_name()
+ }
+}
+
+fn $0fun_name() -> i32 {
+ 1+1
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_with_reference() {
+ check_assist(
+ extract_function,
+ r#"
+struct S { f: i32 };
+
+impl S {
+ fn foo(&self) -> i32 {
+ $0self.f+self.f$0
+ }
+}
+"#,
+ r#"
+struct S { f: i32 };
+
+impl S {
+ fn foo(&self) -> i32 {
+ self.fun_name()
+ }
+
+ fn $0fun_name(&self) -> i32 {
+ self.f+self.f
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_with_mut() {
+ check_assist(
+ extract_function,
+ r#"
+struct S { f: i32 };
+
+impl S {
+ fn foo(&mut self) {
+ $0self.f += 1;$0
+ }
+}
+"#,
+ r#"
+struct S { f: i32 };
+
+impl S {
+ fn foo(&mut self) {
+ self.fun_name();
+ }
+
+ fn $0fun_name(&mut self) {
+ self.f += 1;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn variable_defined_inside_and_used_after_no_ret() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let n = 1;
+ $0let k = n * n;$0
+ let m = k + 1;
+}
+"#,
+ r#"
+fn foo() {
+ let n = 1;
+ let k = fun_name(n);
+ let m = k + 1;
+}
+
+fn $0fun_name(n: i32) -> i32 {
+ let k = n * n;
+ k
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn variable_defined_inside_and_used_after_mutably_no_ret() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let n = 1;
+ $0let mut k = n * n;$0
+ k += 1;
+}
+"#,
+ r#"
+fn foo() {
+ let n = 1;
+ let mut k = fun_name(n);
+ k += 1;
+}
+
+fn $0fun_name(n: i32) -> i32 {
+ let mut k = n * n;
+ k
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn two_variables_defined_inside_and_used_after_no_ret() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let n = 1;
+ $0let k = n * n;
+ let m = k + 2;$0
+ let h = k + m;
+}
+"#,
+ r#"
+fn foo() {
+ let n = 1;
+ let (k, m) = fun_name(n);
+ let h = k + m;
+}
+
+fn $0fun_name(n: i32) -> (i32, i32) {
+ let k = n * n;
+ let m = k + 2;
+ (k, m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn multi_variables_defined_inside_and_used_after_mutably_no_ret() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let n = 1;
+ $0let mut k = n * n;
+ let mut m = k + 2;
+ let mut o = m + 3;
+ o += 1;$0
+ k += o;
+ m = 1;
+}
+"#,
+ r#"
+fn foo() {
+ let n = 1;
+ let (mut k, mut m, o) = fun_name(n);
+ k += o;
+ m = 1;
+}
+
+fn $0fun_name(n: i32) -> (i32, i32, i32) {
+ let mut k = n * n;
+ let mut m = k + 2;
+ let mut o = m + 3;
+ o += 1;
+ (k, m, o)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn nontrivial_patterns_define_variables() {
+ check_assist(
+ extract_function,
+ r#"
+struct Counter(i32);
+fn foo() {
+ $0let Counter(n) = Counter(0);$0
+ let m = n;
+}
+"#,
+ r#"
+struct Counter(i32);
+fn foo() {
+ let n = fun_name();
+ let m = n;
+}
+
+fn $0fun_name() -> i32 {
+ let Counter(n) = Counter(0);
+ n
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn struct_with_two_fields_pattern_define_variables() {
+ check_assist(
+ extract_function,
+ r#"
+struct Counter { n: i32, m: i32 };
+fn foo() {
+ $0let Counter { n, m: k } = Counter { n: 1, m: 2 };$0
+ let h = n + k;
+}
+"#,
+ r#"
+struct Counter { n: i32, m: i32 };
+fn foo() {
+ let (n, k) = fun_name();
+ let h = n + k;
+}
+
+fn $0fun_name() -> (i32, i32) {
+ let Counter { n, m: k } = Counter { n: 1, m: 2 };
+ (n, k)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_var_from_outer_scope() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let mut n = 1;
+ $0n += 1;$0
+ let m = n + 1;
+}
+"#,
+ r#"
+fn foo() {
+ let mut n = 1;
+ fun_name(&mut n);
+ let m = n + 1;
+}
+
+fn $0fun_name(n: &mut i32) {
+ *n += 1;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_field_from_outer_scope() {
+ check_assist(
+ extract_function,
+ r#"
+struct C { n: i32 }
+fn foo() {
+ let mut c = C { n: 0 };
+ $0c.n += 1;$0
+ let m = c.n + 1;
+}
+"#,
+ r#"
+struct C { n: i32 }
+fn foo() {
+ let mut c = C { n: 0 };
+ fun_name(&mut c);
+ let m = c.n + 1;
+}
+
+fn $0fun_name(c: &mut C) {
+ c.n += 1;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_nested_field_from_outer_scope() {
+ check_assist(
+ extract_function,
+ r#"
+struct P { n: i32}
+struct C { p: P }
+fn foo() {
+ let mut c = C { p: P { n: 0 } };
+ let mut v = C { p: P { n: 0 } };
+ let u = C { p: P { n: 0 } };
+ $0c.p.n += u.p.n;
+ let r = &mut v.p.n;$0
+ let m = c.p.n + v.p.n + u.p.n;
+}
+"#,
+ r#"
+struct P { n: i32}
+struct C { p: P }
+fn foo() {
+ let mut c = C { p: P { n: 0 } };
+ let mut v = C { p: P { n: 0 } };
+ let u = C { p: P { n: 0 } };
+ fun_name(&mut c, &u, &mut v);
+ let m = c.p.n + v.p.n + u.p.n;
+}
+
+fn $0fun_name(c: &mut C, u: &C, v: &mut C) {
+ c.p.n += u.p.n;
+ let r = &mut v.p.n;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_param_many_usages_stmt() {
+ check_assist(
+ extract_function,
+ r#"
+fn bar(k: i32) {}
+trait I: Copy {
+ fn succ(&self) -> Self;
+ fn inc(&mut self) -> Self { let v = self.succ(); *self = v; v }
+}
+impl I for i32 {
+ fn succ(&self) -> Self { *self + 1 }
+}
+fn foo() {
+ let mut n = 1;
+ $0n += n;
+ bar(n);
+ bar(n+1);
+ bar(n*n);
+ bar(&n);
+ n.inc();
+ let v = &mut n;
+ *v = v.succ();
+ n.succ();$0
+ let m = n + 1;
+}
+"#,
+ r#"
+fn bar(k: i32) {}
+trait I: Copy {
+ fn succ(&self) -> Self;
+ fn inc(&mut self) -> Self { let v = self.succ(); *self = v; v }
+}
+impl I for i32 {
+ fn succ(&self) -> Self { *self + 1 }
+}
+fn foo() {
+ let mut n = 1;
+ fun_name(&mut n);
+ let m = n + 1;
+}
+
+fn $0fun_name(n: &mut i32) {
+ *n += *n;
+ bar(*n);
+ bar(*n+1);
+ bar(*n**n);
+ bar(&*n);
+ n.inc();
+ let v = n;
+ *v = v.succ();
+ n.succ();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_param_many_usages_expr() {
+ check_assist(
+ extract_function,
+ r#"
+fn bar(k: i32) {}
+trait I: Copy {
+ fn succ(&self) -> Self;
+ fn inc(&mut self) -> Self { let v = self.succ(); *self = v; v }
+}
+impl I for i32 {
+ fn succ(&self) -> Self { *self + 1 }
+}
+fn foo() {
+ let mut n = 1;
+ $0{
+ n += n;
+ bar(n);
+ bar(n+1);
+ bar(n*n);
+ bar(&n);
+ n.inc();
+ let v = &mut n;
+ *v = v.succ();
+ n.succ();
+ }$0
+ let m = n + 1;
+}
+"#,
+ r#"
+fn bar(k: i32) {}
+trait I: Copy {
+ fn succ(&self) -> Self;
+ fn inc(&mut self) -> Self { let v = self.succ(); *self = v; v }
+}
+impl I for i32 {
+ fn succ(&self) -> Self { *self + 1 }
+}
+fn foo() {
+ let mut n = 1;
+ fun_name(&mut n);
+ let m = n + 1;
+}
+
+fn $0fun_name(n: &mut i32) {
+ *n += *n;
+ bar(*n);
+ bar(*n+1);
+ bar(*n**n);
+ bar(&*n);
+ n.inc();
+ let v = n;
+ *v = v.succ();
+ n.succ();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_param_by_value() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let mut n = 1;
+ $0n += 1;$0
+}
+"#,
+ r"
+fn foo() {
+ let mut n = 1;
+ fun_name(n);
+}
+
+fn $0fun_name(mut n: i32) {
+ n += 1;
+}
+",
+ );
+ }
+
+ #[test]
+ fn mut_param_because_of_mut_ref() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let mut n = 1;
+ $0let v = &mut n;
+ *v += 1;$0
+ let k = n;
+}
+"#,
+ r#"
+fn foo() {
+ let mut n = 1;
+ fun_name(&mut n);
+ let k = n;
+}
+
+fn $0fun_name(n: &mut i32) {
+ let v = n;
+ *v += 1;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_param_by_value_because_of_mut_ref() {
+ check_assist(
+ extract_function,
+ r"
+fn foo() {
+ let mut n = 1;
+ $0let v = &mut n;
+ *v += 1;$0
+}
+",
+ r#"
+fn foo() {
+ let mut n = 1;
+ fun_name(n);
+}
+
+fn $0fun_name(mut n: i32) {
+ let v = &mut n;
+ *v += 1;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn mut_method_call() {
+ check_assist(
+ extract_function,
+ r#"
+trait I {
+ fn inc(&mut self);
+}
+impl I for i32 {
+ fn inc(&mut self) { *self += 1 }
+}
+fn foo() {
+ let mut n = 1;
+ $0n.inc();$0
+}
+"#,
+ r#"
+trait I {
+ fn inc(&mut self);
+}
+impl I for i32 {
+ fn inc(&mut self) { *self += 1 }
+}
+fn foo() {
+ let mut n = 1;
+ fun_name(n);
+}
+
+fn $0fun_name(mut n: i32) {
+ n.inc();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn shared_method_call() {
+ check_assist(
+ extract_function,
+ r#"
+trait I {
+ fn succ(&self);
+}
+impl I for i32 {
+ fn succ(&self) { *self + 1 }
+}
+fn foo() {
+ let mut n = 1;
+ $0n.succ();$0
+}
+"#,
+ r"
+trait I {
+ fn succ(&self);
+}
+impl I for i32 {
+ fn succ(&self) { *self + 1 }
+}
+fn foo() {
+ let mut n = 1;
+ fun_name(n);
+}
+
+fn $0fun_name(n: i32) {
+ n.succ();
+}
+",
+ );
+ }
+
+ #[test]
+ fn mut_method_call_with_other_receiver() {
+ check_assist(
+ extract_function,
+ r#"
+trait I {
+ fn inc(&mut self, n: i32);
+}
+impl I for i32 {
+ fn inc(&mut self, n: i32) { *self += n }
+}
+fn foo() {
+ let mut n = 1;
+ $0let mut m = 2;
+ m.inc(n);$0
+}
+"#,
+ r"
+trait I {
+ fn inc(&mut self, n: i32);
+}
+impl I for i32 {
+ fn inc(&mut self, n: i32) { *self += n }
+}
+fn foo() {
+ let mut n = 1;
+ fun_name(n);
+}
+
+fn $0fun_name(n: i32) {
+ let mut m = 2;
+ m.inc(n);
+}
+",
+ );
+ }
+
+ #[test]
+ fn non_copy_without_usages_after() {
+ check_assist(
+ extract_function,
+ r#"
+struct Counter(i32);
+fn foo() {
+ let c = Counter(0);
+ $0let n = c.0;$0
+}
+"#,
+ r"
+struct Counter(i32);
+fn foo() {
+ let c = Counter(0);
+ fun_name(c);
+}
+
+fn $0fun_name(c: Counter) {
+ let n = c.0;
+}
+",
+ );
+ }
+
+ #[test]
+ fn non_copy_used_after() {
+ check_assist(
+ extract_function,
+ r"
+struct Counter(i32);
+fn foo() {
+ let c = Counter(0);
+ $0let n = c.0;$0
+ let m = c.0;
+}
+",
+ r#"
+struct Counter(i32);
+fn foo() {
+ let c = Counter(0);
+ fun_name(&c);
+ let m = c.0;
+}
+
+fn $0fun_name(c: &Counter) {
+ let n = c.0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn copy_used_after() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: copy
+fn foo() {
+ let n = 0;
+ $0let m = n;$0
+ let k = n;
+}
+"#,
+ r#"
+fn foo() {
+ let n = 0;
+ fun_name(n);
+ let k = n;
+}
+
+fn $0fun_name(n: i32) {
+ let m = n;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn copy_custom_used_after() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: copy, derive
+#[derive(Clone, Copy)]
+struct Counter(i32);
+fn foo() {
+ let c = Counter(0);
+ $0let n = c.0;$0
+ let m = c.0;
+}
+"#,
+ r#"
+#[derive(Clone, Copy)]
+struct Counter(i32);
+fn foo() {
+ let c = Counter(0);
+ fun_name(c);
+ let m = c.0;
+}
+
+fn $0fun_name(c: Counter) {
+ let n = c.0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn indented_stmts() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ if true {
+ loop {
+ $0let n = 1;
+ let m = 2;$0
+ }
+ }
+}
+"#,
+ r#"
+fn foo() {
+ if true {
+ loop {
+ fun_name();
+ }
+ }
+}
+
+fn $0fun_name() {
+ let n = 1;
+ let m = 2;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn indented_stmts_inside_mod() {
+ check_assist(
+ extract_function,
+ r#"
+mod bar {
+ fn foo() {
+ if true {
+ loop {
+ $0let n = 1;
+ let m = 2;$0
+ }
+ }
+ }
+}
+"#,
+ r#"
+mod bar {
+ fn foo() {
+ if true {
+ loop {
+ fun_name();
+ }
+ }
+ }
+
+ fn $0fun_name() {
+ let n = 1;
+ let m = 2;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_loop() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: option
+fn foo() {
+ loop {
+ let n = 1;
+ $0let m = n + 1;
+ break;
+ let k = 2;$0
+ let h = 1 + k;
+ }
+}
+"#,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ let k = match fun_name(n) {
+ Some(value) => value,
+ None => break,
+ };
+ let h = 1 + k;
+ }
+}
+
+fn $0fun_name(n: i32) -> Option<i32> {
+ let m = n + 1;
+ return None;
+ let k = 2;
+ Some(k)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn return_to_parent() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: copy, result
+fn foo() -> i64 {
+ let n = 1;
+ $0let m = n + 1;
+ return 1;
+ let k = 2;$0
+ (n + k) as i64
+}
+"#,
+ r#"
+fn foo() -> i64 {
+ let n = 1;
+ let k = match fun_name(n) {
+ Ok(value) => value,
+ Err(value) => return value,
+ };
+ (n + k) as i64
+}
+
+fn $0fun_name(n: i32) -> Result<i32, i64> {
+ let m = n + 1;
+ return Err(1);
+ let k = 2;
+ Ok(k)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_and_continue() {
+ cov_mark::check!(external_control_flow_break_and_continue);
+ check_assist_not_applicable(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ $0let m = n + 1;
+ break;
+ let k = 2;
+ continue;
+ let k = k + 1;$0
+ let r = n + k;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn return_and_break() {
+ cov_mark::check!(external_control_flow_return_and_bc);
+ check_assist_not_applicable(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ $0let m = n + 1;
+ break;
+ let k = 2;
+ return;
+ let k = k + 1;$0
+ let r = n + k;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_loop_with_if() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: try
+fn foo() {
+ loop {
+ let mut n = 1;
+ $0let m = n + 1;
+ break;
+ n += m;$0
+ let h = 1 + n;
+ }
+}
+"#,
+ r#"
+use core::ops::ControlFlow;
+
+fn foo() {
+ loop {
+ let mut n = 1;
+ if let ControlFlow::Break(_) = fun_name(&mut n) {
+ break;
+ }
+ let h = 1 + n;
+ }
+}
+
+fn $0fun_name(n: &mut i32) -> ControlFlow<()> {
+ let m = *n + 1;
+ return ControlFlow::Break(());
+ *n += m;
+ ControlFlow::Continue(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_loop_nested() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: try
+fn foo() {
+ loop {
+ let mut n = 1;
+ $0let m = n + 1;
+ if m == 42 {
+ break;
+ }$0
+ let h = 1;
+ }
+}
+"#,
+ r#"
+use core::ops::ControlFlow;
+
+fn foo() {
+ loop {
+ let mut n = 1;
+ if let ControlFlow::Break(_) = fun_name(n) {
+ break;
+ }
+ let h = 1;
+ }
+}
+
+fn $0fun_name(n: i32) -> ControlFlow<()> {
+ let m = n + 1;
+ if m == 42 {
+ return ControlFlow::Break(());
+ }
+ ControlFlow::Continue(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_loop_nested_labeled() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: try
+fn foo() {
+ 'bar: loop {
+ loop {
+ $0break 'bar;$0
+ }
+ }
+}
+"#,
+ r#"
+use core::ops::ControlFlow;
+
+fn foo() {
+ 'bar: loop {
+ loop {
+ if let ControlFlow::Break(_) = fun_name() {
+ break 'bar;
+ }
+ }
+ }
+}
+
+fn $0fun_name() -> ControlFlow<()> {
+ return ControlFlow::Break(());
+ ControlFlow::Continue(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn continue_loop_nested_labeled() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: try
+fn foo() {
+ 'bar: loop {
+ loop {
+ $0continue 'bar;$0
+ }
+ }
+}
+"#,
+ r#"
+use core::ops::ControlFlow;
+
+fn foo() {
+ 'bar: loop {
+ loop {
+ if let ControlFlow::Break(_) = fun_name() {
+ continue 'bar;
+ }
+ }
+ }
+}
+
+fn $0fun_name() -> ControlFlow<()> {
+ return ControlFlow::Break(());
+ ControlFlow::Continue(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn return_from_nested_loop() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;$0
+ let k = 1;
+ loop {
+ return;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ }
+}
+"#,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ let m = match fun_name() {
+ Some(value) => value,
+ None => return,
+ };
+ let h = 1 + m;
+ }
+}
+
+fn $0fun_name() -> Option<i32> {
+ let k = 1;
+ loop {
+ return None;
+ }
+ let m = k + 1;
+ Some(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_from_nested_loop() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ $0let k = 1;
+ loop {
+ break;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ }
+}
+"#,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ let m = fun_name();
+ let h = 1 + m;
+ }
+}
+
+fn $0fun_name() -> i32 {
+ let k = 1;
+ loop {
+ break;
+ }
+ let m = k + 1;
+ m
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_from_nested_and_outer_loops() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ $0let k = 1;
+ loop {
+ break;
+ }
+ if k == 42 {
+ break;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ }
+}
+"#,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ let m = match fun_name() {
+ Some(value) => value,
+ None => break,
+ };
+ let h = 1 + m;
+ }
+}
+
+fn $0fun_name() -> Option<i32> {
+ let k = 1;
+ loop {
+ break;
+ }
+ if k == 42 {
+ return None;
+ }
+ let m = k + 1;
+ Some(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn return_from_nested_fn() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ $0let k = 1;
+ fn test() {
+ return;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ }
+}
+"#,
+ r#"
+fn foo() {
+ loop {
+ let n = 1;
+ let m = fun_name();
+ let h = 1 + m;
+ }
+}
+
+fn $0fun_name() -> i32 {
+ let k = 1;
+ fn test() {
+ return;
+ }
+ let m = k + 1;
+ m
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_with_value() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i32 {
+ loop {
+ let n = 1;
+ $0let k = 1;
+ if k == 42 {
+ break 3;
+ }
+ let m = k + 1;$0
+ let h = 1;
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ loop {
+ let n = 1;
+ if let Some(value) = fun_name() {
+ break value;
+ }
+ let h = 1;
+ }
+}
+
+fn $0fun_name() -> Option<i32> {
+ let k = 1;
+ if k == 42 {
+ return Some(3);
+ }
+ let m = k + 1;
+ None
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_with_value_and_label() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i32 {
+ 'bar: loop {
+ let n = 1;
+ $0let k = 1;
+ if k == 42 {
+ break 'bar 4;
+ }
+ let m = k + 1;$0
+ let h = 1;
+ }
+}
+"#,
+ r#"
+fn foo() -> i32 {
+ 'bar: loop {
+ let n = 1;
+ if let Some(value) = fun_name() {
+ break 'bar value;
+ }
+ let h = 1;
+ }
+}
+
+fn $0fun_name() -> Option<i32> {
+ let k = 1;
+ if k == 42 {
+ return Some(4);
+ }
+ let m = k + 1;
+ None
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn break_with_value_and_return() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() -> i64 {
+ loop {
+ let n = 1;$0
+ let k = 1;
+ if k == 42 {
+ break 3;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ }
+}
+"#,
+ r#"
+fn foo() -> i64 {
+ loop {
+ let n = 1;
+ let m = match fun_name() {
+ Ok(value) => value,
+ Err(value) => break value,
+ };
+ let h = 1 + m;
+ }
+}
+
+fn $0fun_name() -> Result<i32, i64> {
+ let k = 1;
+ if k == 42 {
+ return Err(3);
+ }
+ let m = k + 1;
+ Ok(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_option() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: option
+fn bar() -> Option<i32> { None }
+fn foo() -> Option<()> {
+ let n = bar()?;
+ $0let k = foo()?;
+ let m = k + 1;$0
+ let h = 1 + m;
+ Some(())
+}
+"#,
+ r#"
+fn bar() -> Option<i32> { None }
+fn foo() -> Option<()> {
+ let n = bar()?;
+ let m = fun_name()?;
+ let h = 1 + m;
+ Some(())
+}
+
+fn $0fun_name() -> Option<i32> {
+ let k = foo()?;
+ let m = k + 1;
+ Some(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_option_unit() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: option
+fn foo() -> Option<()> {
+ let n = 1;
+ $0let k = foo()?;
+ let m = k + 1;$0
+ let h = 1 + n;
+ Some(())
+}
+"#,
+ r#"
+fn foo() -> Option<()> {
+ let n = 1;
+ fun_name()?;
+ let h = 1 + n;
+ Some(())
+}
+
+fn $0fun_name() -> Option<()> {
+ let k = foo()?;
+ let m = k + 1;
+ Some(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_result() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: result
+fn foo() -> Result<(), i64> {
+ let n = 1;
+ $0let k = foo()?;
+ let m = k + 1;$0
+ let h = 1 + m;
+ Ok(())
+}
+"#,
+ r#"
+fn foo() -> Result<(), i64> {
+ let n = 1;
+ let m = fun_name()?;
+ let h = 1 + m;
+ Ok(())
+}
+
+fn $0fun_name() -> Result<i32, i64> {
+ let k = foo()?;
+ let m = k + 1;
+ Ok(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_option_with_return() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: option
+fn foo() -> Option<()> {
+ let n = 1;
+ $0let k = foo()?;
+ if k == 42 {
+ return None;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ Some(())
+}
+"#,
+ r#"
+fn foo() -> Option<()> {
+ let n = 1;
+ let m = fun_name()?;
+ let h = 1 + m;
+ Some(())
+}
+
+fn $0fun_name() -> Option<i32> {
+ let k = foo()?;
+ if k == 42 {
+ return None;
+ }
+ let m = k + 1;
+ Some(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_result_with_return() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: result
+fn foo() -> Result<(), i64> {
+ let n = 1;
+ $0let k = foo()?;
+ if k == 42 {
+ return Err(1);
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ Ok(())
+}
+"#,
+ r#"
+fn foo() -> Result<(), i64> {
+ let n = 1;
+ let m = fun_name()?;
+ let h = 1 + m;
+ Ok(())
+}
+
+fn $0fun_name() -> Result<i32, i64> {
+ let k = foo()?;
+ if k == 42 {
+ return Err(1);
+ }
+ let m = k + 1;
+ Ok(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_and_break() {
+ cov_mark::check!(external_control_flow_try_and_bc);
+ check_assist_not_applicable(
+ extract_function,
+ r#"
+//- minicore: option
+fn foo() -> Option<()> {
+ loop {
+ let n = Some(1);
+ $0let m = n? + 1;
+ break;
+ let k = 2;
+ let k = k + 1;$0
+ let r = n + k;
+ }
+ Some(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn try_and_return_ok() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: result
+fn foo() -> Result<(), i64> {
+ let n = 1;
+ $0let k = foo()?;
+ if k == 42 {
+ return Ok(1);
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ Ok(())
+}
+"#,
+ r#"
+fn foo() -> Result<(), i64> {
+ let n = 1;
+ let m = fun_name()?;
+ let h = 1 + m;
+ Ok(())
+}
+
+fn $0fun_name() -> Result<i32, i64> {
+ let k = foo()?;
+ if k == 42 {
+ return Ok(1);
+ }
+ let m = k + 1;
+ Ok(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn param_usage_in_macro() {
+ check_assist(
+ extract_function,
+ r#"
+macro_rules! m {
+ ($val:expr) => { $val };
+}
+
+fn foo() {
+ let n = 1;
+ $0let k = n * m!(n);$0
+ let m = k + 1;
+}
+"#,
+ r#"
+macro_rules! m {
+ ($val:expr) => { $val };
+}
+
+fn foo() {
+ let n = 1;
+ let k = fun_name(n);
+ let m = k + 1;
+}
+
+fn $0fun_name(n: i32) -> i32 {
+ let k = n * m!(n);
+ k
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_with_await() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: future
+fn main() {
+ $0some_function().await;$0
+}
+
+async fn some_function() {
+
+}
+"#,
+ r#"
+fn main() {
+ fun_name().await;
+}
+
+async fn $0fun_name() {
+ some_function().await;
+}
+
+async fn some_function() {
+
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_with_await_and_result_not_producing_match_expr() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: future, result
+async fn foo() -> Result<(), ()> {
+ $0async {}.await;
+ Err(())?$0
+}
+"#,
+ r#"
+async fn foo() -> Result<(), ()> {
+ fun_name().await?
+}
+
+async fn $0fun_name() -> Result<(), ()> {
+ async {}.await;
+ Err(())?
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_with_await_and_result_producing_match_expr() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: future
+async fn foo() -> i32 {
+ loop {
+ let n = 1;$0
+ let k = async { 1 }.await;
+ if k == 42 {
+ break 3;
+ }
+ let m = k + 1;$0
+ let h = 1 + m;
+ }
+}
+"#,
+ r#"
+async fn foo() -> i32 {
+ loop {
+ let n = 1;
+ let m = match fun_name().await {
+ Ok(value) => value,
+ Err(value) => break value,
+ };
+ let h = 1 + m;
+ }
+}
+
+async fn $0fun_name() -> Result<i32, i32> {
+ let k = async { 1 }.await;
+ if k == 42 {
+ return Err(3);
+ }
+ let m = k + 1;
+ Ok(m)
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_with_await_in_args() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: future
+fn main() {
+ $0function_call("a", some_function().await);$0
+}
+
+async fn some_function() {
+
+}
+"#,
+ r#"
+fn main() {
+ fun_name().await;
+}
+
+async fn $0fun_name() {
+ function_call("a", some_function().await);
+}
+
+async fn some_function() {
+
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_does_not_extract_standalone_blocks() {
+ check_assist_not_applicable(
+ extract_function,
+ r#"
+fn main() $0{}$0
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_adds_comma_for_match_arm() {
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ match 6 {
+ 100 => $0{ 100 }$0
+ _ => 0,
+ };
+}
+"#,
+ r#"
+fn main() {
+ match 6 {
+ 100 => fun_name(),
+ _ => 0,
+ };
+}
+
+fn $0fun_name() -> i32 {
+ 100
+}
+"#,
+ );
+ check_assist(
+ extract_function,
+ r#"
+fn main() {
+ match 6 {
+ 100 => $0{ 100 }$0,
+ _ => 0,
+ };
+}
+"#,
+ r#"
+fn main() {
+ match 6 {
+ 100 => fun_name(),
+ _ => 0,
+ };
+}
+
+fn $0fun_name() -> i32 {
+ 100
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_does_not_tear_comments_apart() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ /*$0*/
+ foo();
+ foo();
+ /*$0*/
+}
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ /**/
+ foo();
+ foo();
+ /**/
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_does_not_tear_body_apart() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ $0foo();
+}$0
+"#,
+ r#"
+fn foo() {
+ fun_name();
+}
+
+fn $0fun_name() {
+ foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_does_not_wrap_res_in_res() {
+ check_assist(
+ extract_function,
+ r#"
+//- minicore: result
+fn foo() -> Result<(), i64> {
+ $0Result::<i32, i64>::Ok(0)?;
+ Ok(())$0
+}
+"#,
+ r#"
+fn foo() -> Result<(), i64> {
+ fun_name()?
+}
+
+fn $0fun_name() -> Result<(), i64> {
+ Result::<i32, i64>::Ok(0)?;
+ Ok(())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_knows_const() {
+ check_assist(
+ extract_function,
+ r#"
+const fn foo() {
+ $0()$0
+}
+"#,
+ r#"
+const fn foo() {
+ fun_name();
+}
+
+const fn $0fun_name() {
+ ()
+}
+"#,
+ );
+ check_assist(
+ extract_function,
+ r#"
+const FOO: () = {
+ $0()$0
+};
+"#,
+ r#"
+const FOO: () = {
+ fun_name();
+};
+
+const fn $0fun_name() {
+ ()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_does_not_move_outer_loop_vars() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let mut x = 5;
+ for _ in 0..10 {
+ $0x += 1;$0
+ }
+}
+"#,
+ r#"
+fn foo() {
+ let mut x = 5;
+ for _ in 0..10 {
+ fun_name(&mut x);
+ }
+}
+
+fn $0fun_name(x: &mut i32) {
+ *x += 1;
+}
+"#,
+ );
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ for _ in 0..10 {
+ let mut x = 5;
+ $0x += 1;$0
+ }
+}
+"#,
+ r#"
+fn foo() {
+ for _ in 0..10 {
+ let mut x = 5;
+ fun_name(x);
+ }
+}
+
+fn $0fun_name(mut x: i32) {
+ x += 1;
+}
+"#,
+ );
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ loop {
+ let mut x = 5;
+ for _ in 0..10 {
+ $0x += 1;$0
+ }
+ }
+}
+"#,
+ r#"
+fn foo() {
+ loop {
+ let mut x = 5;
+ for _ in 0..10 {
+ fun_name(&mut x);
+ }
+ }
+}
+
+fn $0fun_name(x: &mut i32) {
+ *x += 1;
+}
+"#,
+ );
+ }
+
+ // regression test for #9822
+ #[test]
+ fn extract_mut_ref_param_has_no_mut_binding_in_loop() {
+ check_assist(
+ extract_function,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&mut self) {}
+}
+fn foo() {
+ let mut x = Foo;
+ while false {
+ let y = &mut x;
+ $0y.foo();$0
+ }
+ let z = x;
+}
+"#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&mut self) {}
+}
+fn foo() {
+ let mut x = Foo;
+ while false {
+ let y = &mut x;
+ fun_name(y);
+ }
+ let z = x;
+}
+
+fn $0fun_name(y: &mut Foo) {
+ y.foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_with_macro_arg() {
+ check_assist(
+ extract_function,
+ r#"
+macro_rules! m {
+ ($val:expr) => { $val };
+}
+fn main() {
+ let bar = "bar";
+ $0m!(bar);$0
+}
+"#,
+ r#"
+macro_rules! m {
+ ($val:expr) => { $val };
+}
+fn main() {
+ let bar = "bar";
+ fun_name(bar);
+}
+
+fn $0fun_name(bar: &str) {
+ m!(bar);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn unresolveable_types_default_to_placeholder() {
+ check_assist(
+ extract_function,
+ r#"
+fn foo() {
+ let a = __unresolved;
+ let _ = $0{a}$0;
+}
+"#,
+ r#"
+fn foo() {
+ let a = __unresolved;
+ let _ = fun_name(a);
+}
+
+fn $0fun_name(a: _) -> _ {
+ a
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn reference_mutable_param_with_further_usages() {
+ check_assist(
+ extract_function,
+ r#"
+pub struct Foo {
+ field: u32,
+}
+
+pub fn testfn(arg: &mut Foo) {
+ $0arg.field = 8;$0
+ // Simulating access after the extracted portion
+ arg.field = 16;
+}
+"#,
+ r#"
+pub struct Foo {
+ field: u32,
+}
+
+pub fn testfn(arg: &mut Foo) {
+ fun_name(arg);
+ // Simulating access after the extracted portion
+ arg.field = 16;
+}
+
+fn $0fun_name(arg: &mut Foo) {
+ arg.field = 8;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn reference_mutable_param_without_further_usages() {
+ check_assist(
+ extract_function,
+ r#"
+pub struct Foo {
+ field: u32,
+}
+
+pub fn testfn(arg: &mut Foo) {
+ $0arg.field = 8;$0
+}
+"#,
+ r#"
+pub struct Foo {
+ field: u32,
+}
+
+pub fn testfn(arg: &mut Foo) {
+ fun_name(arg);
+}
+
+fn $0fun_name(arg: &mut Foo) {
+ arg.field = 8;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_function_copies_comment_at_start() {
+ check_assist(
+ extract_function,
+ r#"
+fn func() {
+ let i = 0;
+ $0// comment here!
+ let x = 0;$0
+}
+"#,
+ r#"
+fn func() {
+ let i = 0;
+ fun_name();
+}
+
+fn $0fun_name() {
+ // comment here!
+ let x = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_function_copies_comment_in_between() {
+ check_assist(
+ extract_function,
+ r#"
+fn func() {
+ let i = 0;$0
+ let a = 0;
+ // comment here!
+ let x = 0;$0
+}
+"#,
+ r#"
+fn func() {
+ let i = 0;
+ fun_name();
+}
+
+fn $0fun_name() {
+ let a = 0;
+ // comment here!
+ let x = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_function_copies_comment_at_end() {
+ check_assist(
+ extract_function,
+ r#"
+fn func() {
+ let i = 0;
+ $0let x = 0;
+ // comment here!$0
+}
+"#,
+ r#"
+fn func() {
+ let i = 0;
+ fun_name();
+}
+
+fn $0fun_name() {
+ let x = 0;
+ // comment here!
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_function_copies_comment_indented() {
+ check_assist(
+ extract_function,
+ r#"
+fn func() {
+ let i = 0;
+ $0let x = 0;
+ while(true) {
+ // comment here!
+ }$0
+}
+"#,
+ r#"
+fn func() {
+ let i = 0;
+ fun_name();
+}
+
+fn $0fun_name() {
+ let x = 0;
+ while(true) {
+ // comment here!
+ }
+}
+"#,
+ );
+ }
+
+ // FIXME: we do want to preserve whitespace
+ #[test]
+ fn extract_function_does_not_preserve_whitespace() {
+ check_assist(
+ extract_function,
+ r#"
+fn func() {
+ let i = 0;
+ $0let a = 0;
+
+ let x = 0;$0
+}
+"#,
+ r#"
+fn func() {
+ let i = 0;
+ fun_name();
+}
+
+fn $0fun_name() {
+ let a = 0;
+ let x = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_function_long_form_comment() {
+ check_assist(
+ extract_function,
+ r#"
+fn func() {
+ let i = 0;
+ $0/* a comment */
+ let x = 0;$0
+}
+"#,
+ r#"
+fn func() {
+ let i = 0;
+ fun_name();
+}
+
+fn $0fun_name() {
+ /* a comment */
+ let x = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn it_should_not_generate_duplicate_function_names() {
+ check_assist(
+ extract_function,
+ r#"
+fn fun_name() {
+ $0let x = 0;$0
+}
+"#,
+ r#"
+fn fun_name() {
+ fun_name1();
+}
+
+fn $0fun_name1() {
+ let x = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn should_increment_suffix_until_it_finds_space() {
+ check_assist(
+ extract_function,
+ r#"
+fn fun_name1() {
+ let y = 0;
+}
+
+fn fun_name() {
+ $0let x = 0;$0
+}
+"#,
+ r#"
+fn fun_name1() {
+ let y = 0;
+}
+
+fn fun_name() {
+ fun_name2();
+}
+
+fn $0fun_name2() {
+ let x = 0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn extract_method_from_trait_impl() {
+ check_assist(
+ extract_function,
+ r#"
+struct Struct(i32);
+trait Trait {
+ fn bar(&self) -> i32;
+}
+
+impl Trait for Struct {
+ fn bar(&self) -> i32 {
+ $0self.0 + 2$0
+ }
+}
+"#,
+ r#"
+struct Struct(i32);
+trait Trait {
+ fn bar(&self) -> i32;
+}
+
+impl Trait for Struct {
+ fn bar(&self) -> i32 {
+ self.fun_name()
+ }
+}
+
+impl Struct {
+ fn $0fun_name(&self) -> i32 {
+ self.0 + 2
+ }
+}
+"#,
+ );
+ }
+
++ #[test]
++ fn extract_method_from_trait_with_existing_non_empty_impl_block() {
++ check_assist(
++ extract_function,
++ r#"
++struct Struct(i32);
++trait Trait {
++ fn bar(&self) -> i32;
++}
++
++impl Struct {
++ fn foo() {}
++}
++
++impl Trait for Struct {
++ fn bar(&self) -> i32 {
++ $0self.0 + 2$0
++ }
++}
++"#,
++ r#"
++struct Struct(i32);
++trait Trait {
++ fn bar(&self) -> i32;
++}
++
++impl Struct {
++ fn foo() {}
++
++ fn $0fun_name(&self) -> i32 {
++ self.0 + 2
++ }
++}
++
++impl Trait for Struct {
++ fn bar(&self) -> i32 {
++ self.fun_name()
++ }
++}
++"#,
++ )
++ }
++
++ #[test]
++ fn extract_function_from_trait_with_existing_non_empty_impl_block() {
++ check_assist(
++ extract_function,
++ r#"
++struct Struct(i32);
++trait Trait {
++ fn bar(&self) -> i32;
++}
++
++impl Struct {
++ fn foo() {}
++}
++
++impl Trait for Struct {
++ fn bar(&self) -> i32 {
++ let three_squared = $03 * 3$0;
++ self.0 + three_squared
++ }
++}
++"#,
++ r#"
++struct Struct(i32);
++trait Trait {
++ fn bar(&self) -> i32;
++}
++
++impl Struct {
++ fn foo() {}
++}
++
++impl Trait for Struct {
++ fn bar(&self) -> i32 {
++ let three_squared = fun_name();
++ self.0 + three_squared
++ }
++}
++
++fn $0fun_name() -> i32 {
++ 3 * 3
++}
++"#,
++ )
++ }
++
++ #[test]
++ fn extract_method_from_trait_with_multiple_existing_impl_blocks() {
++ check_assist(
++ extract_function,
++ r#"
++struct Struct(i32);
++struct StructBefore(i32);
++struct StructAfter(i32);
++trait Trait {
++ fn bar(&self) -> i32;
++}
++
++impl StructBefore {
++ fn foo(){}
++}
++
++impl Struct {
++ fn foo(){}
++}
++
++impl StructAfter {
++ fn foo(){}
++}
++
++impl Trait for Struct {
++ fn bar(&self) -> i32 {
++ $0self.0 + 2$0
++ }
++}
++"#,
++ r#"
++struct Struct(i32);
++struct StructBefore(i32);
++struct StructAfter(i32);
++trait Trait {
++ fn bar(&self) -> i32;
++}
++
++impl StructBefore {
++ fn foo(){}
++}
++
++impl Struct {
++ fn foo(){}
++
++ fn $0fun_name(&self) -> i32 {
++ self.0 + 2
++ }
++}
++
++impl StructAfter {
++ fn foo(){}
++}
++
++impl Trait for Struct {
++ fn bar(&self) -> i32 {
++ self.fun_name()
++ }
++}
++"#,
++ )
++ }
++
++ #[test]
++ fn extract_method_from_trait_with_multiple_existing_trait_impl_blocks() {
++ check_assist(
++ extract_function,
++ r#"
++struct Struct(i32);
++trait Trait {
++ fn bar(&self) -> i32;
++}
++trait TraitBefore {
++ fn before(&self) -> i32;
++}
++trait TraitAfter {
++ fn after(&self) -> i32;
++}
++
++impl TraitBefore for Struct {
++ fn before(&self) -> i32 {
++ 42
++ }
++}
++
++impl Struct {
++ fn foo(){}
++}
++
++impl TraitAfter for Struct {
++ fn after(&self) -> i32 {
++ 42
++ }
++}
++
++impl Trait for Struct {
++ fn bar(&self) -> i32 {
++ $0self.0 + 2$0
++ }
++}
++"#,
++ r#"
++struct Struct(i32);
++trait Trait {
++ fn bar(&self) -> i32;
++}
++trait TraitBefore {
++ fn before(&self) -> i32;
++}
++trait TraitAfter {
++ fn after(&self) -> i32;
++}
++
++impl TraitBefore for Struct {
++ fn before(&self) -> i32 {
++ 42
++ }
++}
++
++impl Struct {
++ fn foo(){}
++
++ fn $0fun_name(&self) -> i32 {
++ self.0 + 2
++ }
++}
++
++impl TraitAfter for Struct {
++ fn after(&self) -> i32 {
++ 42
++ }
++}
++
++impl Trait for Struct {
++ fn bar(&self) -> i32 {
++ self.fun_name()
++ }
++}
++"#,
++ )
++ }
++
+ #[test]
+ fn closure_arguments() {
+ check_assist(
+ extract_function,
+ r#"
+fn parent(factor: i32) {
+ let v = &[1, 2, 3];
+
+ $0v.iter().map(|it| it * factor);$0
+}
+"#,
+ r#"
+fn parent(factor: i32) {
+ let v = &[1, 2, 3];
+
+ fun_name(v, factor);
+}
+
+fn $0fun_name(v: &[i32; 3], factor: i32) {
+ v.iter().map(|it| it * factor);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn preserve_generics() {
+ check_assist(
+ extract_function,
+ r#"
+fn func<T: Debug>(i: T) {
+ $0foo(i);$0
+}
+"#,
+ r#"
+fn func<T: Debug>(i: T) {
+ fun_name(i);
+}
+
+fn $0fun_name<T: Debug>(i: T) {
+ foo(i);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn preserve_generics_from_body() {
+ check_assist(
+ extract_function,
+ r#"
+fn func<T: Default>() -> T {
+ $0T::default()$0
+}
+"#,
+ r#"
+fn func<T: Default>() -> T {
+ fun_name()
+}
+
+fn $0fun_name<T: Default>() -> T {
+ T::default()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn filter_unused_generics() {
+ check_assist(
+ extract_function,
+ r#"
+fn func<T: Debug, U: Copy>(i: T, u: U) {
+ bar(u);
+ $0foo(i);$0
+}
+"#,
+ r#"
+fn func<T: Debug, U: Copy>(i: T, u: U) {
+ bar(u);
+ fun_name(i);
+}
+
+fn $0fun_name<T: Debug>(i: T) {
+ foo(i);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn empty_generic_param_list() {
+ check_assist(
+ extract_function,
+ r#"
+fn func<T: Debug>(t: T, i: u32) {
+ bar(t);
+ $0foo(i);$0
+}
+"#,
+ r#"
+fn func<T: Debug>(t: T, i: u32) {
+ bar(t);
+ fun_name(i);
+}
+
+fn $0fun_name(i: u32) {
+ foo(i);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn preserve_where_clause() {
+ check_assist(
+ extract_function,
+ r#"
+fn func<T>(i: T) where T: Debug {
+ $0foo(i);$0
+}
+"#,
+ r#"
+fn func<T>(i: T) where T: Debug {
+ fun_name(i);
+}
+
+fn $0fun_name<T>(i: T) where T: Debug {
+ foo(i);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn filter_unused_where_clause() {
+ check_assist(
+ extract_function,
+ r#"
+fn func<T, U>(i: T, u: U) where T: Debug, U: Copy {
+ bar(u);
+ $0foo(i);$0
+}
+"#,
+ r#"
+fn func<T, U>(i: T, u: U) where T: Debug, U: Copy {
+ bar(u);
+ fun_name(i);
+}
+
+fn $0fun_name<T>(i: T) where T: Debug {
+ foo(i);
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn nested_generics() {
+ check_assist(
+ extract_function,
+ r#"
+struct Struct<T: Into<i32>>(T);
+impl <T: Into<i32> + Copy> Struct<T> {
+ fn func<V: Into<i32>>(&self, v: V) -> i32 {
+ let t = self.0;
+ $0t.into() + v.into()$0
+ }
+}
+"#,
+ r#"
+struct Struct<T: Into<i32>>(T);
+impl <T: Into<i32> + Copy> Struct<T> {
+ fn func<V: Into<i32>>(&self, v: V) -> i32 {
+ let t = self.0;
+ fun_name(t, v)
+ }
+}
+
+fn $0fun_name<T: Into<i32> + Copy, V: Into<i32>>(t: T, v: V) -> i32 {
+ t.into() + v.into()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn filters_unused_nested_generics() {
+ check_assist(
+ extract_function,
+ r#"
+struct Struct<T: Into<i32>, U: Debug>(T, U);
+impl <T: Into<i32> + Copy, U: Debug> Struct<T, U> {
+ fn func<V: Into<i32>>(&self, v: V) -> i32 {
+ let t = self.0;
+ $0t.into() + v.into()$0
+ }
+}
+"#,
+ r#"
+struct Struct<T: Into<i32>, U: Debug>(T, U);
+impl <T: Into<i32> + Copy, U: Debug> Struct<T, U> {
+ fn func<V: Into<i32>>(&self, v: V) -> i32 {
+ let t = self.0;
+ fun_name(t, v)
+ }
+}
+
+fn $0fun_name<T: Into<i32> + Copy, V: Into<i32>>(t: T, v: V) -> i32 {
+ t.into() + v.into()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn nested_where_clauses() {
+ check_assist(
+ extract_function,
+ r#"
+struct Struct<T>(T) where T: Into<i32>;
+impl <T> Struct<T> where T: Into<i32> + Copy {
+ fn func<V>(&self, v: V) -> i32 where V: Into<i32> {
+ let t = self.0;
+ $0t.into() + v.into()$0
+ }
+}
+"#,
+ r#"
+struct Struct<T>(T) where T: Into<i32>;
+impl <T> Struct<T> where T: Into<i32> + Copy {
+ fn func<V>(&self, v: V) -> i32 where V: Into<i32> {
+ let t = self.0;
+ fun_name(t, v)
+ }
+}
+
+fn $0fun_name<T, V>(t: T, v: V) -> i32 where T: Into<i32> + Copy, V: Into<i32> {
+ t.into() + v.into()
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn filters_unused_nested_where_clauses() {
+ check_assist(
+ extract_function,
+ r#"
+struct Struct<T, U>(T, U) where T: Into<i32>, U: Debug;
+impl <T, U> Struct<T, U> where T: Into<i32> + Copy, U: Debug {
+ fn func<V>(&self, v: V) -> i32 where V: Into<i32> {
+ let t = self.0;
+ $0t.into() + v.into()$0
+ }
+}
+"#,
+ r#"
+struct Struct<T, U>(T, U) where T: Into<i32>, U: Debug;
+impl <T, U> Struct<T, U> where T: Into<i32> + Copy, U: Debug {
+ fn func<V>(&self, v: V) -> i32 where V: Into<i32> {
+ let t = self.0;
+ fun_name(t, v)
+ }
+}
+
+fn $0fun_name<T, V>(t: T, v: V) -> i32 where T: Into<i32> + Copy, V: Into<i32> {
+ t.into() + v.into()
+}
+"#,
+ );
+ }
+}
--- /dev/null
- use hir::{db::HirDatabase, HasSource, HasVisibility, PathResolution};
++use hir::{db::HirDatabase, HasSource, HasVisibility, ModuleDef, PathResolution, ScopeDef};
+use ide_db::base_db::FileId;
+use syntax::{
+ ast::{self, HasVisibility as _},
+ AstNode, TextRange, TextSize,
+};
+
+use crate::{utils::vis_offset, AssistContext, AssistId, AssistKind, Assists};
+
+// FIXME: this really should be a fix for diagnostic, rather than an assist.
+
+// Assist: fix_visibility
+//
+// Makes inaccessible item public.
+//
+// ```
+// mod m {
+// fn frobnicate() {}
+// }
+// fn main() {
- // m::frobnicate$0() {}
++// m::frobnicate$0();
+// }
+// ```
+// ->
+// ```
+// mod m {
+// $0pub(crate) fn frobnicate() {}
+// }
+// fn main() {
- // m::frobnicate() {}
++// m::frobnicate();
+// }
+// ```
+pub(crate) fn fix_visibility(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ add_vis_to_referenced_module_def(acc, ctx)
+ .or_else(|| add_vis_to_referenced_record_field(acc, ctx))
+}
+
+fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let path: ast::Path = ctx.find_node_at_offset()?;
- let path_res = ctx.sema.resolve_path(&path)?;
- let def = match path_res {
- PathResolution::Def(def) => def,
- _ => return None,
- };
++ let qualifier = path.qualifier()?;
++ let name_ref = path.segment()?.name_ref()?;
++ let qualifier_res = ctx.sema.resolve_path(&qualifier)?;
++ let PathResolution::Def(ModuleDef::Module(module)) = qualifier_res else { return None; };
++ let (_, def) = module
++ .scope(ctx.db(), None)
++ .into_iter()
++ .find(|(name, _)| name.to_smol_str() == name_ref.text().as_str())?;
++ let ScopeDef::ModuleDef(def) = def else { return None; };
+
+ let current_module = ctx.sema.scope(path.syntax())?.module();
+ let target_module = def.module(ctx.db())?;
+
+ if def.visibility(ctx.db()).is_visible_from(ctx.db(), current_module.into()) {
+ return None;
+ };
+
+ let (offset, current_visibility, target, target_file, target_name) =
+ target_data_for_def(ctx.db(), def)?;
+
+ let missing_visibility =
+ if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" };
+
+ let assist_label = match target_name {
+ None => format!("Change visibility to {missing_visibility}"),
+ Some(name) => format!("Change visibility of {name} to {missing_visibility}"),
+ };
+
+ acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| {
+ builder.edit_file(target_file);
+ match ctx.config.snippet_cap {
+ Some(cap) => match current_visibility {
+ Some(current_visibility) => builder.replace_snippet(
+ cap,
+ current_visibility.syntax().text_range(),
+ format!("$0{missing_visibility}"),
+ ),
+ None => builder.insert_snippet(cap, offset, format!("$0{missing_visibility} ")),
+ },
+ None => match current_visibility {
+ Some(current_visibility) => {
+ builder.replace(current_visibility.syntax().text_range(), missing_visibility)
+ }
+ None => builder.insert(offset, format!("{missing_visibility} ")),
+ },
+ }
+ })
+}
+
+fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let record_field: ast::RecordExprField = ctx.find_node_at_offset()?;
+ let (record_field_def, _, _) = ctx.sema.resolve_record_field(&record_field)?;
+
+ let current_module = ctx.sema.scope(record_field.syntax())?.module();
+ let visibility = record_field_def.visibility(ctx.db());
+ if visibility.is_visible_from(ctx.db(), current_module.into()) {
+ return None;
+ }
+
+ let parent = record_field_def.parent_def(ctx.db());
+ let parent_name = parent.name(ctx.db());
+ let target_module = parent.module(ctx.db());
+
+ let in_file_source = record_field_def.source(ctx.db())?;
+ let (offset, current_visibility, target) = match in_file_source.value {
+ hir::FieldSource::Named(it) => {
+ let s = it.syntax();
+ (vis_offset(s), it.visibility(), s.text_range())
+ }
+ hir::FieldSource::Pos(it) => {
+ let s = it.syntax();
+ (vis_offset(s), it.visibility(), s.text_range())
+ }
+ };
+
+ let missing_visibility =
+ if current_module.krate() == target_module.krate() { "pub(crate)" } else { "pub" };
+ let target_file = in_file_source.file_id.original_file(ctx.db());
+
+ let target_name = record_field_def.name(ctx.db());
+ let assist_label =
+ format!("Change visibility of {parent_name}.{target_name} to {missing_visibility}");
+
+ acc.add(AssistId("fix_visibility", AssistKind::QuickFix), assist_label, target, |builder| {
+ builder.edit_file(target_file);
+ match ctx.config.snippet_cap {
+ Some(cap) => match current_visibility {
+ Some(current_visibility) => builder.replace_snippet(
+ cap,
+ current_visibility.syntax().text_range(),
+ format!("$0{missing_visibility}"),
+ ),
+ None => builder.insert_snippet(cap, offset, format!("$0{missing_visibility} ")),
+ },
+ None => match current_visibility {
+ Some(current_visibility) => {
+ builder.replace(current_visibility.syntax().text_range(), missing_visibility)
+ }
+ None => builder.insert(offset, format!("{missing_visibility} ")),
+ },
+ }
+ })
+}
+
+fn target_data_for_def(
+ db: &dyn HirDatabase,
+ def: hir::ModuleDef,
+) -> Option<(TextSize, Option<ast::Visibility>, TextRange, FileId, Option<hir::Name>)> {
+ fn offset_target_and_file_id<S, Ast>(
+ db: &dyn HirDatabase,
+ x: S,
+ ) -> Option<(TextSize, Option<ast::Visibility>, TextRange, FileId)>
+ where
+ S: HasSource<Ast = Ast>,
+ Ast: AstNode + ast::HasVisibility,
+ {
+ let source = x.source(db)?;
+ let in_file_syntax = source.syntax();
+ let file_id = in_file_syntax.file_id;
+ let syntax = in_file_syntax.value;
+ let current_visibility = source.value.visibility();
+ Some((
+ vis_offset(syntax),
+ current_visibility,
+ syntax.text_range(),
+ file_id.original_file(db.upcast()),
+ ))
+ }
+
+ let target_name;
+ let (offset, current_visibility, target, target_file) = match def {
+ hir::ModuleDef::Function(f) => {
+ target_name = Some(f.name(db));
+ offset_target_and_file_id(db, f)?
+ }
+ hir::ModuleDef::Adt(adt) => {
+ target_name = Some(adt.name(db));
+ match adt {
+ hir::Adt::Struct(s) => offset_target_and_file_id(db, s)?,
+ hir::Adt::Union(u) => offset_target_and_file_id(db, u)?,
+ hir::Adt::Enum(e) => offset_target_and_file_id(db, e)?,
+ }
+ }
+ hir::ModuleDef::Const(c) => {
+ target_name = c.name(db);
+ offset_target_and_file_id(db, c)?
+ }
+ hir::ModuleDef::Static(s) => {
+ target_name = Some(s.name(db));
+ offset_target_and_file_id(db, s)?
+ }
+ hir::ModuleDef::Trait(t) => {
+ target_name = Some(t.name(db));
+ offset_target_and_file_id(db, t)?
+ }
+ hir::ModuleDef::TypeAlias(t) => {
+ target_name = Some(t.name(db));
+ offset_target_and_file_id(db, t)?
+ }
+ hir::ModuleDef::Module(m) => {
+ target_name = m.name(db);
+ let in_file_source = m.declaration_source(db)?;
+ let file_id = in_file_source.file_id.original_file(db.upcast());
+ let syntax = in_file_source.value.syntax();
+ (vis_offset(syntax), in_file_source.value.visibility(), syntax.text_range(), file_id)
+ }
+ // FIXME
+ hir::ModuleDef::Macro(_) => return None,
+ // Enum variants can't be private, we can't modify builtin types
+ hir::ModuleDef::Variant(_) | hir::ModuleDef::BuiltinType(_) => return None,
+ };
+
+ Some((offset, current_visibility, target, target_file, target_name))
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn fix_visibility_of_fn() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { fn foo() {} }
+ fn main() { foo::foo$0() } ",
+ r"mod foo { $0pub(crate) fn foo() {} }
+ fn main() { foo::foo() } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub fn foo() {} }
+ fn main() { foo::foo$0() } ",
+ )
+ }
+
+ #[test]
+ fn fix_visibility_of_adt_in_submodule() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { struct Foo; }
+ fn main() { foo::Foo$0 } ",
+ r"mod foo { $0pub(crate) struct Foo; }
+ fn main() { foo::Foo } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub struct Foo; }
+ fn main() { foo::Foo$0 } ",
+ );
+ check_assist(
+ fix_visibility,
+ r"mod foo { enum Foo; }
+ fn main() { foo::Foo$0 } ",
+ r"mod foo { $0pub(crate) enum Foo; }
+ fn main() { foo::Foo } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub enum Foo; }
+ fn main() { foo::Foo$0 } ",
+ );
+ check_assist(
+ fix_visibility,
+ r"mod foo { union Foo; }
+ fn main() { foo::Foo$0 } ",
+ r"mod foo { $0pub(crate) union Foo; }
+ fn main() { foo::Foo } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub union Foo; }
+ fn main() { foo::Foo$0 } ",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_adt_in_other_file() {
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs
+mod foo;
+fn main() { foo::Foo$0 }
+
+//- /foo.rs
+struct Foo;
+",
+ r"$0pub(crate) struct Foo;
+",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_struct_field() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { pub struct Foo { bar: (), } }
+ fn main() { foo::Foo { $0bar: () }; } ",
+ r"mod foo { pub struct Foo { $0pub(crate) bar: (), } }
+ fn main() { foo::Foo { bar: () }; } ",
+ );
+ check_assist(
+ fix_visibility,
+ r"
+//- /lib.rs
+mod foo;
+fn main() { foo::Foo { $0bar: () }; }
+//- /foo.rs
+pub struct Foo { bar: () }
+",
+ r"pub struct Foo { $0pub(crate) bar: () }
+",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub struct Foo { pub bar: (), } }
+ fn main() { foo::Foo { $0bar: () }; } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"
+//- /lib.rs
+mod foo;
+fn main() { foo::Foo { $0bar: () }; }
+//- /foo.rs
+pub struct Foo { pub bar: () }
+",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_enum_variant_field() {
+ // Enum variants, as well as their fields, always get the enum's visibility. In fact, rustc
+ // rejects any visibility specifiers on them, so this assist should never fire on them.
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub enum Foo { Bar { bar: () } } }
+ fn main() { foo::Foo::Bar { $0bar: () }; } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"
+//- /lib.rs
+mod foo;
+fn main() { foo::Foo::Bar { $0bar: () }; }
+//- /foo.rs
+pub enum Foo { Bar { bar: () } }
+",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub struct Foo { pub bar: (), } }
+ fn main() { foo::Foo { $0bar: () }; } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"
+//- /lib.rs
+mod foo;
+fn main() { foo::Foo { $0bar: () }; }
+//- /foo.rs
+pub struct Foo { pub bar: () }
+",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_union_field() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { pub union Foo { bar: (), } }
+ fn main() { foo::Foo { $0bar: () }; } ",
+ r"mod foo { pub union Foo { $0pub(crate) bar: (), } }
+ fn main() { foo::Foo { bar: () }; } ",
+ );
+ check_assist(
+ fix_visibility,
+ r"
+//- /lib.rs
+mod foo;
+fn main() { foo::Foo { $0bar: () }; }
+//- /foo.rs
+pub union Foo { bar: () }
+",
+ r"pub union Foo { $0pub(crate) bar: () }
+",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub union Foo { pub bar: (), } }
+ fn main() { foo::Foo { $0bar: () }; } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"
+//- /lib.rs
+mod foo;
+fn main() { foo::Foo { $0bar: () }; }
+//- /foo.rs
+pub union Foo { pub bar: () }
+",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_const() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { const FOO: () = (); }
+ fn main() { foo::FOO$0 } ",
+ r"mod foo { $0pub(crate) const FOO: () = (); }
+ fn main() { foo::FOO } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub const FOO: () = (); }
+ fn main() { foo::FOO$0 } ",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_static() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { static FOO: () = (); }
+ fn main() { foo::FOO$0 } ",
+ r"mod foo { $0pub(crate) static FOO: () = (); }
+ fn main() { foo::FOO } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub static FOO: () = (); }
+ fn main() { foo::FOO$0 } ",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_trait() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { trait Foo { fn foo(&self) {} } }
+ fn main() { let x: &dyn foo::$0Foo; } ",
+ r"mod foo { $0pub(crate) trait Foo { fn foo(&self) {} } }
+ fn main() { let x: &dyn foo::Foo; } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub trait Foo { fn foo(&self) {} } }
+ fn main() { let x: &dyn foo::Foo$0; } ",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_type_alias() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { type Foo = (); }
+ fn main() { let x: foo::Foo$0; } ",
+ r"mod foo { $0pub(crate) type Foo = (); }
+ fn main() { let x: foo::Foo; } ",
+ );
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub type Foo = (); }
+ fn main() { let x: foo::Foo$0; } ",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_module() {
+ check_assist(
+ fix_visibility,
+ r"mod foo { mod bar { fn bar() {} } }
+ fn main() { foo::bar$0::bar(); } ",
+ r"mod foo { $0pub(crate) mod bar { fn bar() {} } }
+ fn main() { foo::bar::bar(); } ",
+ );
+
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs
+mod foo;
+fn main() { foo::bar$0::baz(); }
+
+//- /foo.rs
+mod bar {
+ pub fn baz() {}
+}
+",
+ r"$0pub(crate) mod bar {
+ pub fn baz() {}
+}
+",
+ );
+
+ check_assist_not_applicable(
+ fix_visibility,
+ r"mod foo { pub mod bar { pub fn bar() {} } }
+ fn main() { foo::bar$0::bar(); } ",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_inline_module_in_other_file() {
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs
+mod foo;
+fn main() { foo::bar$0::baz(); }
+
+//- /foo.rs
+mod bar;
+//- /foo/bar.rs
+pub fn baz() {}
+",
+ r"$0pub(crate) mod bar;
+",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_module_declaration_in_other_file() {
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs
+mod foo;
+fn main() { foo::bar$0::baz(); }
+
+//- /foo.rs
+mod bar {
+ pub fn baz() {}
+}
+",
+ r"$0pub(crate) mod bar {
+ pub fn baz() {}
+}
+",
+ );
+ }
+
+ #[test]
+ fn adds_pub_when_target_is_in_another_crate() {
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs crate:a deps:foo
+foo::Bar$0
+//- /lib.rs crate:foo
+struct Bar;
+",
+ r"$0pub struct Bar;
+",
+ )
+ }
+
+ #[test]
+ fn replaces_pub_crate_with_pub() {
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs crate:a deps:foo
+foo::Bar$0
+//- /lib.rs crate:foo
+pub(crate) struct Bar;
+",
+ r"$0pub struct Bar;
+",
+ );
+ check_assist(
+ fix_visibility,
+ r"
+//- /main.rs crate:a deps:foo
+fn main() {
+ foo::Foo { $0bar: () };
+}
+//- /lib.rs crate:foo
+pub struct Foo { pub(crate) bar: () }
+",
+ r"pub struct Foo { $0pub bar: () }
+",
+ );
+ }
+
+ #[test]
+ fn fix_visibility_of_reexport() {
+ // FIXME: broken test, this should fix visibility of the re-export
+ // rather than the struct.
+ check_assist(
+ fix_visibility,
+ r#"
+mod foo {
+ use bar::Baz;
+ mod bar { pub(super) struct Baz; }
+}
+foo::Baz$0
+"#,
+ r#"
+mod foo {
+ use bar::Baz;
+ mod bar { $0pub(crate) struct Baz; }
+}
+foo::Baz
+"#,
+ )
+ }
+}
--- /dev/null
+++ b/crates/ide-assists/src/handlers/generate_enum_variant.rs
+use hir::{HasSource, HirDisplay, InFile};
+use ide_db::assists::{AssistId, AssistKind};
+use syntax::{
+ ast::{self, make, HasArgList},
+ match_ast, AstNode, SyntaxNode,
+};
+
+use crate::assist_context::{AssistContext, Assists};
+
+// Assist: generate_enum_variant
+//
+// Adds a variant to an enum.
+//
+// ```
+// enum Countries {
+// Ghana,
+// }
+//
+// fn main() {
+// let country = Countries::Lesotho$0;
+// }
+// ```
+// ->
+// ```
+// enum Countries {
+// Ghana,
+// Lesotho,
+// }
+//
+// fn main() {
+// let country = Countries::Lesotho;
+// }
+// ```
+pub(crate) fn generate_enum_variant(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let path: ast::Path = ctx.find_node_at_offset()?;
+ let parent = path_parent(&path)?;
+
+ if ctx.sema.resolve_path(&path).is_some() {
+ // No need to generate anything if the path resolves
+ return None;
+ }
+
+ let name_ref = path.segment()?.name_ref()?;
+ if name_ref.text().starts_with(char::is_lowercase) {
+ // Don't suggest generating variant if the name starts with a lowercase letter
+ return None;
+ }
+
+ if let Some(hir::PathResolution::Def(hir::ModuleDef::Adt(hir::Adt::Enum(e)))) =
+ ctx.sema.resolve_path(&path.qualifier()?)
+ {
+ let target = path.syntax().text_range();
+ return add_variant_to_accumulator(acc, ctx, target, e, &name_ref, parent);
+ }
+
+ None
+}
+
+#[derive(Debug)]
+enum PathParent {
+ PathExpr(ast::PathExpr),
+ RecordExpr(ast::RecordExpr),
+ PathPat(ast::PathPat),
+ UseTree(ast::UseTree),
+}
+
+impl PathParent {
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ PathParent::PathExpr(it) => it.syntax(),
+ PathParent::RecordExpr(it) => it.syntax(),
+ PathParent::PathPat(it) => it.syntax(),
+ PathParent::UseTree(it) => it.syntax(),
+ }
+ }
+
+ fn make_field_list(&self, ctx: &AssistContext<'_>) -> Option<ast::FieldList> {
+ let scope = ctx.sema.scope(self.syntax())?;
+
+ match self {
+ PathParent::PathExpr(it) => {
+ if let Some(call_expr) = it.syntax().parent().and_then(ast::CallExpr::cast) {
+ make_tuple_field_list(call_expr, ctx, &scope)
+ } else {
+ None
+ }
+ }
+ PathParent::RecordExpr(it) => make_record_field_list(it, ctx, &scope),
+ PathParent::UseTree(_) | PathParent::PathPat(_) => None,
+ }
+ }
+}
+
+fn path_parent(path: &ast::Path) -> Option<PathParent> {
+ let parent = path.syntax().parent()?;
+
+ match_ast! {
+ match parent {
+ ast::PathExpr(it) => Some(PathParent::PathExpr(it)),
+ ast::RecordExpr(it) => Some(PathParent::RecordExpr(it)),
+ ast::PathPat(it) => Some(PathParent::PathPat(it)),
+ ast::UseTree(it) => Some(PathParent::UseTree(it)),
+ _ => None
+ }
+ }
+}
+
+fn add_variant_to_accumulator(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+ target: syntax::TextRange,
+ adt: hir::Enum,
+ name_ref: &ast::NameRef,
+ parent: PathParent,
+) -> Option<()> {
+ let db = ctx.db();
+ let InFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;
+
+ acc.add(
+ AssistId("generate_enum_variant", AssistKind::Generate),
+ "Generate variant",
+ target,
+ |builder| {
+ builder.edit_file(file_id.original_file(db));
+ let node = builder.make_mut(enum_node);
+ let variant = make_variant(ctx, name_ref, parent);
+ node.variant_list().map(|it| it.add_variant(variant.clone_for_update()));
+ },
+ )
+}
+
+fn make_variant(
+ ctx: &AssistContext<'_>,
+ name_ref: &ast::NameRef,
+ parent: PathParent,
+) -> ast::Variant {
+ let field_list = parent.make_field_list(ctx);
+ make::variant(make::name(&name_ref.text()), field_list)
+}
+
+fn make_record_field_list(
+ record: &ast::RecordExpr,
+ ctx: &AssistContext<'_>,
+ scope: &hir::SemanticsScope<'_>,
+) -> Option<ast::FieldList> {
+ let fields = record.record_expr_field_list()?.fields();
+ let record_fields = fields.map(|field| {
+ let name = name_from_field(&field);
+
+ let ty = field
+ .expr()
+ .and_then(|it| expr_ty(ctx, it, scope))
+ .unwrap_or_else(make::ty_placeholder);
+
+ make::record_field(None, name, ty)
+ });
+ Some(make::record_field_list(record_fields).into())
+}
+
+fn name_from_field(field: &ast::RecordExprField) -> ast::Name {
+ let text = match field.name_ref() {
+ Some(it) => it.to_string(),
+ None => name_from_field_shorthand(field).unwrap_or("unknown".to_string()),
+ };
+ make::name(&text)
+}
+
+fn name_from_field_shorthand(field: &ast::RecordExprField) -> Option<String> {
+ let path = match field.expr()? {
+ ast::Expr::PathExpr(path_expr) => path_expr.path(),
+ _ => None,
+ }?;
+ Some(path.as_single_name_ref()?.to_string())
+}
+
+fn make_tuple_field_list(
+ call_expr: ast::CallExpr,
+ ctx: &AssistContext<'_>,
+ scope: &hir::SemanticsScope<'_>,
+) -> Option<ast::FieldList> {
+ let args = call_expr.arg_list()?.args();
+ let tuple_fields = args.map(|arg| {
+ let ty = expr_ty(ctx, arg, &scope).unwrap_or_else(make::ty_placeholder);
+ make::tuple_field(None, ty)
+ });
+ Some(make::tuple_field_list(tuple_fields).into())
+}
+
+fn expr_ty(
+ ctx: &AssistContext<'_>,
+ arg: ast::Expr,
+ scope: &hir::SemanticsScope<'_>,
+) -> Option<ast::Type> {
+ let ty = ctx.sema.type_of_expr(&arg).map(|it| it.adjusted())?;
+ let text = ty.display_source_code(ctx.db(), scope.module().into()).ok()?;
+ Some(make::ty(&text))
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn generate_basic_enum_variant_in_empty_enum() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {}
+fn main() {
+ Foo::Bar$0
+}
+",
+ r"
+enum Foo {
+ Bar,
+}
+fn main() {
+ Foo::Bar
+}
+",
+ )
+ }
+
+ #[test]
+ fn generate_basic_enum_variant_in_non_empty_enum() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {
+ Bar,
+}
+fn main() {
+ Foo::Baz$0
+}
+",
+ r"
+enum Foo {
+ Bar,
+ Baz,
+}
+fn main() {
+ Foo::Baz
+}
+",
+ )
+ }
+
+ #[test]
+ fn generate_basic_enum_variant_in_different_file() {
+ check_assist(
+ generate_enum_variant,
+ r"
+//- /main.rs
+mod foo;
+use foo::Foo;
+
+fn main() {
+ Foo::Baz$0
+}
+
+//- /foo.rs
+pub enum Foo {
+ Bar,
+}
+",
+ r"
+pub enum Foo {
+ Bar,
+ Baz,
+}
+",
+ )
+ }
+
+ #[test]
+ fn not_applicable_for_existing_variant() {
+ check_assist_not_applicable(
+ generate_enum_variant,
+ r"
+enum Foo {
+ Bar,
+}
+fn main() {
+ Foo::Bar$0
+}
+",
+ )
+ }
+
+ #[test]
+ fn not_applicable_for_lowercase() {
+ check_assist_not_applicable(
+ generate_enum_variant,
+ r"
+enum Foo {
+ Bar,
+}
+fn main() {
+ Foo::new$0
+}
+",
+ )
+ }
+
+ #[test]
+ fn indentation_level_is_correct() {
+ check_assist(
+ generate_enum_variant,
+ r"
+mod m {
+    pub enum Foo {
+ Bar,
+ }
+}
+fn main() {
+ m::Foo::Baz$0
+}
+",
+ r"
+mod m {
+    pub enum Foo {
+ Bar,
+ Baz,
+ }
+}
+fn main() {
+ m::Foo::Baz
+}
+",
+ )
+ }
+
+ #[test]
+ fn associated_single_element_tuple() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {}
+fn main() {
+ Foo::Bar$0(true)
+}
+",
+ r"
+enum Foo {
+ Bar(bool),
+}
+fn main() {
+ Foo::Bar(true)
+}
+",
+ )
+ }
+
+ #[test]
+ fn associated_single_element_tuple_unknown_type() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {}
+fn main() {
+ Foo::Bar$0(x)
+}
+",
+ r"
+enum Foo {
+ Bar(_),
+}
+fn main() {
+ Foo::Bar(x)
+}
+",
+ )
+ }
+
+ #[test]
+ fn associated_multi_element_tuple() {
+ check_assist(
+ generate_enum_variant,
+ r"
+struct Struct {}
+enum Foo {}
+fn main() {
+ Foo::Bar$0(true, x, Struct {})
+}
+",
+ r"
+struct Struct {}
+enum Foo {
+ Bar(bool, _, Struct),
+}
+fn main() {
+ Foo::Bar(true, x, Struct {})
+}
+",
+ )
+ }
+
+ #[test]
+ fn associated_record() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {}
+fn main() {
+ Foo::$0Bar { x: true }
+}
+",
+ r"
+enum Foo {
+ Bar { x: bool },
+}
+fn main() {
+ Foo::Bar { x: true }
+}
+",
+ )
+ }
+
+ #[test]
+ fn associated_record_unknown_type() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {}
+fn main() {
+ Foo::$0Bar { x: y }
+}
+",
+ r"
+enum Foo {
+ Bar { x: _ },
+}
+fn main() {
+ Foo::Bar { x: y }
+}
+",
+ )
+ }
+
+ #[test]
+ fn associated_record_field_shorthand() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {}
+fn main() {
+ let x = true;
+ Foo::$0Bar { x }
+}
+",
+ r"
+enum Foo {
+ Bar { x: bool },
+}
+fn main() {
+ let x = true;
+ Foo::Bar { x }
+}
+",
+ )
+ }
+
+ #[test]
+ fn associated_record_field_shorthand_unknown_type() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {}
+fn main() {
+ Foo::$0Bar { x }
+}
+",
+ r"
+enum Foo {
+ Bar { x: _ },
+}
+fn main() {
+ Foo::Bar { x }
+}
+",
+ )
+ }
+
+ #[test]
+ fn associated_record_field_multiple_fields() {
+ check_assist(
+ generate_enum_variant,
+ r"
+struct Struct {}
+enum Foo {}
+fn main() {
+ Foo::$0Bar { x, y: x, s: Struct {} }
+}
+",
+ r"
+struct Struct {}
+enum Foo {
+ Bar { x: _, y: _, s: Struct },
+}
+fn main() {
+ Foo::Bar { x, y: x, s: Struct {} }
+}
+",
+ )
+ }
+
+ #[test]
+ fn use_tree() {
+ check_assist(
+ generate_enum_variant,
+ r"
+//- /main.rs
+mod foo;
+use foo::Foo::Bar$0;
+
+//- /foo.rs
+pub enum Foo {}
+",
+ r"
+pub enum Foo {
+ Bar,
+}
+",
+ )
+ }
+
+ #[test]
+ fn not_applicable_for_path_type() {
+ check_assist_not_applicable(
+ generate_enum_variant,
+ r"
+enum Foo {}
+impl Foo::Bar$0 {}
+",
+ )
+ }
+
+ #[test]
+ fn path_pat() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {}
+fn foo(x: Foo) {
+ match x {
+ Foo::Bar$0 =>
+ }
+}
+",
+ r"
+enum Foo {
+ Bar,
+}
+fn foo(x: Foo) {
+ match x {
+ Foo::Bar =>
+ }
+}
+",
+ )
+ }
+}
--- /dev/null
+++ b/crates/ide-assists/src/handlers/generate_function.rs
+use hir::{Adt, HasSource, HirDisplay, Module, Semantics, TypeInfo};
+use ide_db::{
+ base_db::FileId,
+ defs::{Definition, NameRefClass},
+ famous_defs::FamousDefs,
+ FxHashMap, FxHashSet, RootDatabase, SnippetCap,
+};
+use stdx::to_lower_snake_case;
+use syntax::{
+ ast::{
+ self,
+ edit::{AstNodeEdit, IndentLevel},
+ make, AstNode, CallExpr, HasArgList, HasModuleItem,
+ },
+ SyntaxKind, SyntaxNode, TextRange, TextSize,
+};
+
+use crate::{
+ utils::convert_reference_type,
+ utils::{find_struct_impl, render_snippet, Cursor},
+ AssistContext, AssistId, AssistKind, Assists,
+};
+
+// Assist: generate_function
+//
+// Adds a stub function with a signature matching the function under the cursor.
+//
+// ```
+// struct Baz;
+// fn baz() -> Baz { Baz }
+// fn foo() {
+// bar$0("", baz());
+// }
+//
+// ```
+// ->
+// ```
+// struct Baz;
+// fn baz() -> Baz { Baz }
+// fn foo() {
+// bar("", baz());
+// }
+//
+// fn bar(arg: &str, baz: Baz) ${0:-> _} {
+// todo!()
+// }
+//
+// ```
+pub(crate) fn generate_function(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ gen_fn(acc, ctx).or_else(|| gen_method(acc, ctx))
+}
+
+fn gen_fn(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let path_expr: ast::PathExpr = ctx.find_node_at_offset()?;
+ let call = path_expr.syntax().parent().and_then(ast::CallExpr::cast)?;
+ let path = path_expr.path()?;
+ let name_ref = path.segment()?.name_ref()?;
+ if ctx.sema.resolve_path(&path).is_some() {
+ // The function call already resolves, no need to add a function
+ return None;
+ }
+
+ let fn_name = &*name_ref.text();
+ let TargetInfo { target_module, adt_name, target, file, insert_offset } =
+ fn_target_info(ctx, path, &call, fn_name)?;
+ let function_builder = FunctionBuilder::from_call(ctx, &call, fn_name, target_module, target)?;
+ let text_range = call.syntax().text_range();
+ let label = format!("Generate {} function", function_builder.fn_name);
+ add_func_to_accumulator(
+ acc,
+ ctx,
+ text_range,
+ function_builder,
+ insert_offset,
+ file,
+ adt_name,
+ label,
+ )
+}
+
+struct TargetInfo {
+ target_module: Option<Module>,
+ adt_name: Option<hir::Name>,
+ target: GeneratedFunctionTarget,
+ file: FileId,
+ insert_offset: TextSize,
+}
+
+impl TargetInfo {
+ fn new(
+ target_module: Option<Module>,
+ adt_name: Option<hir::Name>,
+ target: GeneratedFunctionTarget,
+ file: FileId,
+ insert_offset: TextSize,
+ ) -> Self {
+ Self { target_module, adt_name, target, file, insert_offset }
+ }
+}
+
+fn fn_target_info(
+ ctx: &AssistContext<'_>,
+ path: ast::Path,
+ call: &CallExpr,
+ fn_name: &str,
+) -> Option<TargetInfo> {
+ match path.qualifier() {
+ Some(qualifier) => match ctx.sema.resolve_path(&qualifier) {
+ Some(hir::PathResolution::Def(hir::ModuleDef::Module(module))) => {
+ get_fn_target_info(ctx, &Some(module), call.clone())
+ }
+ Some(hir::PathResolution::Def(hir::ModuleDef::Adt(adt))) => {
+ if let hir::Adt::Enum(_) = adt {
+ // Don't suggest generating function if the name starts with an uppercase letter
+ if fn_name.starts_with(char::is_uppercase) {
+ return None;
+ }
+ }
+
+ assoc_fn_target_info(ctx, call, adt, fn_name)
+ }
+ Some(hir::PathResolution::SelfType(impl_)) => {
+ let adt = impl_.self_ty(ctx.db()).as_adt()?;
+ assoc_fn_target_info(ctx, call, adt, fn_name)
+ }
+ _ => None,
+ },
+ _ => get_fn_target_info(ctx, &None, call.clone()),
+ }
+}
+
+fn gen_method(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let call: ast::MethodCallExpr = ctx.find_node_at_offset()?;
+ if ctx.sema.resolve_method_call(&call).is_some() {
+ return None;
+ }
+
+ let fn_name = call.name_ref()?;
+ let adt = ctx.sema.type_of_expr(&call.receiver()?)?.original().strip_references().as_adt()?;
+
+ let current_module = ctx.sema.scope(call.syntax())?.module();
+ let target_module = adt.module(ctx.sema.db);
+
+ if current_module.krate() != target_module.krate() {
+ return None;
+ }
+ let (impl_, file) = get_adt_source(ctx, &adt, fn_name.text().as_str())?;
+ let (target, insert_offset) = get_method_target(ctx, &impl_, &adt)?;
+
+ let function_builder =
+ FunctionBuilder::from_method_call(ctx, &call, &fn_name, target_module, target)?;
+ let text_range = call.syntax().text_range();
+ let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None };
+ let label = format!("Generate {} method", function_builder.fn_name);
+ add_func_to_accumulator(
+ acc,
+ ctx,
+ text_range,
+ function_builder,
+ insert_offset,
+ file,
+ adt_name,
+ label,
+ )
+}
+
+fn add_func_to_accumulator(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+ text_range: TextRange,
+ function_builder: FunctionBuilder,
+ insert_offset: TextSize,
+ file: FileId,
+ adt_name: Option<hir::Name>,
+ label: String,
+) -> Option<()> {
+ acc.add(AssistId("generate_function", AssistKind::Generate), label, text_range, |builder| {
+ let indent = IndentLevel::from_node(function_builder.target.syntax());
+ let function_template = function_builder.render(adt_name.is_some());
+ let mut func = function_template.to_string(ctx.config.snippet_cap);
+ if let Some(name) = adt_name {
+ func = format!("\n{indent}impl {name} {{\n{func}\n{indent}}}");
+ }
+ builder.edit_file(file);
+ match ctx.config.snippet_cap {
+ Some(cap) => builder.insert_snippet(cap, insert_offset, func),
+ None => builder.insert(insert_offset, func),
+ }
+ })
+}
+
+fn get_adt_source(
+ ctx: &AssistContext<'_>,
+ adt: &hir::Adt,
+ fn_name: &str,
+) -> Option<(Option<ast::Impl>, FileId)> {
+ let range = adt.source(ctx.sema.db)?.syntax().original_file_range(ctx.sema.db);
+ let file = ctx.sema.parse(range.file_id);
+ let adt_source =
+ ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?;
+ find_struct_impl(ctx, &adt_source, &[fn_name.to_string()]).map(|impl_| (impl_, range.file_id))
+}
+
+struct FunctionTemplate {
+ leading_ws: String,
+ fn_def: ast::Fn,
+ ret_type: Option<ast::RetType>,
+ should_focus_return_type: bool,
+ trailing_ws: String,
+ tail_expr: ast::Expr,
+}
+
+impl FunctionTemplate {
+ fn to_string(&self, cap: Option<SnippetCap>) -> String {
+ let Self { leading_ws, fn_def, ret_type, should_focus_return_type, trailing_ws, tail_expr } =
+ self;
+
+ let f = match cap {
+ Some(cap) => {
+ let cursor = if *should_focus_return_type {
+ // Focus the return type if there is one
+ match ret_type {
+ Some(ret_type) => ret_type.syntax(),
+ None => tail_expr.syntax(),
+ }
+ } else {
+ tail_expr.syntax()
+ };
+ render_snippet(cap, fn_def.syntax(), Cursor::Replace(cursor))
+ }
+ None => fn_def.to_string(),
+ };
+
+ format!("{leading_ws}{f}{trailing_ws}")
+ }
+}
+
+struct FunctionBuilder {
+ target: GeneratedFunctionTarget,
+ fn_name: ast::Name,
+ type_params: Option<ast::GenericParamList>,
+ params: ast::ParamList,
+ ret_type: Option<ast::RetType>,
+ should_focus_return_type: bool,
+ needs_pub: bool,
+ is_async: bool,
+}
+
+impl FunctionBuilder {
+ /// Prepares a generated function that matches `call`.
+ /// The function is generated in `target_module` or next to `call`
+ fn from_call(
+ ctx: &AssistContext<'_>,
+ call: &ast::CallExpr,
+ fn_name: &str,
+ target_module: Option<hir::Module>,
+ target: GeneratedFunctionTarget,
+ ) -> Option<Self> {
+ let needs_pub = target_module.is_some();
+ let target_module =
+ target_module.or_else(|| ctx.sema.scope(target.syntax()).map(|it| it.module()))?;
+ let fn_name = make::name(fn_name);
+ let (type_params, params) =
+ fn_args(ctx, target_module, ast::CallableExpr::Call(call.clone()))?;
+
+ let await_expr = call.syntax().parent().and_then(ast::AwaitExpr::cast);
+ let is_async = await_expr.is_some();
+
+ let (ret_type, should_focus_return_type) =
+ make_return_type(ctx, &ast::Expr::CallExpr(call.clone()), target_module);
+
+ Some(Self {
+ target,
+ fn_name,
+ type_params,
+ params,
+ ret_type,
+ should_focus_return_type,
+ needs_pub,
+ is_async,
+ })
+ }
+
+ fn from_method_call(
+ ctx: &AssistContext<'_>,
+ call: &ast::MethodCallExpr,
+ name: &ast::NameRef,
+ target_module: Module,
+ target: GeneratedFunctionTarget,
+ ) -> Option<Self> {
+ let needs_pub =
+ !module_is_descendant(&ctx.sema.scope(call.syntax())?.module(), &target_module, ctx);
+ let fn_name = make::name(&name.text());
+ let (type_params, params) =
+ fn_args(ctx, target_module, ast::CallableExpr::MethodCall(call.clone()))?;
+
+ let await_expr = call.syntax().parent().and_then(ast::AwaitExpr::cast);
+ let is_async = await_expr.is_some();
+
+ let (ret_type, should_focus_return_type) =
+ make_return_type(ctx, &ast::Expr::MethodCallExpr(call.clone()), target_module);
+
+ Some(Self {
+ target,
+ fn_name,
+ type_params,
+ params,
+ ret_type,
+ should_focus_return_type,
+ needs_pub,
+ is_async,
+ })
+ }
+
+ fn render(self, is_method: bool) -> FunctionTemplate {
+ let placeholder_expr = make::ext::expr_todo();
+ let fn_body = make::block_expr(vec![], Some(placeholder_expr));
+ let visibility = if self.needs_pub { Some(make::visibility_pub_crate()) } else { None };
+ let mut fn_def = make::fn_(
+ visibility,
+ self.fn_name,
+ self.type_params,
+ self.params,
+ fn_body,
+ self.ret_type,
+ self.is_async,
+ );
+ let leading_ws;
+ let trailing_ws;
+
+ match self.target {
+ GeneratedFunctionTarget::BehindItem(it) => {
+ let mut indent = IndentLevel::from_node(&it);
+ if is_method {
+ indent = indent + 1;
+ leading_ws = format!("{indent}");
+ } else {
+ leading_ws = format!("\n\n{indent}");
+ }
+
+ fn_def = fn_def.indent(indent);
+ trailing_ws = String::new();
+ }
+ GeneratedFunctionTarget::InEmptyItemList(it) => {
+ let indent = IndentLevel::from_node(&it);
+ let leading_indent = indent + 1;
+ leading_ws = format!("\n{leading_indent}");
+ fn_def = fn_def.indent(leading_indent);
+ trailing_ws = format!("\n{indent}");
+ }
+ };
+
+ FunctionTemplate {
+ leading_ws,
+ ret_type: fn_def.ret_type(),
+ // PANIC: we guarantee we always create a function body with a tail expr
+ tail_expr: fn_def.body().unwrap().tail_expr().unwrap(),
+ should_focus_return_type: self.should_focus_return_type,
+ fn_def,
+ trailing_ws,
+ }
+ }
+}
+
+/// Makes an optional return type along with whether the return type should be focused by the cursor.
+/// If we cannot infer what the return type should be, we create a placeholder type.
+///
+/// The rule for whether we focus a return type or not (and thus focus the function body),
+/// is rather simple:
+/// * If we could *not* infer what the return type should be, focus it (so the user can fill-in
+/// the correct return type).
+/// * If we could infer the return type, don't focus it (and thus focus the function body) so the
+/// user can change the `todo!` function body.
+fn make_return_type(
+ ctx: &AssistContext<'_>,
+ call: &ast::Expr,
+ target_module: Module,
+) -> (Option<ast::RetType>, bool) {
+ let (ret_ty, should_focus_return_type) = {
+ match ctx.sema.type_of_expr(call).map(TypeInfo::original) {
+ Some(ty) if ty.is_unknown() => (Some(make::ty_placeholder()), true),
+ None => (Some(make::ty_placeholder()), true),
+ Some(ty) if ty.is_unit() => (None, false),
+ Some(ty) => {
+ let rendered = ty.display_source_code(ctx.db(), target_module.into());
+ match rendered {
+ Ok(rendered) => (Some(make::ty(&rendered)), false),
+ Err(_) => (Some(make::ty_placeholder()), true),
+ }
+ }
+ }
+ };
+ let ret_type = ret_ty.map(make::ret_type);
+ (ret_type, should_focus_return_type)
+}
+
+fn get_fn_target_info(
+ ctx: &AssistContext<'_>,
+ target_module: &Option<Module>,
+ call: CallExpr,
+) -> Option<TargetInfo> {
+ let (target, file, insert_offset) = get_fn_target(ctx, target_module, call)?;
+ Some(TargetInfo::new(*target_module, None, target, file, insert_offset))
+}
+
+fn get_fn_target(
+ ctx: &AssistContext<'_>,
+ target_module: &Option<Module>,
+ call: CallExpr,
+) -> Option<(GeneratedFunctionTarget, FileId, TextSize)> {
+ let mut file = ctx.file_id();
+ let target = match target_module {
+ Some(target_module) => {
+ let module_source = target_module.definition_source(ctx.db());
+ let (in_file, target) = next_space_for_fn_in_module(ctx.sema.db, &module_source)?;
+ file = in_file;
+ target
+ }
+ None => next_space_for_fn_after_call_site(ast::CallableExpr::Call(call))?,
+ };
+ Some((target.clone(), file, get_insert_offset(&target)))
+}
+
+fn get_method_target(
+ ctx: &AssistContext<'_>,
+ impl_: &Option<ast::Impl>,
+ adt: &Adt,
+) -> Option<(GeneratedFunctionTarget, TextSize)> {
+ let target = match impl_ {
+ Some(impl_) => next_space_for_fn_in_impl(impl_)?,
+ None => {
+ GeneratedFunctionTarget::BehindItem(adt.source(ctx.sema.db)?.syntax().value.clone())
+ }
+ };
+ Some((target.clone(), get_insert_offset(&target)))
+}
+
+fn assoc_fn_target_info(
+ ctx: &AssistContext<'_>,
+ call: &CallExpr,
+ adt: hir::Adt,
+ fn_name: &str,
+) -> Option<TargetInfo> {
+ let current_module = ctx.sema.scope(call.syntax())?.module();
+ let module = adt.module(ctx.sema.db);
+ let target_module = if current_module == module { None } else { Some(module) };
+ if current_module.krate() != module.krate() {
+ return None;
+ }
+ let (impl_, file) = get_adt_source(ctx, &adt, fn_name)?;
+ let (target, insert_offset) = get_method_target(ctx, &impl_, &adt)?;
+ let adt_name = if impl_.is_none() { Some(adt.name(ctx.sema.db)) } else { None };
+ Some(TargetInfo::new(target_module, adt_name, target, file, insert_offset))
+}
+
+fn get_insert_offset(target: &GeneratedFunctionTarget) -> TextSize {
+ match &target {
+ GeneratedFunctionTarget::BehindItem(it) => it.text_range().end(),
+ GeneratedFunctionTarget::InEmptyItemList(it) => it.text_range().start() + TextSize::of('{'),
+ }
+}
+
+#[derive(Clone)]
+enum GeneratedFunctionTarget {
+ BehindItem(SyntaxNode),
+ InEmptyItemList(SyntaxNode),
+}
+
+impl GeneratedFunctionTarget {
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ GeneratedFunctionTarget::BehindItem(it) => it,
+ GeneratedFunctionTarget::InEmptyItemList(it) => it,
+ }
+ }
+}
+
+/// Computes the type variables and arguments required for the generated function
+fn fn_args(
+ ctx: &AssistContext<'_>,
+ target_module: hir::Module,
+ call: ast::CallableExpr,
+) -> Option<(Option<ast::GenericParamList>, ast::ParamList)> {
+ let mut arg_names = Vec::new();
+ let mut arg_types = Vec::new();
+ for arg in call.arg_list()?.args() {
+ arg_names.push(fn_arg_name(&ctx.sema, &arg));
+ arg_types.push(fn_arg_type(ctx, target_module, &arg));
+ }
+ deduplicate_arg_names(&mut arg_names);
+ let params = arg_names.into_iter().zip(arg_types).map(|(name, ty)| {
+ make::param(make::ext::simple_ident_pat(make::name(&name)).into(), make::ty(&ty))
+ });
+
+ Some((
+ None,
+ make::param_list(
+ match call {
+ ast::CallableExpr::Call(_) => None,
+ ast::CallableExpr::MethodCall(_) => Some(make::self_param()),
+ },
+ params,
+ ),
+ ))
+}
+
+/// Makes duplicate argument names unique by appending incrementing numbers.
+///
+/// ```
+/// let mut names: Vec<String> =
+/// vec!["foo".into(), "foo".into(), "bar".into(), "baz".into(), "bar".into()];
+/// deduplicate_arg_names(&mut names);
+/// let expected: Vec<String> =
+/// vec!["foo_1".into(), "foo_2".into(), "bar_1".into(), "baz".into(), "bar_2".into()];
+/// assert_eq!(names, expected);
+/// ```
+fn deduplicate_arg_names(arg_names: &mut Vec<String>) {
+ let mut arg_name_counts = FxHashMap::default();
+ for name in arg_names.iter() {
+ *arg_name_counts.entry(name).or_insert(0) += 1;
+ }
+ let duplicate_arg_names: FxHashSet<String> = arg_name_counts
+ .into_iter()
+ .filter(|(_, count)| *count >= 2)
+ .map(|(name, _)| name.clone())
+ .collect();
+
+ let mut counter_per_name = FxHashMap::default();
+ for arg_name in arg_names.iter_mut() {
+ if duplicate_arg_names.contains(arg_name) {
+ let counter = counter_per_name.entry(arg_name.clone()).or_insert(1);
+ arg_name.push('_');
+ arg_name.push_str(&counter.to_string());
+ *counter += 1;
+ }
+ }
+}
+
+fn fn_arg_name(sema: &Semantics<'_, RootDatabase>, arg_expr: &ast::Expr) -> String {
+ let name = (|| match arg_expr {
+ ast::Expr::CastExpr(cast_expr) => Some(fn_arg_name(sema, &cast_expr.expr()?)),
+ expr => {
+ let name_ref = expr
+ .syntax()
+ .descendants()
+ .filter_map(ast::NameRef::cast)
+ .filter(|name| name.ident_token().is_some())
+ .last()?;
+ if let Some(NameRefClass::Definition(Definition::Const(_) | Definition::Static(_))) =
+ NameRefClass::classify(sema, &name_ref)
+ {
+ return Some(name_ref.to_string().to_lowercase());
+ };
+ Some(to_lower_snake_case(&name_ref.to_string()))
+ }
+ })();
+ match name {
+ Some(mut name) if name.starts_with(|c: char| c.is_ascii_digit()) => {
+ name.insert_str(0, "arg");
+ name
+ }
+ Some(name) => name,
+ None => "arg".to_string(),
+ }
+}
+
+fn fn_arg_type(ctx: &AssistContext<'_>, target_module: hir::Module, fn_arg: &ast::Expr) -> String {
+ fn maybe_displayed_type(
+ ctx: &AssistContext<'_>,
+ target_module: hir::Module,
+ fn_arg: &ast::Expr,
+ ) -> Option<String> {
+ let ty = ctx.sema.type_of_expr(fn_arg)?.adjusted();
+ if ty.is_unknown() {
+ return None;
+ }
+
+ if ty.is_reference() || ty.is_mutable_reference() {
+ let famous_defs = &FamousDefs(&ctx.sema, ctx.sema.scope(fn_arg.syntax())?.krate());
+ convert_reference_type(ty.strip_references(), ctx.db(), famous_defs)
+ .map(|conversion| conversion.convert_type(ctx.db()))
+ .or_else(|| ty.display_source_code(ctx.db(), target_module.into()).ok())
+ } else {
+ ty.display_source_code(ctx.db(), target_module.into()).ok()
+ }
+ }
+
+ maybe_displayed_type(ctx, target_module, fn_arg).unwrap_or_else(|| String::from("_"))
+}
+
+/// Returns the position inside the current mod or file
+/// directly after the current block
+/// We want to write the generated function directly after
+/// fns, impls or macro calls, but inside mods
+fn next_space_for_fn_after_call_site(expr: ast::CallableExpr) -> Option<GeneratedFunctionTarget> {
+ let mut ancestors = expr.syntax().ancestors().peekable();
+ let mut last_ancestor: Option<SyntaxNode> = None;
+ while let Some(next_ancestor) = ancestors.next() {
+ match next_ancestor.kind() {
+ SyntaxKind::SOURCE_FILE => {
+ break;
+ }
+ SyntaxKind::ITEM_LIST => {
+ if ancestors.peek().map(|a| a.kind()) == Some(SyntaxKind::MODULE) {
+ break;
+ }
+ }
+ _ => {}
+ }
+ last_ancestor = Some(next_ancestor);
+ }
+ last_ancestor.map(GeneratedFunctionTarget::BehindItem)
+}
+
+fn next_space_for_fn_in_module(
+ db: &dyn hir::db::AstDatabase,
+ module_source: &hir::InFile<hir::ModuleSource>,
+) -> Option<(FileId, GeneratedFunctionTarget)> {
+ let file = module_source.file_id.original_file(db);
+ let assist_item = match &module_source.value {
+ hir::ModuleSource::SourceFile(it) => match it.items().last() {
+ Some(last_item) => GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()),
+ None => GeneratedFunctionTarget::BehindItem(it.syntax().clone()),
+ },
+ hir::ModuleSource::Module(it) => match it.item_list().and_then(|it| it.items().last()) {
+ Some(last_item) => GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()),
+ None => GeneratedFunctionTarget::InEmptyItemList(it.item_list()?.syntax().clone()),
+ },
+ hir::ModuleSource::BlockExpr(it) => {
+ if let Some(last_item) =
+ it.statements().take_while(|stmt| matches!(stmt, ast::Stmt::Item(_))).last()
+ {
+ GeneratedFunctionTarget::BehindItem(last_item.syntax().clone())
+ } else {
+ GeneratedFunctionTarget::InEmptyItemList(it.syntax().clone())
+ }
+ }
+ };
+ Some((file, assist_item))
+}
+
+fn next_space_for_fn_in_impl(impl_: &ast::Impl) -> Option<GeneratedFunctionTarget> {
+ if let Some(last_item) = impl_.assoc_item_list().and_then(|it| it.assoc_items().last()) {
+ Some(GeneratedFunctionTarget::BehindItem(last_item.syntax().clone()))
+ } else {
+ Some(GeneratedFunctionTarget::InEmptyItemList(impl_.assoc_item_list()?.syntax().clone()))
+ }
+}
+
+fn module_is_descendant(module: &hir::Module, ans: &hir::Module, ctx: &AssistContext<'_>) -> bool {
+ if module == ans {
+ return true;
+ }
+ for c in ans.children(ctx.sema.db) {
+ if module_is_descendant(module, &c, ctx) {
+ return true;
+ }
+ }
+ false
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn add_function_with_no_args() {
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ bar$0();
+}
+",
+ r"
+fn foo() {
+ bar();
+}
+
+fn bar() ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_from_method() {
+ // This ensures that the function is correctly generated
+ // in the next outer mod or file
+ check_assist(
+ generate_function,
+ r"
+impl Foo {
+ fn foo() {
+ bar$0();
+ }
+}
+",
+ r"
+impl Foo {
+ fn foo() {
+ bar();
+ }
+}
+
+fn bar() ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_directly_after_current_block() {
+ // The new fn should not be created at the end of the file or module
+ check_assist(
+ generate_function,
+ r"
+fn foo1() {
+ bar$0();
+}
+
+fn foo2() {}
+",
+ r"
+fn foo1() {
+ bar();
+}
+
+fn bar() ${0:-> _} {
+ todo!()
+}
+
+fn foo2() {}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_no_args_in_same_module() {
+ check_assist(
+ generate_function,
+ r"
+mod baz {
+ fn foo() {
+ bar$0();
+ }
+}
+",
+ r"
+mod baz {
+ fn foo() {
+ bar();
+ }
+
+ fn bar() ${0:-> _} {
+ todo!()
+ }
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_upper_camel_case_arg() {
+ check_assist(
+ generate_function,
+ r"
+struct BazBaz;
+fn foo() {
+ bar$0(BazBaz);
+}
+",
+ r"
+struct BazBaz;
+fn foo() {
+ bar(BazBaz);
+}
+
+fn bar(baz_baz: BazBaz) ${0:-> _} {
+ todo!()
+}
+",
+ );
+ }
+
+ #[test]
+ fn add_function_with_upper_camel_case_arg_as_cast() {
+ check_assist(
+ generate_function,
+ r"
+struct BazBaz;
+fn foo() {
+ bar$0(&BazBaz as *const BazBaz);
+}
+",
+ r"
+struct BazBaz;
+fn foo() {
+ bar(&BazBaz as *const BazBaz);
+}
+
+fn bar(baz_baz: *const BazBaz) ${0:-> _} {
+ todo!()
+}
+",
+ );
+ }
+
+ #[test]
+ fn add_function_with_function_call_arg() {
+ check_assist(
+ generate_function,
+ r"
+struct Baz;
+fn baz() -> Baz { todo!() }
+fn foo() {
+ bar$0(baz());
+}
+",
+ r"
+struct Baz;
+fn baz() -> Baz { todo!() }
+fn foo() {
+ bar(baz());
+}
+
+fn bar(baz: Baz) ${0:-> _} {
+ todo!()
+}
+",
+ );
+ }
+
+ #[test]
+ fn add_function_with_method_call_arg() {
+ check_assist(
+ generate_function,
+ r"
+struct Baz;
+impl Baz {
+ fn foo(&self) -> Baz {
+ ba$0r(self.baz())
+ }
+ fn baz(&self) -> Baz {
+ Baz
+ }
+}
+",
+ r"
+struct Baz;
+impl Baz {
+ fn foo(&self) -> Baz {
+ bar(self.baz())
+ }
+ fn baz(&self) -> Baz {
+ Baz
+ }
+}
+
+fn bar(baz: Baz) -> Baz {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_string_literal_arg() {
+ check_assist(
+ generate_function,
+ r#"
+fn foo() {
+ $0bar("bar")
+}
+"#,
+ r#"
+fn foo() {
+ bar("bar")
+}
+
+fn bar(arg: &str) {
+ ${0:todo!()}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_function_with_char_literal_arg() {
+ check_assist(
+ generate_function,
+ r#"
+fn foo() {
+ $0bar('x')
+}
+"#,
+ r#"
+fn foo() {
+ bar('x')
+}
+
+fn bar(arg: char) {
+ ${0:todo!()}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_function_with_int_literal_arg() {
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ $0bar(42)
+}
+",
+ r"
+fn foo() {
+ bar(42)
+}
+
+fn bar(arg: i32) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_cast_int_literal_arg() {
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ $0bar(42 as u8)
+}
+",
+ r"
+fn foo() {
+ bar(42 as u8)
+}
+
+fn bar(arg: u8) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn name_of_cast_variable_is_used() {
+ // Ensures that the name of the cast type isn't used
+ // in the generated function signature.
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ let x = 42;
+ bar$0(x as u8)
+}
+",
+ r"
+fn foo() {
+ let x = 42;
+ bar(x as u8)
+}
+
+fn bar(x: u8) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_variable_arg() {
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ let worble = ();
+ $0bar(worble)
+}
+",
+ r"
+fn foo() {
+ let worble = ();
+ bar(worble)
+}
+
+fn bar(worble: ()) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_impl_trait_arg() {
+ check_assist(
+ generate_function,
+ r#"
+//- minicore: sized
+trait Foo {}
+fn foo() -> impl Foo {
+ todo!()
+}
+fn baz() {
+ $0bar(foo())
+}
+"#,
+ r#"
+trait Foo {}
+fn foo() -> impl Foo {
+ todo!()
+}
+fn baz() {
+ bar(foo())
+}
+
+fn bar(foo: impl Foo) {
+ ${0:todo!()}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn borrowed_arg() {
+ check_assist(
+ generate_function,
+ r"
+struct Baz;
+fn baz() -> Baz { todo!() }
+
+fn foo() {
+ bar$0(&baz())
+}
+",
+ r"
+struct Baz;
+fn baz() -> Baz { todo!() }
+
+fn foo() {
+ bar(&baz())
+}
+
+fn bar(baz: &Baz) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_qualified_path_arg() {
+ check_assist(
+ generate_function,
+ r"
+mod Baz {
+ pub struct Bof;
+ pub fn baz() -> Bof { Bof }
+}
+fn foo() {
+ $0bar(Baz::baz())
+}
+",
+ r"
+mod Baz {
+ pub struct Bof;
+ pub fn baz() -> Bof { Bof }
+}
+fn foo() {
+ bar(Baz::baz())
+}
+
+fn bar(baz: Baz::Bof) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_generic_arg() {
+ // FIXME: This is wrong, generated `bar` should include generic parameter.
+ check_assist(
+ generate_function,
+ r"
+fn foo<T>(t: T) {
+ $0bar(t)
+}
+",
+ r"
+fn foo<T>(t: T) {
+ bar(t)
+}
+
+fn bar(t: T) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_fn_arg() {
+ // FIXME: The argument in `bar` is wrong.
+ check_assist(
+ generate_function,
+ r"
+struct Baz;
+impl Baz {
+ fn new() -> Self { Baz }
+}
+fn foo() {
+ $0bar(Baz::new);
+}
+",
+ r"
+struct Baz;
+impl Baz {
+ fn new() -> Self { Baz }
+}
+fn foo() {
+ bar(Baz::new);
+}
+
+fn bar(new: fn) ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_closure_arg() {
+ // FIXME: The argument in `bar` is wrong.
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ let closure = |x: i64| x - 1;
+ $0bar(closure)
+}
+",
+ r"
+fn foo() {
+ let closure = |x: i64| x - 1;
+ bar(closure)
+}
+
+fn bar(closure: _) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn unresolveable_types_default_to_placeholder() {
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ $0bar(baz)
+}
+",
+ r"
+fn foo() {
+ bar(baz)
+}
+
+fn bar(baz: _) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn arg_names_dont_overlap() {
+ check_assist(
+ generate_function,
+ r"
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ $0bar(baz(), baz())
+}
+",
+ r"
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ bar(baz(), baz())
+}
+
+fn bar(baz_1: Baz, baz_2: Baz) {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn arg_name_counters_start_at_1_per_name() {
+ check_assist(
+ generate_function,
+ r#"
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ $0bar(baz(), baz(), "foo", "bar")
+}
+"#,
+ r#"
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ bar(baz(), baz(), "foo", "bar")
+}
+
+fn bar(baz_1: Baz, baz_2: Baz, arg_1: &str, arg_2: &str) {
+ ${0:todo!()}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_function_in_module() {
+ check_assist(
+ generate_function,
+ r"
+mod bar {}
+
+fn foo() {
+ bar::my_fn$0()
+}
+",
+ r"
+mod bar {
+ pub(crate) fn my_fn() {
+ ${0:todo!()}
+ }
+}
+
+fn foo() {
+ bar::my_fn()
+}
+",
+ )
+ }
+
+ #[test]
+ fn qualified_path_uses_correct_scope() {
+ check_assist(
+ generate_function,
+ r#"
+mod foo {
+ pub struct Foo;
+}
+fn bar() {
+ use foo::Foo;
+ let foo = Foo;
+ baz$0(foo)
+}
+"#,
+ r#"
+mod foo {
+ pub struct Foo;
+}
+fn bar() {
+ use foo::Foo;
+ let foo = Foo;
+ baz(foo)
+}
+
+fn baz(foo: foo::Foo) {
+ ${0:todo!()}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_function_in_module_containing_other_items() {
+ check_assist(
+ generate_function,
+ r"
+mod bar {
+ fn something_else() {}
+}
+
+fn foo() {
+ bar::my_fn$0()
+}
+",
+ r"
+mod bar {
+ fn something_else() {}
+
+ pub(crate) fn my_fn() {
+ ${0:todo!()}
+ }
+}
+
+fn foo() {
+ bar::my_fn()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_in_nested_module() {
+ check_assist(
+ generate_function,
+ r"
+mod bar {
- mod baz {
++ pub mod baz {}
+}
+
+fn foo() {
+ bar::baz::my_fn$0()
+}
+",
+ r"
+mod bar {
++ pub mod baz {
+ pub(crate) fn my_fn() {
+ ${0:todo!()}
+ }
+ }
+}
+
+fn foo() {
+ bar::baz::my_fn()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_in_another_file() {
+ check_assist(
+ generate_function,
+ r"
+//- /main.rs
+mod foo;
+
+fn main() {
+ foo::bar$0()
+}
+//- /foo.rs
+",
+ r"
+
+
+pub(crate) fn bar() {
+ ${0:todo!()}
+}",
+ )
+ }
+
+ #[test]
+ fn add_function_with_return_type() {
+ check_assist(
+ generate_function,
+ r"
+fn main() {
+ let x: u32 = foo$0();
+}
+",
+ r"
+fn main() {
+ let x: u32 = foo();
+}
+
+fn foo() -> u32 {
+ ${0:todo!()}
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_not_applicable_if_function_already_exists() {
+ check_assist_not_applicable(
+ generate_function,
+ r"
+fn foo() {
+ bar$0();
+}
+
+fn bar() {}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_not_applicable_if_unresolved_variable_in_call_is_selected() {
+ check_assist_not_applicable(
+ // bar is resolved, but baz isn't.
+ // The assist is only active if the cursor is on an unresolved path,
+ // but the assist should only be offered if the path is a function call.
+ generate_function,
+ r#"
+fn foo() {
+ bar(b$0az);
+}
+
+fn bar(baz: ()) {}
+"#,
+ )
+ }
+
+ #[test]
+ fn create_method_with_no_args() {
+ check_assist(
+ generate_function,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self) {
+ self.bar()$0;
+ }
+}
+"#,
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self) {
+ self.bar();
+ }
+
+ fn bar(&self) ${0:-> _} {
+ todo!()
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn create_function_with_async() {
+ check_assist(
+ generate_function,
+ r"
+fn foo() {
+ $0bar(42).await();
+}
+",
+ r"
+fn foo() {
+ bar(42).await();
+}
+
+async fn bar(arg: i32) ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn create_method() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+fn foo() {S.bar$0();}
+",
+ r"
+struct S;
+impl S {
+ fn bar(&self) ${0:-> _} {
+ todo!()
+ }
+}
+fn foo() {S.bar();}
+",
+ )
+ }
+
+ #[test]
+ fn create_method_within_an_impl() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+fn foo() {S.bar$0();}
+impl S {}
+
+",
+ r"
+struct S;
+fn foo() {S.bar();}
+impl S {
+ fn bar(&self) ${0:-> _} {
+ todo!()
+ }
+}
+
+",
+ )
+ }
+
+ #[test]
+ fn create_method_from_different_module() {
+ check_assist(
+ generate_function,
+ r"
+mod s {
+ pub struct S;
+}
+fn foo() {s::S.bar$0();}
+",
+ r"
+mod s {
+ pub struct S;
+ impl S {
+ pub(crate) fn bar(&self) ${0:-> _} {
+ todo!()
+ }
+ }
+}
+fn foo() {s::S.bar();}
+",
+ )
+ }
+
+ #[test]
+ fn create_method_from_descendant_module() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+mod s {
+ fn foo() {
+ super::S.bar$0();
+ }
+}
+
+",
+ r"
+struct S;
+impl S {
+ fn bar(&self) ${0:-> _} {
+ todo!()
+ }
+}
+mod s {
+ fn foo() {
+ super::S.bar();
+ }
+}
+
+",
+ )
+ }
+
+ #[test]
+ fn create_method_with_cursor_anywhere_on_call_expresion() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+fn foo() {$0S.bar();}
+",
+ r"
+struct S;
+impl S {
+ fn bar(&self) ${0:-> _} {
+ todo!()
+ }
+}
+fn foo() {S.bar();}
+",
+ )
+ }
+
+ #[test]
+ fn create_static_method() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+fn foo() {S::bar$0();}
+",
+ r"
+struct S;
+impl S {
+ fn bar() ${0:-> _} {
+ todo!()
+ }
+}
+fn foo() {S::bar();}
+",
+ )
+ }
+
+ #[test]
+ fn create_static_method_within_an_impl() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+fn foo() {S::bar$0();}
+impl S {}
+
+",
+ r"
+struct S;
+fn foo() {S::bar();}
+impl S {
+ fn bar() ${0:-> _} {
+ todo!()
+ }
+}
+
+",
+ )
+ }
+
+ #[test]
+ fn create_static_method_from_different_module() {
+ check_assist(
+ generate_function,
+ r"
+mod s {
+ pub struct S;
+}
+fn foo() {s::S::bar$0();}
+",
+ r"
+mod s {
+ pub struct S;
+ impl S {
+ pub(crate) fn bar() ${0:-> _} {
+ todo!()
+ }
+ }
+}
+fn foo() {s::S::bar();}
+",
+ )
+ }
+
+ #[test]
+ fn create_static_method_with_cursor_anywhere_on_call_expresion() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+fn foo() {$0S::bar();}
+",
+ r"
+struct S;
+impl S {
+ fn bar() ${0:-> _} {
+ todo!()
+ }
+}
+fn foo() {S::bar();}
+",
+ )
+ }
+
+ #[test]
+ fn create_static_method_within_an_impl_with_self_syntax() {
+ check_assist(
+ generate_function,
+ r"
+struct S;
+impl S {
+ fn foo(&self) {
+ Self::bar$0();
+ }
+}
+",
+ r"
+struct S;
+impl S {
+ fn foo(&self) {
+ Self::bar();
+ }
+
+ fn bar() ${0:-> _} {
+ todo!()
+ }
+}
+",
+ )
+ }
+
+ #[test]
+ fn no_panic_on_invalid_global_path() {
+ check_assist(
+ generate_function,
+ r"
+fn main() {
+ ::foo$0();
+}
+",
+ r"
+fn main() {
+ ::foo();
+}
+
+fn foo() ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn handle_tuple_indexing() {
+ check_assist(
+ generate_function,
+ r"
+fn main() {
+ let a = ((),);
+ foo$0(a.0);
+}
+",
+ r"
+fn main() {
+ let a = ((),);
+ foo(a.0);
+}
+
+fn foo(a: ()) ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_const_arg() {
+ check_assist(
+ generate_function,
+ r"
+const VALUE: usize = 0;
+fn main() {
+ foo$0(VALUE);
+}
+",
+ r"
+const VALUE: usize = 0;
+fn main() {
+ foo(VALUE);
+}
+
+fn foo(value: usize) ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_static_arg() {
+ check_assist(
+ generate_function,
+ r"
+static VALUE: usize = 0;
+fn main() {
+ foo$0(VALUE);
+}
+",
+ r"
+static VALUE: usize = 0;
+fn main() {
+ foo(VALUE);
+}
+
+fn foo(value: usize) ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn add_function_with_static_mut_arg() {
+ check_assist(
+ generate_function,
+ r"
+static mut VALUE: usize = 0;
+fn main() {
+ foo$0(VALUE);
+}
+",
+ r"
+static mut VALUE: usize = 0;
+fn main() {
+ foo(VALUE);
+}
+
+fn foo(value: usize) ${0:-> _} {
+ todo!()
+}
+",
+ )
+ }
+
+ #[test]
+ fn not_applicable_for_enum_variant() {
+ check_assist_not_applicable(
+ generate_function,
+ r"
+enum Foo {}
+fn main() {
+ Foo::Bar$0(true)
+}
+",
+ );
+ }
+
+ #[test]
+ fn applicable_for_enum_method() {
+ check_assist(
+ generate_function,
+ r"
+enum Foo {}
+fn main() {
+ Foo::new$0();
+}
+",
+ r"
+enum Foo {}
+impl Foo {
+ fn new() ${0:-> _} {
+ todo!()
+ }
+}
+fn main() {
+ Foo::new();
+}
+",
+ )
+ }
+}
--- /dev/null
- NodeOrToken::Token(t) if t.kind() == COMMA=> {
+use crate::{AssistContext, Assists};
+use ide_db::{
+ assists::{AssistId, AssistKind},
+ syntax_helpers::{
+ format_string::is_format_string,
+ format_string_exprs::{parse_format_exprs, Arg},
+ },
+};
+use itertools::Itertools;
+use stdx::format_to;
+use syntax::{ast, AstNode, AstToken, NodeOrToken, SyntaxKind::COMMA, TextRange};
+
+// Assist: move_format_string_arg
+//
+// Move an expression out of a format string.
+//
+// ```
+// macro_rules! format_args {
+// ($lit:literal $(tt:tt)*) => { 0 },
+// }
+// macro_rules! print {
+// ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*)));
+// }
+//
+// fn main() {
+// print!("{x + 1}$0");
+// }
+// ```
+// ->
+// ```
+// macro_rules! format_args {
+// ($lit:literal $(tt:tt)*) => { 0 },
+// }
+// macro_rules! print {
+// ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*)));
+// }
+//
+// fn main() {
+// print!("{}"$0, x + 1);
+// }
+// ```
+
+pub(crate) fn move_format_string_arg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
+ let fmt_string = ctx.find_token_at_offset::<ast::String>()?;
+ let tt = fmt_string.syntax().parent().and_then(ast::TokenTree::cast)?;
+
+ let expanded_t = ast::String::cast(
+ ctx.sema.descend_into_macros_with_kind_preference(fmt_string.syntax().clone()),
+ )?;
+ if !is_format_string(&expanded_t) {
+ return None;
+ }
+
+ let (new_fmt, extracted_args) = parse_format_exprs(fmt_string.text()).ok()?;
+ if extracted_args.is_empty() {
+ return None;
+ }
+
+ acc.add(
+ AssistId(
+ "move_format_string_arg",
+ // if there aren't any expressions, then make the assist a RefactorExtract
+ if extracted_args.iter().filter(|f| matches!(f, Arg::Expr(_))).count() == 0 {
+ AssistKind::RefactorExtract
+ } else {
+ AssistKind::QuickFix
+ },
+ ),
+ "Extract format args",
+ tt.syntax().text_range(),
+ |edit| {
+ let fmt_range = fmt_string.syntax().text_range();
+
+ // Replace old format string with new format string whose arguments have been extracted
+ edit.replace(fmt_range, new_fmt);
+
+ // Insert cursor at end of format string
+ edit.insert(fmt_range.end(), "$0");
+
+ // Extract existing arguments in macro
+ let tokens =
+ tt.token_trees_and_tokens().collect_vec();
+
+ let mut existing_args: Vec<String> = vec![];
+
+ let mut current_arg = String::new();
+ if let [_opening_bracket, NodeOrToken::Token(format_string), _args_start_comma, tokens @ .., NodeOrToken::Token(end_bracket)] =
+ tokens.as_slice()
+ {
+ for t in tokens {
+ match t {
+ NodeOrToken::Node(n) => {
+ format_to!(current_arg, "{n}");
+ },
- print!("{} {x + 1:b} {Struct(1, 2)}$0", 1);
++ NodeOrToken::Token(t) if t.kind() == COMMA => {
+ existing_args.push(current_arg.trim().into());
+ current_arg.clear();
+ },
+ NodeOrToken::Token(t) => {
+ current_arg.push_str(t.text());
+ },
+ }
+ }
+ existing_args.push(current_arg.trim().into());
+
+ // delete everything after the format string till end bracket
+ // we're going to insert the new arguments later
+ edit.delete(TextRange::new(
+ format_string.text_range().end(),
+ end_bracket.text_range().start(),
+ ));
+ }
+
+ // Start building the new args
+ let mut existing_args = existing_args.into_iter();
+ let mut args = String::new();
+
+ let mut placeholder_idx = 1;
+
+ for extracted_args in extracted_args {
+ // remove expr from format string
+ args.push_str(", ");
+
+ match extracted_args {
+ Arg::Ident(s) | Arg::Expr(s) => {
+ // insert arg
+ args.push_str(&s);
+ }
+ Arg::Placeholder => {
+ // try matching with existing argument
+ match existing_args.next() {
+ Some(ea) => {
+ args.push_str(&ea);
+ }
+ None => {
+ // insert placeholder
+ args.push_str(&format!("${placeholder_idx}"));
+ placeholder_idx += 1;
+ }
+ }
+ }
+ }
+ }
+
+ // Insert new args
+ edit.insert(fmt_range.end(), args);
+ },
+ );
+
+ Some(())
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::tests::check_assist;
+
+ const MACRO_DECL: &'static str = r#"
+macro_rules! format_args {
+ ($lit:literal $(tt:tt)*) => { 0 },
+}
+macro_rules! print {
+ ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*)));
+}
+"#;
+
+ fn add_macro_decl(s: &'static str) -> String {
+ MACRO_DECL.to_string() + s
+ }
+
+ #[test]
+ fn multiple_middle_arg() {
+ check_assist(
+ move_format_string_arg,
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("{} {x + 1:b} {}$0", y + 2, 2);
+}
+"#,
+ ),
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("{} {:b} {}"$0, y + 2, x + 1, 2);
+}
+"#,
+ ),
+ );
+ }
+
+ #[test]
+ fn single_arg() {
+ check_assist(
+ move_format_string_arg,
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("{obj.value:b}$0",);
+}
+"#,
+ ),
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("{:b}"$0, obj.value);
+}
+"#,
+ ),
+ );
+ }
+
+ #[test]
+ fn multiple_middle_placeholders_arg() {
+ check_assist(
+ move_format_string_arg,
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("{} {x + 1:b} {} {}$0", y + 2, 2);
+}
+"#,
+ ),
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("{} {:b} {} {}"$0, y + 2, x + 1, 2, $1);
+}
+"#,
+ ),
+ );
+ }
+
+ #[test]
+ fn multiple_trailing_args() {
+ check_assist(
+ move_format_string_arg,
+ &add_macro_decl(
+ r#"
+fn main() {
- print!("{} {:b} {}"$0, 1, x + 1, Struct(1, 2));
++ print!("{:b} {x + 1:b} {Struct(1, 2)}$0", 1);
+}
+"#,
+ ),
+ &add_macro_decl(
+ r#"
+fn main() {
++ print!("{:b} {:b} {}"$0, 1, x + 1, Struct(1, 2));
+}
+"#,
+ ),
+ );
+ }
+
+ #[test]
+ fn improper_commas() {
+ check_assist(
+ move_format_string_arg,
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("{} {x + 1:b} {Struct(1, 2)}$0", 1,);
+}
+"#,
+ ),
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("{} {:b} {}"$0, 1, x + 1, Struct(1, 2));
+}
+"#,
+ ),
+ );
+ }
+
+ #[test]
+ fn nested_tt() {
+ check_assist(
+ move_format_string_arg,
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("My name is {} {x$0 + x}", stringify!(Paperino))
+}
+"#,
+ ),
+ &add_macro_decl(
+ r#"
+fn main() {
+ print!("My name is {} {}"$0, stringify!(Paperino), x + x)
+}
+"#,
+ ),
+ );
+ }
+}
--- /dev/null
- match_ast, NodeOrToken, SyntaxElement, TextSize, T,
+use itertools::Itertools;
+use syntax::{
+ ast::{self, AstNode, AstToken},
- let macro_call = ctx.find_node_at_offset::<ast::MacroCall>()?;
++ match_ast, NodeOrToken, SyntaxElement, TextRange, TextSize, T,
+};
+
+use crate::{AssistContext, AssistId, AssistKind, Assists};
+
+// Assist: remove_dbg
+//
+// Removes `dbg!()` macro call.
+//
+// ```
+// fn main() {
+// $0dbg!(92);
+// }
+// ```
+// ->
+// ```
+// fn main() {
+// 92;
+// }
+// ```
+pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
- let (range, text) = match &*input_expressions {
++ let macro_calls = if ctx.has_empty_selection() {
++ vec![ctx.find_node_at_offset::<ast::MacroCall>()?]
++ } else {
++ ctx.covering_element()
++ .as_node()?
++ .descendants()
++ .filter(|node| ctx.selection_trimmed().contains_range(node.text_range()))
++ .filter_map(ast::MacroCall::cast)
++ .collect()
++ };
++
++ let replacements =
++ macro_calls.into_iter().filter_map(compute_dbg_replacement).collect::<Vec<_>>();
++ if replacements.is_empty() {
++ return None;
++ }
++
++ acc.add(
++ AssistId("remove_dbg", AssistKind::Refactor),
++ "Remove dbg!()",
++ ctx.selection_trimmed(),
++ |builder| {
++ for (range, text) in replacements {
++ builder.replace(range, text);
++ }
++ },
++ )
++}
++
++fn compute_dbg_replacement(macro_call: ast::MacroCall) -> Option<(TextRange, String)> {
+ let tt = macro_call.token_tree()?;
+ let r_delim = NodeOrToken::Token(tt.right_delimiter_token()?);
+ if macro_call.path()?.segment()?.name_ref()?.text() != "dbg"
+ || macro_call.excl_token().is_none()
+ {
+ return None;
+ }
+
+ let mac_input = tt.syntax().children_with_tokens().skip(1).take_while(|it| *it != r_delim);
+ let input_expressions = mac_input.group_by(|tok| tok.kind() == T![,]);
+ let input_expressions = input_expressions
+ .into_iter()
+ .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
+ .map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
+ .collect::<Option<Vec<ast::Expr>>>()?;
+
+ let macro_expr = ast::MacroExpr::cast(macro_call.syntax().parent()?)?;
+ let parent = macro_expr.syntax().parent()?;
- };
-
- acc.add(AssistId("remove_dbg", AssistKind::Refactor), "Remove dbg!()", range, |builder| {
- builder.replace(range, text);
++ Some(match &*input_expressions {
+ // dbg!()
+ [] => {
+ match_ast! {
+ match parent {
+ ast::StmtList(__) => {
+ let range = macro_expr.syntax().text_range();
+ let range = match whitespace_start(macro_expr.syntax().prev_sibling_or_token()) {
+ Some(start) => range.cover_offset(start),
+ None => range,
+ };
+ (range, String::new())
+ },
+ ast::ExprStmt(it) => {
+ let range = it.syntax().text_range();
+ let range = match whitespace_start(it.syntax().prev_sibling_or_token()) {
+ Some(start) => range.cover_offset(start),
+ None => range,
+ };
+ (range, String::new())
+ },
+ _ => (macro_call.syntax().text_range(), "()".to_owned())
+ }
+ }
+ }
+ // dbg!(expr0)
+ [expr] => {
+ let wrap = match ast::Expr::cast(parent) {
+ Some(parent) => match (expr, parent) {
+ (ast::Expr::CastExpr(_), ast::Expr::CastExpr(_)) => false,
+ (
+ ast::Expr::BoxExpr(_) | ast::Expr::PrefixExpr(_) | ast::Expr::RefExpr(_),
+ ast::Expr::AwaitExpr(_)
+ | ast::Expr::CallExpr(_)
+ | ast::Expr::CastExpr(_)
+ | ast::Expr::FieldExpr(_)
+ | ast::Expr::IndexExpr(_)
+ | ast::Expr::MethodCallExpr(_)
+ | ast::Expr::RangeExpr(_)
+ | ast::Expr::TryExpr(_),
+ ) => true,
+ (
+ ast::Expr::BinExpr(_) | ast::Expr::CastExpr(_) | ast::Expr::RangeExpr(_),
+ ast::Expr::AwaitExpr(_)
+ | ast::Expr::BinExpr(_)
+ | ast::Expr::CallExpr(_)
+ | ast::Expr::CastExpr(_)
+ | ast::Expr::FieldExpr(_)
+ | ast::Expr::IndexExpr(_)
+ | ast::Expr::MethodCallExpr(_)
+ | ast::Expr::PrefixExpr(_)
+ | ast::Expr::RangeExpr(_)
+ | ast::Expr::RefExpr(_)
+ | ast::Expr::TryExpr(_),
+ ) => true,
+ _ => false,
+ },
+ None => false,
+ };
+ (
+ macro_call.syntax().text_range(),
+ if wrap { format!("({expr})") } else { expr.to_string() },
+ )
+ }
+ // dbg!(expr0, expr1, ...)
+ exprs => (macro_call.syntax().text_range(), format!("({})", exprs.iter().format(", "))),
+ })
+}
+
+fn whitespace_start(it: Option<SyntaxElement>) -> Option<TextSize> {
+ Some(it?.into_token().and_then(ast::Whitespace::cast)?.syntax().text_range().start())
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ fn check(ra_fixture_before: &str, ra_fixture_after: &str) {
+ check_assist(
+ remove_dbg,
+ &format!("fn main() {{\n{ra_fixture_before}\n}}"),
+ &format!("fn main() {{\n{ra_fixture_after}\n}}"),
+ );
+ }
+
+ #[test]
+ fn test_remove_dbg() {
+ check("$0dbg!(1 + 1)", "1 + 1");
+ check("dbg!$0(1 + 1)", "1 + 1");
+ check("dbg!(1 $0+ 1)", "1 + 1");
+ check("dbg![$01 + 1]", "1 + 1");
+ check("dbg!{$01 + 1}", "1 + 1");
+ }
+
+ #[test]
+ fn test_remove_dbg_not_applicable() {
+ check_assist_not_applicable(remove_dbg, "fn main() {$0vec![1, 2, 3]}");
+ check_assist_not_applicable(remove_dbg, "fn main() {$0dbg(5, 6, 7)}");
+ check_assist_not_applicable(remove_dbg, "fn main() {$0dbg!(5, 6, 7}");
+ }
+
+ #[test]
+ fn test_remove_dbg_keep_semicolon_in_let() {
+ // https://github.com/rust-lang/rust-analyzer/issues/5129#issuecomment-651399779
+ check(
+ r#"let res = $0dbg!(1 * 20); // needless comment"#,
+ r#"let res = 1 * 20; // needless comment"#,
+ );
+ check(r#"let res = $0dbg!(); // needless comment"#, r#"let res = (); // needless comment"#);
+ check(
+ r#"let res = $0dbg!(1, 2); // needless comment"#,
+ r#"let res = (1, 2); // needless comment"#,
+ );
+ }
+
+ #[test]
+ fn test_remove_dbg_cast_cast() {
+ check(r#"let res = $0dbg!(x as u32) as u32;"#, r#"let res = x as u32 as u32;"#);
+ }
+
+ #[test]
+ fn test_remove_dbg_prefix() {
+ check(r#"let res = $0dbg!(&result).foo();"#, r#"let res = (&result).foo();"#);
+ check(r#"let res = &$0dbg!(&result);"#, r#"let res = &&result;"#);
+ check(r#"let res = $0dbg!(!result) && true;"#, r#"let res = !result && true;"#);
+ }
+
+ #[test]
+ fn test_remove_dbg_post_expr() {
+ check(r#"let res = $0dbg!(fut.await).foo();"#, r#"let res = fut.await.foo();"#);
+ check(r#"let res = $0dbg!(result?).foo();"#, r#"let res = result?.foo();"#);
+ check(r#"let res = $0dbg!(foo as u32).foo();"#, r#"let res = (foo as u32).foo();"#);
+ check(r#"let res = $0dbg!(array[3]).foo();"#, r#"let res = array[3].foo();"#);
+ check(r#"let res = $0dbg!(tuple.3).foo();"#, r#"let res = tuple.3.foo();"#);
+ }
+
+ #[test]
+ fn test_remove_dbg_range_expr() {
+ check(r#"let res = $0dbg!(foo..bar).foo();"#, r#"let res = (foo..bar).foo();"#);
+ check(r#"let res = $0dbg!(foo..=bar).foo();"#, r#"let res = (foo..=bar).foo();"#);
+ }
+
+ #[test]
+ fn test_remove_empty_dbg() {
+ check_assist(remove_dbg, r#"fn foo() { $0dbg!(); }"#, r#"fn foo() { }"#);
+ check_assist(
+ remove_dbg,
+ r#"
+fn foo() {
+ $0dbg!();
+}
+"#,
+ r#"
+fn foo() {
+}
+"#,
+ );
+ check_assist(
+ remove_dbg,
+ r#"
+fn foo() {
+ let test = $0dbg!();
+}"#,
+ r#"
+fn foo() {
+ let test = ();
+}"#,
+ );
+ check_assist(
+ remove_dbg,
+ r#"
+fn foo() {
+ let t = {
+ println!("Hello, world");
+ $0dbg!()
+ };
+}"#,
+ r#"
+fn foo() {
+ let t = {
+ println!("Hello, world");
+ };
+}"#,
+ );
+ }
+
+ #[test]
+ fn test_remove_multi_dbg() {
+ check(r#"$0dbg!(0, 1)"#, r#"(0, 1)"#);
+ check(r#"$0dbg!(0, (1, 2))"#, r#"(0, (1, 2))"#);
+ }
++
++ #[test]
++ fn test_range() {
++ check(
++ r#"
++fn f() {
++ dbg!(0) + $0dbg!(1);
++ dbg!(())$0
++}
++"#,
++ r#"
++fn f() {
++ dbg!(0) + 1;
++ ()
++}
++"#,
++ );
++ }
++
++ #[test]
++ fn test_range_partial() {
++ check_assist_not_applicable(remove_dbg, r#"$0dbg$0!(0)"#);
++ check_assist_not_applicable(remove_dbg, r#"$0dbg!(0$0)"#);
++ }
+}
--- /dev/null
- const Baz: usize = 42;
-
+use hir::{InFile, ModuleDef};
+use ide_db::{
+ helpers::mod_path_to_ast, imports::import_assets::NameToImport, items_locator,
+ syntax_helpers::insert_whitespace_into_node::insert_ws_into,
+};
+use itertools::Itertools;
+use syntax::{
+ ast::{self, AstNode, HasName},
+ SyntaxKind::WHITESPACE,
+};
+
+use crate::{
+ assist_context::{AssistContext, Assists, SourceChangeBuilder},
+ utils::{
+ add_trait_assoc_items_to_impl, filter_assoc_items, gen_trait_fn_body,
+ generate_trait_impl_text, render_snippet, Cursor, DefaultMethods,
+ },
+ AssistId, AssistKind,
+};
+
+// Assist: replace_derive_with_manual_impl
+//
+// Converts a `derive` impl into a manual one.
+//
+// ```
+// # //- minicore: derive
+// # trait Debug { fn fmt(&self, f: &mut Formatter) -> Result<()>; }
+// #[derive(Deb$0ug, Display)]
+// struct S;
+// ```
+// ->
+// ```
+// # trait Debug { fn fmt(&self, f: &mut Formatter) -> Result<()>; }
+// #[derive(Display)]
+// struct S;
+//
+// impl Debug for S {
+// $0fn fmt(&self, f: &mut Formatter) -> Result<()> {
+// f.debug_struct("S").finish()
+// }
+// }
+// ```
+pub(crate) fn replace_derive_with_manual_impl(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+) -> Option<()> {
+ let attr = ctx.find_node_at_offset_with_descend::<ast::Attr>()?;
+ let path = attr.path()?;
+ let hir_file = ctx.sema.hir_file_for(attr.syntax());
+ if !hir_file.is_derive_attr_pseudo_expansion(ctx.db()) {
+ return None;
+ }
+
+ let InFile { file_id, value } = hir_file.call_node(ctx.db())?;
+ if file_id.is_macro() {
+ // FIXME: make this work in macro files
+ return None;
+ }
+ // collect the derive paths from the #[derive] expansion
+ let current_derives = ctx
+ .sema
+ .parse_or_expand(hir_file)?
+ .descendants()
+ .filter_map(ast::Attr::cast)
+ .filter_map(|attr| attr.path())
+ .collect::<Vec<_>>();
+
+ let adt = value.parent().and_then(ast::Adt::cast)?;
+ let attr = ast::Attr::cast(value)?;
+ let args = attr.token_tree()?;
+
+ let current_module = ctx.sema.scope(adt.syntax())?.module();
+ let current_crate = current_module.krate();
+
+ let found_traits = items_locator::items_with_name(
+ &ctx.sema,
+ current_crate,
+ NameToImport::exact_case_sensitive(path.segments().last()?.to_string()),
+ items_locator::AssocItemSearch::Exclude,
+ Some(items_locator::DEFAULT_QUERY_SEARCH_LIMIT.inner()),
+ )
+ .filter_map(|item| match item.as_module_def()? {
+ ModuleDef::Trait(trait_) => Some(trait_),
+ _ => None,
+ })
+ .flat_map(|trait_| {
+ current_module
+ .find_use_path(ctx.sema.db, hir::ModuleDef::Trait(trait_), ctx.config.prefer_no_std)
+ .as_ref()
+ .map(mod_path_to_ast)
+ .zip(Some(trait_))
+ });
+
+ let mut no_traits_found = true;
+ for (replace_trait_path, trait_) in found_traits.inspect(|_| no_traits_found = false) {
+ add_assist(
+ acc,
+ ctx,
+ &attr,
+ ¤t_derives,
+ &args,
+ &path,
+ &replace_trait_path,
+ Some(trait_),
+ &adt,
+ )?;
+ }
+ if no_traits_found {
+ add_assist(acc, ctx, &attr, ¤t_derives, &args, &path, &path, None, &adt)?;
+ }
+ Some(())
+}
+
+fn add_assist(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+ attr: &ast::Attr,
+ old_derives: &[ast::Path],
+ old_tree: &ast::TokenTree,
+ old_trait_path: &ast::Path,
+ replace_trait_path: &ast::Path,
+ trait_: Option<hir::Trait>,
+ adt: &ast::Adt,
+) -> Option<()> {
+ let target = attr.syntax().text_range();
+ let annotated_name = adt.name()?;
+ let label = format!("Convert to manual `impl {replace_trait_path} for {annotated_name}`");
+
+ acc.add(
+ AssistId("replace_derive_with_manual_impl", AssistKind::Refactor),
+ label,
+ target,
+ |builder| {
+ let insert_pos = adt.syntax().text_range().end();
+ let impl_def_with_items =
+ impl_def_from_trait(&ctx.sema, adt, &annotated_name, trait_, replace_trait_path);
+ update_attribute(builder, old_derives, old_tree, old_trait_path, attr);
+ let trait_path = replace_trait_path.to_string();
+ match (ctx.config.snippet_cap, impl_def_with_items) {
+ (None, _) => {
+ builder.insert(insert_pos, generate_trait_impl_text(adt, &trait_path, ""))
+ }
+ (Some(cap), None) => builder.insert_snippet(
+ cap,
+ insert_pos,
+ generate_trait_impl_text(adt, &trait_path, " $0"),
+ ),
+ (Some(cap), Some((impl_def, first_assoc_item))) => {
+ let mut cursor = Cursor::Before(first_assoc_item.syntax());
+ let placeholder;
+ if let ast::AssocItem::Fn(ref func) = first_assoc_item {
+ if let Some(m) = func.syntax().descendants().find_map(ast::MacroCall::cast)
+ {
+ if m.syntax().text() == "todo!()" {
+ placeholder = m;
+ cursor = Cursor::Replace(placeholder.syntax());
+ }
+ }
+ }
+
+ let rendered = render_snippet(cap, impl_def.syntax(), cursor);
+ builder.insert_snippet(cap, insert_pos, format!("\n\n{rendered}"))
+ }
+ };
+ },
+ )
+}
+
+fn impl_def_from_trait(
+ sema: &hir::Semantics<'_, ide_db::RootDatabase>,
+ adt: &ast::Adt,
+ annotated_name: &ast::Name,
+ trait_: Option<hir::Trait>,
+ trait_path: &ast::Path,
+) -> Option<(ast::Impl, ast::AssocItem)> {
+ let trait_ = trait_?;
+ let target_scope = sema.scope(annotated_name.syntax())?;
+ let trait_items = filter_assoc_items(sema, &trait_.items(sema.db), DefaultMethods::No);
+ if trait_items.is_empty() {
+ return None;
+ }
+ let impl_def = {
+ use syntax::ast::Impl;
+ let text = generate_trait_impl_text(adt, trait_path.to_string().as_str(), "");
+ let parse = syntax::SourceFile::parse(&text);
+ let node = match parse.tree().syntax().descendants().find_map(Impl::cast) {
+ Some(it) => it,
+ None => {
+ panic!(
+ "Failed to make ast node `{}` from text {}",
+ std::any::type_name::<Impl>(),
+ text
+ )
+ }
+ };
+ let node = node.clone_subtree();
+ assert_eq!(node.syntax().text_range().start(), 0.into());
+ node
+ };
+
+ let trait_items = trait_items
+ .into_iter()
+ .map(|it| {
+ if sema.hir_file_for(it.syntax()).is_macro() {
+ if let Some(it) = ast::AssocItem::cast(insert_ws_into(it.syntax().clone())) {
+ return it;
+ }
+ }
+ it.clone_for_update()
+ })
+ .collect();
+ let (impl_def, first_assoc_item) =
+ add_trait_assoc_items_to_impl(sema, trait_items, trait_, impl_def, target_scope);
+
+ // Generate a default `impl` function body for the derived trait.
+ if let ast::AssocItem::Fn(ref func) = first_assoc_item {
+ let _ = gen_trait_fn_body(func, trait_path, adt);
+ };
+
+ Some((impl_def, first_assoc_item))
+}
+
+fn update_attribute(
+ builder: &mut SourceChangeBuilder,
+ old_derives: &[ast::Path],
+ old_tree: &ast::TokenTree,
+ old_trait_path: &ast::Path,
+ attr: &ast::Attr,
+) {
+ let new_derives = old_derives
+ .iter()
+ .filter(|t| t.to_string() != old_trait_path.to_string())
+ .collect::<Vec<_>>();
+ let has_more_derives = !new_derives.is_empty();
+
+ if has_more_derives {
+ let new_derives = format!("({})", new_derives.iter().format(", "));
+ builder.replace(old_tree.syntax().text_range(), new_derives);
+ } else {
+ let attr_range = attr.syntax().text_range();
+ builder.delete(attr_range);
+
+ if let Some(line_break_range) = attr
+ .syntax()
+ .next_sibling_or_token()
+ .filter(|t| t.kind() == WHITESPACE)
+ .map(|t| t.text_range())
+ {
+ builder.delete(line_break_range);
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn add_custom_impl_debug_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(Debu$0g)]
+struct Foo {
+ bar: String,
+}
+"#,
+ r#"
+struct Foo {
+ bar: String,
+}
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ f.debug_struct("Foo").field("bar", &self.bar).finish()
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_debug_tuple_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(Debu$0g)]
+struct Foo(String, usize);
+"#,
+ r#"struct Foo(String, usize);
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ f.debug_tuple("Foo").field(&self.0).field(&self.1).finish()
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_debug_empty_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(Debu$0g)]
+struct Foo;
+"#,
+ r#"
+struct Foo;
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ f.debug_struct("Foo").finish()
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_debug_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(Debu$0g)]
+enum Foo {
+ Bar,
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar,
+ Baz,
+}
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ match self {
+ Self::Bar => write!(f, "Bar"),
+ Self::Baz => write!(f, "Baz"),
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_debug_tuple_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(Debu$0g)]
+enum Foo {
+ Bar(usize, usize),
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar(usize, usize),
+ Baz,
+}
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ match self {
+ Self::Bar(arg0, arg1) => f.debug_tuple("Bar").field(arg0).field(arg1).finish(),
+ Self::Baz => write!(f, "Baz"),
+ }
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_debug_record_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(Debu$0g)]
+enum Foo {
+ Bar {
+ baz: usize,
+ qux: usize,
+ },
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar {
+ baz: usize,
+ qux: usize,
+ },
+ Baz,
+}
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ match self {
+ Self::Bar { baz, qux } => f.debug_struct("Bar").field("baz", baz).field("qux", qux).finish(),
+ Self::Baz => write!(f, "Baz"),
+ }
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_default_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: default, derive
+#[derive(Defau$0lt)]
+struct Foo {
+ foo: usize,
+}
+"#,
+ r#"
+struct Foo {
+ foo: usize,
+}
+
+impl Default for Foo {
+ $0fn default() -> Self {
+ Self { foo: Default::default() }
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_default_tuple_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: default, derive
+#[derive(Defau$0lt)]
+struct Foo(usize);
+"#,
+ r#"
+struct Foo(usize);
+
+impl Default for Foo {
+ $0fn default() -> Self {
+ Self(Default::default())
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_default_empty_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: default, derive
+#[derive(Defau$0lt)]
+struct Foo;
+"#,
+ r#"
+struct Foo;
+
+impl Default for Foo {
+ $0fn default() -> Self {
+ Self { }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_hash_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: hash, derive
+#[derive(Has$0h)]
+struct Foo {
+ bin: usize,
+ bar: usize,
+}
+"#,
+ r#"
+struct Foo {
+ bin: usize,
+ bar: usize,
+}
+
+impl core::hash::Hash for Foo {
+ $0fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
+ self.bin.hash(state);
+ self.bar.hash(state);
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_hash_tuple_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: hash, derive
+#[derive(Has$0h)]
+struct Foo(usize, usize);
+"#,
+ r#"
+struct Foo(usize, usize);
+
+impl core::hash::Hash for Foo {
+ $0fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
+ self.0.hash(state);
+ self.1.hash(state);
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_hash_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: hash, derive
+#[derive(Has$0h)]
+enum Foo {
+ Bar,
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar,
+ Baz,
+}
+
+impl core::hash::Hash for Foo {
+ $0fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
+ core::mem::discriminant(self).hash(state);
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+struct Foo {
+ bin: usize,
+ bar: usize,
+}
+"#,
+ r#"
+struct Foo {
+ bin: usize,
+ bar: usize,
+}
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ Self { bin: self.bin.clone(), bar: self.bar.clone() }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_tuple_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+struct Foo(usize, usize);
+"#,
+ r#"
+struct Foo(usize, usize);
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ Self(self.0.clone(), self.1.clone())
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_empty_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+struct Foo;
+"#,
+ r#"
+struct Foo;
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ Self { }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+enum Foo {
+ Bar,
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar,
+ Baz,
+}
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ match self {
+ Self::Bar => Self::Bar,
+ Self::Baz => Self::Baz,
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_tuple_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+enum Foo {
+ Bar(String),
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar(String),
+ Baz,
+}
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ match self {
+ Self::Bar(arg0) => Self::Bar(arg0.clone()),
+ Self::Baz => Self::Baz,
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_record_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+enum Foo {
+ Bar {
+ bin: String,
+ },
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar {
+ bin: String,
+ },
+ Baz,
+}
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ match self {
+ Self::Bar { bin } => Self::Bar { bin: bin.clone() },
+ Self::Baz => Self::Baz,
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_ord_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: ord, derive
+#[derive(Partial$0Ord)]
+struct Foo {
+ bin: usize,
+}
+"#,
+ r#"
+struct Foo {
+ bin: usize,
+}
+
+impl PartialOrd for Foo {
+ $0fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
+ self.bin.partial_cmp(&other.bin)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_ord_record_struct_multi_field() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: ord, derive
+#[derive(Partial$0Ord)]
+struct Foo {
+ bin: usize,
+ bar: usize,
+ baz: usize,
+}
+"#,
+ r#"
+struct Foo {
+ bin: usize,
+ bar: usize,
+ baz: usize,
+}
+
+impl PartialOrd for Foo {
+ $0fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
+ match self.bin.partial_cmp(&other.bin) {
+ Some(core::cmp::Ordering::Equal) => {}
+ ord => return ord,
+ }
+ match self.bar.partial_cmp(&other.bar) {
+ Some(core::cmp::Ordering::Equal) => {}
+ ord => return ord,
+ }
+ self.baz.partial_cmp(&other.baz)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_ord_tuple_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: ord, derive
+#[derive(Partial$0Ord)]
+struct Foo(usize, usize, usize);
+"#,
+ r#"
+struct Foo(usize, usize, usize);
+
+impl PartialOrd for Foo {
+ $0fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
+ match self.0.partial_cmp(&other.0) {
+ Some(core::cmp::Ordering::Equal) => {}
+ ord => return ord,
+ }
+ match self.1.partial_cmp(&other.1) {
+ Some(core::cmp::Ordering::Equal) => {}
+ ord => return ord,
+ }
+ self.2.partial_cmp(&other.2)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_eq_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: eq, derive
+#[derive(Partial$0Eq)]
+struct Foo {
+ bin: usize,
+ bar: usize,
+}
+"#,
+ r#"
+struct Foo {
+ bin: usize,
+ bar: usize,
+}
+
+impl PartialEq for Foo {
+ $0fn eq(&self, other: &Self) -> bool {
+ self.bin == other.bin && self.bar == other.bar
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_eq_tuple_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: eq, derive
+#[derive(Partial$0Eq)]
+struct Foo(usize, usize);
+"#,
+ r#"
+struct Foo(usize, usize);
+
+impl PartialEq for Foo {
+ $0fn eq(&self, other: &Self) -> bool {
+ self.0 == other.0 && self.1 == other.1
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_eq_empty_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: eq, derive
+#[derive(Partial$0Eq)]
+struct Foo;
+"#,
+ r#"
+struct Foo;
+
+impl PartialEq for Foo {
+ $0fn eq(&self, other: &Self) -> bool {
+ true
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_eq_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: eq, derive
+#[derive(Partial$0Eq)]
+enum Foo {
+ Bar,
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar,
+ Baz,
+}
+
+impl PartialEq for Foo {
+ $0fn eq(&self, other: &Self) -> bool {
+ core::mem::discriminant(self) == core::mem::discriminant(other)
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_eq_tuple_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: eq, derive
+#[derive(Partial$0Eq)]
+enum Foo {
+ Bar(String),
+ Baz,
+}
+"#,
+ r#"
+enum Foo {
+ Bar(String),
+ Baz,
+}
+
+impl PartialEq for Foo {
+ $0fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (Self::Bar(l0), Self::Bar(r0)) => l0 == r0,
+ _ => core::mem::discriminant(self) == core::mem::discriminant(other),
+ }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_partial_eq_record_enum() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: eq, derive
+#[derive(Partial$0Eq)]
+enum Foo {
+ Bar {
+ bin: String,
+ },
+ Baz {
+ qux: String,
+ fez: String,
+ },
+ Qux {},
+ Bin,
+}
+"#,
+ r#"
+enum Foo {
+ Bar {
+ bin: String,
+ },
+ Baz {
+ qux: String,
+ fez: String,
+ },
+ Qux {},
+ Bin,
+}
+
+impl PartialEq for Foo {
+ $0fn eq(&self, other: &Self) -> bool {
+ match (self, other) {
+ (Self::Bar { bin: l_bin }, Self::Bar { bin: r_bin }) => l_bin == r_bin,
+ (Self::Baz { qux: l_qux, fez: l_fez }, Self::Baz { qux: r_qux, fez: r_fez }) => l_qux == r_qux && l_fez == r_fez,
+ _ => core::mem::discriminant(self) == core::mem::discriminant(other),
+ }
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_all() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive
+mod foo {
+ pub trait Bar {
+ type Qux;
+ const Baz: usize = 42;
+ const Fez: usize;
+ fn foo();
+ fn bar() {}
+ }
+}
+
+#[derive($0Bar)]
+struct Foo {
+ bar: String,
+}
+"#,
+ r#"
+mod foo {
+ pub trait Bar {
+ type Qux;
+ const Baz: usize = 42;
+ const Fez: usize;
+ fn foo();
+ fn bar() {}
+ }
+}
+
+struct Foo {
+ bar: String,
+}
+
+impl foo::Bar for Foo {
+ $0type Qux;
+
+ const Fez: usize;
+
+ fn foo() {
+ todo!()
+ }
+}
+"#,
+ )
+ }
+ #[test]
+ fn add_custom_impl_for_unique_input_unknown() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive
+#[derive(Debu$0g)]
+struct Foo {
+ bar: String,
+}
+ "#,
+ r#"
+struct Foo {
+ bar: String,
+}
+
+impl Debug for Foo {
+ $0
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_for_with_visibility_modifier() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive
+#[derive(Debug$0)]
+pub struct Foo {
+ bar: String,
+}
+ "#,
+ r#"
+pub struct Foo {
+ bar: String,
+}
+
+impl Debug for Foo {
+ $0
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_when_multiple_inputs() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive
+#[derive(Display, Debug$0, Serialize)]
+struct Foo {}
+ "#,
+ r#"
+#[derive(Display, Serialize)]
+struct Foo {}
+
+impl Debug for Foo {
+ $0
+}
+ "#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_default_generic_record_struct() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: default, derive
+#[derive(Defau$0lt)]
+struct Foo<T, U> {
+ foo: T,
+ bar: U,
+}
+"#,
+ r#"
+struct Foo<T, U> {
+ foo: T,
+ bar: U,
+}
+
+impl<T, U> Default for Foo<T, U> {
+ $0fn default() -> Self {
+ Self { foo: Default::default(), bar: Default::default() }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_clone_generic_tuple_struct_with_bounds() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(Clo$0ne)]
+struct Foo<T: Clone>(T, usize);
+"#,
+ r#"
+struct Foo<T: Clone>(T, usize);
+
+impl<T: Clone> Clone for Foo<T> {
+ $0fn clone(&self) -> Self {
+ Self(self.0.clone(), self.1.clone())
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_ignore_derive_macro_without_input() {
+ check_assist_not_applicable(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive
+#[derive($0)]
+struct Foo {}
+ "#,
+ )
+ }
+
+ #[test]
+ fn test_ignore_if_cursor_on_param() {
+ check_assist_not_applicable(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive, fmt
+#[derive$0(Debug)]
+struct Foo {}
+ "#,
+ );
+
+ check_assist_not_applicable(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive, fmt
+#[derive(Debug)$0]
+struct Foo {}
+ "#,
+ )
+ }
+
+ #[test]
+ fn test_ignore_if_not_derive() {
+ check_assist_not_applicable(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive
+#[allow(non_camel_$0case_types)]
+struct Foo {}
+ "#,
+ )
+ }
+
+ #[test]
+ fn works_at_start_of_file() {
+ check_assist_not_applicable(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: derive, fmt
+$0#[derive(Debug)]
+struct S;
+ "#,
+ );
+ }
+
+ #[test]
+ fn add_custom_impl_keep_path() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: clone, derive
+#[derive(std::fmt::Debug, Clo$0ne)]
+pub struct Foo;
+"#,
+ r#"
+#[derive(std::fmt::Debug)]
+pub struct Foo;
+
+impl Clone for Foo {
+ $0fn clone(&self) -> Self {
+ Self { }
+ }
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn add_custom_impl_replace_path() {
+ check_assist(
+ replace_derive_with_manual_impl,
+ r#"
+//- minicore: fmt, derive
+#[derive(core::fmt::Deb$0ug, Clone)]
+pub struct Foo;
+"#,
+ r#"
+#[derive(Clone)]
+pub struct Foo;
+
+impl core::fmt::Debug for Foo {
+ $0fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+ f.debug_struct("Foo").finish()
+ }
+}
+"#,
+ )
+ }
+}
--- /dev/null
- m::frobnicate$0() {}
+//! Generated by `sourcegen_assists_docs`, do not edit by hand.
+
+use super::check_doc_test;
+
+#[test]
+fn doctest_add_explicit_type() {
+ check_doc_test(
+ "add_explicit_type",
+ r#####"
+fn main() {
+ let x$0 = 92;
+}
+"#####,
+ r#####"
+fn main() {
+ let x: i32 = 92;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_hash() {
+ check_doc_test(
+ "add_hash",
+ r#####"
+fn main() {
+ r#"Hello,$0 World!"#;
+}
+"#####,
+ r#####"
+fn main() {
+ r##"Hello, World!"##;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_impl_default_members() {
+ check_doc_test(
+ "add_impl_default_members",
+ r#####"
+trait Trait {
+ type X;
+ fn foo(&self);
+ fn bar(&self) {}
+}
+
+impl Trait for () {
+ type X = ();
+ fn foo(&self) {}$0
+}
+"#####,
+ r#####"
+trait Trait {
+ type X;
+ fn foo(&self);
+ fn bar(&self) {}
+}
+
+impl Trait for () {
+ type X = ();
+ fn foo(&self) {}
+
+ $0fn bar(&self) {}
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_impl_missing_members() {
+ check_doc_test(
+ "add_impl_missing_members",
+ r#####"
+trait Trait<T> {
+ type X;
+ fn foo(&self) -> T;
+ fn bar(&self) {}
+}
+
+impl Trait<u32> for () {$0
+
+}
+"#####,
+ r#####"
+trait Trait<T> {
+ type X;
+ fn foo(&self) -> T;
+ fn bar(&self) {}
+}
+
+impl Trait<u32> for () {
+ $0type X;
+
+ fn foo(&self) -> u32 {
+ todo!()
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_label_to_loop() {
+ check_doc_test(
+ "add_label_to_loop",
+ r#####"
+fn main() {
+ loop$0 {
+ break;
+ continue;
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ 'l: loop {
+ break 'l;
+ continue 'l;
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_lifetime_to_type() {
+ check_doc_test(
+ "add_lifetime_to_type",
+ r#####"
+struct Point {
+ x: &$0u32,
+ y: u32,
+}
+"#####,
+ r#####"
+struct Point<'a> {
+ x: &'a u32,
+ y: u32,
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_missing_match_arms() {
+ check_doc_test(
+ "add_missing_match_arms",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ $0
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ $0Action::Move { distance } => todo!(),
+ Action::Stop => todo!(),
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_return_type() {
+ check_doc_test(
+ "add_return_type",
+ r#####"
+fn foo() { 4$02i32 }
+"#####,
+ r#####"
+fn foo() -> i32 { 42i32 }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_add_turbo_fish() {
+ check_doc_test(
+ "add_turbo_fish",
+ r#####"
+fn make<T>() -> T { todo!() }
+fn main() {
+ let x = make$0();
+}
+"#####,
+ r#####"
+fn make<T>() -> T { todo!() }
+fn main() {
+ let x = make::<${0:_}>();
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_apply_demorgan() {
+ check_doc_test(
+ "apply_demorgan",
+ r#####"
+fn main() {
+ if x != 4 ||$0 y < 3.14 {}
+}
+"#####,
+ r#####"
+fn main() {
+ if !(x == 4 && y >= 3.14) {}
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_auto_import() {
+ check_doc_test(
+ "auto_import",
+ r#####"
+fn main() {
+ let map = HashMap$0::new();
+}
+pub mod std { pub mod collections { pub struct HashMap { } } }
+"#####,
+ r#####"
+use std::collections::HashMap;
+
+fn main() {
+ let map = HashMap::new();
+}
+pub mod std { pub mod collections { pub struct HashMap { } } }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_change_visibility() {
+ check_doc_test(
+ "change_visibility",
+ r#####"
+$0fn frobnicate() {}
+"#####,
+ r#####"
+pub(crate) fn frobnicate() {}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_bool_then_to_if() {
+ check_doc_test(
+ "convert_bool_then_to_if",
+ r#####"
+//- minicore: bool_impl
+fn main() {
+ (0 == 0).then$0(|| val)
+}
+"#####,
+ r#####"
+fn main() {
+ if 0 == 0 {
+ Some(val)
+ } else {
+ None
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_for_loop_with_for_each() {
+ check_doc_test(
+ "convert_for_loop_with_for_each",
+ r#####"
+fn main() {
+ let x = vec![1, 2, 3];
+ for$0 v in x {
+ let y = v * 2;
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ let x = vec![1, 2, 3];
+ x.into_iter().for_each(|v| {
+ let y = v * 2;
+ });
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_if_to_bool_then() {
+ check_doc_test(
+ "convert_if_to_bool_then",
+ r#####"
+//- minicore: option
+fn main() {
+ if$0 cond {
+ Some(val)
+ } else {
+ None
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ cond.then(|| val)
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_integer_literal() {
+ check_doc_test(
+ "convert_integer_literal",
+ r#####"
+const _: i32 = 10$0;
+"#####,
+ r#####"
+const _: i32 = 0b1010;
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_into_to_from() {
+ check_doc_test(
+ "convert_into_to_from",
+ r#####"
+//- minicore: from
+impl $0Into<Thing> for usize {
+ fn into(self) -> Thing {
+ Thing {
+ b: self.to_string(),
+ a: self
+ }
+ }
+}
+"#####,
+ r#####"
+impl From<usize> for Thing {
+ fn from(val: usize) -> Self {
+ Thing {
+ b: val.to_string(),
+ a: val
+ }
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_iter_for_each_to_for() {
+ check_doc_test(
+ "convert_iter_for_each_to_for",
+ r#####"
+//- minicore: iterators
+use core::iter;
+fn main() {
+ let iter = iter::repeat((9, 2));
+ iter.for_each$0(|(x, y)| {
+ println!("x: {}, y: {}", x, y);
+ });
+}
+"#####,
+ r#####"
+use core::iter;
+fn main() {
+ let iter = iter::repeat((9, 2));
+ for (x, y) in iter {
+ println!("x: {}, y: {}", x, y);
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_let_else_to_match() {
+ check_doc_test(
+ "convert_let_else_to_match",
+ r#####"
+fn main() {
+ let Ok(mut x) = f() else$0 { return };
+}
+"#####,
+ r#####"
+fn main() {
+ let mut x = match f() {
+ Ok(x) => x,
+ _ => return,
+ };
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_match_to_let_else() {
+ check_doc_test(
+ "convert_match_to_let_else",
+ r#####"
+//- minicore: option
+fn foo(opt: Option<()>) {
+ let val = $0match opt {
+ Some(it) => it,
+ None => return,
+ };
+}
+"#####,
+ r#####"
+fn foo(opt: Option<()>) {
+ let Some(val) = opt else { return };
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_named_struct_to_tuple_struct() {
+ check_doc_test(
+ "convert_named_struct_to_tuple_struct",
+ r#####"
+struct Point$0 { x: f32, y: f32 }
+
+impl Point {
+ pub fn new(x: f32, y: f32) -> Self {
+ Point { x, y }
+ }
+
+ pub fn x(&self) -> f32 {
+ self.x
+ }
+
+ pub fn y(&self) -> f32 {
+ self.y
+ }
+}
+"#####,
+ r#####"
+struct Point(f32, f32);
+
+impl Point {
+ pub fn new(x: f32, y: f32) -> Self {
+ Point(x, y)
+ }
+
+ pub fn x(&self) -> f32 {
+ self.0
+ }
+
+ pub fn y(&self) -> f32 {
+ self.1
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_to_guarded_return() {
+ check_doc_test(
+ "convert_to_guarded_return",
+ r#####"
+fn main() {
+ $0if cond {
+ foo();
+ bar();
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ if !cond {
+ return;
+ }
+ foo();
+ bar();
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_tuple_struct_to_named_struct() {
+ check_doc_test(
+ "convert_tuple_struct_to_named_struct",
+ r#####"
+struct Point$0(f32, f32);
+
+impl Point {
+ pub fn new(x: f32, y: f32) -> Self {
+ Point(x, y)
+ }
+
+ pub fn x(&self) -> f32 {
+ self.0
+ }
+
+ pub fn y(&self) -> f32 {
+ self.1
+ }
+}
+"#####,
+ r#####"
+struct Point { field1: f32, field2: f32 }
+
+impl Point {
+ pub fn new(x: f32, y: f32) -> Self {
+ Point { field1: x, field2: y }
+ }
+
+ pub fn x(&self) -> f32 {
+ self.field1
+ }
+
+ pub fn y(&self) -> f32 {
+ self.field2
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_two_arm_bool_match_to_matches_macro() {
+ check_doc_test(
+ "convert_two_arm_bool_match_to_matches_macro",
+ r#####"
+fn main() {
+ match scrutinee$0 {
+ Some(val) if val.cond() => true,
+ _ => false,
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ matches!(scrutinee, Some(val) if val.cond())
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_convert_while_to_loop() {
+ check_doc_test(
+ "convert_while_to_loop",
+ r#####"
+fn main() {
+ $0while cond {
+ foo();
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ loop {
+ if !cond {
+ break;
+ }
+ foo();
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_destructure_tuple_binding() {
+ check_doc_test(
+ "destructure_tuple_binding",
+ r#####"
+fn main() {
+ let $0t = (1,2);
+ let v = t.0;
+}
+"#####,
+ r#####"
+fn main() {
+ let ($0_0, _1) = (1,2);
+ let v = _0;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_expand_glob_import() {
+ check_doc_test(
+ "expand_glob_import",
+ r#####"
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+}
+
+use foo::*$0;
+
+fn qux(bar: Bar, baz: Baz) {}
+"#####,
+ r#####"
+mod foo {
+ pub struct Bar;
+ pub struct Baz;
+}
+
+use foo::{Bar, Baz};
+
+fn qux(bar: Bar, baz: Baz) {}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_extract_function() {
+ check_doc_test(
+ "extract_function",
+ r#####"
+fn main() {
+ let n = 1;
+ $0let m = n + 2;
+ // calculate
+ let k = m + n;$0
+ let g = 3;
+}
+"#####,
+ r#####"
+fn main() {
+ let n = 1;
+ fun_name(n);
+ let g = 3;
+}
+
+fn $0fun_name(n: i32) {
+ let m = n + 2;
+ // calculate
+ let k = m + n;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_extract_module() {
+ check_doc_test(
+ "extract_module",
+ r#####"
+$0fn foo(name: i32) -> i32 {
+ name + 1
+}$0
+
+fn bar(name: i32) -> i32 {
+ name + 2
+}
+"#####,
+ r#####"
+mod modname {
+ pub(crate) fn foo(name: i32) -> i32 {
+ name + 1
+ }
+}
+
+fn bar(name: i32) -> i32 {
+ name + 2
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_extract_struct_from_enum_variant() {
+ check_doc_test(
+ "extract_struct_from_enum_variant",
+ r#####"
+enum A { $0One(u32, u32) }
+"#####,
+ r#####"
+struct One(u32, u32);
+
+enum A { One(One) }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_extract_type_alias() {
+ check_doc_test(
+ "extract_type_alias",
+ r#####"
+struct S {
+ field: $0(u8, u8, u8)$0,
+}
+"#####,
+ r#####"
+type $0Type = (u8, u8, u8);
+
+struct S {
+ field: Type,
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_extract_variable() {
+ check_doc_test(
+ "extract_variable",
+ r#####"
+fn main() {
+ $0(1 + 2)$0 * 4;
+}
+"#####,
+ r#####"
+fn main() {
+ let $0var_name = (1 + 2);
+ var_name * 4;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_fix_visibility() {
+ check_doc_test(
+ "fix_visibility",
+ r#####"
+mod m {
+ fn frobnicate() {}
+}
+fn main() {
+ m::frobnicate$0() {}
+}
+"#####,
+ r#####"
+mod m {
+ $0pub(crate) fn frobnicate() {}
+}
+fn main() {
+ m::frobnicate() {}
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_flip_binexpr() {
+ check_doc_test(
+ "flip_binexpr",
+ r#####"
+fn main() {
+ let _ = 90 +$0 2;
+}
+"#####,
+ r#####"
+fn main() {
+ let _ = 2 + 90;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_flip_comma() {
+ check_doc_test(
+ "flip_comma",
+ r#####"
+fn main() {
+ ((1, 2),$0 (3, 4));
+}
+"#####,
+ r#####"
+fn main() {
+ ((3, 4), (1, 2));
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_flip_trait_bound() {
+ check_doc_test(
+ "flip_trait_bound",
+ r#####"
+fn foo<T: Clone +$0 Copy>() { }
+"#####,
+ r#####"
+fn foo<T: Copy + Clone>() { }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_constant() {
+ check_doc_test(
+ "generate_constant",
+ r#####"
+struct S { i: usize }
+impl S { pub fn new(n: usize) {} }
+fn main() {
+ let v = S::new(CAPA$0CITY);
+}
+"#####,
+ r#####"
+struct S { i: usize }
+impl S { pub fn new(n: usize) {} }
+fn main() {
+ const CAPACITY: usize = $0;
+ let v = S::new(CAPACITY);
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_default_from_enum_variant() {
+ check_doc_test(
+ "generate_default_from_enum_variant",
+ r#####"
+enum Version {
+ Undefined,
+ Minor$0,
+ Major,
+}
+"#####,
+ r#####"
+enum Version {
+ Undefined,
+ Minor,
+ Major,
+}
+
+impl Default for Version {
+ fn default() -> Self {
+ Self::Minor
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_default_from_new() {
+ check_doc_test(
+ "generate_default_from_new",
+ r#####"
+struct Example { _inner: () }
+
+impl Example {
+ pub fn n$0ew() -> Self {
+ Self { _inner: () }
+ }
+}
+"#####,
+ r#####"
+struct Example { _inner: () }
+
+impl Example {
+ pub fn new() -> Self {
+ Self { _inner: () }
+ }
+}
+
+impl Default for Example {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_delegate_methods() {
+ check_doc_test(
+ "generate_delegate_methods",
+ r#####"
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person {
+ ag$0e: Age,
+}
+"#####,
+ r#####"
+struct Age(u8);
+impl Age {
+ fn age(&self) -> u8 {
+ self.0
+ }
+}
+
+struct Person {
+ age: Age,
+}
+
+impl Person {
+ $0fn age(&self) -> u8 {
+ self.age.age()
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_deref() {
+ check_doc_test(
+ "generate_deref",
+ r#####"
+//- minicore: deref, deref_mut
+struct A;
+struct B {
+ $0a: A
+}
+"#####,
+ r#####"
+struct A;
+struct B {
+ a: A
+}
+
+impl core::ops::Deref for B {
+ type Target = A;
+
+ fn deref(&self) -> &Self::Target {
+ &self.a
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_derive() {
+ check_doc_test(
+ "generate_derive",
+ r#####"
+struct Point {
+ x: u32,
+ y: u32,$0
+}
+"#####,
+ r#####"
+#[derive($0)]
+struct Point {
+ x: u32,
+ y: u32,
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_doc_example() {
+ check_doc_test(
+ "generate_doc_example",
+ r#####"
+/// Adds two numbers.$0
+pub fn add(a: i32, b: i32) -> i32 { a + b }
+"#####,
+ r#####"
+/// Adds two numbers.
+///
+/// # Examples
+///
+/// ```
+/// use test::add;
+///
+/// assert_eq!(add(a, b), );
+/// ```
+pub fn add(a: i32, b: i32) -> i32 { a + b }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_documentation_template() {
+ check_doc_test(
+ "generate_documentation_template",
+ r#####"
+pub struct S;
+impl S {
+ pub unsafe fn set_len$0(&mut self, len: usize) -> Result<(), std::io::Error> {
+ /* ... */
+ }
+}
+"#####,
+ r#####"
+pub struct S;
+impl S {
+ /// Sets the length of this [`S`].
+ ///
+ /// # Errors
+ ///
+ /// This function will return an error if .
+ ///
+ /// # Safety
+ ///
+ /// .
+ pub unsafe fn set_len(&mut self, len: usize) -> Result<(), std::io::Error> {
+ /* ... */
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_enum_as_method() {
+ check_doc_test(
+ "generate_enum_as_method",
+ r#####"
+enum Value {
+ Number(i32),
+ Text(String)$0,
+}
+"#####,
+ r#####"
+enum Value {
+ Number(i32),
+ Text(String),
+}
+
+impl Value {
+ fn as_text(&self) -> Option<&String> {
+ if let Self::Text(v) = self {
+ Some(v)
+ } else {
+ None
+ }
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_enum_is_method() {
+ check_doc_test(
+ "generate_enum_is_method",
+ r#####"
+enum Version {
+ Undefined,
+ Minor$0,
+ Major,
+}
+"#####,
+ r#####"
+enum Version {
+ Undefined,
+ Minor,
+ Major,
+}
+
+impl Version {
+ /// Returns `true` if the version is [`Minor`].
+ ///
+ /// [`Minor`]: Version::Minor
+ #[must_use]
+ fn is_minor(&self) -> bool {
+ matches!(self, Self::Minor)
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_enum_try_into_method() {
+ check_doc_test(
+ "generate_enum_try_into_method",
+ r#####"
+enum Value {
+ Number(i32),
+ Text(String)$0,
+}
+"#####,
+ r#####"
+enum Value {
+ Number(i32),
+ Text(String),
+}
+
+impl Value {
+ fn try_into_text(self) -> Result<String, Self> {
+ if let Self::Text(v) = self {
+ Ok(v)
+ } else {
+ Err(self)
+ }
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_enum_variant() {
+ check_doc_test(
+ "generate_enum_variant",
+ r#####"
+enum Countries {
+ Ghana,
+}
+
+fn main() {
+ let country = Countries::Lesotho$0;
+}
+"#####,
+ r#####"
+enum Countries {
+ Ghana,
+ Lesotho,
+}
+
+fn main() {
+ let country = Countries::Lesotho;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_from_impl_for_enum() {
+ check_doc_test(
+ "generate_from_impl_for_enum",
+ r#####"
+enum A { $0One(u32) }
+"#####,
+ r#####"
+enum A { One(u32) }
+
+impl From<u32> for A {
+ fn from(v: u32) -> Self {
+ Self::One(v)
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_function() {
+ check_doc_test(
+ "generate_function",
+ r#####"
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ bar$0("", baz());
+}
+
+"#####,
+ r#####"
+struct Baz;
+fn baz() -> Baz { Baz }
+fn foo() {
+ bar("", baz());
+}
+
+fn bar(arg: &str, baz: Baz) ${0:-> _} {
+ todo!()
+}
+
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_getter() {
+ check_doc_test(
+ "generate_getter",
+ r#####"
+//- minicore: as_ref
+pub struct String;
+impl AsRef<str> for String {
+ fn as_ref(&self) -> &str {
+ ""
+ }
+}
+
+struct Person {
+ nam$0e: String,
+}
+"#####,
+ r#####"
+pub struct String;
+impl AsRef<str> for String {
+ fn as_ref(&self) -> &str {
+ ""
+ }
+}
+
+struct Person {
+ name: String,
+}
+
+impl Person {
+ fn $0name(&self) -> &str {
+ self.name.as_ref()
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_getter_mut() {
+ check_doc_test(
+ "generate_getter_mut",
+ r#####"
+struct Person {
+ nam$0e: String,
+}
+"#####,
+ r#####"
+struct Person {
+ name: String,
+}
+
+impl Person {
+ fn $0name_mut(&mut self) -> &mut String {
+ &mut self.name
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_impl() {
+ check_doc_test(
+ "generate_impl",
+ r#####"
+struct Ctx<T: Clone> {
+ data: T,$0
+}
+"#####,
+ r#####"
+struct Ctx<T: Clone> {
+ data: T,
+}
+
+impl<T: Clone> Ctx<T> {
+ $0
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_is_empty_from_len() {
+ check_doc_test(
+ "generate_is_empty_from_len",
+ r#####"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ p$0ub fn len(&self) -> usize {
+ self.data.len()
+ }
+}
+"#####,
+ r#####"
+struct MyStruct { data: Vec<String> }
+
+impl MyStruct {
+ #[must_use]
+ pub fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ #[must_use]
+ pub fn is_empty(&self) -> bool {
+ self.len() == 0
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_new() {
+ check_doc_test(
+ "generate_new",
+ r#####"
+struct Ctx<T: Clone> {
+ data: T,$0
+}
+"#####,
+ r#####"
+struct Ctx<T: Clone> {
+ data: T,
+}
+
+impl<T: Clone> Ctx<T> {
+ fn $0new(data: T) -> Self { Self { data } }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_generate_setter() {
+ check_doc_test(
+ "generate_setter",
+ r#####"
+struct Person {
+ nam$0e: String,
+}
+"#####,
+ r#####"
+struct Person {
+ name: String,
+}
+
+impl Person {
+ fn set_name(&mut self, name: String) {
+ self.name = name;
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_inline_call() {
+ check_doc_test(
+ "inline_call",
+ r#####"
+//- minicore: option
+fn foo(name: Option<&str>) {
+ let name = name.unwrap$0();
+}
+"#####,
+ r#####"
+fn foo(name: Option<&str>) {
+ let name = match name {
+ Some(val) => val,
+ None => panic!("called `Option::unwrap()` on a `None` value"),
+ };
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_inline_into_callers() {
+ check_doc_test(
+ "inline_into_callers",
+ r#####"
+fn print(_: &str) {}
+fn foo$0(word: &str) {
+ if !word.is_empty() {
+ print(word);
+ }
+}
+fn bar() {
+ foo("안녕하세요");
+ foo("여러분");
+}
+"#####,
+ r#####"
+fn print(_: &str) {}
+
+fn bar() {
+ {
+ let word = "안녕하세요";
+ if !word.is_empty() {
+ print(word);
+ }
+ };
+ {
+ let word = "여러분";
+ if !word.is_empty() {
+ print(word);
+ }
+ };
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_inline_local_variable() {
+ check_doc_test(
+ "inline_local_variable",
+ r#####"
+fn main() {
+ let x$0 = 1 + 2;
+ x * 4;
+}
+"#####,
+ r#####"
+fn main() {
+ (1 + 2) * 4;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_inline_type_alias() {
+ check_doc_test(
+ "inline_type_alias",
+ r#####"
+type A<T = u32> = Vec<T>;
+
+fn main() {
+ let a: $0A;
+}
+"#####,
+ r#####"
+type A<T = u32> = Vec<T>;
+
+fn main() {
+ let a: Vec<u32>;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_inline_type_alias_uses() {
+ check_doc_test(
+ "inline_type_alias_uses",
+ r#####"
+type $0A = i32;
+fn id(x: A) -> A {
+ x
+};
+fn foo() {
+ let _: A = 3;
+}
+"#####,
+ r#####"
+
+fn id(x: i32) -> i32 {
+ x
+};
+fn foo() {
+ let _: i32 = 3;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_introduce_named_generic() {
+ check_doc_test(
+ "introduce_named_generic",
+ r#####"
+fn foo(bar: $0impl Bar) {}
+"#####,
+ r#####"
+fn foo<B: Bar>(bar: B) {}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_introduce_named_lifetime() {
+ check_doc_test(
+ "introduce_named_lifetime",
+ r#####"
+impl Cursor<'_$0> {
+ fn node(self) -> &SyntaxNode {
+ match self {
+ Cursor::Replace(node) | Cursor::Before(node) => node,
+ }
+ }
+}
+"#####,
+ r#####"
+impl<'a> Cursor<'a> {
+ fn node(self) -> &SyntaxNode {
+ match self {
+ Cursor::Replace(node) | Cursor::Before(node) => node,
+ }
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_invert_if() {
+ check_doc_test(
+ "invert_if",
+ r#####"
+fn main() {
+ if$0 !y { A } else { B }
+}
+"#####,
+ r#####"
+fn main() {
+ if y { B } else { A }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_line_to_block() {
+ check_doc_test(
+ "line_to_block",
+ r#####"
+ // Multi-line$0
+ // comment
+"#####,
+ r#####"
+ /*
+ Multi-line
+ comment
+ */
+"#####,
+ )
+}
+
+#[test]
+fn doctest_make_raw_string() {
+ check_doc_test(
+ "make_raw_string",
+ r#####"
+fn main() {
+ "Hello,$0 World!";
+}
+"#####,
+ r#####"
+fn main() {
+ r#"Hello, World!"#;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_make_usual_string() {
+ check_doc_test(
+ "make_usual_string",
+ r#####"
+fn main() {
+ r#"Hello,$0 "World!""#;
+}
+"#####,
+ r#####"
+fn main() {
+ "Hello, \"World!\"";
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_merge_imports() {
+ check_doc_test(
+ "merge_imports",
+ r#####"
+use std::$0fmt::Formatter;
+use std::io;
+"#####,
+ r#####"
+use std::{fmt::Formatter, io};
+"#####,
+ )
+}
+
+#[test]
+fn doctest_merge_match_arms() {
+ check_doc_test(
+ "merge_match_arms",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ $0Action::Move(..) => foo(),
+ Action::Stop => foo(),
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move(..) | Action::Stop => foo(),
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_move_arm_cond_to_match_guard() {
+ check_doc_test(
+ "move_arm_cond_to_match_guard",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } => $0if distance > 10 { foo() },
+ _ => (),
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } if distance > 10 => foo(),
+ _ => (),
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_move_bounds_to_where_clause() {
+ check_doc_test(
+ "move_bounds_to_where_clause",
+ r#####"
+fn apply<T, U, $0F: FnOnce(T) -> U>(f: F, x: T) -> U {
+ f(x)
+}
+"#####,
+ r#####"
+fn apply<T, U, F>(f: F, x: T) -> U where F: FnOnce(T) -> U {
+ f(x)
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_move_format_string_arg() {
+ check_doc_test(
+ "move_format_string_arg",
+ r#####"
+macro_rules! format_args {
+ ($lit:literal $(tt:tt)*) => { 0 },
+}
+macro_rules! print {
+ ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*)));
+}
+
+fn main() {
+ print!("{x + 1}$0");
+}
+"#####,
+ r#####"
+macro_rules! format_args {
+ ($lit:literal $(tt:tt)*) => { 0 },
+}
+macro_rules! print {
+ ($($arg:tt)*) => (std::io::_print(format_args!($($arg)*)));
+}
+
+fn main() {
+ print!("{}"$0, x + 1);
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_move_from_mod_rs() {
+ check_doc_test(
+ "move_from_mod_rs",
+ r#####"
+//- /main.rs
+mod a;
+//- /a/mod.rs
+$0fn t() {}$0
+"#####,
+ r#####"
+fn t() {}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_move_guard_to_arm_body() {
+ check_doc_test(
+ "move_guard_to_arm_body",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } $0if distance > 10 => foo(),
+ _ => (),
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } => if distance > 10 {
+ foo()
+ },
+ _ => (),
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_move_module_to_file() {
+ check_doc_test(
+ "move_module_to_file",
+ r#####"
+mod $0foo {
+ fn t() {}
+}
+"#####,
+ r#####"
+mod foo;
+"#####,
+ )
+}
+
+#[test]
+fn doctest_move_to_mod_rs() {
+ check_doc_test(
+ "move_to_mod_rs",
+ r#####"
+//- /main.rs
+mod a;
+//- /a.rs
+$0fn t() {}$0
+"#####,
+ r#####"
+fn t() {}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_promote_local_to_const() {
+ check_doc_test(
+ "promote_local_to_const",
+ r#####"
+fn main() {
+ let foo$0 = true;
+
+ if foo {
+ println!("It's true");
+ } else {
+ println!("It's false");
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ const $0FOO: bool = true;
+
+ if FOO {
+ println!("It's true");
+ } else {
+ println!("It's false");
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_pull_assignment_up() {
+ check_doc_test(
+ "pull_assignment_up",
+ r#####"
+fn main() {
+ let mut foo = 6;
+
+ if true {
+ $0foo = 5;
+ } else {
+ foo = 4;
+ }
+}
+"#####,
+ r#####"
+fn main() {
+ let mut foo = 6;
+
+ foo = if true {
+ 5
+ } else {
+ 4
+ };
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_qualify_method_call() {
+ check_doc_test(
+ "qualify_method_call",
+ r#####"
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+fn main() {
+ let foo = Foo;
+ foo.fo$0o();
+}
+"#####,
+ r#####"
+struct Foo;
+impl Foo {
+ fn foo(&self) {}
+}
+fn main() {
+ let foo = Foo;
+ Foo::foo(&foo);
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_qualify_path() {
+ check_doc_test(
+ "qualify_path",
+ r#####"
+fn main() {
+ let map = HashMap$0::new();
+}
+pub mod std { pub mod collections { pub struct HashMap { } } }
+"#####,
+ r#####"
+fn main() {
+ let map = std::collections::HashMap::new();
+}
+pub mod std { pub mod collections { pub struct HashMap { } } }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_reformat_number_literal() {
+ check_doc_test(
+ "reformat_number_literal",
+ r#####"
+const _: i32 = 1012345$0;
+"#####,
+ r#####"
+const _: i32 = 1_012_345;
+"#####,
+ )
+}
+
+#[test]
+fn doctest_remove_dbg() {
+ check_doc_test(
+ "remove_dbg",
+ r#####"
+fn main() {
+ $0dbg!(92);
+}
+"#####,
+ r#####"
+fn main() {
+ 92;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_remove_hash() {
+ check_doc_test(
+ "remove_hash",
+ r#####"
+fn main() {
+ r#"Hello,$0 World!"#;
+}
+"#####,
+ r#####"
+fn main() {
+ r"Hello, World!";
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_remove_mut() {
+ check_doc_test(
+ "remove_mut",
+ r#####"
+impl Walrus {
+ fn feed(&mut$0 self, amount: u32) {}
+}
+"#####,
+ r#####"
+impl Walrus {
+ fn feed(&self, amount: u32) {}
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_remove_unused_param() {
+ check_doc_test(
+ "remove_unused_param",
+ r#####"
+fn frobnicate(x: i32$0) {}
+
+fn main() {
+ frobnicate(92);
+}
+"#####,
+ r#####"
+fn frobnicate() {}
+
+fn main() {
+ frobnicate();
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_reorder_fields() {
+ check_doc_test(
+ "reorder_fields",
+ r#####"
+struct Foo {foo: i32, bar: i32};
+const test: Foo = $0Foo {bar: 0, foo: 1}
+"#####,
+ r#####"
+struct Foo {foo: i32, bar: i32};
+const test: Foo = Foo {foo: 1, bar: 0}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_reorder_impl_items() {
+ check_doc_test(
+ "reorder_impl_items",
+ r#####"
+trait Foo {
+ type A;
+ const B: u8;
+ fn c();
+}
+
+struct Bar;
+$0impl Foo for Bar {
+ const B: u8 = 17;
+ fn c() {}
+ type A = String;
+}
+"#####,
+ r#####"
+trait Foo {
+ type A;
+ const B: u8;
+ fn c();
+}
+
+struct Bar;
+impl Foo for Bar {
+ type A = String;
+ const B: u8 = 17;
+ fn c() {}
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_char_with_string() {
+ check_doc_test(
+ "replace_char_with_string",
+ r#####"
+fn main() {
+ find('{$0');
+}
+"#####,
+ r#####"
+fn main() {
+ find("{");
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_derive_with_manual_impl() {
+ check_doc_test(
+ "replace_derive_with_manual_impl",
+ r#####"
+//- minicore: derive
+trait Debug { fn fmt(&self, f: &mut Formatter) -> Result<()>; }
+#[derive(Deb$0ug, Display)]
+struct S;
+"#####,
+ r#####"
+trait Debug { fn fmt(&self, f: &mut Formatter) -> Result<()>; }
+#[derive(Display)]
+struct S;
+
+impl Debug for S {
+ $0fn fmt(&self, f: &mut Formatter) -> Result<()> {
+ f.debug_struct("S").finish()
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_if_let_with_match() {
+ check_doc_test(
+ "replace_if_let_with_match",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ $0if let Action::Move { distance } = action {
+ foo(distance)
+ } else {
+ bar()
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move { distance } => foo(distance),
+ _ => bar(),
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_let_with_if_let() {
+ check_doc_test(
+ "replace_let_with_if_let",
+ r#####"
+enum Option<T> { Some(T), None }
+
+fn main(action: Action) {
+ $0let x = compute();
+}
+
+fn compute() -> Option<i32> { None }
+"#####,
+ r#####"
+enum Option<T> { Some(T), None }
+
+fn main(action: Action) {
+ if let Some(x) = compute() {
+ }
+}
+
+fn compute() -> Option<i32> { None }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_match_with_if_let() {
+ check_doc_test(
+ "replace_match_with_if_let",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ $0match action {
+ Action::Move { distance } => foo(distance),
+ _ => bar(),
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ if let Action::Move { distance } = action {
+ foo(distance)
+ } else {
+ bar()
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_or_else_with_or() {
+ check_doc_test(
+ "replace_or_else_with_or",
+ r#####"
+//- minicore:option
+fn foo() {
+ let a = Some(1);
+ a.unwra$0p_or_else(|| 2);
+}
+"#####,
+ r#####"
+fn foo() {
+ let a = Some(1);
+ a.unwrap_or(2);
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_or_with_or_else() {
+ check_doc_test(
+ "replace_or_with_or_else",
+ r#####"
+//- minicore:option
+fn foo() {
+ let a = Some(1);
+ a.unwra$0p_or(2);
+}
+"#####,
+ r#####"
+fn foo() {
+ let a = Some(1);
+ a.unwrap_or_else(|| 2);
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_qualified_name_with_use() {
+ check_doc_test(
+ "replace_qualified_name_with_use",
+ r#####"
+mod std { pub mod collections { pub struct HashMap<T, U>(T, U); } }
+fn process(map: std::collections::$0HashMap<String, String>) {}
+"#####,
+ r#####"
+use std::collections::HashMap;
+
+mod std { pub mod collections { pub struct HashMap<T, U>(T, U); } }
+fn process(map: HashMap<String, String>) {}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_string_with_char() {
+ check_doc_test(
+ "replace_string_with_char",
+ r#####"
+fn main() {
+ find("{$0");
+}
+"#####,
+ r#####"
+fn main() {
+ find('{');
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_try_expr_with_match() {
+ check_doc_test(
+ "replace_try_expr_with_match",
+ r#####"
+//- minicore:option
+fn handle() {
+ let pat = Some(true)$0?;
+}
+"#####,
+ r#####"
+fn handle() {
+ let pat = match Some(true) {
+ Some(it) => it,
+ None => return None,
+ };
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_replace_turbofish_with_explicit_type() {
+ check_doc_test(
+ "replace_turbofish_with_explicit_type",
+ r#####"
+fn make<T>() -> T { ) }
+fn main() {
+ let a = make$0::<i32>();
+}
+"#####,
+ r#####"
+fn make<T>() -> T { ) }
+fn main() {
+ let a: i32 = make();
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_sort_items() {
+ check_doc_test(
+ "sort_items",
+ r#####"
+struct $0Foo$0 { second: u32, first: String }
+"#####,
+ r#####"
+struct Foo { first: String, second: u32 }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_sort_items_1() {
+ check_doc_test(
+ "sort_items",
+ r#####"
+trait $0Bar$0 {
+ fn second(&self) -> u32;
+ fn first(&self) -> String;
+}
+"#####,
+ r#####"
+trait Bar {
+ fn first(&self) -> String;
+ fn second(&self) -> u32;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_sort_items_2() {
+ check_doc_test(
+ "sort_items",
+ r#####"
+struct Baz;
+impl $0Baz$0 {
+ fn second(&self) -> u32;
+ fn first(&self) -> String;
+}
+"#####,
+ r#####"
+struct Baz;
+impl Baz {
+ fn first(&self) -> String;
+ fn second(&self) -> u32;
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_sort_items_3() {
+ check_doc_test(
+ "sort_items",
+ r#####"
+enum $0Animal$0 {
+ Dog(String, f64),
+ Cat { weight: f64, name: String },
+}
+"#####,
+ r#####"
+enum Animal {
+ Cat { weight: f64, name: String },
+ Dog(String, f64),
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_sort_items_4() {
+ check_doc_test(
+ "sort_items",
+ r#####"
+enum Animal {
+ Dog(String, f64),
+ Cat $0{ weight: f64, name: String }$0,
+}
+"#####,
+ r#####"
+enum Animal {
+ Dog(String, f64),
+ Cat { name: String, weight: f64 },
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_split_import() {
+ check_doc_test(
+ "split_import",
+ r#####"
+use std::$0collections::HashMap;
+"#####,
+ r#####"
+use std::{collections::HashMap};
+"#####,
+ )
+}
+
+#[test]
+fn doctest_toggle_ignore() {
+ check_doc_test(
+ "toggle_ignore",
+ r#####"
+$0#[test]
+fn arithmetics {
+ assert_eq!(2 + 2, 5);
+}
+"#####,
+ r#####"
+#[test]
+#[ignore]
+fn arithmetics {
+ assert_eq!(2 + 2, 5);
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_unmerge_match_arm() {
+ check_doc_test(
+ "unmerge_match_arm",
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move(..) $0| Action::Stop => foo(),
+ }
+}
+"#####,
+ r#####"
+enum Action { Move { distance: u32 }, Stop }
+
+fn handle(action: Action) {
+ match action {
+ Action::Move(..) => foo(),
+ Action::Stop => foo(),
+ }
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_unmerge_use() {
+ check_doc_test(
+ "unmerge_use",
+ r#####"
+use std::fmt::{Debug, Display$0};
+"#####,
+ r#####"
+use std::fmt::{Debug};
+use std::fmt::Display;
+"#####,
+ )
+}
+
+#[test]
+fn doctest_unnecessary_async() {
+ check_doc_test(
+ "unnecessary_async",
+ r#####"
+pub async f$0n foo() {}
+pub async fn bar() { foo().await }
+"#####,
+ r#####"
+pub fn foo() {}
+pub async fn bar() { foo() }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_unwrap_block() {
+ check_doc_test(
+ "unwrap_block",
+ r#####"
+fn foo() {
+ if true {$0
+ println!("foo");
+ }
+}
+"#####,
+ r#####"
+fn foo() {
+ println!("foo");
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_unwrap_result_return_type() {
+ check_doc_test(
+ "unwrap_result_return_type",
+ r#####"
+//- minicore: result
+fn foo() -> Result<i32>$0 { Ok(42i32) }
+"#####,
+ r#####"
+fn foo() -> i32 { 42i32 }
+"#####,
+ )
+}
+
+#[test]
+fn doctest_unwrap_tuple() {
+ check_doc_test(
+ "unwrap_tuple",
+ r#####"
+//- minicore: result
+fn main() {
+ $0let (foo, bar) = ("Foo", "Bar");
+}
+"#####,
+ r#####"
+fn main() {
+ let foo = "Foo";
+ let bar = "Bar";
+}
+"#####,
+ )
+}
+
+#[test]
+fn doctest_wrap_return_type_in_result() {
+ check_doc_test(
+ "wrap_return_type_in_result",
+ r#####"
+//- minicore: result
+fn foo() -> i32$0 { 42i32 }
+"#####,
+ r#####"
+fn foo() -> Result<i32, ${0:_}> { Ok(42i32) }
+"#####,
+ )
+}
--- /dev/null
+//! Assorted functions shared by several assists.
+
+use std::ops;
+
+pub(crate) use gen_trait_fn_body::gen_trait_fn_body;
+use hir::{db::HirDatabase, HirDisplay, Semantics};
+use ide_db::{famous_defs::FamousDefs, path_transform::PathTransform, RootDatabase, SnippetCap};
+use stdx::format_to;
+use syntax::{
+ ast::{
+ self,
+ edit::{self, AstNodeEdit},
+ edit_in_place::{AttrsOwnerEdit, Removable},
+ make, HasArgList, HasAttrs, HasGenericParams, HasName, HasTypeBounds, Whitespace,
+ },
+ ted, AstNode, AstToken, Direction, SourceFile,
+ SyntaxKind::*,
+ SyntaxNode, TextRange, TextSize, T,
+};
+
+use crate::assist_context::{AssistContext, SourceChangeBuilder};
+
+pub(crate) mod suggest_name;
+mod gen_trait_fn_body;
+
+/// Unwraps a block that consists of a single trivial, single-line
+/// expression into that expression; any other block is returned unchanged
+/// as an expression.
+pub(crate) fn unwrap_trivial_block(block_expr: ast::BlockExpr) -> ast::Expr {
+    match extract_trivial_expression(&block_expr) {
+        Some(expr) if !expr.syntax().text().contains_char('\n') => expr,
+        _ => block_expr.into(),
+    }
+}
+
+/// Returns the single expression a trivial block wraps, if any.
+///
+/// A block counts as trivial when it has no modifier (`unsafe`, `async`, …)
+/// and contains nothing but either a tail expression, or a lone
+/// `continue`/`break`/`return` expression statement. Returns `None`
+/// otherwise.
+pub fn extract_trivial_expression(block_expr: &ast::BlockExpr) -> Option<ast::Expr> {
+    if block_expr.modifier().is_some() {
+        return None;
+    }
+    let stmt_list = block_expr.stmt_list()?;
+    // True if the statement list contains anything besides `thing`,
+    // whitespace and the surrounding curly braces.
+    let has_anything_else = |thing: &SyntaxNode| -> bool {
+        let mut non_trivial_children =
+            stmt_list.syntax().children_with_tokens().filter(|it| match it.kind() {
+                WHITESPACE | T!['{'] | T!['}'] => false,
+                _ => it.as_node() != Some(thing),
+            });
+        non_trivial_children.next().is_some()
+    };
+
+    if let Some(expr) = stmt_list.tail_expr() {
+        if has_anything_else(expr.syntax()) {
+            return None;
+        }
+        return Some(expr);
+    }
+    // Unwrap `{ continue; }`
+    let stmt = stmt_list.statements().next()?;
+    if let ast::Stmt::ExprStmt(expr_stmt) = stmt {
+        if has_anything_else(expr_stmt.syntax()) {
+            return None;
+        }
+        let expr = expr_stmt.expr()?;
+        // Only control-flow expressions are unwrapped from statement position.
+        if matches!(expr.syntax().kind(), CONTINUE_EXPR | BREAK_EXPR | RETURN_EXPR) {
+            return Some(expr);
+        }
+    }
+    None
+}
+
+/// Heuristically finds a test-related attribute on `fn_def`, so that custom
+/// test annotations such as `#[test_case(...)]` and `#[tokio::test]` are
+/// recognized in addition to the regular `#[test]` annotation.
+///
+/// May produce false positives — e.g. `#[wasm_bindgen_test]` requires a
+/// different command to run the test — but that is better than not having
+/// runnables for those tests at all.
+pub fn test_related_attribute(fn_def: &ast::Fn) -> Option<ast::Attr> {
+    fn_def.attrs().find(|attr| {
+        attr.path().map_or(false, |path| {
+            let text = path.syntax().text().to_string();
+            text.starts_with("test") || text.ends_with("test")
+        })
+    })
+}
+
+/// Controls which associated items `filter_assoc_items` keeps.
+#[derive(Copy, Clone, PartialEq)]
+pub enum DefaultMethods {
+    /// Keep only items that have a default (provided) body.
+    Only,
+    /// Keep only items without a body, i.e. those that must be implemented.
+    No,
+}
+
+/// Resolves `items` to their AST nodes and filters them by `default_methods`.
+///
+/// Functions and consts are kept when they have a body and
+/// `DefaultMethods::Only` is requested, or lack a body and
+/// `DefaultMethods::No` is requested; other item kinds are kept only for
+/// `DefaultMethods::No`. Items without a source or without a name are
+/// dropped.
+pub fn filter_assoc_items(
+    sema: &Semantics<'_, RootDatabase>,
+    items: &[hir::AssocItem],
+    default_methods: DefaultMethods,
+) -> Vec<ast::AssocItem> {
+    fn has_def_name(item: &ast::AssocItem) -> bool {
+        match item {
+            ast::AssocItem::Fn(def) => def.name(),
+            ast::AssocItem::TypeAlias(def) => def.name(),
+            ast::AssocItem::Const(def) => def.name(),
+            ast::AssocItem::MacroCall(_) => None,
+        }
+        .is_some()
+    }
+
+    items
+        .iter()
+        // Note: This throws away items with no source.
+        .filter_map(|&i| {
+            let item = match i {
+                hir::AssocItem::Function(i) => ast::AssocItem::Fn(sema.source(i)?.value),
+                hir::AssocItem::TypeAlias(i) => ast::AssocItem::TypeAlias(sema.source(i)?.value),
+                hir::AssocItem::Const(i) => ast::AssocItem::Const(sema.source(i)?.value),
+            };
+            Some(item)
+        })
+        .filter(has_def_name)
+        .filter(|it| match it {
+            ast::AssocItem::Fn(def) => matches!(
+                (default_methods, def.body()),
+                (DefaultMethods::Only, Some(_)) | (DefaultMethods::No, None)
+            ),
+            ast::AssocItem::Const(def) => matches!(
+                (default_methods, def.body()),
+                (DefaultMethods::Only, Some(_)) | (DefaultMethods::No, None)
+            ),
+            _ => default_methods == DefaultMethods::No,
+        })
+        .collect::<Vec<_>>()
+}
+
+/// Adds the given trait `items` to `impl_`: paths are rewritten for the
+/// target scope, attributes and docs are stripped, body-less functions get
+/// a `todo!()` body, and type-alias bounds are removed.
+///
+/// Returns the updated (detached, mutable) impl together with the first
+/// item that was added. Panics if `items` is empty.
+pub fn add_trait_assoc_items_to_impl(
+    sema: &Semantics<'_, RootDatabase>,
+    items: Vec<ast::AssocItem>,
+    trait_: hir::Trait,
+    impl_: ast::Impl,
+    target_scope: hir::SemanticsScope<'_>,
+) -> (ast::Impl, ast::AssocItem) {
+    let source_scope = sema.scope_for_def(trait_);
+
+    // Rewrites paths from the trait's scope into the impl's scope.
+    let transform = PathTransform::trait_impl(&target_scope, &source_scope, trait_, impl_.clone());
+
+    let items = items.into_iter().map(|assoc_item| {
+        transform.apply(assoc_item.syntax());
+        assoc_item.remove_attrs_and_docs();
+        assoc_item
+    });
+
+    let res = impl_.clone_for_update();
+
+    let assoc_item_list = res.get_or_create_assoc_item_list();
+    let mut first_item = None;
+    for item in items {
+        first_item.get_or_insert_with(|| item.clone());
+        match &item {
+            // Trait methods without a default body need a stub body in the impl.
+            ast::AssocItem::Fn(fn_) if fn_.body().is_none() => {
+                let body = make::block_expr(None, Some(make::ext::expr_todo()))
+                    .indent(edit::IndentLevel(1));
+                ted::replace(fn_.get_or_create_body().syntax(), body.clone_for_update().syntax())
+            }
+            // `type T: Bound;` bounds are not allowed on impl type aliases.
+            ast::AssocItem::TypeAlias(type_alias) => {
+                if let Some(type_bound_list) = type_alias.type_bound_list() {
+                    type_bound_list.remove()
+                }
+            }
+            _ => {}
+        }
+
+        assoc_item_list.add_item(item)
+    }
+
+    (res, first_item.unwrap())
+}
+
+/// Where to place the snippet tab stop rendered by `render_snippet`.
+#[derive(Clone, Copy, Debug)]
+pub(crate) enum Cursor<'a> {
+    /// Replace the node with a `${0:…}` placeholder.
+    Replace(&'a SyntaxNode),
+    /// Insert a `$0` tab stop just before the node.
+    Before(&'a SyntaxNode),
+}
+
+impl<'a> Cursor<'a> {
+    /// Returns the node the cursor is attached to.
+    fn node(self) -> &'a SyntaxNode {
+        match self {
+            Cursor::Replace(node) | Cursor::Before(node) => node,
+        }
+    }
+}
+
+/// Renders `node` as a string with an LSP snippet tab stop inserted at
+/// `cursor`: `Replace` wraps the cursor node in a `${0:…}` placeholder,
+/// `Before` prefixes it with `$0`. Snippet metacharacters (`{`, `}`, `$`)
+/// in the cursor node's text are escaped.
+///
+/// Panics if the cursor node is not a descendant of `node`.
+pub(crate) fn render_snippet(_cap: SnippetCap, node: &SyntaxNode, cursor: Cursor<'_>) -> String {
+    assert!(cursor.node().ancestors().any(|it| it == *node));
+    let range = cursor.node().text_range() - node.text_range().start();
+    let range: ops::Range<usize> = range.into();
+
+    let mut placeholder = cursor.node().to_string();
+    escape(&mut placeholder);
+    // Bind `_` here: binding the node as `placeholder` in the arm patterns
+    // would shadow the escaped string above and emit unescaped text.
+    let tab_stop = match cursor {
+        Cursor::Replace(_) => format!("${{0:{placeholder}}}"),
+        Cursor::Before(_) => format!("$0{placeholder}"),
+    };
+
+    let mut buf = node.to_string();
+    buf.replace_range(range, &tab_stop);
+    return buf;
+
+    // Escapes LSP snippet metacharacters so they render literally.
+    fn escape(buf: &mut String) {
+        stdx::replace(buf, '{', r"\{");
+        stdx::replace(buf, '}', r"\}");
+        stdx::replace(buf, '$', r"\$");
+    }
+}
+
+/// Returns the offset at which a visibility modifier should be inserted:
+/// the start of the first child that is not whitespace, a comment, or an
+/// attribute, falling back to the start of `node` itself.
+pub(crate) fn vis_offset(node: &SyntaxNode) -> TextSize {
+    for child in node.children_with_tokens() {
+        match child.kind() {
+            WHITESPACE | COMMENT | ATTR => continue,
+            _ => return child.text_range().start(),
+        }
+    }
+    node.text_range().start()
+}
+
+/// Logically negates `expr`, preferring a simplified special-case form
+/// (e.g. `==` → `!=`, `is_some()` → `is_none()`) and falling back to
+/// prefixing the expression with `!`.
+pub(crate) fn invert_boolean_expression(expr: ast::Expr) -> ast::Expr {
+    invert_special_case(&expr).unwrap_or_else(|| make::expr_prefix(T![!], expr))
+}
+
+/// Attempts a "nice" negation of `expr` without a leading `!`:
+/// flips comparison operators, swaps `is_some`/`is_none` and
+/// `is_ok`/`is_err` calls, removes a double negation, and flips boolean
+/// literals. Returns `None` when no special case applies.
+fn invert_special_case(expr: &ast::Expr) -> Option<ast::Expr> {
+    match expr {
+        ast::Expr::BinExpr(bin) => {
+            let bin = bin.clone_for_update();
+            let op_token = bin.op_token()?;
+            // Each comparison operator maps to its logical complement.
+            let rev_token = match op_token.kind() {
+                T![==] => T![!=],
+                T![!=] => T![==],
+                T![<] => T![>=],
+                T![<=] => T![>],
+                T![>] => T![<=],
+                T![>=] => T![<],
+                // Parenthesize other expressions before prefixing `!`
+                _ => return Some(make::expr_prefix(T![!], make::expr_paren(expr.clone()))),
+            };
+            ted::replace(op_token, make::token(rev_token));
+            Some(bin.into())
+        }
+        ast::Expr::MethodCallExpr(mce) => {
+            let receiver = mce.receiver()?;
+            let method = mce.name_ref()?;
+            let arg_list = mce.arg_list()?;
+
+            // Only these well-known predicate pairs are safe to swap.
+            let method = match method.text().as_str() {
+                "is_some" => "is_none",
+                "is_none" => "is_some",
+                "is_ok" => "is_err",
+                "is_err" => "is_ok",
+                _ => return None,
+            };
+            Some(make::expr_method_call(receiver, make::name_ref(method), arg_list))
+        }
+        // `!x` inverts to `x`, dropping redundant parentheses.
+        ast::Expr::PrefixExpr(pe) if pe.op_kind()? == ast::UnaryOp::Not => match pe.expr()? {
+            ast::Expr::ParenExpr(parexpr) => parexpr.expr(),
+            _ => pe.expr(),
+        },
+        ast::Expr::Literal(lit) => match lit.kind() {
+            ast::LiteralKind::Bool(b) => match b {
+                true => Some(ast::Expr::Literal(make::expr_literal("false"))),
+                false => Some(ast::Expr::Literal(make::expr_literal("true"))),
+            },
+            _ => None,
+        },
+        _ => None,
+    }
+}
+
+/// Iterates both sibling-traversal directions, `Next` first.
+pub(crate) fn next_prev() -> impl Iterator<Item = Direction> {
+    [Direction::Next, Direction::Prev].into_iter()
+}
+
+/// Heuristically checks whether `pat` could match the variant pattern
+/// `var` by comparing the textual "head" (first child node, i.e. the
+/// path/constructor part) of each pattern, looking through `ident @ pat`
+/// bindings on `pat`.
+pub(crate) fn does_pat_match_variant(pat: &ast::Pat, var: &ast::Pat) -> bool {
+    let first_node_text = |pat: &ast::Pat| pat.syntax().first_child().map(|node| node.text());
+
+    let pat_head = match pat {
+        ast::Pat::IdentPat(bind_pat) => match bind_pat.pat() {
+            Some(p) => first_node_text(&p),
+            // A plain binding (`x`) has no inner pattern; fall back to
+            // comparing the full pattern text.
+            None => return pat.syntax().text() == var.syntax().text(),
+        },
+        pat => first_node_text(pat),
+    };
+
+    let var_head = first_node_text(var);
+
+    pat_head == var_head
+}
+
+/// Returns `true` if `pat` nests other patterns more than one level deep
+/// (e.g. `Some(Ok(_))`), `false` for flat patterns.
+pub(crate) fn does_nested_pattern(pat: &ast::Pat) -> bool {
+    calc_depth(pat, 0) > 1
+}
+
+/// Computes the nesting depth of `pat`, where `depth` is the depth of the
+/// enclosing pattern.
+fn calc_depth(pat: &ast::Pat, depth: usize) -> usize {
+    match pat {
+        // FIXME: Other patterns may also be nested. Currently it simply supports only `TupleStructPat`
+        ast::Pat::TupleStructPat(pat) => pat
+            .fields()
+            .map(|field| calc_depth(&field, depth + 1))
+            .max()
+            .unwrap_or(depth),
+        // All other pattern kinds contribute no additional nesting.
+        _ => depth,
+    }
+}
+
+// Uses a syntax-driven approach to find any impl blocks for the struct that
+// exist within the module/file
+//
+// Returns `None` if we've found an existing fn
+//
+// FIXME: change the new fn checking to a more semantic approach when that's more
+// viable (e.g. we process proc macros, etc)
+// FIXME: this partially overlaps with `find_impl_block_*`
+
+/// `find_struct_impl` looks for impl of a struct, but this also has additional feature
+/// where it takes a list of function names and check if they exist inside impl_, if
+/// even one match is found, it returns None
+///
+/// Result meaning: `None` = a listed function already exists, don't offer
+/// the assist; `Some(None)` = no inherent impl found, one must be created;
+/// `Some(Some(impl_))` = extend this existing inherent impl.
+pub(crate) fn find_struct_impl(
+    ctx: &AssistContext<'_>,
+    adt: &ast::Adt,
+    names: &[String],
+) -> Option<Option<ast::Impl>> {
+    let db = ctx.db();
+    // Search siblings of the ADT, i.e. impls in the same module/file only.
+    let module = adt.syntax().parent()?;
+
+    let struct_def = ctx.sema.to_def(adt)?;
+
+    let block = module.descendants().filter_map(ast::Impl::cast).find_map(|impl_blk| {
+        let blk = ctx.sema.to_def(&impl_blk)?;
+
+        // FIXME: handle e.g. `struct S<T>; impl<U> S<U> {}`
+        // (we currently use the wrong type parameter)
+        // also we wouldn't want to use e.g. `impl S<u32>`
+
+        let same_ty = match blk.self_ty(db).as_adt() {
+            Some(def) => def == struct_def,
+            None => false,
+        };
+        let not_trait_impl = blk.trait_(db).is_none();
+
+        if !(same_ty && not_trait_impl) {
+            None
+        } else {
+            Some(impl_blk)
+        }
+    });
+
+    if let Some(ref impl_blk) = block {
+        if has_any_fn(impl_blk, names) {
+            return None;
+        }
+    }
+
+    Some(block)
+}
+
+/// Returns `true` if `imp` declares a function whose name equals any of
+/// `names`, compared ASCII case-insensitively.
+fn has_any_fn(imp: &ast::Impl, names: &[String]) -> bool {
+    imp.assoc_item_list().map_or(false, |item_list| {
+        item_list.assoc_items().any(|item| match item {
+            ast::AssocItem::Fn(f) => f.name().map_or(false, |name| {
+                names.iter().any(|n| n.eq_ignore_ascii_case(&name.text()))
+            }),
+            _ => false,
+        })
+    })
+}
+
+/// Find the start of the `impl` block for the given `ast::Impl`.
+//
+// FIXME: this partially overlaps with `find_struct_impl`
+pub(crate) fn find_impl_block_start(impl_def: ast::Impl, buf: &mut String) -> Option<TextSize> {
+ buf.push('\n');
+ let start = impl_def.assoc_item_list().and_then(|it| it.l_curly_token())?.text_range().end();
+ Some(start)
+}
+
+/// Find the end of the `impl` block for the given `ast::Impl`.
+//
+// FIXME: this partially overlaps with `find_struct_impl`
+pub(crate) fn find_impl_block_end(impl_def: ast::Impl, buf: &mut String) -> Option<TextSize> {
+ buf.push('\n');
+ let end = impl_def
+ .assoc_item_list()
+ .and_then(|it| it.r_curly_token())?
+ .prev_sibling_or_token()?
+ .text_range()
+ .end();
+ Some(end)
+}
+
+// Generates the surrounding `impl Type { <code> }` including type and lifetime
+// parameters
+pub(crate) fn generate_impl_text(adt: &ast::Adt, code: &str) -> String {
+ generate_impl_text_inner(adt, None, code)
+}
+
+// Generates the surrounding `impl <trait> for Type { <code> }` including type
+// and lifetime parameters
+pub(crate) fn generate_trait_impl_text(adt: &ast::Adt, trait_text: &str, code: &str) -> String {
+ generate_impl_text_inner(adt, Some(trait_text), code)
+}
+
+fn generate_impl_text_inner(adt: &ast::Adt, trait_text: Option<&str>, code: &str) -> String {
+ // Ensure lifetime params are before type & const params
+ let generic_params = adt.generic_param_list().map(|generic_params| {
+ let lifetime_params =
+ generic_params.lifetime_params().map(ast::GenericParam::LifetimeParam);
+ let ty_or_const_params = generic_params.type_or_const_params().filter_map(|param| {
+ // remove defaults since they can't be specified in impls
+ match param {
+ ast::TypeOrConstParam::Type(param) => {
+ let param = param.clone_for_update();
+ param.remove_default();
+ Some(ast::GenericParam::TypeParam(param))
+ }
+ ast::TypeOrConstParam::Const(param) => {
+ let param = param.clone_for_update();
+ param.remove_default();
+ Some(ast::GenericParam::ConstParam(param))
+ }
+ }
+ });
+
+ make::generic_param_list(itertools::chain(lifetime_params, ty_or_const_params))
+ });
+
+ // FIXME: use syntax::make & mutable AST apis instead
+ // `trait_text` and `code` can't be opaque blobs of text
+ let mut buf = String::with_capacity(code.len());
+
+ // Copy any cfg attrs from the original adt
+ buf.push_str("\n\n");
+ let cfg_attrs = adt
+ .attrs()
+ .filter(|attr| attr.as_simple_call().map(|(name, _arg)| name == "cfg").unwrap_or(false));
+ cfg_attrs.for_each(|attr| buf.push_str(&format!("{attr}\n")));
+
+ // `impl{generic_params} {trait_text} for {name}{generic_params.to_generic_args()}`
+ buf.push_str("impl");
+ if let Some(generic_params) = &generic_params {
+ format_to!(buf, "{generic_params}");
+ }
+ buf.push(' ');
+ if let Some(trait_text) = trait_text {
+ buf.push_str(trait_text);
+ buf.push_str(" for ");
+ }
+ buf.push_str(&adt.name().unwrap().text());
+ if let Some(generic_params) = generic_params {
+ format_to!(buf, "{}", generic_params.to_generic_args());
+ }
+
+ match adt.where_clause() {
+ Some(where_clause) => {
+ format_to!(buf, "\n{where_clause}\n{{\n{code}\n}}");
+ }
+ None => {
+ format_to!(buf, " {{\n{code}\n}}");
+ }
+ }
+
+ buf
+}
+
+pub(crate) fn add_method_to_adt(
+ builder: &mut SourceChangeBuilder,
+ adt: &ast::Adt,
+ impl_def: Option<ast::Impl>,
+ method: &str,
+) {
+ let mut buf = String::with_capacity(method.len() + 2);
+ if impl_def.is_some() {
+ buf.push('\n');
+ }
+ buf.push_str(method);
+
+ let start_offset = impl_def
+ .and_then(|impl_def| find_impl_block_end(impl_def, &mut buf))
+ .unwrap_or_else(|| {
+ buf = generate_impl_text(adt, &buf);
+ adt.syntax().text_range().end()
+ });
+
+ builder.insert(start_offset, buf);
+}
+
+#[derive(Debug)]
+pub(crate) struct ReferenceConversion {
+ conversion: ReferenceConversionType,
+ ty: hir::Type,
+}
+
+#[derive(Debug)]
+enum ReferenceConversionType {
+ // reference can be stripped if the type is Copy
+ Copy,
+ // &String -> &str
+ AsRefStr,
+ // &Vec<T> -> &[T]
+ AsRefSlice,
+ // &Box<T> -> &T
+ Dereferenced,
+ // &Option<T> -> Option<&T>
+ Option,
+ // &Result<T, E> -> Result<&T, &E>
+ Result,
+}
+
+impl ReferenceConversion {
+ pub(crate) fn convert_type(&self, db: &dyn HirDatabase) -> String {
+ match self.conversion {
+ ReferenceConversionType::Copy => self.ty.display(db).to_string(),
+ ReferenceConversionType::AsRefStr => "&str".to_string(),
+ ReferenceConversionType::AsRefSlice => {
+ let type_argument_name =
+ self.ty.type_arguments().next().unwrap().display(db).to_string();
+ format!("&[{type_argument_name}]")
+ }
+ ReferenceConversionType::Dereferenced => {
+ let type_argument_name =
+ self.ty.type_arguments().next().unwrap().display(db).to_string();
+ format!("&{type_argument_name}")
+ }
+ ReferenceConversionType::Option => {
+ let type_argument_name =
+ self.ty.type_arguments().next().unwrap().display(db).to_string();
+ format!("Option<&{type_argument_name}>")
+ }
+ ReferenceConversionType::Result => {
+ let mut type_arguments = self.ty.type_arguments();
+ let first_type_argument_name =
+ type_arguments.next().unwrap().display(db).to_string();
+ let second_type_argument_name =
+ type_arguments.next().unwrap().display(db).to_string();
+ format!("Result<&{first_type_argument_name}, &{second_type_argument_name}>")
+ }
+ }
+ }
+
+ pub(crate) fn getter(&self, field_name: String) -> String {
+ match self.conversion {
+ ReferenceConversionType::Copy => format!("self.{field_name}"),
+ ReferenceConversionType::AsRefStr
+ | ReferenceConversionType::AsRefSlice
+ | ReferenceConversionType::Dereferenced
+ | ReferenceConversionType::Option
+ | ReferenceConversionType::Result => format!("self.{field_name}.as_ref()"),
+ }
+ }
+}
+
+// FIXME: It should return a new hir::Type, but currently constructing new types is too cumbersome
+// and all users of this function operate on string type names, so they can do the conversion
+// itself themselves.
+pub(crate) fn convert_reference_type(
+ ty: hir::Type,
+ db: &RootDatabase,
+ famous_defs: &FamousDefs<'_, '_>,
+) -> Option<ReferenceConversion> {
+ handle_copy(&ty, db)
+ .or_else(|| handle_as_ref_str(&ty, db, famous_defs))
+ .or_else(|| handle_as_ref_slice(&ty, db, famous_defs))
+ .or_else(|| handle_dereferenced(&ty, db, famous_defs))
+ .or_else(|| handle_option_as_ref(&ty, db, famous_defs))
+ .or_else(|| handle_result_as_ref(&ty, db, famous_defs))
+ .map(|conversion| ReferenceConversion { ty, conversion })
+}
+
+fn handle_copy(ty: &hir::Type, db: &dyn HirDatabase) -> Option<ReferenceConversionType> {
+ ty.is_copy(db).then(|| ReferenceConversionType::Copy)
+}
+
+fn handle_as_ref_str(
+ ty: &hir::Type,
+ db: &dyn HirDatabase,
+ famous_defs: &FamousDefs<'_, '_>,
+) -> Option<ReferenceConversionType> {
+ let str_type = hir::BuiltinType::str().ty(db);
+
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[str_type])
+ .then(|| ReferenceConversionType::AsRefStr)
+}
+
+fn handle_as_ref_slice(
+ ty: &hir::Type,
+ db: &dyn HirDatabase,
+ famous_defs: &FamousDefs<'_, '_>,
+) -> Option<ReferenceConversionType> {
+ let type_argument = ty.type_arguments().next()?;
+ let slice_type = hir::Type::new_slice(type_argument);
+
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[slice_type])
+ .then(|| ReferenceConversionType::AsRefSlice)
+}
+
+fn handle_dereferenced(
+ ty: &hir::Type,
+ db: &dyn HirDatabase,
+ famous_defs: &FamousDefs<'_, '_>,
+) -> Option<ReferenceConversionType> {
+ let type_argument = ty.type_arguments().next()?;
+
+ ty.impls_trait(db, famous_defs.core_convert_AsRef()?, &[type_argument])
+ .then(|| ReferenceConversionType::Dereferenced)
+}
+
+fn handle_option_as_ref(
+ ty: &hir::Type,
+ db: &dyn HirDatabase,
+ famous_defs: &FamousDefs<'_, '_>,
+) -> Option<ReferenceConversionType> {
+ if ty.as_adt() == famous_defs.core_option_Option()?.ty(db).as_adt() {
+ Some(ReferenceConversionType::Option)
+ } else {
+ None
+ }
+}
+
+fn handle_result_as_ref(
+ ty: &hir::Type,
+ db: &dyn HirDatabase,
+ famous_defs: &FamousDefs<'_, '_>,
+) -> Option<ReferenceConversionType> {
+ if ty.as_adt() == famous_defs.core_result_Result()?.ty(db).as_adt() {
+ Some(ReferenceConversionType::Result)
+ } else {
+ None
+ }
+}
+
+pub(crate) fn get_methods(items: &ast::AssocItemList) -> Vec<ast::Fn> {
+ items
+ .assoc_items()
+ .flat_map(|i| match i {
+ ast::AssocItem::Fn(f) => Some(f),
+ _ => None,
+ })
+ .filter(|f| f.name().is_some())
+ .collect()
+}
+
+/// Trim(remove leading and trailing whitespace) `initial_range` in `source_file`, return the trimmed range.
+pub(crate) fn trimmed_text_range(source_file: &SourceFile, initial_range: TextRange) -> TextRange {
+ let mut trimmed_range = initial_range;
+ while source_file
+ .syntax()
+ .token_at_offset(trimmed_range.start())
+ .find_map(Whitespace::cast)
+ .is_some()
+ && trimmed_range.start() < trimmed_range.end()
+ {
+ let start = trimmed_range.start() + TextSize::from(1);
+ trimmed_range = TextRange::new(start, trimmed_range.end());
+ }
+ while source_file
+ .syntax()
+ .token_at_offset(trimmed_range.end())
+ .find_map(Whitespace::cast)
+ .is_some()
+ && trimmed_range.start() < trimmed_range.end()
+ {
+ let end = trimmed_range.end() - TextSize::from(1);
+ trimmed_range = TextRange::new(trimmed_range.start(), end);
+ }
+ trimmed_range
+}
+
+/// Convert a list of function params to a list of arguments that can be passed
+/// into a function call.
+pub(crate) fn convert_param_list_to_arg_list(list: ast::ParamList) -> ast::ArgList {
+ let mut args = vec![];
+ for param in list.params() {
+ if let Some(ast::Pat::IdentPat(pat)) = param.pat() {
+ if let Some(name) = pat.name() {
+ let name = name.to_string();
+ let expr = make::expr_path(make::ext::ident_path(&name));
+ args.push(expr);
+ }
+ }
+ }
+ make::arg_list(args)
+}
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "ide-completion"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+itertools = "0.10.5"
+
+once_cell = "1.15.0"
+smallvec = "1.10.0"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+text-edit = { path = "../text-edit", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+ide-db = { path = "../ide-db", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+
+# completions crate should depend only on the top-level `hir` package. if you need
+# something from some `hir-xxx` subpackage, reexport the API via `hir`.
+hir = { path = "../hir", version = "0.0.0" }
+
+[dev-dependencies]
+expect-test = "1.4.0"
+
+test-utils = { path = "../test-utils" }
--- /dev/null
- add_type_alias_impl(acc, ctx, replacement_range, type_alias)
+//! Completion for associated items in a trait implementation.
+//!
+//! This module adds the completion items related to implementing associated
+//! items within an `impl Trait for Struct` block. The current context node
+//! must be within either a `FN`, `TYPE_ALIAS`, or `CONST` node
+//! and an direct child of an `IMPL`.
+//!
+//! # Examples
+//!
+//! Considering the following trait `impl`:
+//!
+//! ```ignore
+//! trait SomeTrait {
+//! fn foo();
+//! }
+//!
+//! impl SomeTrait for () {
+//! fn f$0
+//! }
+//! ```
+//!
+//! may result in the completion of the following method:
+//!
+//! ```ignore
+//! # trait SomeTrait {
+//! # fn foo();
+//! # }
+//!
+//! impl SomeTrait for () {
+//! fn foo() {}$0
+//! }
+//! ```
+
+use hir::{self, HasAttrs};
+use ide_db::{
+ path_transform::PathTransform, syntax_helpers::insert_whitespace_into_node,
+ traits::get_missing_assoc_items, SymbolKind,
+};
+use syntax::{
+ ast::{self, edit_in_place::AttrsOwnerEdit},
+ AstNode, SyntaxElement, SyntaxKind, TextRange, T,
+};
+use text_edit::TextEdit;
+
+use crate::{
+ context::PathCompletionCtx, CompletionContext, CompletionItem, CompletionItemKind,
+ CompletionRelevance, Completions,
+};
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+enum ImplCompletionKind {
+ All,
+ Fn,
+ TypeAlias,
+ Const,
+}
+
+pub(crate) fn complete_trait_impl_const(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ name: &Option<ast::Name>,
+) -> Option<()> {
+ complete_trait_impl_name(acc, ctx, name, ImplCompletionKind::Const)
+}
+
+pub(crate) fn complete_trait_impl_type_alias(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ name: &Option<ast::Name>,
+) -> Option<()> {
+ complete_trait_impl_name(acc, ctx, name, ImplCompletionKind::TypeAlias)
+}
+
+pub(crate) fn complete_trait_impl_fn(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ name: &Option<ast::Name>,
+) -> Option<()> {
+ complete_trait_impl_name(acc, ctx, name, ImplCompletionKind::Fn)
+}
+
+fn complete_trait_impl_name(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ name: &Option<ast::Name>,
+ kind: ImplCompletionKind,
+) -> Option<()> {
+ let item = match name {
+ Some(name) => name.syntax().parent(),
+ None => {
+ let token = &ctx.token;
+ match token.kind() {
+ SyntaxKind::WHITESPACE => token.prev_token()?,
+ _ => token.clone(),
+ }
+ .parent()
+ }
+ }?;
+ let item = ctx.sema.original_syntax_node(&item)?;
+ // item -> ASSOC_ITEM_LIST -> IMPL
+ let impl_def = ast::Impl::cast(item.parent()?.parent()?)?;
+ let replacement_range = {
+ // ctx.sema.original_ast_node(item)?;
+ let first_child = item
+ .children_with_tokens()
+ .find(|child| {
+ !matches!(
+ child.kind(),
+ SyntaxKind::COMMENT | SyntaxKind::WHITESPACE | SyntaxKind::ATTR
+ )
+ })
+ .unwrap_or_else(|| SyntaxElement::Node(item.clone()));
+
+ TextRange::new(first_child.text_range().start(), ctx.source_range().end())
+ };
+
+ complete_trait_impl(acc, ctx, kind, replacement_range, &impl_def);
+ Some(())
+}
+
+pub(crate) fn complete_trait_impl_item_by_name(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ path_ctx: &PathCompletionCtx,
+ name_ref: &Option<ast::NameRef>,
+ impl_: &Option<ast::Impl>,
+) {
+ if !path_ctx.is_trivial_path() {
+ return;
+ }
+ if let Some(impl_) = impl_ {
+ complete_trait_impl(
+ acc,
+ ctx,
+ ImplCompletionKind::All,
+ match name_ref {
+ Some(name) => name.syntax().text_range(),
+ None => ctx.source_range(),
+ },
+ impl_,
+ );
+ }
+}
+
+fn complete_trait_impl(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ kind: ImplCompletionKind,
+ replacement_range: TextRange,
+ impl_def: &ast::Impl,
+) {
+ if let Some(hir_impl) = ctx.sema.to_def(impl_def) {
+ get_missing_assoc_items(&ctx.sema, impl_def).into_iter().for_each(|item| {
+ use self::ImplCompletionKind::*;
+ match (item, kind) {
+ (hir::AssocItem::Function(func), All | Fn) => {
+ add_function_impl(acc, ctx, replacement_range, func, hir_impl)
+ }
+ (hir::AssocItem::TypeAlias(type_alias), All | TypeAlias) => {
- if let ast::AssocItem::Fn(func) = &assoc_item {
- func.remove_attrs_and_docs();
- }
++ add_type_alias_impl(acc, ctx, replacement_range, type_alias, hir_impl)
+ }
+ (hir::AssocItem::Const(const_), All | Const) => {
+ add_const_impl(acc, ctx, replacement_range, const_, hir_impl)
+ }
+ _ => {}
+ }
+ });
+ }
+}
+
+fn add_function_impl(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ replacement_range: TextRange,
+ func: hir::Function,
+ impl_def: hir::Impl,
+) {
+ let fn_name = func.name(ctx.db);
+
+ let label = format!(
+ "fn {}({})",
+ fn_name,
+ if func.assoc_fn_params(ctx.db).is_empty() { "" } else { ".." }
+ );
+
+ let completion_kind = if func.has_self_param(ctx.db) {
+ CompletionItemKind::Method
+ } else {
+ CompletionItemKind::SymbolKind(SymbolKind::Function)
+ };
+
+ let mut item = CompletionItem::new(completion_kind, replacement_range, label);
+ item.lookup_by(format!("fn {}", fn_name))
+ .set_documentation(func.docs(ctx.db))
+ .set_relevance(CompletionRelevance { is_item_from_trait: true, ..Default::default() });
+
+ if let Some(source) = ctx.sema.source(func) {
+ let assoc_item = ast::AssocItem::Fn(source.value);
+ if let Some(transformed_item) = get_transformed_assoc_item(ctx, assoc_item, impl_def) {
+ let transformed_fn = match transformed_item {
+ ast::AssocItem::Fn(func) => func,
+ _ => unreachable!(),
+ };
+
+ let function_decl = function_declaration(&transformed_fn, source.file_id.is_macro());
+ match ctx.config.snippet_cap {
+ Some(cap) => {
+ let snippet = format!("{} {{\n $0\n}}", function_decl);
+ item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet));
+ }
+ None => {
+ let header = format!("{} {{", function_decl);
+ item.text_edit(TextEdit::replace(replacement_range, header));
+ }
+ };
+ item.add_to(acc);
+ }
+ }
+}
+
+/// Transform a relevant associated item to inline generics from the impl, remove attrs and docs, etc.
+fn get_transformed_assoc_item(
+ ctx: &CompletionContext<'_>,
+ assoc_item: ast::AssocItem,
+ impl_def: hir::Impl,
+) -> Option<ast::AssocItem> {
+ let assoc_item = assoc_item.clone_for_update();
+ let trait_ = impl_def.trait_(ctx.db)?;
+ let source_scope = &ctx.sema.scope_for_def(trait_);
+ let target_scope = &ctx.sema.scope(ctx.sema.source(impl_def)?.syntax().value)?;
+ let transform = PathTransform::trait_impl(
+ target_scope,
+ source_scope,
+ trait_,
+ ctx.sema.source(impl_def)?.value,
+ );
+
+ transform.apply(assoc_item.syntax());
- let alias_name = type_alias.name(ctx.db);
- let (alias_name, escaped_name) =
- (alias_name.unescaped().to_smol_str(), alias_name.to_smol_str());
++ assoc_item.remove_attrs_and_docs();
+ Some(assoc_item)
+}
+
+fn add_type_alias_impl(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ replacement_range: TextRange,
+ type_alias: hir::TypeAlias,
++ impl_def: hir::Impl,
+) {
- let replacement = format!("type {} = ", escaped_name);
++ let alias_name = type_alias.name(ctx.db).unescaped().to_smol_str();
+
+ let label = format!("type {} =", alias_name);
- match ctx.config.snippet_cap {
- Some(cap) => item
- .snippet_edit(cap, TextEdit::replace(replacement_range, format!("{}$0;", replacement))),
- None => item.text_edit(TextEdit::replace(replacement_range, replacement)),
- };
- item.add_to(acc);
+
+ let mut item = CompletionItem::new(SymbolKind::TypeAlias, replacement_range, label);
+ item.lookup_by(format!("type {}", alias_name))
+ .set_documentation(type_alias.docs(ctx.db))
+ .set_relevance(CompletionRelevance { is_item_from_trait: true, ..Default::default() });
- const_.remove_attrs_and_docs();
++
++ if let Some(source) = ctx.sema.source(type_alias) {
++ let assoc_item = ast::AssocItem::TypeAlias(source.value);
++ if let Some(transformed_item) = get_transformed_assoc_item(ctx, assoc_item, impl_def) {
++ let transformed_ty = match transformed_item {
++ ast::AssocItem::TypeAlias(ty) => ty,
++ _ => unreachable!(),
++ };
++
++ let start = transformed_ty.syntax().text_range().start();
++ let Some(end) = transformed_ty
++ .eq_token()
++ .map(|tok| tok.text_range().start())
++ .or(transformed_ty.semicolon_token().map(|tok| tok.text_range().start())) else { return };
++
++ let len = end - start;
++ let mut decl = transformed_ty.syntax().text().slice(..len).to_string();
++ if !decl.ends_with(' ') {
++ decl.push(' ');
++ }
++ decl.push_str("= ");
++
++ match ctx.config.snippet_cap {
++ Some(cap) => {
++ let snippet = format!("{}$0;", decl);
++ item.snippet_edit(cap, TextEdit::replace(replacement_range, snippet));
++ }
++ None => {
++ item.text_edit(TextEdit::replace(replacement_range, decl));
++ }
++ };
++ item.add_to(acc);
++ }
++ }
+}
+
+fn add_const_impl(
+ acc: &mut Completions,
+ ctx: &CompletionContext<'_>,
+ replacement_range: TextRange,
+ const_: hir::Const,
+ impl_def: hir::Impl,
+) {
+ let const_name = const_.name(ctx.db).map(|n| n.to_smol_str());
+
+ if let Some(const_name) = const_name {
+ if let Some(source) = ctx.sema.source(const_) {
+ let assoc_item = ast::AssocItem::Const(source.value);
+ if let Some(transformed_item) = get_transformed_assoc_item(ctx, assoc_item, impl_def) {
+ let transformed_const = match transformed_item {
+ ast::AssocItem::Const(const_) => const_,
+ _ => unreachable!(),
+ };
+
+ let label = make_const_compl_syntax(&transformed_const, source.file_id.is_macro());
+ let replacement = format!("{} ", label);
+
+ let mut item = CompletionItem::new(SymbolKind::Const, replacement_range, label);
+ item.lookup_by(format!("const {}", const_name))
+ .set_documentation(const_.docs(ctx.db))
+ .set_relevance(CompletionRelevance {
+ is_item_from_trait: true,
+ ..Default::default()
+ });
+ match ctx.config.snippet_cap {
+ Some(cap) => item.snippet_edit(
+ cap,
+ TextEdit::replace(replacement_range, format!("{}$0;", replacement)),
+ ),
+ None => item.text_edit(TextEdit::replace(replacement_range, replacement)),
+ };
+ item.add_to(acc);
+ }
+ }
+ }
+}
+
+fn make_const_compl_syntax(const_: &ast::Const, needs_whitespace: bool) -> String {
- node.remove_attrs_and_docs();
-
+ let const_ = if needs_whitespace {
+ insert_whitespace_into_node::insert_ws_into(const_.syntax().clone())
+ } else {
+ const_.syntax().clone()
+ };
+
+ let start = const_.text_range().start();
+ let const_end = const_.text_range().end();
+
+ let end = const_
+ .children_with_tokens()
+ .find(|s| s.kind() == T![;] || s.kind() == T![=])
+ .map_or(const_end, |f| f.text_range().start());
+
+ let len = end - start;
+ let range = TextRange::new(0.into(), len);
+
+ let syntax = const_.text().slice(range).to_string();
+
+ format!("{} =", syntax.trim_end())
+}
+
+fn function_declaration(node: &ast::Fn, needs_whitespace: bool) -> String {
- let range = TextRange::new(0.into(), len);
-
- let syntax = node.text().slice(range).to_string();
+ let node = if needs_whitespace {
+ insert_whitespace_into_node::insert_ws_into(node.syntax().clone())
+ } else {
+ node.syntax().clone()
+ };
+
+ let start = node.text_range().start();
+ let end = node.text_range().end();
+
+ let end = node
+ .last_child_or_token()
+ .filter(|s| s.kind() == T![;] || s.kind() == SyntaxKind::BLOCK_EXPR)
+ .map_or(end, |f| f.text_range().start());
+
+ let len = end - start;
++ let syntax = node.text().slice(..len).to_string();
+
+ syntax.trim_end().to_owned()
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::tests::{check_edit, completion_list_no_kw};
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list_no_kw(ra_fixture);
+ expect.assert_eq(&actual)
+ }
+
+ #[test]
+ fn no_completion_inside_fn() {
+ check(
+ r"
+trait Test { fn test(); fn test2(); }
+struct T;
+
+impl Test for T {
+ fn test() {
+ t$0
+ }
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ tt Test
+ bt u32
+ "#]],
+ );
+
+ check(
+ r"
+trait Test { fn test(); fn test2(); }
+struct T;
+
+impl Test for T {
+ fn test() {
+ fn t$0
+ }
+}
+",
+ expect![[""]],
+ );
+
+ check(
+ r"
+trait Test { fn test(); fn test2(); }
+struct T;
+
+impl Test for T {
+ fn test() {
+ fn $0
+ }
+}
+",
+ expect![[""]],
+ );
+
+ // https://github.com/rust-lang/rust-analyzer/pull/5976#issuecomment-692332191
+ check(
+ r"
+trait Test { fn test(); fn test2(); }
+struct T;
+
+impl Test for T {
+ fn test() {
+ foo.$0
+ }
+}
+",
+ expect![[r#""#]],
+ );
+
+ check(
+ r"
+trait Test { fn test(_: i32); fn test2(); }
+struct T;
+
+impl Test for T {
+ fn test(t$0)
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ bn &mut self
+ bn &self
+ bn mut self
+ bn self
+ "#]],
+ );
+
+ check(
+ r"
+trait Test { fn test(_: fn()); fn test2(); }
+struct T;
+
+impl Test for T {
+ fn test(f: fn $0)
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ "#]],
+ );
+ }
+
+ #[test]
+ fn no_completion_inside_const() {
+ check(
+ r"
+trait Test { const TEST: fn(); const TEST2: u32; type Test; fn test(); }
+struct T;
+
+impl Test for T {
+ const TEST: fn $0
+}
+",
+ expect![[r#""#]],
+ );
+
+ check(
+ r"
+trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
+struct T;
+
+impl Test for T {
+ const TEST: T$0
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ tt Test
+ bt u32
+ "#]],
+ );
+
+ check(
+ r"
+trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
+struct T;
+
+impl Test for T {
+ const TEST: u32 = f$0
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ tt Test
+ bt u32
+ "#]],
+ );
+
+ check(
+ r"
+trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
+struct T;
+
+impl Test for T {
+ const TEST: u32 = {
+ t$0
+ };
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ tt Test
+ bt u32
+ "#]],
+ );
+
+ check(
+ r"
+trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
+struct T;
+
+impl Test for T {
+ const TEST: u32 = {
+ fn $0
+ };
+}
+",
+ expect![[""]],
+ );
+
+ check(
+ r"
+trait Test { const TEST: u32; const TEST2: u32; type Test; fn test(); }
+struct T;
+
+impl Test for T {
+ const TEST: u32 = {
+ fn t$0
+ };
+}
+",
+ expect![[""]],
+ );
+ }
+
+ #[test]
+ fn no_completion_inside_type() {
+ check(
+ r"
+trait Test { type Test; type Test2; fn test(); }
+struct T;
+
+impl Test for T {
+ type Test = T$0;
+}
+",
+ expect![[r#"
+ sp Self
+ st T
+ tt Test
+ bt u32
+ "#]],
+ );
+
+ check(
+ r"
+trait Test { type Test; type Test2; fn test(); }
+struct T;
+
+impl Test for T {
+ type Test = fn $0;
+}
+",
+ expect![[r#""#]],
+ );
+ }
+
+ #[test]
+ fn name_ref_single_function() {
+ check_edit(
+ "fn test",
+ r#"
+trait Test {
+ fn test();
+}
+struct T;
+
+impl Test for T {
+ t$0
+}
+"#,
+ r#"
+trait Test {
+ fn test();
+}
+struct T;
+
+impl Test for T {
+ fn test() {
+ $0
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn single_function() {
+ check_edit(
+ "fn test",
+ r#"
+trait Test {
+ fn test();
+}
+struct T;
+
+impl Test for T {
+ fn t$0
+}
+"#,
+ r#"
+trait Test {
+ fn test();
+}
+struct T;
+
+impl Test for T {
+ fn test() {
+ $0
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn generic_fn() {
+ check_edit(
+ "fn foo",
+ r#"
+trait Test {
+ fn foo<T>();
+}
+struct T;
+
+impl Test for T {
+ fn f$0
+}
+"#,
+ r#"
+trait Test {
+ fn foo<T>();
+}
+struct T;
+
+impl Test for T {
+ fn foo<T>() {
+ $0
+}
+}
+"#,
+ );
+ check_edit(
+ "fn foo",
+ r#"
+trait Test {
+ fn foo<T>() where T: Into<String>;
+}
+struct T;
+
+impl Test for T {
+ fn f$0
+}
+"#,
+ r#"
+trait Test {
+ fn foo<T>() where T: Into<String>;
+}
+struct T;
+
+impl Test for T {
+ fn foo<T>() where T: Into<String> {
+ $0
+}
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn associated_type() {
+ check_edit(
+ "type SomeType",
+ r#"
+trait Test {
+ type SomeType;
+}
+
+impl Test for () {
+ type S$0
+}
+"#,
+ "
+trait Test {
+ type SomeType;
+}
+
+impl Test for () {
+ type SomeType = $0;\n\
+}
+",
+ );
+ check_edit(
+ "type SomeType",
+ r#"
+trait Test {
+ type SomeType;
+}
+
+impl Test for () {
+ type$0
+}
+"#,
+ "
+trait Test {
+ type SomeType;
+}
+
+impl Test for () {
+ type SomeType = $0;\n\
+}
+",
+ );
+ }
+
+ #[test]
+ fn associated_const() {
+ check_edit(
+ "const SOME_CONST",
+ r#"
+trait Test {
+ const SOME_CONST: u16;
+}
+
+impl Test for () {
+ const S$0
+}
+"#,
+ "
+trait Test {
+ const SOME_CONST: u16;
+}
+
+impl Test for () {
+ const SOME_CONST: u16 = $0;\n\
+}
+",
+ );
+
+ check_edit(
+ "const SOME_CONST",
+ r#"
+trait Test {
+ const SOME_CONST: u16 = 92;
+}
+
+impl Test for () {
+ const S$0
+}
+"#,
+ "
+trait Test {
+ const SOME_CONST: u16 = 92;
+}
+
+impl Test for () {
+ const SOME_CONST: u16 = $0;\n\
+}
+",
+ );
+ }
+
+ #[test]
+ fn complete_without_name() {
+ let test = |completion: &str, hint: &str, completed: &str, next_sibling: &str| {
+ check_edit(
+ completion,
+ &format!(
+ r#"
+trait Test {{
+ type Foo;
+ const CONST: u16;
+ fn bar();
+}}
+struct T;
+
+impl Test for T {{
+ {}
+ {}
+}}
+"#,
+ hint, next_sibling
+ ),
+ &format!(
+ r#"
+trait Test {{
+ type Foo;
+ const CONST: u16;
+ fn bar();
+}}
+struct T;
+
+impl Test for T {{
+ {}
+ {}
+}}
+"#,
+ completed, next_sibling
+ ),
+ )
+ };
+
+ // Enumerate some possible next siblings.
+ for next_sibling in &[
+ "",
+ "fn other_fn() {}", // `const $0 fn` -> `const fn`
+ "type OtherType = i32;",
+ "const OTHER_CONST: i32 = 0;",
+ "async fn other_fn() {}",
+ "unsafe fn other_fn() {}",
+ "default fn other_fn() {}",
+ "default type OtherType = i32;",
+ "default const OTHER_CONST: i32 = 0;",
+ ] {
+ test("fn bar", "fn $0", "fn bar() {\n $0\n}", next_sibling);
+ test("type Foo", "type $0", "type Foo = $0;", next_sibling);
+ test("const CONST", "const $0", "const CONST: u16 = $0;", next_sibling);
+ }
+ }
+
+ #[test]
+ fn snippet_does_not_overwrite_comment_or_attr() {
+ let test = |completion: &str, hint: &str, completed: &str| {
+ check_edit(
+ completion,
+ &format!(
+ r#"
+trait Foo {{
+ type Type;
+ fn function();
+ const CONST: i32 = 0;
+}}
+struct T;
+
+impl Foo for T {{
+ // Comment
+ #[bar]
+ {}
+}}
+"#,
+ hint
+ ),
+ &format!(
+ r#"
+trait Foo {{
+ type Type;
+ fn function();
+ const CONST: i32 = 0;
+}}
+struct T;
+
+impl Foo for T {{
+ // Comment
+ #[bar]
+ {}
+}}
+"#,
+ completed
+ ),
+ )
+ };
+ test("fn function", "fn f$0", "fn function() {\n $0\n}");
+ test("type Type", "type T$0", "type Type = $0;");
+ test("const CONST", "const C$0", "const CONST: i32 = $0;");
+ }
+
+ #[test]
+ fn generics_are_inlined_in_return_type() {
+ check_edit(
+ "fn function",
+ r#"
+trait Foo<T> {
+ fn function() -> T;
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn f$0
+}
+"#,
+ r#"
+trait Foo<T> {
+ fn function() -> T;
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn function() -> u32 {
+ $0
+}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn generics_are_inlined_in_parameter() {
+ check_edit(
+ "fn function",
+ r#"
+trait Foo<T> {
+ fn function(bar: T);
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn f$0
+}
+"#,
+ r#"
+trait Foo<T> {
+ fn function(bar: T);
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn function(bar: u32) {
+ $0
+}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn generics_are_inlined_when_part_of_other_types() {
+ check_edit(
+ "fn function",
+ r#"
+trait Foo<T> {
+ fn function(bar: Vec<T>);
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn f$0
+}
+"#,
+ r#"
+trait Foo<T> {
+ fn function(bar: Vec<T>);
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn function(bar: Vec<u32>) {
+ $0
+}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn generics_are_inlined_complex() {
+ check_edit(
+ "fn function",
+ r#"
+trait Foo<T, U, V> {
+ fn function(bar: Vec<T>, baz: U) -> Arc<Vec<V>>;
+}
+struct Bar;
+
+impl Foo<u32, Vec<usize>, u8> for Bar {
+ fn f$0
+}
+"#,
+ r#"
+trait Foo<T, U, V> {
+ fn function(bar: Vec<T>, baz: U) -> Arc<Vec<V>>;
+}
+struct Bar;
+
+impl Foo<u32, Vec<usize>, u8> for Bar {
+ fn function(bar: Vec<u32>, baz: Vec<usize>) -> Arc<Vec<u8>> {
+ $0
+}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn generics_are_inlined_in_associated_const() {
+ check_edit(
+ "const BAR",
+ r#"
+trait Foo<T> {
+ const BAR: T;
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ const B$0
+}
+"#,
+ r#"
+trait Foo<T> {
+ const BAR: T;
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ const BAR: u32 = $0;
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn generics_are_inlined_in_where_clause() {
+ check_edit(
+ "fn function",
+ r#"
+trait SomeTrait<T> {}
+
+trait Foo<T> {
+ fn function()
+ where Self: SomeTrait<T>;
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn f$0
+}
+"#,
+ r#"
+trait SomeTrait<T> {}
+
+trait Foo<T> {
+ fn function()
+ where Self: SomeTrait<T>;
+}
+struct Bar;
+
+impl Foo<u32> for Bar {
+ fn function()
+ where Self: SomeTrait<u32> {
+ $0
+}
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn works_directly_in_impl() {
+ check(
+ r#"
+trait Tr {
+ fn required();
+}
+
+impl Tr for () {
+ $0
+}
+"#,
+ expect![[r#"
+ fn fn required()
+ "#]],
+ );
+ check(
+ r#"
+trait Tr {
+ fn provided() {}
+ fn required();
+}
+
+impl Tr for () {
+ fn provided() {}
+ $0
+}
+"#,
+ expect![[r#"
+ fn fn required()
+ "#]],
+ );
+ }
+
+ #[test]
+ fn fixes_up_macro_generated() {
+ check_edit(
+ "fn foo",
+ r#"
+macro_rules! noop {
+ ($($item: item)*) => {
+ $($item)*
+ }
+}
+
+noop! {
+ trait Foo {
+ fn foo(&mut self, bar: i64, baz: &mut u32) -> Result<(), u32>;
+ }
+}
+
+struct Test;
+
+impl Foo for Test {
+ $0
+}
+"#,
+ r#"
+macro_rules! noop {
+ ($($item: item)*) => {
+ $($item)*
+ }
+}
+
+noop! {
+ trait Foo {
+ fn foo(&mut self, bar: i64, baz: &mut u32) -> Result<(), u32>;
+ }
+}
+
+struct Test;
+
+impl Foo for Test {
+ fn foo(&mut self,bar:i64,baz: &mut u32) -> Result<(),u32> {
+ $0
+}
+}
++"#,
++ );
++ }
++
++ #[test]
++ fn includes_gat_generics() {
++ check_edit(
++ "type Ty",
++ r#"
++trait Tr<'b> {
++ type Ty<'a: 'b, T: Copy, const C: usize>;
++}
++
++impl<'b> Tr<'b> for () {
++ $0
++}
++"#,
++ r#"
++trait Tr<'b> {
++ type Ty<'a: 'b, T: Copy, const C: usize>;
++}
++
++impl<'b> Tr<'b> for () {
++ type Ty<'a: 'b, T: Copy, const C: usize> = $0;
++}
++"#,
++ );
++ }
++
++ #[test]
++ fn strips_comments() {
++ check_edit(
++ "fn func",
++ r#"
++trait Tr {
++ /// docs
++ #[attr]
++ fn func();
++}
++impl Tr for () {
++ $0
++}
++"#,
++ r#"
++trait Tr {
++ /// docs
++ #[attr]
++ fn func();
++}
++impl Tr for () {
++ fn func() {
++ $0
++}
++}
++"#,
++ );
++ check_edit(
++ "const C",
++ r#"
++trait Tr {
++ /// docs
++ #[attr]
++ const C: usize;
++}
++impl Tr for () {
++ $0
++}
++"#,
++ r#"
++trait Tr {
++ /// docs
++ #[attr]
++ const C: usize;
++}
++impl Tr for () {
++ const C: usize = $0;
++}
++"#,
++ );
++ check_edit(
++ "type Item",
++ r#"
++trait Tr {
++ /// docs
++ #[attr]
++ type Item;
++}
++impl Tr for () {
++ $0
++}
++"#,
++ r#"
++trait Tr {
++ /// docs
++ #[attr]
++ type Item;
++}
++impl Tr for () {
++ type Item = $0;
++}
+"#,
+ );
+ }
+}
--- /dev/null
- ast::Item::Module(it) => it.item_list().is_none(),
+//! Module responsible for analyzing the code surrounding the cursor for completion.
+use std::iter;
+
+use hir::{Semantics, Type, TypeInfo};
+use ide_db::{active_parameter::ActiveParameter, RootDatabase};
+use syntax::{
+ algo::{find_node_at_offset, non_trivia_sibling},
+ ast::{self, AttrKind, HasArgList, HasLoopBody, HasName, NameOrNameRef},
+ match_ast, AstNode, AstToken, Direction, NodeOrToken, SyntaxElement, SyntaxKind, SyntaxNode,
+ SyntaxToken, TextRange, TextSize, T,
+};
+
+use crate::context::{
+ AttrCtx, CompletionAnalysis, DotAccess, DotAccessKind, ExprCtx, ItemListKind, LifetimeContext,
+ LifetimeKind, NameContext, NameKind, NameRefContext, NameRefKind, ParamContext, ParamKind,
+ PathCompletionCtx, PathKind, PatternContext, PatternRefutability, Qualified, QualifierCtx,
+ TypeAscriptionTarget, TypeLocation, COMPLETION_MARKER,
+};
+
+struct ExpansionResult {
+ original_file: SyntaxNode,
+ speculative_file: SyntaxNode,
+ offset: TextSize,
+ fake_ident_token: SyntaxToken,
+ derive_ctx: Option<(SyntaxNode, SyntaxNode, TextSize, ast::Attr)>,
+}
+
+pub(super) struct AnalysisResult {
+ pub(super) analysis: CompletionAnalysis,
+ pub(super) expected: (Option<Type>, Option<ast::NameOrNameRef>),
+ pub(super) qualifier_ctx: QualifierCtx,
+ pub(super) token: SyntaxToken,
+ pub(super) offset: TextSize,
+}
+
+pub(super) fn expand_and_analyze(
+ sema: &Semantics<'_, RootDatabase>,
+ original_file: SyntaxNode,
+ speculative_file: SyntaxNode,
+ offset: TextSize,
+ original_token: &SyntaxToken,
+) -> Option<AnalysisResult> {
+ // as we insert after the offset, right biased will *always* pick the identifier no matter
+ // if there is an ident already typed or not
+ let fake_ident_token = speculative_file.token_at_offset(offset).right_biased()?;
+ // the relative offset between the cursor and the *identifier* token we are completing on
+ let relative_offset = offset - fake_ident_token.text_range().start();
+ // make the offset point to the start of the original token, as that is what the
+ // intermediate offsets calculated in expansion always points to
+ let offset = offset - relative_offset;
+ let expansion = expand(sema, original_file, speculative_file, offset, fake_ident_token);
+ // add the relative offset back, so that left_biased finds the proper token
+ let offset = expansion.offset + relative_offset;
+ let token = expansion.original_file.token_at_offset(offset).left_biased()?;
+
+ analyze(sema, expansion, original_token, &token).map(|(analysis, expected, qualifier_ctx)| {
+ AnalysisResult { analysis, expected, qualifier_ctx, token, offset }
+ })
+}
+
+/// Expand attributes and macro calls at the current cursor position for both the original file
+/// and fake file repeatedly. As soon as one of the two expansions fail we stop so the original
+/// and speculative states stay in sync.
+fn expand(
+ sema: &Semantics<'_, RootDatabase>,
+ mut original_file: SyntaxNode,
+ mut speculative_file: SyntaxNode,
+ mut offset: TextSize,
+ mut fake_ident_token: SyntaxToken,
+) -> ExpansionResult {
+ let _p = profile::span("CompletionContext::expand");
+ let mut derive_ctx = None;
+
+ 'expansion: loop {
+ let parent_item =
+ |item: &ast::Item| item.syntax().ancestors().skip(1).find_map(ast::Item::cast);
+ let ancestor_items = iter::successors(
+ Option::zip(
+ find_node_at_offset::<ast::Item>(&original_file, offset),
+ find_node_at_offset::<ast::Item>(&speculative_file, offset),
+ ),
+ |(a, b)| parent_item(a).zip(parent_item(b)),
+ );
+
+ // first try to expand attributes as these are always the outermost macro calls
+ 'ancestors: for (actual_item, item_with_fake_ident) in ancestor_items {
+ match (
+ sema.expand_attr_macro(&actual_item),
+ sema.speculative_expand_attr_macro(
+ &actual_item,
+ &item_with_fake_ident,
+ fake_ident_token.clone(),
+ ),
+ ) {
+ // maybe parent items have attributes, so continue walking the ancestors
+ (None, None) => continue 'ancestors,
+ // successful expansions
+ (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
+ let new_offset = fake_mapped_token.text_range().start();
+ if new_offset > actual_expansion.text_range().end() {
+ // offset outside of bounds from the original expansion,
+ // stop here to prevent problems from happening
+ break 'expansion;
+ }
+ original_file = actual_expansion;
+ speculative_file = fake_expansion;
+ fake_ident_token = fake_mapped_token;
+ offset = new_offset;
+ continue 'expansion;
+ }
+ // exactly one expansion failed, inconsistent state so stop expanding completely
+ _ => break 'expansion,
+ }
+ }
+
+ // No attributes have been expanded, so look for macro_call! token trees or derive token trees
+ let orig_tt = match find_node_at_offset::<ast::TokenTree>(&original_file, offset) {
+ Some(it) => it,
+ None => break 'expansion,
+ };
+ let spec_tt = match find_node_at_offset::<ast::TokenTree>(&speculative_file, offset) {
+ Some(it) => it,
+ None => break 'expansion,
+ };
+
+ // Expand pseudo-derive expansion
+ if let (Some(orig_attr), Some(spec_attr)) = (
+ orig_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
+ spec_tt.syntax().parent().and_then(ast::Meta::cast).and_then(|it| it.parent_attr()),
+ ) {
+ if let (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) = (
+ sema.expand_derive_as_pseudo_attr_macro(&orig_attr),
+ sema.speculative_expand_derive_as_pseudo_attr_macro(
+ &orig_attr,
+ &spec_attr,
+ fake_ident_token.clone(),
+ ),
+ ) {
+ derive_ctx = Some((
+ actual_expansion,
+ fake_expansion,
+ fake_mapped_token.text_range().start(),
+ orig_attr,
+ ));
+ }
+ // at this point we won't have any more successful expansions, so stop
+ break 'expansion;
+ }
+
+ // Expand fn-like macro calls
+ if let (Some(actual_macro_call), Some(macro_call_with_fake_ident)) = (
+ orig_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
+ spec_tt.syntax().ancestors().find_map(ast::MacroCall::cast),
+ ) {
+ let mac_call_path0 = actual_macro_call.path().as_ref().map(|s| s.syntax().text());
+ let mac_call_path1 =
+ macro_call_with_fake_ident.path().as_ref().map(|s| s.syntax().text());
+
+ // inconsistent state, stop expanding
+ if mac_call_path0 != mac_call_path1 {
+ break 'expansion;
+ }
+ let speculative_args = match macro_call_with_fake_ident.token_tree() {
+ Some(tt) => tt,
+ None => break 'expansion,
+ };
+
+ match (
+ sema.expand(&actual_macro_call),
+ sema.speculative_expand(
+ &actual_macro_call,
+ &speculative_args,
+ fake_ident_token.clone(),
+ ),
+ ) {
+ // successful expansions
+ (Some(actual_expansion), Some((fake_expansion, fake_mapped_token))) => {
+ let new_offset = fake_mapped_token.text_range().start();
+ if new_offset > actual_expansion.text_range().end() {
+ // offset outside of bounds from the original expansion,
+ // stop here to prevent problems from happening
+ break 'expansion;
+ }
+ original_file = actual_expansion;
+ speculative_file = fake_expansion;
+ fake_ident_token = fake_mapped_token;
+ offset = new_offset;
+ continue 'expansion;
+ }
+ // at least on expansion failed, we won't have anything to expand from this point
+ // onwards so break out
+ _ => break 'expansion,
+ }
+ }
+
+ // none of our states have changed so stop the loop
+ break 'expansion;
+ }
+ ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx }
+}
+
+/// Fill the completion context, this is what does semantic reasoning about the surrounding context
+/// of the completion location.
+fn analyze(
+ sema: &Semantics<'_, RootDatabase>,
+ expansion_result: ExpansionResult,
+ original_token: &SyntaxToken,
+ self_token: &SyntaxToken,
+) -> Option<(CompletionAnalysis, (Option<Type>, Option<ast::NameOrNameRef>), QualifierCtx)> {
+ let _p = profile::span("CompletionContext::analyze");
+ let ExpansionResult { original_file, speculative_file, offset, fake_ident_token, derive_ctx } =
+ expansion_result;
+ let syntax_element = NodeOrToken::Token(fake_ident_token);
+ if is_in_token_of_for_loop(syntax_element.clone()) {
+ // for pat $0
+ // there is nothing to complete here except `in` keyword
+ // don't bother populating the context
+ // FIXME: the completion calculations should end up good enough
+ // such that this special case becomes unnecessary
+ return None;
+ }
+
+ // Overwrite the path kind for derives
+ if let Some((original_file, file_with_fake_ident, offset, origin_attr)) = derive_ctx {
+ if let Some(ast::NameLike::NameRef(name_ref)) =
+ find_node_at_offset(&file_with_fake_ident, offset)
+ {
+ let parent = name_ref.syntax().parent()?;
+ let (mut nameref_ctx, _) = classify_name_ref(&sema, &original_file, name_ref, parent)?;
+ if let NameRefKind::Path(path_ctx) = &mut nameref_ctx.kind {
+ path_ctx.kind = PathKind::Derive {
+ existing_derives: sema
+ .resolve_derive_macro(&origin_attr)
+ .into_iter()
+ .flatten()
+ .flatten()
+ .collect(),
+ };
+ }
+ return Some((
+ CompletionAnalysis::NameRef(nameref_ctx),
+ (None, None),
+ QualifierCtx::default(),
+ ));
+ }
+ return None;
+ }
+
+ let name_like = match find_node_at_offset(&speculative_file, offset) {
+ Some(it) => it,
+ None => {
+ let analysis = if let Some(original) = ast::String::cast(original_token.clone()) {
+ CompletionAnalysis::String {
+ original,
+ expanded: ast::String::cast(self_token.clone()),
+ }
+ } else {
+ // Fix up trailing whitespace problem
+ // #[attr(foo = $0
+ let token = syntax::algo::skip_trivia_token(self_token.clone(), Direction::Prev)?;
+ let p = token.parent()?;
+ if p.kind() == SyntaxKind::TOKEN_TREE
+ && p.ancestors().any(|it| it.kind() == SyntaxKind::META)
+ {
+ let colon_prefix = previous_non_trivia_token(self_token.clone())
+ .map_or(false, |it| T![:] == it.kind());
+ CompletionAnalysis::UnexpandedAttrTT {
+ fake_attribute_under_caret: syntax_element
+ .ancestors()
+ .find_map(ast::Attr::cast),
+ colon_prefix,
+ }
+ } else {
+ return None;
+ }
+ };
+ return Some((analysis, (None, None), QualifierCtx::default()));
+ }
+ };
+ let expected = expected_type_and_name(sema, &self_token, &name_like);
+ let mut qual_ctx = QualifierCtx::default();
+ let analysis = match name_like {
+ ast::NameLike::Lifetime(lifetime) => {
+ CompletionAnalysis::Lifetime(classify_lifetime(sema, &original_file, lifetime)?)
+ }
+ ast::NameLike::NameRef(name_ref) => {
+ let parent = name_ref.syntax().parent()?;
+ let (nameref_ctx, qualifier_ctx) =
+ classify_name_ref(sema, &original_file, name_ref, parent.clone())?;
+ qual_ctx = qualifier_ctx;
+ CompletionAnalysis::NameRef(nameref_ctx)
+ }
+ ast::NameLike::Name(name) => {
+ let name_ctx = classify_name(sema, &original_file, name)?;
+ CompletionAnalysis::Name(name_ctx)
+ }
+ };
+ Some((analysis, expected, qual_ctx))
+}
+
+/// Calculate the expected type and name of the cursor position.
+fn expected_type_and_name(
+ sema: &Semantics<'_, RootDatabase>,
+ token: &SyntaxToken,
+ name_like: &ast::NameLike,
+) -> (Option<Type>, Option<NameOrNameRef>) {
+ let mut node = match token.parent() {
+ Some(it) => it,
+ None => return (None, None),
+ };
+
+ let strip_refs = |mut ty: Type| match name_like {
+ ast::NameLike::NameRef(n) => {
+ let p = match n.syntax().parent() {
+ Some(it) => it,
+ None => return ty,
+ };
+ let top_syn = match_ast! {
+ match p {
+ ast::FieldExpr(e) => e
+ .syntax()
+ .ancestors()
+ .map_while(ast::FieldExpr::cast)
+ .last()
+ .map(|it| it.syntax().clone()),
+ ast::PathSegment(e) => e
+ .syntax()
+ .ancestors()
+ .skip(1)
+ .take_while(|it| ast::Path::can_cast(it.kind()) || ast::PathExpr::can_cast(it.kind()))
+ .find_map(ast::PathExpr::cast)
+ .map(|it| it.syntax().clone()),
+ _ => None
+ }
+ };
+ let top_syn = match top_syn {
+ Some(it) => it,
+ None => return ty,
+ };
+ for _ in top_syn.ancestors().skip(1).map_while(ast::RefExpr::cast) {
+ cov_mark::hit!(expected_type_fn_param_ref);
+ ty = ty.strip_reference();
+ }
+ ty
+ }
+ _ => ty,
+ };
+
+ loop {
+ break match_ast! {
+ match node {
+ ast::LetStmt(it) => {
+ cov_mark::hit!(expected_type_let_with_leading_char);
+ cov_mark::hit!(expected_type_let_without_leading_char);
+ let ty = it.pat()
+ .and_then(|pat| sema.type_of_pat(&pat))
+ .or_else(|| it.initializer().and_then(|it| sema.type_of_expr(&it)))
+ .map(TypeInfo::original);
+ let name = match it.pat() {
+ Some(ast::Pat::IdentPat(ident)) => ident.name().map(NameOrNameRef::Name),
+ Some(_) | None => None,
+ };
+
+ (ty, name)
+ },
+ ast::LetExpr(it) => {
+ cov_mark::hit!(expected_type_if_let_without_leading_char);
+ let ty = it.pat()
+ .and_then(|pat| sema.type_of_pat(&pat))
+ .or_else(|| it.expr().and_then(|it| sema.type_of_expr(&it)))
+ .map(TypeInfo::original);
+ (ty, None)
+ },
+ ast::ArgList(_) => {
+ cov_mark::hit!(expected_type_fn_param);
+ ActiveParameter::at_token(
+ &sema,
+ token.clone(),
+ ).map(|ap| {
+ let name = ap.ident().map(NameOrNameRef::Name);
+
+ let ty = strip_refs(ap.ty);
+ (Some(ty), name)
+ })
+ .unwrap_or((None, None))
+ },
+ ast::RecordExprFieldList(it) => {
+ // wouldn't try {} be nice...
+ (|| {
+ if token.kind() == T![..]
+ ||token.prev_token().map(|t| t.kind()) == Some(T![..])
+ {
+ cov_mark::hit!(expected_type_struct_func_update);
+ let record_expr = it.syntax().parent().and_then(ast::RecordExpr::cast)?;
+ let ty = sema.type_of_expr(&record_expr.into())?;
+ Some((
+ Some(ty.original),
+ None
+ ))
+ } else {
+ cov_mark::hit!(expected_type_struct_field_without_leading_char);
+ let expr_field = token.prev_sibling_or_token()?
+ .into_node()
+ .and_then(ast::RecordExprField::cast)?;
+ let (_, _, ty) = sema.resolve_record_field(&expr_field)?;
+ Some((
+ Some(ty),
+ expr_field.field_name().map(NameOrNameRef::NameRef),
+ ))
+ }
+ })().unwrap_or((None, None))
+ },
+ ast::RecordExprField(it) => {
+ if let Some(expr) = it.expr() {
+ cov_mark::hit!(expected_type_struct_field_with_leading_char);
+ (
+ sema.type_of_expr(&expr).map(TypeInfo::original),
+ it.field_name().map(NameOrNameRef::NameRef),
+ )
+ } else {
+ cov_mark::hit!(expected_type_struct_field_followed_by_comma);
+ let ty = sema.resolve_record_field(&it)
+ .map(|(_, _, ty)| ty);
+ (
+ ty,
+ it.field_name().map(NameOrNameRef::NameRef),
+ )
+ }
+ },
+ // match foo { $0 }
+ // match foo { ..., pat => $0 }
+ ast::MatchExpr(it) => {
+ let on_arrow = previous_non_trivia_token(token.clone()).map_or(false, |it| T![=>] == it.kind());
+
+ let ty = if on_arrow {
+ // match foo { ..., pat => $0 }
+ cov_mark::hit!(expected_type_match_arm_body_without_leading_char);
+ cov_mark::hit!(expected_type_match_arm_body_with_leading_char);
+ sema.type_of_expr(&it.into())
+ } else {
+ // match foo { $0 }
+ cov_mark::hit!(expected_type_match_arm_without_leading_char);
+ it.expr().and_then(|e| sema.type_of_expr(&e))
+ }.map(TypeInfo::original);
+ (ty, None)
+ },
+ ast::IfExpr(it) => {
+ let ty = it.condition()
+ .and_then(|e| sema.type_of_expr(&e))
+ .map(TypeInfo::original);
+ (ty, None)
+ },
+ ast::IdentPat(it) => {
+ cov_mark::hit!(expected_type_if_let_with_leading_char);
+ cov_mark::hit!(expected_type_match_arm_with_leading_char);
+ let ty = sema.type_of_pat(&ast::Pat::from(it)).map(TypeInfo::original);
+ (ty, None)
+ },
+ ast::Fn(it) => {
+ cov_mark::hit!(expected_type_fn_ret_with_leading_char);
+ cov_mark::hit!(expected_type_fn_ret_without_leading_char);
+ let def = sema.to_def(&it);
+ (def.map(|def| def.ret_type(sema.db)), None)
+ },
+ ast::ClosureExpr(it) => {
+ let ty = sema.type_of_expr(&it.into());
+ ty.and_then(|ty| ty.original.as_callable(sema.db))
+ .map(|c| (Some(c.return_type()), None))
+ .unwrap_or((None, None))
+ },
+ ast::ParamList(_) => (None, None),
+ ast::Stmt(_) => (None, None),
+ ast::Item(_) => (None, None),
+ _ => {
+ match node.parent() {
+ Some(n) => {
+ node = n;
+ continue;
+ },
+ None => (None, None),
+ }
+ },
+ }
+ };
+ }
+}
+
+fn classify_lifetime(
+ _sema: &Semantics<'_, RootDatabase>,
+ original_file: &SyntaxNode,
+ lifetime: ast::Lifetime,
+) -> Option<LifetimeContext> {
+ let parent = lifetime.syntax().parent()?;
+ if parent.kind() == SyntaxKind::ERROR {
+ return None;
+ }
+
+ let kind = match_ast! {
+ match parent {
+ ast::LifetimeParam(param) => LifetimeKind::LifetimeParam {
+ is_decl: param.lifetime().as_ref() == Some(&lifetime),
+ param
+ },
+ ast::BreakExpr(_) => LifetimeKind::LabelRef,
+ ast::ContinueExpr(_) => LifetimeKind::LabelRef,
+ ast::Label(_) => LifetimeKind::LabelDef,
+ _ => LifetimeKind::Lifetime,
+ }
+ };
+ let lifetime = find_node_at_offset(&original_file, lifetime.syntax().text_range().start());
+
+ Some(LifetimeContext { lifetime, kind })
+}
+
+fn classify_name(
+ sema: &Semantics<'_, RootDatabase>,
+ original_file: &SyntaxNode,
+ name: ast::Name,
+) -> Option<NameContext> {
+ let parent = name.syntax().parent()?;
+ let kind = match_ast! {
+ match parent {
+ ast::Const(_) => NameKind::Const,
+ ast::ConstParam(_) => NameKind::ConstParam,
+ ast::Enum(_) => NameKind::Enum,
+ ast::Fn(_) => NameKind::Function,
+ ast::IdentPat(bind_pat) => {
+ let mut pat_ctx = pattern_context_for(sema, original_file, bind_pat.into());
+ if let Some(record_field) = ast::RecordPatField::for_field_name(&name) {
+ pat_ctx.record_pat = find_node_in_file_compensated(sema, original_file, &record_field.parent_record_pat());
+ }
+
+ NameKind::IdentPat(pat_ctx)
+ },
+ ast::MacroDef(_) => NameKind::MacroDef,
+ ast::MacroRules(_) => NameKind::MacroRules,
+ ast::Module(module) => NameKind::Module(module),
+ ast::RecordField(_) => NameKind::RecordField,
+ ast::Rename(_) => NameKind::Rename,
+ ast::SelfParam(_) => NameKind::SelfParam,
+ ast::Static(_) => NameKind::Static,
+ ast::Struct(_) => NameKind::Struct,
+ ast::Trait(_) => NameKind::Trait,
+ ast::TypeAlias(_) => NameKind::TypeAlias,
+ ast::TypeParam(_) => NameKind::TypeParam,
+ ast::Union(_) => NameKind::Union,
+ ast::Variant(_) => NameKind::Variant,
+ _ => return None,
+ }
+ };
+ let name = find_node_at_offset(&original_file, name.syntax().text_range().start());
+ Some(NameContext { name, kind })
+}
+
+fn classify_name_ref(
+ sema: &Semantics<'_, RootDatabase>,
+ original_file: &SyntaxNode,
+ name_ref: ast::NameRef,
+ parent: SyntaxNode,
+) -> Option<(NameRefContext, QualifierCtx)> {
+ let nameref = find_node_at_offset(&original_file, name_ref.syntax().text_range().start());
+
+ let make_res = |kind| (NameRefContext { nameref: nameref.clone(), kind }, Default::default());
+
+ if let Some(record_field) = ast::RecordExprField::for_field_name(&name_ref) {
+ let dot_prefix = previous_non_trivia_token(name_ref.syntax().clone())
+ .map_or(false, |it| T![.] == it.kind());
+
+ return find_node_in_file_compensated(
+ sema,
+ original_file,
+ &record_field.parent_record_lit(),
+ )
+ .map(|expr| NameRefKind::RecordExpr { expr, dot_prefix })
+ .map(make_res);
+ }
+ if let Some(record_field) = ast::RecordPatField::for_field_name_ref(&name_ref) {
+ let kind = NameRefKind::Pattern(PatternContext {
+ param_ctx: None,
+ has_type_ascription: false,
+ ref_token: None,
+ mut_token: None,
+ record_pat: find_node_in_file_compensated(
+ sema,
+ original_file,
+ &record_field.parent_record_pat(),
+ ),
+ ..pattern_context_for(
+ sema,
+ original_file,
+ record_field.parent_record_pat().clone().into(),
+ )
+ });
+ return Some(make_res(kind));
+ }
+
+ let segment = match_ast! {
+ match parent {
+ ast::PathSegment(segment) => segment,
+ ast::FieldExpr(field) => {
+ let receiver = find_opt_node_in_file(original_file, field.expr());
+ let receiver_is_ambiguous_float_literal = match &receiver {
+ Some(ast::Expr::Literal(l)) => matches! {
+ l.kind(),
+ ast::LiteralKind::FloatNumber { .. } if l.syntax().last_token().map_or(false, |it| it.text().ends_with('.'))
+ },
+ _ => false,
+ };
+ let kind = NameRefKind::DotAccess(DotAccess {
+ receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
+ kind: DotAccessKind::Field { receiver_is_ambiguous_float_literal },
+ receiver
+ });
+ return Some(make_res(kind));
+ },
+ ast::MethodCallExpr(method) => {
+ let receiver = find_opt_node_in_file(original_file, method.receiver());
+ let kind = NameRefKind::DotAccess(DotAccess {
+ receiver_ty: receiver.as_ref().and_then(|it| sema.type_of_expr(it)),
+ kind: DotAccessKind::Method { has_parens: method.arg_list().map_or(false, |it| it.l_paren_token().is_some()) },
+ receiver
+ });
+ return Some(make_res(kind));
+ },
+ _ => return None,
+ }
+ };
+
+ let path = segment.parent_path();
+ let original_path = find_node_in_file_compensated(sema, original_file, &path);
+
+ let mut path_ctx = PathCompletionCtx {
+ has_call_parens: false,
+ has_macro_bang: false,
+ qualified: Qualified::No,
+ parent: None,
+ path: path.clone(),
+ original_path,
+ kind: PathKind::Item { kind: ItemListKind::SourceFile },
+ has_type_args: false,
+ use_tree_parent: false,
+ };
+
+ let is_in_block = |it: &SyntaxNode| {
+ it.parent()
+ .map(|node| {
+ ast::ExprStmt::can_cast(node.kind()) || ast::StmtList::can_cast(node.kind())
+ })
+ .unwrap_or(false)
+ };
+ let func_update_record = |syn: &SyntaxNode| {
+ if let Some(record_expr) = syn.ancestors().nth(2).and_then(ast::RecordExpr::cast) {
+ find_node_in_file_compensated(sema, original_file, &record_expr)
+ } else {
+ None
+ }
+ };
+ let after_if_expr = |node: SyntaxNode| {
+ let prev_expr = (|| {
+ let prev_sibling = non_trivia_sibling(node.into(), Direction::Prev)?.into_node()?;
+ ast::ExprStmt::cast(prev_sibling)?.expr()
+ })();
+ matches!(prev_expr, Some(ast::Expr::IfExpr(_)))
+ };
+
+ // We do not want to generate path completions when we are sandwiched between an item decl signature and its body.
+ // ex. trait Foo $0 {}
+ // in these cases parser recovery usually kicks in for our inserted identifier, causing it
+ // to either be parsed as an ExprStmt or a MacroCall, depending on whether it is in a block
+ // expression or an item list.
+ // The following code checks if the body is missing, if it is we either cut off the body
+ // from the item or it was missing in the first place
+ let inbetween_body_and_decl_check = |node: SyntaxNode| {
+ if let Some(NodeOrToken::Node(n)) =
+ syntax::algo::non_trivia_sibling(node.into(), syntax::Direction::Prev)
+ {
+ if let Some(item) = ast::Item::cast(n) {
+ let is_inbetween = match &item {
+ ast::Item::Const(it) => it.body().is_none(),
+ ast::Item::Enum(it) => it.variant_list().is_none(),
+ ast::Item::ExternBlock(it) => it.extern_item_list().is_none(),
+ ast::Item::Fn(it) => it.body().is_none(),
+ ast::Item::Impl(it) => it.assoc_item_list().is_none(),
- ast::Item::Struct(it) => it.field_list().is_none(),
++ ast::Item::Module(it) => {
++ it.item_list().is_none() && it.semicolon_token().is_none()
++ }
+ ast::Item::Static(it) => it.body().is_none(),
++ ast::Item::Struct(it) => {
++ it.field_list().is_none() && it.semicolon_token().is_none()
++ }
+ ast::Item::Trait(it) => it.assoc_item_list().is_none(),
+ ast::Item::TypeAlias(it) => it.ty().is_none(),
+ ast::Item::Union(it) => it.record_field_list().is_none(),
+ _ => false,
+ };
+ if is_inbetween {
+ return Some(item);
+ }
+ }
+ }
+ None
+ };
+
+ let type_location = |node: &SyntaxNode| {
+ let parent = node.parent()?;
+ let res = match_ast! {
+ match parent {
+ ast::Const(it) => {
+ let name = find_opt_node_in_file(original_file, it.name())?;
+ let original = ast::Const::cast(name.syntax().parent()?)?;
+ TypeLocation::TypeAscription(TypeAscriptionTarget::Const(original.body()))
+ },
+ ast::RetType(it) => {
+ if it.thin_arrow_token().is_none() {
+ return None;
+ }
+ let parent = match ast::Fn::cast(parent.parent()?) {
+ Some(x) => x.param_list(),
+ None => ast::ClosureExpr::cast(parent.parent()?)?.param_list(),
+ };
+
+ let parent = find_opt_node_in_file(original_file, parent)?.syntax().parent()?;
+ TypeLocation::TypeAscription(TypeAscriptionTarget::RetType(match_ast! {
+ match parent {
+ ast::ClosureExpr(it) => {
+ it.body()
+ },
+ ast::Fn(it) => {
+ it.body().map(ast::Expr::BlockExpr)
+ },
+ _ => return None,
+ }
+ }))
+ },
+ ast::Param(it) => {
+ if it.colon_token().is_none() {
+ return None;
+ }
+ TypeLocation::TypeAscription(TypeAscriptionTarget::FnParam(find_opt_node_in_file(original_file, it.pat())))
+ },
+ ast::LetStmt(it) => {
+ if it.colon_token().is_none() {
+ return None;
+ }
+ TypeLocation::TypeAscription(TypeAscriptionTarget::Let(find_opt_node_in_file(original_file, it.pat())))
+ },
+ ast::Impl(it) => {
+ match it.trait_() {
+ Some(t) if t.syntax() == node => TypeLocation::ImplTrait,
+ _ => match it.self_ty() {
+ Some(t) if t.syntax() == node => TypeLocation::ImplTarget,
+ _ => return None,
+ },
+ }
+ },
+ ast::TypeBound(_) => TypeLocation::TypeBound,
+ // is this case needed?
+ ast::TypeBoundList(_) => TypeLocation::TypeBound,
+ ast::GenericArg(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, it.syntax().parent().and_then(ast::GenericArgList::cast))),
+ // is this case needed?
+ ast::GenericArgList(it) => TypeLocation::GenericArgList(find_opt_node_in_file_compensated(sema, original_file, Some(it))),
+ ast::TupleField(_) => TypeLocation::TupleField,
+ _ => return None,
+ }
+ };
+ Some(res)
+ };
+
+ let is_in_condition = |it: &ast::Expr| {
+ (|| {
+ let parent = it.syntax().parent()?;
+ if let Some(expr) = ast::WhileExpr::cast(parent.clone()) {
+ Some(expr.condition()? == *it)
+ } else if let Some(expr) = ast::IfExpr::cast(parent) {
+ Some(expr.condition()? == *it)
+ } else {
+ None
+ }
+ })()
+ .unwrap_or(false)
+ };
+
+ let make_path_kind_expr = |expr: ast::Expr| {
+ let it = expr.syntax();
+ let in_block_expr = is_in_block(it);
+ let in_loop_body = is_in_loop_body(it);
+ let after_if_expr = after_if_expr(it.clone());
+ let ref_expr_parent =
+ path.as_single_name_ref().and_then(|_| it.parent()).and_then(ast::RefExpr::cast);
+ let (innermost_ret_ty, self_param) = {
+ let find_ret_ty = |it: SyntaxNode| {
+ if let Some(item) = ast::Item::cast(it.clone()) {
+ match item {
+ ast::Item::Fn(f) => Some(sema.to_def(&f).map(|it| it.ret_type(sema.db))),
+ ast::Item::MacroCall(_) => None,
+ _ => Some(None),
+ }
+ } else {
+ let expr = ast::Expr::cast(it)?;
+ let callable = match expr {
+ // FIXME
+ // ast::Expr::BlockExpr(b) if b.async_token().is_some() || b.try_token().is_some() => sema.type_of_expr(b),
+ ast::Expr::ClosureExpr(_) => sema.type_of_expr(&expr),
+ _ => return None,
+ };
+ Some(
+ callable
+ .and_then(|c| c.adjusted().as_callable(sema.db))
+ .map(|it| it.return_type()),
+ )
+ }
+ };
+ let find_fn_self_param = |it| match it {
+ ast::Item::Fn(fn_) => Some(sema.to_def(&fn_).and_then(|it| it.self_param(sema.db))),
+ ast::Item::MacroCall(_) => None,
+ _ => Some(None),
+ };
+
+ match find_node_in_file_compensated(sema, original_file, &expr) {
+ Some(it) => {
+ let innermost_ret_ty = sema
+ .ancestors_with_macros(it.syntax().clone())
+ .find_map(find_ret_ty)
+ .flatten();
+
+ let self_param = sema
+ .ancestors_with_macros(it.syntax().clone())
+ .filter_map(ast::Item::cast)
+ .find_map(find_fn_self_param)
+ .flatten();
+ (innermost_ret_ty, self_param)
+ }
+ None => (None, None),
+ }
+ };
+ let is_func_update = func_update_record(it);
+ let in_condition = is_in_condition(&expr);
+ let incomplete_let = it
+ .parent()
+ .and_then(ast::LetStmt::cast)
+ .map_or(false, |it| it.semicolon_token().is_none());
+ let impl_ = fetch_immediate_impl(sema, original_file, expr.syntax());
+
+ let in_match_guard = match it.parent().and_then(ast::MatchArm::cast) {
+ Some(arm) => arm
+ .fat_arrow_token()
+ .map_or(true, |arrow| it.text_range().start() < arrow.text_range().start()),
+ None => false,
+ };
+
+ PathKind::Expr {
+ expr_ctx: ExprCtx {
+ in_block_expr,
+ in_loop_body,
+ after_if_expr,
+ in_condition,
+ ref_expr_parent,
+ is_func_update,
+ innermost_ret_ty,
+ self_param,
+ incomplete_let,
+ impl_,
+ in_match_guard,
+ },
+ }
+ };
+ let make_path_kind_type = |ty: ast::Type| {
+ let location = type_location(ty.syntax());
+ PathKind::Type { location: location.unwrap_or(TypeLocation::Other) }
+ };
+
+ let mut kind_macro_call = |it: ast::MacroCall| {
+ path_ctx.has_macro_bang = it.excl_token().is_some();
+ let parent = it.syntax().parent()?;
+ // Any path in an item list will be treated as a macro call by the parser
+ let kind = match_ast! {
+ match parent {
+ ast::MacroExpr(expr) => make_path_kind_expr(expr.into()),
+ ast::MacroPat(it) => PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())},
+ ast::MacroType(ty) => make_path_kind_type(ty.into()),
+ ast::ItemList(_) => PathKind::Item { kind: ItemListKind::Module },
+ ast::AssocItemList(_) => PathKind::Item { kind: match parent.parent() {
+ Some(it) => match_ast! {
+ match it {
+ ast::Trait(_) => ItemListKind::Trait,
+ ast::Impl(it) => if it.trait_().is_some() {
+ ItemListKind::TraitImpl(find_node_in_file_compensated(sema, original_file, &it))
+ } else {
+ ItemListKind::Impl
+ },
+ _ => return None
+ }
+ },
+ None => return None,
+ } },
+ ast::ExternItemList(_) => PathKind::Item { kind: ItemListKind::ExternBlock },
+ ast::SourceFile(_) => PathKind::Item { kind: ItemListKind::SourceFile },
+ _ => return None,
+ }
+ };
+ Some(kind)
+ };
+ let make_path_kind_attr = |meta: ast::Meta| {
+ let attr = meta.parent_attr()?;
+ let kind = attr.kind();
+ let attached = attr.syntax().parent()?;
+ let is_trailing_outer_attr = kind != AttrKind::Inner
+ && non_trivia_sibling(attr.syntax().clone().into(), syntax::Direction::Next).is_none();
+ let annotated_item_kind = if is_trailing_outer_attr { None } else { Some(attached.kind()) };
+ Some(PathKind::Attr { attr_ctx: AttrCtx { kind, annotated_item_kind } })
+ };
+
+ // Infer the path kind
+ let parent = path.syntax().parent()?;
+ let kind = match_ast! {
+ match parent {
+ ast::PathType(it) => make_path_kind_type(it.into()),
+ ast::PathExpr(it) => {
+ if let Some(p) = it.syntax().parent() {
+ if ast::ExprStmt::can_cast(p.kind()) {
+ if let Some(kind) = inbetween_body_and_decl_check(p) {
+ return Some(make_res(NameRefKind::Keyword(kind)));
+ }
+ }
+ }
+
+ path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
+
+ make_path_kind_expr(it.into())
+ },
+ ast::TupleStructPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::RecordPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::PathPat(it) => {
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
+ },
+ ast::MacroCall(it) => {
+ // A macro call in this position is usually a result of parsing recovery, so check that
+ if let Some(kind) = inbetween_body_and_decl_check(it.syntax().clone()) {
+ return Some(make_res(NameRefKind::Keyword(kind)));
+ }
+
+ kind_macro_call(it)?
+ },
+ ast::Meta(meta) => make_path_kind_attr(meta)?,
+ ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
+ ast::UseTree(_) => PathKind::Use,
+ // completing inside a qualifier
+ ast::Path(parent) => {
+ path_ctx.parent = Some(parent.clone());
+ let parent = iter::successors(Some(parent), |it| it.parent_path()).last()?.syntax().parent()?;
+ match_ast! {
+ match parent {
+ ast::PathType(it) => make_path_kind_type(it.into()),
+ ast::PathExpr(it) => {
+ path_ctx.has_call_parens = it.syntax().parent().map_or(false, |it| ast::CallExpr::can_cast(it.kind()));
+
+ make_path_kind_expr(it.into())
+ },
+ ast::TupleStructPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::RecordPat(it) => {
+ path_ctx.has_call_parens = true;
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into()) }
+ },
+ ast::PathPat(it) => {
+ PathKind::Pat { pat_ctx: pattern_context_for(sema, original_file, it.into())}
+ },
+ ast::MacroCall(it) => {
+ kind_macro_call(it)?
+ },
+ ast::Meta(meta) => make_path_kind_attr(meta)?,
+ ast::Visibility(it) => PathKind::Vis { has_in_token: it.in_token().is_some() },
+ ast::UseTree(_) => PathKind::Use,
+ ast::RecordExpr(it) => make_path_kind_expr(it.into()),
+ _ => return None,
+ }
+ }
+ },
+ ast::RecordExpr(it) => make_path_kind_expr(it.into()),
+ _ => return None,
+ }
+ };
+
+ path_ctx.kind = kind;
+ path_ctx.has_type_args = segment.generic_arg_list().is_some();
+
+ // calculate the qualifier context
+ if let Some((qualifier, use_tree_parent)) = path_or_use_tree_qualifier(&path) {
+ path_ctx.use_tree_parent = use_tree_parent;
+ if !use_tree_parent && segment.coloncolon_token().is_some() {
+ path_ctx.qualified = Qualified::Absolute;
+ } else {
+ let qualifier = qualifier
+ .segment()
+ .and_then(|it| find_node_in_file(original_file, &it))
+ .map(|it| it.parent_path());
+ if let Some(qualifier) = qualifier {
+ let type_anchor = match qualifier.segment().and_then(|it| it.kind()) {
+ Some(ast::PathSegmentKind::Type { type_ref: Some(type_ref), trait_ref })
+ if qualifier.qualifier().is_none() =>
+ {
+ Some((type_ref, trait_ref))
+ }
+ _ => None,
+ };
+
+ path_ctx.qualified = if let Some((ty, trait_ref)) = type_anchor {
+ let ty = match ty {
+ ast::Type::InferType(_) => None,
+ ty => sema.resolve_type(&ty),
+ };
+ let trait_ = trait_ref.and_then(|it| sema.resolve_trait(&it.path()?));
+ Qualified::TypeAnchor { ty, trait_ }
+ } else {
+ let res = sema.resolve_path(&qualifier);
+
+ // For understanding how and why super_chain_len is calculated the way it
+ // is check the documentation at its definition
+ let mut segment_count = 0;
+ let super_count = iter::successors(Some(qualifier.clone()), |p| p.qualifier())
+ .take_while(|p| {
+ p.segment()
+ .and_then(|s| {
+ segment_count += 1;
+ s.super_token()
+ })
+ .is_some()
+ })
+ .count();
+
+ let super_chain_len =
+ if segment_count > super_count { None } else { Some(super_count) };
+
+ Qualified::With { path: qualifier, resolution: res, super_chain_len }
+ }
+ };
+ }
+ } else if let Some(segment) = path.segment() {
+ if segment.coloncolon_token().is_some() {
+ path_ctx.qualified = Qualified::Absolute;
+ }
+ }
+
+ let mut qualifier_ctx = QualifierCtx::default();
+ if path_ctx.is_trivial_path() {
+ // fetch the full expression that may have qualifiers attached to it
+ let top_node = match path_ctx.kind {
+ PathKind::Expr { expr_ctx: ExprCtx { in_block_expr: true, .. } } => {
+ parent.ancestors().find(|it| ast::PathExpr::can_cast(it.kind())).and_then(|p| {
+ let parent = p.parent()?;
+ if ast::StmtList::can_cast(parent.kind()) {
+ Some(p)
+ } else if ast::ExprStmt::can_cast(parent.kind()) {
+ Some(parent)
+ } else {
+ None
+ }
+ })
+ }
+ PathKind::Item { .. } => {
+ parent.ancestors().find(|it| ast::MacroCall::can_cast(it.kind()))
+ }
+ _ => None,
+ };
+ if let Some(top) = top_node {
+ if let Some(NodeOrToken::Node(error_node)) =
+ syntax::algo::non_trivia_sibling(top.clone().into(), syntax::Direction::Prev)
+ {
+ if error_node.kind() == SyntaxKind::ERROR {
+ qualifier_ctx.unsafe_tok = error_node
+ .children_with_tokens()
+ .filter_map(NodeOrToken::into_token)
+ .find(|it| it.kind() == T![unsafe]);
+ qualifier_ctx.vis_node = error_node.children().find_map(ast::Visibility::cast);
+ }
+ }
+
+ if let PathKind::Item { .. } = path_ctx.kind {
+ if qualifier_ctx.none() {
+ if let Some(t) = top.first_token() {
+ if let Some(prev) = t
+ .prev_token()
+ .and_then(|t| syntax::algo::skip_trivia_token(t, Direction::Prev))
+ {
+ if ![T![;], T!['}'], T!['{']].contains(&prev.kind()) {
+ // This was inferred to be an item position path, but it seems
+ // to be part of some other broken node which leaked into an item
+ // list
+ return None;
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ Some((NameRefContext { nameref, kind: NameRefKind::Path(path_ctx) }, qualifier_ctx))
+}
+
+fn pattern_context_for(
+ sema: &Semantics<'_, RootDatabase>,
+ original_file: &SyntaxNode,
+ pat: ast::Pat,
+) -> PatternContext {
+ let mut param_ctx = None;
+ let (refutability, has_type_ascription) =
+ pat
+ .syntax()
+ .ancestors()
+ .skip_while(|it| ast::Pat::can_cast(it.kind()))
+ .next()
+ .map_or((PatternRefutability::Irrefutable, false), |node| {
+ let refutability = match_ast! {
+ match node {
+ ast::LetStmt(let_) => return (PatternRefutability::Irrefutable, let_.ty().is_some()),
+ ast::Param(param) => {
+ let has_type_ascription = param.ty().is_some();
+ param_ctx = (|| {
+ let fake_param_list = param.syntax().parent().and_then(ast::ParamList::cast)?;
+ let param_list = find_node_in_file_compensated(sema, original_file, &fake_param_list)?;
+ let param_list_owner = param_list.syntax().parent()?;
+ let kind = match_ast! {
+ match param_list_owner {
+ ast::ClosureExpr(closure) => ParamKind::Closure(closure),
+ ast::Fn(fn_) => ParamKind::Function(fn_),
+ _ => return None,
+ }
+ };
+ Some(ParamContext {
+ param_list, param, kind
+ })
+ })();
+ return (PatternRefutability::Irrefutable, has_type_ascription)
+ },
+ ast::MatchArm(_) => PatternRefutability::Refutable,
+ ast::LetExpr(_) => PatternRefutability::Refutable,
+ ast::ForExpr(_) => PatternRefutability::Irrefutable,
+ _ => PatternRefutability::Irrefutable,
+ }
+ };
+ (refutability, false)
+ });
+ let (ref_token, mut_token) = match &pat {
+ ast::Pat::IdentPat(it) => (it.ref_token(), it.mut_token()),
+ _ => (None, None),
+ };
+
+ PatternContext {
+ refutability,
+ param_ctx,
+ has_type_ascription,
+ parent_pat: pat.syntax().parent().and_then(ast::Pat::cast),
+ mut_token,
+ ref_token,
+ record_pat: None,
+ impl_: fetch_immediate_impl(sema, original_file, pat.syntax()),
+ }
+}
+
+fn fetch_immediate_impl(
+ sema: &Semantics<'_, RootDatabase>,
+ original_file: &SyntaxNode,
+ node: &SyntaxNode,
+) -> Option<ast::Impl> {
+ let mut ancestors = ancestors_in_file_compensated(sema, original_file, node)?
+ .filter_map(ast::Item::cast)
+ .filter(|it| !matches!(it, ast::Item::MacroCall(_)));
+
+ match ancestors.next()? {
+ ast::Item::Const(_) | ast::Item::Fn(_) | ast::Item::TypeAlias(_) => (),
+ ast::Item::Impl(it) => return Some(it),
+ _ => return None,
+ }
+ match ancestors.next()? {
+ ast::Item::Impl(it) => Some(it),
+ _ => None,
+ }
+}
+
+/// Attempts to find `node` inside `syntax` via `node`'s text range.
+/// If the fake identifier has been inserted after this node or inside of this node use the `_compensated` version instead.
+fn find_opt_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: Option<N>) -> Option<N> {
+ find_node_in_file(syntax, &node?)
+}
+
+/// Attempts to find `node` inside `syntax` via `node`'s text range.
+/// If the fake identifier has been inserted after this node or inside of this node use the `_compensated` version instead.
+fn find_node_in_file<N: AstNode>(syntax: &SyntaxNode, node: &N) -> Option<N> {
+ let syntax_range = syntax.text_range();
+ let range = node.syntax().text_range();
+ let intersection = range.intersect(syntax_range)?;
+ syntax.covering_element(intersection).ancestors().find_map(N::cast)
+}
+
+/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
+/// for the offset introduced by the fake ident.
+/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
+fn find_node_in_file_compensated<N: AstNode>(
+ sema: &Semantics<'_, RootDatabase>,
+ in_file: &SyntaxNode,
+ node: &N,
+) -> Option<N> {
+ ancestors_in_file_compensated(sema, in_file, node.syntax())?.find_map(N::cast)
+}
+
+fn ancestors_in_file_compensated<'sema>(
+ sema: &'sema Semantics<'_, RootDatabase>,
+ in_file: &SyntaxNode,
+ node: &SyntaxNode,
+) -> Option<impl Iterator<Item = SyntaxNode> + 'sema> {
+ let syntax_range = in_file.text_range();
+ let range = node.text_range();
+ let end = range.end().checked_sub(TextSize::try_from(COMPLETION_MARKER.len()).ok()?)?;
+ if end < range.start() {
+ return None;
+ }
+ let range = TextRange::new(range.start(), end);
+ // our inserted ident could cause `range` to go outside of the original syntax, so cap it
+ let intersection = range.intersect(syntax_range)?;
+ let node = match in_file.covering_element(intersection) {
+ NodeOrToken::Node(node) => node,
+ NodeOrToken::Token(tok) => tok.parent()?,
+ };
+ Some(sema.ancestors_with_macros(node))
+}
+
+/// Attempts to find `node` inside `syntax` via `node`'s text range while compensating
+/// for the offset introduced by the fake ident.
+/// This is wrong if `node` comes before the insertion point! Use `find_node_in_file` instead.
+fn find_opt_node_in_file_compensated<N: AstNode>(
+ sema: &Semantics<'_, RootDatabase>,
+ syntax: &SyntaxNode,
+ node: Option<N>,
+) -> Option<N> {
+ find_node_in_file_compensated(sema, syntax, &node?)
+}
+
+fn path_or_use_tree_qualifier(path: &ast::Path) -> Option<(ast::Path, bool)> {
+ if let Some(qual) = path.qualifier() {
+ return Some((qual, false));
+ }
+ let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
+ let use_tree = use_tree_list.syntax().parent().and_then(ast::UseTree::cast)?;
+ Some((use_tree.path()?, true))
+}
+
+pub(crate) fn is_in_token_of_for_loop(element: SyntaxElement) -> bool {
+ // oh my ...
+ (|| {
+ let syntax_token = element.into_token()?;
+ let range = syntax_token.text_range();
+ let for_expr = syntax_token.parent_ancestors().find_map(ast::ForExpr::cast)?;
+
+ // check if the current token is the `in` token of a for loop
+ if let Some(token) = for_expr.in_token() {
+ return Some(syntax_token == token);
+ }
+ let pat = for_expr.pat()?;
+ if range.end() < pat.syntax().text_range().end() {
+ // if we are inside or before the pattern we can't be at the `in` token position
+ return None;
+ }
+ let next_sibl = next_non_trivia_sibling(pat.syntax().clone().into())?;
+ Some(match next_sibl {
+ // the loop body is some node, if our token is at the start we are at the `in` position,
+ // otherwise we could be in a recovered expression, we don't wanna ruin completions there
+ syntax::NodeOrToken::Node(n) => n.text_range().start() == range.start(),
+ // the loop body consists of a single token; if our token is that token, we are certainly at the `in` token position
+ syntax::NodeOrToken::Token(t) => t == syntax_token,
+ })
+ })()
+ .unwrap_or(false)
+}
+
+#[test]
+fn test_for_is_prev2() {
+ crate::tests::check_pattern_is_applicable(r"fn __() { for i i$0 }", is_in_token_of_for_loop);
+}
+
+pub(crate) fn is_in_loop_body(node: &SyntaxNode) -> bool {
+ node.ancestors()
+ .take_while(|it| it.kind() != SyntaxKind::FN && it.kind() != SyntaxKind::CLOSURE_EXPR)
+ .find_map(|it| {
+ let loop_body = match_ast! {
+ match it {
+ ast::ForExpr(it) => it.loop_body(),
+ ast::WhileExpr(it) => it.loop_body(),
+ ast::LoopExpr(it) => it.loop_body(),
+ _ => None,
+ }
+ };
+ loop_body.filter(|it| it.syntax().text_range().contains_range(node.text_range()))
+ })
+ .is_some()
+}
+
+fn previous_non_trivia_token(e: impl Into<SyntaxElement>) -> Option<SyntaxToken> {
+ let mut token = match e.into() {
+ SyntaxElement::Node(n) => n.first_token()?,
+ SyntaxElement::Token(t) => t,
+ }
+ .prev_token();
+ while let Some(inner) = token {
+ if !inner.kind().is_trivia() {
+ return Some(inner);
+ } else {
+ token = inner.prev_token();
+ }
+ }
+ None
+}
+
+fn next_non_trivia_sibling(ele: SyntaxElement) -> Option<SyntaxElement> {
+ let mut e = ele.next_sibling_or_token();
+ while let Some(inner) = e {
+ if !inner.kind().is_trivia() {
+ return Some(inner);
+ } else {
+ e = inner.next_sibling_or_token();
+ }
+ }
+ None
+}
--- /dev/null
+//! Completion tests for item list position.
+use expect_test::{expect, Expect};
+
+use crate::tests::{completion_list, BASE_ITEMS_FIXTURE};
+
+fn check(ra_fixture: &str, expect: Expect) {
+ let actual = completion_list(&format!("{}{}", BASE_ITEMS_FIXTURE, ra_fixture));
+ expect.assert_eq(&actual)
+}
+
+#[test]
+fn in_mod_item_list() {
+ check(
+ r#"mod tests { $0 }"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ kw const
+ kw crate::
+ kw enum
+ kw extern
+ kw fn
+ kw impl
+ kw mod
+ kw pub
+ kw pub(crate)
+ kw pub(super)
+ kw self::
+ kw static
+ kw struct
+ kw super::
+ kw trait
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ sn macro_rules
+ sn tfn (Test function)
+ sn tmod (Test module)
+ "#]],
+ )
+}
+
+#[test]
+fn in_source_file_item_list() {
+ check(
+ r#"$0"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ kw const
+ kw crate::
+ kw enum
+ kw extern
+ kw fn
+ kw impl
+ kw mod
+ kw pub
+ kw pub(crate)
+ kw pub(super)
+ kw self::
+ kw static
+ kw struct
+ kw trait
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ sn macro_rules
+ sn tfn (Test function)
+ sn tmod (Test module)
+ "#]],
+ )
+}
+
+#[test]
+fn in_item_list_after_attr() {
+ check(
+ r#"#[attr] $0"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ kw const
+ kw crate::
+ kw enum
+ kw extern
+ kw fn
+ kw impl
+ kw mod
+ kw pub
+ kw pub(crate)
+ kw pub(super)
+ kw self::
+ kw static
+ kw struct
+ kw trait
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ sn macro_rules
+ sn tfn (Test function)
+ sn tmod (Test module)
+ "#]],
+ )
+}
+
+#[test]
+fn in_qualified_path() {
+ check(
+ r#"crate::$0"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ "#]],
+ )
+}
+
+#[test]
+fn after_unsafe_token() {
+ check(
+ r#"unsafe $0"#,
+ expect![[r#"
+ kw fn
+ kw impl
+ kw trait
+ "#]],
+ );
+}
+
+#[test]
+fn after_visibility() {
+ check(
+ r#"pub $0"#,
+ expect![[r#"
+ kw const
+ kw enum
+ kw extern
+ kw fn
+ kw mod
+ kw static
+ kw struct
+ kw trait
+ kw type
+ kw union
+ kw unsafe
+ kw use
+ "#]],
+ );
+}
+
+#[test]
+fn after_visibility_unsafe() {
+ check(
+ r#"pub unsafe $0"#,
+ expect![[r#"
+ kw fn
+ kw trait
+ "#]],
+ );
+}
+
+#[test]
+fn in_impl_assoc_item_list() {
+ check(
+ r#"impl Struct { $0 }"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ kw const
+ kw crate::
+ kw fn
+ kw pub
+ kw pub(crate)
+ kw pub(super)
+ kw self::
+ kw unsafe
+ "#]],
+ )
+}
+
+#[test]
+fn in_impl_assoc_item_list_after_attr() {
+ check(
+ r#"impl Struct { #[attr] $0 }"#,
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ kw const
+ kw crate::
+ kw fn
+ kw pub
+ kw pub(crate)
+ kw pub(super)
+ kw self::
+ kw unsafe
+ "#]],
+ )
+}
+
+#[test]
+fn in_trait_assoc_item_list() {
+ check(
+ r"trait Foo { $0 }",
+ expect![[r#"
+ ma makro!(…) macro_rules! makro
+ md module
+ kw const
+ kw crate::
+ kw fn
+ kw self::
+ kw type
+ kw unsafe
+ "#]],
+ );
+}
+
+#[test]
+fn in_trait_impl_assoc_item_list() {
+ check(
+ r#"
+trait Test {
+ type Type0;
+ type Type1;
+ const CONST0: ();
+ const CONST1: ();
+ fn function0();
+ fn function1();
+}
+
+impl Test for () {
+ type Type0 = ();
+ const CONST0: () = ();
+ fn function0() {}
+ $0
+}
+"#,
+ expect![[r#"
+ ct const CONST1: () =
+ fn fn function1()
+ ma makro!(…) macro_rules! makro
+ md module
+ ta type Type1 =
+ kw crate::
+ kw self::
+ "#]],
+ );
+}
++
++#[test]
++fn after_unit_struct() {
++ check(
++ r#"struct S; f$0"#,
++ expect![[r#"
++ ma makro!(…) macro_rules! makro
++ md module
++ kw const
++ kw crate::
++ kw enum
++ kw extern
++ kw fn
++ kw impl
++ kw mod
++ kw pub
++ kw pub(crate)
++ kw pub(super)
++ kw self::
++ kw static
++ kw struct
++ kw trait
++ kw type
++ kw union
++ kw unsafe
++ kw use
++ sn macro_rules
++ sn tfn (Test function)
++ sn tmod (Test module)
++ "#]],
++ );
++}
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "ide-db"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+tracing = "0.1.35"
+rayon = "1.5.3"
+fst = { version = "0.4.7", default-features = false }
+rustc-hash = "1.1.0"
+once_cell = "1.15.0"
+either = "1.7.0"
+itertools = "0.10.5"
+arrayvec = "0.7.2"
+indexmap = "1.9.1"
+memchr = "2.5.0"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+parser = { path = "../parser", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+text-edit = { path = "../text-edit", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+# ide should depend only on the top-level `hir` package. if you need
+# something from some `hir-xxx` subpackage, reexport the API via `hir`.
+hir = { path = "../hir", version = "0.0.0" }
+limit = { path = "../limit", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
+sourcegen = { path = "../sourcegen" }
+xshell = "0.2.2"
+expect-test = "1.4.0"
--- /dev/null
- let mut newlines = vec![0.into()];
- let mut curr_row @ mut curr_col = 0.into();
+//! `LineIndex` maps flat `TextSize` offsets into `(Line, Column)`
+//! representation.
+use std::{iter, mem};
+
+use stdx::hash::NoHashHashMap;
+use syntax::{TextRange, TextSize};
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct LineIndex {
+ /// Offset to the beginning of each line, zero-based
+ pub(crate) newlines: Vec<TextSize>,
+ /// List of non-ASCII characters on each line
+ pub(crate) utf16_lines: NoHashHashMap<u32, Vec<Utf16Char>>,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct LineColUtf16 {
+ /// Zero-based
+ pub line: u32,
+ /// Zero-based
+ pub col: u32,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct LineCol {
+ /// Zero-based
+ pub line: u32,
+ /// Zero-based utf8 offset
+ pub col: u32,
+}
+
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
+pub(crate) struct Utf16Char {
+ /// Start offset of a character inside a line, zero-based
+ pub(crate) start: TextSize,
+ /// End offset of a character inside a line, zero-based
+ pub(crate) end: TextSize,
+}
+
+impl Utf16Char {
+ /// Returns the length in 8-bit UTF-8 code units.
+ fn len(&self) -> TextSize {
+ self.end - self.start
+ }
+
+ /// Returns the length in 16-bit UTF-16 code units.
+ fn len_utf16(&self) -> usize {
+ if self.len() == TextSize::from(4) {
+ 2
+ } else {
+ 1
+ }
+ }
+}
+
+impl LineIndex {
+ pub fn new(text: &str) -> LineIndex {
+ let mut utf16_lines = NoHashHashMap::default();
+ let mut utf16_chars = Vec::new();
+
++ let mut newlines = Vec::with_capacity(16);
++ newlines.push(TextSize::from(0));
++
++ let mut curr_row = 0.into();
++ let mut curr_col = 0.into();
+ let mut line = 0;
+ for c in text.chars() {
+ let c_len = TextSize::of(c);
+ curr_row += c_len;
+ if c == '\n' {
+ newlines.push(curr_row);
+
+ // Save any utf-16 characters seen in the previous line
+ if !utf16_chars.is_empty() {
+ utf16_lines.insert(line, mem::take(&mut utf16_chars));
+ }
+
+ // Prepare for processing the next line
+ curr_col = 0.into();
+ line += 1;
+ continue;
+ }
+
+ if !c.is_ascii() {
+ utf16_chars.push(Utf16Char { start: curr_col, end: curr_col + c_len });
+ }
+
+ curr_col += c_len;
+ }
+
+ // Save any utf-16 characters seen in the last line
+ if !utf16_chars.is_empty() {
+ utf16_lines.insert(line, utf16_chars);
+ }
+
+ LineIndex { newlines, utf16_lines }
+ }
+
+ pub fn line_col(&self, offset: TextSize) -> LineCol {
+ let line = self.newlines.partition_point(|&it| it <= offset) - 1;
+ let line_start_offset = self.newlines[line];
+ let col = offset - line_start_offset;
+ LineCol { line: line as u32, col: col.into() }
+ }
+
+ pub fn offset(&self, line_col: LineCol) -> Option<TextSize> {
+ self.newlines
+ .get(line_col.line as usize)
+ .map(|offset| offset + TextSize::from(line_col.col))
+ }
+
+ pub fn to_utf16(&self, line_col: LineCol) -> LineColUtf16 {
+ let col = self.utf8_to_utf16_col(line_col.line, line_col.col.into());
+ LineColUtf16 { line: line_col.line, col: col as u32 }
+ }
+
+ pub fn to_utf8(&self, line_col: LineColUtf16) -> LineCol {
+ let col = self.utf16_to_utf8_col(line_col.line, line_col.col);
+ LineCol { line: line_col.line, col: col.into() }
+ }
+
+ pub fn lines(&self, range: TextRange) -> impl Iterator<Item = TextRange> + '_ {
+ let lo = self.newlines.partition_point(|&it| it < range.start());
+ let hi = self.newlines.partition_point(|&it| it <= range.end());
+ let all = iter::once(range.start())
+ .chain(self.newlines[lo..hi].iter().copied())
+ .chain(iter::once(range.end()));
+
+ all.clone()
+ .zip(all.skip(1))
+ .map(|(lo, hi)| TextRange::new(lo, hi))
+ .filter(|it| !it.is_empty())
+ }
+
+ fn utf8_to_utf16_col(&self, line: u32, col: TextSize) -> usize {
+ let mut res: usize = col.into();
+ if let Some(utf16_chars) = self.utf16_lines.get(&line) {
+ for c in utf16_chars {
+ if c.end <= col {
+ res -= usize::from(c.len()) - c.len_utf16();
+ } else {
+ // From here on, all utf16 characters come *after* the character we are mapping,
+ // so we don't need to take them into account
+ break;
+ }
+ }
+ }
+ res
+ }
+
+ fn utf16_to_utf8_col(&self, line: u32, mut col: u32) -> TextSize {
+ if let Some(utf16_chars) = self.utf16_lines.get(&line) {
+ for c in utf16_chars {
+ if col > u32::from(c.start) {
+ col += u32::from(c.len()) - c.len_utf16() as u32;
+ } else {
+ // From here on, all utf16 characters come *after* the character we are mapping,
+ // so we don't need to take them into account
+ break;
+ }
+ }
+ }
+
+ col.into()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_line_index() {
+ let text = "hello\nworld";
+ let table = [
+ (00, 0, 0),
+ (01, 0, 1),
+ (05, 0, 5),
+ (06, 1, 0),
+ (07, 1, 1),
+ (08, 1, 2),
+ (10, 1, 4),
+ (11, 1, 5),
+ (12, 1, 6),
+ ];
+
+ let index = LineIndex::new(text);
+ for &(offset, line, col) in &table {
+ assert_eq!(index.line_col(offset.into()), LineCol { line, col });
+ }
+
+ let text = "\nhello\nworld";
+ let table = [(0, 0, 0), (1, 1, 0), (2, 1, 1), (6, 1, 5), (7, 2, 0)];
+ let index = LineIndex::new(text);
+ for &(offset, line, col) in &table {
+ assert_eq!(index.line_col(offset.into()), LineCol { line, col });
+ }
+ }
+
+ #[test]
+ fn test_char_len() {
+ assert_eq!('メ'.len_utf8(), 3);
+ assert_eq!('メ'.len_utf16(), 1);
+ }
+
+ #[test]
+ fn test_empty_index() {
+ let col_index = LineIndex::new(
+ "
+const C: char = 'x';
+",
+ );
+ assert_eq!(col_index.utf16_lines.len(), 0);
+ }
+
+ #[test]
+ fn test_single_char() {
+ let col_index = LineIndex::new(
+ "
+const C: char = 'メ';
+",
+ );
+
+ assert_eq!(col_index.utf16_lines.len(), 1);
+ assert_eq!(col_index.utf16_lines[&1].len(), 1);
+ assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });
+
+ // UTF-8 to UTF-16, no changes
+ assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);
+
+ // UTF-8 to UTF-16
+ assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20);
+
+ // UTF-16 to UTF-8, no changes
+ assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));
+
+ // UTF-16 to UTF-8
+ assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21));
+
+ let col_index = LineIndex::new("a𐐏b");
+ assert_eq!(col_index.utf16_to_utf8_col(0, 3), TextSize::from(5));
+ }
+
+ #[test]
+ fn test_string() {
+ let col_index = LineIndex::new(
+ "
+const C: char = \"メ メ\";
+",
+ );
+
+ assert_eq!(col_index.utf16_lines.len(), 1);
+ assert_eq!(col_index.utf16_lines[&1].len(), 2);
+ assert_eq!(col_index.utf16_lines[&1][0], Utf16Char { start: 17.into(), end: 20.into() });
+ assert_eq!(col_index.utf16_lines[&1][1], Utf16Char { start: 21.into(), end: 24.into() });
+
+ // UTF-8 to UTF-16
+ assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);
+
+ assert_eq!(col_index.utf8_to_utf16_col(1, 21.into()), 19);
+ assert_eq!(col_index.utf8_to_utf16_col(1, 25.into()), 21);
+
+ assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15);
+
+ // UTF-16 to UTF-8
+ assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextSize::from(15));
+
+ // メ UTF-8: 0xE3 0x83 0xA1, UTF-16: 0x30E1
+ assert_eq!(col_index.utf16_to_utf8_col(1, 17), TextSize::from(17)); // first メ at 17..20
+ assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextSize::from(20)); // space
+ assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextSize::from(21)); // second メ at 21..24
+
+ assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextSize::from(15));
+ }
+
+ #[test]
+ fn test_splitlines() {
+ fn r(lo: u32, hi: u32) -> TextRange {
+ TextRange::new(lo.into(), hi.into())
+ }
+
+ let text = "a\nbb\nccc\n";
+ let line_index = LineIndex::new(text);
+
+ let actual = line_index.lines(r(0, 9)).collect::<Vec<_>>();
+ let expected = vec![r(0, 2), r(2, 5), r(5, 9)];
+ assert_eq!(actual, expected);
+
+ let text = "";
+ let line_index = LineIndex::new(text);
+
+ let actual = line_index.lines(r(0, 0)).collect::<Vec<_>>();
+ let expected = vec![];
+ assert_eq!(actual, expected);
+
+ let text = "\n";
+ let line_index = LineIndex::new(text);
+
+ let actual = line_index.lines(r(0, 1)).collect::<Vec<_>>();
+ let expected = vec![r(0, 1)];
+ assert_eq!(actual, expected)
+ }
+}
--- /dev/null
- (State::Ident | State::Expr, '}') => {
- if inexpr_open_count == 0 {
- output.push(chr);
-
- if matches!(state, State::Expr) {
- extracted_expressions.push(Arg::Expr(current_expr.trim().into()));
- } else {
- extracted_expressions.push(Arg::Ident(current_expr.trim().into()));
- }
-
- current_expr = String::new();
- state = State::NotArg;
- } else {
- // We're closing one brace met before inside of the expression.
- current_expr.push(chr);
- inexpr_open_count -= 1;
- }
- }
+//! Tools to work with expressions present in format string literals for the `format_args!` family of macros.
+//! Primarily meant for assists and completions.
+
+/// Enum for representing extracted format string args.
+/// Can either be extracted expressions (which includes identifiers),
+/// or placeholders `{}`.
+#[derive(Debug, PartialEq, Eq)]
+pub enum Arg {
+ Placeholder,
+ Ident(String),
+ Expr(String),
+}
+
+/**
+ Add placeholders like `$1` and `$2` in place of [`Arg::Placeholder`],
+ and unwraps the [`Arg::Ident`] and [`Arg::Expr`] enums.
+ ```rust
+ # use ide_db::syntax_helpers::format_string_exprs::*;
+ assert_eq!(with_placeholders(vec![Arg::Ident("ident".to_owned()), Arg::Placeholder, Arg::Expr("expr + 2".to_owned())]), vec!["ident".to_owned(), "$1".to_owned(), "expr + 2".to_owned()])
+ ```
+*/
+
+pub fn with_placeholders(args: Vec<Arg>) -> Vec<String> {
+ let mut placeholder_id = 1;
+ args.into_iter()
+ .map(move |a| match a {
+ Arg::Expr(s) | Arg::Ident(s) => s,
+ Arg::Placeholder => {
+ let s = format!("${placeholder_id}");
+ placeholder_id += 1;
+ s
+ }
+ })
+ .collect()
+}
+
+/**
+ Parser for a format-like string. It is more permissive in terms of string contents,
+ as we expect variable placeholders to be filled with expressions.
+
+ Built for completions and assists, and escapes `\` and `$` in output.
+ (See the comments on `get_receiver_text()` for detail.)
+ Splits a format string that may contain expressions
+ like
+ ```rust
+ assert_eq!(parse("{ident} {} {expr + 42} ").unwrap(), ("{} {} {}", vec![Arg::Ident("ident"), Arg::Placeholder, Arg::Expr("expr + 42")]));
+ ```
+*/
+pub fn parse_format_exprs(input: &str) -> Result<(String, Vec<Arg>), ()> {
+ #[derive(Debug, Clone, Copy, PartialEq)]
+ enum State {
+ NotArg,
+ MaybeArg,
+ Expr,
+ Ident,
+ MaybeIncorrect,
+ FormatOpts,
+ }
+
+ let mut state = State::NotArg;
+ let mut current_expr = String::new();
+ let mut extracted_expressions = Vec::new();
+ let mut output = String::new();
+
+ // Count of open braces inside of an expression.
+ // We assume that user knows what they're doing, thus we treat it like a correct pattern, e.g.
+ // "{MyStruct { val_a: 0, val_b: 1 }}".
+ let mut inexpr_open_count = 0;
+
+ let mut chars = input.chars().peekable();
+ while let Some(chr) = chars.next() {
+ match (state, chr) {
+ (State::NotArg, '{') => {
+ output.push(chr);
+ state = State::MaybeArg;
+ }
+ (State::NotArg, '}') => {
+ output.push(chr);
+ state = State::MaybeIncorrect;
+ }
+ (State::NotArg, _) => {
+ if matches!(chr, '\\' | '$') {
+ output.push('\\');
+ }
+ output.push(chr);
+ }
+ (State::MaybeIncorrect, '}') => {
+ // It's okay, we met "}}".
+ output.push(chr);
+ state = State::NotArg;
+ }
+ (State::MaybeIncorrect, _) => {
+ // Error in the string.
+ return Err(());
+ }
+ // Escaped braces `{{`
+ (State::MaybeArg, '{') => {
+ output.push(chr);
+ state = State::NotArg;
+ }
+ (State::MaybeArg, '}') => {
+ // This is an empty sequence '{}'.
+ output.push(chr);
+ extracted_expressions.push(Arg::Placeholder);
+ state = State::NotArg;
+ }
++ (State::MaybeArg, ':') => {
++ output.push(chr);
++ extracted_expressions.push(Arg::Placeholder);
++ state = State::FormatOpts;
++ }
+ (State::MaybeArg, _) => {
+ if matches!(chr, '\\' | '$') {
+ current_expr.push('\\');
+ }
+ current_expr.push(chr);
+
+ // While Rust uses the unicode sets of XID_start and XID_continue for Identifiers
+ // this is probably the best we can do to avoid a false positive
+ if chr.is_alphabetic() || chr == '_' {
+ state = State::Ident;
+ } else {
+ state = State::Expr;
+ }
+ }
- (State::Ident | State::Expr, ':') => {
+ (State::Ident | State::Expr, ':') if matches!(chars.peek(), Some(':')) => {
+ // path separator
+ state = State::Expr;
+ current_expr.push_str("::");
+ chars.next();
+ }
- // We're outside of braces, thus assume that it's a specifier, like "{Some(value):?}"
- output.push(chr);
-
- if matches!(state, State::Expr) {
- extracted_expressions.push(Arg::Expr(current_expr.trim().into()));
++ (State::Ident | State::Expr, ':' | '}') => {
+ if inexpr_open_count == 0 {
- extracted_expressions.push(Arg::Ident(current_expr.trim().into()));
++ let trimmed = current_expr.trim();
++
++ // if the expression consists of a single number, like "0" or "12", it can refer to
++ // format args in the order they are specified.
++ // see: https://doc.rust-lang.org/std/fmt/#positional-parameters
++ if trimmed.chars().fold(true, |only_num, c| c.is_ascii_digit() && only_num) {
++ output.push_str(trimmed);
++ } else if matches!(state, State::Expr) {
++ extracted_expressions.push(Arg::Expr(trimmed.into()));
+ } else {
- current_expr = String::new();
- state = State::FormatOpts;
- } else {
++ extracted_expressions.push(Arg::Ident(trimmed.into()));
+ }
+
++ output.push(chr);
++ current_expr.clear();
++ state = if chr == ':' {
++ State::FormatOpts
++ } else if chr == '}' {
++ State::NotArg
++ } else {
++ unreachable!()
++ };
++ } else if chr == '}' {
++ // We're closing one brace met before inside of the expression.
++ current_expr.push(chr);
++ inexpr_open_count -= 1;
++ } else if chr == ':' {
+ // We're inside of braced expression, assume that it's a struct field name/value delimiter.
+ current_expr.push(chr);
+ }
+ }
+ (State::Ident | State::Expr, '{') => {
+ state = State::Expr;
+ current_expr.push(chr);
+ inexpr_open_count += 1;
+ }
+ (State::Ident | State::Expr, _) => {
+ if !(chr.is_alphanumeric() || chr == '_' || chr == '#') {
+ state = State::Expr;
+ }
+
+ if matches!(chr, '\\' | '$') {
+ current_expr.push('\\');
+ }
+ current_expr.push(chr);
+ }
+ (State::FormatOpts, '}') => {
+ output.push(chr);
+ state = State::NotArg;
+ }
+ (State::FormatOpts, _) => {
+ if matches!(chr, '\\' | '$') {
+ output.push('\\');
+ }
+ output.push(chr);
+ }
+ }
+ }
+
+ if state != State::NotArg {
+ return Err(());
+ }
+
+ Ok((output, extracted_expressions))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use expect_test::{expect, Expect};
+
+ fn check(input: &str, expect: &Expect) {
+ let (output, exprs) = parse_format_exprs(input).unwrap_or(("-".to_string(), vec![]));
+ let outcome_repr = if !exprs.is_empty() {
+ format!("{}; {}", output, with_placeholders(exprs).join(", "))
+ } else {
+ output
+ };
+
+ expect.assert_eq(&outcome_repr);
+ }
+
+ #[test]
+ fn format_str_parser() {
+ let test_vector = &[
+ ("no expressions", expect![["no expressions"]]),
+ (r"no expressions with \$0$1", expect![r"no expressions with \\\$0\$1"]),
+ ("{expr} is {2 + 2}", expect![["{} is {}; expr, 2 + 2"]]),
+ ("{expr:?}", expect![["{:?}; expr"]]),
+ ("{expr:1$}", expect![[r"{:1\$}; expr"]]),
++ ("{:1$}", expect![[r"{:1\$}; $1"]]),
++ ("{:>padding$}", expect![[r"{:>padding\$}; $1"]]),
++ ("{}, {}, {0}", expect![[r"{}, {}, {0}; $1, $2"]]),
++ ("{}, {}, {0:b}", expect![[r"{}, {}, {0:b}; $1, $2"]]),
+ ("{$0}", expect![[r"{}; \$0"]]),
+ ("{malformed", expect![["-"]]),
+ ("malformed}", expect![["-"]]),
+ ("{{correct", expect![["{{correct"]]),
+ ("correct}}", expect![["correct}}"]]),
+ ("{correct}}}", expect![["{}}}; correct"]]),
+ ("{correct}}}}}", expect![["{}}}}}; correct"]]),
+ ("{incorrect}}", expect![["-"]]),
+ ("placeholders {} {}", expect![["placeholders {} {}; $1, $2"]]),
+ ("mixed {} {2 + 2} {}", expect![["mixed {} {} {}; $1, 2 + 2, $2"]]),
+ (
+ "{SomeStruct { val_a: 0, val_b: 1 }}",
+ expect![["{}; SomeStruct { val_a: 0, val_b: 1 }"]],
+ ),
+ ("{expr:?} is {2.32f64:.5}", expect![["{:?} is {:.5}; expr, 2.32f64"]]),
+ (
+ "{SomeStruct { val_a: 0, val_b: 1 }:?}",
+ expect![["{:?}; SomeStruct { val_a: 0, val_b: 1 }"]],
+ ),
+ ("{ 2 + 2 }", expect![["{}; 2 + 2"]]),
+ ("{strsim::jaro_winkle(a)}", expect![["{}; strsim::jaro_winkle(a)"]]),
+ ("{foo::bar::baz()}", expect![["{}; foo::bar::baz()"]]),
+ ("{foo::bar():?}", expect![["{:?}; foo::bar()"]]),
+ ];
+
+ for (input, output) in test_vector {
+ check(input, output)
+ }
+ }
+
+ #[test]
+ fn arg_type() {
+ assert_eq!(
+ parse_format_exprs("{_ident} {r#raw_ident} {expr.obj} {name {thing: 42} } {}")
+ .unwrap()
+ .1,
+ vec![
+ Arg::Ident("_ident".to_owned()),
+ Arg::Ident("r#raw_ident".to_owned()),
+ Arg::Expr("expr.obj".to_owned()),
+ Arg::Expr("name {thing: 42}".to_owned()),
+ Arg::Placeholder
+ ]
+ );
+ }
+}
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "ide-diagnostics"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+either = "1.7.0"
+itertools = "0.10.5"
+serde_json = "1.0.86"
+
+profile = { path = "../profile", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+text-edit = { path = "../text-edit", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+hir = { path = "../hir", version = "0.0.0" }
+ide-db = { path = "../ide-db", version = "0.0.0" }
+
+[dev-dependencies]
+expect-test = "1.4.0"
+
+test-utils = { path = "../test-utils" }
+sourcegen = { path = "../sourcegen" }
+
+[features]
+in-rust-tree = []
--- /dev/null
- let display_range = d
- .precise_location
- .unwrap_or_else(|| ctx.sema.diagnostics_display_range(d.node.clone()).range);
-
+use crate::{Diagnostic, DiagnosticsContext};
+
+// Diagnostic: macro-error
+//
+// This diagnostic is shown for macro expansion errors.
+pub(crate) fn macro_error(ctx: &DiagnosticsContext<'_>, d: &hir::MacroError) -> Diagnostic {
+ // Use more accurate position if available.
++ let display_range = ctx.resolve_precise_location(&d.node, d.precise_location);
+ Diagnostic::new("macro-error", d.message.clone(), display_range).experimental()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{
+ tests::{check_diagnostics, check_diagnostics_with_config},
+ DiagnosticsConfig,
+ };
+
+ #[test]
+ fn builtin_macro_fails_expansion() {
+ check_diagnostics(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! include { () => {} }
+
+#[rustc_builtin_macro]
+macro_rules! compile_error { () => {} }
+
+ include!("doesntexist");
+//^^^^^^^ error: failed to load file `doesntexist`
+
+ compile_error!("compile_error macro works");
+//^^^^^^^^^^^^^ error: compile_error macro works
+ "#,
+ );
+ }
+
+ #[test]
+ fn eager_macro_concat() {
+ // FIXME: this is incorrectly handling `$crate`, resulting in a wrong diagnostic.
+ // See: https://github.com/rust-lang/rust-analyzer/issues/10300
+
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:lib deps:core
+use core::{panic, concat};
+
+mod private {
+ pub use core::concat;
+}
+
+macro_rules! m {
+ () => {
+ panic!(concat!($crate::private::concat!("")));
+ };
+}
+
+fn f() {
+ m!();
+ //^^^^ error: unresolved macro `$crate::private::concat!`
+}
+
+//- /core.rs crate:core
+#[macro_export]
+#[rustc_builtin_macro]
+macro_rules! concat { () => {} }
+
+pub macro panic {
+ ($msg:expr) => (
+ $crate::panicking::panic_str($msg)
+ ),
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn include_macro_should_allow_empty_content() {
+ let mut config = DiagnosticsConfig::test_sample();
+
+ // FIXME: This is a false-positive, the file is actually linked in via
+ // `include!` macro
+ config.disabled.insert("unlinked-file".to_string());
+
+ check_diagnostics_with_config(
+ config,
+ r#"
+//- /lib.rs
+#[rustc_builtin_macro]
+macro_rules! include { () => {} }
+
+include!("foo/bar.rs");
+//- /foo/bar.rs
+// empty
+"#,
+ );
+ }
+
+ #[test]
+ fn good_out_dir_diagnostic() {
+ check_diagnostics(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! include { () => {} }
+#[rustc_builtin_macro]
+macro_rules! env { () => {} }
+#[rustc_builtin_macro]
+macro_rules! concat { () => {} }
+
+ include!(concat!(env!("OUT_DIR"), "/out.rs"));
+//^^^^^^^ error: `OUT_DIR` not set, enable "build scripts" to fix
+"#,
+ );
+ }
+
+ #[test]
+ fn register_attr_and_tool() {
+ cov_mark::check!(register_attr);
+ cov_mark::check!(register_tool);
+ check_diagnostics(
+ r#"
+#![register_tool(tool)]
+#![register_attr(attr)]
+
+#[tool::path]
+#[attr]
+struct S;
+"#,
+ );
+ // NB: we don't currently emit diagnostics here
+ }
+
+ #[test]
+ fn macro_diag_builtin() {
+ check_diagnostics(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! env {}
+
+#[rustc_builtin_macro]
+macro_rules! include {}
+
+#[rustc_builtin_macro]
+macro_rules! compile_error {}
+
+#[rustc_builtin_macro]
+macro_rules! format_args { () => {} }
+
+fn main() {
+ // Test a handful of built-in (eager) macros:
+
+ include!(invalid);
+ //^^^^^^^ error: could not convert tokens
+ include!("does not exist");
+ //^^^^^^^ error: failed to load file `does not exist`
+
+ env!(invalid);
+ //^^^ error: could not convert tokens
+
+ env!("OUT_DIR");
+ //^^^ error: `OUT_DIR` not set, enable "build scripts" to fix
+
+ compile_error!("compile_error works");
+ //^^^^^^^^^^^^^ error: compile_error works
+
+ // Lazy:
+
+ format_args!();
+ //^^^^^^^^^^^ error: no rule matches input tokens
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn macro_rules_diag() {
+ check_diagnostics(
+ r#"
+macro_rules! m {
+ () => {};
+}
+fn f() {
+ m!();
+
+ m!(hi);
+ //^ error: leftover tokens
+}
+ "#,
+ );
+ }
+ #[test]
+ fn dollar_crate_in_builtin_macro() {
+ check_diagnostics(
+ r#"
+#[macro_export]
+#[rustc_builtin_macro]
+macro_rules! format_args {}
+
+#[macro_export]
+macro_rules! arg { () => {} }
+
+#[macro_export]
+macro_rules! outer {
+ () => {
+ $crate::format_args!( "", $crate::arg!(1) )
+ };
+}
+
+fn f() {
+ outer!();
+} //^^^^^^^^ error: leftover tokens
+"#,
+ )
+ }
+}
--- /dev/null
- struct Foo {
+use hir::{db::AstDatabase, HasSource, HirDisplay, Semantics};
+use ide_db::{base_db::FileId, source_change::SourceChange, RootDatabase};
+use syntax::{
+ ast::{self, edit::IndentLevel, make},
+ AstNode,
+};
+use text_edit::TextEdit;
+
+use crate::{fix, Assist, Diagnostic, DiagnosticsContext};
+
+// Diagnostic: no-such-field
+//
+// This diagnostic is triggered if created structure does not have field provided in record.
+pub(crate) fn no_such_field(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Diagnostic {
+ Diagnostic::new(
+ "no-such-field",
+ "no such field",
+ ctx.sema.diagnostics_display_range(d.field.clone().map(|it| it.into())).range,
+ )
+ .with_fixes(fixes(ctx, d))
+}
+
+fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::NoSuchField) -> Option<Vec<Assist>> {
+ let root = ctx.sema.db.parse_or_expand(d.field.file_id)?;
+ missing_record_expr_field_fixes(
+ &ctx.sema,
+ d.field.file_id.original_file(ctx.sema.db),
+ &d.field.value.to_node(&root),
+ )
+}
+
+fn missing_record_expr_field_fixes(
+ sema: &Semantics<'_, RootDatabase>,
+ usage_file_id: FileId,
+ record_expr_field: &ast::RecordExprField,
+) -> Option<Vec<Assist>> {
+ let record_lit = ast::RecordExpr::cast(record_expr_field.syntax().parent()?.parent()?)?;
+ let def_id = sema.resolve_variant(record_lit)?;
+ let module;
+ let def_file_id;
+ let record_fields = match def_id {
+ hir::VariantDef::Struct(s) => {
+ module = s.module(sema.db);
+ let source = s.source(sema.db)?;
+ def_file_id = source.file_id;
+ let fields = source.value.field_list()?;
+ record_field_list(fields)?
+ }
+ hir::VariantDef::Union(u) => {
+ module = u.module(sema.db);
+ let source = u.source(sema.db)?;
+ def_file_id = source.file_id;
+ source.value.record_field_list()?
+ }
+ hir::VariantDef::Variant(e) => {
+ module = e.module(sema.db);
+ let source = e.source(sema.db)?;
+ def_file_id = source.file_id;
+ let fields = source.value.field_list()?;
+ record_field_list(fields)?
+ }
+ };
+ let def_file_id = def_file_id.original_file(sema.db);
+
+ let new_field_type = sema.type_of_expr(&record_expr_field.expr()?)?.adjusted();
+ if new_field_type.is_unknown() {
+ return None;
+ }
+ let new_field = make::record_field(
+ None,
+ make::name(&record_expr_field.field_name()?.ident_token()?.text()),
+ make::ty(&new_field_type.display_source_code(sema.db, module.into()).ok()?),
+ );
+
+ let last_field = record_fields.fields().last()?;
+ let last_field_syntax = last_field.syntax();
+ let indent = IndentLevel::from_node(last_field_syntax);
+
+ let mut new_field = new_field.to_string();
+ if usage_file_id != def_file_id {
+ new_field = format!("pub(crate) {}", new_field);
+ }
+ new_field = format!("\n{}{}", indent, new_field);
+
+ let needs_comma = !last_field_syntax.to_string().ends_with(',');
+ if needs_comma {
+ new_field = format!(",{}", new_field);
+ }
+
+ let source_change = SourceChange::from_text_edit(
+ def_file_id,
+ TextEdit::insert(last_field_syntax.text_range().end(), new_field),
+ );
+
+ return Some(vec![fix(
+ "create_field",
+ "Create field",
+ source_change,
+ record_expr_field.syntax().text_range(),
+ )]);
+
+ fn record_field_list(field_def_list: ast::FieldList) -> Option<ast::RecordFieldList> {
+ match field_def_list {
+ ast::FieldList::RecordFieldList(it) => Some(it),
+ ast::FieldList::TupleFieldList(_) => None,
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_diagnostics, check_fix, check_no_fix};
+
+ #[test]
+ fn no_such_field_diagnostics() {
+ check_diagnostics(
+ r#"
+struct S { foo: i32, bar: () }
+impl S {
+ fn new() -> S {
+ S {
+ //^ 💡 error: missing structure fields:
+ //| - bar
+ foo: 92,
+ baz: 62,
+ //^^^^^^^ 💡 error: no such field
+ }
+ }
+}
+"#,
+ );
+ }
+ #[test]
+ fn no_such_field_with_feature_flag_diagnostics() {
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo
+struct MyStruct {
+ my_val: usize,
+ #[cfg(feature = "foo")]
+ bar: bool,
+}
+
+impl MyStruct {
+ #[cfg(feature = "foo")]
+ pub(crate) fn new(my_val: usize, bar: bool) -> Self {
+ Self { my_val, bar }
+ }
+ #[cfg(not(feature = "foo"))]
+ pub(crate) fn new(my_val: usize, _bar: bool) -> Self {
+ Self { my_val }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_such_field_enum_with_feature_flag_diagnostics() {
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo
+enum Foo {
+ #[cfg(not(feature = "foo"))]
+ Buz,
+ #[cfg(feature = "foo")]
+ Bar,
+ Baz
+}
+
+fn test_fn(f: Foo) {
+ match f {
+ Foo::Bar => {},
+ Foo::Baz => {},
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_such_field_with_feature_flag_diagnostics_on_struct_lit() {
+ check_diagnostics(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo
+struct S {
+ #[cfg(feature = "foo")]
+ foo: u32,
+ #[cfg(not(feature = "foo"))]
+ bar: u32,
+}
+
+impl S {
+ #[cfg(feature = "foo")]
+ fn new(foo: u32) -> Self {
+ Self { foo }
+ }
+ #[cfg(not(feature = "foo"))]
+ fn new(bar: u32) -> Self {
+ Self { bar }
+ }
+ fn new2(bar: u32) -> Self {
+ #[cfg(feature = "foo")]
+ { Self { foo: bar } }
+ #[cfg(not(feature = "foo"))]
+ { Self { bar } }
+ }
+ fn new2(val: u32) -> Self {
+ Self {
+ #[cfg(feature = "foo")]
+ foo: val,
+ #[cfg(not(feature = "foo"))]
+ bar: val,
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn no_such_field_with_type_macro() {
+ check_diagnostics(
+ r#"
+macro_rules! Type { () => { u32 }; }
+struct Foo { bar: Type![] }
+
+impl Foo {
+ fn new() -> Self {
+ Foo { bar: 0 }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_add_field_from_usage() {
+ check_fix(
+ r"
+fn main() {
+ Foo { bar: 3, baz$0: false};
+}
+struct Foo {
+ bar: i32
+}
+",
+ r"
+fn main() {
+ Foo { bar: 3, baz: false};
+}
+struct Foo {
+ bar: i32,
+ baz: bool
+}
+",
+ )
+ }
+
+ #[test]
+ fn test_add_field_in_other_file_from_usage() {
+ check_fix(
+ r#"
+//- /main.rs
+mod foo;
+
+fn main() {
+ foo::Foo { bar: 3, $0baz: false};
+}
+//- /foo.rs
- struct Foo {
++pub struct Foo {
+ bar: i32
+}
+"#,
+ r#"
++pub struct Foo {
+ bar: i32,
+ pub(crate) baz: bool
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn test_tuple_field_on_record_struct() {
+ check_no_fix(
+ r#"
+struct Struct {}
+fn main() {
+ Struct {
+ 0$0: 0
+ }
+}
+"#,
+ )
+ }
+}
--- /dev/null
- let display_range = d
- .precise_location
- .unwrap_or_else(|| ctx.sema.diagnostics_display_range(d.macro_call.clone()).range);
-
+use crate::{Diagnostic, DiagnosticsContext};
+
+// Diagnostic: unresolved-macro-call
+//
+// This diagnostic is triggered if rust-analyzer is unable to resolve the path
+// to a macro in a macro invocation.
+pub(crate) fn unresolved_macro_call(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnresolvedMacroCall,
+) -> Diagnostic {
+ // Use more accurate position if available.
++ let display_range = ctx.resolve_precise_location(&d.macro_call, d.precise_location);
+ let bang = if d.is_bang { "!" } else { "" };
+ Diagnostic::new(
+ "unresolved-macro-call",
+ format!("unresolved macro `{}{}`", d.path, bang),
+ display_range,
+ )
+ .experimental()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::check_diagnostics;
+
+ #[test]
+ fn unresolved_macro_diag() {
+ check_diagnostics(
+ r#"
+fn f() {
+ m!();
+} //^ error: unresolved macro `m!`
+
+"#,
+ );
+ }
+
+ #[test]
+ fn test_unresolved_macro_range() {
+ check_diagnostics(
+ r#"
+foo::bar!(92);
+ //^^^ error: unresolved macro `foo::bar!`
+"#,
+ );
+ }
+
+ #[test]
+ fn unresolved_legacy_scope_macro() {
+ check_diagnostics(
+ r#"
+macro_rules! m { () => {} }
+
+m!(); m2!();
+ //^^ error: unresolved macro `m2!`
+"#,
+ );
+ }
+
+ #[test]
+ fn unresolved_module_scope_macro() {
+ check_diagnostics(
+ r#"
+mod mac {
+#[macro_export]
+macro_rules! m { () => {} } }
+
+self::m!(); self::m2!();
+ //^^ error: unresolved macro `self::m2!`
+"#,
+ );
+ }
+}
--- /dev/null
- use syntax::NodeOrToken;
+use hir::db::DefDatabase;
- let display_range = (|| {
- let precise_location = d.precise_location?;
- let root = ctx.sema.parse_or_expand(d.node.file_id)?;
- match root.covering_element(precise_location) {
- NodeOrToken::Node(it) => Some(ctx.sema.original_range(&it)),
- NodeOrToken::Token(it) => d.node.with_value(it).original_file_range_opt(ctx.sema.db),
- }
- })()
- .unwrap_or_else(|| ctx.sema.diagnostics_display_range(d.node.clone()))
- .range;
+
+use crate::{Diagnostic, DiagnosticsContext, Severity};
+
+// Diagnostic: unresolved-proc-macro
+//
+// This diagnostic is shown when a procedural macro can not be found. This usually means that
+// procedural macro support is simply disabled (and hence is only a weak hint instead of an error),
+// but can also indicate project setup problems.
+//
+// If you are seeing a lot of "proc macro not expanded" warnings, you can add this option to the
+// `rust-analyzer.diagnostics.disabled` list to prevent them from showing. Alternatively you can
+// enable support for procedural macros (see `rust-analyzer.procMacro.attributes.enable`).
+pub(crate) fn unresolved_proc_macro(
+ ctx: &DiagnosticsContext<'_>,
+ d: &hir::UnresolvedProcMacro,
+ proc_macros_enabled: bool,
+ proc_attr_macros_enabled: bool,
+) -> Diagnostic {
+ // Use more accurate position if available.
++ let display_range = ctx.resolve_precise_location(&d.node, d.precise_location);
+
+ let config_enabled = match d.kind {
+ hir::MacroKind::Attr => proc_macros_enabled && proc_attr_macros_enabled,
+ _ => proc_macros_enabled,
+ };
+
+ let message = match &d.macro_name {
+ Some(name) => format!("proc macro `{}` not expanded", name),
+ None => "proc macro not expanded".to_string(),
+ };
+ let severity = if config_enabled { Severity::Error } else { Severity::WeakWarning };
+ let def_map = ctx.sema.db.crate_def_map(d.krate);
+ let message = format!(
+ "{message}: {}",
+ if config_enabled {
+ match def_map.proc_macro_loading_error() {
+ Some(e) => e,
+ None => "proc macro not found in the built dylib",
+ }
+ } else {
+ match d.kind {
+ hir::MacroKind::Attr if proc_macros_enabled => {
+ "attribute macro expansion is disabled"
+ }
+ _ => "proc-macro expansion is disabled",
+ }
+ },
+ );
+
+ Diagnostic::new("unresolved-proc-macro", message, display_range).severity(severity)
+}
--- /dev/null
- mod c {}
- mod d {
- mod e {}
+use ide_db::{base_db::FileId, source_change::SourceChange};
+use itertools::Itertools;
+use syntax::{ast, AstNode, SyntaxNode, TextRange};
+use text_edit::TextEdit;
+
+use crate::{fix, Diagnostic, Severity};
+
+// Diagnostic: unnecessary-braces
+//
+// Diagnostic for unnecessary braces in `use` items.
+pub(crate) fn useless_braces(
+ acc: &mut Vec<Diagnostic>,
+ file_id: FileId,
+ node: &SyntaxNode,
+) -> Option<()> {
+ let use_tree_list = ast::UseTreeList::cast(node.clone())?;
+ if let Some((single_use_tree,)) = use_tree_list.use_trees().collect_tuple() {
+ // If there is a comment inside the bracketed `use`,
+ // assume it is a commented out module path and don't show diagnostic.
+ if use_tree_list.has_inner_comment() {
+ return Some(());
+ }
+
+ let use_range = use_tree_list.syntax().text_range();
+ let edit = remove_braces(&single_use_tree).unwrap_or_else(|| {
+ let to_replace = single_use_tree.syntax().text().to_string();
+ let mut edit_builder = TextEdit::builder();
+ edit_builder.delete(use_range);
+ edit_builder.insert(use_range.start(), to_replace);
+ edit_builder.finish()
+ });
+
+ acc.push(
+ Diagnostic::new(
+ "unnecessary-braces",
+ "Unnecessary braces in use statement".to_string(),
+ use_range,
+ )
+ .severity(Severity::WeakWarning)
+ .with_fixes(Some(vec![fix(
+ "remove_braces",
+ "Remove unnecessary braces",
+ SourceChange::from_text_edit(file_id, edit),
+ use_range,
+ )])),
+ );
+ }
+
+ Some(())
+}
+
+fn remove_braces(single_use_tree: &ast::UseTree) -> Option<TextEdit> {
+ let use_tree_list_node = single_use_tree.syntax().parent()?;
+ if single_use_tree.path()?.segment()?.self_token().is_some() {
+ let start = use_tree_list_node.prev_sibling_or_token()?.text_range().start();
+ let end = use_tree_list_node.text_range().end();
+ return Some(TextEdit::delete(TextRange::new(start, end)));
+ }
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_diagnostics, check_fix};
+
+ #[test]
+ fn test_check_unnecessary_braces_in_use_statement() {
+ check_diagnostics(
+ r#"
+use a;
+use a::{c, d::e};
+
+mod a {
- mod c {}
- mod d {
- mod e {}
++ pub mod c {}
++ pub mod d {
++ pub mod e {}
+ }
+}
+"#,
+ );
+ check_diagnostics(
+ r#"
+use a;
+use a::{
+ c,
+ // d::e
+};
+
+mod a {
- mod a { mod c {} }
++ pub mod c {}
++ pub mod d {
++ pub mod e {}
+ }
+}
+"#,
+ );
+ check_fix(
+ r#"
+mod b {}
+use {$0b};
+"#,
+ r#"
+mod b {}
+use b;
+"#,
+ );
+ check_fix(
+ r#"
+mod b {}
+use {b$0};
+"#,
+ r#"
+mod b {}
+use b;
+"#,
+ );
+ check_fix(
+ r#"
- mod a { mod c {} }
++mod a { pub mod c {} }
+use a::{c$0};
+"#,
+ r#"
- mod a { mod c {} mod d { mod e {} } }
++mod a { pub mod c {} }
+use a::c;
+"#,
+ );
+ check_fix(
+ r#"
+mod a {}
+use a::{self$0};
+"#,
+ r#"
+mod a {}
+use a;
+"#,
+ );
+ check_fix(
+ r#"
- mod a { mod c {} mod d { mod e {} } }
++mod a { pub mod c {} pub mod d { pub mod e {} } }
+use a::{c, d::{e$0}};
+"#,
+ r#"
++mod a { pub mod c {} pub mod d { pub mod e {} } }
+use a::{c, d::e};
+"#,
+ );
+ }
+}
--- /dev/null
+//! Diagnostics rendering and fixits.
+//!
+//! Most of the diagnostics originate from the dark depth of the compiler, and
+//! are originally expressed in term of IR. When we emit the diagnostic, we are
+//! usually not in the position to decide how to best "render" it in terms of
+//! user-authored source code. We are especially not in the position to offer
+//! fixits, as the compiler completely lacks the infrastructure to edit the
+//! source code.
+//!
+//! Instead, we "bubble up" raw, structured diagnostics until the `hir` crate,
+//! where we "cook" them so that each diagnostic is formulated in terms of `hir`
+//! types. Well, at least that's the aspiration, the "cooking" is somewhat
+//! ad-hoc at the moment. Anyways, we get a bunch of ide-friendly diagnostic
+//! structs from hir, and we want to render them to unified serializable
+//! representation (span, level, message) here. If we can, we also provide
+//! fixits. By the way, that's why we want to keep diagnostics structured
+//! internally -- so that we have all the info to make fixes.
+//!
+//! We have one "handler" module per diagnostic code. Such a module contains
+//! rendering, optional fixes and tests. It's OK if some low-level compiler
+//! functionality ends up being tested via a diagnostic.
+//!
+//! There are also a couple of ad-hoc diagnostics implemented directly here, we
+//! don't yet have a great pattern for how to do them properly.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+mod handlers {
+ pub(crate) mod break_outside_of_loop;
+ pub(crate) mod inactive_code;
+ pub(crate) mod incorrect_case;
+ pub(crate) mod invalid_derive_target;
+ pub(crate) mod macro_error;
+ pub(crate) mod malformed_derive;
+ pub(crate) mod mismatched_arg_count;
+ pub(crate) mod missing_fields;
+ pub(crate) mod missing_match_arms;
+ pub(crate) mod missing_unsafe;
+ pub(crate) mod no_such_field;
+ pub(crate) mod replace_filter_map_next_with_find_map;
+ pub(crate) mod type_mismatch;
+ pub(crate) mod unimplemented_builtin_macro;
+ pub(crate) mod unresolved_extern_crate;
+ pub(crate) mod unresolved_import;
+ pub(crate) mod unresolved_macro_call;
+ pub(crate) mod unresolved_module;
+ pub(crate) mod unresolved_proc_macro;
+
+ // The handlers below are unusual, the implement the diagnostics as well.
+ pub(crate) mod field_shorthand;
+ pub(crate) mod useless_braces;
+ pub(crate) mod unlinked_file;
+ pub(crate) mod json_is_not_rust;
+}
+
+#[cfg(test)]
+mod tests;
+
+use hir::{diagnostics::AnyDiagnostic, InFile, Semantics};
+use ide_db::{
+ assists::{Assist, AssistId, AssistKind, AssistResolveStrategy},
+ base_db::{FileId, FileRange, SourceDatabase},
+ imports::insert_use::InsertUseConfig,
+ label::Label,
+ source_change::SourceChange,
+ FxHashSet, RootDatabase,
+};
+use syntax::{algo::find_node_at_range, ast::AstNode, SyntaxNodePtr, TextRange};
+
+#[derive(Copy, Clone, Debug, PartialEq)]
+pub struct DiagnosticCode(pub &'static str);
+
+impl DiagnosticCode {
+ pub fn as_str(&self) -> &str {
+ self.0
+ }
+}
+
+#[derive(Debug)]
+pub struct Diagnostic {
+ pub code: DiagnosticCode,
+ pub message: String,
+ pub range: TextRange,
+ pub severity: Severity,
+ pub unused: bool,
+ pub experimental: bool,
+ pub fixes: Option<Vec<Assist>>,
+}
+
+impl Diagnostic {
+ fn new(code: &'static str, message: impl Into<String>, range: TextRange) -> Diagnostic {
+ let message = message.into();
+ Diagnostic {
+ code: DiagnosticCode(code),
+ message,
+ range,
+ severity: Severity::Error,
+ unused: false,
+ experimental: false,
+ fixes: None,
+ }
+ }
+
+ fn experimental(mut self) -> Diagnostic {
+ self.experimental = true;
+ self
+ }
+
+ fn severity(mut self, severity: Severity) -> Diagnostic {
+ self.severity = severity;
+ self
+ }
+
+ fn with_fixes(mut self, fixes: Option<Vec<Assist>>) -> Diagnostic {
+ self.fixes = fixes;
+ self
+ }
+
+ fn with_unused(mut self, unused: bool) -> Diagnostic {
+ self.unused = unused;
+ self
+ }
+}
+
+#[derive(Debug, Copy, Clone)]
+pub enum Severity {
+ Error,
+ // We don't actually emit this one yet, but we should at some point.
+ // Warning,
+ WeakWarning,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum ExprFillDefaultMode {
+ Todo,
+ Default,
+}
+impl Default for ExprFillDefaultMode {
+ fn default() -> Self {
+ Self::Todo
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct DiagnosticsConfig {
+ pub proc_macros_enabled: bool,
+ pub proc_attr_macros_enabled: bool,
+ pub disable_experimental: bool,
+ pub disabled: FxHashSet<String>,
+ pub expr_fill_default: ExprFillDefaultMode,
+ // FIXME: We may want to include a whole `AssistConfig` here
+ pub insert_use: InsertUseConfig,
+ pub prefer_no_std: bool,
+}
+
+impl DiagnosticsConfig {
+ pub fn test_sample() -> Self {
+ use hir::PrefixKind;
+ use ide_db::imports::insert_use::ImportGranularity;
+
+ Self {
+ proc_macros_enabled: Default::default(),
+ proc_attr_macros_enabled: Default::default(),
+ disable_experimental: Default::default(),
+ disabled: Default::default(),
+ expr_fill_default: Default::default(),
+ insert_use: InsertUseConfig {
+ granularity: ImportGranularity::Preserve,
+ enforce_granularity: false,
+ prefix_kind: PrefixKind::Plain,
+ group: false,
+ skip_glob_imports: false,
+ },
+ prefer_no_std: false,
+ }
+ }
+}
+
+struct DiagnosticsContext<'a> {
+ config: &'a DiagnosticsConfig,
+ sema: Semantics<'a, RootDatabase>,
+ resolve: &'a AssistResolveStrategy,
+}
+
++impl<'a> DiagnosticsContext<'a> {
++ fn resolve_precise_location(
++ &self,
++ node: &InFile<SyntaxNodePtr>,
++ precise_location: Option<TextRange>,
++ ) -> TextRange {
++ let sema = &self.sema;
++ (|| {
++ let precise_location = precise_location?;
++ let root = sema.parse_or_expand(node.file_id)?;
++ match root.covering_element(precise_location) {
++ syntax::NodeOrToken::Node(it) => Some(sema.original_range(&it)),
++ syntax::NodeOrToken::Token(it) => {
++ node.with_value(it).original_file_range_opt(sema.db)
++ }
++ }
++ })()
++ .unwrap_or_else(|| sema.diagnostics_display_range(node.clone()))
++ .range
++ }
++}
++
+pub fn diagnostics(
+ db: &RootDatabase,
+ config: &DiagnosticsConfig,
+ resolve: &AssistResolveStrategy,
+ file_id: FileId,
+) -> Vec<Diagnostic> {
+ let _p = profile::span("diagnostics");
+ let sema = Semantics::new(db);
+ let parse = db.parse(file_id);
+ let mut res = Vec::new();
+
+ // [#34344] Only take first 128 errors to prevent slowing down editor/ide, the number 128 is chosen arbitrarily.
+ res.extend(
+ parse.errors().iter().take(128).map(|err| {
+ Diagnostic::new("syntax-error", format!("Syntax Error: {}", err), err.range())
+ }),
+ );
+
+ let parse = sema.parse(file_id);
+
+ for node in parse.syntax().descendants() {
+ handlers::useless_braces::useless_braces(&mut res, file_id, &node);
+ handlers::field_shorthand::field_shorthand(&mut res, file_id, &node);
+ handlers::json_is_not_rust::json_in_items(&sema, &mut res, file_id, &node, &config);
+ }
+
+ let module = sema.to_module_def(file_id);
+
+ let ctx = DiagnosticsContext { config, sema, resolve };
+ if module.is_none() {
+ handlers::unlinked_file::unlinked_file(&ctx, &mut res, file_id);
+ }
+
+ let mut diags = Vec::new();
+ if let Some(m) = module {
+ m.diagnostics(db, &mut diags)
+ }
+
+ for diag in diags {
+ #[rustfmt::skip]
+ let d = match diag {
+ AnyDiagnostic::BreakOutsideOfLoop(d) => handlers::break_outside_of_loop::break_outside_of_loop(&ctx, &d),
+ AnyDiagnostic::IncorrectCase(d) => handlers::incorrect_case::incorrect_case(&ctx, &d),
+ AnyDiagnostic::MacroError(d) => handlers::macro_error::macro_error(&ctx, &d),
+ AnyDiagnostic::MalformedDerive(d) => handlers::malformed_derive::malformed_derive(&ctx, &d),
+ AnyDiagnostic::MismatchedArgCount(d) => handlers::mismatched_arg_count::mismatched_arg_count(&ctx, &d),
+ AnyDiagnostic::MissingFields(d) => handlers::missing_fields::missing_fields(&ctx, &d),
+ AnyDiagnostic::MissingMatchArms(d) => handlers::missing_match_arms::missing_match_arms(&ctx, &d),
+ AnyDiagnostic::MissingUnsafe(d) => handlers::missing_unsafe::missing_unsafe(&ctx, &d),
+ AnyDiagnostic::NoSuchField(d) => handlers::no_such_field::no_such_field(&ctx, &d),
+ AnyDiagnostic::ReplaceFilterMapNextWithFindMap(d) => handlers::replace_filter_map_next_with_find_map::replace_filter_map_next_with_find_map(&ctx, &d),
+ AnyDiagnostic::TypeMismatch(d) => handlers::type_mismatch::type_mismatch(&ctx, &d),
+ AnyDiagnostic::UnimplementedBuiltinMacro(d) => handlers::unimplemented_builtin_macro::unimplemented_builtin_macro(&ctx, &d),
+ AnyDiagnostic::UnresolvedExternCrate(d) => handlers::unresolved_extern_crate::unresolved_extern_crate(&ctx, &d),
+ AnyDiagnostic::UnresolvedImport(d) => handlers::unresolved_import::unresolved_import(&ctx, &d),
+ AnyDiagnostic::UnresolvedMacroCall(d) => handlers::unresolved_macro_call::unresolved_macro_call(&ctx, &d),
+ AnyDiagnostic::UnresolvedModule(d) => handlers::unresolved_module::unresolved_module(&ctx, &d),
+ AnyDiagnostic::UnresolvedProcMacro(d) => handlers::unresolved_proc_macro::unresolved_proc_macro(&ctx, &d, config.proc_macros_enabled, config.proc_attr_macros_enabled),
+ AnyDiagnostic::InvalidDeriveTarget(d) => handlers::invalid_derive_target::invalid_derive_target(&ctx, &d),
+
+ AnyDiagnostic::InactiveCode(d) => match handlers::inactive_code::inactive_code(&ctx, &d) {
+ Some(it) => it,
+ None => continue,
+ }
+ };
+ res.push(d)
+ }
+
+ res.retain(|d| {
+ !ctx.config.disabled.contains(d.code.as_str())
+ && !(ctx.config.disable_experimental && d.experimental)
+ });
+
+ res
+}
+
+fn fix(id: &'static str, label: &str, source_change: SourceChange, target: TextRange) -> Assist {
+ let mut res = unresolved_fix(id, label, target);
+ res.source_change = Some(source_change);
+ res
+}
+
+fn unresolved_fix(id: &'static str, label: &str, target: TextRange) -> Assist {
+ assert!(!id.contains(' '));
+ Assist {
+ id: AssistId(id, AssistKind::QuickFix),
+ label: Label::new(label.to_string()),
+ group: None,
+ target,
+ source_change: None,
+ trigger_signature_help: false,
+ }
+}
+
+fn adjusted_display_range<N: AstNode>(
+ ctx: &DiagnosticsContext<'_>,
+ diag_ptr: InFile<SyntaxNodePtr>,
+ adj: &dyn Fn(N) -> Option<TextRange>,
+) -> TextRange {
+ let FileRange { file_id, range } = ctx.sema.diagnostics_display_range(diag_ptr);
+
+ let source_file = ctx.sema.db.parse(file_id);
+ find_node_at_range::<N>(&source_file.syntax_node(), range)
+ .filter(|it| it.syntax().text_range() == range)
+ .and_then(adj)
+ .unwrap_or(range)
+}
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "ide-ssr"
+version = "0.0.0"
+description = "Structural search and replace of Rust code"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/rust-analyzer"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+itertools = "0.10.5"
+
+text-edit = { path = "../text-edit", version = "0.0.0" }
+parser = { path = "../parser", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+ide-db = { path = "../ide-db", version = "0.0.0" }
+hir = { path = "../hir", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
+expect-test = "1.4.0"
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "ide"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+crossbeam-channel = "0.5.5"
+either = "1.7.0"
+itertools = "0.10.5"
+tracing = "0.1.35"
+oorandom = "11.1.3"
+pulldown-cmark-to-cmark = "10.0.4"
+pulldown-cmark = { version = "0.9.1", default-features = false }
+url = "2.3.1"
+dot = "0.1.4"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+text-edit = { path = "../text-edit", version = "0.0.0" }
+ide-db = { path = "../ide-db", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+ide-assists = { path = "../ide-assists", version = "0.0.0" }
+ide-diagnostics = { path = "../ide-diagnostics", version = "0.0.0" }
+ide-ssr = { path = "../ide-ssr", version = "0.0.0" }
+ide-completion = { path = "../ide-completion", version = "0.0.0" }
+
+# ide should depend only on the top-level `hir` package. if you need
+# something from some `hir-xxx` subpackage, reexport the API via `hir`.
+hir = { path = "../hir", version = "0.0.0" }
+
+[target.'cfg(not(any(target_arch = "wasm32", target_os = "emscripten")))'.dependencies]
+toolchain = { path = "../toolchain", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
+expect-test = "1.4.0"
+
+[features]
+in-rust-tree = ["ide-assists/in-rust-tree", "ide-diagnostics/in-rust-tree"]
--- /dev/null
- struct Foo;
- //^^^
+use std::mem::discriminant;
+
+use crate::{doc_links::token_as_doc_comment, FilePosition, NavigationTarget, RangeInfo, TryToNav};
+use hir::{AsAssocItem, AssocItem, Semantics};
+use ide_db::{
+ base_db::{AnchoredPath, FileId, FileLoader},
+ defs::{Definition, IdentClass},
+ helpers::pick_best_token,
+ RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{ast, AstNode, AstToken, SyntaxKind::*, SyntaxToken, TextRange, T};
+
+// Feature: Go to Definition
+//
+// Navigates to the definition of an identifier.
+//
+// For outline modules, this will navigate to the source file of the module.
+//
+// |===
+// | Editor | Shortcut
+//
+// | VS Code | kbd:[F12]
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113065563-025fbe00-91b1-11eb-83e4-a5a703610b23.gif[]
+pub(crate) fn goto_definition(
+ db: &RootDatabase,
+ position: FilePosition,
+) -> Option<RangeInfo<Vec<NavigationTarget>>> {
+ let sema = &Semantics::new(db);
+ let file = sema.parse(position.file_id).syntax().clone();
+ let original_token =
+ pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
+ IDENT
+ | INT_NUMBER
+ | LIFETIME_IDENT
+ | T![self]
+ | T![super]
+ | T![crate]
+ | T![Self]
+ | COMMENT => 4,
+ // index and prefix ops
+ T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] => 3,
+ kind if kind.is_keyword() => 2,
+ T!['('] | T![')'] => 2,
+ kind if kind.is_trivia() => 0,
+ _ => 1,
+ })?;
+ if let Some(doc_comment) = token_as_doc_comment(&original_token) {
+ return doc_comment.get_definition_with_descend_at(
+ sema,
+ position.offset,
+ |def, _, link_range| {
+ let nav = def.try_to_nav(db)?;
+ Some(RangeInfo::new(link_range, vec![nav]))
+ },
+ );
+ }
+ let navs = sema
+ .descend_into_macros(original_token.clone())
+ .into_iter()
+ .filter_map(|token| {
+ let parent = token.parent()?;
+ if let Some(tt) = ast::TokenTree::cast(parent) {
+ if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), position.file_id)
+ {
+ return Some(vec![x]);
+ }
+ }
+ Some(
+ IdentClass::classify_token(sema, &token)?
+ .definitions()
+ .into_iter()
+ .flat_map(|def| {
+ try_filter_trait_item_definition(sema, &def)
+ .unwrap_or_else(|| def_to_nav(sema.db, def))
+ })
+ .collect(),
+ )
+ })
+ .flatten()
+ .unique()
+ .collect::<Vec<NavigationTarget>>();
+
+ Some(RangeInfo::new(original_token.text_range(), navs))
+}
+
+fn try_lookup_include_path(
+ sema: &Semantics<'_, RootDatabase>,
+ tt: ast::TokenTree,
+ token: SyntaxToken,
+ file_id: FileId,
+) -> Option<NavigationTarget> {
+ let token = ast::String::cast(token)?;
+ let path = token.value()?.into_owned();
+ let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
+ let name = macro_call.path()?.segment()?.name_ref()?;
+ if !matches!(&*name.text(), "include" | "include_str" | "include_bytes") {
+ return None;
+ }
+
+ // Ignore non-built-in macros to account for shadowing
+ if let Some(it) = sema.resolve_macro_call(&macro_call) {
+ if !matches!(it.kind(sema.db), hir::MacroKind::BuiltIn) {
+ return None;
+ }
+ }
+
+ let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
+ let size = sema.db.file_text(file_id).len().try_into().ok()?;
+ Some(NavigationTarget {
+ file_id,
+ full_range: TextRange::new(0.into(), size),
+ name: path.into(),
+ focus_range: None,
+ kind: None,
+ container_name: None,
+ description: None,
+ docs: None,
+ })
+}
+/// finds the trait definition of an impl'd item, except function
+/// e.g.
+/// ```rust
+/// trait A { type a; }
+/// struct S;
+/// impl A for S { type a = i32; } // <-- on this associate type, will get the location of a in the trait
+/// ```
+fn try_filter_trait_item_definition(
+ sema: &Semantics<'_, RootDatabase>,
+ def: &Definition,
+) -> Option<Vec<NavigationTarget>> {
+ let db = sema.db;
+ let assoc = def.as_assoc_item(db)?;
+ match assoc {
+ AssocItem::Function(..) => None,
+ AssocItem::Const(..) | AssocItem::TypeAlias(..) => {
+ let imp = match assoc.container(db) {
+ hir::AssocItemContainer::Impl(imp) => imp,
+ _ => return None,
+ };
+ let trait_ = imp.trait_(db)?;
+ let name = def.name(db)?;
+ let discri_value = discriminant(&assoc);
+ trait_
+ .items(db)
+ .iter()
+ .filter(|itm| discriminant(*itm) == discri_value)
+ .find_map(|itm| (itm.name(db)? == name).then(|| itm.try_to_nav(db)).flatten())
+ .map(|it| vec![it])
+ }
+ }
+}
+
+fn def_to_nav(db: &RootDatabase, def: Definition) -> Vec<NavigationTarget> {
+ def.try_to_nav(db).map(|it| vec![it]).unwrap_or_default()
+}
+
+#[cfg(test)]
+mod tests {
+ use ide_db::base_db::FileRange;
+ use itertools::Itertools;
+
+ use crate::fixture;
+
+ #[track_caller]
+ fn check(ra_fixture: &str) {
+ let (analysis, position, expected) = fixture::annotations(ra_fixture);
+ let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info;
+
+ let cmp = |&FileRange { file_id, range }: &_| (file_id, range.start());
+ let navs = navs
+ .into_iter()
+ .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
+ .sorted_by_key(cmp)
+ .collect::<Vec<_>>();
+ let expected = expected
+ .into_iter()
+ .map(|(FileRange { file_id, range }, _)| FileRange { file_id, range })
+ .sorted_by_key(cmp)
+ .collect::<Vec<_>>();
+ assert_eq!(expected, navs);
+ }
+
+ fn check_unresolved(ra_fixture: &str) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info;
+
+ assert!(navs.is_empty(), "didn't expect this to resolve anywhere: {:?}", navs)
+ }
+
+ #[test]
+ fn goto_def_if_items_same_name() {
+ check(
+ r#"
+trait Trait {
+ type A;
+ const A: i32;
+ //^
+}
+
+struct T;
+impl Trait for T {
+ type A = i32;
+ const A$0: i32 = -9;
+}"#,
+ );
+ }
+ #[test]
+ fn goto_def_in_mac_call_in_attr_invoc() {
+ check(
+ r#"
+//- proc_macros: identity
+pub struct Struct {
+ // ^^^^^^
+ field: i32,
+}
+
+macro_rules! identity {
+ ($($tt:tt)*) => {$($tt)*};
+}
+
+#[proc_macros::identity]
+fn function() {
+ identity!(Struct$0 { field: 0 });
+}
+
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_extern_crate() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate std$0;
+//- /std/lib.rs crate:std
+// empty
+//^file
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_renamed_extern_crate() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate std as abc$0;
+//- /std/lib.rs crate:std
+// empty
+//^file
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_in_items() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+enum E { X(Foo$0) }
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_at_start_of_item() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+enum E { X($0Foo) }
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_resolves_correct_name() {
+ check(
+ r#"
+//- /lib.rs
+use a::Foo;
+mod a;
+mod b;
+enum E { X(Foo$0) }
+
+//- /a.rs
- struct Foo;
++pub struct Foo;
++ //^^^
+//- /b.rs
++pub struct Foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_module_declaration() {
+ check(
+ r#"
+//- /lib.rs
+mod $0foo;
+
+//- /foo.rs
+// empty
+//^file
+"#,
+ );
+
+ check(
+ r#"
+//- /lib.rs
+mod $0foo;
+
+//- /foo/mod.rs
+// empty
+//^file
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macros() {
+ check(
+ r#"
+macro_rules! foo { () => { () } }
+ //^^^
+fn bar() {
+ $0foo!();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macros_from_other_crates() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::foo;
+fn bar() {
+ $0foo!();
+}
+
+//- /foo/lib.rs crate:foo
+#[macro_export]
+macro_rules! foo { () => { () } }
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macros_in_use_tree() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::foo$0;
+
+//- /foo/lib.rs crate:foo
+#[macro_export]
+macro_rules! foo { () => { () } }
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macro_defined_fn_with_arg() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! define_fn {
+ ($name:ident) => (fn $name() {})
+}
+
+define_fn!(foo);
+ //^^^
+
+fn bar() {
+ $0foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macro_defined_fn_no_arg() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! define_fn {
+ () => (fn foo() {})
+}
+
+ define_fn!();
+//^^^^^^^^^^^^^
+
+fn bar() {
+ $0foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_works_for_macro_inside_pattern() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! foo {() => {0}}
+ //^^^
+
+fn bar() {
+ match (0,1) {
+ ($0foo!(), _) => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_works_for_macro_inside_match_arm_lhs() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! foo {() => {0}}
+ //^^^
+fn bar() {
+ match 0 {
+ $0foo!() => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_use_alias() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo as bar$0;
+
+//- /foo/lib.rs crate:foo
+// empty
+//^file
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_use_alias_foo_macro() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::foo as bar$0;
+
+//- /foo/lib.rs crate:foo
+#[macro_export]
+macro_rules! foo { () => { () } }
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_methods() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ fn frobnicate(&self) { }
+ //^^^^^^^^^^
+}
+
+fn bar(foo: &Foo) {
+ foo.frobnicate$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_fields() {
+ check(
+ r#"
+struct Foo {
+ spam: u32,
+} //^^^^
+
+fn bar(foo: &Foo) {
+ foo.spam$0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_record_fields() {
+ check(
+ r#"
+//- /lib.rs
+struct Foo {
+ spam: u32,
+} //^^^^
+
+fn bar() -> Foo {
+ Foo {
+ spam$0: 0,
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_record_pat_fields() {
+ check(
+ r#"
+//- /lib.rs
+struct Foo {
+ spam: u32,
+} //^^^^
+
+fn bar(foo: Foo) -> Foo {
+ let Foo { spam$0: _, } = foo
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_record_fields_macros() {
+ check(
+ r"
+macro_rules! m { () => { 92 };}
+struct Foo { spam: u32 }
+ //^^^^
+
+fn bar() -> Foo {
+ Foo { spam$0: m!() }
+}
+",
+ );
+ }
+
+ #[test]
+ fn goto_for_tuple_fields() {
+ check(
+ r#"
+struct Foo(u32);
+ //^^^
+
+fn bar() {
+ let foo = Foo(0);
+ foo.$00;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_ufcs_inherent_methods() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ fn frobnicate() { }
+} //^^^^^^^^^^
+
+fn bar(foo: &Foo) {
+ Foo::frobnicate$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_ufcs_trait_methods_through_traits() {
+ check(
+ r#"
+trait Foo {
+ fn frobnicate();
+} //^^^^^^^^^^
+
+fn bar() {
+ Foo::frobnicate$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_ufcs_trait_methods_through_self() {
+ check(
+ r#"
+struct Foo;
+trait Trait {
+ fn frobnicate();
+} //^^^^^^^^^^
+impl Trait for Foo {}
+
+fn bar() {
+ Foo::frobnicate$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_on_self() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ //^^^
+ pub fn new() -> Self {
+ Self$0 {}
+ }
+}
+"#,
+ );
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ //^^^
+ pub fn new() -> Self$0 {
+ Self {}
+ }
+}
+"#,
+ );
+
+ check(
+ r#"
+enum Foo { A }
+impl Foo {
+ //^^^
+ pub fn new() -> Self$0 {
+ Foo::A
+ }
+}
+"#,
+ );
+
+ check(
+ r#"
+enum Foo { A }
+impl Foo {
+ //^^^
+ pub fn thing(a: &Self$0) {
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_on_self_in_trait_impl() {
+ check(
+ r#"
+struct Foo;
+trait Make {
+ fn new() -> Self;
+}
+impl Make for Foo {
+ //^^^
+ fn new() -> Self {
+ Self$0 {}
+ }
+}
+"#,
+ );
+
+ check(
+ r#"
+struct Foo;
+trait Make {
+ fn new() -> Self;
+}
+impl Make for Foo {
+ //^^^
+ fn new() -> Self$0 {
+ Self {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_when_used_on_definition_name_itself() {
+ check(
+ r#"
+struct Foo$0 { value: u32 }
+ //^^^
+ "#,
+ );
+
+ check(
+ r#"
+struct Foo {
+ field$0: string,
+} //^^^^^
+"#,
+ );
+
+ check(
+ r#"
+fn foo_test$0() { }
+ //^^^^^^^^
+"#,
+ );
+
+ check(
+ r#"
+enum Foo$0 { Variant }
+ //^^^
+"#,
+ );
+
+ check(
+ r#"
+enum Foo {
+ Variant1,
+ Variant2$0,
+ //^^^^^^^^
+ Variant3,
+}
+"#,
+ );
+
+ check(
+ r#"
+static INNER$0: &str = "";
+ //^^^^^
+"#,
+ );
+
+ check(
+ r#"
+const INNER$0: &str = "";
+ //^^^^^
+"#,
+ );
+
+ check(
+ r#"
+type Thing$0 = Option<()>;
+ //^^^^^
+"#,
+ );
+
+ check(
+ r#"
+trait Foo$0 { }
+ //^^^
+"#,
+ );
+
+ check(
+ r#"
+mod bar$0 { }
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_from_macro() {
+ check(
+ r#"
+macro_rules! id {
+ ($($tt:tt)*) => { $($tt)* }
+}
+fn foo() {}
+ //^^^
+id! {
+ fn bar() {
+ fo$0o();
+ }
+}
+mod confuse_index { fn foo(); }
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_through_format() {
+ check(
+ r#"
+#[macro_export]
+macro_rules! format {
+ ($($arg:tt)*) => ($crate::fmt::format($crate::__export::format_args!($($arg)*)))
+}
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args {
+ ($fmt:expr) => ({ /* compiler built-in */ });
+ ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+pub mod __export {
+ pub use crate::format_args;
+ fn foo() {} // for index confusion
+}
+fn foo() -> i8 {}
+ //^^^
+fn test() {
+ format!("{}", fo$0o())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_through_included_file() {
+ check(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {}
+
+ include!("foo.rs");
+//^^^^^^^^^^^^^^^^^^^
+
+fn f() {
+ foo$0();
+}
+
+mod confuse_index {
+ pub fn foo() {}
+}
+
+//- /foo.rs
+fn foo() {}
+ "#,
+ );
+ }
+
+ #[test]
+ fn goto_for_type_param() {
+ check(
+ r#"
+struct Foo<T: Clone> { t: $0T }
+ //^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_within_macro() {
+ check(
+ r#"
+macro_rules! id {
+ ($($tt:tt)*) => ($($tt)*)
+}
+
+fn foo() {
+ let x = 1;
+ //^
+ id!({
+ let y = $0x;
+ let z = y;
+ });
+}
+"#,
+ );
+
+ check(
+ r#"
+macro_rules! id {
+ ($($tt:tt)*) => ($($tt)*)
+}
+
+fn foo() {
+ let x = 1;
+ id!({
+ let y = x;
+ //^
+ let z = $0y;
+ });
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_in_local_fn() {
+ check(
+ r#"
+fn main() {
+ fn foo() {
+ let x = 92;
+ //^
+ $0x;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_in_local_macro() {
+ check(
+ r#"
+fn bar() {
+ macro_rules! foo { () => { () } }
+ //^^^
+ $0foo!();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_field_init_shorthand() {
+ check(
+ r#"
+struct Foo { x: i32 }
+ //^
+fn main() {
+ let x = 92;
+ //^
+ Foo { x$0 };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_field() {
+ check(
+ r#"
+enum Foo {
+ Bar { x: i32 }
+ //^
+}
+fn baz(foo: Foo) {
+ match foo {
+ Foo::Bar { x$0 } => x
+ //^
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_self_pattern_const() {
+ check(
+ r#"
+enum Foo { Bar }
+ //^^^
+impl Foo {
+ fn baz(self) {
+ match self { Self::Bar$0 => {} }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_self_pattern_record() {
+ check(
+ r#"
+enum Foo { Bar { val: i32 } }
+ //^^^
+impl Foo {
+ fn baz(self) -> i32 {
+ match self { Self::Bar$0 { val } => {} }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_self_expr_const() {
+ check(
+ r#"
+enum Foo { Bar }
+ //^^^
+impl Foo {
+ fn baz(self) { Self::Bar$0; }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_self_expr_record() {
+ check(
+ r#"
+enum Foo { Bar { val: i32 } }
+ //^^^
+impl Foo {
+ fn baz(self) { Self::Bar$0 {val: 4}; }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_type_alias_generic_parameter() {
+ check(
+ r#"
+type Alias<T> = T$0;
+ //^
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_macro_container() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+foo::module$0::mac!();
+
+//- /foo/lib.rs crate:foo
+pub mod module {
+ //^^^^^^
+ #[macro_export]
+ macro_rules! _mac { () => { () } }
+ pub use crate::_mac as mac;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_assoc_ty_in_path() {
+ check(
+ r#"
+trait Iterator {
+ type Item;
+ //^^^^
+}
+
+fn f() -> impl Iterator<Item$0 = u8> {}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_super_assoc_ty_in_path() {
+ check(
+ r#"
+trait Super {
+ type Item;
+ //^^^^
+}
+
+trait Sub: Super {}
+
+fn f() -> impl Sub<Item$0 = u8> {}
+"#,
+ );
+ }
+
+ #[test]
+ fn unknown_assoc_ty() {
+ check_unresolved(
+ r#"
+trait Iterator { type Item; }
+fn f() -> impl Iterator<Invalid$0 = u8> {}
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_assoc_ty_in_path_multiple() {
+ check(
+ r#"
+trait Iterator {
+ type A;
+ //^
+ type B;
+}
+
+fn f() -> impl Iterator<A$0 = u8, B = ()> {}
+"#,
+ );
+ check(
+ r#"
+trait Iterator {
+ type A;
+ type B;
+ //^
+}
+
+fn f() -> impl Iterator<A = u8, B$0 = ()> {}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_assoc_ty_ufcs() {
+ check(
+ r#"
+trait Iterator {
+ type Item;
+ //^^^^
+}
+
+fn g() -> <() as Iterator<Item$0 = ()>>::Item {}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_assoc_ty_ufcs_multiple() {
+ check(
+ r#"
+trait Iterator {
+ type A;
+ //^
+ type B;
+}
+
+fn g() -> <() as Iterator<A$0 = (), B = u8>>::B {}
+"#,
+ );
+ check(
+ r#"
+trait Iterator {
+ type A;
+ type B;
+ //^
+}
+
+fn g() -> <() as Iterator<A = (), B$0 = u8>>::A {}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_self_param_ty_specified() {
+ check(
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn bar(self: &Foo) {
+ //^^^^
+ let foo = sel$0f;
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_self_param_on_decl() {
+ check(
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn bar(&self$0) {
+ //^^^^
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_lifetime_param_on_decl() {
+ check(
+ r#"
+fn foo<'foobar$0>(_: &'foobar ()) {
+ //^^^^^^^
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_lifetime_param_decl() {
+ check(
+ r#"
+fn foo<'foobar>(_: &'foobar$0 ()) {
+ //^^^^^^^
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_lifetime_param_decl_nested() {
+ check(
+ r#"
+fn foo<'foobar>(_: &'foobar ()) {
+ fn foo<'foobar>(_: &'foobar$0 ()) {}
+ //^^^^^^^
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_lifetime_hrtb() {
+ // FIXME: requires the HIR to somehow track these hrtb lifetimes
+ check_unresolved(
+ r#"
+trait Foo<T> {}
+fn foo<T>() where for<'a> T: Foo<&'a$0 (u8, u16)>, {}
+ //^^
+"#,
+ );
+ check_unresolved(
+ r#"
+trait Foo<T> {}
+fn foo<T>() where for<'a$0> T: Foo<&'a (u8, u16)>, {}
+ //^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_lifetime_hrtb_for_type() {
+ // FIXME: requires ForTypes to be implemented
+ check_unresolved(
+ r#"trait Foo<T> {}
+fn foo<T>() where T: for<'a> Foo<&'a$0 (u8, u16)>, {}
+ //^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_label() {
+ check(
+ r#"
+fn foo<'foo>(_: &'foo ()) {
+ 'foo: {
+ //^^^^
+ 'bar: loop {
+ break 'foo$0;
+ }
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_intra_doc_link_same_file() {
+ check(
+ r#"
+/// Blah, [`bar`](bar) .. [`foo`](foo$0) has [`bar`](bar)
+pub fn bar() { }
+
+/// You might want to see [`std::fs::read()`] too.
+pub fn foo() { }
+ //^^^
+
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_intra_doc_link_inner() {
+ check(
+ r#"
+//- /main.rs
+mod m;
+struct S;
+ //^
+
+//- /m.rs
+//! [`super::S$0`]
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_incomplete_field() {
+ check(
+ r#"
+struct A { a: u32 }
+ //^
+fn foo() { A { a$0: }; }
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_proc_macro() {
+ check(
+ r#"
+//- /main.rs crate:main deps:mac
+use mac::fn_macro;
+
+fn_macro$0!();
+
+//- /mac.rs crate:mac
+#![crate_type="proc-macro"]
+#[proc_macro]
+fn fn_macro() {}
+ //^^^^^^^^
+ "#,
+ )
+ }
+
+ #[test]
+ fn goto_intra_doc_links() {
+ check(
+ r#"
+
+pub mod theitem {
+ /// This is the item. Cool!
+ pub struct TheItem;
+ //^^^^^^^
+}
+
+/// Gives you a [`TheItem$0`].
+///
+/// [`TheItem`]: theitem::TheItem
+pub fn gimme() -> theitem::TheItem {
+ theitem::TheItem
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_ident_from_pat_macro() {
+ check(
+ r#"
+macro_rules! pat {
+ ($name:ident) => { Enum::Variant1($name) }
+}
+
+enum Enum {
+ Variant1(u8),
+ Variant2,
+}
+
+fn f(e: Enum) {
+ match e {
+ pat!(bind) => {
+ //^^^^
+ bind$0
+ }
+ Enum::Variant2 => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_include() {
+ check(
+ r#"
+//- /main.rs
+
+#[rustc_builtin_macro]
+macro_rules! include_str {}
+
+fn main() {
+ let str = include_str!("foo.txt$0");
+}
+//- /foo.txt
+// empty
+//^file
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_doc_include_str() {
+ check(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include_str {}
+
+#[doc = include_str!("docs.md$0")]
+struct Item;
+
+//- /docs.md
+// docs
+//^file
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_shadow_include() {
+ check(
+ r#"
+//- /main.rs
+macro_rules! include {
+ ("included.rs") => {}
+}
+
+include!("included.rs$0");
+
+//- /included.rs
+// empty
+"#,
+ );
+ }
+
+ #[cfg(test)]
+ mod goto_impl_of_trait_fn {
+ use super::check;
+ #[test]
+ fn cursor_on_impl() {
+ check(
+ r#"
+trait Twait {
+ fn a();
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ fn a$0();
+ //^
+}
+ "#,
+ );
+ }
+ #[test]
+ fn method_call() {
+ check(
+ r#"
+trait Twait {
+ fn a(&self);
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ fn a(&self){};
+ //^
+}
+fn f() {
+ let s = Stwuct;
+ s.a$0();
+}
+ "#,
+ );
+ }
+ #[test]
+ fn path_call() {
+ check(
+ r#"
+trait Twait {
+ fn a(&self);
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ fn a(&self){};
+ //^
+}
+fn f() {
+ let s = Stwuct;
+ Stwuct::a$0(&s);
+}
+ "#,
+ );
+ }
+ #[test]
+ fn where_clause_can_work() {
+ check(
+ r#"
+trait G {
+ fn g(&self);
+}
+trait Bound{}
+trait EA{}
+struct Gen<T>(T);
+impl <T:EA> G for Gen<T> {
+ fn g(&self) {
+ }
+}
+impl <T> G for Gen<T>
+where T : Bound
+{
+ fn g(&self){
+ //^
+ }
+}
+struct A;
+impl Bound for A{}
+fn f() {
+ let gen = Gen::<A>(A);
+ gen.g$0();
+}
+ "#,
+ );
+ }
+ #[test]
+ fn wc_case_is_ok() {
+ check(
+ r#"
+trait G {
+ fn g(&self);
+}
+trait BParent{}
+trait Bound: BParent{}
+struct Gen<T>(T);
+impl <T> G for Gen<T>
+where T : Bound
+{
+ fn g(&self){
+ //^
+ }
+}
+struct A;
+impl Bound for A{}
+fn f() {
+ let gen = Gen::<A>(A);
+ gen.g$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_call_defaulted() {
+ check(
+ r#"
+trait Twait {
+ fn a(&self) {}
+ //^
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+}
+fn f() {
+ let s = Stwuct;
+ s.a$0();
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn method_call_on_generic() {
+ check(
+ r#"
+trait Twait {
+ fn a(&self) {}
+ //^
+}
+
+fn f<T: Twait>(s: T) {
+ s.a$0();
+}
+ "#,
+ );
+ }
+ }
+
+ #[test]
+ fn goto_def_of_trait_impl_const() {
+ check(
+ r#"
+trait Twait {
+ const NOMS: bool;
+ // ^^^^
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ const NOMS$0: bool = true;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_of_trait_impl_type_alias() {
+ check(
+ r#"
+trait Twait {
+ type IsBad;
+ // ^^^^^
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ type IsBad$0 = !;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_derive_input() {
+ check(
+ r#"
+ //- minicore:derive
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+ // ^^^^
+ #[derive(Copy$0)]
+ struct Foo;
+ "#,
+ );
+ check(
+ r#"
+//- minicore:derive
+#[rustc_builtin_macro]
+pub macro Copy {}
+ // ^^^^
+#[cfg_attr(feature = "false", derive)]
+#[derive(Copy$0)]
+struct Foo;
+ "#,
+ );
+ check(
+ r#"
+//- minicore:derive
+mod foo {
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+ // ^^^^
+}
+#[derive(foo::Copy$0)]
+struct Foo;
+ "#,
+ );
+ check(
+ r#"
+//- minicore:derive
+mod foo {
+ // ^^^
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+}
+#[derive(foo$0::Copy)]
+struct Foo;
+ "#,
+ );
+ }
+
+ #[test]
+ fn goto_def_in_macro_multi() {
+ check(
+ r#"
+struct Foo {
+ foo: ()
+ //^^^
+}
+macro_rules! foo {
+ ($ident:ident) => {
+ fn $ident(Foo { $ident }: Foo) {}
+ }
+}
+foo!(foo$0);
+ //^^^
+ //^^^
+"#,
+ );
+ check(
+ r#"
+fn bar() {}
+ //^^^
+struct bar;
+ //^^^
+macro_rules! foo {
+ ($ident:ident) => {
+ fn foo() {
+ let _: $ident = $ident;
+ }
+ }
+}
+
+foo!(bar$0);
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_await_poll() {
+ check(
+ r#"
+//- minicore: future
+
+struct MyFut;
+
+impl core::future::Future for MyFut {
+ type Output = ();
+
+ fn poll(
+ //^^^^
+ self: std::pin::Pin<&mut Self>,
+ cx: &mut std::task::Context<'_>
+ ) -> std::task::Poll<Self::Output>
+ {
+ ()
+ }
+}
+
+fn f() {
+ MyFut.await$0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_await_into_future_poll() {
+ check(
+ r#"
+//- minicore: future
+
+struct Futurable;
+
+impl core::future::IntoFuture for Futurable {
+ type IntoFuture = MyFut;
+}
+
+struct MyFut;
+
+impl core::future::Future for MyFut {
+ type Output = ();
+
+ fn poll(
+ //^^^^
+ self: std::pin::Pin<&mut Self>,
+ cx: &mut std::task::Context<'_>
+ ) -> std::task::Poll<Self::Output>
+ {
+ ()
+ }
+}
+
+fn f() {
+ Futurable.await$0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_try_op() {
+ check(
+ r#"
+//- minicore: try
+
+struct Struct;
+
+impl core::ops::Try for Struct {
+ fn branch(
+ //^^^^^^
+ self
+ ) {}
+}
+
+fn f() {
+ Struct?$0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_index_op() {
+ check(
+ r#"
+//- minicore: index
+
+struct Struct;
+
+impl core::ops::Index<usize> for Struct {
+ fn index(
+ //^^^^^
+ self
+ ) {}
+}
+
+fn f() {
+ Struct[0]$0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_prefix_op() {
+ check(
+ r#"
+//- minicore: deref
+
+struct Struct;
+
+impl core::ops::Deref for Struct {
+ fn deref(
+ //^^^^^
+ self
+ ) {}
+}
+
+fn f() {
+ $0*Struct;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_bin_op() {
+ check(
+ r#"
+//- minicore: add
+
+struct Struct;
+
+impl core::ops::Add for Struct {
+ fn add(
+ //^^^
+ self
+ ) {}
+}
+
+fn f() {
+ Struct +$0 Struct;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_bin_op_multiple_impl() {
+ check(
+ r#"
+//- minicore: add
+struct S;
+impl core::ops::Add for S {
+ fn add(
+ //^^^
+ ) {}
+}
+impl core::ops::Add<usize> for S {
+ fn add(
+ ) {}
+}
+
+fn f() {
+ S +$0 S
+}
+"#,
+ );
+
+ check(
+ r#"
+//- minicore: add
+struct S;
+impl core::ops::Add for S {
+ fn add(
+ ) {}
+}
+impl core::ops::Add<usize> for S {
+ fn add(
+ //^^^
+ ) {}
+}
+
+fn f() {
+ S +$0 0usize
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn path_call_multiple_trait_impl() {
+ check(
+ r#"
+trait Trait<T> {
+ fn f(_: T);
+}
+impl Trait<i32> for usize {
+ fn f(_: i32) {}
+ //^
+}
+impl Trait<i64> for usize {
+ fn f(_: i64) {}
+}
+fn main() {
+ usize::f$0(0i32);
+}
+"#,
+ );
+
+ check(
+ r#"
+trait Trait<T> {
+ fn f(_: T);
+}
+impl Trait<i32> for usize {
+ fn f(_: i32) {}
+}
+impl Trait<i64> for usize {
+ fn f(_: i64) {}
+ //^
+}
+fn main() {
+ usize::f$0(0i64);
+}
+"#,
+ )
+ }
+}
--- /dev/null
- let in_attr = matches!(original_token.parent().and_then(ast::TokenTree::cast), Some(tt) if tt.syntax().ancestors().any(|it| ast::Meta::can_cast(it.kind())));
+mod render;
+
+#[cfg(test)]
+mod tests;
+
+use std::iter;
+
+use either::Either;
+use hir::{HasSource, Semantics};
+use ide_db::{
+ base_db::FileRange,
+ defs::{Definition, IdentClass, OperatorClass},
+ famous_defs::FamousDefs,
+ helpers::pick_best_token,
+ FxIndexSet, RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxNode, SyntaxToken, T};
+
+use crate::{
+ doc_links::token_as_doc_comment,
+ markup::Markup,
+ runnables::{runnable_fn, runnable_mod},
+ FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, TryToNav,
+};
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct HoverConfig {
+ pub links_in_hover: bool,
+ pub documentation: Option<HoverDocFormat>,
+ pub keywords: bool,
+}
+
+impl HoverConfig {
+ fn markdown(&self) -> bool {
+ matches!(self.documentation, Some(HoverDocFormat::Markdown))
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum HoverDocFormat {
+ Markdown,
+ PlainText,
+}
+
+#[derive(Debug, Clone)]
+pub enum HoverAction {
+ Runnable(Runnable),
+ Implementation(FilePosition),
+ Reference(FilePosition),
+ GoToType(Vec<HoverGotoTypeData>),
+}
+
+impl HoverAction {
+ fn goto_type_from_targets(db: &RootDatabase, targets: Vec<hir::ModuleDef>) -> Self {
+ let targets = targets
+ .into_iter()
+ .filter_map(|it| {
+ Some(HoverGotoTypeData {
+ mod_path: render::path(
+ db,
+ it.module(db)?,
+ it.name(db).map(|name| name.to_string()),
+ ),
+ nav: it.try_to_nav(db)?,
+ })
+ })
+ .collect();
+ HoverAction::GoToType(targets)
+ }
+}
+
+#[derive(Debug, Clone, Eq, PartialEq, Hash)]
+pub struct HoverGotoTypeData {
+ pub mod_path: String,
+ pub nav: NavigationTarget,
+}
+
+/// Contains the results when hovering over an item
+#[derive(Debug, Default)]
+pub struct HoverResult {
+ pub markup: Markup,
+ pub actions: Vec<HoverAction>,
+}
+
+// Feature: Hover
+//
+// Shows additional information, like the type of an expression or the documentation for a definition when "focusing" code.
+// Focusing is usually hovering with a mouse, but can also be triggered with a shortcut.
+//
+// image::https://user-images.githubusercontent.com/48062697/113020658-b5f98b80-917a-11eb-9f88-3dbc27320c95.gif[]
+pub(crate) fn hover(
+ db: &RootDatabase,
+ FileRange { file_id, range }: FileRange,
+ config: &HoverConfig,
+) -> Option<RangeInfo<HoverResult>> {
+ let sema = &hir::Semantics::new(db);
+ let file = sema.parse(file_id).syntax().clone();
+
+ if !range.is_empty() {
+ return hover_ranged(&file, range, sema, config);
+ }
+ let offset = range.start();
+
+ let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
+ IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self] => 4,
+ // index and prefix ops
+ T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] => 3,
+ kind if kind.is_keyword() => 2,
+ T!['('] | T![')'] => 2,
+ kind if kind.is_trivia() => 0,
+ _ => 1,
+ })?;
+
+ if let Some(doc_comment) = token_as_doc_comment(&original_token) {
+ cov_mark::hit!(no_highlight_on_comment_hover);
+ return doc_comment.get_definition_with_descend_at(sema, offset, |def, node, range| {
+ let res = hover_for_definition(sema, file_id, def, &node, config)?;
+ Some(RangeInfo::new(range, res))
+ });
+ }
+
++ let in_attr = original_token
++ .parent_ancestors()
++ .filter_map(ast::Item::cast)
++ .any(|item| sema.is_attr_macro_call(&item))
++ && !matches!(
++ original_token.parent().and_then(ast::TokenTree::cast),
++ Some(tt) if tt.syntax().ancestors().any(|it| ast::Meta::can_cast(it.kind()))
++ );
+ // prefer descending the same token kind in attribute expansions, in normal macros text
+ // equivalency is more important
+ let descended = if in_attr {
+ [sema.descend_into_macros_with_kind_preference(original_token.clone())].into()
+ } else {
+ sema.descend_into_macros_with_same_text(original_token.clone())
+ };
+
+ // FIXME: Definition should include known lints and the like instead of having this special case here
+ let hovered_lint = descended.iter().find_map(|token| {
+ let attr = token.parent_ancestors().find_map(ast::Attr::cast)?;
+ render::try_for_lint(&attr, token)
+ });
+ if let Some(res) = hovered_lint {
+ return Some(RangeInfo::new(original_token.text_range(), res));
+ }
+
+ let result = descended
+ .iter()
+ .filter_map(|token| {
+ let node = token.parent()?;
+ let class = IdentClass::classify_token(sema, token)?;
+ if let IdentClass::Operator(OperatorClass::Await(_)) = class {
+ // It's better for us to fall back to the keyword hover here,
+ // rendering poll is very confusing
+ return None;
+ }
+ Some(class.definitions().into_iter().zip(iter::once(node).cycle()))
+ })
+ .flatten()
+ .unique_by(|&(def, _)| def)
+ .filter_map(|(def, node)| hover_for_definition(sema, file_id, def, &node, config))
+ .reduce(|mut acc: HoverResult, HoverResult { markup, actions }| {
+ acc.actions.extend(actions);
+ acc.markup = Markup::from(format!("{}\n---\n{}", acc.markup, markup));
+ acc
+ });
+
+ if result.is_none() {
+ // fallbacks, show keywords or types
+
+ let res = descended.iter().find_map(|token| render::keyword(sema, config, token));
+ if let Some(res) = res {
+ return Some(RangeInfo::new(original_token.text_range(), res));
+ }
+ let res = descended
+ .iter()
+ .find_map(|token| hover_type_fallback(sema, config, token, &original_token));
+ if let Some(_) = res {
+ return res;
+ }
+ }
+ result.map(|mut res: HoverResult| {
+ res.actions = dedupe_or_merge_hover_actions(res.actions);
+ RangeInfo::new(original_token.text_range(), res)
+ })
+}
+
+pub(crate) fn hover_for_definition(
+ sema: &Semantics<'_, RootDatabase>,
+ file_id: FileId,
+ definition: Definition,
+ node: &SyntaxNode,
+ config: &HoverConfig,
+) -> Option<HoverResult> {
+ let famous_defs = match &definition {
+ Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(node)?.krate())),
+ _ => None,
+ };
+ render::definition(sema.db, definition, famous_defs.as_ref(), config).map(|markup| {
+ HoverResult {
+ markup: render::process_markup(sema.db, definition, &markup, config),
+ actions: show_implementations_action(sema.db, definition)
+ .into_iter()
+ .chain(show_fn_references_action(sema.db, definition))
+ .chain(runnable_action(sema, definition, file_id))
+ .chain(goto_type_action_for_def(sema.db, definition))
+ .collect(),
+ }
+ })
+}
+
+fn hover_ranged(
+ file: &SyntaxNode,
+ range: syntax::TextRange,
+ sema: &Semantics<'_, RootDatabase>,
+ config: &HoverConfig,
+) -> Option<RangeInfo<HoverResult>> {
+ // FIXME: make this work in attributes
+ let expr_or_pat = file.covering_element(range).ancestors().find_map(|it| {
+ match_ast! {
+ match it {
+ ast::Expr(expr) => Some(Either::Left(expr)),
+ ast::Pat(pat) => Some(Either::Right(pat)),
+ _ => None,
+ }
+ }
+ })?;
+ let res = match &expr_or_pat {
+ Either::Left(ast::Expr::TryExpr(try_expr)) => render::try_expr(sema, config, try_expr),
+ Either::Left(ast::Expr::PrefixExpr(prefix_expr))
+ if prefix_expr.op_kind() == Some(ast::UnaryOp::Deref) =>
+ {
+ render::deref_expr(sema, config, prefix_expr)
+ }
+ _ => None,
+ };
+ let res = res.or_else(|| render::type_info(sema, config, &expr_or_pat));
+ res.map(|it| {
+ let range = match expr_or_pat {
+ Either::Left(it) => it.syntax().text_range(),
+ Either::Right(it) => it.syntax().text_range(),
+ };
+ RangeInfo::new(range, it)
+ })
+}
+
+fn hover_type_fallback(
+ sema: &Semantics<'_, RootDatabase>,
+ config: &HoverConfig,
+ token: &SyntaxToken,
+ original_token: &SyntaxToken,
+) -> Option<RangeInfo<HoverResult>> {
+ let node =
+ token.parent_ancestors().take_while(|it| !ast::Item::can_cast(it.kind())).find(|n| {
+ ast::Expr::can_cast(n.kind())
+ || ast::Pat::can_cast(n.kind())
+ || ast::Type::can_cast(n.kind())
+ })?;
+
+ let expr_or_pat = match_ast! {
+ match node {
+ ast::Expr(it) => Either::Left(it),
+ ast::Pat(it) => Either::Right(it),
+ // If this node is a MACRO_CALL, it means that `descend_into_macros_many` failed to resolve.
+ // (e.g expanding a builtin macro). So we give up here.
+ ast::MacroCall(_it) => return None,
+ _ => return None,
+ }
+ };
+
+ let res = render::type_info(sema, config, &expr_or_pat)?;
+ let range = sema
+ .original_range_opt(&node)
+ .map(|frange| frange.range)
+ .unwrap_or_else(|| original_token.text_range());
+ Some(RangeInfo::new(range, res))
+}
+
+fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option<HoverAction> {
+ fn to_action(nav_target: NavigationTarget) -> HoverAction {
+ HoverAction::Implementation(FilePosition {
+ file_id: nav_target.file_id,
+ offset: nav_target.focus_or_full_range().start(),
+ })
+ }
+
+ let adt = match def {
+ Definition::Trait(it) => return it.try_to_nav(db).map(to_action),
+ Definition::Adt(it) => Some(it),
+ Definition::SelfType(it) => it.self_ty(db).as_adt(),
+ _ => None,
+ }?;
+ adt.try_to_nav(db).map(to_action)
+}
+
+fn show_fn_references_action(db: &RootDatabase, def: Definition) -> Option<HoverAction> {
+ match def {
+ Definition::Function(it) => it.try_to_nav(db).map(|nav_target| {
+ HoverAction::Reference(FilePosition {
+ file_id: nav_target.file_id,
+ offset: nav_target.focus_or_full_range().start(),
+ })
+ }),
+ _ => None,
+ }
+}
+
+fn runnable_action(
+ sema: &hir::Semantics<'_, RootDatabase>,
+ def: Definition,
+ file_id: FileId,
+) -> Option<HoverAction> {
+ match def {
+ Definition::Module(it) => runnable_mod(sema, it).map(HoverAction::Runnable),
+ Definition::Function(func) => {
+ let src = func.source(sema.db)?;
+ if src.file_id != file_id.into() {
+ cov_mark::hit!(hover_macro_generated_struct_fn_doc_comment);
+ cov_mark::hit!(hover_macro_generated_struct_fn_doc_attr);
+ return None;
+ }
+
+ runnable_fn(sema, func).map(HoverAction::Runnable)
+ }
+ _ => None,
+ }
+}
+
+fn goto_type_action_for_def(db: &RootDatabase, def: Definition) -> Option<HoverAction> {
+ let mut targets: Vec<hir::ModuleDef> = Vec::new();
+ let mut push_new_def = |item: hir::ModuleDef| {
+ if !targets.contains(&item) {
+ targets.push(item);
+ }
+ };
+
+ if let Definition::GenericParam(hir::GenericParam::TypeParam(it)) = def {
+ it.trait_bounds(db).into_iter().for_each(|it| push_new_def(it.into()));
+ } else {
+ let ty = match def {
+ Definition::Local(it) => it.ty(db),
+ Definition::GenericParam(hir::GenericParam::ConstParam(it)) => it.ty(db),
+ Definition::Field(field) => field.ty(db),
+ Definition::Function(function) => function.ret_type(db),
+ _ => return None,
+ };
+
+ walk_and_push_ty(db, &ty, &mut push_new_def);
+ }
+
+ Some(HoverAction::goto_type_from_targets(db, targets))
+}
+
+fn walk_and_push_ty(
+ db: &RootDatabase,
+ ty: &hir::Type,
+ push_new_def: &mut dyn FnMut(hir::ModuleDef),
+) {
+ ty.walk(db, |t| {
+ if let Some(adt) = t.as_adt() {
+ push_new_def(adt.into());
+ } else if let Some(trait_) = t.as_dyn_trait() {
+ push_new_def(trait_.into());
+ } else if let Some(traits) = t.as_impl_traits(db) {
+ traits.for_each(|it| push_new_def(it.into()));
+ } else if let Some(trait_) = t.as_associated_type_parent_trait(db) {
+ push_new_def(trait_.into());
+ }
+ });
+}
+
+fn dedupe_or_merge_hover_actions(actions: Vec<HoverAction>) -> Vec<HoverAction> {
+ let mut deduped_actions = Vec::with_capacity(actions.len());
+ let mut go_to_type_targets = FxIndexSet::default();
+
+ let mut seen_implementation = false;
+ let mut seen_reference = false;
+ let mut seen_runnable = false;
+ for action in actions {
+ match action {
+ HoverAction::GoToType(targets) => {
+ go_to_type_targets.extend(targets);
+ }
+ HoverAction::Implementation(..) => {
+ if !seen_implementation {
+ seen_implementation = true;
+ deduped_actions.push(action);
+ }
+ }
+ HoverAction::Reference(..) => {
+ if !seen_reference {
+ seen_reference = true;
+ deduped_actions.push(action);
+ }
+ }
+ HoverAction::Runnable(..) => {
+ if !seen_runnable {
+ seen_runnable = true;
+ deduped_actions.push(action);
+ }
+ }
+ };
+ }
+
+ if !go_to_type_targets.is_empty() {
+ deduped_actions.push(HoverAction::GoToType(go_to_type_targets.into_iter().collect()));
+ }
+
+ deduped_actions
+}
--- /dev/null
- use hir::{known, Callable, HasVisibility, HirDisplay, Mutability, Semantics, TypeInfo};
+use std::fmt;
+
+use either::Either;
- pub reborrow_hints: ReborrowHints,
++use hir::{
++ known, Adjust, AutoBorrow, Callable, HasVisibility, HirDisplay, Mutability, OverloadedDeref,
++ PointerCast, Safety, Semantics, TypeInfo,
++};
+use ide_db::{
+ base_db::FileRange, famous_defs::FamousDefs, syntax_helpers::node_ext::walk_ty, FxHashMap,
+ RootDatabase,
+};
+use itertools::Itertools;
+use stdx::to_lower_snake_case;
+use syntax::{
+ ast::{self, AstNode, HasArgList, HasGenericParams, HasName, UnaryOp},
+ match_ast, Direction, NodeOrToken, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken, TextRange,
+ TextSize, T,
+};
+
+use crate::FileId;
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct InlayHintsConfig {
+ pub render_colons: bool,
+ pub type_hints: bool,
+ pub parameter_hints: bool,
+ pub chaining_hints: bool,
- pub enum ReborrowHints {
++ pub adjustment_hints: AdjustmentHints,
+ pub closure_return_type_hints: ClosureReturnTypeHints,
+ pub binding_mode_hints: bool,
+ pub lifetime_elision_hints: LifetimeElisionHints,
+ pub param_names_for_lifetime_elision_hints: bool,
+ pub hide_named_constructor_hints: bool,
+ pub hide_closure_initialization_hints: bool,
+ pub max_length: Option<usize>,
+ pub closing_brace_hints_min_lines: Option<usize>,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum ClosureReturnTypeHints {
+ Always,
+ WithBlock,
+ Never,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum LifetimeElisionHints {
+ Always,
+ SkipTrivial,
+ Never,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
- MutableOnly,
++pub enum AdjustmentHints {
+ Always,
- ImplicitReborrowHint,
++ ReborrowOnly,
+ Never,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum InlayKind {
+ BindingModeHint,
+ ChainingHint,
+ ClosingBraceHint,
+ ClosureReturnTypeHint,
+ GenericParamListHint,
- range_limit: Option<FileRange>,
++ AdjustmentHint,
++ AdjustmentHintClosingParenthesis,
+ LifetimeHint,
+ ParameterHint,
+ TypeHint,
+}
+
+#[derive(Debug)]
+pub struct InlayHint {
+ pub range: TextRange,
+ pub kind: InlayKind,
+ pub label: InlayHintLabel,
+ pub tooltip: Option<InlayTooltip>,
+}
+
+#[derive(Debug)]
+pub enum InlayTooltip {
+ String(String),
+ HoverRanged(FileId, TextRange),
+ HoverOffset(FileId, TextSize),
+}
+
+pub struct InlayHintLabel {
+ pub parts: Vec<InlayHintLabelPart>,
+}
+
+impl InlayHintLabel {
+ pub fn as_simple_str(&self) -> Option<&str> {
+ match &*self.parts {
+ [part] => part.as_simple_str(),
+ _ => None,
+ }
+ }
+
+ pub fn prepend_str(&mut self, s: &str) {
+ match &mut *self.parts {
+ [part, ..] if part.as_simple_str().is_some() => part.text = format!("{s}{}", part.text),
+ _ => self.parts.insert(0, InlayHintLabelPart { text: s.into(), linked_location: None }),
+ }
+ }
+
+ pub fn append_str(&mut self, s: &str) {
+ match &mut *self.parts {
+ [.., part] if part.as_simple_str().is_some() => part.text.push_str(s),
+ _ => self.parts.push(InlayHintLabelPart { text: s.into(), linked_location: None }),
+ }
+ }
+}
+
+impl From<String> for InlayHintLabel {
+ fn from(s: String) -> Self {
+ Self { parts: vec![InlayHintLabelPart { text: s, linked_location: None }] }
+ }
+}
+
++impl From<&str> for InlayHintLabel {
++ fn from(s: &str) -> Self {
++ Self { parts: vec![InlayHintLabelPart { text: s.into(), linked_location: None }] }
++ }
++}
++
+impl fmt::Display for InlayHintLabel {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", self.parts.iter().map(|part| &part.text).format(""))
+ }
+}
+
+impl fmt::Debug for InlayHintLabel {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_list().entries(&self.parts).finish()
+ }
+}
+
+pub struct InlayHintLabelPart {
+ pub text: String,
+ /// Source location represented by this label part. The client will use this to fetch the part's
+ /// hover tooltip, and Ctrl+Clicking the label part will navigate to the definition the location
+ /// refers to (not necessarily the location itself).
+ /// When setting this, no tooltip must be set on the containing hint, or VS Code will display
+ /// them both.
+ pub linked_location: Option<FileRange>,
+}
+
+impl InlayHintLabelPart {
+ pub fn as_simple_str(&self) -> Option<&str> {
+ match self {
+ Self { text, linked_location: None } => Some(text),
+ _ => None,
+ }
+ }
+}
+
+impl fmt::Debug for InlayHintLabelPart {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self.as_simple_str() {
+ Some(string) => string.fmt(f),
+ None => f
+ .debug_struct("InlayHintLabelPart")
+ .field("text", &self.text)
+ .field("linked_location", &self.linked_location)
+ .finish(),
+ }
+ }
+}
+
+// Feature: Inlay Hints
+//
+// rust-analyzer shows additional information inline with the source code.
+// Editors usually render this using read-only virtual text snippets interspersed with code.
+//
+// rust-analyzer by default shows hints for
+//
+// * types of local variables
+// * names of function arguments
+// * types of chained expressions
+//
+// Optionally, one can enable additional hints for
+//
+// * return types of closure expressions
+// * elided lifetimes
+// * compiler inserted reborrows
+//
+// image::https://user-images.githubusercontent.com/48062697/113020660-b5f98b80-917a-11eb-8d70-3be3fd558cdd.png[]
+pub(crate) fn inlay_hints(
+ db: &RootDatabase,
+ file_id: FileId,
- Some(FileRange { range, .. }) => match file.covering_element(range) {
++ range_limit: Option<TextRange>,
+ config: &InlayHintsConfig,
+) -> Vec<InlayHint> {
+ let _p = profile::span("inlay_hints");
+ let sema = Semantics::new(db);
+ let file = sema.parse(file_id);
+ let file = file.syntax();
+
+ let mut acc = Vec::new();
+
+ if let Some(scope) = sema.scope(&file) {
+ let famous_defs = FamousDefs(&sema, scope.krate());
+
+ let hints = |node| hints(&mut acc, &famous_defs, config, file_id, node);
+ match range_limit {
- ast::Expr::PathExpr(_) => reborrow_hints(hints, sema, config, &expr),
++ Some(range) => match file.covering_element(range) {
+ NodeOrToken::Token(_) => return acc,
+ NodeOrToken::Node(n) => n
+ .descendants()
+ .filter(|descendant| range.intersect(descendant.text_range()).is_some())
+ .for_each(hints),
+ },
+ None => file.descendants().for_each(hints),
+ };
+ }
+
+ acc
+}
+
+fn hints(
+ hints: &mut Vec<InlayHint>,
+ famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>,
+ config: &InlayHintsConfig,
+ file_id: FileId,
+ node: SyntaxNode,
+) {
+ closing_brace_hints(hints, sema, config, file_id, node.clone());
+ match_ast! {
+ match node {
+ ast::Expr(expr) => {
+ chaining_hints(hints, sema, &famous_defs, config, file_id, &expr);
++ adjustment_hints(hints, sema, config, &expr);
+ match expr {
+ ast::Expr::CallExpr(it) => param_name_hints(hints, sema, config, ast::Expr::from(it)),
+ ast::Expr::MethodCallExpr(it) => {
+ param_name_hints(hints, sema, config, ast::Expr::from(it))
+ }
+ ast::Expr::ClosureExpr(it) => closure_ret_hints(hints, sema, &famous_defs, config, file_id, it),
+ // We could show reborrows for all expressions, but usually that is just noise to the user
+ // and the main point here is to show why "moving" a mutable reference doesn't necessarily move it
- fn reborrow_hints(
++ // ast::Expr::PathExpr(_) => reborrow_hints(hints, sema, config, &expr),
+ _ => None,
+ }
+ },
+ ast::Pat(it) => {
+ binding_mode_hints(hints, sema, config, &it);
+ if let ast::Pat::IdentPat(it) = it {
+ bind_pat_hints(hints, sema, config, file_id, &it);
+ }
+ Some(())
+ },
+ ast::Item(it) => match it {
+ // FIXME: record impl lifetimes so they aren't being reused in assoc item lifetime inlay hints
+ ast::Item::Impl(_) => None,
+ ast::Item::Fn(it) => fn_lifetime_fn_hints(hints, config, it),
+ // static type elisions
+ ast::Item::Static(it) => implicit_static_hints(hints, config, Either::Left(it)),
+ ast::Item::Const(it) => implicit_static_hints(hints, config, Either::Right(it)),
+ _ => None,
+ },
+ // FIXME: fn-ptr type, dyn fn type, and trait object type elisions
+ ast::Type(_) => None,
+ _ => None,
+ }
+ };
+}
+
+fn closing_brace_hints(
+ acc: &mut Vec<InlayHint>,
+ sema: &Semantics<'_, RootDatabase>,
+ config: &InlayHintsConfig,
+ file_id: FileId,
+ node: SyntaxNode,
+) -> Option<()> {
+ let min_lines = config.closing_brace_hints_min_lines?;
+
+ let name = |it: ast::Name| it.syntax().text_range();
+
+ let mut closing_token;
+ let (label, name_range) = if let Some(item_list) = ast::AssocItemList::cast(node.clone()) {
+ closing_token = item_list.r_curly_token()?;
+
+ let parent = item_list.syntax().parent()?;
+ match_ast! {
+ match parent {
+ ast::Impl(imp) => {
+ let imp = sema.to_def(&imp)?;
+ let ty = imp.self_ty(sema.db);
+ let trait_ = imp.trait_(sema.db);
+ let hint_text = match trait_ {
+ Some(tr) => format!("impl {} for {}", tr.name(sema.db), ty.display_truncated(sema.db, config.max_length)),
+ None => format!("impl {}", ty.display_truncated(sema.db, config.max_length)),
+ };
+ (hint_text, None)
+ },
+ ast::Trait(tr) => {
+ (format!("trait {}", tr.name()?), tr.name().map(name))
+ },
+ _ => return None,
+ }
+ }
+ } else if let Some(list) = ast::ItemList::cast(node.clone()) {
+ closing_token = list.r_curly_token()?;
+
+ let module = ast::Module::cast(list.syntax().parent()?)?;
+ (format!("mod {}", module.name()?), module.name().map(name))
+ } else if let Some(block) = ast::BlockExpr::cast(node.clone()) {
+ closing_token = block.stmt_list()?.r_curly_token()?;
+
+ let parent = block.syntax().parent()?;
+ match_ast! {
+ match parent {
+ ast::Fn(it) => {
+ // FIXME: this could include parameters, but `HirDisplay` prints too much info
+ // and doesn't respect the max length either, so the hints end up way too long
+ (format!("fn {}", it.name()?), it.name().map(name))
+ },
+ ast::Static(it) => (format!("static {}", it.name()?), it.name().map(name)),
+ ast::Const(it) => {
+ if it.underscore_token().is_some() {
+ ("const _".into(), None)
+ } else {
+ (format!("const {}", it.name()?), it.name().map(name))
+ }
+ },
+ _ => return None,
+ }
+ }
+ } else if let Some(mac) = ast::MacroCall::cast(node.clone()) {
+ let last_token = mac.syntax().last_token()?;
+ if last_token.kind() != T![;] && last_token.kind() != SyntaxKind::R_CURLY {
+ return None;
+ }
+ closing_token = last_token;
+
+ (
+ format!("{}!", mac.path()?),
+ mac.path().and_then(|it| it.segment()).map(|it| it.syntax().text_range()),
+ )
+ } else {
+ return None;
+ };
+
+ if let Some(mut next) = closing_token.next_token() {
+ if next.kind() == T![;] {
+ if let Some(tok) = next.next_token() {
+ closing_token = next;
+ next = tok;
+ }
+ }
+ if !(next.kind() == SyntaxKind::WHITESPACE && next.text().contains('\n')) {
+ // Only display the hint if the `}` is the last token on the line
+ return None;
+ }
+ }
+
+ let mut lines = 1;
+ node.text().for_each_chunk(|s| lines += s.matches('\n').count());
+ if lines < min_lines {
+ return None;
+ }
+
+ let linked_location = name_range.map(|range| FileRange { file_id, range });
+ acc.push(InlayHint {
+ range: closing_token.text_range(),
+ kind: InlayKind::ClosingBraceHint,
+ label: InlayHintLabel { parts: vec![InlayHintLabelPart { text: label, linked_location }] },
+ tooltip: None, // provided by label part location
+ });
+
+ None
+}
+
+fn implicit_static_hints(
+ acc: &mut Vec<InlayHint>,
+ config: &InlayHintsConfig,
+ statik_or_const: Either<ast::Static, ast::Const>,
+) -> Option<()> {
+ if config.lifetime_elision_hints != LifetimeElisionHints::Always {
+ return None;
+ }
+
+ if let Either::Right(it) = &statik_or_const {
+ if ast::AssocItemList::can_cast(
+ it.syntax().parent().map_or(SyntaxKind::EOF, |it| it.kind()),
+ ) {
+ return None;
+ }
+ }
+
+ if let Some(ast::Type::RefType(ty)) = statik_or_const.either(|it| it.ty(), |it| it.ty()) {
+ if ty.lifetime().is_none() {
+ let t = ty.amp_token()?;
+ acc.push(InlayHint {
+ range: t.text_range(),
+ kind: InlayKind::LifetimeHint,
+ label: "'static".to_owned().into(),
+ tooltip: Some(InlayTooltip::String("Elided static lifetime".into())),
+ });
+ }
+ }
+
+ Some(())
+}
+
+fn fn_lifetime_fn_hints(
+ acc: &mut Vec<InlayHint>,
+ config: &InlayHintsConfig,
+ func: ast::Fn,
+) -> Option<()> {
+ if config.lifetime_elision_hints == LifetimeElisionHints::Never {
+ return None;
+ }
+
+ let mk_lt_hint = |t: SyntaxToken, label: String| InlayHint {
+ range: t.text_range(),
+ kind: InlayKind::LifetimeHint,
+ label: label.into(),
+ tooltip: Some(InlayTooltip::String("Elided lifetime".into())),
+ };
+
+ let param_list = func.param_list()?;
+ let generic_param_list = func.generic_param_list();
+ let ret_type = func.ret_type();
+ let self_param = param_list.self_param().filter(|it| it.amp_token().is_some());
+
+ let is_elided = |lt: &Option<ast::Lifetime>| match lt {
+ Some(lt) => matches!(lt.text().as_str(), "'_"),
+ None => true,
+ };
+
+ let potential_lt_refs = {
+ let mut acc: Vec<_> = vec![];
+ if let Some(self_param) = &self_param {
+ let lifetime = self_param.lifetime();
+ let is_elided = is_elided(&lifetime);
+ acc.push((None, self_param.amp_token(), lifetime, is_elided));
+ }
+ param_list.params().filter_map(|it| Some((it.pat(), it.ty()?))).for_each(|(pat, ty)| {
+ // FIXME: check path types
+ walk_ty(&ty, &mut |ty| match ty {
+ ast::Type::RefType(r) => {
+ let lifetime = r.lifetime();
+ let is_elided = is_elided(&lifetime);
+ acc.push((
+ pat.as_ref().and_then(|it| match it {
+ ast::Pat::IdentPat(p) => p.name(),
+ _ => None,
+ }),
+ r.amp_token(),
+ lifetime,
+ is_elided,
+ ))
+ }
+ _ => (),
+ })
+ });
+ acc
+ };
+
+ // allocate names
+ let mut gen_idx_name = {
+ let mut gen = (0u8..).map(|idx| match idx {
+ idx if idx < 10 => SmolStr::from_iter(['\'', (idx + 48) as char]),
+ idx => format!("'{idx}").into(),
+ });
+ move || gen.next().unwrap_or_default()
+ };
+ let mut allocated_lifetimes = vec![];
+
+ let mut used_names: FxHashMap<SmolStr, usize> =
+ match config.param_names_for_lifetime_elision_hints {
+ true => generic_param_list
+ .iter()
+ .flat_map(|gpl| gpl.lifetime_params())
+ .filter_map(|param| param.lifetime())
+ .filter_map(|lt| Some((SmolStr::from(lt.text().as_str().get(1..)?), 0)))
+ .collect(),
+ false => Default::default(),
+ };
+ {
+ let mut potential_lt_refs = potential_lt_refs.iter().filter(|&&(.., is_elided)| is_elided);
+ if let Some(_) = &self_param {
+ if let Some(_) = potential_lt_refs.next() {
+ allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints {
+ // self can't be used as a lifetime, so no need to check for collisions
+ "'self".into()
+ } else {
+ gen_idx_name()
+ });
+ }
+ }
+ potential_lt_refs.for_each(|(name, ..)| {
+ let name = match name {
+ Some(it) if config.param_names_for_lifetime_elision_hints => {
+ if let Some(c) = used_names.get_mut(it.text().as_str()) {
+ *c += 1;
+ SmolStr::from(format!("'{text}{c}", text = it.text().as_str()))
+ } else {
+ used_names.insert(it.text().as_str().into(), 0);
+ SmolStr::from_iter(["\'", it.text().as_str()])
+ }
+ }
+ _ => gen_idx_name(),
+ };
+ allocated_lifetimes.push(name);
+ });
+ }
+
+ // fetch output lifetime if elision rule applies
+ let output = match potential_lt_refs.as_slice() {
+ [(_, _, lifetime, _), ..] if self_param.is_some() || potential_lt_refs.len() == 1 => {
+ match lifetime {
+ Some(lt) => match lt.text().as_str() {
+ "'_" => allocated_lifetimes.get(0).cloned(),
+ "'static" => None,
+ name => Some(name.into()),
+ },
+ None => allocated_lifetimes.get(0).cloned(),
+ }
+ }
+ [..] => None,
+ };
+
+ if allocated_lifetimes.is_empty() && output.is_none() {
+ return None;
+ }
+
+ // apply hints
+ // apply output if required
+ let mut is_trivial = true;
+ if let (Some(output_lt), Some(r)) = (&output, ret_type) {
+ if let Some(ty) = r.ty() {
+ walk_ty(&ty, &mut |ty| match ty {
+ ast::Type::RefType(ty) if ty.lifetime().is_none() => {
+ if let Some(amp) = ty.amp_token() {
+ is_trivial = false;
+ acc.push(mk_lt_hint(amp, output_lt.to_string()));
+ }
+ }
+ _ => (),
+ })
+ }
+ }
+
+ if config.lifetime_elision_hints == LifetimeElisionHints::SkipTrivial && is_trivial {
+ return None;
+ }
+
+ let mut a = allocated_lifetimes.iter();
+ for (_, amp_token, _, is_elided) in potential_lt_refs {
+ if is_elided {
+ let t = amp_token?;
+ let lt = a.next()?;
+ acc.push(mk_lt_hint(t, lt.to_string()));
+ }
+ }
+
+ // generate generic param list things
+ match (generic_param_list, allocated_lifetimes.as_slice()) {
+ (_, []) => (),
+ (Some(gpl), allocated_lifetimes) => {
+ let angle_tok = gpl.l_angle_token()?;
+ let is_empty = gpl.generic_params().next().is_none();
+ acc.push(InlayHint {
+ range: angle_tok.text_range(),
+ kind: InlayKind::LifetimeHint,
+ label: format!(
+ "{}{}",
+ allocated_lifetimes.iter().format(", "),
+ if is_empty { "" } else { ", " }
+ )
+ .into(),
+ tooltip: Some(InlayTooltip::String("Elided lifetimes".into())),
+ });
+ }
+ (None, allocated_lifetimes) => acc.push(InlayHint {
+ range: func.name()?.syntax().text_range(),
+ kind: InlayKind::GenericParamListHint,
+ label: format!("<{}>", allocated_lifetimes.iter().format(", "),).into(),
+ tooltip: Some(InlayTooltip::String("Elided lifetimes".into())),
+ }),
+ }
+ Some(())
+}
+
+fn closure_ret_hints(
+ acc: &mut Vec<InlayHint>,
+ sema: &Semantics<'_, RootDatabase>,
+ famous_defs: &FamousDefs<'_, '_>,
+ config: &InlayHintsConfig,
+ file_id: FileId,
+ closure: ast::ClosureExpr,
+) -> Option<()> {
+ if config.closure_return_type_hints == ClosureReturnTypeHints::Never {
+ return None;
+ }
+
+ if closure.ret_type().is_some() {
+ return None;
+ }
+
+ if !closure_has_block_body(&closure)
+ && config.closure_return_type_hints == ClosureReturnTypeHints::WithBlock
+ {
+ return None;
+ }
+
+ let param_list = closure.param_list()?;
+
+ let closure = sema.descend_node_into_attributes(closure.clone()).pop()?;
+ let ty = sema.type_of_expr(&ast::Expr::ClosureExpr(closure))?.adjusted();
+ let callable = ty.as_callable(sema.db)?;
+ let ty = callable.return_type();
+ if ty.is_unit() {
+ return None;
+ }
+ acc.push(InlayHint {
+ range: param_list.syntax().text_range(),
+ kind: InlayKind::ClosureReturnTypeHint,
+ label: hint_iterator(sema, &famous_defs, config, &ty)
+ .unwrap_or_else(|| ty.display_truncated(sema.db, config.max_length).to_string())
+ .into(),
+ tooltip: Some(InlayTooltip::HoverRanged(file_id, param_list.syntax().text_range())),
+ });
+ Some(())
+}
+
- if config.reborrow_hints == ReborrowHints::Never {
++fn adjustment_hints(
+ acc: &mut Vec<InlayHint>,
+ sema: &Semantics<'_, RootDatabase>,
+ config: &InlayHintsConfig,
+ expr: &ast::Expr,
+) -> Option<()> {
- let mutability = sema.is_implicit_reborrow(desc_expr)?;
- let label = match mutability {
- hir::Mutability::Shared if config.reborrow_hints != ReborrowHints::MutableOnly => "&*",
- hir::Mutability::Mut => "&mut *",
- _ => return None,
++ if config.adjustment_hints == AdjustmentHints::Never {
++ return None;
++ }
++
++ if let ast::Expr::ParenExpr(_) = expr {
++ // These inherit from the inner expression which would result in duplicate hints
+ return None;
+ }
+
++ let parent = expr.syntax().parent().and_then(ast::Expr::cast);
+ let descended = sema.descend_node_into_attributes(expr.clone()).pop();
+ let desc_expr = descended.as_ref().unwrap_or(expr);
- acc.push(InlayHint {
- range: expr.syntax().text_range(),
- kind: InlayKind::ImplicitReborrowHint,
- label: label.to_string().into(),
- tooltip: Some(InlayTooltip::String("Compiler inserted reborrow".into())),
- });
++ let adjustments = sema.expr_adjustments(desc_expr).filter(|it| !it.is_empty())?;
++ let needs_parens = match parent {
++ Some(parent) => {
++ match parent {
++ ast::Expr::AwaitExpr(_)
++ | ast::Expr::CallExpr(_)
++ | ast::Expr::CastExpr(_)
++ | ast::Expr::FieldExpr(_)
++ | ast::Expr::MethodCallExpr(_)
++ | ast::Expr::TryExpr(_) => true,
++ // FIXME: shorthands need special casing, though not sure if adjustments are even valid there
++ ast::Expr::RecordExpr(_) => false,
++ ast::Expr::IndexExpr(index) => index.base().as_ref() == Some(expr),
++ _ => false,
++ }
++ }
++ None => false,
+ };
- use ide_db::base_db::FileRange;
++ if needs_parens {
++ acc.push(InlayHint {
++ range: expr.syntax().text_range(),
++ kind: InlayKind::AdjustmentHint,
++ label: "(".into(),
++ tooltip: None,
++ });
++ }
++ for adjustment in adjustments.into_iter().rev() {
++ // FIXME: Add some nicer tooltips to each of these
++ let text = match adjustment {
++ Adjust::NeverToAny if config.adjustment_hints == AdjustmentHints::Always => {
++ "<never-to-any>"
++ }
++ Adjust::Deref(None) => "*",
++ Adjust::Deref(Some(OverloadedDeref(Mutability::Mut))) => "*",
++ Adjust::Deref(Some(OverloadedDeref(Mutability::Shared))) => "*",
++ Adjust::Borrow(AutoBorrow::Ref(Mutability::Shared)) => "&",
++ Adjust::Borrow(AutoBorrow::Ref(Mutability::Mut)) => "&mut ",
++ Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Shared)) => "&raw const ",
++ Adjust::Borrow(AutoBorrow::RawPtr(Mutability::Mut)) => "&raw mut ",
++ // some of these could be represented via `as` casts, but that's not too nice and
++ // handling everything as a prefix expr makes the `(` and `)` insertion easier
++ Adjust::Pointer(cast) if config.adjustment_hints == AdjustmentHints::Always => {
++ match cast {
++ PointerCast::ReifyFnPointer => "<fn-item-to-fn-pointer>",
++ PointerCast::UnsafeFnPointer => "<safe-fn-pointer-to-unsafe-fn-pointer>",
++ PointerCast::ClosureFnPointer(Safety::Unsafe) => {
++ "<closure-to-unsafe-fn-pointer>"
++ }
++ PointerCast::ClosureFnPointer(Safety::Safe) => "<closure-to-fn-pointer>",
++ PointerCast::MutToConstPointer => "<mut-ptr-to-const-ptr>",
++ PointerCast::ArrayToPointer => "<array-ptr-to-element-ptr>",
++ PointerCast::Unsize => "<unsize>",
++ }
++ }
++ _ => continue,
++ };
++ acc.push(InlayHint {
++ range: expr.syntax().text_range(),
++ kind: InlayKind::AdjustmentHint,
++ label: text.into(),
++ tooltip: None,
++ });
++ }
++ if needs_parens {
++ acc.push(InlayHint {
++ range: expr.syntax().text_range(),
++ kind: InlayKind::AdjustmentHintClosingParenthesis,
++ label: ")".into(),
++ tooltip: None,
++ });
++ }
+ Some(())
+}
+
+fn chaining_hints(
+ acc: &mut Vec<InlayHint>,
+ sema: &Semantics<'_, RootDatabase>,
+ famous_defs: &FamousDefs<'_, '_>,
+ config: &InlayHintsConfig,
+ file_id: FileId,
+ expr: &ast::Expr,
+) -> Option<()> {
+ if !config.chaining_hints {
+ return None;
+ }
+
+ if matches!(expr, ast::Expr::RecordExpr(_)) {
+ return None;
+ }
+
+ let descended = sema.descend_node_into_attributes(expr.clone()).pop();
+ let desc_expr = descended.as_ref().unwrap_or(expr);
+
+ let mut tokens = expr
+ .syntax()
+ .siblings_with_tokens(Direction::Next)
+ .filter_map(NodeOrToken::into_token)
+ .filter(|t| match t.kind() {
+ SyntaxKind::WHITESPACE if !t.text().contains('\n') => false,
+ SyntaxKind::COMMENT => false,
+ _ => true,
+ });
+
+ // Chaining can be defined as an expression whose next sibling tokens are newline and dot
+ // Ignoring extra whitespace and comments
+ let next = tokens.next()?.kind();
+ if next == SyntaxKind::WHITESPACE {
+ let mut next_next = tokens.next()?.kind();
+ while next_next == SyntaxKind::WHITESPACE {
+ next_next = tokens.next()?.kind();
+ }
+ if next_next == T![.] {
+ let ty = sema.type_of_expr(desc_expr)?.original;
+ if ty.is_unknown() {
+ return None;
+ }
+ if matches!(expr, ast::Expr::PathExpr(_)) {
+ if let Some(hir::Adt::Struct(st)) = ty.as_adt() {
+ if st.fields(sema.db).is_empty() {
+ return None;
+ }
+ }
+ }
+ acc.push(InlayHint {
+ range: expr.syntax().text_range(),
+ kind: InlayKind::ChainingHint,
+ label: hint_iterator(sema, &famous_defs, config, &ty)
+ .unwrap_or_else(|| ty.display_truncated(sema.db, config.max_length).to_string())
+ .into(),
+ tooltip: Some(InlayTooltip::HoverRanged(file_id, expr.syntax().text_range())),
+ });
+ }
+ }
+ Some(())
+}
+
+fn param_name_hints(
+ acc: &mut Vec<InlayHint>,
+ sema: &Semantics<'_, RootDatabase>,
+ config: &InlayHintsConfig,
+ expr: ast::Expr,
+) -> Option<()> {
+ if !config.parameter_hints {
+ return None;
+ }
+
+ let (callable, arg_list) = get_callable(sema, &expr)?;
+ let hints = callable
+ .params(sema.db)
+ .into_iter()
+ .zip(arg_list.args())
+ .filter_map(|((param, _ty), arg)| {
+ // Only annotate hints for expressions that exist in the original file
+ let range = sema.original_range_opt(arg.syntax())?;
+ let (param_name, name_syntax) = match param.as_ref()? {
+ Either::Left(pat) => ("self".to_string(), pat.name()),
+ Either::Right(pat) => match pat {
+ ast::Pat::IdentPat(it) => (it.name()?.to_string(), it.name()),
+ _ => return None,
+ },
+ };
+ Some((name_syntax, param_name, arg, range))
+ })
+ .filter(|(_, param_name, arg, _)| {
+ !should_hide_param_name_hint(sema, &callable, param_name, arg)
+ })
+ .map(|(param, param_name, _, FileRange { range, .. })| {
+ let mut tooltip = None;
+ if let Some(name) = param {
+ if let hir::CallableKind::Function(f) = callable.kind() {
+ // assert the file is cached so we can map out of macros
+ if let Some(_) = sema.source(f) {
+ tooltip = sema.original_range_opt(name.syntax());
+ }
+ }
+ }
+
+ InlayHint {
+ range,
+ kind: InlayKind::ParameterHint,
+ label: param_name.into(),
+ tooltip: tooltip.map(|it| InlayTooltip::HoverOffset(it.file_id, it.range.start())),
+ }
+ });
+
+ acc.extend(hints);
+ Some(())
+}
+
// Collects binding-mode hints (`&`, `&mut`, `ref`, `ref mut`) for `pat` into
// `acc`.
//
// Returns `None` when the hints are disabled or no binding mode could be
// computed; the `Option` is only an early-exit signal.
fn binding_mode_hints(
    acc: &mut Vec<InlayHint>,
    sema: &Semantics<'_, RootDatabase>,
    config: &InlayHintsConfig,
    pat: &ast::Pat,
) -> Option<()> {
    if !config.binding_mode_hints {
        return None;
    }

    let range = pat.syntax().text_range();
    // One hint per implicit adjustment applied to the pattern (match ergonomics).
    sema.pattern_adjustments(&pat).iter().for_each(|ty| {
        let reference = ty.is_reference();
        let mut_reference = ty.is_mutable_reference();
        let r = match (reference, mut_reference) {
            (true, true) => "&mut",
            (true, false) => "&",
            // Non-reference adjustments produce no hint.
            _ => return,
        };
        acc.push(InlayHint {
            range,
            kind: InlayKind::BindingModeHint,
            label: r.to_string().into(),
            tooltip: Some(InlayTooltip::String("Inferred binding mode".into())),
        });
    });
    match pat {
        // Only hint identifier patterns that don't already spell out `ref`/`mut`.
        ast::Pat::IdentPat(pat) if pat.ref_token().is_none() && pat.mut_token().is_none() => {
            let bm = sema.binding_mode_of_pat(pat)?;
            let bm = match bm {
                // By-value binding is the default; nothing to show.
                hir::BindingMode::Move => return None,
                hir::BindingMode::Ref(Mutability::Mut) => "ref mut",
                hir::BindingMode::Ref(Mutability::Shared) => "ref",
            };
            acc.push(InlayHint {
                range,
                kind: InlayKind::BindingModeHint,
                label: bm.to_string().into(),
                tooltip: Some(InlayTooltip::String("Inferred binding mode".into())),
            });
        }
        _ => (),
    }

    Some(())
}
+
// Collects a type hint for the identifier pattern `pat`
// (e.g. `let x /* : i32 */ = …`) into `acc`.
//
// Returns `None` when type hints are disabled, the hint is suppressed by the
// heuristics, or any intermediate lookup fails.
fn bind_pat_hints(
    acc: &mut Vec<InlayHint>,
    sema: &Semantics<'_, RootDatabase>,
    config: &InlayHintsConfig,
    file_id: FileId,
    pat: &ast::IdentPat,
) -> Option<()> {
    if !config.type_hints {
        return None;
    }

    // Prefer the attribute-macro-expanded node when one exists so the inferred
    // type matches what the compiler sees.
    let descended = sema.descend_node_into_attributes(pat.clone()).pop();
    let desc_pat = descended.as_ref().unwrap_or(pat);
    let ty = sema.type_of_pat(&desc_pat.clone().into())?.original;

    if should_not_display_type_hint(sema, config, pat, &ty) {
        return None;
    }

    let krate = sema.scope(desc_pat.syntax())?.krate();
    let famous_defs = FamousDefs(sema, krate);
    // Iterator adapters get a shortened `impl Iterator<Item = …>` label.
    let label = hint_iterator(sema, &famous_defs, config, &ty);

    let label = match label {
        Some(label) => label,
        None => {
            let ty_name = ty.display_truncated(sema.db, config.max_length).to_string();
            // Optionally suppress hints like `let foo /* : Foo */ = Foo::new()`
            // where the constructor already names the type.
            if config.hide_named_constructor_hints
                && is_named_constructor(sema, pat, &ty_name).is_some()
            {
                return None;
            }
            ty_name
        }
    };

    acc.push(InlayHint {
        // Attach the hint to the bound name when present, else the whole pattern.
        range: match pat.name() {
            Some(name) => name.syntax().text_range(),
            None => pat.syntax().text_range(),
        },
        kind: InlayKind::TypeHint,
        label: label.into(),
        tooltip: pat
            .name()
            .map(|it| it.syntax().text_range())
            .map(|it| InlayTooltip::HoverRanged(file_id, it)),
    });

    Some(())
}
+
// Returns `Some(())` when `pat` is initialized by a constructor-like
// expression whose (qualifying) path segment textually equals `ty_name`,
// e.g. `let foo = Foo::new()` or `let foo = Foo {}` — a type hint there would
// only repeat the name.
fn is_named_constructor(
    sema: &Semantics<'_, RootDatabase>,
    pat: &ast::IdentPat,
    ty_name: &str,
) -> Option<()> {
    let let_node = pat.syntax().parent()?;
    let expr = match_ast! {
        match let_node {
            ast::LetStmt(it) => it.initializer(),
            ast::LetExpr(it) => it.expr(),
            _ => None,
        }
    }?;

    let expr = sema.descend_node_into_attributes(expr.clone()).pop().unwrap_or(expr);
    // unwrap postfix expressions (`ctor()?`, `ctor().await`)
    let expr = match expr {
        ast::Expr::TryExpr(it) => it.expr(),
        ast::Expr::AwaitExpr(it) => it.expr(),
        expr => Some(expr),
    }?;
    // Only bare paths and path calls count as "named constructors".
    let expr = match expr {
        ast::Expr::CallExpr(call) => match call.expr()? {
            ast::Expr::PathExpr(path) => path,
            _ => return None,
        },
        ast::Expr::PathExpr(path) => path,
        _ => return None,
    };
    let path = expr.path()?;

    let callable = sema.type_of_expr(&ast::Expr::PathExpr(expr))?.original.as_callable(sema.db);
    let callable_kind = callable.map(|it| it.kind());
    // For `Ty::func()` / tuple-variant calls the type name is the qualifier
    // segment; for plain paths it is the last segment itself.
    let qual_seg = match callable_kind {
        Some(hir::CallableKind::Function(_) | hir::CallableKind::TupleEnumVariant(_)) => {
            path.qualifier()?.segment()
        }
        _ => path.segment(),
    }?;

    let ctor_name = match qual_seg.kind()? {
        ast::PathSegmentKind::Name(name_ref) => {
            // Include explicit generic args so `Generic::<i32>` compares
            // against the rendered type name `Generic<i32>`.
            match qual_seg.generic_arg_list().map(|it| it.generic_args()) {
                Some(generics) => format!("{}<{}>", name_ref, generics.format(", ")),
                None => name_ref.to_string(),
            }
        }
        ast::PathSegmentKind::Type { type_ref: Some(ty), trait_ref: None } => ty.to_string(),
        _ => return None,
    };
    (ctor_name == ty_name).then(|| ())
}
+
/// Checks if the type is an Iterator from std::iter and replaces its hint with an `impl Iterator<Item = Ty>`.
///
/// Recurses through the associated `Item` type so nested adapters render as
/// `impl Iterator<Item = impl Iterator<Item = …>>`. Returns `None` when `ty`
/// is not a public `core::iter` adapter implementing `Iterator`.
fn hint_iterator(
    sema: &Semantics<'_, RootDatabase>,
    famous_defs: &FamousDefs<'_, '_>,
    config: &InlayHintsConfig,
    ty: &hir::Type,
) -> Option<String> {
    let db = sema.db;
    let strukt = ty.strip_references().as_adt()?;
    let krate = strukt.module(db).krate();
    // Only shorten types that come from `core` itself.
    if krate != famous_defs.core()? {
        return None;
    }
    let iter_trait = famous_defs.core_iter_Iterator()?;
    let iter_mod = famous_defs.core_iter()?;

    // Assert that this struct comes from `core::iter`.
    if !(strukt.visibility(db) == hir::Visibility::Public
        && strukt.module(db).path_to_root(db).contains(&iter_mod))
    {
        return None;
    }

    if ty.impls_trait(db, iter_trait, &[]) {
        // Find the `Item` associated type of the Iterator trait.
        let assoc_type_item = iter_trait.items(db).into_iter().find_map(|item| match item {
            hir::AssocItem::TypeAlias(alias) if alias.name(db) == known::Item => Some(alias),
            _ => None,
        })?;
        if let Some(ty) = ty.normalize_trait_assoc_type(db, &[], assoc_type_item) {
            const LABEL_START: &str = "impl Iterator<Item = ";
            const LABEL_END: &str = ">";

            // Recurse for the item type; fall back to the plain (truncated)
            // display when it is not itself a core iterator.
            let ty_display = hint_iterator(sema, famous_defs, config, &ty)
                .map(|assoc_type_impl| assoc_type_impl.to_string())
                .unwrap_or_else(|| {
                    // Reserve the label chrome from the truncation budget so
                    // the final string stays within `max_length`.
                    ty.display_truncated(
                        db,
                        config
                            .max_length
                            .map(|len| len.saturating_sub(LABEL_START.len() + LABEL_END.len())),
                    )
                    .to_string()
                });
            return Some(format!("{}{}{}", LABEL_START, ty_display, LABEL_END));
        }
    }

    None
}
+
+fn pat_is_enum_variant(db: &RootDatabase, bind_pat: &ast::IdentPat, pat_ty: &hir::Type) -> bool {
+ if let Some(hir::Adt::Enum(enum_data)) = pat_ty.as_adt() {
+ let pat_text = bind_pat.to_string();
+ enum_data
+ .variants(db)
+ .into_iter()
+ .map(|variant| variant.name(db).to_smol_str())
+ .any(|enum_name| enum_name == pat_text)
+ } else {
+ false
+ }
+}
+
// Heuristics for suppressing a type hint on `bind_pat`: unknown types,
// unit-struct constructor patterns, closure initializers (when configured),
// explicit type ascriptions, and enum-variant-like patterns in match/let
// contexts.
fn should_not_display_type_hint(
    sema: &Semantics<'_, RootDatabase>,
    config: &InlayHintsConfig,
    bind_pat: &ast::IdentPat,
    pat_ty: &hir::Type,
) -> bool {
    let db = sema.db;

    if pat_ty.is_unknown() {
        return true;
    }

    // `let S = …` where `S` is a fieldless struct: the "binding" is really a
    // unit-struct pattern, so a hint would be redundant.
    if let Some(hir::Adt::Struct(s)) = pat_ty.as_adt() {
        if s.fields(db).is_empty() && s.name(db).to_smol_str() == bind_pat.to_string() {
            return true;
        }
    }

    // Optionally hide `let f = || { … }` hints for block-bodied closures.
    if config.hide_closure_initialization_hints {
        if let Some(parent) = bind_pat.syntax().parent() {
            if let Some(it) = ast::LetStmt::cast(parent.clone()) {
                if let Some(ast::Expr::ClosureExpr(closure)) = it.initializer() {
                    if closure_has_block_body(&closure) {
                        return true;
                    }
                }
            }
        }
    }

    // Walk outwards; the nearest matching ancestor decides whether the hint is useful.
    for node in bind_pat.syntax().ancestors() {
        match_ast! {
            match node {
                // An explicit type ascription makes the hint redundant.
                ast::LetStmt(it) => return it.ty().is_some(),
                // FIXME: We might wanna show type hints in parameters for non-top level patterns as well
                ast::Param(it) => return it.ty().is_some(),
                ast::MatchArm(_) => return pat_is_enum_variant(db, bind_pat, pat_ty),
                ast::LetExpr(_) => return pat_is_enum_variant(db, bind_pat, pat_ty),
                ast::IfExpr(_) => return false,
                ast::WhileExpr(_) => return false,
                ast::ForExpr(it) => {
                    // We *should* display hint only if user provided "in {expr}" and we know the type of expr (and it's not unit).
                    // Type of expr should be iterable.
                    return it.in_token().is_none() ||
                        it.iterable()
                            .and_then(|iterable_expr| sema.type_of_expr(&iterable_expr))
                            .map(TypeInfo::original)
                            .map_or(true, |iterable_ty| iterable_ty.is_unknown() || iterable_ty.is_unit())
                },
                _ => (),
            }
        }
    }
    false
}
+
+fn closure_has_block_body(closure: &ast::ClosureExpr) -> bool {
+ matches!(closure.body(), Some(ast::Expr::BlockExpr(_)))
+}
+
// Decides whether the parameter-name hint for `argument` would be noise and
// should therefore be suppressed.
fn should_hide_param_name_hint(
    sema: &Semantics<'_, RootDatabase>,
    callable: &hir::Callable,
    param_name: &str,
    argument: &ast::Expr,
) -> bool {
    // These are to be tested in the `parameter_hint_heuristics` test
    // hide when:
    // - the parameter name is a suffix of the function's name
    // - the argument is a qualified constructing or call expression where the qualifier is an ADT
    // - exact argument<->parameter match(ignoring leading underscore) or parameter is a prefix/suffix
    //   of argument with _ splitting it off
    // - param starts with `ra_fixture`
    // - param is a well known name in a unary function

    let param_name = param_name.trim_start_matches('_');
    if param_name.is_empty() {
        return true;
    }

    // `!x` flips the meaning of the argument, so always keep the hint there.
    if matches!(argument, ast::Expr::PrefixExpr(prefix) if prefix.op_kind() == Some(UnaryOp::Not)) {
        return false;
    }

    // The function name is only available for plain functions, not closures
    // or Fn-pointer calls.
    let fn_name = match callable.kind() {
        hir::CallableKind::Function(it) => Some(it.name(sema.db).to_smol_str()),
        _ => None,
    };
    let fn_name = fn_name.as_deref();
    is_param_name_suffix_of_fn_name(param_name, callable, fn_name)
        || is_argument_similar_to_param_name(argument, param_name)
        || param_name.starts_with("ra_fixture")
        || (callable.n_params() == 1 && is_obvious_param(param_name))
        || is_adt_constructor_similar_to_param_name(sema, argument, param_name)
}
+
+fn is_argument_similar_to_param_name(argument: &ast::Expr, param_name: &str) -> bool {
+ // check whether param_name and argument are the same or
+ // whether param_name is a prefix/suffix of argument(split at `_`)
+ let argument = match get_string_representation(argument) {
+ Some(argument) => argument,
+ None => return false,
+ };
+
+ // std is honestly too panic happy...
+ let str_split_at = |str: &str, at| str.is_char_boundary(at).then(|| argument.split_at(at));
+
+ let param_name = param_name.trim_start_matches('_');
+ let argument = argument.trim_start_matches('_');
+
+ match str_split_at(argument, param_name.len()) {
+ Some((prefix, rest)) if prefix.eq_ignore_ascii_case(param_name) => {
+ return rest.is_empty() || rest.starts_with('_');
+ }
+ _ => (),
+ }
+ match argument.len().checked_sub(param_name.len()).and_then(|at| str_split_at(argument, at)) {
+ Some((rest, suffix)) if param_name.eq_ignore_ascii_case(suffix) => {
+ return rest.is_empty() || rest.ends_with('_');
+ }
+ _ => (),
+ }
+ false
+}
+
+/// Hide the parameter name of a unary function if it is a `_` - prefixed suffix of the function's name, or equal.
+///
+/// `fn strip_suffix(suffix)` will be hidden.
+/// `fn stripsuffix(suffix)` will not be hidden.
+fn is_param_name_suffix_of_fn_name(
+ param_name: &str,
+ callable: &Callable,
+ fn_name: Option<&str>,
+) -> bool {
+ match (callable.n_params(), fn_name) {
+ (1, Some(function)) => {
+ function == param_name
+ || function
+ .len()
+ .checked_sub(param_name.len())
+ .and_then(|at| function.is_char_boundary(at).then(|| function.split_at(at)))
+ .map_or(false, |(prefix, suffix)| {
+ suffix.eq_ignore_ascii_case(param_name) && prefix.ends_with('_')
+ })
+ }
+ _ => false,
+ }
+}
+
// Returns `true` when `argument` names or constructs an ADT (directly, via an
// associated function, or via a variant) whose lower-snake-cased name equals
// `param_name` — e.g. passing `Foo::new()` to a parameter named `foo`.
fn is_adt_constructor_similar_to_param_name(
    sema: &Semantics<'_, RootDatabase>,
    argument: &ast::Expr,
    param_name: &str,
) -> bool {
    // Extract the path being called/constructed, if any.
    let path = match argument {
        ast::Expr::CallExpr(c) => c.expr().and_then(|e| match e {
            ast::Expr::PathExpr(p) => p.path(),
            _ => None,
        }),
        ast::Expr::PathExpr(p) => p.path(),
        ast::Expr::RecordExpr(r) => r.path(),
        _ => return false,
    };
    let path = match path {
        Some(it) => it,
        None => return false,
    };
    // Immediately-invoked closure so `?` can be used for the fallible lookups.
    (|| match sema.resolve_path(&path)? {
        hir::PathResolution::Def(hir::ModuleDef::Adt(_)) => {
            Some(to_lower_snake_case(&path.segment()?.name_ref()?.text()) == param_name)
        }
        hir::PathResolution::Def(hir::ModuleDef::Function(_) | hir::ModuleDef::Variant(_)) => {
            if to_lower_snake_case(&path.segment()?.name_ref()?.text()) == param_name {
                return Some(true);
            }
            // Otherwise compare against the qualifying ADT, e.g. `Foo` in `Foo::new`.
            let qual = path.qualifier()?;
            match sema.resolve_path(&qual)? {
                hir::PathResolution::Def(hir::ModuleDef::Adt(_)) => {
                    Some(to_lower_snake_case(&qual.segment()?.name_ref()?.text()) == param_name)
                }
                _ => None,
            }
        }
        _ => None,
    })()
    .unwrap_or(false)
}
+
// Extracts a best-effort textual "name" for `expr` to compare against a
// parameter name, looking through `clone`/`as_ref` calls, references, casts
// and prefix operators. Returns `None` for expressions with no obvious name.
fn get_string_representation(expr: &ast::Expr) -> Option<String> {
    match expr {
        ast::Expr::MethodCallExpr(method_call_expr) => {
            let name_ref = method_call_expr.name_ref()?;
            match name_ref.text().as_str() {
                // `x.clone()` / `x.as_ref()` are transparent: name the receiver.
                "clone" | "as_ref" => method_call_expr.receiver().map(|rec| rec.to_string()),
                name_ref => Some(name_ref.to_owned()),
            }
        }
        ast::Expr::MacroExpr(macro_expr) => {
            Some(macro_expr.macro_call()?.path()?.segment()?.to_string())
        }
        ast::Expr::FieldExpr(field_expr) => Some(field_expr.name_ref()?.to_string()),
        ast::Expr::PathExpr(path_expr) => Some(path_expr.path()?.segment()?.to_string()),
        // Wrappers that don't change what the value is "called": recurse.
        ast::Expr::PrefixExpr(prefix_expr) => get_string_representation(&prefix_expr.expr()?),
        ast::Expr::RefExpr(ref_expr) => get_string_representation(&ref_expr.expr()?),
        ast::Expr::CastExpr(cast_expr) => get_string_representation(&cast_expr.expr()?),
        _ => None,
    }
}
+
// Single-letter names and a handful of ubiquitous std parameter names
// (map/filter-style callbacks, comparison operands) carry no information,
// so hints for them are noise.
fn is_obvious_param(param_name: &str) -> bool {
    match param_name {
        "predicate" | "value" | "pat" | "rhs" | "other" => true,
        // Note: byte length, matching the original behavior.
        name => name.len() == 1,
    }
}
+
// Resolves `expr` to a callable plus its argument list when it is a call or
// method-call expression; returns `None` for any other expression kind.
fn get_callable(
    sema: &Semantics<'_, RootDatabase>,
    expr: &ast::Expr,
) -> Option<(hir::Callable, ast::ArgList)> {
    match expr {
        ast::Expr::CallExpr(expr) => {
            // Prefer the attribute-macro-expanded node when one exists.
            let descended = sema.descend_node_into_attributes(expr.clone()).pop();
            let expr = descended.as_ref().unwrap_or(expr);
            sema.type_of_expr(&expr.expr()?)?.original.as_callable(sema.db).zip(expr.arg_list())
        }
        ast::Expr::MethodCallExpr(expr) => {
            let descended = sema.descend_node_into_attributes(expr.clone()).pop();
            let expr = descended.as_ref().unwrap_or(expr);
            sema.resolve_method_call_as_callable(expr).zip(expr.arg_list())
        }
        _ => None,
    }
}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
- use crate::inlay_hints::ReborrowHints;
+ use itertools::Itertools;
+ use syntax::{TextRange, TextSize};
+ use test_utils::extract_annotations;
+
- reborrow_hints: ReborrowHints::Always,
++ use crate::inlay_hints::AdjustmentHints;
+ use crate::{fixture, inlay_hints::InlayHintsConfig, LifetimeElisionHints};
+
+ use super::ClosureReturnTypeHints;
+
+ const DISABLED_CONFIG: InlayHintsConfig = InlayHintsConfig {
+ render_colons: false,
+ type_hints: false,
+ parameter_hints: false,
+ chaining_hints: false,
+ lifetime_elision_hints: LifetimeElisionHints::Never,
+ closure_return_type_hints: ClosureReturnTypeHints::Never,
- reborrow_hints: ReborrowHints::Always,
++ adjustment_hints: AdjustmentHints::Never,
+ binding_mode_hints: false,
+ hide_named_constructor_hints: false,
+ hide_closure_initialization_hints: false,
+ param_names_for_lifetime_elision_hints: false,
+ max_length: None,
+ closing_brace_hints_min_lines: None,
+ };
+ const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig {
+ type_hints: true,
+ parameter_hints: true,
+ chaining_hints: true,
- Some(FileRange {
- file_id,
- range: TextRange::new(TextSize::from(500), TextSize::from(600)),
- }),
+ closure_return_type_hints: ClosureReturnTypeHints::WithBlock,
+ binding_mode_hints: true,
+ lifetime_elision_hints: LifetimeElisionHints::Always,
+ ..DISABLED_CONFIG
+ };
+
+ #[track_caller]
+ fn check(ra_fixture: &str) {
+ check_with_config(TEST_CONFIG, ra_fixture);
+ }
+
+ #[track_caller]
+ fn check_params(ra_fixture: &str) {
+ check_with_config(
+ InlayHintsConfig { parameter_hints: true, ..DISABLED_CONFIG },
+ ra_fixture,
+ );
+ }
+
+ #[track_caller]
+ fn check_types(ra_fixture: &str) {
+ check_with_config(InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG }, ra_fixture);
+ }
+
+ #[track_caller]
+ fn check_chains(ra_fixture: &str) {
+ check_with_config(InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, ra_fixture);
+ }
+
+ #[track_caller]
+ fn check_with_config(config: InlayHintsConfig, ra_fixture: &str) {
+ let (analysis, file_id) = fixture::file(ra_fixture);
+ let mut expected = extract_annotations(&*analysis.file_text(file_id).unwrap());
+ let inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap();
+ let actual = inlay_hints
+ .into_iter()
+ .map(|it| (it.range, it.label.to_string()))
+ .sorted_by_key(|(range, _)| range.start())
+ .collect::<Vec<_>>();
+ expected.sort_by_key(|(range, _)| range.start());
+
+ assert_eq!(expected, actual, "\nExpected:\n{:#?}\n\nActual:\n{:#?}", expected, actual);
+ }
+
+ #[track_caller]
+ fn check_expect(config: InlayHintsConfig, ra_fixture: &str, expect: Expect) {
+ let (analysis, file_id) = fixture::file(ra_fixture);
+ let inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap();
+ expect.assert_debug_eq(&inlay_hints)
+ }
+
+ #[test]
+ fn hints_disabled() {
+ check_with_config(
+ InlayHintsConfig { render_colons: true, ..DISABLED_CONFIG },
+ r#"
+fn foo(a: i32, b: i32) -> i32 { a + b }
+fn main() {
+ let _x = foo(4, 4);
+}"#,
+ );
+ }
+
+ // Parameter hint tests
+
+ #[test]
+ fn param_hints_only() {
+ check_params(
+ r#"
+fn foo(a: i32, b: i32) -> i32 { a + b }
+fn main() {
+ let _x = foo(
+ 4,
+ //^ a
+ 4,
+ //^ b
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn param_hints_on_closure() {
+ check_params(
+ r#"
+fn main() {
+ let clo = |a: u8, b: u8| a + b;
+ clo(
+ 1,
+ //^ a
+ 2,
+ //^ b
+ );
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn param_name_similar_to_fn_name_still_hints() {
+ check_params(
+ r#"
+fn max(x: i32, y: i32) -> i32 { x + y }
+fn main() {
+ let _x = max(
+ 4,
+ //^ x
+ 4,
+ //^ y
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn param_name_similar_to_fn_name() {
+ check_params(
+ r#"
+fn param_with_underscore(with_underscore: i32) -> i32 { with_underscore }
+fn main() {
+ let _x = param_with_underscore(
+ 4,
+ );
+}"#,
+ );
+ check_params(
+ r#"
+fn param_with_underscore(underscore: i32) -> i32 { underscore }
+fn main() {
+ let _x = param_with_underscore(
+ 4,
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn param_name_same_as_fn_name() {
+ check_params(
+ r#"
+fn foo(foo: i32) -> i32 { foo }
+fn main() {
+ let _x = foo(
+ 4,
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn never_hide_param_when_multiple_params() {
+ check_params(
+ r#"
+fn foo(foo: i32, bar: i32) -> i32 { bar + baz }
+fn main() {
+ let _x = foo(
+ 4,
+ //^ foo
+ 8,
+ //^ bar
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn param_hints_look_through_as_ref_and_clone() {
+ check_params(
+ r#"
+fn foo(bar: i32, baz: f32) {}
+
+fn main() {
+ let bar = 3;
+ let baz = &"baz";
+ let fez = 1.0;
+ foo(bar.clone(), bar.clone());
+ //^^^^^^^^^^^ baz
+ foo(bar.as_ref(), bar.as_ref());
+ //^^^^^^^^^^^^ baz
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn self_param_hints() {
+ check_params(
+ r#"
+struct Foo;
+
+impl Foo {
+ fn foo(self: Self) {}
+ fn bar(self: &Self) {}
+}
+
+fn main() {
+ Foo::foo(Foo);
+ //^^^ self
+ Foo::bar(&Foo);
+ //^^^^ self
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn param_name_hints_show_for_literals() {
+ check_params(
+ r#"pub fn test(a: i32, b: i32) -> [i32; 2] { [a, b] }
+fn main() {
+ test(
+ 0xa_b,
+ //^^^^^ a
+ 0xa_b,
+ //^^^^^ b
+ );
+}"#,
+ )
+ }
+
+ #[test]
+ fn function_call_parameter_hint() {
+ check_params(
+ r#"
+//- minicore: option
+struct FileId {}
+struct SmolStr {}
+
+struct TextRange {}
+struct SyntaxKind {}
+struct NavigationTarget {}
+
+struct Test {}
+
+impl Test {
+ fn method(&self, mut param: i32) -> i32 { param * 2 }
+
+ fn from_syntax(
+ file_id: FileId,
+ name: SmolStr,
+ focus_range: Option<TextRange>,
+ full_range: TextRange,
+ kind: SyntaxKind,
+ docs: Option<String>,
+ ) -> NavigationTarget {
+ NavigationTarget {}
+ }
+}
+
+fn test_func(mut foo: i32, bar: i32, msg: &str, _: i32, last: i32) -> i32 {
+ foo + bar
+}
+
+fn main() {
+ let not_literal = 1;
+ let _: i32 = test_func(1, 2, "hello", 3, not_literal);
+ //^ foo ^ bar ^^^^^^^ msg ^^^^^^^^^^^ last
+ let t: Test = Test {};
+ t.method(123);
+ //^^^ param
+ Test::method(&t, 3456);
+ //^^ self ^^^^ param
+ Test::from_syntax(
+ FileId {},
+ "impl".into(),
+ //^^^^^^^^^^^^^ name
+ None,
+ //^^^^ focus_range
+ TextRange {},
+ //^^^^^^^^^^^^ full_range
+ SyntaxKind {},
+ //^^^^^^^^^^^^^ kind
+ None,
+ //^^^^ docs
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn parameter_hint_heuristics() {
+ check_params(
+ r#"
+fn check(ra_fixture_thing: &str) {}
+
+fn map(f: i32) {}
+fn filter(predicate: i32) {}
+
+fn strip_suffix(suffix: &str) {}
+fn stripsuffix(suffix: &str) {}
+fn same(same: u32) {}
+fn same2(_same2: u32) {}
+
+fn enum_matches_param_name(completion_kind: CompletionKind) {}
+
+fn foo(param: u32) {}
+fn bar(param_eter: u32) {}
+
+enum CompletionKind {
+ Keyword,
+}
+
+fn non_ident_pat((a, b): (u32, u32)) {}
+
+fn main() {
+ const PARAM: u32 = 0;
+ foo(PARAM);
+ foo(!PARAM);
+ // ^^^^^^ param
+ check("");
+
+ map(0);
+ filter(0);
+
+ strip_suffix("");
+ stripsuffix("");
+ //^^ suffix
+ same(0);
+ same2(0);
+
+ enum_matches_param_name(CompletionKind::Keyword);
+
+ let param = 0;
+ foo(param);
+ foo(param as _);
+ let param_end = 0;
+ foo(param_end);
+ let start_param = 0;
+ foo(start_param);
+ let param2 = 0;
+ foo(param2);
+ //^^^^^^ param
+
+ macro_rules! param {
+ () => {};
+ };
+ foo(param!());
+
+ let param_eter = 0;
+ bar(param_eter);
+ let param_eter_end = 0;
+ bar(param_eter_end);
+ let start_param_eter = 0;
+ bar(start_param_eter);
+ let param_eter2 = 0;
+ bar(param_eter2);
+ //^^^^^^^^^^^ param_eter
+
+ non_ident_pat((0, 0));
+}"#,
+ );
+ }
+
+ // Type-Hint tests
+
+ #[test]
+ fn type_hints_only() {
+ check_types(
+ r#"
+fn foo(a: i32, b: i32) -> i32 { a + b }
+fn main() {
+ let _x = foo(4, 4);
+ //^^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn type_hints_bindings_after_at() {
+ check_types(
+ r#"
+//- minicore: option
+fn main() {
+ let ref foo @ bar @ ref mut baz = 0;
+ //^^^ &i32
+ //^^^ i32
+ //^^^ &mut i32
+ let [x @ ..] = [0];
+ //^ [i32; 1]
+ if let x @ Some(_) = Some(0) {}
+ //^ Option<i32>
+ let foo @ (bar, baz) = (3, 3);
+ //^^^ (i32, i32)
+ //^^^ i32
+ //^^^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn default_generic_types_should_not_be_displayed() {
+ check(
+ r#"
+struct Test<K, T = u8> { k: K, t: T }
+
+fn main() {
+ let zz = Test { t: 23u8, k: 33 };
+ //^^ Test<i32>
+ let zz_ref = &zz;
+ //^^^^^^ &Test<i32>
+ let test = || zz;
+ //^^^^ || -> Test<i32>
+}"#,
+ );
+ }
+
+ #[test]
+ fn shorten_iterators_in_associated_params() {
+ check_types(
+ r#"
+//- minicore: iterators
+use core::iter;
+
+pub struct SomeIter<T> {}
+
+impl<T> SomeIter<T> {
+ pub fn new() -> Self { SomeIter {} }
+ pub fn push(&mut self, t: T) {}
+}
+
+impl<T> Iterator for SomeIter<T> {
+ type Item = T;
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+}
+
+fn main() {
+ let mut some_iter = SomeIter::new();
+ //^^^^^^^^^ SomeIter<Take<Repeat<i32>>>
+ some_iter.push(iter::repeat(2).take(2));
+ let iter_of_iters = some_iter.take(2);
+ //^^^^^^^^^^^^^ impl Iterator<Item = impl Iterator<Item = i32>>
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn iterator_hint_regression_issue_12674() {
+ // Ensure we don't crash while solving the projection type of iterators.
+ check_expect(
+ InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
+ r#"
+//- minicore: iterators
+struct S<T>(T);
+impl<T> S<T> {
+ fn iter(&self) -> Iter<'_, T> { loop {} }
+}
+struct Iter<'a, T: 'a>(&'a T);
+impl<'a, T> Iterator for Iter<'a, T> {
+ type Item = &'a T;
+ fn next(&mut self) -> Option<Self::Item> { loop {} }
+}
+struct Container<'a> {
+ elements: S<&'a str>,
+}
+struct SliceIter<'a, T>(&'a T);
+impl<'a, T> Iterator for SliceIter<'a, T> {
+ type Item = &'a T;
+ fn next(&mut self) -> Option<Self::Item> { loop {} }
+}
+
+fn main(a: SliceIter<'_, Container>) {
+ a
+ .filter_map(|c| Some(c.elements.iter().filter_map(|v| Some(v))))
+ .map(|e| e);
+}
+ "#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 484..554,
+ kind: ChainingHint,
+ label: [
+ "impl Iterator<Item = impl Iterator<Item = &&str>>",
+ ],
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 484..554,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 484..485,
+ kind: ChainingHint,
+ label: [
+ "SliceIter<Container>",
+ ],
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 484..485,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn infer_call_method_return_associated_types_with_generic() {
+ check_types(
+ r#"
+ pub trait Default {
+ fn default() -> Self;
+ }
+ pub trait Foo {
+ type Bar: Default;
+ }
+
+ pub fn quux<T: Foo>() -> T::Bar {
+ let y = Default::default();
+ //^ <T as Foo>::Bar
+
+ y
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn fn_hints() {
+ check_types(
+ r#"
+//- minicore: fn, sized
+fn foo() -> impl Fn() { loop {} }
+fn foo1() -> impl Fn(f64) { loop {} }
+fn foo2() -> impl Fn(f64, f64) { loop {} }
+fn foo3() -> impl Fn(f64, f64) -> u32 { loop {} }
+fn foo4() -> &'static dyn Fn(f64, f64) -> u32 { loop {} }
+fn foo5() -> &'static dyn Fn(&'static dyn Fn(f64, f64) -> u32, f64) -> u32 { loop {} }
+fn foo6() -> impl Fn(f64, f64) -> u32 + Sized { loop {} }
+fn foo7() -> *const (impl Fn(f64, f64) -> u32 + Sized) { loop {} }
+
+fn main() {
+ let foo = foo();
+ // ^^^ impl Fn()
+ let foo = foo1();
+ // ^^^ impl Fn(f64)
+ let foo = foo2();
+ // ^^^ impl Fn(f64, f64)
+ let foo = foo3();
+ // ^^^ impl Fn(f64, f64) -> u32
+ let foo = foo4();
+ // ^^^ &dyn Fn(f64, f64) -> u32
+ let foo = foo5();
+ // ^^^ &dyn Fn(&dyn Fn(f64, f64) -> u32, f64) -> u32
+ let foo = foo6();
+ // ^^^ impl Fn(f64, f64) -> u32
+ let foo = foo7();
+ // ^^^ *const impl Fn(f64, f64) -> u32
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn check_hint_range_limit() {
+ let fixture = r#"
+ //- minicore: fn, sized
+ fn foo() -> impl Fn() { loop {} }
+ fn foo1() -> impl Fn(f64) { loop {} }
+ fn foo2() -> impl Fn(f64, f64) { loop {} }
+ fn foo3() -> impl Fn(f64, f64) -> u32 { loop {} }
+ fn foo4() -> &'static dyn Fn(f64, f64) -> u32 { loop {} }
+ fn foo5() -> &'static dyn Fn(&'static dyn Fn(f64, f64) -> u32, f64) -> u32 { loop {} }
+ fn foo6() -> impl Fn(f64, f64) -> u32 + Sized { loop {} }
+ fn foo7() -> *const (impl Fn(f64, f64) -> u32 + Sized) { loop {} }
+
+ fn main() {
+ let foo = foo();
+ let foo = foo1();
+ let foo = foo2();
+ // ^^^ impl Fn(f64, f64)
+ let foo = foo3();
+ // ^^^ impl Fn(f64, f64) -> u32
+ let foo = foo4();
+ let foo = foo5();
+ let foo = foo6();
+ let foo = foo7();
+ }
+ "#;
+ let (analysis, file_id) = fixture::file(fixture);
+ let expected = extract_annotations(&*analysis.file_text(file_id).unwrap());
+ let inlay_hints = analysis
+ .inlay_hints(
+ &InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG },
+ file_id,
- #[test]
- fn hints_implicit_reborrow() {
- check_with_config(
- InlayHintsConfig {
- reborrow_hints: ReborrowHints::Always,
- parameter_hints: true,
- ..DISABLED_CONFIG
- },
- r#"
- fn __() {
- let unique = &mut ();
- let r_mov = unique;
- let foo: &mut _ = unique;
- //^^^^^^ &mut *
- ref_mut_id(unique);
- //^^^^^^ mut_ref
- //^^^^^^ &mut *
- let shared = ref_id(unique);
- //^^^^^^ shared_ref
- //^^^^^^ &*
- let mov = shared;
- let r_mov: &_ = shared;
- ref_id(shared);
- //^^^^^^ shared_ref
-
- identity(unique);
- identity(shared);
- }
- fn identity<T>(t: T) -> T {
- t
- }
- fn ref_mut_id(mut_ref: &mut ()) -> &mut () {
- mut_ref
- //^^^^^^^ &mut *
- }
- fn ref_id(shared_ref: &()) -> &() {
- shared_ref
- }
- "#,
- );
- }
-
++ Some(TextRange::new(TextSize::from(500), TextSize::from(600))),
+ )
+ .unwrap();
+ let actual =
+ inlay_hints.into_iter().map(|it| (it.range, it.label.to_string())).collect::<Vec<_>>();
+ assert_eq!(expected, actual, "\nExpected:\n{:#?}\n\nActual:\n{:#?}", expected, actual);
+ }
+
+ #[test]
+ fn fn_hints_ptr_rpit_fn_parentheses() {
+ check_types(
+ r#"
+//- minicore: fn, sized
+trait Trait {}
+
+fn foo1() -> *const impl Fn() { loop {} }
+fn foo2() -> *const (impl Fn() + Sized) { loop {} }
+fn foo3() -> *const (impl Fn() + ?Sized) { loop {} }
+fn foo4() -> *const (impl Sized + Fn()) { loop {} }
+fn foo5() -> *const (impl ?Sized + Fn()) { loop {} }
+fn foo6() -> *const (impl Fn() + Trait) { loop {} }
+fn foo7() -> *const (impl Fn() + Sized + Trait) { loop {} }
+fn foo8() -> *const (impl Fn() + ?Sized + Trait) { loop {} }
+fn foo9() -> *const (impl Fn() -> u8 + ?Sized) { loop {} }
+fn foo10() -> *const (impl Fn() + Sized + ?Sized) { loop {} }
+
+fn main() {
+ let foo = foo1();
+ // ^^^ *const impl Fn()
+ let foo = foo2();
+ // ^^^ *const impl Fn()
+ let foo = foo3();
+ // ^^^ *const (impl Fn() + ?Sized)
+ let foo = foo4();
+ // ^^^ *const impl Fn()
+ let foo = foo5();
+ // ^^^ *const (impl Fn() + ?Sized)
+ let foo = foo6();
+ // ^^^ *const (impl Fn() + Trait)
+ let foo = foo7();
+ // ^^^ *const (impl Fn() + Trait)
+ let foo = foo8();
+ // ^^^ *const (impl Fn() + Trait + ?Sized)
+ let foo = foo9();
+ // ^^^ *const (impl Fn() -> u8 + ?Sized)
+ let foo = foo10();
+ // ^^^ *const impl Fn()
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn unit_structs_have_no_type_hints() {
+ check_types(
+ r#"
+//- minicore: result
+struct SyntheticSyntax;
+
+fn main() {
+ match Ok(()) {
+ Ok(_) => (),
+ Err(SyntheticSyntax) => (),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn let_statement() {
+ check_types(
+ r#"
+#[derive(PartialEq)]
+enum Option<T> { None, Some(T) }
+
+#[derive(PartialEq)]
+struct Test { a: Option<u32>, b: u8 }
+
+fn main() {
+ struct InnerStruct {}
+
+ let test = 54;
+ //^^^^ i32
+ let test: i32 = 33;
+ let mut test = 33;
+ //^^^^ i32
+ let _ = 22;
+ let test = "test";
+ //^^^^ &str
+ let test = InnerStruct {};
+ //^^^^ InnerStruct
+
+ let test = unresolved();
+
+ let test = (42, 'a');
+ //^^^^ (i32, char)
+ let (a, (b, (c,)) = (2, (3, (9.2,));
+ //^ i32 ^ i32 ^ f64
+ let &x = &92;
+ //^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn if_expr() {
+ check_types(
+ r#"
+//- minicore: option
+struct Test { a: Option<u32>, b: u8 }
+
+fn main() {
+ let test = Some(Test { a: Some(3), b: 1 });
+ //^^^^ Option<Test>
+ if let None = &test {};
+ if let test = &test {};
+ //^^^^ &Option<Test>
+ if let Some(test) = &test {};
+ //^^^^ &Test
+ if let Some(Test { a, b }) = &test {};
+ //^ &Option<u32> ^ &u8
+ if let Some(Test { a: x, b: y }) = &test {};
+ //^ &Option<u32> ^ &u8
+ if let Some(Test { a: Some(x), b: y }) = &test {};
+ //^ &u32 ^ &u8
+ if let Some(Test { a: None, b: y }) = &test {};
+ //^ &u8
+ if let Some(Test { b: y, .. }) = &test {};
+ //^ &u8
+ if test == None {}
+}"#,
+ );
+ }
+
+ #[test]
+ fn while_expr() {
+ check_types(
+ r#"
+//- minicore: option
+struct Test { a: Option<u32>, b: u8 }
+
+fn main() {
+ let test = Some(Test { a: Some(3), b: 1 });
+ //^^^^ Option<Test>
+ while let Some(Test { a: Some(x), b: y }) = &test {};
+ //^ &u32 ^ &u8
+}"#,
+ );
+ }
+
+ #[test]
+ fn match_arm_list() {
+ check_types(
+ r#"
+//- minicore: option
+struct Test { a: Option<u32>, b: u8 }
+
+fn main() {
+ match Some(Test { a: Some(3), b: 1 }) {
+ None => (),
+ test => (),
+ //^^^^ Option<Test>
+ Some(Test { a: Some(x), b: y }) => (),
+ //^ u32 ^ u8
+ _ => {}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn complete_for_hint() {
+ check_types(
+ r#"
+//- minicore: iterator
+pub struct Vec<T> {}
+
+impl<T> Vec<T> {
+ pub fn new() -> Self { Vec {} }
+ pub fn push(&mut self, t: T) {}
+}
+
+impl<T> IntoIterator for Vec<T> {
+ type Item = T;
+ type IntoIter = IntoIter<T>;
+}
+
+struct IntoIter<T> {}
+
+impl<T> Iterator for IntoIter<T> {
+ type Item = T;
+}
+
+fn main() {
+ let mut data = Vec::new();
+ //^^^^ Vec<&str>
+ data.push("foo");
+ for i in data {
+ //^ &str
+ let z = i;
+ //^ &str
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn multi_dyn_trait_bounds() {
+ check_types(
+ r#"
+pub struct Vec<T> {}
+
+impl<T> Vec<T> {
+ pub fn new() -> Self { Vec {} }
+}
+
+pub struct Box<T> {}
+
+trait Display {}
+auto trait Sync {}
+
+fn main() {
+ // The block expression wrapping disables the constructor hint hiding logic
+ let _v = { Vec::<Box<&(dyn Display + Sync)>>::new() };
+ //^^ Vec<Box<&(dyn Display + Sync)>>
+ let _v = { Vec::<Box<*const (dyn Display + Sync)>>::new() };
+ //^^ Vec<Box<*const (dyn Display + Sync)>>
+ let _v = { Vec::<Box<dyn Display + Sync>>::new() };
+ //^^ Vec<Box<dyn Display + Sync>>
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn shorten_iterator_hints() {
+ check_types(
+ r#"
+//- minicore: iterators
+use core::iter;
+
+struct MyIter;
+
+impl Iterator for MyIter {
+ type Item = ();
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+}
+
+fn main() {
+ let _x = MyIter;
+ //^^ MyIter
+ let _x = iter::repeat(0);
+ //^^ impl Iterator<Item = i32>
+ fn generic<T: Clone>(t: T) {
+ let _x = iter::repeat(t);
+ //^^ impl Iterator<Item = T>
+ let _chained = iter::repeat(t).take(10);
+ //^^^^^^^^ impl Iterator<Item = T>
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn skip_constructor_and_enum_type_hints() {
+ check_with_config(
+ InlayHintsConfig {
+ type_hints: true,
+ hide_named_constructor_hints: true,
+ ..DISABLED_CONFIG
+ },
+ r#"
+//- minicore: try, option
+use core::ops::ControlFlow;
+
+mod x {
+ pub mod y { pub struct Foo; }
+ pub struct Foo;
+ pub enum AnotherEnum {
+ Variant()
+ };
+}
+struct Struct;
+struct TupleStruct();
+
+impl Struct {
+ fn new() -> Self {
+ Struct
+ }
+ fn try_new() -> ControlFlow<(), Self> {
+ ControlFlow::Continue(Struct)
+ }
+}
+
+struct Generic<T>(T);
+impl Generic<i32> {
+ fn new() -> Self {
+ Generic(0)
+ }
+}
+
+enum Enum {
+ Variant(u32)
+}
+
+fn times2(value: i32) -> i32 {
+ 2 * value
+}
+
+fn main() {
+ let enumb = Enum::Variant(0);
+
+ let strukt = x::Foo;
+ let strukt = x::y::Foo;
+ let strukt = Struct;
+ let strukt = Struct::new();
+
+ let tuple_struct = TupleStruct();
+
+ let generic0 = Generic::new();
+ // ^^^^^^^^ Generic<i32>
+ let generic1 = Generic(0);
+ // ^^^^^^^^ Generic<i32>
+ let generic2 = Generic::<i32>::new();
+ let generic3 = <Generic<i32>>::new();
+ let generic4 = Generic::<i32>(0);
+
+
+ let option = Some(0);
+ // ^^^^^^ Option<i32>
+ let func = times2;
+ // ^^^^ fn times2(i32) -> i32
+ let closure = |x: i32| x * 2;
+ // ^^^^^^^ |i32| -> i32
+}
+
+fn fallible() -> ControlFlow<()> {
+ let strukt = Struct::try_new()?;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn shows_constructor_type_hints_when_enabled() {
+ check_types(
+ r#"
+//- minicore: try
+use core::ops::ControlFlow;
+
+struct Struct;
+struct TupleStruct();
+
+impl Struct {
+ fn new() -> Self {
+ Struct
+ }
+ fn try_new() -> ControlFlow<(), Self> {
+ ControlFlow::Continue(Struct)
+ }
+}
+
+struct Generic<T>(T);
+impl Generic<i32> {
+ fn new() -> Self {
+ Generic(0)
+ }
+}
+
+fn main() {
+ let strukt = Struct::new();
+ // ^^^^^^ Struct
+ let tuple_struct = TupleStruct();
+ // ^^^^^^^^^^^^ TupleStruct
+ let generic0 = Generic::new();
+ // ^^^^^^^^ Generic<i32>
+ let generic1 = Generic::<i32>::new();
+ // ^^^^^^^^ Generic<i32>
+ let generic2 = <Generic<i32>>::new();
+ // ^^^^^^^^ Generic<i32>
+}
+
+fn fallible() -> ControlFlow<()> {
+ let strukt = Struct::try_new()?;
+ // ^^^^^^ Struct
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn closures() {
+ check(
+ r#"
+fn main() {
+ let mut start = 0;
+ //^^^^^ i32
+ (0..2).for_each(|increment | { start += increment; });
+ //^^^^^^^^^ i32
+
+ let multiply =
+ //^^^^^^^^ |i32, i32| -> i32
+ | a, b| a * b
+ //^ i32 ^ i32
+
+ ;
+
+ let _: i32 = multiply(1, 2);
+ //^ a ^ b
+ let multiply_ref = &multiply;
+ //^^^^^^^^^^^^ &|i32, i32| -> i32
+
+ let return_42 = || 42;
+ //^^^^^^^^^ || -> i32
+ || { 42 };
+ //^^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn return_type_hints_for_closure_without_block() {
+ check_with_config(
+ InlayHintsConfig {
+ closure_return_type_hints: ClosureReturnTypeHints::Always,
+ ..DISABLED_CONFIG
+ },
+ r#"
+fn main() {
+ let a = || { 0 };
+ //^^ i32
+ let b = || 0;
+ //^^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn skip_closure_type_hints() {
+ check_with_config(
+ InlayHintsConfig {
+ type_hints: true,
+ hide_closure_initialization_hints: true,
+ ..DISABLED_CONFIG
+ },
+ r#"
+//- minicore: fn
+fn main() {
+ let multiple_2 = |x: i32| { x * 2 };
+
+ let multiple_2 = |x: i32| x * 2;
+ // ^^^^^^^^^^ |i32| -> i32
+
+ let (not) = (|x: bool| { !x });
+ // ^^^ |bool| -> bool
+
+ let (is_zero, _b) = (|x: usize| { x == 0 }, false);
+ // ^^^^^^^ |usize| -> bool
+ // ^^ bool
+
+ let plus_one = |x| { x + 1 };
+ // ^ u8
+ foo(plus_one);
+
+ let add_mul = bar(|x: u8| { x + 1 });
+ // ^^^^^^^ impl FnOnce(u8) -> u8 + ?Sized
+
+ let closure = if let Some(6) = add_mul(2).checked_sub(1) {
+ // ^^^^^^^ fn(i32) -> i32
+ |x: i32| { x * 2 }
+ } else {
+ |x: i32| { x * 3 }
+ };
+}
+
+fn foo(f: impl FnOnce(u8) -> u8) {}
+
+fn bar(f: impl FnOnce(u8) -> u8) -> impl FnOnce(u8) -> u8 {
+ move |x: u8| f(x) * 2
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn hint_truncation() {
+ check_with_config(
+ InlayHintsConfig { max_length: Some(8), ..TEST_CONFIG },
+ r#"
+struct Smol<T>(T);
+
+struct VeryLongOuterName<T>(T);
+
+fn main() {
+ let a = Smol(0u32);
+ //^ Smol<u32>
+ let b = VeryLongOuterName(0usize);
+ //^ VeryLongOuterName<…>
+ let c = Smol(Smol(0u32))
+ //^ Smol<Smol<…>>
+}"#,
+ );
+ }
+
+ // Chaining hint tests
+
+ #[test]
+ fn chaining_hints_ignore_comments() {
+ check_expect(
+ InlayHintsConfig { type_hints: false, chaining_hints: true, ..DISABLED_CONFIG },
+ r#"
+struct A(B);
+impl A { fn into_b(self) -> B { self.0 } }
+struct B(C);
+impl B { fn into_c(self) -> C { self.0 } }
+struct C;
+
+fn main() {
+ let c = A(B(C))
+ .into_b() // This is a comment
+ // This is another comment
+ .into_c();
+}
+"#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 147..172,
+ kind: ChainingHint,
+ label: [
+ "B",
+ ],
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 147..172,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 147..154,
+ kind: ChainingHint,
+ label: [
+ "A",
+ ],
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 147..154,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn chaining_hints_without_newlines() {
+ check_chains(
+ r#"
+struct A(B);
+impl A { fn into_b(self) -> B { self.0 } }
+struct B(C);
+impl B { fn into_c(self) -> C { self.0 } }
+struct C;
+
+fn main() {
+ let c = A(B(C)).into_b().into_c();
+}"#,
+ );
+ }
+
+ #[test]
+ fn struct_access_chaining_hints() {
+ check_expect(
+ InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
+ r#"
+struct A { pub b: B }
+struct B { pub c: C }
+struct C(pub bool);
+struct D;
+
+impl D {
+ fn foo(&self) -> i32 { 42 }
+}
+
+fn main() {
+ let x = A { b: B { c: C(true) } }
+ .b
+ .c
+ .0;
+ let x = D
+ .foo();
+}"#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 143..190,
+ kind: ChainingHint,
+ label: [
+ "C",
+ ],
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 143..190,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 143..179,
+ kind: ChainingHint,
+ label: [
+ "B",
+ ],
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 143..179,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn generic_chaining_hints() {
+ check_expect(
+ InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
+ r#"
+struct A<T>(T);
+struct B<T>(T);
+struct C<T>(T);
+struct X<T,R>(T, R);
+
+impl<T> A<T> {
+ fn new(t: T) -> Self { A(t) }
+ fn into_b(self) -> B<T> { B(self.0) }
+}
+impl<T> B<T> {
+ fn into_c(self) -> C<T> { C(self.0) }
+}
+fn main() {
+ let c = A::new(X(42, true))
+ .into_b()
+ .into_c();
+}
+"#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 246..283,
+ kind: ChainingHint,
+ label: [
+ "B<X<i32, bool>>",
+ ],
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 246..283,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 246..265,
+ kind: ChainingHint,
+ label: [
+ "A<X<i32, bool>>",
+ ],
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 246..265,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn shorten_iterator_chaining_hints() {
+ check_expect(
+ InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
+ r#"
+//- minicore: iterators
+use core::iter;
+
+struct MyIter;
+
+impl Iterator for MyIter {
+ type Item = ();
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+}
+
+fn main() {
+ let _x = MyIter.by_ref()
+ .take(5)
+ .by_ref()
+ .take(5)
+ .by_ref();
+}
+"#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 174..241,
+ kind: ChainingHint,
+ label: [
+ "impl Iterator<Item = ()>",
+ ],
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 174..241,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 174..224,
+ kind: ChainingHint,
+ label: [
+ "impl Iterator<Item = ()>",
+ ],
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 174..224,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 174..206,
+ kind: ChainingHint,
+ label: [
+ "impl Iterator<Item = ()>",
+ ],
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 174..206,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 174..189,
+ kind: ChainingHint,
+ label: [
+ "&mut MyIter",
+ ],
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 174..189,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn hints_in_attr_call() {
+ check_expect(
+ TEST_CONFIG,
+ r#"
+//- proc_macros: identity, input_replace
+struct Struct;
+impl Struct {
+ fn chain(self) -> Self {
+ self
+ }
+}
+#[proc_macros::identity]
+fn main() {
+ let strukt = Struct;
+ strukt
+ .chain()
+ .chain()
+ .chain();
+ Struct::chain(strukt);
+}
+"#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 124..130,
+ kind: TypeHint,
+ label: [
+ "Struct",
+ ],
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 124..130,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 145..185,
+ kind: ChainingHint,
+ label: [
+ "Struct",
+ ],
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 145..185,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 145..168,
+ kind: ChainingHint,
+ label: [
+ "Struct",
+ ],
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 145..168,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 222..228,
+ kind: ParameterHint,
+ label: [
+ "self",
+ ],
+ tooltip: Some(
+ HoverOffset(
+ FileId(
+ 0,
+ ),
+ 42,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn hints_lifetimes() {
+ check(
+ r#"
+fn empty() {}
+
+fn no_gpl(a: &()) {}
+ //^^^^^^<'0>
+ // ^'0
+fn empty_gpl<>(a: &()) {}
+ // ^'0 ^'0
+fn partial<'b>(a: &(), b: &'b ()) {}
+// ^'0, $ ^'0
+fn partial<'a>(a: &'a (), b: &()) {}
+// ^'0, $ ^'0
+
+fn single_ret(a: &()) -> &() {}
+// ^^^^^^^^^^<'0>
+ // ^'0 ^'0
+fn full_mul(a: &(), b: &()) {}
+// ^^^^^^^^<'0, '1>
+ // ^'0 ^'1
+
+fn foo<'c>(a: &'c ()) -> &() {}
+ // ^'c
+
+fn nested_in(a: & &X< &()>) {}
+// ^^^^^^^^^<'0, '1, '2>
+ //^'0 ^'1 ^'2
+fn nested_out(a: &()) -> & &X< &()>{}
+// ^^^^^^^^^^<'0>
+ //^'0 ^'0 ^'0 ^'0
+
+impl () {
+ fn foo(&self) {}
+ // ^^^<'0>
+ // ^'0
+ fn foo(&self) -> &() {}
+ // ^^^<'0>
+ // ^'0 ^'0
+ fn foo(&self, a: &()) -> &() {}
+ // ^^^<'0, '1>
+ // ^'0 ^'1 ^'0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn hints_lifetimes_named() {
+ check_with_config(
+ InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG },
+ r#"
+fn nested_in<'named>(named: & &X< &()>) {}
+// ^'named1, 'named2, 'named3, $
+ //^'named1 ^'named2 ^'named3
+"#,
+ );
+ }
+
+ #[test]
+ fn hints_lifetimes_trivial_skip() {
+ check_with_config(
+ InlayHintsConfig {
+ lifetime_elision_hints: LifetimeElisionHints::SkipTrivial,
+ ..TEST_CONFIG
+ },
+ r#"
+fn no_gpl(a: &()) {}
+fn empty_gpl<>(a: &()) {}
+fn partial<'b>(a: &(), b: &'b ()) {}
+fn partial<'a>(a: &'a (), b: &()) {}
+
+fn single_ret(a: &()) -> &() {}
+// ^^^^^^^^^^<'0>
+ // ^'0 ^'0
+fn full_mul(a: &(), b: &()) {}
+
+fn foo<'c>(a: &'c ()) -> &() {}
+ // ^'c
+
+fn nested_in(a: & &X< &()>) {}
+fn nested_out(a: &()) -> & &X< &()>{}
+// ^^^^^^^^^^<'0>
+ //^'0 ^'0 ^'0 ^'0
+
+impl () {
+ fn foo(&self) {}
+ fn foo(&self) -> &() {}
+ // ^^^<'0>
+ // ^'0 ^'0
+ fn foo(&self, a: &()) -> &() {}
+ // ^^^<'0, '1>
+ // ^'0 ^'1 ^'0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn hints_lifetimes_static() {
+ check_with_config(
+ InlayHintsConfig {
+ lifetime_elision_hints: LifetimeElisionHints::Always,
+ ..TEST_CONFIG
+ },
+ r#"
+trait Trait {}
+static S: &str = "";
+// ^'static
+const C: &str = "";
+// ^'static
+const C: &dyn Trait = panic!();
+// ^'static
+
+impl () {
+ const C: &str = "";
+ const C: &dyn Trait = panic!();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn hints_binding_modes() {
+ check_with_config(
+ InlayHintsConfig { binding_mode_hints: true, ..DISABLED_CONFIG },
+ r#"
+fn __(
+ (x,): (u32,),
+ (x,): &(u32,),
+ //^^^^&
+ //^ ref
+ (x,): &mut (u32,)
+ //^^^^&mut
+ //^ ref mut
+) {
+ let (x,) = (0,);
+ let (x,) = &(0,);
+ //^^^^ &
+ //^ ref
+ let (x,) = &mut (0,);
+ //^^^^ &mut
+ //^ ref mut
+ let &mut (x,) = &mut (0,);
+ let (ref mut x,) = &mut (0,);
+ //^^^^^^^^^^^^ &mut
+ let &mut (ref mut x,) = &mut (0,);
+ let (mut x,) = &mut (0,);
+ //^^^^^^^^ &mut
+ match (0,) {
+ (x,) => ()
+ }
+ match &(0,) {
+ (x,) => ()
+ //^^^^ &
+ //^ ref
+ }
+ match &mut (0,) {
+ (x,) => ()
+ //^^^^ &mut
+ //^ ref mut
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn hints_closing_brace() {
+ check_with_config(
+ InlayHintsConfig { closing_brace_hints_min_lines: Some(2), ..DISABLED_CONFIG },
+ r#"
+fn a() {}
+
+fn f() {
+} // no hint unless `}` is the last token on the line
+
+fn g() {
+ }
+//^ fn g
+
+fn h<T>(with: T, arguments: u8, ...) {
+ }
+//^ fn h
+
+trait Tr {
+ fn f();
+ fn g() {
+ }
+ //^ fn g
+ }
+//^ trait Tr
+impl Tr for () {
+ }
+//^ impl Tr for ()
+impl dyn Tr {
+ }
+//^ impl dyn Tr
+
+static S0: () = 0;
+static S1: () = {};
+static S2: () = {
+ };
+//^ static S2
+const _: () = {
+ };
+//^ const _
+
+mod m {
+ }
+//^ mod m
+
+m! {}
+m!();
+m!(
+ );
+//^ m!
+
+m! {
+ }
+//^ m!
+
+fn f() {
+ let v = vec![
+ ];
+ }
+//^ fn f
+"#,
+ );
+ }
++
++ #[test]
++ fn adjustment_hints() {
++ check_with_config(
++ InlayHintsConfig { adjustment_hints: AdjustmentHints::Always, ..DISABLED_CONFIG },
++ r#"
++//- minicore: coerce_unsized
++fn main() {
++ let _: u32 = loop {};
++ //^^^^^^^<never-to-any>
++ let _: &u32 = &mut 0;
++ //^^^^^^&
++ //^^^^^^*
++ let _: &mut u32 = &mut 0;
++ //^^^^^^&mut $
++ //^^^^^^*
++ let _: *const u32 = &mut 0;
++ //^^^^^^&raw const $
++ //^^^^^^*
++ let _: *mut u32 = &mut 0;
++ //^^^^^^&raw mut $
++ //^^^^^^*
++ let _: fn() = main;
++ //^^^^<fn-item-to-fn-pointer>
++ let _: unsafe fn() = main;
++ //^^^^<safe-fn-pointer-to-unsafe-fn-pointer>
++ //^^^^<fn-item-to-fn-pointer>
++ let _: unsafe fn() = main as fn();
++ //^^^^^^^^^^^^<safe-fn-pointer-to-unsafe-fn-pointer>
++ let _: fn() = || {};
++ //^^^^^<closure-to-fn-pointer>
++ let _: unsafe fn() = || {};
++ //^^^^^<closure-to-unsafe-fn-pointer>
++ let _: *const u32 = &mut 0u32 as *mut u32;
++ //^^^^^^^^^^^^^^^^^^^^^<mut-ptr-to-const-ptr>
++ let _: &mut [_] = &mut [0; 0];
++ //^^^^^^^^^^^<unsize>
++ //^^^^^^^^^^^&mut $
++ //^^^^^^^^^^^*
++
++ Struct.consume();
++ Struct.by_ref();
++ //^^^^^^(
++ //^^^^^^&
++ //^^^^^^)
++ Struct.by_ref_mut();
++ //^^^^^^(
++ //^^^^^^&mut $
++ //^^^^^^)
++
++ (&Struct).consume();
++ //^^^^^^^*
++ (&Struct).by_ref();
++
++ (&mut Struct).consume();
++ //^^^^^^^^^^^*
++ (&mut Struct).by_ref();
++ //^^^^^^^^^^^&
++ //^^^^^^^^^^^*
++ (&mut Struct).by_ref_mut();
++}
++
++#[derive(Copy, Clone)]
++struct Struct;
++impl Struct {
++ fn consume(self) {}
++ fn by_ref(&self) {}
++ fn by_ref_mut(&mut self) {}
++}
++"#,
++ )
++ }
+}
--- /dev/null
- ClosureReturnTypeHints, InlayHint, InlayHintLabel, InlayHintsConfig, InlayKind,
- InlayTooltip, LifetimeElisionHints, ReborrowHints,
+//! ide crate provides "ide-centric" APIs for the rust-analyzer. That is,
+//! it generally operates with files and text ranges, and returns results as
+//! Strings, suitable for displaying to the human.
+//!
+//! What powers this API are the `RootDatabase` struct, which defines a `salsa`
+//! database, and the `hir` crate, where majority of the analysis happens.
+//! However, IDE specific bits of the analysis (most notably completion) happen
+//! in this crate.
+
+// For proving that RootDatabase is RefUnwindSafe.
+#![recursion_limit = "128"]
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+#[allow(unused)]
+macro_rules! eprintln {
+ ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
+#[cfg(test)]
+mod fixture;
+
+mod markup;
+mod prime_caches;
+mod navigation_target;
+
+mod annotations;
+mod call_hierarchy;
+mod signature_help;
+mod doc_links;
+mod highlight_related;
+mod expand_macro;
+mod extend_selection;
+mod file_structure;
+mod folding_ranges;
+mod goto_declaration;
+mod goto_definition;
+mod goto_implementation;
+mod goto_type_definition;
+mod hover;
+mod inlay_hints;
+mod join_lines;
+mod markdown_remove;
+mod matching_brace;
+mod moniker;
+mod move_item;
+mod parent_module;
+mod references;
+mod rename;
+mod runnables;
+mod ssr;
+mod static_index;
+mod status;
+mod syntax_highlighting;
+mod syntax_tree;
+mod typing;
+mod view_crate_graph;
+mod view_hir;
+mod view_item_tree;
+mod shuffle_crate_graph;
+
+use std::sync::Arc;
+
+use cfg::CfgOptions;
+use ide_db::{
+ base_db::{
+ salsa::{self, ParallelDatabase},
+ CrateOrigin, Env, FileLoader, FileSet, SourceDatabase, VfsPath,
+ },
+ symbol_index, LineIndexDatabase,
+};
+use syntax::SourceFile;
+
+use crate::navigation_target::{ToNav, TryToNav};
+
+pub use crate::{
+ annotations::{Annotation, AnnotationConfig, AnnotationKind, AnnotationLocation},
+ call_hierarchy::CallItem,
+ expand_macro::ExpandedMacro,
+ file_structure::{StructureNode, StructureNodeKind},
+ folding_ranges::{Fold, FoldKind},
+ highlight_related::{HighlightRelatedConfig, HighlightedRange},
+ hover::{HoverAction, HoverConfig, HoverDocFormat, HoverGotoTypeData, HoverResult},
+ inlay_hints::{
- range: Option<FileRange>,
++ AdjustmentHints, ClosureReturnTypeHints, InlayHint, InlayHintLabel, InlayHintsConfig,
++ InlayKind, InlayTooltip, LifetimeElisionHints,
+ },
+ join_lines::JoinLinesConfig,
+ markup::Markup,
+ moniker::{MonikerDescriptorKind, MonikerKind, MonikerResult, PackageInformation},
+ move_item::Direction,
+ navigation_target::NavigationTarget,
+ prime_caches::ParallelPrimeCachesProgress,
+ references::ReferenceSearchResult,
+ rename::RenameError,
+ runnables::{Runnable, RunnableKind, TestId},
+ signature_help::SignatureHelp,
+ static_index::{StaticIndex, StaticIndexedFile, TokenId, TokenStaticData},
+ syntax_highlighting::{
+ tags::{Highlight, HlMod, HlMods, HlOperator, HlPunct, HlTag},
+ HighlightConfig, HlRange,
+ },
+};
+pub use hir::{Documentation, Semantics};
+pub use ide_assists::{
+ Assist, AssistConfig, AssistId, AssistKind, AssistResolveStrategy, SingleResolve,
+};
+pub use ide_completion::{
+ CallableSnippets, CompletionConfig, CompletionItem, CompletionItemKind, CompletionRelevance,
+ Snippet, SnippetScope,
+};
+pub use ide_db::{
+ base_db::{
+ Cancelled, Change, CrateGraph, CrateId, Edition, FileId, FilePosition, FileRange,
+ SourceRoot, SourceRootId,
+ },
+ label::Label,
+ line_index::{LineCol, LineColUtf16, LineIndex},
+ search::{ReferenceCategory, SearchScope},
+ source_change::{FileSystemEdit, SourceChange},
+ symbol_index::Query,
+ RootDatabase, SymbolKind,
+};
+pub use ide_diagnostics::{Diagnostic, DiagnosticsConfig, ExprFillDefaultMode, Severity};
+pub use ide_ssr::SsrError;
+pub use syntax::{TextRange, TextSize};
+pub use text_edit::{Indel, TextEdit};
+
+pub type Cancellable<T> = Result<T, Cancelled>;
+
+/// Info associated with a text range.
+#[derive(Debug)]
+pub struct RangeInfo<T> {
+ pub range: TextRange,
+ pub info: T,
+}
+
+impl<T> RangeInfo<T> {
+ pub fn new(range: TextRange, info: T) -> RangeInfo<T> {
+ RangeInfo { range, info }
+ }
+}
+
+/// `AnalysisHost` stores the current state of the world.
+#[derive(Debug)]
+pub struct AnalysisHost {
+ db: RootDatabase,
+}
+
+impl AnalysisHost {
+ pub fn new(lru_capacity: Option<usize>) -> AnalysisHost {
+ AnalysisHost { db: RootDatabase::new(lru_capacity) }
+ }
+
+ pub fn update_lru_capacity(&mut self, lru_capacity: Option<usize>) {
+ self.db.update_lru_capacity(lru_capacity);
+ }
+
+ /// Returns a snapshot of the current state, which you can query for
+ /// semantic information.
+ pub fn analysis(&self) -> Analysis {
+ Analysis { db: self.db.snapshot() }
+ }
+
+ /// Applies changes to the current state of the world. If there are
+ /// outstanding snapshots, they will be canceled.
+ pub fn apply_change(&mut self, change: Change) {
+ self.db.apply_change(change)
+ }
+
+ /// NB: this clears the database
+ pub fn per_query_memory_usage(&mut self) -> Vec<(String, profile::Bytes)> {
+ self.db.per_query_memory_usage()
+ }
+ pub fn request_cancellation(&mut self) {
+ self.db.request_cancellation();
+ }
+ pub fn raw_database(&self) -> &RootDatabase {
+ &self.db
+ }
+ pub fn raw_database_mut(&mut self) -> &mut RootDatabase {
+ &mut self.db
+ }
+
+ pub fn shuffle_crate_graph(&mut self) {
+ shuffle_crate_graph::shuffle_crate_graph(&mut self.db);
+ }
+}
+
+impl Default for AnalysisHost {
+ fn default() -> AnalysisHost {
+ AnalysisHost::new(None)
+ }
+}
+
+/// Analysis is a snapshot of a world state at a moment in time. It is the main
+/// entry point for asking semantic information about the world. When the world
+/// state is advanced using `AnalysisHost::apply_change` method, all existing
+/// `Analysis` are canceled (most method return `Err(Canceled)`).
+#[derive(Debug)]
+pub struct Analysis {
+ db: salsa::Snapshot<RootDatabase>,
+}
+
+// As a general design guideline, `Analysis` API are intended to be independent
+// from the language server protocol. That is, when exposing some functionality
+// we should think in terms of "what API makes most sense" and not in terms of
+// "what types LSP uses". Although currently LSP is the only consumer of the
+// API, the API should in theory be usable as a library, or via a different
+// protocol.
+impl Analysis {
+ // Creates an analysis instance for a single file, without any external
+ // dependencies, stdlib support or ability to apply changes. See
+ // `AnalysisHost` for creating a fully-featured analysis.
+ pub fn from_single_file(text: String) -> (Analysis, FileId) {
+ let mut host = AnalysisHost::default();
+ let file_id = FileId(0);
+ let mut file_set = FileSet::default();
+ file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_string()));
+ let source_root = SourceRoot::new_local(file_set);
+
+ let mut change = Change::new();
+ change.set_roots(vec![source_root]);
+ let mut crate_graph = CrateGraph::default();
+ // FIXME: cfg options
+ // Default to enable test for single file.
+ let mut cfg_options = CfgOptions::default();
+ cfg_options.insert_atom("test".into());
+ crate_graph.add_crate_root(
+ file_id,
+ Edition::CURRENT,
+ None,
+ None,
+ cfg_options.clone(),
+ cfg_options,
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo { repo: None, name: None },
+ );
+ change.change_file(file_id, Some(Arc::new(text)));
+ change.set_crate_graph(crate_graph);
+ host.apply_change(change);
+ (host.analysis(), file_id)
+ }
+
+ /// Debug info about the current state of the analysis.
+ pub fn status(&self, file_id: Option<FileId>) -> Cancellable<String> {
+ self.with_db(|db| status::status(&*db, file_id))
+ }
+
+ pub fn parallel_prime_caches<F>(&self, num_worker_threads: u8, cb: F) -> Cancellable<()>
+ where
+ F: Fn(ParallelPrimeCachesProgress) + Sync + std::panic::UnwindSafe,
+ {
+ self.with_db(move |db| prime_caches::parallel_prime_caches(db, num_worker_threads, &cb))
+ }
+
+ /// Gets the text of the source file.
+ pub fn file_text(&self, file_id: FileId) -> Cancellable<Arc<String>> {
+ self.with_db(|db| db.file_text(file_id))
+ }
+
+ /// Gets the syntax tree of the file.
+ pub fn parse(&self, file_id: FileId) -> Cancellable<SourceFile> {
+ self.with_db(|db| db.parse(file_id).tree())
+ }
+
+ /// Returns true if this file belongs to an immutable library.
+ pub fn is_library_file(&self, file_id: FileId) -> Cancellable<bool> {
+ use ide_db::base_db::SourceDatabaseExt;
+ self.with_db(|db| db.source_root(db.file_source_root(file_id)).is_library)
+ }
+
+ /// Gets the file's `LineIndex`: data structure to convert between absolute
+ /// offsets and line/column representation.
+ pub fn file_line_index(&self, file_id: FileId) -> Cancellable<Arc<LineIndex>> {
+ self.with_db(|db| db.line_index(file_id))
+ }
+
+ /// Selects the next syntactic nodes encompassing the range.
+ pub fn extend_selection(&self, frange: FileRange) -> Cancellable<TextRange> {
+ self.with_db(|db| extend_selection::extend_selection(db, frange))
+ }
+
+ /// Returns position of the matching brace (all types of braces are
+ /// supported).
+ pub fn matching_brace(&self, position: FilePosition) -> Cancellable<Option<TextSize>> {
+ self.with_db(|db| {
+ let parse = db.parse(position.file_id);
+ let file = parse.tree();
+ matching_brace::matching_brace(&file, position.offset)
+ })
+ }
+
+ /// Returns a syntax tree represented as `String`, for debug purposes.
+ // FIXME: use a better name here.
+ pub fn syntax_tree(
+ &self,
+ file_id: FileId,
+ text_range: Option<TextRange>,
+ ) -> Cancellable<String> {
+ self.with_db(|db| syntax_tree::syntax_tree(db, file_id, text_range))
+ }
+
+ pub fn view_hir(&self, position: FilePosition) -> Cancellable<String> {
+ self.with_db(|db| view_hir::view_hir(db, position))
+ }
+
+ pub fn view_item_tree(&self, file_id: FileId) -> Cancellable<String> {
+ self.with_db(|db| view_item_tree::view_item_tree(db, file_id))
+ }
+
+ /// Renders the crate graph to GraphViz "dot" syntax.
+ pub fn view_crate_graph(&self, full: bool) -> Cancellable<Result<String, String>> {
+ self.with_db(|db| view_crate_graph::view_crate_graph(db, full))
+ }
+
+ pub fn expand_macro(&self, position: FilePosition) -> Cancellable<Option<ExpandedMacro>> {
+ self.with_db(|db| expand_macro::expand_macro(db, position))
+ }
+
+ /// Returns an edit to remove all newlines in the range, cleaning up minor
+ /// stuff like trailing commas.
+ pub fn join_lines(&self, config: &JoinLinesConfig, frange: FileRange) -> Cancellable<TextEdit> {
+ self.with_db(|db| {
+ let parse = db.parse(frange.file_id);
+ join_lines::join_lines(config, &parse.tree(), frange.range)
+ })
+ }
+
+ /// Returns an edit which should be applied when opening a new line, fixing
+ /// up minor stuff like continuing the comment.
+ /// The edit will be a snippet (with `$0`).
+ pub fn on_enter(&self, position: FilePosition) -> Cancellable<Option<TextEdit>> {
+ self.with_db(|db| typing::on_enter(db, position))
+ }
+
+ /// Returns an edit which should be applied after a character was typed.
+ ///
+ /// This is useful for some on-the-fly fixups, like adding `;` to `let =`
+ /// automatically.
+ pub fn on_char_typed(
+ &self,
+ position: FilePosition,
+ char_typed: char,
+ autoclose: bool,
+ ) -> Cancellable<Option<SourceChange>> {
+ // Fast path to not even parse the file.
+ if !typing::TRIGGER_CHARS.contains(char_typed) {
+ return Ok(None);
+ }
+ if char_typed == '<' && !autoclose {
+ return Ok(None);
+ }
+
+ self.with_db(|db| typing::on_char_typed(db, position, char_typed))
+ }
+
+ /// Returns a tree representation of symbols in the file. Useful to draw a
+ /// file outline.
+ pub fn file_structure(&self, file_id: FileId) -> Cancellable<Vec<StructureNode>> {
+ self.with_db(|db| file_structure::file_structure(&db.parse(file_id).tree()))
+ }
+
+ /// Returns a list of the places in the file where type hints can be displayed.
+ pub fn inlay_hints(
+ &self,
+ config: &InlayHintsConfig,
+ file_id: FileId,
++ range: Option<TextRange>,
+ ) -> Cancellable<Vec<InlayHint>> {
+ self.with_db(|db| inlay_hints::inlay_hints(db, file_id, range, config))
+ }
+
+ /// Returns the set of folding ranges.
+ pub fn folding_ranges(&self, file_id: FileId) -> Cancellable<Vec<Fold>> {
+ self.with_db(|db| folding_ranges::folding_ranges(&db.parse(file_id).tree()))
+ }
+
+ /// Fuzzy searches for a symbol.
+ pub fn symbol_search(&self, query: Query) -> Cancellable<Vec<NavigationTarget>> {
+ self.with_db(|db| {
+ symbol_index::world_symbols(db, query)
+ .into_iter() // xx: should we make this a par iter?
+ .filter_map(|s| s.try_to_nav(db))
+ .collect::<Vec<_>>()
+ })
+ }
+
+ /// Returns the definitions from the symbol at `position`.
+ pub fn goto_definition(
+ &self,
+ position: FilePosition,
+ ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
+ self.with_db(|db| goto_definition::goto_definition(db, position))
+ }
+
+ /// Returns the declaration from the symbol at `position`.
+ pub fn goto_declaration(
+ &self,
+ position: FilePosition,
+ ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
+ self.with_db(|db| goto_declaration::goto_declaration(db, position))
+ }
+
+ /// Returns the impls from the symbol at `position`.
+ pub fn goto_implementation(
+ &self,
+ position: FilePosition,
+ ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
+ self.with_db(|db| goto_implementation::goto_implementation(db, position))
+ }
+
+ /// Returns the type definitions for the symbol at `position`.
+ pub fn goto_type_definition(
+ &self,
+ position: FilePosition,
+ ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
+ self.with_db(|db| goto_type_definition::goto_type_definition(db, position))
+ }
+
+ /// Finds all usages of the reference at point.
+ pub fn find_all_refs(
+ &self,
+ position: FilePosition,
+ search_scope: Option<SearchScope>,
+ ) -> Cancellable<Option<Vec<ReferenceSearchResult>>> {
+ self.with_db(|db| references::find_all_refs(&Semantics::new(db), position, search_scope))
+ }
+
+ /// Returns a short text describing element at position.
+ pub fn hover(
+ &self,
+ config: &HoverConfig,
+ range: FileRange,
+ ) -> Cancellable<Option<RangeInfo<HoverResult>>> {
+ self.with_db(|db| hover::hover(db, range, config))
+ }
+
+ /// Returns moniker of symbol at position.
+ pub fn moniker(
+ &self,
+ position: FilePosition,
+ ) -> Cancellable<Option<RangeInfo<Vec<moniker::MonikerResult>>>> {
+ self.with_db(|db| moniker::moniker(db, position))
+ }
+
+ /// Return URL(s) for the documentation of the symbol under the cursor.
+ pub fn external_docs(
+ &self,
+ position: FilePosition,
+ ) -> Cancellable<Option<doc_links::DocumentationLink>> {
+ self.with_db(|db| doc_links::external_docs(db, &position))
+ }
+
+ /// Computes parameter information at the given position.
+ pub fn signature_help(&self, position: FilePosition) -> Cancellable<Option<SignatureHelp>> {
+ self.with_db(|db| signature_help::signature_help(db, position))
+ }
+
+ /// Computes call hierarchy candidates for the given file position.
+ pub fn call_hierarchy(
+ &self,
+ position: FilePosition,
+ ) -> Cancellable<Option<RangeInfo<Vec<NavigationTarget>>>> {
+ self.with_db(|db| call_hierarchy::call_hierarchy(db, position))
+ }
+
+ /// Computes incoming calls for the given file position.
+ pub fn incoming_calls(&self, position: FilePosition) -> Cancellable<Option<Vec<CallItem>>> {
+ self.with_db(|db| call_hierarchy::incoming_calls(db, position))
+ }
+
+ /// Computes outgoing calls for the given file position.
+ pub fn outgoing_calls(&self, position: FilePosition) -> Cancellable<Option<Vec<CallItem>>> {
+ self.with_db(|db| call_hierarchy::outgoing_calls(db, position))
+ }
+
+ /// Returns a `mod name;` declaration which created the current module.
+ pub fn parent_module(&self, position: FilePosition) -> Cancellable<Vec<NavigationTarget>> {
+ self.with_db(|db| parent_module::parent_module(db, position))
+ }
+
+ /// Returns crates this file belongs too.
+ pub fn crates_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> {
+ self.with_db(|db| parent_module::crates_for(db, file_id))
+ }
+
+ /// Returns crates this file belongs too.
+ pub fn transitive_rev_deps(&self, crate_id: CrateId) -> Cancellable<Vec<CrateId>> {
+ self.with_db(|db| db.crate_graph().transitive_rev_deps(crate_id).collect())
+ }
+
+ /// Returns crates this file *might* belong too.
+ pub fn relevant_crates_for(&self, file_id: FileId) -> Cancellable<Vec<CrateId>> {
+ self.with_db(|db| db.relevant_crates(file_id).iter().copied().collect())
+ }
+
+ /// Returns the edition of the given crate.
+ pub fn crate_edition(&self, crate_id: CrateId) -> Cancellable<Edition> {
+ self.with_db(|db| db.crate_graph()[crate_id].edition)
+ }
+
+ /// Returns the root file of the given crate.
+ pub fn crate_root(&self, crate_id: CrateId) -> Cancellable<FileId> {
+ self.with_db(|db| db.crate_graph()[crate_id].root_file_id)
+ }
+
+ /// Returns the set of possible targets to run for the current file.
+ pub fn runnables(&self, file_id: FileId) -> Cancellable<Vec<Runnable>> {
+ self.with_db(|db| runnables::runnables(db, file_id))
+ }
+
+ /// Returns the set of tests for the given file position.
+ pub fn related_tests(
+ &self,
+ position: FilePosition,
+ search_scope: Option<SearchScope>,
+ ) -> Cancellable<Vec<Runnable>> {
+ self.with_db(|db| runnables::related_tests(db, position, search_scope))
+ }
+
+ /// Computes syntax highlighting for the given file
+ pub fn highlight(
+ &self,
+ highlight_config: HighlightConfig,
+ file_id: FileId,
+ ) -> Cancellable<Vec<HlRange>> {
+ self.with_db(|db| syntax_highlighting::highlight(db, highlight_config, file_id, None))
+ }
+
+ /// Computes all ranges to highlight for a given item in a file.
+ pub fn highlight_related(
+ &self,
+ config: HighlightRelatedConfig,
+ position: FilePosition,
+ ) -> Cancellable<Option<Vec<HighlightedRange>>> {
+ self.with_db(|db| {
+ highlight_related::highlight_related(&Semantics::new(db), config, position)
+ })
+ }
+
+ /// Computes syntax highlighting for the given file range.
+ pub fn highlight_range(
+ &self,
+ highlight_config: HighlightConfig,
+ frange: FileRange,
+ ) -> Cancellable<Vec<HlRange>> {
+ self.with_db(|db| {
+ syntax_highlighting::highlight(db, highlight_config, frange.file_id, Some(frange.range))
+ })
+ }
+
+ /// Computes syntax highlighting for the given file.
+ pub fn highlight_as_html(&self, file_id: FileId, rainbow: bool) -> Cancellable<String> {
+ self.with_db(|db| syntax_highlighting::highlight_as_html(db, file_id, rainbow))
+ }
+
+ /// Computes completions at the given position.
+ pub fn completions(
+ &self,
+ config: &CompletionConfig,
+ position: FilePosition,
+ trigger_character: Option<char>,
+ ) -> Cancellable<Option<Vec<CompletionItem>>> {
+ self.with_db(|db| {
+ ide_completion::completions(db, config, position, trigger_character).map(Into::into)
+ })
+ }
+
+ /// Resolves additional completion data at the position given.
+ pub fn resolve_completion_edits(
+ &self,
+ config: &CompletionConfig,
+ position: FilePosition,
+ imports: impl IntoIterator<Item = (String, String)> + std::panic::UnwindSafe,
+ ) -> Cancellable<Vec<TextEdit>> {
+ Ok(self
+ .with_db(|db| ide_completion::resolve_completion_edits(db, config, position, imports))?
+ .unwrap_or_default())
+ }
+
+ /// Computes the set of diagnostics for the given file.
+ pub fn diagnostics(
+ &self,
+ config: &DiagnosticsConfig,
+ resolve: AssistResolveStrategy,
+ file_id: FileId,
+ ) -> Cancellable<Vec<Diagnostic>> {
+ self.with_db(|db| ide_diagnostics::diagnostics(db, config, &resolve, file_id))
+ }
+
+ /// Convenience function to return assists + quick fixes for diagnostics
+ pub fn assists_with_fixes(
+ &self,
+ assist_config: &AssistConfig,
+ diagnostics_config: &DiagnosticsConfig,
+ resolve: AssistResolveStrategy,
+ frange: FileRange,
+ ) -> Cancellable<Vec<Assist>> {
+ let include_fixes = match &assist_config.allowed {
+ Some(it) => it.iter().any(|&it| it == AssistKind::None || it == AssistKind::QuickFix),
+ None => true,
+ };
+
+ self.with_db(|db| {
+ let diagnostic_assists = if include_fixes {
+ ide_diagnostics::diagnostics(db, diagnostics_config, &resolve, frange.file_id)
+ .into_iter()
+ .flat_map(|it| it.fixes.unwrap_or_default())
+ .filter(|it| it.target.intersect(frange.range).is_some())
+ .collect()
+ } else {
+ Vec::new()
+ };
+ let ssr_assists = ssr::ssr_assists(db, &resolve, frange);
+ let assists = ide_assists::assists(db, assist_config, resolve, frange);
+
+ let mut res = diagnostic_assists;
+ res.extend(ssr_assists.into_iter());
+ res.extend(assists.into_iter());
+
+ res
+ })
+ }
+
+ /// Returns the edit required to rename reference at the position to the new
+ /// name.
+ pub fn rename(
+ &self,
+ position: FilePosition,
+ new_name: &str,
+ ) -> Cancellable<Result<SourceChange, RenameError>> {
+ self.with_db(|db| rename::rename(db, position, new_name))
+ }
+
+ pub fn prepare_rename(
+ &self,
+ position: FilePosition,
+ ) -> Cancellable<Result<RangeInfo<()>, RenameError>> {
+ self.with_db(|db| rename::prepare_rename(db, position))
+ }
+
+ pub fn will_rename_file(
+ &self,
+ file_id: FileId,
+ new_name_stem: &str,
+ ) -> Cancellable<Option<SourceChange>> {
+ self.with_db(|db| rename::will_rename_file(db, file_id, new_name_stem))
+ }
+
+ pub fn structural_search_replace(
+ &self,
+ query: &str,
+ parse_only: bool,
+ resolve_context: FilePosition,
+ selections: Vec<FileRange>,
+ ) -> Cancellable<Result<SourceChange, SsrError>> {
+ self.with_db(|db| {
+ let rule: ide_ssr::SsrRule = query.parse()?;
+ let mut match_finder =
+ ide_ssr::MatchFinder::in_context(db, resolve_context, selections)?;
+ match_finder.add_rule(rule)?;
+ let edits = if parse_only { Default::default() } else { match_finder.edits() };
+ Ok(SourceChange::from(edits))
+ })
+ }
+
+ pub fn annotations(
+ &self,
+ config: &AnnotationConfig,
+ file_id: FileId,
+ ) -> Cancellable<Vec<Annotation>> {
+ self.with_db(|db| annotations::annotations(db, config, file_id))
+ }
+
+ pub fn resolve_annotation(&self, annotation: Annotation) -> Cancellable<Annotation> {
+ self.with_db(|db| annotations::resolve_annotation(db, annotation))
+ }
+
+ pub fn move_item(
+ &self,
+ range: FileRange,
+ direction: Direction,
+ ) -> Cancellable<Option<TextEdit>> {
+ self.with_db(|db| move_item::move_item(db, range, direction))
+ }
+
+ /// Performs an operation on the database that may be canceled.
+ ///
+ /// rust-analyzer needs to be able to answer semantic questions about the
+ /// code while the code is being modified. A common problem is that a
+ /// long-running query is being calculated when a new change arrives.
+ ///
+ /// We can't just apply the change immediately: this will cause the pending
+ /// query to see inconsistent state (it will observe an absence of
+ /// repeatable read). So what we do is we **cancel** all pending queries
+ /// before applying the change.
+ ///
+ /// Salsa implements cancellation by unwinding with a special value and
+ /// catching it on the API boundary.
+ fn with_db<F, T>(&self, f: F) -> Cancellable<T>
+ where
+ F: FnOnce(&RootDatabase) -> T + std::panic::UnwindSafe,
+ {
+ Cancelled::catch(|| f(&self.db))
+ }
+}
+
+#[test]
+fn analysis_is_send() {
+ // Compile-time assertion: `Analysis` must implement `Send` so it can be
+ // handed across threads. The test body does no runtime work.
+ fn is_send<T: Send>() {}
+ is_send::<Analysis>();
+}
--- /dev/null
- use hir::{db::DefDatabase, AsAssocItem, AssocItemContainer, Crate, Name, Semantics};
+//! This module generates [moniker](https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/#exportsImports)
+//! for LSIF and LSP.
+
- base_db::{CrateOrigin, FileId, FileLoader, FilePosition, LangCrateOrigin},
++use hir::{AsAssocItem, AssocItemContainer, Crate, Name, Semantics};
+use ide_db::{
- use crate::{doc_links::token_as_doc_comment, RangeInfo};
++ base_db::{CrateOrigin, FilePosition, LangCrateOrigin},
+ defs::{Definition, IdentClass},
+ helpers::pick_best_token,
+ RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{AstNode, SyntaxKind::*, T};
+
- pub(crate) fn crate_for_file(db: &RootDatabase, file_id: FileId) -> Option<Crate> {
- for &krate in db.relevant_crates(file_id).iter() {
- let crate_def_map = db.crate_def_map(krate);
- for (_, data) in crate_def_map.modules() {
- if data.origin.file_id() == Some(file_id) {
- return Some(krate.into());
- }
- }
- }
- None
- }
-
++use crate::{doc_links::token_as_doc_comment, parent_module::crates_for, RangeInfo};
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum MonikerDescriptorKind {
+ Namespace,
+ Type,
+ Term,
+ Method,
+ TypeParameter,
+ Parameter,
+ Macro,
+ Meta,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct MonikerDescriptor {
+ pub name: Name,
+ pub desc: MonikerDescriptorKind,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct MonikerIdentifier {
+ pub crate_name: String,
+ pub description: Vec<MonikerDescriptor>,
+}
+
+// Renders the identifier as `crate_name::seg1::seg2::…`, joining the
+// descriptor names with `::`.
+// NOTE(review): implementing `ToString` directly (rather than `Display`,
+// which provides `to_string` via the blanket impl) triggers clippy's
+// `inherent_to_string` style lint — consider switching upstream.
+impl ToString for MonikerIdentifier {
+ fn to_string(&self) -> String {
+ match self {
+ MonikerIdentifier { description, crate_name } => {
+ format!(
+ "{}::{}",
+ crate_name,
+ description.iter().map(|x| x.name.to_string()).join("::")
+ )
+ }
+ }
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum MonikerKind {
+ Import,
+ Export,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MonikerResult {
+ pub identifier: MonikerIdentifier,
+ pub kind: MonikerKind,
+ pub package_information: PackageInformation,
+}
+
+impl MonikerResult {
+ pub fn from_def(db: &RootDatabase, def: Definition, from_crate: Crate) -> Option<Self> {
+ def_to_moniker(db, def, from_crate)
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PackageInformation {
+ pub name: String,
+ pub repo: Option<String>,
+ pub version: Option<String>,
+}
+
- let current_crate = crate_for_file(db, file_id)?;
+/// Computes the moniker(s) for the token at the given position.
+///
+/// Returns `None` when the file belongs to no crate or no suitable token
+/// exists at `offset`; otherwise returns the token's range together with the
+/// (deduplicated) monikers of every definition the token resolves to.
+pub(crate) fn moniker(
+ db: &RootDatabase,
+ FilePosition { file_id, offset }: FilePosition,
+) -> Option<RangeInfo<Vec<MonikerResult>>> {
+ let sema = &Semantics::new(db);
+ let file = sema.parse(file_id).syntax().clone();
++ let current_crate: hir::Crate = crates_for(db, file_id).pop()?.into();
+ // Prefer identifier-like tokens and comments (score 2) over other tokens
+ // (score 1); trivia is never picked (score 0).
+ let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
+ IDENT
+ | INT_NUMBER
+ | LIFETIME_IDENT
+ | T![self]
+ | T![super]
+ | T![crate]
+ | T![Self]
+ | COMMENT => 2,
+ kind if kind.is_trivia() => 0,
+ _ => 1,
+ })?;
+ // Intra-doc links inside doc comments resolve through the doc-comment path.
+ if let Some(doc_comment) = token_as_doc_comment(&original_token) {
+ return doc_comment.get_definition_with_descend_at(sema, offset, |def, _, _| {
+ let m = def_to_moniker(db, def, current_crate)?;
+ Some(RangeInfo::new(original_token.text_range(), vec![m]))
+ });
+ }
+ // Descend through macro expansions so tokens inside macros resolve to the
+ // definitions they expand to; collect unique monikers across all of them.
+ let navs = sema
+ .descend_into_macros(original_token.clone())
+ .into_iter()
+ .filter_map(|token| {
+ IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| {
+ it.into_iter().flat_map(|def| def_to_moniker(sema.db, def, current_crate))
+ })
+ })
+ .flatten()
+ .unique()
+ .collect::<Vec<_>>();
+ Some(RangeInfo::new(original_token.text_range(), navs))
+}
+
+/// Builds the `MonikerResult` for `def`, or `None` for definitions that have
+/// no stable cross-crate identity: generic params, labels, derive helpers,
+/// builtin attrs, tool modules, non-parameter locals, and defs without a
+/// containing module or crate display name.
+pub(crate) fn def_to_moniker(
+ db: &RootDatabase,
+ def: Definition,
+ from_crate: Crate,
+) -> Option<MonikerResult> {
+ if matches!(
+ def,
+ Definition::GenericParam(_)
+ | Definition::Label(_)
+ | Definition::DeriveHelper(_)
+ | Definition::BuiltinAttr(_)
+ | Definition::ToolModule(_)
+ ) {
+ return None;
+ }
+
+ let module = def.module(db)?;
+ let krate = module.krate();
+ let mut description = vec![];
+ // Path from the crate root down to the containing module, as namespaces.
+ description.extend(module.path_to_root(db).into_iter().filter_map(|x| {
+ Some(MonikerDescriptor { name: x.name(db)?, desc: MonikerDescriptorKind::Namespace })
+ }));
+
+ // Handle associated items within a trait
+ if let Some(assoc) = def.as_assoc_item(db) {
+ let container = assoc.container(db);
+ match container {
+ AssocItemContainer::Trait(trait_) => {
+ // Because different traits can have functions with the same name,
+ // we have to include the trait name as part of the moniker for uniqueness.
+ description.push(MonikerDescriptor {
+ name: trait_.name(db),
+ desc: MonikerDescriptorKind::Type,
+ });
+ }
+ AssocItemContainer::Impl(impl_) => {
+ // Because a struct can implement multiple traits, for implementations
+ // we add both the struct name and the trait name to the path
+ if let Some(adt) = impl_.self_ty(db).as_adt() {
+ description.push(MonikerDescriptor {
+ name: adt.name(db),
+ desc: MonikerDescriptorKind::Type,
+ });
+ }
+
+ if let Some(trait_) = impl_.trait_(db) {
+ description.push(MonikerDescriptor {
+ name: trait_.name(db),
+ desc: MonikerDescriptorKind::Type,
+ });
+ }
+ }
+ }
+ }
+
+ // Fields are qualified by their parent struct/enum/union name.
+ if let Definition::Field(it) = def {
+ description.push(MonikerDescriptor {
+ name: it.parent_def(db).name(db),
+ desc: MonikerDescriptorKind::Type,
+ });
+ }
+
+ let name_desc = match def {
+ // These are handled by top-level guard (for performance).
+ Definition::GenericParam(_)
+ | Definition::Label(_)
+ | Definition::DeriveHelper(_)
+ | Definition::BuiltinAttr(_)
+ | Definition::ToolModule(_) => return None,
+
+ Definition::Local(local) => {
+ // Only parameters get monikers; other locals are not addressable.
+ if !local.is_param(db) {
+ return None;
+ }
+
+ MonikerDescriptor { name: local.name(db), desc: MonikerDescriptorKind::Parameter }
+ }
+ Definition::Macro(m) => {
+ MonikerDescriptor { name: m.name(db), desc: MonikerDescriptorKind::Macro }
+ }
+ Definition::Function(f) => {
+ MonikerDescriptor { name: f.name(db), desc: MonikerDescriptorKind::Method }
+ }
+ Definition::Variant(v) => {
+ MonikerDescriptor { name: v.name(db), desc: MonikerDescriptorKind::Type }
+ }
+ Definition::Const(c) => {
+ MonikerDescriptor { name: c.name(db)?, desc: MonikerDescriptorKind::Term }
+ }
+ Definition::Trait(trait_) => {
+ MonikerDescriptor { name: trait_.name(db), desc: MonikerDescriptorKind::Type }
+ }
+ Definition::TypeAlias(ta) => {
+ MonikerDescriptor { name: ta.name(db), desc: MonikerDescriptorKind::TypeParameter }
+ }
+ Definition::Module(m) => {
+ MonikerDescriptor { name: m.name(db)?, desc: MonikerDescriptorKind::Namespace }
+ }
+ Definition::BuiltinType(b) => {
+ MonikerDescriptor { name: b.name(), desc: MonikerDescriptorKind::Type }
+ }
+ Definition::SelfType(imp) => MonikerDescriptor {
+ name: imp.self_ty(db).as_adt()?.name(db),
+ desc: MonikerDescriptorKind::Type,
+ },
+ Definition::Field(it) => {
+ MonikerDescriptor { name: it.name(db), desc: MonikerDescriptorKind::Term }
+ }
+ Definition::Adt(adt) => {
+ MonikerDescriptor { name: adt.name(db), desc: MonikerDescriptorKind::Type }
+ }
+ Definition::Static(s) => {
+ MonikerDescriptor { name: s.name(db), desc: MonikerDescriptorKind::Meta }
+ }
+ };
+
+ description.push(name_desc);
+
+ Some(MonikerResult {
+ identifier: MonikerIdentifier {
+ crate_name: krate.display_name(db)?.crate_name().to_string(),
+ description,
+ },
+ // A definition is an Export from its own crate, an Import elsewhere.
+ kind: if krate == from_crate { MonikerKind::Export } else { MonikerKind::Import },
+ package_information: {
+ let (name, repo, version) = match krate.origin(db) {
+ CrateOrigin::CratesIo { repo, name } => (
+ name.unwrap_or(krate.display_name(db)?.canonical_name().to_string()),
+ repo,
+ krate.version(db),
+ ),
+ CrateOrigin::Lang(lang) => (
+ krate.display_name(db)?.canonical_name().to_string(),
+ Some("https://github.com/rust-lang/rust/".to_string()),
+ Some(match lang {
+ LangCrateOrigin::Other => {
+ "https://github.com/rust-lang/rust/library/".into()
+ }
+ lang => format!("https://github.com/rust-lang/rust/library/{lang}",),
+ }),
+ ),
+ };
+ PackageInformation { name, repo, version }
+ },
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::fixture;
+
+ use super::MonikerKind;
+
+ /// Asserts that the fixture's cursor position yields no moniker.
+ /// A `Some` result with an empty `info` list still counts as "no moniker".
+ #[track_caller]
+ fn no_moniker(ra_fixture: &str) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ if let Some(x) = analysis.moniker(position).unwrap() {
+ assert_eq!(x.info.len(), 0, "Moniker found but no moniker expected: {:?}", x);
+ }
+ }
+
+ #[track_caller]
+ fn check_moniker(ra_fixture: &str, identifier: &str, package: &str, kind: MonikerKind) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let x = analysis.moniker(position).unwrap().expect("no moniker found").info;
+ assert_eq!(x.len(), 1);
+ let x = x.into_iter().next().unwrap();
+ assert_eq!(identifier, x.identifier.to_string());
+ assert_eq!(package, format!("{:?}", x.package_information));
+ assert_eq!(kind, x.kind);
+ }
+
+ #[test]
+ fn basic() {
+ check_moniker(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::module::func;
+fn main() {
+ func$0();
+}
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub fn func() {}
+}
+"#,
+ "foo::module::func",
+ r#"PackageInformation { name: "foo", repo: Some("https://a.b/foo.git"), version: Some("0.1.0") }"#,
+ MonikerKind::Import,
+ );
+ check_moniker(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::module::func;
+fn main() {
+ func();
+}
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub fn func$0() {}
+}
+"#,
+ "foo::module::func",
+ r#"PackageInformation { name: "foo", repo: Some("https://a.b/foo.git"), version: Some("0.1.0") }"#,
+ MonikerKind::Export,
+ );
+ }
+
+ #[test]
+ fn moniker_for_trait() {
+ check_moniker(
+ r#"
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub trait MyTrait {
+ pub fn func$0() {}
+ }
+}
+"#,
+ "foo::module::MyTrait::func",
+ r#"PackageInformation { name: "foo", repo: Some("https://a.b/foo.git"), version: Some("0.1.0") }"#,
+ MonikerKind::Export,
+ );
+ }
+
+ #[test]
+ fn moniker_for_trait_constant() {
+ check_moniker(
+ r#"
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub trait MyTrait {
+ const MY_CONST$0: u8;
+ }
+}
+"#,
+ "foo::module::MyTrait::MY_CONST",
+ r#"PackageInformation { name: "foo", repo: Some("https://a.b/foo.git"), version: Some("0.1.0") }"#,
+ MonikerKind::Export,
+ );
+ }
+
+ #[test]
+ fn moniker_for_trait_type() {
+ check_moniker(
+ r#"
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub trait MyTrait {
+ type MyType$0;
+ }
+}
+"#,
+ "foo::module::MyTrait::MyType",
+ r#"PackageInformation { name: "foo", repo: Some("https://a.b/foo.git"), version: Some("0.1.0") }"#,
+ MonikerKind::Export,
+ );
+ }
+
+ #[test]
+ fn moniker_for_trait_impl_function() {
+ check_moniker(
+ r#"
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub trait MyTrait {
+ pub fn func() {}
+ }
+
+ struct MyStruct {}
+
+ impl MyTrait for MyStruct {
+ pub fn func$0() {}
+ }
+}
+"#,
+ "foo::module::MyStruct::MyTrait::func",
+ r#"PackageInformation { name: "foo", repo: Some("https://a.b/foo.git"), version: Some("0.1.0") }"#,
+ MonikerKind::Export,
+ );
+ }
+
+ #[test]
+ fn moniker_for_field() {
+ check_moniker(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::St;
+fn main() {
+ let x = St { a$0: 2 };
+}
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub struct St {
+ pub a: i32,
+}
+"#,
+ "foo::St::a",
+ r#"PackageInformation { name: "foo", repo: Some("https://a.b/foo.git"), version: Some("0.1.0") }"#,
+ MonikerKind::Import,
+ );
+ }
+
+ #[test]
+ fn no_moniker_for_local() {
+ no_moniker(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::module::func;
+fn main() {
+ func();
+}
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub fn func() {
+ let x$0 = 2;
+ }
+}
+"#,
+ );
+ }
+}
--- /dev/null
+//! This module implements a reference search.
+//! First, the element at the cursor position must be either an `ast::Name`
+//! or `ast::NameRef`. If it's an `ast::NameRef`, at the classification step we
+//! try to resolve the direct tree parent of this element, otherwise we
+//! already have a definition and just need to get its HIR together with
+//! some information that is needed for further steps of searching.
+//! After that, we collect files that might contain references and look
+//! for text occurrences of the identifier. If there's an `ast::NameRef`
+//! at the index that the match starts at and its tree parent is
+//! resolved to the search element definition, we get a reference.
+
+use hir::{PathResolution, Semantics};
+use ide_db::{
+ base_db::FileId,
+ defs::{Definition, NameClass, NameRefClass},
+ search::{ReferenceCategory, SearchScope, UsageSearchResult},
+ RootDatabase,
+};
++use itertools::Itertools;
+use stdx::hash::NoHashHashMap;
+use syntax::{
+ algo::find_node_at_offset,
+ ast::{self, HasName},
+ match_ast, AstNode,
+ SyntaxKind::*,
+ SyntaxNode, TextRange, TextSize, T,
+};
+
+use crate::{FilePosition, NavigationTarget, TryToNav};
+
+#[derive(Debug, Clone)]
+pub struct ReferenceSearchResult {
+ pub declaration: Option<Declaration>,
+ pub references: NoHashHashMap<FileId, Vec<(TextRange, Option<ReferenceCategory>)>>,
+}
+
+#[derive(Debug, Clone)]
+pub struct Declaration {
+ pub nav: NavigationTarget,
+ pub is_mut: bool,
+}
+
+// Feature: Find All References
+//
+// Shows all references of the item at the cursor location
+//
+// |===
+// | Editor | Shortcut
+//
+// | VS Code | kbd:[Shift+Alt+F12]
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113020670-b7c34f00-917a-11eb-8003-370ac5f2b3cb.gif[]
+pub(crate) fn find_all_refs(
+ sema: &Semantics<'_, RootDatabase>,
+ position: FilePosition,
+ search_scope: Option<SearchScope>,
+) -> Option<Vec<ReferenceSearchResult>> {
+ let _p = profile::span("find_all_refs");
+ let syntax = sema.parse(position.file_id).syntax().clone();
+ let make_searcher = |literal_search: bool| {
+ move |def: Definition| {
+ let declaration = match def {
+ Definition::Module(module) => {
+ Some(NavigationTarget::from_module_to_decl(sema.db, module))
+ }
+ def => def.try_to_nav(sema.db),
+ }
+ .map(|nav| {
+ let decl_range = nav.focus_or_full_range();
+ Declaration {
+ is_mut: decl_mutability(&def, sema.parse(nav.file_id).syntax(), decl_range),
+ nav,
+ }
+ });
+ let mut usages =
+ def.usages(sema).set_scope(search_scope.clone()).include_self_refs().all();
+
+ if literal_search {
+ retain_adt_literal_usages(&mut usages, def, sema);
+ }
+
+ let references = usages
+ .into_iter()
+ .map(|(file_id, refs)| {
+ (
+ file_id,
+ refs.into_iter()
+ .map(|file_ref| (file_ref.range, file_ref.category))
++ .unique()
+ .collect(),
+ )
+ })
+ .collect();
+
+ ReferenceSearchResult { declaration, references }
+ }
+ };
+
+ match name_for_constructor_search(&syntax, position) {
+ Some(name) => {
+ let def = match NameClass::classify(sema, &name)? {
+ NameClass::Definition(it) | NameClass::ConstReference(it) => it,
+ NameClass::PatFieldShorthand { local_def: _, field_ref } => {
+ Definition::Field(field_ref)
+ }
+ };
+ Some(vec![make_searcher(true)(def)])
+ }
+ None => {
+ let search = make_searcher(false);
+ Some(find_defs(sema, &syntax, position.offset)?.map(search).collect())
+ }
+ }
+}
+
+/// Finds every definition referenced by the identifier-like token at
+/// `offset`, descending into macro expansions that preserve the token text.
+/// Returns `None` when no identifier-like token exists at the offset.
+pub(crate) fn find_defs<'a>(
+ sema: &'a Semantics<'_, RootDatabase>,
+ syntax: &SyntaxNode,
+ offset: TextSize,
+) -> Option<impl Iterator<Item = Definition> + 'a> {
+ let token = syntax.token_at_offset(offset).find(|t| {
+ matches!(
+ t.kind(),
+ IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self]
+ )
+ });
+ token.map(|token| {
+ sema.descend_into_macros_with_same_text(token)
+ .into_iter()
+ .filter_map(|it| ast::NameLike::cast(it.parent()?))
+ .filter_map(move |name_like| {
+ // Classify each name-like node into the definition it denotes.
+ let def = match name_like {
+ ast::NameLike::NameRef(name_ref) => {
+ match NameRefClass::classify(sema, &name_ref)? {
+ NameRefClass::Definition(def) => def,
+ // `Foo { bar }` shorthand: treat as the local, not the field.
+ NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
+ Definition::Local(local_ref)
+ }
+ }
+ }
+ ast::NameLike::Name(name) => match NameClass::classify(sema, &name)? {
+ NameClass::Definition(it) | NameClass::ConstReference(it) => it,
+ NameClass::PatFieldShorthand { local_def, field_ref: _ } => {
+ Definition::Local(local_def)
+ }
+ },
+ // Lifetimes may classify as either a reference or a definition.
+ ast::NameLike::Lifetime(lifetime) => {
+ NameRefClass::classify_lifetime(sema, &lifetime)
+ .and_then(|class| match class {
+ NameRefClass::Definition(it) => Some(it),
+ _ => None,
+ })
+ .or_else(|| {
+ NameClass::classify_lifetime(sema, &lifetime)
+ .and_then(NameClass::defined)
+ })?
+ }
+ };
+ Some(def)
+ })
+ })
+}
+
+/// Returns `true` when `def` is a local or field whose declaring `let`
+/// statement binds it through a `mut` identifier pattern (e.g. `let mut x = …`).
+pub(crate) fn decl_mutability(def: &Definition, syntax: &SyntaxNode, range: TextRange) -> bool {
+ // Only locals and fields can be `mut` at a declaration site.
+ match def {
+ Definition::Local(_) | Definition::Field(_) => {}
+ _ => return false,
+ };
+
+ // Only an *initialized* `let` with a plain identifier pattern can carry `mut`.
+ match find_node_at_offset::<ast::LetStmt>(syntax, range.start()) {
+ Some(stmt) if stmt.initializer().is_some() => match stmt.pat() {
+ Some(ast::Pat::IdentPat(it)) => it.mut_token().is_some(),
+ _ => false,
+ },
+ _ => false,
+ }
+}
+
+/// Filter out all non-literal usages for adt-defs
+fn retain_adt_literal_usages(
+ usages: &mut UsageSearchResult,
+ def: Definition,
+ sema: &Semantics<'_, RootDatabase>,
+) {
+ let refs = usages.references.values_mut();
+ match def {
+ Definition::Adt(hir::Adt::Enum(enum_)) => {
+ refs.for_each(|it| {
+ it.retain(|reference| {
+ reference
+ .name
+ .as_name_ref()
+ .map_or(false, |name_ref| is_enum_lit_name_ref(sema, enum_, name_ref))
+ })
+ });
+ usages.references.retain(|_, it| !it.is_empty());
+ }
+ Definition::Adt(_) | Definition::Variant(_) => {
+ refs.for_each(|it| {
+ it.retain(|reference| reference.name.as_name_ref().map_or(false, is_lit_name_ref))
+ });
+ usages.references.retain(|_, it| !it.is_empty());
+ }
+ _ => {}
+ }
+}
+
+/// Returns `Some` if the cursor is at a position for an item to search for all its constructor/literal usages
+fn name_for_constructor_search(syntax: &SyntaxNode, position: FilePosition) -> Option<ast::Name> {
+ let token = syntax.token_at_offset(position.offset).right_biased()?;
+ let token_parent = token.parent()?;
+ let kind = token.kind();
+ if kind == T![;] {
+ ast::Struct::cast(token_parent)
+ .filter(|struct_| struct_.field_list().is_none())
+ .and_then(|struct_| struct_.name())
+ } else if kind == T!['{'] {
+ match_ast! {
+ match token_parent {
+ ast::RecordFieldList(rfl) => match_ast! {
+ match (rfl.syntax().parent()?) {
+ ast::Variant(it) => it.name(),
+ ast::Struct(it) => it.name(),
+ ast::Union(it) => it.name(),
+ _ => None,
+ }
+ },
+ ast::VariantList(vl) => ast::Enum::cast(vl.syntax().parent()?)?.name(),
+ _ => None,
+ }
+ }
+ } else if kind == T!['('] {
+ let tfl = ast::TupleFieldList::cast(token_parent)?;
+ match_ast! {
+ match (tfl.syntax().parent()?) {
+ ast::Variant(it) => it.name(),
+ ast::Struct(it) => it.name(),
+ _ => None,
+ }
+ }
+ } else {
+ None
+ }
+}
+
+/// Returns `true` when `name_ref` sits inside a path or record expression
+/// whose path resolves to a variant of `enum_`.
+fn is_enum_lit_name_ref(
+ sema: &Semantics<'_, RootDatabase>,
+ enum_: hir::Enum,
+ name_ref: &ast::NameRef,
+) -> bool {
+ let path_is_variant_of_enum = |path: ast::Path| {
+ matches!(
+ sema.resolve_path(&path),
+ Some(PathResolution::Def(hir::ModuleDef::Variant(variant)))
+ if variant.parent_enum(sema.db) == enum_
+ )
+ };
+ // Walk up to the nearest path/record expression and check its path.
+ name_ref
+ .syntax()
+ .ancestors()
+ .find_map(|ancestor| {
+ match_ast! {
+ match ancestor {
+ ast::PathExpr(path_expr) => path_expr.path().map(path_is_variant_of_enum),
+ ast::RecordExpr(record_expr) => record_expr.path().map(path_is_variant_of_enum),
+ _ => None,
+ }
+ }
+ })
+ .unwrap_or(false)
+}
+
+/// Returns `true` when the last segment of `path` is exactly `name_ref`.
+fn path_ends_with(path: Option<ast::Path>, name_ref: &ast::NameRef) -> bool {
+ path.and_then(|path| path.segment())
+ .and_then(|segment| segment.name_ref())
+ .map_or(false, |segment| segment == *name_ref)
+}
+
+/// Returns `true` when `name_ref` is the final path segment of an enclosing
+/// path or record expression, i.e. it is used as a literal/constructor head.
+fn is_lit_name_ref(name_ref: &ast::NameRef) -> bool {
+ name_ref.syntax().ancestors().find_map(|ancestor| {
+ match_ast! {
+ match ancestor {
+ ast::PathExpr(path_expr) => Some(path_ends_with(path_expr.path(), name_ref)),
+ ast::RecordExpr(record_expr) => Some(path_ends_with(record_expr.path(), name_ref)),
+ _ => None,
+ }
+ }
+ }).unwrap_or(false)
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+ use ide_db::{base_db::FileId, search::ReferenceCategory};
+ use stdx::format_to;
+
+ use crate::{fixture, SearchScope};
+
+ #[test]
+ fn test_struct_literal_after_space() {
+ check(
+ r#"
+struct Foo $0{
+ a: i32,
+}
+impl Foo {
+ fn f() -> i32 { 42 }
+}
+fn main() {
+ let f: Foo;
+ f = Foo {a: Foo::f()};
+}
+"#,
+ expect![[r#"
+ Foo Struct FileId(0) 0..26 7..10
+
+ FileId(0) 101..104
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_struct_literal_before_space() {
+ check(
+ r#"
+struct Foo$0 {}
+ fn main() {
+ let f: Foo;
+ f = Foo {};
+}
+"#,
+ expect![[r#"
+ Foo Struct FileId(0) 0..13 7..10
+
+ FileId(0) 41..44
+ FileId(0) 54..57
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_struct_literal_with_generic_type() {
+ check(
+ r#"
+struct Foo<T> $0{}
+ fn main() {
+ let f: Foo::<i32>;
+ f = Foo {};
+}
+"#,
+ expect![[r#"
+ Foo Struct FileId(0) 0..16 7..10
+
+ FileId(0) 64..67
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_struct_literal_for_tuple() {
+ check(
+ r#"
+struct Foo$0(i32);
+
+fn main() {
+ let f: Foo;
+ f = Foo(1);
+}
+"#,
+ expect![[r#"
+ Foo Struct FileId(0) 0..16 7..10
+
+ FileId(0) 54..57
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_struct_literal_for_union() {
+ check(
+ r#"
+union Foo $0{
+ x: u32
+}
+
+fn main() {
+ let f: Foo;
+ f = Foo { x: 1 };
+}
+"#,
+ expect![[r#"
+ Foo Union FileId(0) 0..24 6..9
+
+ FileId(0) 62..65
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_enum_after_space() {
+ check(
+ r#"
+enum Foo $0{
+ A,
+ B(),
+ C{},
+}
+fn main() {
+ let f: Foo;
+ f = Foo::A;
+ f = Foo::B();
+ f = Foo::C{};
+}
+"#,
+ expect![[r#"
+ Foo Enum FileId(0) 0..37 5..8
+
+ FileId(0) 74..77
+ FileId(0) 90..93
+ FileId(0) 108..111
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_variant_record_after_space() {
+ check(
+ r#"
+enum Foo {
+ A $0{ n: i32 },
+ B,
+}
+fn main() {
+ let f: Foo;
+ f = Foo::B;
+ f = Foo::A { n: 92 };
+}
+"#,
+ expect![[r#"
+ A Variant FileId(0) 15..27 15..16
+
+ FileId(0) 95..96
+ "#]],
+ );
+ }
+ #[test]
+ fn test_variant_tuple_before_paren() {
+ check(
+ r#"
+enum Foo {
+ A$0(i32),
+ B,
+}
+fn main() {
+ let f: Foo;
+ f = Foo::B;
+ f = Foo::A(92);
+}
+"#,
+ expect![[r#"
+ A Variant FileId(0) 15..21 15..16
+
+ FileId(0) 89..90
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_enum_before_space() {
+ check(
+ r#"
+enum Foo$0 {
+ A,
+ B,
+}
+fn main() {
+ let f: Foo;
+ f = Foo::A;
+}
+"#,
+ expect![[r#"
+ Foo Enum FileId(0) 0..26 5..8
+
+ FileId(0) 50..53
+ FileId(0) 63..66
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_enum_with_generic_type() {
+ check(
+ r#"
+enum Foo<T> $0{
+ A(T),
+ B,
+}
+fn main() {
+ let f: Foo<i8>;
+ f = Foo::A(1);
+}
+"#,
+ expect![[r#"
+ Foo Enum FileId(0) 0..32 5..8
+
+ FileId(0) 73..76
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_enum_for_tuple() {
+ check(
+ r#"
+enum Foo$0{
+ A(i8),
+ B(i8),
+}
+fn main() {
+ let f: Foo;
+ f = Foo::A(1);
+}
+"#,
+ expect![[r#"
+ Foo Enum FileId(0) 0..33 5..8
+
+ FileId(0) 70..73
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_for_local() {
+ check(
+ r#"
+fn main() {
+ let mut i = 1;
+ let j = 1;
+ i = i$0 + j;
+
+ {
+ i = 0;
+ }
+
+ i = 5;
+}"#,
+ expect![[r#"
+ i Local FileId(0) 20..25 24..25 Write
+
+ FileId(0) 50..51 Write
+ FileId(0) 54..55 Read
+ FileId(0) 76..77 Write
+ FileId(0) 94..95 Write
+ "#]],
+ );
+ }
+
+ #[test]
+ fn search_filters_by_range() {
+ check(
+ r#"
+fn foo() {
+ let spam$0 = 92;
+ spam + spam
+}
+fn bar() {
+ let spam = 92;
+ spam + spam
+}
+"#,
+ expect![[r#"
+ spam Local FileId(0) 19..23 19..23
+
+ FileId(0) 34..38 Read
+ FileId(0) 41..45 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_for_param_inside() {
+ check(
+ r#"
+fn foo(i : u32) -> u32 { i$0 }
+"#,
+ expect![[r#"
+ i ValueParam FileId(0) 7..8 7..8
+
+ FileId(0) 25..26 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_for_fn_param() {
+ check(
+ r#"
+fn foo(i$0 : u32) -> u32 { i }
+"#,
+ expect![[r#"
+ i ValueParam FileId(0) 7..8 7..8
+
+ FileId(0) 25..26 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_field_name() {
+ check(
+ r#"
+//- /lib.rs
+struct Foo {
+ pub spam$0: u32,
+}
+
+fn main(s: Foo) {
+ let f = s.spam;
+}
+"#,
+ expect![[r#"
+ spam Field FileId(0) 17..30 21..25
+
+ FileId(0) 67..71 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_impl_item_name() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ fn f$0(&self) { }
+}
+"#,
+ expect![[r#"
+ f Function FileId(0) 27..43 30..31
+
+ (no references)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_enum_var_name() {
+ check(
+ r#"
+enum Foo {
+ A,
+ B$0,
+ C,
+}
+"#,
+ expect![[r#"
+ B Variant FileId(0) 22..23 22..23
+
+ (no references)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_enum_var_field() {
+ check(
+ r#"
+enum Foo {
+ A,
+ B { field$0: u8 },
+ C,
+}
+"#,
+ expect![[r#"
+ field Field FileId(0) 26..35 26..31
+
+ (no references)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_two_modules() {
+ check(
+ r#"
+//- /lib.rs
+pub mod foo;
+pub mod bar;
+
+fn f() {
+ let i = foo::Foo { n: 5 };
+}
+
+//- /foo.rs
+use crate::bar;
+
+pub struct Foo {
+ pub n: u32,
+}
+
+fn f() {
+ let i = bar::Bar { n: 5 };
+}
+
+//- /bar.rs
+use crate::foo;
+
+pub struct Bar {
+ pub n: u32,
+}
+
+fn f() {
+ let i = foo::Foo$0 { n: 5 };
+}
+"#,
+ expect![[r#"
+ Foo Struct FileId(1) 17..51 28..31
+
+ FileId(0) 53..56
+ FileId(2) 79..82
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_decl_module() {
+ check(
+ r#"
+//- /lib.rs
+mod foo$0;
+
+use foo::Foo;
+
+fn f() {
+ let i = Foo { n: 5 };
+}
+
+//- /foo.rs
+pub struct Foo {
+ pub n: u32,
+}
+"#,
+ expect![[r#"
+ foo Module FileId(0) 0..8 4..7
+
+ FileId(0) 14..17 Import
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_decl_module_on_self() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+
+//- /foo.rs
+use self$0;
+"#,
+ expect![[r#"
+ foo Module FileId(0) 0..8 4..7
+
+ FileId(1) 4..8 Import
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_decl_module_on_self_crate_root() {
+ check(
+ r#"
+//- /lib.rs
+use self$0;
+"#,
+ expect![[r#"
+ Module FileId(0) 0..10
+
+ FileId(0) 4..8 Import
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_super_mod_vis() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+
+//- /foo.rs
+mod some;
+use some::Foo;
+
+fn f() {
+ let i = Foo { n: 5 };
+}
+
+//- /foo/some.rs
+pub(super) struct Foo$0 {
+ pub n: u32,
+}
+"#,
+ expect![[r#"
+ Foo Struct FileId(2) 0..41 18..21
+
+ FileId(1) 20..23 Import
+ FileId(1) 47..50
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_with_scope() {
+ let code = r#"
+ //- /lib.rs
+ mod foo;
+ mod bar;
+
+ pub fn quux$0() {}
+
+ //- /foo.rs
+ fn f() { super::quux(); }
+
+ //- /bar.rs
+ fn f() { super::quux(); }
+ "#;
+
+ check_with_scope(
+ code,
+ None,
+ expect![[r#"
+ quux Function FileId(0) 19..35 26..30
+
+ FileId(1) 16..20
+ FileId(2) 16..20
+ "#]],
+ );
+
+ check_with_scope(
+ code,
+ Some(SearchScope::single_file(FileId(2))),
+ expect![[r#"
+ quux Function FileId(0) 19..35 26..30
+
+ FileId(2) 16..20
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_macro_def() {
+ check(
+ r#"
+#[macro_export]
+macro_rules! m1$0 { () => (()) }
+
+fn foo() {
+ m1();
+ m1();
+}
+"#,
+ expect![[r#"
+ m1 Macro FileId(0) 0..46 29..31
+
+ FileId(0) 63..65
+ FileId(0) 73..75
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_basic_highlight_read_write() {
+ check(
+ r#"
+fn foo() {
+ let mut i$0 = 0;
+ i = i + 1;
+}
+"#,
+ expect![[r#"
+ i Local FileId(0) 19..24 23..24 Write
+
+ FileId(0) 34..35 Write
+ FileId(0) 38..39 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_basic_highlight_field_read_write() {
+ check(
+ r#"
+struct S {
+ f: u32,
+}
+
+fn foo() {
+ let mut s = S{f: 0};
+ s.f$0 = 0;
+}
+"#,
+ expect![[r#"
+ f Field FileId(0) 15..21 15..16
+
+ FileId(0) 55..56 Read
+ FileId(0) 68..69 Write
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_basic_highlight_decl_no_write() {
+ check(
+ r#"
+fn foo() {
+ let i$0;
+ i = 1;
+}
+"#,
+ expect![[r#"
+ i Local FileId(0) 19..20 19..20
+
+ FileId(0) 26..27 Write
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_struct_function_refs_outside_module() {
+ check(
+ r#"
+mod foo {
+ pub struct Foo;
+
+ impl Foo {
+ pub fn new$0() -> Foo { Foo }
+ }
+}
+
+fn main() {
+ let _f = foo::Foo::new();
+}
+"#,
+ expect![[r#"
+ new Function FileId(0) 54..81 61..64
+
+ FileId(0) 126..129
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_nested_module() {
+ check(
+ r#"
+//- /lib.rs
+mod foo { mod bar; }
+
+fn f$0() {}
+
+//- /foo/bar.rs
+use crate::f;
+
+fn g() { f(); }
+"#,
+ expect![[r#"
+ f Function FileId(0) 22..31 25..26
+
+ FileId(1) 11..12 Import
+ FileId(1) 24..25
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_struct_pat() {
+ check(
+ r#"
+struct S {
+ field$0: u8,
+}
+
+fn f(s: S) {
+ match s {
+ S { field } => {}
+ }
+}
+"#,
+ expect![[r#"
+ field Field FileId(0) 15..24 15..20
+
+ FileId(0) 68..73 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_enum_var_pat() {
+ check(
+ r#"
+enum En {
+ Variant {
+ field$0: u8,
+ }
+}
+
+fn f(e: En) {
+ match e {
+ En::Variant { field } => {}
+ }
+}
+"#,
+ expect![[r#"
+ field Field FileId(0) 32..41 32..37
+
+ FileId(0) 102..107 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_all_refs_enum_var_privacy() {
+ check(
+ r#"
+mod m {
+ pub enum En {
+ Variant {
+ field$0: u8,
+ }
+ }
+}
+
+fn f() -> m::En {
+ m::En::Variant { field: 0 }
+}
+"#,
+ expect![[r#"
+ field Field FileId(0) 56..65 56..61
+
+ FileId(0) 125..130 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_self_refs() {
+ check(
+ r#"
+struct Foo { bar: i32 }
+
+impl Foo {
+ fn foo(self) {
+ let x = self$0.bar;
+ if true {
+ let _ = match () {
+ () => self,
+ };
+ }
+ }
+}
+"#,
+ expect![[r#"
+ self SelfParam FileId(0) 47..51 47..51
+
+ FileId(0) 71..75 Read
+ FileId(0) 152..156 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_self_refs_decl() {
+ check(
+ r#"
+struct Foo { bar: i32 }
+
+impl Foo {
+ fn foo(self$0) {
+ self;
+ }
+}
+"#,
+ expect![[r#"
+ self SelfParam FileId(0) 47..51 47..51
+
+ FileId(0) 63..67 Read
+ "#]],
+ );
+ }
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ check_with_scope(ra_fixture, None, expect)
+ }
+
+ fn check_with_scope(ra_fixture: &str, search_scope: Option<SearchScope>, expect: Expect) {
+ let (analysis, pos) = fixture::position(ra_fixture);
+ let refs = analysis.find_all_refs(pos, search_scope).unwrap().unwrap();
+
+ let mut actual = String::new();
+ for refs in refs { // one result per resolved definition; some cursors resolve to several
+ actual += "\n\n";
+
+ if let Some(decl) = refs.declaration {
+ format_to!(actual, "{}", decl.nav.debug_render());
+ if decl.is_mut { // `let mut` declarations render with a trailing Write marker
+ format_to!(actual, " {:?}", ReferenceCategory::Write)
+ }
+ actual += "\n\n";
+ }
+
+ for (file_id, references) in &refs.references {
+ for (range, access) in references {
+ format_to!(actual, "{:?} {:?}", file_id, range);
+ if let Some(access) = access { // Read/Write/Import annotation, when classified
+ format_to!(actual, " {:?}", access);
+ }
+ actual += "\n";
+ }
+ }
+
+ if refs.references.is_empty() {
+ actual += "(no references)\n";
+ }
+ }
+ expect.assert_eq(actual.trim_start())
+ }
+
+ #[test]
+ fn test_find_lifetimes_function() {
+ check(
+ r#"
+trait Foo<'a> {}
+impl<'a> Foo<'a> for &'a () {}
+fn foo<'a, 'b: 'a>(x: &'a$0 ()) -> &'a () where &'a (): Foo<'a> {
+ fn bar<'a>(_: &'a ()) {}
+ x
+}
+"#,
+ expect![[r#"
+ 'a LifetimeParam FileId(0) 55..57 55..57
+
+ FileId(0) 63..65
+ FileId(0) 71..73
+ FileId(0) 82..84
+ FileId(0) 95..97
+ FileId(0) 106..108
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_lifetimes_type_alias() {
+ check(
+ r#"
+type Foo<'a, T> where T: 'a$0 = &'a T;
+"#,
+ expect![[r#"
+ 'a LifetimeParam FileId(0) 9..11 9..11
+
+ FileId(0) 25..27
+ FileId(0) 31..33
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_lifetimes_trait_impl() {
+ check(
+ r#"
+trait Foo<'a> {
+ fn foo() -> &'a ();
+}
+impl<'a> Foo<'a> for &'a () {
+ fn foo() -> &'a$0 () {
+ unimplemented!()
+ }
+}
+"#,
+ expect![[r#"
+ 'a LifetimeParam FileId(0) 47..49 47..49
+
+ FileId(0) 55..57
+ FileId(0) 64..66
+ FileId(0) 89..91
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_map_range_to_original() {
+ check(
+ r#"
+macro_rules! foo {($i:ident) => {$i} }
+fn main() {
+ let a$0 = "test";
+ foo!(a);
+}
+"#,
+ expect![[r#"
+ a Local FileId(0) 59..60 59..60
+
+ FileId(0) 80..81 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_map_range_to_original_ref() {
+ check(
+ r#"
+macro_rules! foo {($i:ident) => {$i} }
+fn main() {
+ let a = "test";
+ foo!(a$0);
+}
+"#,
+ expect![[r#"
+ a Local FileId(0) 59..60 59..60
+
+ FileId(0) 80..81 Read
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_labels() {
+ check(
+ r#"
+fn foo<'a>() -> &'a () {
+ 'a: loop {
+ 'b: loop {
+ continue 'a$0;
+ }
+ break 'a;
+ }
+}
+"#,
+ expect![[r#"
+ 'a Label FileId(0) 29..32 29..31
+
+ FileId(0) 80..82
+ FileId(0) 108..110
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_find_const_param() {
+ check(
+ r#"
+fn foo<const FOO$0: usize>() -> usize {
+ FOO
+}
+"#,
+ expect![[r#"
+ FOO ConstParam FileId(0) 7..23 13..16
+
+ FileId(0) 42..45
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_trait() {
+ check(
+ r#"
+trait Foo$0 where Self: {}
+
+impl Foo for () {}
+"#,
+ expect![[r#"
+ Foo Trait FileId(0) 0..24 6..9
+
+ FileId(0) 31..34
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_trait_self() {
+ check(
+ r#"
+trait Foo where Self$0 {
+ fn f() -> Self;
+}
+
+impl Foo for () {}
+"#,
+ expect![[r#"
+ Self TypeParam FileId(0) 6..9 6..9
+
+ FileId(0) 16..20
+ FileId(0) 37..41
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_self_ty() {
+ check(
+ r#"
+ struct $0Foo;
+
+ impl Foo where Self: {
+ fn f() -> Self;
+ }
+ "#,
+ expect![[r#"
+ Foo Struct FileId(0) 0..11 7..10
+
+ FileId(0) 18..21
+ FileId(0) 28..32
+ FileId(0) 50..54
+ "#]],
+ );
+ check(
+ r#"
+struct Foo;
+
+impl Foo where Self: {
+ fn f() -> Self$0;
+}
+"#,
+ expect![[r#"
+ impl Impl FileId(0) 13..57 18..21
+
+ FileId(0) 18..21
+ FileId(0) 28..32
+ FileId(0) 50..54
+ "#]],
+ );
+ }
+ #[test]
+ fn test_self_variant_with_payload() {
+ check(
+ r#"
+enum Foo { Bar() }
+
+impl Foo {
+ fn foo(self) {
+ match self {
+ Self::Bar$0() => (),
+ }
+ }
+}
+
+"#,
+ expect![[r#"
+ Bar Variant FileId(0) 11..16 11..14
+
+ FileId(0) 89..92
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_attr_differs_from_fn_with_same_name() {
+ check(
+ r#"
+#[test]
+fn test$0() {
+ test();
+}
+"#,
+ expect![[r#"
+ test Function FileId(0) 0..33 11..15
+
+ FileId(0) 24..28
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_const_in_pattern() {
+ check(
+ r#"
+const A$0: i32 = 42;
+
+fn main() {
+ match A {
+ A => (),
+ _ => (),
+ }
+ if let A = A {}
+}
+"#,
+ expect![[r#"
+ A Const FileId(0) 0..18 6..7
+
+ FileId(0) 42..43
+ FileId(0) 54..55
+ FileId(0) 97..98
+ FileId(0) 101..102
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_primitives() {
+ check(
+ r#"
+fn foo(_: bool) -> bo$0ol { true }
+"#,
+ expect![[r#"
+ FileId(0) 10..14
+ FileId(0) 19..23
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_transitive() {
+ check(
+ r#"
+//- /level3.rs new_source_root:local crate:level3
+pub struct Fo$0o;
+//- /level2.rs new_source_root:local crate:level2 deps:level3
+pub use level3::Foo;
+//- /level1.rs new_source_root:local crate:level1 deps:level2
+pub use level2::Foo;
+//- /level0.rs new_source_root:local crate:level0 deps:level1
+pub use level1::Foo;
+"#,
+ expect![[r#"
+ Foo Struct FileId(0) 0..15 11..14
+
+ FileId(1) 16..19 Import
+ FileId(2) 16..19 Import
+ FileId(3) 16..19 Import
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_decl_macro_references() {
+ check(
+ r#"
+//- /lib.rs crate:lib
+#[macro_use]
+mod qux;
+mod bar;
+
+pub use self::foo;
+//- /qux.rs
+#[macro_export]
+macro_rules! foo$0 {
+ () => {struct Foo;};
+}
+//- /bar.rs
+foo!();
+//- /other.rs crate:other deps:lib new_source_root:local
+lib::foo!();
+"#,
+ expect![[r#"
+ foo Macro FileId(1) 0..61 29..32
+
+ FileId(0) 46..49 Import
+ FileId(2) 0..3
+ FileId(3) 5..8
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_doesnt_reference_attribute_on_call() {
+ check(
+ r#"
+macro_rules! m {
+ () => {};
+}
+
+#[proc_macro_test::attr_noop]
+m$0!();
+
+"#,
+ expect![[r#"
+ m Macro FileId(0) 0..32 13..14
+
+ FileId(0) 64..65
+ "#]],
+ );
+ }
+
+ #[test]
+ fn multi_def() {
+ check(
+ r#"
+macro_rules! m {
+ ($name:ident) => {
+ mod module {
+ pub fn $name() {}
+ }
+
+ pub fn $name() {}
+ }
+}
+
+m!(func$0);
+
+fn f() {
+ func();
+ module::func();
+}
+ "#,
+ expect![[r#"
+ func Function FileId(0) 137..146 140..144
+
+ FileId(0) 161..165
+
+
+ func Function FileId(0) 137..146 140..144
+
+ FileId(0) 181..185
+ "#]],
+ )
+ }
+
+ #[test]
+ fn attr_expanded() {
+ check(
+ r#"
+//- proc_macros: identity
+#[proc_macros::identity]
+fn func$0() {
+ func();
+}
+"#,
+ expect![[r#"
+ func Function FileId(0) 25..50 28..32
+
+ FileId(0) 41..45
+ "#]],
+ )
+ }
+
+ #[test]
+ fn attr_assoc_item() {
+ check(
+ r#"
+//- proc_macros: identity
+
+trait Trait {
+ #[proc_macros::identity]
+ fn func() {
+ Self::func$0();
+ }
+}
+"#,
+ expect![[r#"
+ func Function FileId(0) 48..87 51..55
+
+ FileId(0) 74..78
+ "#]],
+ )
+ }
+
+ // FIXME: import is classified as function
+ #[test]
+ fn attr() {
+ check(
+ r#"
+//- proc_macros: identity
+use proc_macros::identity;
+
+#[proc_macros::$0identity]
+fn func() {}
+"#,
+ expect![[r#"
+ identity Attribute FileId(1) 1..107 32..40
+
+ FileId(0) 43..51
+ "#]],
+ );
+ check(
+ r#"
+#![crate_type="proc-macro"]
+#[proc_macro_attribute]
+fn func$0() {}
+"#,
+ expect![[r#"
+ func Attribute FileId(0) 28..64 55..59
+
+ (no references)
+ "#]],
+ );
+ }
+
+ // FIXME: import is classified as function
+ #[test]
+ fn proc_macro() {
+ check(
+ r#"
+//- proc_macros: mirror
+use proc_macros::mirror;
+
+mirror$0! {}
+"#,
+ expect![[r#"
+ mirror Macro FileId(1) 1..77 22..28
+
+ FileId(0) 26..32
+ "#]],
+ )
+ }
+
+ #[test]
+ fn derive() {
+ check(
+ r#"
+//- proc_macros: derive_identity
+//- minicore: derive
+use proc_macros::DeriveIdentity;
+
+#[derive(proc_macros::DeriveIdentity$0)]
+struct Foo;
+"#,
+ expect![[r#"
+ derive_identity Derive FileId(2) 1..107 45..60
+
+ FileId(0) 17..31 Import
+ FileId(0) 56..70
+ "#]],
+ );
+ check(
+ r#"
+#![crate_type="proc-macro"]
+#[proc_macro_derive(Derive, attributes(x))]
+pub fn deri$0ve(_stream: TokenStream) -> TokenStream {}
+"#,
+ expect![[r#"
+ derive Derive FileId(0) 28..125 79..85
+
+ (no references)
+ "#]],
+ );
+ }
+}
--- /dev/null
+//! This module provides primitives for showing type and function parameter information when editing
+//! a call or use-site.
+
+use std::collections::BTreeSet;
+
+use either::Either;
+use hir::{AssocItem, GenericParam, HasAttrs, HirDisplay, Semantics, Trait};
+use ide_db::{active_parameter::callable_for_node, base_db::FilePosition};
+use stdx::format_to;
+use syntax::{
+ algo,
+ ast::{self, HasArgList},
+ match_ast, AstNode, Direction, SyntaxToken, TextRange, TextSize,
+};
+
+use crate::RootDatabase;
+
+/// Contains information about an item signature as seen from a use site.
+///
+/// This includes the "active parameter", which is the parameter whose value is currently being
+/// edited.
+#[derive(Debug)]
+pub struct SignatureHelp {
+ pub doc: Option<String>, // doc-comment text of the callee, if any
+ pub signature: String, // rendered signature, e.g. `fn foo(x: u32, y: u32) -> u32`
+ pub active_parameter: Option<usize>, // index into `parameters` of the param being edited
+ parameters: Vec<TextRange>, // range of each parameter label within `signature`
+}
+
+impl SignatureHelp {
+ pub fn parameter_labels(&self) -> impl Iterator<Item = &str> + '_ {
+ self.parameters.iter().map(move |&it| &self.signature[it]) // slice `signature` by each stored range
+ }
+
+ pub fn parameter_ranges(&self) -> &[TextRange] {
+ &self.parameters
+ }
+
+ fn push_call_param(&mut self, param: &str) {
+ self.push_param('(', param); // call params: list opens with `(`
+ }
+
+ fn push_generic_param(&mut self, param: &str) {
+ self.push_param('<', param); // generic params: list opens with `<`
+ }
+
+ fn push_param(&mut self, opening_delim: char, param: &str) {
+ if !self.signature.ends_with(opening_delim) { // not the first param: needs a separator
+ self.signature.push_str(", ");
+ }
+ let start = TextSize::of(&self.signature); // label begins at the current end of `signature`
+ self.signature.push_str(param);
+ let end = TextSize::of(&self.signature);
+ self.parameters.push(TextRange::new(start, end)) // record label range for highlighting
+ }
+}
+
+/// Computes parameter information for the given position.
+pub(crate) fn signature_help(db: &RootDatabase, position: FilePosition) -> Option<SignatureHelp> {
+ let sema = Semantics::new(db);
+ let file = sema.parse(position.file_id);
+ let file = file.syntax();
+ let token = file
+ .token_at_offset(position.offset)
+ .left_biased()
+ // if the cursor is sandwiched between two space tokens and the call is unclosed
+ // this prevents us from leaving the CallExpression
+ .and_then(|tok| algo::skip_trivia_token(tok, Direction::Prev))?;
+ let token = sema.descend_into_macros_single(token); // map the token into macro expansions
+
+ for node in token.parent_ancestors() { // walk outwards until an argument list encloses the token
+ match_ast! {
+ match node {
+ ast::ArgList(arg_list) => {
+ let cursor_outside = arg_list.r_paren_token().as_ref() == Some(&token); // cursor on `)` is outside the call
+ if cursor_outside {
+ return None;
+ }
+ return signature_help_for_call(&sema, token);
+ },
+ ast::GenericArgList(garg_list) => {
+ let cursor_outside = garg_list.r_angle_token().as_ref() == Some(&token); // cursor on `>` is outside the list
+ if cursor_outside {
+ return None;
+ }
+ return signature_help_for_generics(&sema, token);
+ },
+ _ => (),
+ }
+ }
+ }
+
+ None
+}
+
+fn signature_help_for_call(
+ sema: &Semantics<'_, RootDatabase>,
+ token: SyntaxToken,
+) -> Option<SignatureHelp> {
+ // Find the calling expression and its NameRef
+ let mut node = token.parent()?;
+ let calling_node = loop {
+ if let Some(callable) = ast::CallableExpr::cast(node.clone()) {
+ if callable
+ .arg_list()
+ .map_or(false, |it| it.syntax().text_range().contains(token.text_range().start()))
+ {
+ break callable; // the token sits inside this call's argument list
+ }
+ }
+
+ // Stop at multi-line expressions, since the signature of the outer call is not very
+ // helpful inside them.
+ if let Some(expr) = ast::Expr::cast(node.clone()) {
+ if expr.syntax().text().contains_char('\n') {
+ return None;
+ }
+ }
+
+ node = node.parent()?;
+ };
+
+ let (callable, active_parameter) = callable_for_node(sema, &calling_node, &token)?;
+
+ let mut res =
+ SignatureHelp { doc: None, signature: String::new(), parameters: vec![], active_parameter };
+
+ let db = sema.db;
+ let mut fn_params = None; // declared param list; fallback when inferred types are unknown (see below)
+ match callable.kind() {
+ hir::CallableKind::Function(func) => {
+ res.doc = func.docs(db).map(|it| it.into());
+ format_to!(res.signature, "fn {}", func.name(db));
+ fn_params = Some(match callable.receiver_param(db) {
+ Some(_self) => func.params_without_self(db), // method call: receiver rendered separately
+ None => func.assoc_fn_params(db),
+ });
+ }
+ hir::CallableKind::TupleStruct(strukt) => {
+ res.doc = strukt.docs(db).map(|it| it.into());
+ format_to!(res.signature, "struct {}", strukt.name(db));
+ }
+ hir::CallableKind::TupleEnumVariant(variant) => {
+ res.doc = variant.docs(db).map(|it| it.into());
+ format_to!(
+ res.signature,
+ "enum {}::{}",
+ variant.parent_enum(db).name(db),
+ variant.name(db)
+ );
+ }
+ hir::CallableKind::Closure | hir::CallableKind::FnPtr | hir::CallableKind::Other => (), // nothing named to render
+ }
+
+ res.signature.push('(');
+ {
+ if let Some(self_param) = callable.receiver_param(db) {
+ format_to!(res.signature, "{}", self_param)
+ }
+ let mut buf = String::new();
+ for (idx, (pat, ty)) in callable.params(db).into_iter().enumerate() {
+ buf.clear();
+ if let Some(pat) = pat {
+ match pat {
+ Either::Left(_self) => format_to!(buf, "self: "),
+ Either::Right(pat) => format_to!(buf, "{}: ", pat),
+ }
+ }
+ // APITs (argument position `impl Trait`s) are inferred as {unknown} as the user is
+ // in the middle of entering call arguments.
+ // In that case, fall back to render definitions of the respective parameters.
+ // This is overly conservative: we do not substitute known type vars
+ // (see FIXME in tests::impl_trait) and falling back on any unknowns.
+ match (ty.contains_unknown(), fn_params.as_deref()) {
+ (true, Some(fn_params)) => format_to!(buf, "{}", fn_params[idx].ty().display(db)),
+ _ => format_to!(buf, "{}", ty.display(db)),
+ }
+ res.push_call_param(&buf);
+ }
+ }
+ res.signature.push(')');
+
+ let mut render = |ret_type: hir::Type| {
+ if !ret_type.is_unit() { // omit `-> ()`
+ format_to!(res.signature, " -> {}", ret_type.display(db));
+ }
+ };
+ match callable.kind() {
+ hir::CallableKind::Function(func) if callable.return_type().contains_unknown() => {
+ render(func.ret_type(db)) // inferred return is unknown: fall back to the declared one
+ }
+ hir::CallableKind::Function(_)
+ | hir::CallableKind::Closure
+ | hir::CallableKind::FnPtr
+ | hir::CallableKind::Other => render(callable.return_type()),
+ hir::CallableKind::TupleStruct(_) | hir::CallableKind::TupleEnumVariant(_) => {} // constructors: no return rendered
+ }
+ Some(res)
+}
+
+fn signature_help_for_generics(
+ sema: &Semantics<'_, RootDatabase>,
+ token: SyntaxToken,
+) -> Option<SignatureHelp> {
+ let parent = token.parent()?;
+ let arg_list = parent
+ .ancestors()
+ .filter_map(ast::GenericArgList::cast)
+ .find(|list| list.syntax().text_range().contains(token.text_range().start()))?;
+
+ let mut active_parameter = arg_list
+ .generic_args()
+ .take_while(|arg| arg.syntax().text_range().end() <= token.text_range().start())
+ .count(); // number of args ending before the cursor
+
+ let first_arg_is_non_lifetime = arg_list
+ .generic_args()
+ .next()
+ .map_or(false, |arg| !matches!(arg, ast::GenericArg::LifetimeArg(_)));
+
+ let mut generics_def = if let Some(path) =
+ arg_list.syntax().ancestors().find_map(ast::Path::cast)
+ {
+ let res = sema.resolve_path(&path)?;
+ let generic_def: hir::GenericDef = match res {
+ hir::PathResolution::Def(hir::ModuleDef::Adt(it)) => it.into(),
+ hir::PathResolution::Def(hir::ModuleDef::Function(it)) => it.into(),
+ hir::PathResolution::Def(hir::ModuleDef::Trait(it)) => it.into(),
+ hir::PathResolution::Def(hir::ModuleDef::TypeAlias(it)) => it.into(),
+ hir::PathResolution::Def(hir::ModuleDef::Variant(it)) => it.into(),
+ hir::PathResolution::Def(hir::ModuleDef::BuiltinType(_))
+ | hir::PathResolution::Def(hir::ModuleDef::Const(_))
+ | hir::PathResolution::Def(hir::ModuleDef::Macro(_))
+ | hir::PathResolution::Def(hir::ModuleDef::Module(_))
+ | hir::PathResolution::Def(hir::ModuleDef::Static(_)) => return None, // these take no generic args
+ hir::PathResolution::BuiltinAttr(_)
+ | hir::PathResolution::ToolModule(_)
+ | hir::PathResolution::Local(_)
+ | hir::PathResolution::TypeParam(_)
+ | hir::PathResolution::ConstParam(_)
+ | hir::PathResolution::SelfType(_)
+ | hir::PathResolution::DeriveHelper(_) => return None,
+ };
+
+ generic_def
+ } else if let Some(method_call) = arg_list.syntax().parent().and_then(ast::MethodCallExpr::cast)
+ {
+ // recv.method::<$0>()
+ let method = sema.resolve_method_call(&method_call)?;
+ method.into()
+ } else {
+ return None;
+ };
+
+ let mut res = SignatureHelp {
+ doc: None,
+ signature: String::new(),
+ parameters: vec![],
+ active_parameter: None, // filled in below, after lifetime-elision adjustment
+ };
+
+ let db = sema.db;
+ match generics_def {
+ hir::GenericDef::Function(it) => {
+ res.doc = it.docs(db).map(|it| it.into());
+ format_to!(res.signature, "fn {}", it.name(db));
+ }
+ hir::GenericDef::Adt(hir::Adt::Enum(it)) => {
+ res.doc = it.docs(db).map(|it| it.into());
+ format_to!(res.signature, "enum {}", it.name(db));
+ }
+ hir::GenericDef::Adt(hir::Adt::Struct(it)) => {
+ res.doc = it.docs(db).map(|it| it.into());
+ format_to!(res.signature, "struct {}", it.name(db));
+ }
+ hir::GenericDef::Adt(hir::Adt::Union(it)) => {
+ res.doc = it.docs(db).map(|it| it.into());
+ format_to!(res.signature, "union {}", it.name(db));
+ }
+ hir::GenericDef::Trait(it) => {
+ res.doc = it.docs(db).map(|it| it.into());
+ format_to!(res.signature, "trait {}", it.name(db));
+ }
+ hir::GenericDef::TypeAlias(it) => {
+ res.doc = it.docs(db).map(|it| it.into());
+ format_to!(res.signature, "type {}", it.name(db));
+ }
+ hir::GenericDef::Variant(it) => {
+ // In paths, generics of an enum can be specified *after* one of its variants.
+ // eg. `None::<u8>`
+ // We'll use the signature of the enum, but include the docs of the variant.
+ res.doc = it.docs(db).map(|it| it.into());
+ let it = it.parent_enum(db);
+ format_to!(res.signature, "enum {}", it.name(db));
+ generics_def = it.into(); // continue with the enum's generic params, not the variant's
+ }
+ // These don't have generic args that can be specified
+ hir::GenericDef::Impl(_) | hir::GenericDef::Const(_) => return None,
+ }
+
+ let params = generics_def.params(sema.db);
+ let num_lifetime_params =
+ params.iter().take_while(|param| matches!(param, GenericParam::LifetimeParam(_))).count();
+ if first_arg_is_non_lifetime {
+ // Lifetime parameters were omitted.
+ active_parameter += num_lifetime_params;
+ }
+ res.active_parameter = Some(active_parameter);
+
+ res.signature.push('<');
+ let mut buf = String::new();
+ for param in params {
+ if let hir::GenericParam::TypeParam(ty) = param {
+ if ty.is_implicit(db) { // implicit type params are not user-writable; skip
+ continue;
+ }
+ }
+
+ buf.clear();
+ format_to!(buf, "{}", param.display(db));
+ res.push_generic_param(&buf);
+ }
+ if let hir::GenericDef::Trait(tr) = generics_def {
+ add_assoc_type_bindings(db, &mut res, tr, arg_list); // `Item = …` only applies to traits
+ }
+ res.signature.push('>');
+
+ Some(res)
+}
+
+fn add_assoc_type_bindings(
+ db: &RootDatabase,
+ res: &mut SignatureHelp,
+ tr: Trait,
+ args: ast::GenericArgList,
+) {
+ if args.syntax().ancestors().find_map(ast::TypeBound::cast).is_none() {
+ // Assoc type bindings are only valid in type bound position.
+ return;
+ }
+
+ let present_bindings = args
+ .generic_args()
+ .filter_map(|arg| match arg {
+ ast::GenericArg::AssocTypeArg(arg) => arg.name_ref().map(|n| n.to_string()),
+ _ => None,
+ })
+ .collect::<BTreeSet<_>>(); // bindings the user has already written, sorted by name
+
+ let mut buf = String::new();
+ for binding in &present_bindings {
+ buf.clear();
+ format_to!(buf, "{} = …", binding);
+ res.push_generic_param(&buf);
+ }
+
+ for item in tr.items_with_supertraits(db) {
+ if let AssocItem::TypeAlias(ty) = item {
+ let name = ty.name(db).to_smol_str();
+ if !present_bindings.contains(&*name) { // only suggest bindings not already present
+ buf.clear();
+ format_to!(buf, "{} = …", name);
+ res.push_generic_param(&buf);
+ }
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use std::iter;
+
+ use expect_test::{expect, Expect};
+ use ide_db::base_db::{fixture::ChangeFixture, FilePosition};
+ use stdx::format_to;
+
+ use crate::RootDatabase;
+
+ /// Creates analysis from a multi-file fixture, returns positions marked with $0.
+ pub(crate) fn position(ra_fixture: &str) -> (RootDatabase, FilePosition) {
+ let change_fixture = ChangeFixture::parse(ra_fixture);
+ let mut database = RootDatabase::default();
+ database.apply_change(change_fixture.change);
+ let (file_id, range_or_offset) =
+ change_fixture.file_position.expect("expected a marker ($0)");
+ let offset = range_or_offset.expect_offset(); // signature help needs a point offset, not a range
+ (database, FilePosition { file_id, offset })
+ }
+
+ fn check(ra_fixture: &str, expect: Expect) {
+ let fixture = format!(
+ r#"
+//- minicore: sized, fn
+{ra_fixture}
+ "#
+ );
+ let (db, position) = position(&fixture);
+ let sig_help = crate::signature_help::signature_help(&db, position);
+ let actual = match sig_help {
+ Some(sig_help) => {
+ let mut rendered = String::new();
+ if let Some(docs) = &sig_help.doc {
+ format_to!(rendered, "{}\n------\n", docs.as_str());
+ }
+ format_to!(rendered, "{}\n", sig_help.signature);
+ let mut offset = 0; // column reached so far on the marker line
+ for (i, range) in sig_help.parameter_ranges().iter().enumerate() {
+ let is_active = sig_help.active_parameter == Some(i);
+
+ let start = u32::from(range.start());
+ let gap = start.checked_sub(offset).unwrap_or_else(|| {
+ panic!("parameter ranges out of order: {:?}", sig_help.parameter_ranges())
+ });
+ rendered.extend(iter::repeat(' ').take(gap as usize));
+ let param_text = &sig_help.signature[*range];
+ let width = param_text.chars().count(); // marker width counts chars, not bytes
+ let marker = if is_active { '^' } else { '-' };
+ rendered.extend(iter::repeat(marker).take(width));
+ offset += gap + u32::from(range.len());
+ }
+ if !sig_help.parameter_ranges().is_empty() {
+ format_to!(rendered, "\n");
+ }
+ rendered
+ }
+ None => String::new(),
+ };
+ expect.assert_eq(&actual);
+ }
+
+ #[test]
+ fn test_fn_signature_two_args() {
+ check(
+ r#"
+fn foo(x: u32, y: u32) -> u32 {x + y}
+fn bar() { foo($03, ); }
+"#,
+ expect![[r#"
+ fn foo(x: u32, y: u32) -> u32
+ ^^^^^^ ------
+ "#]],
+ );
+ check(
+ r#"
+fn foo(x: u32, y: u32) -> u32 {x + y}
+fn bar() { foo(3$0, ); }
+"#,
+ expect![[r#"
+ fn foo(x: u32, y: u32) -> u32
+ ^^^^^^ ------
+ "#]],
+ );
+ check(
+ r#"
+fn foo(x: u32, y: u32) -> u32 {x + y}
+fn bar() { foo(3,$0 ); }
+"#,
+ expect![[r#"
+ fn foo(x: u32, y: u32) -> u32
+ ------ ^^^^^^
+ "#]],
+ );
+ check(
+ r#"
+fn foo(x: u32, y: u32) -> u32 {x + y}
+fn bar() { foo(3, $0); }
+"#,
+ expect![[r#"
+ fn foo(x: u32, y: u32) -> u32
+ ------ ^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_two_args_empty() {
+ check(
+ r#"
+fn foo(x: u32, y: u32) -> u32 {x + y}
+fn bar() { foo($0); }
+"#,
+ expect![[r#"
+ fn foo(x: u32, y: u32) -> u32
+ ^^^^^^ ------
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_two_args_first_generics() {
+ check(
+ r#"
+fn foo<T, U: Copy + Display>(x: T, y: U) -> u32
+ where T: Copy + Display, U: Debug
+{ x + y }
+
+fn bar() { foo($03, ); }
+"#,
+ expect![[r#"
+ fn foo(x: i32, y: U) -> u32
+ ^^^^^^ ----
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_no_params() {
+ check(
+ r#"
+fn foo<T>() -> T where T: Copy + Display {}
+fn bar() { foo($0); }
+"#,
+ expect![[r#"
+ fn foo() -> T
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_for_impl() {
+ check(
+ r#"
+struct F;
+impl F { pub fn new() { } }
+fn bar() {
+ let _ : F = F::new($0);
+}
+"#,
+ expect![[r#"
+ fn new()
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_for_method_self() {
+ check(
+ r#"
+struct S;
+impl S { pub fn do_it(&self) {} }
+
+fn bar() {
+ let s: S = S;
+ s.do_it($0);
+}
+"#,
+ expect![[r#"
+ fn do_it(&self)
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_for_method_with_arg() {
+ check(
+ r#"
+struct S;
+impl S {
+ fn foo(&self, x: i32) {}
+}
+
+fn main() { S.foo($0); }
+"#,
+ expect![[r#"
+ fn foo(&self, x: i32)
+ ^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_for_generic_method() {
+ check(
+ r#"
+struct S<T>(T);
+impl<T> S<T> {
+ fn foo(&self, x: T) {}
+}
+
+fn main() { S(1u32).foo($0); }
+"#,
+ expect![[r#"
+ fn foo(&self, x: u32)
+ ^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_for_method_with_arg_as_assoc_fn() {
+ check(
+ r#"
+struct S;
+impl S {
+ fn foo(&self, x: i32) {}
+}
+
+fn main() { S::foo($0); }
+"#,
+ expect![[r#"
+ fn foo(self: &S, x: i32)
+ ^^^^^^^^ ------
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_with_docs_simple() {
+ check(
+ r#"
+/// test
+// non-doc-comment
+fn foo(j: u32) -> u32 {
+ j
+}
+
+fn bar() {
+ let _ = foo($0);
+}
+"#,
+ expect![[r#"
+ test
+ ------
+ fn foo(j: u32) -> u32
+ ^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_with_docs() {
+ check(
+ r#"
+/// Adds one to the number given.
+///
+/// # Examples
+///
+/// ```
+/// let five = 5;
+///
+/// assert_eq!(6, my_crate::add_one(5));
+/// ```
+pub fn add_one(x: i32) -> i32 {
+ x + 1
+}
+
+pub fn do() {
+ add_one($0
+}"#,
+ expect![[r##"
+ Adds one to the number given.
+
+ # Examples
+
+ ```
+ let five = 5;
+
+ assert_eq!(6, my_crate::add_one(5));
+ ```
+ ------
+ fn add_one(x: i32) -> i32
+ ^^^^^^
+ "##]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_with_docs_impl() {
+ check(
+ r#"
+struct addr;
+impl addr {
+ /// Adds one to the number given.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// let five = 5;
+ ///
+ /// assert_eq!(6, my_crate::add_one(5));
+ /// ```
+ pub fn add_one(x: i32) -> i32 {
+ x + 1
+ }
+}
+
+pub fn do_it() {
+ addr {};
+ addr::add_one($0);
+}
+"#,
+ expect![[r##"
+ Adds one to the number given.
+
+ # Examples
+
+ ```
+ let five = 5;
+
+ assert_eq!(6, my_crate::add_one(5));
+ ```
+ ------
+ fn add_one(x: i32) -> i32
+ ^^^^^^
+ "##]],
+ );
+ }
+
+ #[test]
+ fn test_fn_signature_with_docs_from_actix() {
+ check(
+ r#"
+trait Actor {
+ /// Actor execution context type
+ type Context;
+}
+trait WriteHandler<E>
+where
+ Self: Actor
+{
+ /// Method is called when writer finishes.
+ ///
+ /// By default this method stops actor's `Context`.
+ fn finished(&mut self, ctx: &mut Self::Context) {}
+}
+
+fn foo(mut r: impl WriteHandler<()>) {
+ r.finished($0);
+}
+"#,
+ expect![[r#"
+ Method is called when writer finishes.
+
+ By default this method stops actor's `Context`.
+ ------
+ fn finished(&mut self, ctx: &mut <impl WriteHandler<()> as Actor>::Context)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn call_info_bad_offset() {
+ check(
+ r#"
+fn foo(x: u32, y: u32) -> u32 {x + y}
+fn bar() { foo $0 (3, ); }
+"#,
+ expect![[""]],
+ );
+ }
+
+ #[test]
+ fn outside_of_arg_list() {
+ check(
+ r#"
+fn foo(a: u8) {}
+fn f() {
+ foo(123)$0
+}
+"#,
+ expect![[]],
+ );
+ check(
+ r#"
+fn foo<T>(a: u8) {}
+fn f() {
+ foo::<u32>$0()
+}
+"#,
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_nested_method_in_lambda() {
+ check(
+ r#"
+struct Foo;
+impl Foo { fn bar(&self, _: u32) { } }
+
+fn bar(_: u32) { }
+
+fn main() {
+ let foo = Foo;
+ std::thread::spawn(move || foo.bar($0));
+}
+"#,
+ expect![[r#"
+ fn bar(&self, _: u32)
+ ^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn works_for_tuple_structs() {
+ check(
+ r#"
+/// A cool tuple struct
+struct S(u32, i32);
+fn main() {
+ let s = S(0, $0);
+}
+"#,
+ expect![[r#"
+ A cool tuple struct
+ ------
+ struct S(u32, i32)
+ --- ^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn generic_struct() {
+ check(
+ r#"
+struct S<T>(T);
+fn main() {
+ let s = S($0);
+}
+"#,
+ expect![[r#"
+ struct S({unknown})
+ ^^^^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn works_for_enum_variants() {
+ check(
+ r#"
+enum E {
+ /// A Variant
+ A(i32),
+ /// Another
+ B,
+ /// And C
+ C { a: i32, b: i32 }
+}
+
+fn main() {
+ let a = E::A($0);
+}
+"#,
+ expect![[r#"
+ A Variant
+ ------
+ enum E::A(i32)
+ ^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn cant_call_struct_record() {
+ check(
+ r#"
+struct S { x: u32, y: i32 }
+fn main() {
+ let s = S($0);
+}
+"#,
+ expect![[""]],
+ );
+ }
+
+ #[test]
+ fn cant_call_enum_record() {
+ check(
+ r#"
+enum E {
+ /// A Variant
+ A(i32),
+ /// Another
+ B,
+ /// And C
+ C { a: i32, b: i32 }
+}
+
+fn main() {
+ let a = E::C($0);
+}
+"#,
+ expect![[""]],
+ );
+ }
+
+ #[test]
+ fn fn_signature_for_call_in_macro() {
+ check(
+ r#"
+macro_rules! id { ($($tt:tt)*) => { $($tt)* } }
+fn foo() { }
+id! {
+ fn bar() { foo($0); }
+}
+"#,
+ expect![[r#"
+ fn foo()
+ "#]],
+ );
+ }
+
+ #[test]
+ fn call_info_for_lambdas() {
+ check(
+ r#"
+struct S;
+fn foo(s: S) -> i32 { 92 }
+fn main() {
+ (|s| foo(s))($0)
+}
+ "#,
+ expect![[r#"
+ (s: S) -> i32
+ ^^^^
+ "#]],
+ )
+ }
+
+ #[test]
+ fn call_info_for_fn_ptr() {
+ check(
+ r#"
+fn main(f: fn(i32, f64) -> char) {
+ f(0, $0)
+}
+ "#,
+ expect![[r#"
+ (i32, f64) -> char
+ --- ^^^
+ "#]],
+ )
+ }
+
+ #[test]
+ fn call_info_for_unclosed_call() {
+ check(
+ r#"
+fn foo(foo: u32, bar: u32) {}
+fn main() {
+ foo($0
+}"#,
+ expect![[r#"
+ fn foo(foo: u32, bar: u32)
+ ^^^^^^^^ --------
+ "#]],
+ );
+ // check with surrounding space
+ check(
+ r#"
+fn foo(foo: u32, bar: u32) {}
+fn main() {
+ foo( $0
+}"#,
+ expect![[r#"
+ fn foo(foo: u32, bar: u32)
+ ^^^^^^^^ --------
+ "#]],
+ )
+ }
+
+ #[test]
+ fn test_multiline_argument() {
+ check(
+ r#"
+fn callee(a: u8, b: u8) {}
+fn main() {
+ callee(match 0 {
+ 0 => 1,$0
+ })
+}"#,
+ expect![[r#""#]],
+ );
+ check(
+ r#"
+fn callee(a: u8, b: u8) {}
+fn main() {
+ callee(match 0 {
+ 0 => 1,
+ },$0)
+}"#,
+ expect![[r#"
+ fn callee(a: u8, b: u8)
+ ----- ^^^^^
+ "#]],
+ );
+ check(
+ r#"
+fn callee(a: u8, b: u8) {}
+fn main() {
+ callee($0match 0 {
+ 0 => 1,
+ })
+}"#,
+ expect![[r#"
+ fn callee(a: u8, b: u8)
+ ^^^^^ -----
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_generics_simple() {
+ check(
+ r#"
+/// Option docs.
+enum Option<T> {
+ Some(T),
+ None,
+}
+
+fn f() {
+ let opt: Option<$0
+}
+ "#,
+ expect![[r#"
+ Option docs.
+ ------
+ enum Option<T>
+ ^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_generics_on_variant() {
+ check(
+ r#"
+/// Option docs.
+enum Option<T> {
+ /// Some docs.
+ Some(T),
+ /// None docs.
+ None,
+}
+
+use Option::*;
+
+fn f() {
+ None::<$0
+}
+ "#,
+ expect![[r#"
+ None docs.
+ ------
+ enum Option<T>
+ ^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_lots_of_generics() {
+ check(
+ r#"
+trait Tr<T> {}
+
+struct S<T>(T);
+
+impl<T> S<T> {
+ fn f<G, H>(g: G, h: impl Tr<G>) where G: Tr<()> {}
+}
+
+fn f() {
+ S::<u8>::f::<(), $0
+}
+ "#,
+ expect![[r#"
+ fn f<G: Tr<()>, H>
+ --------- ^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_generics_in_trait_ufcs() {
+ check(
+ r#"
+trait Tr {
+ fn f<T: Tr, U>() {}
+}
+
+struct S;
+
+impl Tr for S {}
+
+fn f() {
+ <S as Tr>::f::<$0
+}
+ "#,
+ expect![[r#"
+ fn f<T: Tr, U>
+ ^^^^^ -
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_generics_in_method_call() {
+ check(
+ r#"
+struct S;
+
+impl S {
+ fn f<T>(&self) {}
+}
+
+fn f() {
+ S.f::<$0
+}
+ "#,
+ expect![[r#"
+ fn f<T>
+ ^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_generic_param_in_method_call() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ fn test<V>(&mut self, val: V) {}
+}
+fn sup() {
+ Foo.test($0)
+}
+"#,
+ expect![[r#"
+ fn test(&mut self, val: V)
+ ^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_generic_kinds() {
+ check(
+ r#"
+fn callee<'a, const A: u8, T, const C: u8>() {}
+
+fn f() {
+ callee::<'static, $0
+}
+ "#,
+ expect![[r#"
+ fn callee<'a, const A: u8, T, const C: u8>
+ -- ^^^^^^^^^^^ - -----------
+ "#]],
+ );
+ check(
+ r#"
+fn callee<'a, const A: u8, T, const C: u8>() {}
+
+fn f() {
+ callee::<NON_LIFETIME$0
+}
+ "#,
+ expect![[r#"
+ fn callee<'a, const A: u8, T, const C: u8>
+ -- ^^^^^^^^^^^ - -----------
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_trait_assoc_types() {
+ check(
+ r#"
+trait Trait<'a, T> {
+ type Assoc;
+}
+fn f() -> impl Trait<(), $0
+ "#,
+ expect![[r#"
+ trait Trait<'a, T, Assoc = …>
+ -- - ^^^^^^^^^
+ "#]],
+ );
+ check(
+ r#"
+trait Iterator {
+ type Item;
+}
+fn f() -> impl Iterator<$0
+ "#,
+ expect![[r#"
+ trait Iterator<Item = …>
+ ^^^^^^^^
+ "#]],
+ );
+ check(
+ r#"
+trait Iterator {
+ type Item;
+}
+fn f() -> impl Iterator<Item = $0
+ "#,
+ expect![[r#"
+ trait Iterator<Item = …>
+ ^^^^^^^^
+ "#]],
+ );
+ check(
+ r#"
+trait Tr {
+ type A;
+ type B;
+}
+fn f() -> impl Tr<$0
+ "#,
+ expect![[r#"
+ trait Tr<A = …, B = …>
+ ^^^^^ -----
+ "#]],
+ );
+ check(
+ r#"
+trait Tr {
+ type A;
+ type B;
+}
+fn f() -> impl Tr<B$0
+ "#,
+ expect![[r#"
+ trait Tr<A = …, B = …>
+ ^^^^^ -----
+ "#]],
+ );
+ check(
+ r#"
+trait Tr {
+ type A;
+ type B;
+}
+fn f() -> impl Tr<B = $0
+ "#,
+ expect![[r#"
+ trait Tr<B = …, A = …>
+ ^^^^^ -----
+ "#]],
+ );
+ check(
+ r#"
+trait Tr {
+ type A;
+ type B;
+}
+fn f() -> impl Tr<B = (), $0
+ "#,
+ expect![[r#"
+ trait Tr<B = …, A = …>
+ ----- ^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_supertrait_assoc() {
+ check(
+ r#"
+trait Super {
+ type SuperTy;
+}
+trait Sub: Super + Super {
+ type SubTy;
+}
+fn f() -> impl Sub<$0
+ "#,
+ expect![[r#"
+ trait Sub<SubTy = …, SuperTy = …>
+ ^^^^^^^^^ -----------
+ "#]],
+ );
+ }
+
+ #[test]
+ fn no_assoc_types_outside_type_bounds() {
+ check(
+ r#"
+trait Tr<T> {
+ type Assoc;
+}
+
+impl Tr<$0
+ "#,
+ expect![[r#"
+ trait Tr<T>
+ ^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn impl_trait() {
+ // FIXME: Substitute type vars in impl trait (`U` -> `i8`)
+ check(
+ r#"
+trait Trait<T> {}
+struct Wrap<T>(T);
+fn foo<U>(x: Wrap<impl Trait<U>>) {}
+fn f() {
+ foo::<i8>($0)
+}
+"#,
+ expect![[r#"
+ fn foo(x: Wrap<impl Trait<U>>)
+ ^^^^^^^^^^^^^^^^^^^^^^
+ "#]],
+ );
+ }
+
+ #[test]
+ fn fully_qualified_syntax() {
+ check(
+ r#"
+fn f() {
+ trait A { fn foo(&self, other: Self); }
+ A::foo(&self$0, other);
+}
+"#,
+ expect![[r#"
+ fn foo(self: &Self, other: Self)
+ ^^^^^^^^^^^ -----------
+ "#]],
+ );
+ }
+
+ #[test]
+ fn help_for_generic_call() {
+ check(
+ r#"
+fn f<F: FnOnce(u8, u16) -> i32>(f: F) {
+ f($0)
+}
+"#,
+ expect![[r#"
+ (u8, u16) -> i32
+ ^^ ---
+ "#]],
+ );
++ check(
++ r#"
++fn f<T, F: FnOnce(&T, u16) -> &T>(f: F) {
++ f($0)
++}
++"#,
++ expect![[r#"
++ (&T, u16) -> &T
++ ^^ ---
++ "#]],
++ );
++ }
++
++ #[test]
++ fn regression_13579() {
++ check(
++ r#"
++fn f() {
++ take(2)($0);
++}
++
++fn take<C, Error>(
++ count: C
++) -> impl Fn() -> C {
++ move || count
++}
++"#,
++ expect![[r#"
++ () -> i32
++ "#]],
++ );
+ }
+}
--- /dev/null
- moniker::{crate_for_file, def_to_moniker, MonikerResult},
+//! This module provides `StaticIndex` which is used for powering
+//! read-only code browsers and emitting LSIF
+
+use std::collections::HashMap;
+
+use hir::{db::HirDatabase, Crate, Module, Semantics};
+use ide_db::{
+ base_db::{FileId, FileRange, SourceDatabaseExt},
+ defs::{Definition, IdentClass},
+ FxHashSet, RootDatabase,
+};
+use syntax::{AstNode, SyntaxKind::*, SyntaxToken, TextRange, T};
+
+use crate::{
+ hover::hover_for_definition,
- let current_crate = crate_for_file(self.db, file_id);
++ moniker::{def_to_moniker, MonikerResult},
++ parent_module::crates_for,
+ Analysis, Fold, HoverConfig, HoverDocFormat, HoverResult, InlayHint, InlayHintsConfig,
+ TryToNav,
+};
+
+/// A static representation of fully analyzed source code.
+///
+/// The intended use-case is powering read-only code browsers and emitting LSIF
+#[derive(Debug)]
+pub struct StaticIndex<'a> {
+ pub files: Vec<StaticIndexedFile>,
+ pub tokens: TokenStore,
+ analysis: &'a Analysis,
+ db: &'a RootDatabase,
+ def_map: HashMap<Definition, TokenId>,
+}
+
+#[derive(Debug)]
+pub struct ReferenceData {
+ pub range: FileRange,
+ pub is_definition: bool,
+}
+
+#[derive(Debug)]
+pub struct TokenStaticData {
+ pub hover: Option<HoverResult>,
+ pub definition: Option<FileRange>,
+ pub references: Vec<ReferenceData>,
+ pub moniker: Option<MonikerResult>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TokenId(usize);
+
+impl TokenId {
+ pub fn raw(self) -> usize {
+ self.0
+ }
+}
+
+#[derive(Default, Debug)]
+pub struct TokenStore(Vec<TokenStaticData>);
+
+impl TokenStore {
+ pub fn insert(&mut self, data: TokenStaticData) -> TokenId {
+ let id = TokenId(self.0.len());
+ self.0.push(data);
+ id
+ }
+
+ pub fn get_mut(&mut self, id: TokenId) -> Option<&mut TokenStaticData> {
+ self.0.get_mut(id.0)
+ }
+
+ pub fn get(&self, id: TokenId) -> Option<&TokenStaticData> {
+ self.0.get(id.0)
+ }
+
+ pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> {
+ self.0.into_iter().enumerate().map(|(i, x)| (TokenId(i), x))
+ }
+}
+
+#[derive(Debug)]
+pub struct StaticIndexedFile {
+ pub file_id: FileId,
+ pub folds: Vec<Fold>,
+ pub inlay_hints: Vec<InlayHint>,
+ pub tokens: Vec<(TextRange, TokenId)>,
+}
+
+fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
+ let mut worklist: Vec<_> =
+ Crate::all(db).into_iter().map(|krate| krate.root_module(db)).collect();
+ let mut modules = Vec::new();
+
+ while let Some(module) = worklist.pop() {
+ modules.push(module);
+ worklist.extend(module.children(db));
+ }
+
+ modules
+}
+
+impl StaticIndex<'_> {
+ fn add_file(&mut self, file_id: FileId) {
- reborrow_hints: crate::ReborrowHints::Never,
++ let current_crate = crates_for(self.db, file_id).pop().map(Into::into);
+ let folds = self.analysis.folding_ranges(file_id).unwrap();
+ let inlay_hints = self
+ .analysis
+ .inlay_hints(
+ &InlayHintsConfig {
+ render_colons: true,
+ type_hints: true,
+ parameter_hints: true,
+ chaining_hints: true,
+ closure_return_type_hints: crate::ClosureReturnTypeHints::WithBlock,
+ lifetime_elision_hints: crate::LifetimeElisionHints::Never,
++ adjustment_hints: crate::AdjustmentHints::Never,
+ hide_named_constructor_hints: false,
+ hide_closure_initialization_hints: false,
+ param_names_for_lifetime_elision_hints: false,
+ binding_mode_hints: false,
+ max_length: Some(25),
+ closing_brace_hints_min_lines: Some(25),
+ },
+ file_id,
+ None,
+ )
+ .unwrap();
+ // hovers
+ let sema = hir::Semantics::new(self.db);
+ let tokens_or_nodes = sema.parse(file_id).syntax().clone();
+ let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|x| match x {
+ syntax::NodeOrToken::Node(_) => None,
+ syntax::NodeOrToken::Token(x) => Some(x),
+ });
+ let hover_config = HoverConfig {
+ links_in_hover: true,
+ documentation: Some(HoverDocFormat::Markdown),
+ keywords: true,
+ };
+ let tokens = tokens.filter(|token| {
+ matches!(
+ token.kind(),
+ IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self]
+ )
+ });
+ let mut result = StaticIndexedFile { file_id, inlay_hints, folds, tokens: vec![] };
+ for token in tokens {
+ let range = token.text_range();
+ let node = token.parent().unwrap();
+ let def = match get_definition(&sema, token.clone()) {
+ Some(x) => x,
+ None => continue,
+ };
+ let id = if let Some(x) = self.def_map.get(&def) {
+ *x
+ } else {
+ let x = self.tokens.insert(TokenStaticData {
+ hover: hover_for_definition(&sema, file_id, def, &node, &hover_config),
+ definition: def
+ .try_to_nav(self.db)
+ .map(|x| FileRange { file_id: x.file_id, range: x.focus_or_full_range() }),
+ references: vec![],
+ moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)),
+ });
+ self.def_map.insert(def, x);
+ x
+ };
+ let token = self.tokens.get_mut(id).unwrap();
+ token.references.push(ReferenceData {
+ range: FileRange { range, file_id },
+ is_definition: match def.try_to_nav(self.db) {
+ Some(x) => x.file_id == file_id && x.focus_or_full_range() == range,
+ None => false,
+ },
+ });
+ result.tokens.push((range, id));
+ }
+ self.files.push(result);
+ }
+
+ pub fn compute(analysis: &Analysis) -> StaticIndex<'_> {
+ let db = &*analysis.db;
+ let work = all_modules(db).into_iter().filter(|module| {
+ let file_id = module.definition_source(db).file_id.original_file(db);
+ let source_root = db.file_source_root(file_id);
+ let source_root = db.source_root(source_root);
+ !source_root.is_library
+ });
+ let mut this = StaticIndex {
+ files: vec![],
+ tokens: Default::default(),
+ analysis,
+ db,
+ def_map: Default::default(),
+ };
+ let mut visited_files = FxHashSet::default();
+ for module in work {
+ let file_id = module.definition_source(db).file_id.original_file(db);
+ if visited_files.contains(&file_id) {
+ continue;
+ }
+ this.add_file(file_id);
+ // mark the file
+ visited_files.insert(file_id);
+ }
+ this
+ }
+}
+
+fn get_definition(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option<Definition> {
+ for token in sema.descend_into_macros(token) {
+ let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
+ if let Some(&[x]) = def.as_deref() {
+ return Some(x);
+ }
+ }
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{fixture, StaticIndex};
+ use ide_db::base_db::FileRange;
+ use std::collections::HashSet;
+ use syntax::TextSize;
+
+ fn check_all_ranges(ra_fixture: &str) {
+ let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
+ let s = StaticIndex::compute(&analysis);
+ let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
+ for f in s.files {
+ for (range, _) in f.tokens {
+ let x = FileRange { file_id: f.file_id, range };
+ if !range_set.contains(&x) {
+ panic!("additional range {:?}", x);
+ }
+ range_set.remove(&x);
+ }
+ }
+ if !range_set.is_empty() {
+ panic!("unfound ranges {:?}", range_set);
+ }
+ }
+
+ fn check_definitions(ra_fixture: &str) {
+ let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
+ let s = StaticIndex::compute(&analysis);
+ let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
+ for (_, t) in s.tokens.iter() {
+ if let Some(x) = t.definition {
+ if x.range.start() == TextSize::from(0) {
+ // ignore definitions that are whole of file
+ continue;
+ }
+ if !range_set.contains(&x) {
+ panic!("additional definition {:?}", x);
+ }
+ range_set.remove(&x);
+ }
+ }
+ if !range_set.is_empty() {
+ panic!("unfound definitions {:?}", range_set);
+ }
+ }
+
+ #[test]
+ fn struct_and_enum() {
+ check_all_ranges(
+ r#"
+struct Foo;
+ //^^^
+enum E { X(Foo) }
+ //^ ^ ^^^
+"#,
+ );
+ check_definitions(
+ r#"
+struct Foo;
+ //^^^
+enum E { X(Foo) }
+ //^ ^
+"#,
+ );
+ }
+
+ #[test]
+ fn multi_crate() {
+ check_definitions(
+ r#"
+//- /main.rs crate:main deps:foo
+
+
+use foo::func;
+
+fn main() {
+ //^^^^
+ func();
+}
+//- /foo/lib.rs crate:foo
+
+pub func() {
+
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn derives() {
+ check_all_ranges(
+ r#"
+//- minicore:derive
+#[rustc_builtin_macro]
+//^^^^^^^^^^^^^^^^^^^
+pub macro Copy {}
+ //^^^^
+#[derive(Copy)]
+//^^^^^^ ^^^^
+struct Hello(i32);
+ //^^^^^ ^^^
+"#,
+ );
+ }
+}
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "limit"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[features]
+tracking = []
+default = ["tracking"]
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "mbe"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+rustc-hash = "1.1.0"
+smallvec = "1.10.0"
+tracing = "0.1.35"
+
+syntax = { path = "../syntax", version = "0.0.0" }
+parser = { path = "../parser", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
+
+[dev-dependencies]
+test-utils = { path = "../test-utils" }
--- /dev/null
- let mut c = Convertor::new(node, global_offset, existing_token_map, next_id, replace, append);
+//! Conversions between [`SyntaxNode`] and [`tt::TokenTree`].
+
+use rustc_hash::FxHashMap;
+use stdx::{always, non_empty_vec::NonEmptyVec};
+use syntax::{
+ ast::{self, make::tokens::doc_comment},
+ AstToken, Parse, PreorderWithTokens, SmolStr, SyntaxElement, SyntaxKind,
+ SyntaxKind::*,
+ SyntaxNode, SyntaxToken, SyntaxTreeBuilder, TextRange, TextSize, WalkEvent, T,
+};
+use tt::buffer::{Cursor, TokenBuffer};
+
+use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, TokenMap};
+
++#[cfg(test)]
++mod tests;
++
+/// Convert the syntax node to a `TokenTree` (what a macro
+/// will consume).
+pub fn syntax_node_to_token_tree(node: &SyntaxNode) -> (tt::Subtree, TokenMap) {
+ let (subtree, token_map, _) = syntax_node_to_token_tree_with_modifications(
+ node,
+ Default::default(),
+ 0,
+ Default::default(),
+ Default::default(),
+ );
+ (subtree, token_map)
+}
+
+/// Convert the syntax node to a `TokenTree` (what a macro will consume)
+/// with the censored range excluded.
+pub fn syntax_node_to_token_tree_with_modifications(
+ node: &SyntaxNode,
+ existing_token_map: TokenMap,
+ next_id: u32,
+ replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+) -> (tt::Subtree, TokenMap, u32) {
+ let global_offset = node.text_range().start();
- let mut conv = RawConvertor {
++ let mut c = Converter::new(node, global_offset, existing_token_map, next_id, replace, append);
+ let subtree = convert_tokens(&mut c);
+ c.id_alloc.map.shrink_to_fit();
+ always!(c.replace.is_empty(), "replace: {:?}", c.replace);
+ always!(c.append.is_empty(), "append: {:?}", c.append);
+ (subtree, c.id_alloc.map, c.id_alloc.next_id)
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub struct SyntheticTokenId(pub u32);
+
+#[derive(Debug, Clone)]
+pub struct SyntheticToken {
+ pub kind: SyntaxKind,
+ pub text: SmolStr,
+ pub range: TextRange,
+ pub id: SyntheticTokenId,
+}
+
+// The following items are what a `rustc` macro can be parsed into:
+// link: https://github.com/rust-lang/rust/blob/9ebf47851a357faa4cd97f4b1dc7835f6376e639/src/libsyntax/ext/expand.rs#L141
+// * Expr(P<ast::Expr>) -> token_tree_to_expr
+// * Pat(P<ast::Pat>) -> token_tree_to_pat
+// * Ty(P<ast::Ty>) -> token_tree_to_ty
+// * Stmts(SmallVec<[ast::Stmt; 1]>) -> token_tree_to_stmts
+// * Items(SmallVec<[P<ast::Item>; 1]>) -> token_tree_to_items
+//
+// * TraitItems(SmallVec<[ast::TraitItem; 1]>)
+// * AssocItems(SmallVec<[ast::AssocItem; 1]>)
+// * ForeignItems(SmallVec<[ast::ForeignItem; 1]>
+
+pub fn token_tree_to_syntax_node(
+ tt: &tt::Subtree,
+ entry_point: parser::TopEntryPoint,
+) -> (Parse<SyntaxNode>, TokenMap) {
+ let buffer = match tt {
+ tt::Subtree { delimiter: None, token_trees } => {
+ TokenBuffer::from_tokens(token_trees.as_slice())
+ }
+ _ => TokenBuffer::from_subtree(tt),
+ };
+ let parser_input = to_parser_input(&buffer);
+ let parser_output = entry_point.parse(&parser_input);
+ let mut tree_sink = TtTreeSink::new(buffer.begin());
+ for event in parser_output.iter() {
+ match event {
+ parser::Step::Token { kind, n_input_tokens: n_raw_tokens } => {
+ tree_sink.token(kind, n_raw_tokens)
+ }
+ parser::Step::Enter { kind } => tree_sink.start_node(kind),
+ parser::Step::Exit => tree_sink.finish_node(),
+ parser::Step::Error { msg } => tree_sink.error(msg.to_string()),
+ }
+ }
+ let (parse, range_map) = tree_sink.finish();
+ (parse, range_map)
+}
+
+/// Convert a string to a `TokenTree`
+pub fn parse_to_token_tree(text: &str) -> Option<(tt::Subtree, TokenMap)> {
+ let lexed = parser::LexedStr::new(text);
+ if lexed.errors().next().is_some() {
+ return None;
+ }
+
- fn convert_tokens<C: TokenConvertor>(conv: &mut C) -> tt::Subtree {
++ let mut conv = RawConverter {
+ lexed,
+ pos: 0,
+ id_alloc: TokenIdAlloc {
+ map: Default::default(),
+ global_offset: TextSize::default(),
+ next_id: 0,
+ },
+ };
+
+ let subtree = convert_tokens(&mut conv);
+ Some((subtree, conv.id_alloc.map))
+}
+
+/// Split token tree with separate expr: $($e:expr)SEP*
+pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
+ if tt.token_trees.is_empty() {
+ return Vec::new();
+ }
+
+ let mut iter = TtIter::new(tt);
+ let mut res = Vec::new();
+
+ while iter.peek_n(0).is_some() {
+ let expanded = iter.expect_fragment(parser::PrefixEntryPoint::Expr);
+
+ res.push(match expanded.value {
+ None => break,
+ Some(tt @ tt::TokenTree::Leaf(_)) => {
+ tt::Subtree { delimiter: None, token_trees: vec![tt] }
+ }
+ Some(tt::TokenTree::Subtree(tt)) => tt,
+ });
+
+ let mut fork = iter.clone();
+ if fork.expect_char(sep).is_err() {
+ break;
+ }
+ iter = fork;
+ }
+
+ if iter.peek_n(0).is_some() {
+ res.push(tt::Subtree { delimiter: None, token_trees: iter.into_iter().cloned().collect() });
+ }
+
+ res
+}
+
- Some(kind) if !kind.is_trivia() => tt::Spacing::Joint,
++fn convert_tokens<C: TokenConverter>(conv: &mut C) -> tt::Subtree {
+ struct StackEntry {
+ subtree: tt::Subtree,
+ idx: usize,
+ open_range: TextRange,
+ }
+
+ let entry = StackEntry {
+ subtree: tt::Subtree { delimiter: None, ..Default::default() },
+ // never used (delimiter is `None`)
+ idx: !0,
+ open_range: TextRange::empty(TextSize::of('.')),
+ };
+ let mut stack = NonEmptyVec::new(entry);
+
+ loop {
+ let StackEntry { subtree, .. } = stack.last_mut();
+ let result = &mut subtree.token_trees;
+ let (token, range) = match conv.bump() {
+ Some(it) => it,
+ None => break,
+ };
+ let synth_id = token.synthetic_id(conv);
+
+ let kind = token.kind(conv);
+ if kind == COMMENT {
+ if let Some(tokens) = conv.convert_doc_comment(&token) {
+ // FIXME: There has to be a better way to do this
+ // Add the comments token id to the converted doc string
+ let id = conv.id_alloc().alloc(range, synth_id);
+ result.extend(tokens.into_iter().map(|mut tt| {
+ if let tt::TokenTree::Subtree(sub) = &mut tt {
+ if let Some(tt::TokenTree::Leaf(tt::Leaf::Literal(lit))) =
+ sub.token_trees.get_mut(2)
+ {
+ lit.id = id
+ }
+ }
+ tt
+ }));
+ }
+ continue;
+ }
+ let tt = if kind.is_punct() && kind != UNDERSCORE {
+ if synth_id.is_none() {
+ assert_eq!(range.len(), TextSize::of('.'));
+ }
+
+ if let Some(delim) = subtree.delimiter {
+ let expected = match delim.kind {
+ tt::DelimiterKind::Parenthesis => T![')'],
+ tt::DelimiterKind::Brace => T!['}'],
+ tt::DelimiterKind::Bracket => T![']'],
+ };
+
+ if kind == expected {
+ if let Some(entry) = stack.pop() {
+ conv.id_alloc().close_delim(entry.idx, Some(range));
+ stack.last_mut().subtree.token_trees.push(entry.subtree.into());
+ }
+ continue;
+ }
+ }
+
+ let delim = match kind {
+ T!['('] => Some(tt::DelimiterKind::Parenthesis),
+ T!['{'] => Some(tt::DelimiterKind::Brace),
+ T!['['] => Some(tt::DelimiterKind::Bracket),
+ _ => None,
+ };
+
+ if let Some(kind) = delim {
+ let mut subtree = tt::Subtree::default();
+ let (id, idx) = conv.id_alloc().open_delim(range, synth_id);
+ subtree.delimiter = Some(tt::Delimiter { id, kind });
+ stack.push(StackEntry { subtree, idx, open_range: range });
+ continue;
+ }
+
+ let spacing = match conv.peek().map(|next| next.kind(conv)) {
- /// A raw token (straight from lexer) convertor
- struct RawConvertor<'a> {
++ Some(kind) if is_single_token_op(kind) => tt::Spacing::Joint,
+ _ => tt::Spacing::Alone,
+ };
+ let char = match token.to_char(conv) {
+ Some(c) => c,
+ None => {
+ panic!("Token from lexer must be single char: token = {:#?}", token);
+ }
+ };
+ tt::Leaf::from(tt::Punct { char, spacing, id: conv.id_alloc().alloc(range, synth_id) })
+ .into()
+ } else {
+ macro_rules! make_leaf {
+ ($i:ident) => {
+ tt::$i { id: conv.id_alloc().alloc(range, synth_id), text: token.to_text(conv) }
+ .into()
+ };
+ }
+ let leaf: tt::Leaf = match kind {
+ T![true] | T![false] => make_leaf!(Ident),
+ IDENT => make_leaf!(Ident),
+ UNDERSCORE => make_leaf!(Ident),
+ k if k.is_keyword() => make_leaf!(Ident),
+ k if k.is_literal() => make_leaf!(Literal),
+ LIFETIME_IDENT => {
+ let char_unit = TextSize::of('\'');
+ let r = TextRange::at(range.start(), char_unit);
+ let apostrophe = tt::Leaf::from(tt::Punct {
+ char: '\'',
+ spacing: tt::Spacing::Joint,
+ id: conv.id_alloc().alloc(r, synth_id),
+ });
+ result.push(apostrophe.into());
+
+ let r = TextRange::at(range.start() + char_unit, range.len() - char_unit);
+ let ident = tt::Leaf::from(tt::Ident {
+ text: SmolStr::new(&token.to_text(conv)[1..]),
+ id: conv.id_alloc().alloc(r, synth_id),
+ });
+ result.push(ident.into());
+ continue;
+ }
+ _ => continue,
+ };
+
+ leaf.into()
+ };
+ result.push(tt);
+ }
+
+ // If we get here, we've consumed all input tokens.
+ // We might have more than one subtree in the stack, if the delimiters are improperly balanced.
+ // Merge them so we're left with one.
+ while let Some(entry) = stack.pop() {
+ let parent = stack.last_mut();
+
+ conv.id_alloc().close_delim(entry.idx, None);
+ let leaf: tt::Leaf = tt::Punct {
+ id: conv.id_alloc().alloc(entry.open_range, None),
+ char: match entry.subtree.delimiter.unwrap().kind {
+ tt::DelimiterKind::Parenthesis => '(',
+ tt::DelimiterKind::Brace => '{',
+ tt::DelimiterKind::Bracket => '[',
+ },
+ spacing: tt::Spacing::Alone,
+ }
+ .into();
+ parent.subtree.token_trees.push(leaf.into());
+ parent.subtree.token_trees.extend(entry.subtree.token_trees);
+ }
+
+ let subtree = stack.into_last().subtree;
+ if let [tt::TokenTree::Subtree(first)] = &*subtree.token_trees {
+ first.clone()
+ } else {
+ subtree
+ }
+}
+
++fn is_single_token_op(kind: SyntaxKind) -> bool {
++ matches!(
++ kind,
++ EQ | L_ANGLE
++ | R_ANGLE
++ | BANG
++ | AMP
++ | PIPE
++ | TILDE
++ | AT
++ | DOT
++ | COMMA
++ | SEMICOLON
++ | COLON
++ | POUND
++ | DOLLAR
++ | QUESTION
++ | PLUS
++ | MINUS
++ | STAR
++ | SLASH
++ | PERCENT
++ | CARET
++ // LIFETIME_IDENT will be split into a sequence of `'` (a single quote) and an
++ // identifier.
++ | LIFETIME_IDENT
++ )
++}
++
+/// Returns the textual content of a doc comment block as a quoted string
+/// That is, strips leading `///` (or `/**`, etc)
+/// and strips the ending `*/`
+/// And then quote the string, which is needed to convert to `tt::Literal`
+fn doc_comment_text(comment: &ast::Comment) -> SmolStr {
+ let prefix_len = comment.prefix().len();
+ let mut text = &comment.text()[prefix_len..];
+
+ // Remove ending "*/"
+ if comment.kind().shape == ast::CommentShape::Block {
+ text = &text[0..text.len() - 2];
+ }
+
+ // Quote the string
+ // Note that `tt::Literal` expect an escaped string
+ let text = format!("\"{}\"", text.escape_debug());
+ text.into()
+}
+
+fn convert_doc_comment(token: &syntax::SyntaxToken) -> Option<Vec<tt::TokenTree>> {
+ cov_mark::hit!(test_meta_doc_comments);
+ let comment = ast::Comment::cast(token.clone())?;
+ let doc = comment.kind().doc?;
+
+ // Make `doc="\" Comments\""
+ let meta_tkns = vec![mk_ident("doc"), mk_punct('='), mk_doc_literal(&comment)];
+
+ // Make `#![]`
+ let mut token_trees = Vec::with_capacity(3);
+ token_trees.push(mk_punct('#'));
+ if let ast::CommentPlacement::Inner = doc {
+ token_trees.push(mk_punct('!'));
+ }
+ token_trees.push(tt::TokenTree::from(tt::Subtree {
+ delimiter: Some(tt::Delimiter {
+ kind: tt::DelimiterKind::Bracket,
+ id: tt::TokenId::unspecified(),
+ }),
+ token_trees: meta_tkns,
+ }));
+
+ return Some(token_trees);
+
+ // Helper functions
+ fn mk_ident(s: &str) -> tt::TokenTree {
+ tt::TokenTree::from(tt::Leaf::from(tt::Ident {
+ text: s.into(),
+ id: tt::TokenId::unspecified(),
+ }))
+ }
+
+ fn mk_punct(c: char) -> tt::TokenTree {
+ tt::TokenTree::from(tt::Leaf::from(tt::Punct {
+ char: c,
+ spacing: tt::Spacing::Alone,
+ id: tt::TokenId::unspecified(),
+ }))
+ }
+
+ fn mk_doc_literal(comment: &ast::Comment) -> tt::TokenTree {
+ let lit = tt::Literal { text: doc_comment_text(comment), id: tt::TokenId::unspecified() };
+
+ tt::TokenTree::from(tt::Leaf::from(lit))
+ }
+}
+
+struct TokenIdAlloc {
+ map: TokenMap,
+ global_offset: TextSize,
+ next_id: u32,
+}
+
+impl TokenIdAlloc {
+ fn alloc(
+ &mut self,
+ absolute_range: TextRange,
+ synthetic_id: Option<SyntheticTokenId>,
+ ) -> tt::TokenId {
+ let relative_range = absolute_range - self.global_offset;
+ let token_id = tt::TokenId(self.next_id);
+ self.next_id += 1;
+ self.map.insert(token_id, relative_range);
+ if let Some(id) = synthetic_id {
+ self.map.insert_synthetic(token_id, id);
+ }
+ token_id
+ }
+
+ fn open_delim(
+ &mut self,
+ open_abs_range: TextRange,
+ synthetic_id: Option<SyntheticTokenId>,
+ ) -> (tt::TokenId, usize) {
+ let token_id = tt::TokenId(self.next_id);
+ self.next_id += 1;
+ let idx = self.map.insert_delim(
+ token_id,
+ open_abs_range - self.global_offset,
+ open_abs_range - self.global_offset,
+ );
+ if let Some(id) = synthetic_id {
+ self.map.insert_synthetic(token_id, id);
+ }
+ (token_id, idx)
+ }
+
+ fn close_delim(&mut self, idx: usize, close_abs_range: Option<TextRange>) {
+ match close_abs_range {
+ None => {
+ self.map.remove_delim(idx);
+ }
+ Some(close) => {
+ self.map.update_close_delim(idx, close - self.global_offset);
+ }
+ }
+ }
+}
+
- trait TokenConvertor: Sized {
++/// A raw token (straight from lexer) converter
++struct RawConverter<'a> {
+ lexed: parser::LexedStr<'a>,
+ pos: usize,
+ id_alloc: TokenIdAlloc,
+}
+
+trait SrcToken<Ctx>: std::fmt::Debug {
+ fn kind(&self, ctx: &Ctx) -> SyntaxKind;
+
+ fn to_char(&self, ctx: &Ctx) -> Option<char>;
+
+ fn to_text(&self, ctx: &Ctx) -> SmolStr;
+
+ fn synthetic_id(&self, ctx: &Ctx) -> Option<SyntheticTokenId>;
+}
+
- impl<'a> SrcToken<RawConvertor<'a>> for usize {
- fn kind(&self, ctx: &RawConvertor<'a>) -> SyntaxKind {
++trait TokenConverter: Sized {
+ type Token: SrcToken<Self>;
+
+ fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>>;
+
+ fn bump(&mut self) -> Option<(Self::Token, TextRange)>;
+
+ fn peek(&self) -> Option<Self::Token>;
+
+ fn id_alloc(&mut self) -> &mut TokenIdAlloc;
+}
+
- fn to_char(&self, ctx: &RawConvertor<'a>) -> Option<char> {
++impl<'a> SrcToken<RawConverter<'a>> for usize {
++ fn kind(&self, ctx: &RawConverter<'a>) -> SyntaxKind {
+ ctx.lexed.kind(*self)
+ }
+
- fn to_text(&self, ctx: &RawConvertor<'_>) -> SmolStr {
++ fn to_char(&self, ctx: &RawConverter<'a>) -> Option<char> {
+ ctx.lexed.text(*self).chars().next()
+ }
+
- fn synthetic_id(&self, _ctx: &RawConvertor<'a>) -> Option<SyntheticTokenId> {
++ fn to_text(&self, ctx: &RawConverter<'_>) -> SmolStr {
+ ctx.lexed.text(*self).into()
+ }
+
- impl<'a> TokenConvertor for RawConvertor<'a> {
++ fn synthetic_id(&self, _ctx: &RawConverter<'a>) -> Option<SyntheticTokenId> {
+ None
+ }
+}
+
- struct Convertor {
++impl<'a> TokenConverter for RawConverter<'a> {
+ type Token = usize;
+
+ fn convert_doc_comment(&self, &token: &usize) -> Option<Vec<tt::TokenTree>> {
+ let text = self.lexed.text(token);
+ convert_doc_comment(&doc_comment(text))
+ }
+
+ fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
+ if self.pos == self.lexed.len() {
+ return None;
+ }
+ let token = self.pos;
+ self.pos += 1;
+ let range = self.lexed.text_range(token);
+ let range = TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
+
+ Some((token, range))
+ }
+
+ fn peek(&self) -> Option<Self::Token> {
+ if self.pos == self.lexed.len() {
+ return None;
+ }
+ Some(self.pos)
+ }
+
+ fn id_alloc(&mut self) -> &mut TokenIdAlloc {
+ &mut self.id_alloc
+ }
+}
+
- impl Convertor {
++struct Converter {
+ id_alloc: TokenIdAlloc,
+ current: Option<SyntaxToken>,
+ current_synthetic: Vec<SyntheticToken>,
+ preorder: PreorderWithTokens,
+ replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ range: TextRange,
+ punct_offset: Option<(SyntaxToken, TextSize)>,
+}
+
- ) -> Convertor {
++impl Converter {
+ fn new(
+ node: &SyntaxNode,
+ global_offset: TextSize,
+ existing_token_map: TokenMap,
+ next_id: u32,
+ mut replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ mut append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
- Convertor {
++ ) -> Converter {
+ let range = node.text_range();
+ let mut preorder = node.preorder_with_tokens();
+ let (first, synthetic) = Self::next_token(&mut preorder, &mut replace, &mut append);
- impl SrcToken<Convertor> for SynToken {
- fn kind(&self, _ctx: &Convertor) -> SyntaxKind {
++ Converter {
+ id_alloc: { TokenIdAlloc { map: existing_token_map, global_offset, next_id } },
+ current: first,
+ current_synthetic: synthetic,
+ preorder,
+ range,
+ replace,
+ append,
+ punct_offset: None,
+ }
+ }
+
+ fn next_token(
+ preorder: &mut PreorderWithTokens,
+ replace: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ append: &mut FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ ) -> (Option<SyntaxToken>, Vec<SyntheticToken>) {
+ while let Some(ev) = preorder.next() {
+ let ele = match ev {
+ WalkEvent::Enter(ele) => ele,
+ WalkEvent::Leave(ele) => {
+ if let Some(mut v) = append.remove(&ele) {
+ if !v.is_empty() {
+ v.reverse();
+ return (None, v);
+ }
+ }
+ continue;
+ }
+ };
+ if let Some(mut v) = replace.remove(&ele) {
+ preorder.skip_subtree();
+ if !v.is_empty() {
+ v.reverse();
+ return (None, v);
+ }
+ }
+ match ele {
+ SyntaxElement::Token(t) => return (Some(t), Vec::new()),
+ _ => {}
+ }
+ }
+ (None, Vec::new())
+ }
+}
+
+#[derive(Debug)]
+enum SynToken {
+ Ordinary(SyntaxToken),
+ // FIXME is this supposed to be `Punct`?
+ Punch(SyntaxToken, TextSize),
+ Synthetic(SyntheticToken),
+}
+
+impl SynToken {
+ fn token(&self) -> Option<&SyntaxToken> {
+ match self {
+ SynToken::Ordinary(it) | SynToken::Punch(it, _) => Some(it),
+ SynToken::Synthetic(_) => None,
+ }
+ }
+}
+
- SynToken::Punch(token, _) => token.kind(),
++impl SrcToken<Converter> for SynToken {
++ fn kind(&self, ctx: &Converter) -> SyntaxKind {
+ match self {
+ SynToken::Ordinary(token) => token.kind(),
- fn to_char(&self, _ctx: &Convertor) -> Option<char> {
++ SynToken::Punch(..) => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(),
+ SynToken::Synthetic(token) => token.kind,
+ }
+ }
- fn to_text(&self, _ctx: &Convertor) -> SmolStr {
++ fn to_char(&self, _ctx: &Converter) -> Option<char> {
+ match self {
+ SynToken::Ordinary(_) => None,
+ SynToken::Punch(it, i) => it.text().chars().nth((*i).into()),
+ SynToken::Synthetic(token) if token.text.len() == 1 => token.text.chars().next(),
+ SynToken::Synthetic(_) => None,
+ }
+ }
- fn synthetic_id(&self, _ctx: &Convertor) -> Option<SyntheticTokenId> {
++ fn to_text(&self, _ctx: &Converter) -> SmolStr {
+ match self {
+ SynToken::Ordinary(token) => token.text().into(),
+ SynToken::Punch(token, _) => token.text().into(),
+ SynToken::Synthetic(token) => token.text.clone(),
+ }
+ }
+
- impl TokenConvertor for Convertor {
++ fn synthetic_id(&self, _ctx: &Converter) -> Option<SyntheticTokenId> {
+ match self {
+ SynToken::Synthetic(token) => Some(token.id),
+ _ => None,
+ }
+ }
+}
+
- if !&self.range.contains_range(curr.text_range()) {
++impl TokenConverter for Converter {
+ type Token = SynToken;
+ fn convert_doc_comment(&self, token: &Self::Token) -> Option<Vec<tt::TokenTree>> {
+ convert_doc_comment(token.token()?)
+ }
+
+ fn bump(&mut self) -> Option<(Self::Token, TextRange)> {
+ if let Some((punct, offset)) = self.punct_offset.clone() {
+ if usize::from(offset) + 1 < punct.text().len() {
+ let offset = offset + TextSize::of('.');
+ let range = punct.text_range();
+ self.punct_offset = Some((punct.clone(), offset));
+ let range = TextRange::at(range.start() + offset, TextSize::of('.'));
+ return Some((SynToken::Punch(punct, offset), range));
+ }
+ }
+
+ if let Some(synth_token) = self.current_synthetic.pop() {
+ if self.current_synthetic.is_empty() {
+ let (new_current, new_synth) =
+ Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
+ self.current = new_current;
+ self.current_synthetic = new_synth;
+ }
+ let range = synth_token.range;
+ return Some((SynToken::Synthetic(synth_token), range));
+ }
+
+ let curr = self.current.clone()?;
- Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(_), _)),
++ if !self.range.contains_range(curr.text_range()) {
+ return None;
+ }
+ let (new_current, new_synth) =
+ Self::next_token(&mut self.preorder, &mut self.replace, &mut self.append);
+ self.current = new_current;
+ self.current_synthetic = new_synth;
+ let token = if curr.kind().is_punct() {
+ self.punct_offset = Some((curr.clone(), 0.into()));
+ let range = curr.text_range();
+ let range = TextRange::at(range.start(), TextSize::of('.'));
+ (SynToken::Punch(curr, 0.into()), range)
+ } else {
+ self.punct_offset = None;
+ let range = curr.text_range();
+ (SynToken::Ordinary(curr), range)
+ };
+
+ Some(token)
+ }
+
+ fn peek(&self) -> Option<Self::Token> {
+ if let Some((punct, mut offset)) = self.punct_offset.clone() {
+ offset += TextSize::of('.');
+ if usize::from(offset) < punct.text().len() {
+ return Some(SynToken::Punch(punct, offset));
+ }
+ }
+
+ if let Some(synth_token) = self.current_synthetic.last() {
+ return Some(SynToken::Synthetic(synth_token.clone()));
+ }
+
+ let curr = self.current.clone()?;
+ if !self.range.contains_range(curr.text_range()) {
+ return None;
+ }
+
+ let token = if curr.kind().is_punct() {
+ SynToken::Punch(curr, 0.into())
+ } else {
+ SynToken::Ordinary(curr)
+ };
+ Some(token)
+ }
+
+ fn id_alloc(&mut self) -> &mut TokenIdAlloc {
+ &mut self.id_alloc
+ }
+}
+
+struct TtTreeSink<'a> {
+ buf: String,
+ cursor: Cursor<'a>,
+ open_delims: FxHashMap<tt::TokenId, TextSize>,
+ text_pos: TextSize,
+ inner: SyntaxTreeBuilder,
+ token_map: TokenMap,
+}
+
+impl<'a> TtTreeSink<'a> {
+ fn new(cursor: Cursor<'a>) -> Self {
+ TtTreeSink {
+ buf: String::new(),
+ cursor,
+ open_delims: FxHashMap::default(),
+ text_pos: 0.into(),
+ inner: SyntaxTreeBuilder::default(),
+ token_map: TokenMap::default(),
+ }
+ }
+
+ fn finish(mut self) -> (Parse<SyntaxNode>, TokenMap) {
+ self.token_map.shrink_to_fit();
+ (self.inner.finish(), self.token_map)
+ }
+}
+
+fn delim_to_str(d: tt::DelimiterKind, closing: bool) -> &'static str {
+ let texts = match d {
+ tt::DelimiterKind::Parenthesis => "()",
+ tt::DelimiterKind::Brace => "{}",
+ tt::DelimiterKind::Bracket => "[]",
+ };
+
+ let idx = closing as usize;
+ &texts[idx..texts.len() - (1 - idx)]
+}
+
+impl<'a> TtTreeSink<'a> {
+ fn token(&mut self, kind: SyntaxKind, mut n_tokens: u8) {
+ if kind == LIFETIME_IDENT {
+ n_tokens = 2;
+ }
+
+ let mut last = self.cursor;
+ for _ in 0..n_tokens {
+ let tmp: u8;
+ if self.cursor.eof() {
+ break;
+ }
+ last = self.cursor;
+ let text: &str = loop {
+ break match self.cursor.token_tree() {
+ Some(tt::buffer::TokenTreeRef::Leaf(leaf, _)) => {
+ // Mark the range if needed
+ let (text, id) = match leaf {
+ tt::Leaf::Ident(ident) => (ident.text.as_str(), ident.id),
+ tt::Leaf::Punct(punct) => {
+ assert!(punct.char.is_ascii());
+ tmp = punct.char as u8;
+ (std::str::from_utf8(std::slice::from_ref(&tmp)).unwrap(), punct.id)
+ }
+ tt::Leaf::Literal(lit) => (lit.text.as_str(), lit.id),
+ };
+ let range = TextRange::at(self.text_pos, TextSize::of(text));
+ self.token_map.insert(id, range);
+ self.cursor = self.cursor.bump();
+ text
+ }
+ Some(tt::buffer::TokenTreeRef::Subtree(subtree, _)) => {
+ self.cursor = self.cursor.subtree().unwrap();
+ match subtree.delimiter {
+ Some(d) => {
+ self.open_delims.insert(d.id, self.text_pos);
+ delim_to_str(d.kind, false)
+ }
+ None => continue,
+ }
+ }
+ None => {
+ let parent = self.cursor.end().unwrap();
+ self.cursor = self.cursor.bump();
+ match parent.delimiter {
+ Some(d) => {
+ if let Some(open_delim) = self.open_delims.get(&d.id) {
+ let open_range = TextRange::at(*open_delim, TextSize::of('('));
+ let close_range =
+ TextRange::at(self.text_pos, TextSize::of('('));
+ self.token_map.insert_delim(d.id, open_range, close_range);
+ }
+ delim_to_str(d.kind, true)
+ }
+ None => continue,
+ }
+ }
+ };
+ };
+ self.buf += text;
+ self.text_pos += TextSize::of(text);
+ }
+
+ self.inner.token(kind, self.buf.as_str());
+ self.buf.clear();
+ // Add whitespace between adjoint puncts
+ let next = last.bump();
+ if let (
+ Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(curr), _)),
- if curr.spacing == tt::Spacing::Alone && curr.char != ';' {
++ Some(tt::buffer::TokenTreeRef::Leaf(tt::Leaf::Punct(next), _)),
+ ) = (last.token_tree(), next.token_tree())
+ {
+ // Note: We always assume the semi-colon would be the last token in
+ // other parts of RA such that we don't add whitespace here.
++ //
++ // When `next` is a `Punct` of `'`, that's a part of a lifetime identifier so we don't
++ // need to add whitespace either.
++ if curr.spacing == tt::Spacing::Alone && curr.char != ';' && next.char != '\'' {
+ self.inner.token(WHITESPACE, " ");
+ self.text_pos += TextSize::of(' ');
+ }
+ }
+ }
+
+ fn start_node(&mut self, kind: SyntaxKind) {
+ self.inner.start_node(kind);
+ }
+
+ fn finish_node(&mut self) {
+ self.inner.finish_node();
+ }
+
+ fn error(&mut self, error: String) {
+ self.inner.error(error, self.text_pos)
+ }
+}
--- /dev/null
--- /dev/null
++use std::collections::HashMap;
++
++use syntax::{ast, AstNode};
++use test_utils::extract_annotations;
++use tt::{
++ buffer::{TokenBuffer, TokenTreeRef},
++ Leaf, Punct, Spacing,
++};
++
++use super::syntax_node_to_token_tree;
++
++fn check_punct_spacing(fixture: &str) {
++ let source_file = ast::SourceFile::parse(fixture).ok().unwrap();
++ let (subtree, token_map) = syntax_node_to_token_tree(source_file.syntax());
++ let mut annotations: HashMap<_, _> = extract_annotations(fixture)
++ .into_iter()
++ .map(|(range, annotation)| {
++ let token = token_map.token_by_range(range).expect("no token found");
++ let spacing = match annotation.as_str() {
++ "Alone" => Spacing::Alone,
++ "Joint" => Spacing::Joint,
++ a => panic!("unknown annotation: {}", a),
++ };
++ (token, spacing)
++ })
++ .collect();
++
++ let buf = TokenBuffer::from_subtree(&subtree);
++ let mut cursor = buf.begin();
++ while !cursor.eof() {
++ while let Some(token_tree) = cursor.token_tree() {
++ if let TokenTreeRef::Leaf(Leaf::Punct(Punct { spacing, id, .. }), _) = token_tree {
++ if let Some(expected) = annotations.remove(&id) {
++ assert_eq!(expected, *spacing);
++ }
++ }
++ cursor = cursor.bump_subtree();
++ }
++ cursor = cursor.bump();
++ }
++
++ assert!(annotations.is_empty(), "unchecked annotations: {:?}", annotations);
++}
++
++#[test]
++fn punct_spacing() {
++ check_punct_spacing(
++ r#"
++fn main() {
++ 0+0;
++ //^ Alone
++ 0+(0);
++ //^ Alone
++ 0<=0;
++ //^ Joint
++ // ^ Alone
++ 0<=(0);
++ // ^ Alone
++ a=0;
++ //^ Alone
++ a=(0);
++ //^ Alone
++ a+=0;
++ //^ Joint
++ // ^ Alone
++ a+=(0);
++ // ^ Alone
++ a&&b;
++ //^ Joint
++ // ^ Alone
++ a&&(b);
++ // ^ Alone
++ foo::bar;
++ // ^ Joint
++ // ^ Alone
++ use foo::{bar,baz,};
++ // ^ Alone
++ // ^ Alone
++ // ^ Alone
++ struct Struct<'a> {};
++ // ^ Joint
++ // ^ Joint
++ Struct::<0>;
++ // ^ Alone
++ Struct::<{0}>;
++ // ^ Alone
++ ;;
++ //^ Joint
++ // ^ Alone
++}
++ "#,
++ );
++}
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "parser"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+drop_bomb = "0.1.5"
+rustc_lexer = { version = "725.0.0", package = "rustc-ap-rustc_lexer" }
+limit = { path = "../limit", version = "0.0.0" }
+
+[dev-dependencies]
+expect-test = "1.4.0"
+sourcegen = { path = "../sourcegen" }
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "paths"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+# Adding this dep sadly puts a lot of rust-analyzer crates after the
+# serde-derive crate. Even though we don't activate the derive feature here,
+# someone else in the crate graph certainly does!
+# serde = "1"
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "proc-macro-api"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+object = { version = "0.29.0", default-features = false, features = [
+ "std",
+ "read_core",
+ "elf",
+ "macho",
+ "pe",
+] }
+serde = { version = "1.0.137", features = ["derive"] }
+serde_json = { version = "1.0.81", features = ["unbounded_depth"] }
+tracing = "0.1.37"
+memmap2 = "0.5.4"
+snap = "1.0.5"
+
+paths = { path = "../paths", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+# Intentionally *not* depend on anything salsa-related
+# base-db = { path = "../base-db", version = "0.0.0" }
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "proc-macro-srv-cli"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[dependencies]
+proc-macro-srv = { version = "0.0.0", path = "../proc-macro-srv" }
+
+[features]
+sysroot-abi = ["proc-macro-srv/sysroot-abi"]
+
+[[bin]]
+name = "rust-analyzer-proc-macro-srv"
+path = "src/main.rs"
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "proc-macro-srv"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+object = { version = "0.29.0", default-features = false, features = [
+ "std",
+ "read_core",
+ "elf",
+ "macho",
+ "pe",
+] }
+libloading = "0.7.3"
+memmap2 = "0.5.4"
+
+tt = { path = "../tt", version = "0.0.0" }
+mbe = { path = "../mbe", version = "0.0.0" }
+paths = { path = "../paths", version = "0.0.0" }
+proc-macro-api = { path = "../proc-macro-api", version = "0.0.0" }
+
+[dev-dependencies]
+expect-test = "1.4.0"
+
+# used as proc macro test targets
+proc-macro-test = { path = "../proc-macro-test" }
+
+[features]
+sysroot-abi = []
--- /dev/null
- _ => Err(LoadProcMacroDylibError::UnsupportedABI),
+//! Procedural macros are implemented by compiling the macro providing crate
+//! to a dynamic library with a particular ABI which the compiler uses to expand
+//! macros. Unfortunately this ABI is not specified and can change from version
+//! to version of the compiler. To support this we copy the ABI from the rust
+//! compiler into submodules of this module (e.g proc_macro_srv::abis::abi_1_47).
+//!
+//! All of these ABIs are subsumed in the `Abi` enum, which exposes a simple
+//! interface the rest of rust-analyzer can use to talk to the macro
+//! provider.
+//!
+//! # Adding a new ABI
+//!
+//! To add a new ABI you'll need to copy the source of the target proc_macro
+//! crate from the source tree of the Rust compiler into this directory tree.
+//! Then you'll need to modify it
+//! - Remove any feature! or other things which won't compile on stable
+//! - change any absolute imports to relative imports within the ABI tree
+//!
+//! Then you'll need to add a branch to the `Abi` enum and an implementation of
+//! `Abi::expand`, `Abi::list_macros` and `Abi::from_lib` for the new ABI. See
+//! `proc_macro_srv/src/abis/abi_1_47/mod.rs` for an example. Finally you'll
+//! need to update the conditionals in `Abi::from_lib` to return your new ABI
+//! for the relevant versions of the rust compiler
+//!
+
+mod abi_1_58;
+mod abi_1_63;
+#[cfg(feature = "sysroot-abi")]
+mod abi_sysroot;
+
+// see `build.rs`
+include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
+
+// Used by `test/utils.rs`
+#[cfg(all(test, feature = "sysroot-abi"))]
+pub(crate) use abi_sysroot::TokenStream as TestTokenStream;
+
+use super::dylib::LoadProcMacroDylibError;
+pub(crate) use abi_1_58::Abi as Abi_1_58;
+pub(crate) use abi_1_63::Abi as Abi_1_63;
+#[cfg(feature = "sysroot-abi")]
+pub(crate) use abi_sysroot::Abi as Abi_Sysroot;
+use libloading::Library;
+use proc_macro_api::{ProcMacroKind, RustCInfo};
+
+pub struct PanicMessage {
+ message: Option<String>,
+}
+
+impl PanicMessage {
+ pub fn as_str(&self) -> Option<String> {
+ self.message.clone()
+ }
+}
+
+pub(crate) enum Abi {
+ Abi1_58(Abi_1_58),
+ Abi1_63(Abi_1_63),
+ #[cfg(feature = "sysroot-abi")]
+ AbiSysroot(Abi_Sysroot),
+}
+
+impl Abi {
+ /// Load a new ABI.
+ ///
+ /// # Arguments
+ ///
+ /// *`lib` - The dynamic library containing the macro implementations
+ /// *`symbol_name` - The symbol name the macros can be found attributes
+ /// *`info` - RustCInfo about the compiler that was used to compile the
+ /// macro crate. This is the information we use to figure out
+ /// which ABI to return
+ pub fn from_lib(
+ lib: &Library,
+ symbol_name: String,
+ info: RustCInfo,
+ ) -> Result<Abi, LoadProcMacroDylibError> {
+ // the sysroot ABI relies on `extern proc_macro` with unstable features,
+ // instead of a snapshot of the proc macro bridge's source code. it's only
+ // enabled if we have an exact version match.
+ #[cfg(feature = "sysroot-abi")]
+ {
+ if info.version_string == RUSTC_VERSION_STRING {
+ let inner = unsafe { Abi_Sysroot::from_lib(lib, symbol_name) }?;
+ return Ok(Abi::AbiSysroot(inner));
+ }
+
+ // if we reached this point, versions didn't match. in testing, we
+ // want that to panic - this could mean that the format of `rustc
+ // --version` no longer matches the format of the version string
+ // stored in the `.rustc` section, and we want to catch that in-tree
+ // with `x.py test`
+ #[cfg(test)]
+ {
+ let allow_mismatch = std::env::var("PROC_MACRO_SRV_ALLOW_SYSROOT_MISMATCH");
+ if let Ok("1") = allow_mismatch.as_deref() {
+ // only used by rust-analyzer developers, when working on the
+ // sysroot ABI from the rust-analyzer repository - which should
+ // only happen pre-subtree. this can be removed later.
+ } else {
+ panic!(
+ "sysroot ABI mismatch: dylib rustc version (read from .rustc section): {:?} != proc-macro-srv version (read from 'rustc --version'): {:?}",
+ info.version_string, RUSTC_VERSION_STRING
+ );
+ }
+ }
+ }
+
+ // FIXME: this should use exclusive ranges when they're stable
+ // https://github.com/rust-lang/rust/issues/37854
+ match (info.version.0, info.version.1) {
+ (1, 58..=62) => {
+ let inner = unsafe { Abi_1_58::from_lib(lib, symbol_name) }?;
+ Ok(Abi::Abi1_58(inner))
+ }
+ (1, 63) => {
+ let inner = unsafe { Abi_1_63::from_lib(lib, symbol_name) }?;
+ Ok(Abi::Abi1_63(inner))
+ }
++ _ => Err(LoadProcMacroDylibError::UnsupportedABI(info.version_string.clone())),
+ }
+ }
+
+ pub fn expand(
+ &self,
+ macro_name: &str,
+ macro_body: &tt::Subtree,
+ attributes: Option<&tt::Subtree>,
+ ) -> Result<tt::Subtree, PanicMessage> {
+ match self {
+ Self::Abi1_58(abi) => abi.expand(macro_name, macro_body, attributes),
+ Self::Abi1_63(abi) => abi.expand(macro_name, macro_body, attributes),
+ #[cfg(feature = "sysroot-abi")]
+ Self::AbiSysroot(abi) => abi.expand(macro_name, macro_body, attributes),
+ }
+ }
+
+ pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+ match self {
+ Self::Abi1_58(abi) => abi.list_macros(),
+ Self::Abi1_63(abi) => abi.list_macros(),
+ #[cfg(feature = "sysroot-abi")]
+ Self::AbiSysroot(abi) => abi.list_macros(),
+ }
+ }
+}
+
+#[test]
+fn test_version_check() {
+ let path = paths::AbsPathBuf::assert(crate::proc_macro_test_dylib_path());
+ let info = proc_macro_api::read_dylib_info(&path).unwrap();
+ assert!(info.version.1 >= 50);
+}
--- /dev/null
- UnsupportedABI,
+//! Handles dynamic library loading for proc macro
+
+use std::{
+ fmt,
+ fs::File,
+ io,
+ path::{Path, PathBuf},
+};
+
+use libloading::Library;
+use memmap2::Mmap;
+use object::Object;
+use paths::AbsPath;
+use proc_macro_api::{read_dylib_info, ProcMacroKind};
+
+use super::abis::Abi;
+
+const NEW_REGISTRAR_SYMBOL: &str = "_rustc_proc_macro_decls_";
+
+fn invalid_data_err(e: impl Into<Box<dyn std::error::Error + Send + Sync>>) -> io::Error {
+ io::Error::new(io::ErrorKind::InvalidData, e)
+}
+
+fn is_derive_registrar_symbol(symbol: &str) -> bool {
+ symbol.contains(NEW_REGISTRAR_SYMBOL)
+}
+
+fn find_registrar_symbol(file: &Path) -> io::Result<Option<String>> {
+ let file = File::open(file)?;
+ let buffer = unsafe { Mmap::map(&file)? };
+
+ Ok(object::File::parse(&*buffer)
+ .map_err(invalid_data_err)?
+ .exports()
+ .map_err(invalid_data_err)?
+ .into_iter()
+ .map(|export| export.name())
+ .filter_map(|sym| String::from_utf8(sym.into()).ok())
+ .find(|sym| is_derive_registrar_symbol(sym))
+ .map(|sym| {
+ // From MacOS docs:
+ // https://developer.apple.com/library/archive/documentation/System/Conceptual/ManPages_iPhoneOS/man3/dlsym.3.html
+ // Unlike other dyld API's, the symbol name passed to dlsym() must NOT be
+ // prepended with an underscore.
+ if cfg!(target_os = "macos") && sym.starts_with('_') {
+ sym[1..].to_owned()
+ } else {
+ sym
+ }
+ }))
+}
+
+/// Loads dynamic library in platform dependent manner.
+///
+/// For unix, you have to use RTLD_DEEPBIND flag to escape problems described
+/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample)
+/// and [here](https://github.com/rust-lang/rust/issues/60593).
+///
+/// Usage of RTLD_DEEPBIND
+/// [here](https://github.com/fedochet/rust-proc-macro-panic-inside-panic-expample/issues/1)
+///
+/// It seems that on Windows that behaviour is default, so we do nothing in that case.
+#[cfg(windows)]
+fn load_library(file: &Path) -> Result<Library, libloading::Error> {
+ unsafe { Library::new(file) }
+}
+
+#[cfg(unix)]
+fn load_library(file: &Path) -> Result<Library, libloading::Error> {
+ use libloading::os::unix::Library as UnixLibrary;
+ use std::os::raw::c_int;
+
+ const RTLD_NOW: c_int = 0x00002;
+ const RTLD_DEEPBIND: c_int = 0x00008;
+
+ unsafe { UnixLibrary::open(Some(file), RTLD_NOW | RTLD_DEEPBIND).map(|lib| lib.into()) }
+}
+
+#[derive(Debug)]
+pub enum LoadProcMacroDylibError {
+ Io(io::Error),
+ LibLoading(libloading::Error),
- Self::UnsupportedABI => write!(f, "unsupported ABI version"),
++ UnsupportedABI(String),
+}
+
+impl fmt::Display for LoadProcMacroDylibError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ Self::Io(e) => e.fmt(f),
++ Self::UnsupportedABI(v) => write!(f, "unsupported ABI `{v}`"),
+ Self::LibLoading(e) => e.fmt(f),
+ }
+ }
+}
+
+impl From<io::Error> for LoadProcMacroDylibError {
+ fn from(e: io::Error) -> Self {
+ LoadProcMacroDylibError::Io(e)
+ }
+}
+
+impl From<libloading::Error> for LoadProcMacroDylibError {
+ fn from(e: libloading::Error) -> Self {
+ LoadProcMacroDylibError::LibLoading(e)
+ }
+}
+
+struct ProcMacroLibraryLibloading {
+ // Hold on to the library so it doesn't unload
+ _lib: Library,
+ abi: Abi,
+}
+
+impl ProcMacroLibraryLibloading {
+ fn open(file: &Path) -> Result<Self, LoadProcMacroDylibError> {
+ let symbol_name = find_registrar_symbol(file)?.ok_or_else(|| {
+ invalid_data_err(format!("Cannot find registrar symbol in file {}", file.display()))
+ })?;
+
+ let abs_file: &AbsPath = file.try_into().map_err(|_| {
+ invalid_data_err(format!("expected an absolute path, got {}", file.display()))
+ })?;
+ let version_info = read_dylib_info(abs_file)?;
+
+ let lib = load_library(file).map_err(invalid_data_err)?;
+ let abi = Abi::from_lib(&lib, symbol_name, version_info)?;
+ Ok(ProcMacroLibraryLibloading { _lib: lib, abi })
+ }
+}
+
+pub struct Expander {
+ inner: ProcMacroLibraryLibloading,
+}
+
+impl Expander {
+ pub fn new(lib: &Path) -> Result<Expander, LoadProcMacroDylibError> {
+ // Some libraries for dynamic loading require canonicalized path even when it is
+ // already absolute
+ let lib = lib.canonicalize()?;
+
+ let lib = ensure_file_with_lock_free_access(&lib)?;
+
+ let library = ProcMacroLibraryLibloading::open(lib.as_ref())?;
+
+ Ok(Expander { inner: library })
+ }
+
+ pub fn expand(
+ &self,
+ macro_name: &str,
+ macro_body: &tt::Subtree,
+ attributes: Option<&tt::Subtree>,
+ ) -> Result<tt::Subtree, String> {
+ let result = self.inner.abi.expand(macro_name, macro_body, attributes);
+ result.map_err(|e| e.as_str().unwrap_or_else(|| "<unknown error>".to_string()))
+ }
+
+ pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+ self.inner.abi.list_macros()
+ }
+}
+
+/// Copy the dylib to temp directory to prevent locking in Windows
+#[cfg(windows)]
+fn ensure_file_with_lock_free_access(path: &Path) -> io::Result<PathBuf> {
+ use std::collections::hash_map::RandomState;
+ use std::ffi::OsString;
+ use std::hash::{BuildHasher, Hasher};
+
+ if std::env::var("RA_DONT_COPY_PROC_MACRO_DLL").is_ok() {
+ return Ok(path.to_path_buf());
+ }
+
+ let mut to = std::env::temp_dir();
+
+ let file_name = path.file_name().ok_or_else(|| {
+ io::Error::new(
+ io::ErrorKind::InvalidInput,
+ format!("File path is invalid: {}", path.display()),
+ )
+ })?;
+
+ // Generate a unique number by abusing `HashMap`'s hasher.
+ // Maybe this will also "inspire" a libs team member to finally put `rand` in libstd.
+ let t = RandomState::new().build_hasher().finish();
+
+ let mut unique_name = OsString::from(t.to_string());
+ unique_name.push(file_name);
+
+ to.push(unique_name);
+ std::fs::copy(path, &to).unwrap();
+ Ok(to)
+}
+
+#[cfg(unix)]
+fn ensure_file_with_lock_free_access(path: &Path) -> io::Result<PathBuf> {
+ Ok(path.to_path_buf())
+}
--- /dev/null
- format!("Failed to get file metadata for {}: {:?}", path.display(), err)
+//! RA Proc Macro Server
+//!
+//! This library is able to call compiled Rust custom derive dynamic libraries on arbitrary code.
+//! The general idea here is based on <https://github.com/fedochet/rust-proc-macro-expander>.
+//!
+//! But we adapt it to better fit RA needs:
+//!
+//! * We use `tt` for proc-macro `TokenStream` server, it is easier to manipulate and interact with
+//! RA than `proc-macro2` token stream.
+//! * By **copying** the whole rustc `lib_proc_macro` code, we are able to build this with `stable`
+//! rustc rather than `unstable`. (Although in general ABI compatibility is still an issue)…
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![cfg_attr(
+ feature = "sysroot-abi",
+ feature(proc_macro_internals, proc_macro_diagnostic, proc_macro_span)
+)]
+#![allow(unreachable_pub)]
+
+mod dylib;
+mod abis;
+
+pub mod cli;
+
+use std::{
+ collections::{hash_map::Entry, HashMap},
+ env,
+ ffi::OsString,
+ fs,
+ path::{Path, PathBuf},
+ thread,
+ time::SystemTime,
+};
+
+use proc_macro_api::{
+ msg::{ExpandMacro, FlatTree, PanicMessage},
+ ProcMacroKind,
+};
+
+#[derive(Default)]
+pub(crate) struct ProcMacroSrv {
+ expanders: HashMap<(PathBuf, SystemTime), dylib::Expander>,
+}
+
+const EXPANDER_STACK_SIZE: usize = 8 * 1024 * 1024;
+
+impl ProcMacroSrv {
+ pub fn expand(&mut self, task: ExpandMacro) -> Result<FlatTree, PanicMessage> {
+ let expander = self.expander(task.lib.as_ref()).map_err(|err| {
+ debug_assert!(false, "should list macros before asking to expand");
+ PanicMessage(format!("failed to load macro: {}", err))
+ })?;
+
+ let prev_env = EnvSnapshot::new();
+ for (k, v) in &task.env {
+ env::set_var(k, v);
+ }
+ let prev_working_dir = match task.current_dir {
+ Some(dir) => {
+ let prev_working_dir = std::env::current_dir().ok();
+ if let Err(err) = std::env::set_current_dir(&dir) {
+ eprintln!("Failed to set the current working dir to {}. Error: {:?}", dir, err)
+ }
+ prev_working_dir
+ }
+ None => None,
+ };
+
+ let macro_body = task.macro_body.to_subtree();
+ let attributes = task.attributes.map(|it| it.to_subtree());
+ let result = thread::scope(|s| {
+ let thread = thread::Builder::new()
+ .stack_size(EXPANDER_STACK_SIZE)
+ .name(task.macro_name.clone())
+ .spawn_scoped(s, || {
+ expander
+ .expand(&task.macro_name, ¯o_body, attributes.as_ref())
+ .map(|it| FlatTree::new(&it))
+ });
+ let res = match thread {
+ Ok(handle) => handle.join(),
+ Err(e) => std::panic::resume_unwind(Box::new(e)),
+ };
+
+ match res {
+ Ok(res) => res,
+ Err(e) => std::panic::resume_unwind(e),
+ }
+ });
+
+ prev_env.rollback();
+
+ if let Some(dir) = prev_working_dir {
+ if let Err(err) = std::env::set_current_dir(&dir) {
+ eprintln!(
+ "Failed to set the current working dir to {}. Error: {:?}",
+ dir.display(),
+ err
+ )
+ }
+ }
+
+ result.map_err(PanicMessage)
+ }
+
+ pub(crate) fn list_macros(
+ &mut self,
+ dylib_path: &Path,
+ ) -> Result<Vec<(String, ProcMacroKind)>, String> {
+ let expander = self.expander(dylib_path)?;
+ Ok(expander.list_macros())
+ }
+
+ fn expander(&mut self, path: &Path) -> Result<&dylib::Expander, String> {
+ let time = fs::metadata(path).and_then(|it| it.modified()).map_err(|err| {
- format!("Cannot create expander for {}: {:?}", path.display(), err)
++ format!("Failed to get file metadata for {}: {}", path.display(), err)
+ })?;
+
+ Ok(match self.expanders.entry((path.to_path_buf(), time)) {
+ Entry::Vacant(v) => v.insert(dylib::Expander::new(path).map_err(|err| {
++ format!("Cannot create expander for {}: {}", path.display(), err)
+ })?),
+ Entry::Occupied(e) => e.into_mut(),
+ })
+ }
+}
+
+struct EnvSnapshot {
+ vars: HashMap<OsString, OsString>,
+}
+
+impl EnvSnapshot {
+ fn new() -> EnvSnapshot {
+ EnvSnapshot { vars: env::vars_os().collect() }
+ }
+
+ fn rollback(self) {
+ let mut old_vars = self.vars;
+ for (name, value) in env::vars_os() {
+ let old_value = old_vars.remove(&name);
+ if old_value != Some(value) {
+ match old_value {
+ None => env::remove_var(name),
+ Some(old_value) => env::set_var(name, old_value),
+ }
+ }
+ }
+ for (name, old_value) in old_vars {
+ env::set_var(name, old_value)
+ }
+ }
+}
+
+#[cfg(all(feature = "sysroot-abi", test))]
+mod tests;
+
+#[cfg(test)]
+pub fn proc_macro_test_dylib_path() -> std::path::PathBuf {
+ proc_macro_test::PROC_MACRO_TEST_LOCATION.into()
+}
--- /dev/null
- PUNCH ! [joint] 4294967295
+//! proc-macro tests
+
+#[macro_use]
+mod utils;
+use utils::*;
+
+use expect_test::expect;
+
+#[test]
+fn test_derive_empty() {
+ assert_expand("DeriveEmpty", r#"struct S;"#, expect![[r#"SUBTREE $"#]]);
+}
+
+#[test]
+fn test_derive_error() {
+ assert_expand(
+ "DeriveError",
+ r#"struct S;"#,
+ expect![[r##"
+ SUBTREE $
+ IDENT compile_error 4294967295
- PUNCH - [joint] 4294967295
++ PUNCH ! [alone] 4294967295
+ SUBTREE () 4294967295
+ LITERAL "#[derive(DeriveError)] struct S ;" 4294967295
+ PUNCH ; [alone] 4294967295"##]],
+ );
+}
+
+#[test]
+fn test_fn_like_macro_noop() {
+ assert_expand(
+ "fn_like_noop",
+ r#"ident, 0, 1, []"#,
+ expect![[r#"
+ SUBTREE $
+ IDENT ident 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL 0 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL 1 4294967295
+ PUNCH , [alone] 4294967295
+ SUBTREE [] 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_macro_clone_ident_subtree() {
+ assert_expand(
+ "fn_like_clone_tokens",
+ r#"ident, []"#,
+ expect![[r#"
+ SUBTREE $
+ IDENT ident 4294967295
+ PUNCH , [alone] 4294967295
+ SUBTREE [] 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_macro_clone_raw_ident() {
+ assert_expand(
+ "fn_like_clone_tokens",
+ "r#async",
+ expect![[r#"
+ SUBTREE $
+ IDENT async 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_mk_literals() {
+ assert_expand(
+ "fn_like_mk_literals",
+ r#""#,
+ expect![[r#"
+ SUBTREE $
+ LITERAL b"byte_string" 4294967295
+ LITERAL 'c' 4294967295
+ LITERAL "string" 4294967295
+ LITERAL 3.14f64 4294967295
+ LITERAL 3.14 4294967295
+ LITERAL 123i64 4294967295
+ LITERAL 123 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_mk_idents() {
+ // FIXME: this test is wrong: raw should be 'r#raw' but ABIs 1.64 and below
+ // simply ignore `is_raw` when implementing the `Ident` interface.
+ assert_expand(
+ "fn_like_mk_idents",
+ r#""#,
+ expect![[r#"
+ SUBTREE $
+ IDENT standard 4294967295
+ IDENT raw 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_fn_like_macro_clone_literals() {
+ assert_expand(
+ "fn_like_clone_tokens",
+ r#"1u16, 2_u32, -4i64, 3.14f32, "hello bridge""#,
+ expect![[r#"
+ SUBTREE $
+ LITERAL 1u16 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL 2_u32 4294967295
+ PUNCH , [alone] 4294967295
- PUNCH ! [joint] 4294967295
++ PUNCH - [alone] 4294967295
+ LITERAL 4i64 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL 3.14f32 4294967295
+ PUNCH , [alone] 4294967295
+ LITERAL "hello bridge" 4294967295"#]],
+ );
+}
+
+#[test]
+fn test_attr_macro() {
+ // Corresponds to
+ // #[proc_macro_test::attr_error(some arguments)]
+ // mod m {}
+ assert_expand_attr(
+ "attr_error",
+ r#"mod m {}"#,
+ r#"some arguments"#,
+ expect![[r##"
+ SUBTREE $
+ IDENT compile_error 4294967295
++ PUNCH ! [alone] 4294967295
+ SUBTREE () 4294967295
+ LITERAL "#[attr_error(some arguments)] mod m {}" 4294967295
+ PUNCH ; [alone] 4294967295"##]],
+ );
+}
+
+/// Tests that we find and classify all proc macros correctly.
+#[test]
+fn list_test_macros() {
+ let res = list().join("\n");
+
+ expect![[r#"
+ fn_like_noop [FuncLike]
+ fn_like_panic [FuncLike]
+ fn_like_error [FuncLike]
+ fn_like_clone_tokens [FuncLike]
+ fn_like_mk_literals [FuncLike]
+ fn_like_mk_idents [FuncLike]
+ attr_noop [Attr]
+ attr_panic [Attr]
+ attr_error [Attr]
+ DeriveEmpty [CustomDerive]
+ DerivePanic [CustomDerive]
+ DeriveError [CustomDerive]"#]]
+ .assert_eq(&res);
+}
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "proc-macro-test"
+version = "0.0.0"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+publish = false
+
+[lib]
+doctest = false
+
+[build-dependencies]
+proc-macro-test-impl = { path = "imp", version = "0.0.0" }
+toolchain = { path = "../toolchain", version = "0.0.0" }
+cargo_metadata = "0.15.0"
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "proc-macro-test-impl"
+version = "0.0.0"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+publish = false
+
+[lib]
+doctest = false
+proc-macro = true
+
+[workspace]
+
+[dependencies]
+# this crate should not have any dependencies, since it uses its own workspace,
+# and its own `Cargo.lock`
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "profile"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+once_cell = "1.15.0"
+cfg-if = "1.0.0"
+libc = "0.2.135"
+la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+countme = { version = "3.0.1", features = ["enable"] }
+jemalloc-ctl = { version = "0.5.0", package = "tikv-jemalloc-ctl", optional = true }
+
+[target.'cfg(target_os = "linux")'.dependencies]
+perf-event = "0.4.7"
+
+[target.'cfg(windows)'.dependencies]
+winapi = { version = "0.3.9", features = ["processthreadsapi", "psapi"] }
+
+[features]
+cpu_profiler = []
+jemalloc = ["jemalloc-ctl"]
+
+# Uncomment to enable for the whole crate graph
+# default = [ "cpu_profiler" ]
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "project-model"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+tracing = "0.1.35"
+rustc-hash = "1.1.0"
+cargo_metadata = "0.15.0"
+semver = "1.0.14"
+serde = { version = "1.0.137", features = ["derive"] }
+serde_json = "1.0.86"
+anyhow = "1.0.62"
+la-arena = { version = "0.3.0", path = "../../lib/la-arena" }
+
+cfg = { path = "../cfg", version = "0.0.0" }
+base-db = { path = "../base-db", version = "0.0.0" }
+toolchain = { path = "../toolchain", version = "0.0.0" }
+paths = { path = "../paths", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+
+[dev-dependencies]
+expect-test = "1.4.0"
--- /dev/null
- // default lib and bins. This is an independent concept from the --targets
+//! Workspace information we get from cargo consists of two pieces. The first is
+//! the output of `cargo metadata`. The second is the output of running
+//! `build.rs` files (`OUT_DIR` env var, extra cfg flags) and compiling proc
+//! macro.
+//!
+//! This module implements this second part. We use "build script" terminology
+//! here, but it covers procedural macros as well.
+
+use std::{
+ cell::RefCell,
+ io, mem,
+ path::{self, PathBuf},
+ process::Command,
+};
+
+use cargo_metadata::{camino::Utf8Path, Message};
+use la_arena::ArenaMap;
+use paths::AbsPathBuf;
+use rustc_hash::FxHashMap;
+use semver::Version;
+use serde::Deserialize;
+
+use crate::{
+ cfg_flag::CfgFlag, CargoConfig, CargoFeatures, CargoWorkspace, InvocationLocation,
+ InvocationStrategy, Package,
+};
+
+#[derive(Debug, Default, Clone, PartialEq, Eq)]
+pub struct WorkspaceBuildScripts {
+ outputs: ArenaMap<Package, BuildScriptOutput>,
+ error: Option<String>,
+}
+
+#[derive(Debug, Clone, Default, PartialEq, Eq)]
+pub(crate) struct BuildScriptOutput {
+ /// List of config flags defined by this package's build script.
+ pub(crate) cfgs: Vec<CfgFlag>,
+ /// List of cargo-related environment variables with their value.
+ ///
+ /// If the package has a build script which defines environment variables,
+ /// they can also be found here.
+ pub(crate) envs: Vec<(String, String)>,
+ /// Directory where a build script might place its output.
+ pub(crate) out_dir: Option<AbsPathBuf>,
+ /// Path to the proc-macro library file if this package exposes proc-macros.
+ pub(crate) proc_macro_dylib_path: Option<AbsPathBuf>,
+}
+
+impl BuildScriptOutput {
+ fn is_unchanged(&self) -> bool {
+ self.cfgs.is_empty()
+ && self.envs.is_empty()
+ && self.out_dir.is_none()
+ && self.proc_macro_dylib_path.is_none()
+ }
+}
+
+impl WorkspaceBuildScripts {
+ fn build_command(config: &CargoConfig) -> io::Result<Command> {
+ let mut cmd = match config.run_build_script_command.as_deref() {
+ Some([program, args @ ..]) => {
+ let mut cmd = Command::new(program);
+ cmd.args(args);
+ cmd
+ }
+ _ => {
+ let mut cmd = Command::new(toolchain::cargo());
+
+ cmd.args(&["check", "--quiet", "--workspace", "--message-format=json"]);
+
+ // --all-targets includes tests, benches and examples in addition to the
++ // default lib and bins. This is an independent concept from the --target
+ // flag below.
+ cmd.arg("--all-targets");
+
+ if let Some(target) = &config.target {
+ cmd.args(&["--target", target]);
+ }
+
+ match &config.features {
+ CargoFeatures::All => {
+ cmd.arg("--all-features");
+ }
+ CargoFeatures::Selected { features, no_default_features } => {
+ if *no_default_features {
+ cmd.arg("--no-default-features");
+ }
+ if !features.is_empty() {
+ cmd.arg("--features");
+ cmd.arg(features.join(" "));
+ }
+ }
+ }
+
+ cmd
+ }
+ };
+
+ cmd.envs(&config.extra_env);
+ if config.wrap_rustc_in_build_scripts {
+ // Setup RUSTC_WRAPPER to point to `rust-analyzer` binary itself. We use
+ // that to compile only proc macros and build scripts during the initial
+ // `cargo check`.
+ let myself = std::env::current_exe()?;
+ cmd.env("RUSTC_WRAPPER", myself);
+ cmd.env("RA_RUSTC_WRAPPER", "1");
+ }
+
+ Ok(cmd)
+ }
+
+ /// Runs the build scripts for the given workspace
+ pub(crate) fn run_for_workspace(
+ config: &CargoConfig,
+ workspace: &CargoWorkspace,
+ progress: &dyn Fn(String),
+ toolchain: &Option<Version>,
+ ) -> io::Result<WorkspaceBuildScripts> {
+ const RUST_1_62: Version = Version::new(1, 62, 0);
+
+ let current_dir = match &config.invocation_location {
+ InvocationLocation::Root(root) if config.run_build_script_command.is_some() => {
+ root.as_path()
+ }
+ _ => &workspace.workspace_root(),
+ }
+ .as_ref();
+
+ match Self::run_per_ws(Self::build_command(config)?, workspace, current_dir, progress) {
+ Ok(WorkspaceBuildScripts { error: Some(error), .. })
+ if toolchain.as_ref().map_or(false, |it| *it >= RUST_1_62) =>
+ {
+ // building build scripts failed, attempt to build with --keep-going so
+ // that we potentially get more build data
+ let mut cmd = Self::build_command(config)?;
+ cmd.args(&["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1");
+ let mut res = Self::run_per_ws(cmd, workspace, current_dir, progress)?;
+ res.error = Some(error);
+ Ok(res)
+ }
+ res => res,
+ }
+ }
+
+ /// Runs the build scripts by invoking the configured command *once*.
+ /// This populates the outputs for all passed in workspaces.
+ pub(crate) fn run_once(
+ config: &CargoConfig,
+ workspaces: &[&CargoWorkspace],
+ progress: &dyn Fn(String),
+ ) -> io::Result<Vec<WorkspaceBuildScripts>> {
+ assert_eq!(config.invocation_strategy, InvocationStrategy::Once);
+
+ let current_dir = match &config.invocation_location {
+ InvocationLocation::Root(root) => root,
+ InvocationLocation::Workspace => {
+ return Err(io::Error::new(
+ io::ErrorKind::Other,
+ "Cannot run build scripts from workspace with invocation strategy `once`",
+ ))
+ }
+ };
+ let cmd = Self::build_command(config)?;
+ // NB: Cargo.toml could have been modified between `cargo metadata` and
+ // `cargo check`. We shouldn't assume that package ids we see here are
+ // exactly those from `config`.
+ let mut by_id = FxHashMap::default();
+ // some workspaces might depend on the same crates, so we need to duplicate the outputs
+ // to those collisions
+ let mut collisions = Vec::new();
+ let mut res: Vec<_> = workspaces
+ .iter()
+ .enumerate()
+ .map(|(idx, workspace)| {
+ let mut res = WorkspaceBuildScripts::default();
+ for package in workspace.packages() {
+ res.outputs.insert(package, BuildScriptOutput::default());
+ if by_id.contains_key(&workspace[package].id) {
+ collisions.push((&workspace[package].id, idx, package));
+ } else {
+ by_id.insert(workspace[package].id.clone(), (package, idx));
+ }
+ }
+ res
+ })
+ .collect();
+
+ let errors = Self::run_command(
+ cmd,
+ current_dir.as_path().as_ref(),
+ |package, cb| {
+ if let Some(&(package, workspace)) = by_id.get(package) {
+ cb(&workspaces[workspace][package].name, &mut res[workspace].outputs[package]);
+ }
+ },
+ progress,
+ )?;
+ res.iter_mut().for_each(|it| it.error = errors.clone());
+ collisions.into_iter().for_each(|(id, workspace, package)| {
+ if let Some(&(p, w)) = by_id.get(id) {
+ res[workspace].outputs[package] = res[w].outputs[p].clone();
+ }
+ });
+
+ if tracing::enabled!(tracing::Level::INFO) {
+ for (idx, workspace) in workspaces.iter().enumerate() {
+ for package in workspace.packages() {
+ let package_build_data = &mut res[idx].outputs[package];
+ if !package_build_data.is_unchanged() {
+ tracing::info!(
+ "{}: {:?}",
+ workspace[package].manifest.parent().display(),
+ package_build_data,
+ );
+ }
+ }
+ }
+ }
+
+ Ok(res)
+ }
+
+ fn run_per_ws(
+ cmd: Command,
+ workspace: &CargoWorkspace,
+ current_dir: &path::Path,
+ progress: &dyn Fn(String),
+ ) -> io::Result<WorkspaceBuildScripts> {
+ let mut res = WorkspaceBuildScripts::default();
+ let outputs = &mut res.outputs;
+ // NB: Cargo.toml could have been modified between `cargo metadata` and
+ // `cargo check`. We shouldn't assume that package ids we see here are
+ // exactly those from `config`.
+ let mut by_id: FxHashMap<String, Package> = FxHashMap::default();
+ for package in workspace.packages() {
+ outputs.insert(package, BuildScriptOutput::default());
+ by_id.insert(workspace[package].id.clone(), package);
+ }
+
+ res.error = Self::run_command(
+ cmd,
+ current_dir,
+ |package, cb| {
+ if let Some(&package) = by_id.get(package) {
+ cb(&workspace[package].name, &mut outputs[package]);
+ }
+ },
+ progress,
+ )?;
+
+ if tracing::enabled!(tracing::Level::INFO) {
+ for package in workspace.packages() {
+ let package_build_data = &mut outputs[package];
+ if !package_build_data.is_unchanged() {
+ tracing::info!(
+ "{}: {:?}",
+ workspace[package].manifest.parent().display(),
+ package_build_data,
+ );
+ }
+ }
+ }
+
+ Ok(res)
+ }
+
+ fn run_command(
+ mut cmd: Command,
+ current_dir: &path::Path,
+ // ideally this would be something like:
+ // with_output_for: impl FnMut(&str, dyn FnOnce(&mut BuildScriptOutput)),
+ // but owned trait objects aren't a thing
+ mut with_output_for: impl FnMut(&str, &mut dyn FnMut(&str, &mut BuildScriptOutput)),
+ progress: &dyn Fn(String),
+ ) -> io::Result<Option<String>> {
+ let errors = RefCell::new(String::new());
+ let push_err = |err: &str| {
+ let mut e = errors.borrow_mut();
+ e.push_str(err);
+ e.push('\n');
+ };
+
+ tracing::info!("Running build scripts in {}: {:?}", current_dir.display(), cmd);
+ cmd.current_dir(current_dir);
+ let output = stdx::process::spawn_with_streaming_output(
+ cmd,
+ &mut |line| {
+ // Copy-pasted from existing cargo_metadata. It seems like we
+ // should be using serde_stacker here?
+ let mut deserializer = serde_json::Deserializer::from_str(line);
+ deserializer.disable_recursion_limit();
+ let message = Message::deserialize(&mut deserializer)
+ .unwrap_or_else(|_| Message::TextLine(line.to_string()));
+
+ match message {
+ Message::BuildScriptExecuted(mut message) => {
+ with_output_for(&message.package_id.repr, &mut |name, data| {
+ progress(format!("running build-script: {}", name));
+ let cfgs = {
+ let mut acc = Vec::new();
+ for cfg in &message.cfgs {
+ match cfg.parse::<CfgFlag>() {
+ Ok(it) => acc.push(it),
+ Err(err) => {
+ push_err(&format!(
+ "invalid cfg from cargo-metadata: {}",
+ err
+ ));
+ return;
+ }
+ };
+ }
+ acc
+ };
+ if !message.env.is_empty() {
+ data.envs = mem::take(&mut message.env);
+ }
+ // cargo_metadata crate returns default (empty) path for
+ // older cargos, which is not absolute, so work around that.
+ let out_dir = mem::take(&mut message.out_dir).into_os_string();
+ if !out_dir.is_empty() {
+ let out_dir = AbsPathBuf::assert(PathBuf::from(out_dir));
+ // inject_cargo_env(package, package_build_data);
+ // NOTE: cargo and rustc seem to hide non-UTF-8 strings from env! and option_env!()
+ if let Some(out_dir) =
+ out_dir.as_os_str().to_str().map(|s| s.to_owned())
+ {
+ data.envs.push(("OUT_DIR".to_string(), out_dir));
+ }
+ data.out_dir = Some(out_dir);
+ data.cfgs = cfgs;
+ }
+ });
+ }
+ Message::CompilerArtifact(message) => {
+ with_output_for(&message.package_id.repr, &mut |name, data| {
+ progress(format!("building proc-macros: {}", name));
+ if message.target.kind.iter().any(|k| k == "proc-macro") {
+ // Skip rmeta file
+ if let Some(filename) =
+ message.filenames.iter().find(|name| is_dylib(name))
+ {
+ let filename = AbsPathBuf::assert(PathBuf::from(&filename));
+ data.proc_macro_dylib_path = Some(filename);
+ }
+ }
+ });
+ }
+ Message::CompilerMessage(message) => {
+ progress(message.target.name);
+
+ if let Some(diag) = message.message.rendered.as_deref() {
+ push_err(diag);
+ }
+ }
+ Message::BuildFinished(_) => {}
+ Message::TextLine(_) => {}
+ _ => {}
+ }
+ },
+ &mut |line| {
+ push_err(line);
+ },
+ )?;
+
+ let errors = if !output.status.success() {
+ let errors = errors.into_inner();
+ Some(if errors.is_empty() { "cargo check failed".to_string() } else { errors })
+ } else {
+ None
+ };
+ Ok(errors)
+ }
+
+ pub fn error(&self) -> Option<&str> {
+ self.error.as_deref()
+ }
+
+ pub(crate) fn get_output(&self, idx: Package) -> Option<&BuildScriptOutput> {
+ self.outputs.get(idx)
+ }
+}
+
+// FIXME: Find a better way to know if it is a dylib.
+fn is_dylib(path: &Utf8Path) -> bool {
+ match path.extension().map(|e| e.to_string().to_lowercase()) {
+ None => false,
+ Some(ext) => matches!(ext.as_str(), "dll" | "dylib" | "so"),
+ }
+}
--- /dev/null
- let target = config
- .target
- .clone()
- .or_else(|| cargo_config_build_target(cargo_toml, &config.extra_env))
- .or_else(|| rustc_discover_host_triple(cargo_toml, &config.extra_env));
+//! See [`CargoWorkspace`].
+
+use std::iter;
+use std::path::PathBuf;
+use std::str::from_utf8;
+use std::{ops, process::Command};
+
+use anyhow::{Context, Result};
+use base_db::Edition;
+use cargo_metadata::{CargoOpt, MetadataCommand};
+use la_arena::{Arena, Idx};
+use paths::{AbsPath, AbsPathBuf};
+use rustc_hash::FxHashMap;
+use serde::Deserialize;
+use serde_json::from_value;
+
+use crate::{utf8_stdout, InvocationLocation, ManifestPath};
+use crate::{CfgOverrides, InvocationStrategy};
+
+/// [`CargoWorkspace`] represents the logical structure of, well, a Cargo
+/// workspace. It pretty closely mirrors `cargo metadata` output.
+///
+/// Note that internally, rust-analyzer uses a different structure:
+/// `CrateGraph`. `CrateGraph` is lower-level: it knows only about the crates,
+/// while this knows about `Packages` & `Targets`: purely cargo-related
+/// concepts.
+///
+/// We use absolute paths here, `cargo metadata` guarantees to always produce
+/// abs paths.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct CargoWorkspace {
+ packages: Arena<PackageData>,
+ targets: Arena<TargetData>,
+ workspace_root: AbsPathBuf,
+}
+
+impl ops::Index<Package> for CargoWorkspace {
+ type Output = PackageData;
+ fn index(&self, index: Package) -> &PackageData {
+ &self.packages[index]
+ }
+}
+
+impl ops::Index<Target> for CargoWorkspace {
+ type Output = TargetData;
+ fn index(&self, index: Target) -> &TargetData {
+ &self.targets[index]
+ }
+}
+
+/// Describes how to set the rustc source directory.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum RustcSource {
+ /// Explicit path for the rustc source directory.
+ Path(AbsPathBuf),
+ /// Try to automatically detect where the rustc source directory is.
+ Discover,
+}
+
+/// Crates to disable `#[cfg(test)]` on.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum UnsetTestCrates {
+ None,
+ Only(Vec<String>),
+ All,
+}
+
+impl Default for UnsetTestCrates {
+ fn default() -> Self {
+ Self::None
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum CargoFeatures {
+ All,
+ Selected {
+ /// List of features to activate.
+ features: Vec<String>,
+ /// Do not activate the `default` feature.
+ no_default_features: bool,
+ },
+}
+
+impl Default for CargoFeatures {
+ fn default() -> Self {
+ CargoFeatures::Selected { features: vec![], no_default_features: false }
+ }
+}
+
+#[derive(Default, Clone, Debug, PartialEq, Eq)]
+pub struct CargoConfig {
+ /// List of features to activate.
+ pub features: CargoFeatures,
+ /// rustc target
+ pub target: Option<String>,
+ /// Sysroot loading behavior
+ pub sysroot: Option<RustcSource>,
+ /// rustc private crate source
+ pub rustc_source: Option<RustcSource>,
+ /// crates to disable `#[cfg(test)]` on
+ pub unset_test_crates: UnsetTestCrates,
+ /// Invoke `cargo check` through the RUSTC_WRAPPER.
+ pub wrap_rustc_in_build_scripts: bool,
+ /// The command to run instead of `cargo check` for building build scripts.
+ pub run_build_script_command: Option<Vec<String>>,
+ /// Extra env vars to set when invoking the cargo command
+ pub extra_env: FxHashMap<String, String>,
+ pub invocation_strategy: InvocationStrategy,
+ pub invocation_location: InvocationLocation,
+}
+
+impl CargoConfig {
+ pub fn cfg_overrides(&self) -> CfgOverrides {
+ match &self.unset_test_crates {
+ UnsetTestCrates::None => CfgOverrides::Selective(iter::empty().collect()),
+ UnsetTestCrates::Only(unset_test_crates) => CfgOverrides::Selective(
+ unset_test_crates
+ .iter()
+ .cloned()
+ .zip(iter::repeat_with(|| {
+ cfg::CfgDiff::new(Vec::new(), vec![cfg::CfgAtom::Flag("test".into())])
+ .unwrap()
+ }))
+ .collect(),
+ ),
+ UnsetTestCrates::All => CfgOverrides::Wildcard(
+ cfg::CfgDiff::new(Vec::new(), vec![cfg::CfgAtom::Flag("test".into())]).unwrap(),
+ ),
+ }
+ }
+}
+
+pub type Package = Idx<PackageData>;
+
+pub type Target = Idx<TargetData>;
+
+/// Information associated with a cargo crate
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct PackageData {
+ /// Version given in the `Cargo.toml`
+ pub version: semver::Version,
+ /// Name as given in the `Cargo.toml`
+ pub name: String,
+ /// Repository as given in the `Cargo.toml`
+ pub repository: Option<String>,
+ /// Path containing the `Cargo.toml`
+ pub manifest: ManifestPath,
+ /// Targets provided by the crate (lib, bin, example, test, ...)
+ pub targets: Vec<Target>,
+ /// Does this package come from the local filesystem (and is editable)?
+ pub is_local: bool,
+ /// Whether this package is a member of the workspace
+ pub is_member: bool,
+ /// List of packages this package depends on
+ pub dependencies: Vec<PackageDependency>,
+ /// Rust edition for this package
+ pub edition: Edition,
+ /// Features provided by the crate, mapped to the features required by that feature.
+ pub features: FxHashMap<String, Vec<String>>,
+ /// List of features enabled on this package
+ pub active_features: Vec<String>,
+ /// String representation of package id
+ pub id: String,
+ /// The contents of [package.metadata.rust-analyzer]
+ pub metadata: RustAnalyzerPackageMetaData,
+}
+
+#[derive(Deserialize, Default, Debug, Clone, Eq, PartialEq)]
+pub struct RustAnalyzerPackageMetaData {
+ pub rustc_private: bool,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct PackageDependency {
+ pub pkg: Package,
+ pub name: String,
+ pub kind: DepKind,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Ord)]
+pub enum DepKind {
+ /// Available to the library, binary, and dev targets in the package (but not the build script).
+ Normal,
+ /// Available only to test and bench targets (and the library target, when built with `cfg(test)`).
+ Dev,
+ /// Available only to the build script target.
+ Build,
+}
+
+impl DepKind {
+ fn iter(list: &[cargo_metadata::DepKindInfo]) -> impl Iterator<Item = Self> + '_ {
+ let mut dep_kinds = Vec::new();
+ if list.is_empty() {
+ dep_kinds.push(Self::Normal);
+ }
+ for info in list {
+ let kind = match info.kind {
+ cargo_metadata::DependencyKind::Normal => Self::Normal,
+ cargo_metadata::DependencyKind::Development => Self::Dev,
+ cargo_metadata::DependencyKind::Build => Self::Build,
+ cargo_metadata::DependencyKind::Unknown => continue,
+ };
+ dep_kinds.push(kind);
+ }
+ dep_kinds.sort_unstable();
+ dep_kinds.dedup();
+ dep_kinds.into_iter()
+ }
+}
+
+/// Information associated with a package's target
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct TargetData {
+ /// Package that provided this target
+ pub package: Package,
+ /// Name as given in the `Cargo.toml` or generated from the file name
+ pub name: String,
+ /// Path to the main source file of the target
+ pub root: AbsPathBuf,
+ /// Kind of target
+ pub kind: TargetKind,
+ /// Is this target a proc-macro
+ pub is_proc_macro: bool,
+ /// Required features of the target without which it won't build
+ pub required_features: Vec<String>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum TargetKind {
+ Bin,
+ /// Any kind of Cargo lib crate-type (dylib, rlib, proc-macro, ...).
+ Lib,
+ Example,
+ Test,
+ Bench,
+ BuildScript,
+ Other,
+}
+
+impl TargetKind {
+ fn new(kinds: &[String]) -> TargetKind {
+ for kind in kinds {
+ return match kind.as_str() {
+ "bin" => TargetKind::Bin,
+ "test" => TargetKind::Test,
+ "bench" => TargetKind::Bench,
+ "example" => TargetKind::Example,
+ "custom-build" => TargetKind::BuildScript,
+ "proc-macro" => TargetKind::Lib,
+ _ if kind.contains("lib") => TargetKind::Lib,
+ _ => continue,
+ };
+ }
+ TargetKind::Other
+ }
+}
+
+// Deserialize helper for the cargo metadata
+#[derive(Deserialize, Default)]
+struct PackageMetadata {
+ #[serde(rename = "rust-analyzer")]
+ rust_analyzer: Option<RustAnalyzerPackageMetaData>,
+}
+
+impl CargoWorkspace {
+ pub fn fetch_metadata(
+ cargo_toml: &ManifestPath,
+ current_dir: &AbsPath,
+ config: &CargoConfig,
+ progress: &dyn Fn(String),
+ ) -> Result<cargo_metadata::Metadata> {
- if let Some(target) = target {
- meta.other_options(vec![String::from("--filter-platform"), target]);
++ let targets = find_list_of_build_targets(config, cargo_toml);
+
+ let mut meta = MetadataCommand::new();
+ meta.cargo_path(toolchain::cargo());
+ meta.manifest_path(cargo_toml.to_path_buf());
+ match &config.features {
+ CargoFeatures::All => {
+ meta.features(CargoOpt::AllFeatures);
+ }
+ CargoFeatures::Selected { features, no_default_features } => {
+ if *no_default_features {
+ meta.features(CargoOpt::NoDefaultFeatures);
+ }
+ if !features.is_empty() {
+ meta.features(CargoOpt::SomeFeatures(features.clone()));
+ }
+ }
+ }
+ meta.current_dir(current_dir.as_os_str());
+
- ) -> Option<String> {
++ if !targets.is_empty() {
++ let other_options: Vec<_> = targets
++ .into_iter()
++ .flat_map(|target| ["--filter-platform".to_string(), target])
++ .collect();
++ meta.other_options(other_options);
+ }
+
+ // FIXME: Fetching metadata is a slow process, as it might require
+ // calling crates.io. We should be reporting progress here, but it's
+ // unclear whether cargo itself supports it.
+ progress("metadata".to_string());
+
+ (|| -> Result<cargo_metadata::Metadata, cargo_metadata::Error> {
+ let mut command = meta.cargo_command();
+ command.envs(&config.extra_env);
+ let output = command.output()?;
+ if !output.status.success() {
+ return Err(cargo_metadata::Error::CargoMetadata {
+ stderr: String::from_utf8(output.stderr)?,
+ });
+ }
+ let stdout = from_utf8(&output.stdout)?
+ .lines()
+ .find(|line| line.starts_with('{'))
+ .ok_or(cargo_metadata::Error::NoJson)?;
+ cargo_metadata::MetadataCommand::parse(stdout)
+ })()
+ .with_context(|| format!("Failed to run `{:?}`", meta.cargo_command()))
+ }
+
+ pub fn new(mut meta: cargo_metadata::Metadata) -> CargoWorkspace {
+ let mut pkg_by_id = FxHashMap::default();
+ let mut packages = Arena::default();
+ let mut targets = Arena::default();
+
+ let ws_members = &meta.workspace_members;
+
+ meta.packages.sort_by(|a, b| a.id.cmp(&b.id));
+ for meta_pkg in meta.packages {
+ let cargo_metadata::Package {
+ name,
+ version,
+ id,
+ source,
+ targets: meta_targets,
+ features,
+ manifest_path,
+ repository,
+ edition,
+ metadata,
+ ..
+ } = meta_pkg;
+ let meta = from_value::<PackageMetadata>(metadata).unwrap_or_default();
+ let edition = match edition {
+ cargo_metadata::Edition::E2015 => Edition::Edition2015,
+ cargo_metadata::Edition::E2018 => Edition::Edition2018,
+ cargo_metadata::Edition::E2021 => Edition::Edition2021,
+ _ => {
+ tracing::error!("Unsupported edition `{:?}`", edition);
+ Edition::CURRENT
+ }
+ };
+ // We treat packages without source as "local" packages. That includes all members of
+ // the current workspace, as well as any path dependency outside the workspace.
+ let is_local = source.is_none();
+ let is_member = ws_members.contains(&id);
+
+ let pkg = packages.alloc(PackageData {
+ id: id.repr.clone(),
+ name,
+ version,
+ manifest: AbsPathBuf::assert(manifest_path.into()).try_into().unwrap(),
+ targets: Vec::new(),
+ is_local,
+ is_member,
+ edition,
+ repository,
+ dependencies: Vec::new(),
+ features: features.into_iter().collect(),
+ active_features: Vec::new(),
+ metadata: meta.rust_analyzer.unwrap_or_default(),
+ });
+ let pkg_data = &mut packages[pkg];
+ pkg_by_id.insert(id, pkg);
+ for meta_tgt in meta_targets {
+ let cargo_metadata::Target { name, kind, required_features, src_path, .. } =
+ meta_tgt;
+ let tgt = targets.alloc(TargetData {
+ package: pkg,
+ name,
+ root: AbsPathBuf::assert(src_path.into()),
+ kind: TargetKind::new(&kind),
+ is_proc_macro: &*kind == ["proc-macro"],
+ required_features,
+ });
+ pkg_data.targets.push(tgt);
+ }
+ }
+ let resolve = meta.resolve.expect("metadata executed with deps");
+ for mut node in resolve.nodes {
+ let &source = pkg_by_id.get(&node.id).unwrap();
+ node.deps.sort_by(|a, b| a.pkg.cmp(&b.pkg));
+ let dependencies = node
+ .deps
+ .iter()
+ .flat_map(|dep| DepKind::iter(&dep.dep_kinds).map(move |kind| (dep, kind)));
+ for (dep_node, kind) in dependencies {
+ let &pkg = pkg_by_id.get(&dep_node.pkg).unwrap();
+ let dep = PackageDependency { name: dep_node.name.clone(), pkg, kind };
+ packages[source].dependencies.push(dep);
+ }
+ packages[source].active_features.extend(node.features);
+ }
+
+ let workspace_root =
+ AbsPathBuf::assert(PathBuf::from(meta.workspace_root.into_os_string()));
+
+ CargoWorkspace { packages, targets, workspace_root }
+ }
+
+ pub fn packages<'a>(&'a self) -> impl Iterator<Item = Package> + ExactSizeIterator + 'a {
+ self.packages.iter().map(|(id, _pkg)| id)
+ }
+
+ pub fn target_by_root(&self, root: &AbsPath) -> Option<Target> {
+ self.packages()
+ .filter(|&pkg| self[pkg].is_member)
+ .find_map(|pkg| self[pkg].targets.iter().find(|&&it| &self[it].root == root))
+ .copied()
+ }
+
+ pub fn workspace_root(&self) -> &AbsPath {
+ &self.workspace_root
+ }
+
+ pub fn package_flag(&self, package: &PackageData) -> String {
+ if self.is_unique(&*package.name) {
+ package.name.clone()
+ } else {
+ format!("{}:{}", package.name, package.version)
+ }
+ }
+
+ pub fn parent_manifests(&self, manifest_path: &ManifestPath) -> Option<Vec<ManifestPath>> {
+ let mut found = false;
+ let parent_manifests = self
+ .packages()
+ .filter_map(|pkg| {
+ if !found && &self[pkg].manifest == manifest_path {
+ found = true
+ }
+ self[pkg].dependencies.iter().find_map(|dep| {
+ (&self[dep.pkg].manifest == manifest_path).then(|| self[pkg].manifest.clone())
+ })
+ })
+ .collect::<Vec<ManifestPath>>();
+
+ // some packages have this pkg as a dep. return their manifests
+ if parent_manifests.len() > 0 {
+ return Some(parent_manifests);
+ }
+
+ // this pkg is inside this cargo workspace, fallback to workspace root
+ if found {
+ return Some(vec![
+ ManifestPath::try_from(self.workspace_root().join("Cargo.toml")).ok()?
+ ]);
+ }
+
+ // not in this workspace
+ None
+ }
+
+ fn is_unique(&self, name: &str) -> bool {
+ self.packages.iter().filter(|(_, v)| v.name == name).count() == 1
+ }
+}
+
++fn find_list_of_build_targets(config: &CargoConfig, cargo_toml: &ManifestPath) -> Vec<String> {
++ if let Some(target) = &config.target {
++ return [target.into()].to_vec();
++ }
++
++ let build_targets = cargo_config_build_target(cargo_toml, &config.extra_env);
++ if !build_targets.is_empty() {
++ return build_targets;
++ }
++
++ rustc_discover_host_triple(cargo_toml, &config.extra_env).into_iter().collect()
++}
++
+fn rustc_discover_host_triple(
+ cargo_toml: &ManifestPath,
+ extra_env: &FxHashMap<String, String>,
+) -> Option<String> {
+ let mut rustc = Command::new(toolchain::rustc());
+ rustc.envs(extra_env);
+ rustc.current_dir(cargo_toml.parent()).arg("-vV");
+ tracing::debug!("Discovering host platform by {:?}", rustc);
+ match utf8_stdout(rustc) {
+ Ok(stdout) => {
+ let field = "host: ";
+ let target = stdout.lines().find_map(|l| l.strip_prefix(field));
+ if let Some(target) = target {
+ Some(target.to_string())
+ } else {
+ // If we fail to resolve the host platform, it's not the end of the world.
+ tracing::info!("rustc -vV did not report host platform, got:\n{}", stdout);
+ None
+ }
+ }
+ Err(e) => {
+ tracing::warn!("Failed to discover host platform: {}", e);
+ None
+ }
+ }
+}
+
+fn cargo_config_build_target(
+ cargo_toml: &ManifestPath,
+ extra_env: &FxHashMap<String, String>,
- match utf8_stdout(cargo_config) {
- Ok(stdout) => stdout
- .strip_prefix("build.target = \"")
- .and_then(|stdout| stdout.strip_suffix('"'))
- .map(ToOwned::to_owned),
- Err(_) => None,
++) -> Vec<String> {
+ let mut cargo_config = Command::new(toolchain::cargo());
+ cargo_config.envs(extra_env);
+ cargo_config
+ .current_dir(cargo_toml.parent())
+ .args(&["-Z", "unstable-options", "config", "get", "build.target"])
+ .env("RUSTC_BOOTSTRAP", "1");
+ // if successful we receive `build.target = "target-triple"`
++ // or `build.target = ["<target 1>", ..]`
+ tracing::debug!("Discovering cargo config target by {:?}", cargo_config);
++ utf8_stdout(cargo_config).map(parse_output_cargo_config_build_target).unwrap_or_default()
++}
++
++fn parse_output_cargo_config_build_target(stdout: String) -> Vec<String> {
++ let trimmed = stdout.trim_start_matches("build.target = ").trim_matches('"');
++
++ if !trimmed.starts_with('[') {
++ return [trimmed.to_string()].to_vec();
++ }
++
++ let res = serde_json::from_str(trimmed);
++ if let Err(e) = &res {
++ tracing::warn!("Failed to parse `build.target` as an array of target: {}`", e);
+ }
++ res.unwrap_or_default()
+}
--- /dev/null
- if let Some(core) = sysroot.by_name("core") {
- sysroot.crates[alloc].deps.push(core);
+//! Loads "sysroot" crate.
+//!
+//! One confusing point here is that normally sysroot is a bunch of `.rlib`s,
+//! but we can't process `.rlib` and need source code instead. The source code
+//! is typically installed with `rustup component add rust-src` command.
+
+use std::{env, fs, iter, ops, path::PathBuf, process::Command};
+
+use anyhow::{format_err, Result};
+use la_arena::{Arena, Idx};
+use paths::{AbsPath, AbsPathBuf};
+use rustc_hash::FxHashMap;
+
+use crate::{utf8_stdout, ManifestPath};
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Sysroot {
+ root: AbsPathBuf,
+ src_root: AbsPathBuf,
+ crates: Arena<SysrootCrateData>,
+}
+
+pub(crate) type SysrootCrate = Idx<SysrootCrateData>;
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct SysrootCrateData {
+ pub name: String,
+ pub root: ManifestPath,
+ pub deps: Vec<SysrootCrate>,
+}
+
+impl ops::Index<SysrootCrate> for Sysroot {
+ type Output = SysrootCrateData;
+ fn index(&self, index: SysrootCrate) -> &SysrootCrateData {
+ &self.crates[index]
+ }
+}
+
+impl Sysroot {
+ /// Returns sysroot "root" directory, where `bin/`, `etc/`, `lib/`, `libexec/`
+ /// subfolder live, like:
+ /// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu`
+ pub fn root(&self) -> &AbsPath {
+ &self.root
+ }
+
+ /// Returns the sysroot "source" directory, where stdlib sources are located, like:
+ /// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library`
+ pub fn src_root(&self) -> &AbsPath {
+ &self.src_root
+ }
+
+ pub fn public_deps(&self) -> impl Iterator<Item = (&'static str, SysrootCrate, bool)> + '_ {
+ // core is added as a dependency before std in order to
+ // mimic rustcs dependency order
+ ["core", "alloc", "std"]
+ .into_iter()
+ .zip(iter::repeat(true))
+ .chain(iter::once(("test", false)))
+ .filter_map(move |(name, prelude)| Some((name, self.by_name(name)?, prelude)))
+ }
+
+ pub fn proc_macro(&self) -> Option<SysrootCrate> {
+ self.by_name("proc_macro")
+ }
+
+ pub fn crates(&self) -> impl Iterator<Item = SysrootCrate> + ExactSizeIterator + '_ {
+ self.crates.iter().map(|(id, _data)| id)
+ }
+}
+
+impl Sysroot {
+ /// Attempts to discover the toolchain's sysroot from the given `dir`.
+ pub fn discover(dir: &AbsPath, extra_env: &FxHashMap<String, String>) -> Result<Sysroot> {
+ tracing::debug!("discovering sysroot for {}", dir.display());
+ let sysroot_dir = discover_sysroot_dir(dir, extra_env)?;
+ let sysroot_src_dir =
+ discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env)?;
+ let res = Sysroot::load(sysroot_dir, sysroot_src_dir)?;
+ Ok(res)
+ }
+
+ pub fn discover_rustc(
+ cargo_toml: &ManifestPath,
+ extra_env: &FxHashMap<String, String>,
+ ) -> Option<ManifestPath> {
+ tracing::debug!("discovering rustc source for {}", cargo_toml.display());
+ let current_dir = cargo_toml.parent();
+ let sysroot_dir = discover_sysroot_dir(current_dir, extra_env).ok()?;
+ get_rustc_src(&sysroot_dir)
+ }
+
+ pub fn with_sysroot_dir(sysroot_dir: AbsPathBuf) -> Result<Sysroot> {
+ let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir).ok_or_else(|| {
+ format_err!("can't load standard library from sysroot {}", sysroot_dir.display())
+ })?;
+ let res = Sysroot::load(sysroot_dir, sysroot_src_dir)?;
+ Ok(res)
+ }
+
+ pub fn load(sysroot_dir: AbsPathBuf, sysroot_src_dir: AbsPathBuf) -> Result<Sysroot> {
+ let mut sysroot =
+ Sysroot { root: sysroot_dir, src_root: sysroot_src_dir, crates: Arena::default() };
+
+ for path in SYSROOT_CRATES.trim().lines() {
+ let name = path.split('/').last().unwrap();
+ let root = [format!("{}/src/lib.rs", path), format!("lib{}/lib.rs", path)]
+ .into_iter()
+ .map(|it| sysroot.src_root.join(it))
+ .filter_map(|it| ManifestPath::try_from(it).ok())
+ .find(|it| fs::metadata(it).is_ok());
+
+ if let Some(root) = root {
+ sysroot.crates.alloc(SysrootCrateData {
+ name: name.into(),
+ root,
+ deps: Vec::new(),
+ });
+ }
+ }
+
+ if let Some(std) = sysroot.by_name("std") {
+ for dep in STD_DEPS.trim().lines() {
+ if let Some(dep) = sysroot.by_name(dep) {
+ sysroot.crates[std].deps.push(dep)
+ }
+ }
+ }
+
+ if let Some(alloc) = sysroot.by_name("alloc") {
+ for dep in ALLOC_DEPS.trim().lines() {
+ if let Some(dep) = sysroot.by_name(dep) {
+ sysroot.crates[alloc].deps.push(dep)
+ }
+ }
+ }
+
+ if let Some(proc_macro) = sysroot.by_name("proc_macro") {
+ for dep in PROC_MACRO_DEPS.trim().lines() {
+ if let Some(dep) = sysroot.by_name(dep) {
+ sysroot.crates[proc_macro].deps.push(dep)
+ }
+ }
+ }
+
+ if sysroot.by_name("core").is_none() {
+ let var_note = if env::var_os("RUST_SRC_PATH").is_some() {
+ " (`RUST_SRC_PATH` might be incorrect, try unsetting it)"
+ } else {
+ ""
+ };
+ anyhow::bail!(
+ "could not find libcore in sysroot path `{}`{}",
+ sysroot.src_root.as_path().display(),
+ var_note,
+ );
+ }
+
+ Ok(sysroot)
+ }
+
+ fn by_name(&self, name: &str) -> Option<SysrootCrate> {
+ let (id, _data) = self.crates.iter().find(|(_id, data)| data.name == name)?;
+ Some(id)
+ }
+}
+
+fn discover_sysroot_dir(
+ current_dir: &AbsPath,
+ extra_env: &FxHashMap<String, String>,
+) -> Result<AbsPathBuf> {
+ let mut rustc = Command::new(toolchain::rustc());
+ rustc.envs(extra_env);
+ rustc.current_dir(current_dir).args(&["--print", "sysroot"]);
+ tracing::debug!("Discovering sysroot by {:?}", rustc);
+ let stdout = utf8_stdout(rustc)?;
+ Ok(AbsPathBuf::assert(PathBuf::from(stdout)))
+}
+
+fn discover_sysroot_src_dir(sysroot_path: &AbsPathBuf) -> Option<AbsPathBuf> {
+ if let Ok(path) = env::var("RUST_SRC_PATH") {
+ if let Ok(path) = AbsPathBuf::try_from(path.as_str()) {
+ let core = path.join("core");
+ if fs::metadata(&core).is_ok() {
+ tracing::debug!("Discovered sysroot by RUST_SRC_PATH: {}", path.display());
+ return Some(path);
+ }
+ tracing::debug!("RUST_SRC_PATH is set, but is invalid (no core: {:?}), ignoring", core);
+ } else {
+ tracing::debug!("RUST_SRC_PATH is set, but is invalid, ignoring");
+ }
+ }
+
+ get_rust_src(sysroot_path)
+}
+
+fn discover_sysroot_src_dir_or_add_component(
+ sysroot_path: &AbsPathBuf,
+ current_dir: &AbsPath,
+ extra_env: &FxHashMap<String, String>,
+) -> Result<AbsPathBuf> {
+ discover_sysroot_src_dir(sysroot_path)
+ .or_else(|| {
+ let mut rustup = Command::new(toolchain::rustup());
+ rustup.envs(extra_env);
+ rustup.current_dir(current_dir).args(&["component", "add", "rust-src"]);
+ tracing::info!("adding rust-src component by {:?}", rustup);
+ utf8_stdout(rustup).ok()?;
+ get_rust_src(sysroot_path)
+ })
+ .ok_or_else(|| {
+ format_err!(
+ "\
+can't load standard library from sysroot
+{}
+(discovered via `rustc --print sysroot`)
+try installing the Rust source the same way you installed rustc",
+ sysroot_path.display(),
+ )
+ })
+}
+
+fn get_rustc_src(sysroot_path: &AbsPath) -> Option<ManifestPath> {
+ let rustc_src = sysroot_path.join("lib/rustlib/rustc-src/rust/compiler/rustc/Cargo.toml");
+ let rustc_src = ManifestPath::try_from(rustc_src).ok()?;
+ tracing::debug!("checking for rustc source code: {}", rustc_src.display());
+ if fs::metadata(&rustc_src).is_ok() {
+ Some(rustc_src)
+ } else {
+ None
+ }
+}
+
+fn get_rust_src(sysroot_path: &AbsPath) -> Option<AbsPathBuf> {
+ let rust_src = sysroot_path.join("lib/rustlib/src/rust/library");
+ tracing::debug!("checking sysroot library: {}", rust_src.display());
+ if fs::metadata(&rust_src).is_ok() {
+ Some(rust_src)
+ } else {
+ None
+ }
+}
+
+const SYSROOT_CRATES: &str = "
+alloc
+backtrace
+core
+panic_abort
+panic_unwind
+proc_macro
+profiler_builtins
+std
+stdarch/crates/std_detect
+test
+unwind";
+
+const ALLOC_DEPS: &str = "core";
+
+const STD_DEPS: &str = "
+alloc
+panic_unwind
+panic_abort
+core
+profiler_builtins
+unwind
+std_detect
+test";
+
+const PROC_MACRO_DEPS: &str = "std";
--- /dev/null
+++ b/crates/project-model/src/tests.rs
+use std::{
+ ops::Deref,
+ path::{Path, PathBuf},
+};
+
+use base_db::{CrateGraph, FileId};
+use cfg::{CfgAtom, CfgDiff};
+use expect_test::{expect, Expect};
+use paths::{AbsPath, AbsPathBuf};
+use serde::de::DeserializeOwned;
+
+use crate::{
+ CargoWorkspace, CfgOverrides, ProjectJson, ProjectJsonData, ProjectWorkspace, Sysroot,
+ WorkspaceBuildScripts,
+};
+
+fn load_cargo(file: &str) -> CrateGraph {
+ load_cargo_with_overrides(file, CfgOverrides::default())
+}
+
+fn load_cargo_with_overrides(file: &str, cfg_overrides: CfgOverrides) -> CrateGraph {
+ let meta = get_test_json_file(file);
+ let cargo_workspace = CargoWorkspace::new(meta);
+ let project_workspace = ProjectWorkspace::Cargo {
+ cargo: cargo_workspace,
+ build_scripts: WorkspaceBuildScripts::default(),
+ sysroot: None,
+ rustc: None,
+ rustc_cfg: Vec::new(),
+ cfg_overrides,
+ toolchain: None,
+ };
+ to_crate_graph(project_workspace)
+}
+
+fn load_rust_project(file: &str) -> CrateGraph {
+ let data = get_test_json_file(file);
+ let project = rooted_project_json(data);
+ let sysroot = Some(get_fake_sysroot());
+ let project_workspace = ProjectWorkspace::Json { project, sysroot, rustc_cfg: Vec::new() };
+ to_crate_graph(project_workspace)
+}
+
+fn get_test_json_file<T: DeserializeOwned>(file: &str) -> T {
+ let file = get_test_path(file);
+ let data = std::fs::read_to_string(file).unwrap();
+ let mut json = data.parse::<serde_json::Value>().unwrap();
+ fixup_paths(&mut json);
+ return serde_json::from_value(json).unwrap();
+
+ fn fixup_paths(val: &mut serde_json::Value) {
+ match val {
+ serde_json::Value::String(s) => replace_root(s, true),
+ serde_json::Value::Array(vals) => vals.iter_mut().for_each(fixup_paths),
+ serde_json::Value::Object(kvals) => kvals.values_mut().for_each(fixup_paths),
+ serde_json::Value::Null | serde_json::Value::Bool(_) | serde_json::Value::Number(_) => {
+ }
+ }
+ }
+}
+
+fn replace_root(s: &mut String, direction: bool) {
+ if direction {
+ let root = if cfg!(windows) { r#"C:\\ROOT\"# } else { "/ROOT/" };
+ *s = s.replace("$ROOT$", root)
+ } else {
+ let root = if cfg!(windows) { r#"C:\\\\ROOT\\"# } else { "/ROOT/" };
+ *s = s.replace(root, "$ROOT$")
+ }
+}
+
+fn get_test_path(file: &str) -> PathBuf {
+ let base = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+ base.join("test_data").join(file)
+}
+
+fn get_fake_sysroot() -> Sysroot {
+ let sysroot_path = get_test_path("fake-sysroot");
+ // there's no `libexec/` directory with a `proc-macro-srv` binary in that
+ // fake sysroot, so we give them both the same path:
+ let sysroot_dir = AbsPathBuf::assert(sysroot_path);
+ let sysroot_src_dir = sysroot_dir.clone();
+ Sysroot::load(sysroot_dir, sysroot_src_dir).unwrap()
+}
+
+fn rooted_project_json(data: ProjectJsonData) -> ProjectJson {
+ let mut root = "$ROOT$".to_string();
+ replace_root(&mut root, true);
+ let path = Path::new(&root);
+ let base = AbsPath::assert(path);
+ ProjectJson::new(base, data)
+}
+
+fn to_crate_graph(project_workspace: ProjectWorkspace) -> CrateGraph {
+ project_workspace.to_crate_graph(
+ &mut |_, _| Ok(Vec::new()),
+ &mut {
+ let mut counter = 0;
+ move |_path| {
+ counter += 1;
+ Some(FileId(counter))
+ }
+ },
+ &Default::default(),
+ )
+}
+
+fn check_crate_graph(crate_graph: CrateGraph, expect: Expect) {
+ let mut crate_graph = format!("{:#?}", crate_graph);
+ replace_root(&mut crate_graph, false);
+ expect.assert_eq(&crate_graph);
+}
+
+#[test]
+fn cargo_hello_world_project_model_with_wildcard_overrides() {
+ let cfg_overrides = CfgOverrides::Wildcard(
+ CfgDiff::new(Vec::new(), vec![CfgAtom::Flag("test".into())]).unwrap(),
+ );
+ let crate_graph = load_cargo_with_overrides("hello-world-metadata.json", cfg_overrides);
+ check_crate_graph(
+ crate_graph,
+ expect![[r#"
+ CrateGraph {
+ arena: {
+ CrateId(
+ 0,
+ ): CrateData {
+ root_file_id: FileId(
+ 1,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello-world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ name: Some(
+ "hello-world",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 1,
+ ): CrateData {
+ root_file_id: FileId(
+ 2,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello-world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ name: Some(
+ "hello-world",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 2,
+ ): CrateData {
+ root_file_id: FileId(
+ 3,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "an_example",
+ ),
+ canonical_name: "an-example",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ name: Some(
+ "hello-world",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 3,
+ ): CrateData {
+ root_file_id: FileId(
+ 4,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "it",
+ ),
+ canonical_name: "it",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ name: Some(
+ "hello-world",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 4,
+ ): CrateData {
+ root_file_id: FileId(
+ 5,
+ ),
+ edition: Edition2015,
+ version: Some(
+ "0.2.98",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "libc",
+ ),
+ canonical_name: "libc",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "feature=default",
+ "feature=std",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "feature=align",
+ "feature=const-extern-fn",
+ "feature=default",
+ "feature=extra_traits",
+ "feature=rustc-dep-of-std",
+ "feature=std",
+ "feature=use_std",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ "CARGO_PKG_VERSION": "0.2.98",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "libc",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "libc",
+ "CARGO_PKG_VERSION_PATCH": "98",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "2",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: Some(
+ "https://github.com/rust-lang/libc",
+ ),
+ name: Some(
+ "libc",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ },
+ }"#]],
+ )
+}
+
+#[test]
+fn cargo_hello_world_project_model_with_selective_overrides() {
+ let cfg_overrides = {
+ CfgOverrides::Selective(
+ std::iter::once((
+ "libc".to_owned(),
+ CfgDiff::new(Vec::new(), vec![CfgAtom::Flag("test".into())]).unwrap(),
+ ))
+ .collect(),
+ )
+ };
+ let crate_graph = load_cargo_with_overrides("hello-world-metadata.json", cfg_overrides);
+ check_crate_graph(
+ crate_graph,
+ expect![[r#"
+ CrateGraph {
+ arena: {
+ CrateId(
+ 0,
+ ): CrateData {
+ root_file_id: FileId(
+ 1,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello-world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ name: Some(
+ "hello-world",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 1,
+ ): CrateData {
+ root_file_id: FileId(
+ 2,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello-world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ name: Some(
+ "hello-world",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 2,
+ ): CrateData {
+ root_file_id: FileId(
+ 3,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "an_example",
+ ),
+ canonical_name: "an-example",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ name: Some(
+ "hello-world",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 3,
+ ): CrateData {
+ root_file_id: FileId(
+ 4,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "it",
+ ),
+ canonical_name: "it",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ name: Some(
+ "hello-world",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 4,
+ ): CrateData {
+ root_file_id: FileId(
+ 5,
+ ),
+ edition: Edition2015,
+ version: Some(
+ "0.2.98",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "libc",
+ ),
+ canonical_name: "libc",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "feature=default",
+ "feature=std",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "feature=align",
+ "feature=const-extern-fn",
+ "feature=default",
+ "feature=extra_traits",
+ "feature=rustc-dep-of-std",
+ "feature=std",
+ "feature=use_std",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ "CARGO_PKG_VERSION": "0.2.98",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "libc",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "libc",
+ "CARGO_PKG_VERSION_PATCH": "98",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "2",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: Some(
+ "https://github.com/rust-lang/libc",
+ ),
+ name: Some(
+ "libc",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ },
+ }"#]],
+ )
+}
+
+#[test]
+fn cargo_hello_world_project_model() {
+ let crate_graph = load_cargo("hello-world-metadata.json");
+ check_crate_graph(
+ crate_graph,
+ expect![[r#"
+ CrateGraph {
+ arena: {
+ CrateId(
+ 0,
+ ): CrateData {
+ root_file_id: FileId(
+ 1,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello-world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ name: Some(
+ "hello-world",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 1,
+ ): CrateData {
+ root_file_id: FileId(
+ 2,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello-world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ name: Some(
+ "hello-world",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 2,
+ ): CrateData {
+ root_file_id: FileId(
+ 3,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "an_example",
+ ),
+ canonical_name: "an-example",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ name: Some(
+ "hello-world",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 3,
+ ): CrateData {
+ root_file_id: FileId(
+ 4,
+ ),
+ edition: Edition2018,
+ version: Some(
+ "0.1.0",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "it",
+ ),
+ canonical_name: "it",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "test",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$hello-world",
+ "CARGO_PKG_VERSION": "0.1.0",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "hello_world",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "hello-world",
+ "CARGO_PKG_VERSION_PATCH": "0",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "1",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "hello_world",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 4,
+ ),
+ name: CrateName(
+ "libc",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: None,
+ name: Some(
+ "hello-world",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ CrateId(
+ 4,
+ ): CrateData {
+ root_file_id: FileId(
+ 5,
+ ),
+ edition: Edition2015,
+ version: Some(
+ "0.2.98",
+ ),
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "libc",
+ ),
+ canonical_name: "libc",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "feature=default",
+ "feature=std",
+ ],
+ ),
+ potential_cfg_options: CfgOptions(
+ [
+ "debug_assertions",
+ "feature=align",
+ "feature=const-extern-fn",
+ "feature=default",
+ "feature=extra_traits",
+ "feature=rustc-dep-of-std",
+ "feature=std",
+ "feature=use_std",
+ ],
+ ),
+ env: Env {
+ entries: {
+ "CARGO_PKG_LICENSE": "",
+ "CARGO_PKG_VERSION_MAJOR": "0",
+ "CARGO_MANIFEST_DIR": "$ROOT$.cargo/registry/src/github.com-1ecc6299db9ec823/libc-0.2.98",
+ "CARGO_PKG_VERSION": "0.2.98",
+ "CARGO_PKG_AUTHORS": "",
+ "CARGO_CRATE_NAME": "libc",
+ "CARGO_PKG_LICENSE_FILE": "",
+ "CARGO_PKG_HOMEPAGE": "",
+ "CARGO_PKG_DESCRIPTION": "",
+ "CARGO_PKG_NAME": "libc",
+ "CARGO_PKG_VERSION_PATCH": "98",
+ "CARGO": "cargo",
+ "CARGO_PKG_REPOSITORY": "",
+ "CARGO_PKG_VERSION_MINOR": "2",
+ "CARGO_PKG_VERSION_PRE": "",
+ },
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "crate has not (yet) been built",
+ ),
+ origin: CratesIo {
+ repo: Some(
+ "https://github.com/rust-lang/libc",
+ ),
+ name: Some(
+ "libc",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ },
+ }"#]],
+ )
+}
+
+#[test]
+fn rust_project_hello_world_project_model() {
+ let crate_graph = load_rust_project("hello-world-project.json");
+ check_crate_graph(
+ crate_graph,
+ expect![[r#"
+ CrateGraph {
+ arena: {
+ CrateId(
+ 0,
+ ): CrateData {
+ root_file_id: FileId(
+ 1,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "alloc",
+ ),
+ canonical_name: "alloc",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 1,
+ ),
+ name: CrateName(
+ "core",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Alloc,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 1,
+ ): CrateData {
+ root_file_id: FileId(
+ 2,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "core",
+ ),
+ canonical_name: "core",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Core,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 2,
+ ): CrateData {
+ root_file_id: FileId(
+ 3,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "panic_abort",
+ ),
+ canonical_name: "panic_abort",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 3,
+ ): CrateData {
+ root_file_id: FileId(
+ 4,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "panic_unwind",
+ ),
+ canonical_name: "panic_unwind",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 4,
+ ): CrateData {
+ root_file_id: FileId(
+ 5,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "proc_macro",
+ ),
+ canonical_name: "proc_macro",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 6,
+ ),
+ name: CrateName(
+ "std",
+ ),
+ prelude: true,
+ },
+ ],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 5,
+ ): CrateData {
+ root_file_id: FileId(
+ 6,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "profiler_builtins",
+ ),
+ canonical_name: "profiler_builtins",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 6,
+ ): CrateData {
+ root_file_id: FileId(
+ 7,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "std",
+ ),
+ canonical_name: "std",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "alloc",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
- "core",
++ 3,
+ ),
+ name: CrateName(
- 3,
++ "panic_unwind",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 2,
+ ),
+ name: CrateName(
+ "panic_abort",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
- "panic_unwind",
++ 1,
+ ),
+ name: CrateName(
- 7,
++ "core",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 5,
+ ),
+ name: CrateName(
+ "profiler_builtins",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
- "std_detect",
++ 9,
+ ),
+ name: CrateName(
- 8,
++ "unwind",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
- "term",
++ 7,
+ ),
+ name: CrateName(
- 9,
++ "std_detect",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
- Dependency {
- crate_id: CrateId(
- 10,
- ),
- name: CrateName(
- "unwind",
- ),
- prelude: true,
- },
++ 8,
+ ),
+ name: CrateName(
+ "test",
+ ),
+ prelude: true,
+ },
- display_name: Some(
- CrateDisplayName {
- crate_name: CrateName(
- "term",
- ),
- canonical_name: "term",
- },
- ),
- cfg_options: CfgOptions(
- [],
- ),
- potential_cfg_options: CfgOptions(
- [],
- ),
- env: Env {
- entries: {},
- },
- dependencies: [],
- proc_macro: Err(
- "no proc macro loaded for sysroot crate",
- ),
- origin: Lang(
- Other,
- ),
- is_proc_macro: false,
- },
- CrateId(
- 9,
- ): CrateData {
- root_file_id: FileId(
- 10,
- ),
- edition: Edition2018,
- version: None,
+ ],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Std,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 7,
+ ): CrateData {
+ root_file_id: FileId(
+ 8,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "std_detect",
+ ),
+ canonical_name: "std_detect",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
+ 8,
+ ): CrateData {
+ root_file_id: FileId(
+ 9,
+ ),
+ edition: Edition2018,
+ version: None,
- 10,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "test",
+ ),
+ canonical_name: "test",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Test,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
- 11,
++ 9,
+ ): CrateData {
+ root_file_id: FileId(
- 11,
++ 10,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "unwind",
+ ),
+ canonical_name: "unwind",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [],
+ proc_macro: Err(
+ "no proc macro loaded for sysroot crate",
+ ),
+ origin: Lang(
+ Other,
+ ),
+ is_proc_macro: false,
+ },
+ CrateId(
- 12,
++ 10,
+ ): CrateData {
+ root_file_id: FileId(
- 9,
++ 11,
+ ),
+ edition: Edition2018,
+ version: None,
+ display_name: Some(
+ CrateDisplayName {
+ crate_name: CrateName(
+ "hello_world",
+ ),
+ canonical_name: "hello_world",
+ },
+ ),
+ cfg_options: CfgOptions(
+ [],
+ ),
+ potential_cfg_options: CfgOptions(
+ [],
+ ),
+ env: Env {
+ entries: {},
+ },
+ dependencies: [
+ Dependency {
+ crate_id: CrateId(
+ 1,
+ ),
+ name: CrateName(
+ "core",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 0,
+ ),
+ name: CrateName(
+ "alloc",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
+ 6,
+ ),
+ name: CrateName(
+ "std",
+ ),
+ prelude: true,
+ },
+ Dependency {
+ crate_id: CrateId(
++ 8,
+ ),
+ name: CrateName(
+ "test",
+ ),
+ prelude: false,
+ },
+ ],
+ proc_macro: Err(
+ "no proc macro dylib present",
+ ),
+ origin: CratesIo {
+ repo: None,
+ name: Some(
+ "hello_world",
+ ),
+ },
+ is_proc_macro: false,
+ },
+ },
+ }"#]],
+ );
+}
+
+#[test]
+fn rust_project_is_proc_macro_has_proc_macro_dep() {
+ let crate_graph = load_rust_project("is-proc-macro-project.json");
+ // Since the project only defines one crate (outside the sysroot crates),
+ // it should be the one with the biggest Id.
+ let crate_id = crate_graph.iter().max().unwrap();
+ let crate_data = &crate_graph[crate_id];
+ // Assert that the project crate with `is_proc_macro` has a dependency
+ // on the proc_macro sysroot crate.
+ crate_data.dependencies.iter().find(|&dep| dep.name.deref() == "proc_macro").unwrap();
+}
--- /dev/null
- rustc_cfg.clone(),
- cfg_overrides,
+//! Handles lowering of build-system specific workspace information (`cargo
+//! metadata` or `rust-project.json`) into representation stored in the salsa
+//! database -- `CrateGraph`.
+
+use std::{collections::VecDeque, fmt, fs, process::Command, sync::Arc};
+
+use anyhow::{format_err, Context, Result};
+use base_db::{
+ CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env,
+ FileId, LangCrateOrigin, ProcMacroLoadResult,
+};
+use cfg::{CfgDiff, CfgOptions};
+use paths::{AbsPath, AbsPathBuf};
+use rustc_hash::{FxHashMap, FxHashSet};
+use semver::Version;
+use stdx::{always, hash::NoHashHashMap};
+
+use crate::{
+ build_scripts::BuildScriptOutput,
+ cargo_workspace::{DepKind, PackageData, RustcSource},
+ cfg_flag::CfgFlag,
+ rustc_cfg,
+ sysroot::SysrootCrate,
+ utf8_stdout, CargoConfig, CargoWorkspace, InvocationStrategy, ManifestPath, Package,
+ ProjectJson, ProjectManifest, Sysroot, TargetKind, WorkspaceBuildScripts,
+};
+
+/// A set of cfg-overrides per crate.
+///
+/// `Wildcard(..)` is useful e.g. disabling `#[cfg(test)]` on all crates,
+/// without having to first obtain a list of all crates.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum CfgOverrides {
+ /// A single global set of overrides matching all crates.
+ Wildcard(CfgDiff),
+ /// A set of overrides matching specific crates.
+ Selective(FxHashMap<String, CfgDiff>),
+}
+
+impl Default for CfgOverrides {
+ fn default() -> Self {
+ Self::Selective(FxHashMap::default())
+ }
+}
+
+impl CfgOverrides {
+ pub fn len(&self) -> usize {
+ match self {
+ CfgOverrides::Wildcard(_) => 1,
+ CfgOverrides::Selective(hash_map) => hash_map.len(),
+ }
+ }
+}
+
+/// `PackageRoot` describes a package root folder.
+/// Which may be an external dependency, or a member of
+/// the current workspace.
+#[derive(Debug, Clone, Eq, PartialEq, Hash)]
+pub struct PackageRoot {
+ /// Is from the local filesystem and may be edited
+ pub is_local: bool,
+ pub include: Vec<AbsPathBuf>,
+ pub exclude: Vec<AbsPathBuf>,
+}
+
+#[derive(Clone, Eq, PartialEq)]
+pub enum ProjectWorkspace {
+ /// Project workspace was discovered by running `cargo metadata` and `rustc --print sysroot`.
+ Cargo {
+ cargo: CargoWorkspace,
+ build_scripts: WorkspaceBuildScripts,
+ sysroot: Option<Sysroot>,
+ rustc: Option<CargoWorkspace>,
+ /// Holds cfg flags for the current target. We get those by running
+ /// `rustc --print cfg`.
+ ///
+ /// FIXME: make this a per-crate map, as, eg, build.rs might have a
+ /// different target.
+ rustc_cfg: Vec<CfgFlag>,
+ cfg_overrides: CfgOverrides,
+ toolchain: Option<Version>,
+ },
+ /// Project workspace was manually specified using a `rust-project.json` file.
+ Json { project: ProjectJson, sysroot: Option<Sysroot>, rustc_cfg: Vec<CfgFlag> },
+
+ // FIXME: The primary limitation of this approach is that the set of detached files needs to be fixed at the beginning.
+ // That's not the end user experience we should strive for.
+ // Ideally, you should be able to just open a random detached file in existing cargo projects, and get the basic features working.
+ // That needs some changes on the salsa-level though.
+ // In particular, we should split the unified CrateGraph (which currently has maximal durability) into proper crate graph, and a set of ad hoc roots (with minimal durability).
+ // Then, we need to hide the graph behind the queries such that most queries look only at the proper crate graph, and fall back to ad hoc roots only if there's no results.
+ // After this, we should be able to tweak the logic in reload.rs to add newly opened files, which don't belong to any existing crates, to the set of the detached files.
+ // //
+ /// Project with a set of disjoint files, not belonging to any particular workspace.
+ /// Backed by basic sysroot crates for basic completion and highlighting.
+ DetachedFiles { files: Vec<AbsPathBuf>, sysroot: Sysroot, rustc_cfg: Vec<CfgFlag> },
+}
+
+impl fmt::Debug for ProjectWorkspace {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Make sure this isn't too verbose.
+ match self {
+ ProjectWorkspace::Cargo {
+ cargo,
+ build_scripts: _,
+ sysroot,
+ rustc,
+ rustc_cfg,
+ cfg_overrides,
+ toolchain,
+ } => f
+ .debug_struct("Cargo")
+ .field("root", &cargo.workspace_root().file_name())
+ .field("n_packages", &cargo.packages().len())
+ .field("sysroot", &sysroot.is_some())
+ .field(
+ "n_rustc_compiler_crates",
+ &rustc.as_ref().map_or(0, |rc| rc.packages().len()),
+ )
+ .field("n_rustc_cfg", &rustc_cfg.len())
+ .field("n_cfg_overrides", &cfg_overrides.len())
+ .field("toolchain", &toolchain)
+ .finish(),
+ ProjectWorkspace::Json { project, sysroot, rustc_cfg } => {
+ let mut debug_struct = f.debug_struct("Json");
+ debug_struct.field("n_crates", &project.n_crates());
+ if let Some(sysroot) = sysroot {
+ debug_struct.field("n_sysroot_crates", &sysroot.crates().len());
+ }
+ debug_struct.field("n_rustc_cfg", &rustc_cfg.len());
+ debug_struct.finish()
+ }
+ ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => f
+ .debug_struct("DetachedFiles")
+ .field("n_files", &files.len())
+ .field("n_sysroot_crates", &sysroot.crates().len())
+ .field("n_rustc_cfg", &rustc_cfg.len())
+ .finish(),
+ }
+ }
+}
+
+impl ProjectWorkspace {
+ pub fn load(
+ manifest: ProjectManifest,
+ config: &CargoConfig,
+ progress: &dyn Fn(String),
+ ) -> Result<ProjectWorkspace> {
+ let res = match manifest {
+ ProjectManifest::ProjectJson(project_json) => {
+ let file = fs::read_to_string(&project_json).with_context(|| {
+ format!("Failed to read json file {}", project_json.display())
+ })?;
+ let data = serde_json::from_str(&file).with_context(|| {
+ format!("Failed to deserialize json file {}", project_json.display())
+ })?;
+ let project_location = project_json.parent().to_path_buf();
+ let project_json = ProjectJson::new(&project_location, data);
+ ProjectWorkspace::load_inline(
+ project_json,
+ config.target.as_deref(),
+ &config.extra_env,
+ )?
+ }
+ ProjectManifest::CargoToml(cargo_toml) => {
+ let cargo_version = utf8_stdout({
+ let mut cmd = Command::new(toolchain::cargo());
+ cmd.envs(&config.extra_env);
+ cmd.arg("--version");
+ cmd
+ })?;
+ let toolchain = cargo_version
+ .get("cargo ".len()..)
+ .and_then(|it| Version::parse(it.split_whitespace().next()?).ok());
+
+ let meta = CargoWorkspace::fetch_metadata(
+ &cargo_toml,
+ cargo_toml.parent(),
+ config,
+ progress,
+ )
+ .with_context(|| {
+ format!(
+ "Failed to read Cargo metadata from Cargo.toml file {}, {:?}",
+ cargo_toml.display(),
+ toolchain
+ )
+ })?;
+ let cargo = CargoWorkspace::new(meta);
+
+ let sysroot = match &config.sysroot {
+ Some(RustcSource::Path(path)) => {
+ Some(Sysroot::with_sysroot_dir(path.clone()).with_context(|| {
+ format!(
+ "Failed to find sysroot for Cargo.toml file {}.",
+ cargo_toml.display()
+ )
+ })?)
+ }
+ Some(RustcSource::Discover) => Some(
+ Sysroot::discover(cargo_toml.parent(), &config.extra_env).with_context(
+ || {
+ format!(
+ "Failed to find sysroot for Cargo.toml file {}. Is rust-src installed?",
+ cargo_toml.display()
+ )
+ },
+ )?,
+ ),
+ None => None,
+ };
+ if let Some(sysroot) = &sysroot {
+ tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot");
+ }
+
+ let rustc_dir = match &config.rustc_source {
+ Some(RustcSource::Path(path)) => ManifestPath::try_from(path.clone()).ok(),
+ Some(RustcSource::Discover) => {
+ Sysroot::discover_rustc(&cargo_toml, &config.extra_env)
+ }
+ None => None,
+ };
+ if let Some(rustc_dir) = &rustc_dir {
+ tracing::info!(rustc_dir = %rustc_dir.display(), "Using rustc source");
+ }
+
+ let rustc = match rustc_dir {
+ Some(rustc_dir) => Some({
+ let meta = CargoWorkspace::fetch_metadata(
+ &rustc_dir,
+ cargo_toml.parent(),
+ config,
+ progress,
+ )
+ .with_context(|| {
+ "Failed to read Cargo metadata for Rust sources".to_string()
+ })?;
+ CargoWorkspace::new(meta)
+ }),
+ None => None,
+ };
+
+ let rustc_cfg =
+ rustc_cfg::get(Some(&cargo_toml), config.target.as_deref(), &config.extra_env);
+
+ let cfg_overrides = config.cfg_overrides();
+ ProjectWorkspace::Cargo {
+ cargo,
+ build_scripts: WorkspaceBuildScripts::default(),
+ sysroot,
+ rustc,
+ rustc_cfg,
+ cfg_overrides,
+ toolchain,
+ }
+ }
+ };
+
+ Ok(res)
+ }
+
+ pub fn load_inline(
+ project_json: ProjectJson,
+ target: Option<&str>,
+ extra_env: &FxHashMap<String, String>,
+ ) -> Result<ProjectWorkspace> {
+ let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) {
+ (Some(sysroot), Some(sysroot_src)) => Some(Sysroot::load(sysroot, sysroot_src)?),
+ (Some(sysroot), None) => {
+ // assume sysroot is structured like rustup's and guess `sysroot_src`
+ let sysroot_src =
+ sysroot.join("lib").join("rustlib").join("src").join("rust").join("library");
+
+ Some(Sysroot::load(sysroot, sysroot_src)?)
+ }
+ (None, Some(sysroot_src)) => {
+ // assume sysroot is structured like rustup's and guess `sysroot`
+ let mut sysroot = sysroot_src.clone();
+ for _ in 0..5 {
+ sysroot.pop();
+ }
+ Some(Sysroot::load(sysroot, sysroot_src)?)
+ }
+ (None, None) => None,
+ };
+ if let Some(sysroot) = &sysroot {
+ tracing::info!(src_root = %sysroot.src_root().display(), root = %sysroot.root().display(), "Using sysroot");
+ }
+
+ let rustc_cfg = rustc_cfg::get(None, target, extra_env);
+ Ok(ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg })
+ }
+
+ pub fn load_detached_files(detached_files: Vec<AbsPathBuf>) -> Result<ProjectWorkspace> {
+ let sysroot = Sysroot::discover(
+ detached_files
+ .first()
+ .and_then(|it| it.parent())
+ .ok_or_else(|| format_err!("No detached files to load"))?,
+ &Default::default(),
+ )?;
+ let rustc_cfg = rustc_cfg::get(None, None, &Default::default());
+ Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg })
+ }
+
+ /// Runs the build scripts for this [`ProjectWorkspace`].
+ pub fn run_build_scripts(
+ &self,
+ config: &CargoConfig,
+ progress: &dyn Fn(String),
+ ) -> Result<WorkspaceBuildScripts> {
+ match self {
+ ProjectWorkspace::Cargo { cargo, toolchain, .. } => {
+ WorkspaceBuildScripts::run_for_workspace(config, cargo, progress, toolchain)
+ .with_context(|| {
+ format!(
+ "Failed to run build scripts for {}",
+ &cargo.workspace_root().display()
+ )
+ })
+ }
+ ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFiles { .. } => {
+ Ok(WorkspaceBuildScripts::default())
+ }
+ }
+ }
+
+ /// Runs the build scripts for the given [`ProjectWorkspace`]s. Depending on the invocation
+ /// strategy this may run a single build process for all project workspaces.
+ pub fn run_all_build_scripts(
+ workspaces: &[ProjectWorkspace],
+ config: &CargoConfig,
+ progress: &dyn Fn(String),
+ ) -> Vec<Result<WorkspaceBuildScripts>> {
+ if matches!(config.invocation_strategy, InvocationStrategy::PerWorkspace)
+ || config.run_build_script_command.is_none()
+ {
+ return workspaces.iter().map(|it| it.run_build_scripts(config, progress)).collect();
+ }
+
+ let cargo_ws: Vec<_> = workspaces
+ .iter()
+ .filter_map(|it| match it {
+ ProjectWorkspace::Cargo { cargo, .. } => Some(cargo),
+ _ => None,
+ })
+ .collect();
+ let ref mut outputs = match WorkspaceBuildScripts::run_once(config, &cargo_ws, progress) {
+ Ok(it) => Ok(it.into_iter()),
+ // io::Error is not Clone?
+ Err(e) => Err(Arc::new(e)),
+ };
+
+ workspaces
+ .iter()
+ .map(|it| match it {
+ ProjectWorkspace::Cargo { cargo, .. } => match outputs {
+ Ok(outputs) => Ok(outputs.next().unwrap()),
+ Err(e) => Err(e.clone()).with_context(|| {
+ format!(
+ "Failed to run build scripts for {}",
+ &cargo.workspace_root().display()
+ )
+ }),
+ },
+ _ => Ok(WorkspaceBuildScripts::default()),
+ })
+ .collect()
+ }
+
+ pub fn set_build_scripts(&mut self, bs: WorkspaceBuildScripts) {
+ match self {
+ ProjectWorkspace::Cargo { build_scripts, .. } => *build_scripts = bs,
+ _ => {
+ always!(bs == WorkspaceBuildScripts::default());
+ }
+ }
+ }
+
++ pub fn find_sysroot_proc_macro_srv(&self) -> Option<AbsPathBuf> {
++ match self {
++ ProjectWorkspace::Cargo { sysroot: Some(sysroot), .. }
++ | ProjectWorkspace::Json { sysroot: Some(sysroot), .. } => {
++ let standalone_server_name =
++ format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX);
++ ["libexec", "lib"]
++ .into_iter()
++ .map(|segment| sysroot.root().join(segment).join(&standalone_server_name))
++ .find(|server_path| std::fs::metadata(&server_path).is_ok())
++ }
++ _ => None,
++ }
++ }
++
+ /// Returns the roots for the current `ProjectWorkspace`
+ /// The return type contains the path and whether or not
+ /// the root is a member of the current workspace
+ pub fn to_roots(&self) -> Vec<PackageRoot> {
+ let mk_sysroot = |sysroot: Option<&Sysroot>| {
+ sysroot.map(|sysroot| PackageRoot {
+ is_local: false,
+ include: vec![sysroot.src_root().to_path_buf()],
+ exclude: Vec::new(),
+ })
+ };
+ match self {
+ ProjectWorkspace::Json { project, sysroot, rustc_cfg: _ } => project
+ .crates()
+ .map(|(_, krate)| PackageRoot {
+ is_local: krate.is_workspace_member,
+ include: krate.include.clone(),
+ exclude: krate.exclude.clone(),
+ })
+ .collect::<FxHashSet<_>>()
+ .into_iter()
+ .chain(mk_sysroot(sysroot.as_ref()))
+ .collect::<Vec<_>>(),
+ ProjectWorkspace::Cargo {
+ cargo,
+ sysroot,
+ rustc,
+ rustc_cfg: _,
+ cfg_overrides: _,
+ build_scripts,
+ toolchain: _,
+ } => {
+ cargo
+ .packages()
+ .map(|pkg| {
+ let is_local = cargo[pkg].is_local;
+ let pkg_root = cargo[pkg].manifest.parent().to_path_buf();
+
+ let mut include = vec![pkg_root.clone()];
+ let out_dir =
+ build_scripts.get_output(pkg).and_then(|it| it.out_dir.clone());
+ include.extend(out_dir);
+
+ // In case target's path is manually set in Cargo.toml to be
+ // outside the package root, add its parent as an extra include.
+ // An example of this situation would look like this:
+ //
+ // ```toml
+ // [lib]
+ // path = "../../src/lib.rs"
+ // ```
+ let extra_targets = cargo[pkg]
+ .targets
+ .iter()
+ .filter(|&&tgt| cargo[tgt].kind == TargetKind::Lib)
+ .filter_map(|&tgt| cargo[tgt].root.parent())
+ .map(|tgt| tgt.normalize().to_path_buf())
+ .filter(|path| !path.starts_with(&pkg_root));
+ include.extend(extra_targets);
+
+ let mut exclude = vec![pkg_root.join(".git")];
+ if is_local {
+ exclude.push(pkg_root.join("target"));
+ } else {
+ exclude.push(pkg_root.join("tests"));
+ exclude.push(pkg_root.join("examples"));
+ exclude.push(pkg_root.join("benches"));
+ }
+ PackageRoot { is_local, include, exclude }
+ })
+ .chain(mk_sysroot(sysroot.as_ref()))
+ .chain(rustc.iter().flat_map(|rustc| {
+ rustc.packages().map(move |krate| PackageRoot {
+ is_local: false,
+ include: vec![rustc[krate].manifest.parent().to_path_buf()],
+ exclude: Vec::new(),
+ })
+ }))
+ .collect()
+ }
+ ProjectWorkspace::DetachedFiles { files, sysroot, .. } => files
+ .iter()
+ .map(|detached_file| PackageRoot {
+ is_local: true,
+ include: vec![detached_file.clone()],
+ exclude: Vec::new(),
+ })
+ .chain(mk_sysroot(Some(sysroot)))
+ .collect(),
+ }
+ }
+
+ pub fn n_packages(&self) -> usize {
+ match self {
+ ProjectWorkspace::Json { project, .. } => project.n_crates(),
+ ProjectWorkspace::Cargo { cargo, sysroot, rustc, .. } => {
+ let rustc_package_len = rustc.as_ref().map_or(0, |it| it.packages().len());
+ let sysroot_package_len = sysroot.as_ref().map_or(0, |it| it.crates().len());
+ cargo.packages().len() + sysroot_package_len + rustc_package_len
+ }
+ ProjectWorkspace::DetachedFiles { sysroot, files, .. } => {
+ sysroot.crates().len() + files.len()
+ }
+ }
+ }
+
+ pub fn to_crate_graph(
+ &self,
+ load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
+ load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+ extra_env: &FxHashMap<String, String>,
+ ) -> CrateGraph {
+ let _p = profile::span("ProjectWorkspace::to_crate_graph");
+
+ let mut crate_graph = match self {
+ ProjectWorkspace::Json { project, sysroot, rustc_cfg } => project_json_to_crate_graph(
+ rustc_cfg.clone(),
+ load_proc_macro,
+ load,
+ project,
+ sysroot,
+ extra_env,
+ ),
+ ProjectWorkspace::Cargo {
+ cargo,
+ sysroot,
+ rustc,
+ rustc_cfg,
+ cfg_overrides,
+ build_scripts,
+ toolchain: _,
+ } => cargo_to_crate_graph(
- build_scripts,
+ load_proc_macro,
+ load,
++ rustc,
+ cargo,
- rustc,
+ sysroot.as_ref(),
- public_deps.add(from, &mut crate_graph);
++ rustc_cfg.clone(),
++ cfg_overrides,
++ build_scripts,
+ ),
+ ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => {
+ detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot)
+ }
+ };
+ if crate_graph.patch_cfg_if() {
+ tracing::debug!("Patched std to depend on cfg-if")
+ } else {
+ tracing::debug!("Did not patch std to depend on cfg-if")
+ }
+ crate_graph
+ }
+}
+
+fn project_json_to_crate_graph(
+ rustc_cfg: Vec<CfgFlag>,
+ load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
+ load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+ project: &ProjectJson,
+ sysroot: &Option<Sysroot>,
+ extra_env: &FxHashMap<String, String>,
+) -> CrateGraph {
+ let mut crate_graph = CrateGraph::default();
+ let sysroot_deps = sysroot
+ .as_ref()
+ .map(|sysroot| sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load));
+
+ let mut cfg_cache: FxHashMap<&str, Vec<CfgFlag>> = FxHashMap::default();
+ let crates: NoHashHashMap<CrateId, CrateId> = project
+ .crates()
+ .filter_map(|(crate_id, krate)| {
+ let file_path = &krate.root_module;
+ let file_id = load(file_path)?;
+ Some((crate_id, krate, file_id))
+ })
+ .map(|(crate_id, krate, file_id)| {
+ let env = krate.env.clone().into_iter().collect();
+ let proc_macro = match krate.proc_macro_dylib_path.clone() {
+ Some(it) => load_proc_macro(
+ krate.display_name.as_ref().map(|it| it.canonical_name()).unwrap_or(""),
+ &it,
+ ),
+ None => Err("no proc macro dylib present".into()),
+ };
+
+ let target_cfgs = match krate.target.as_deref() {
+ Some(target) => cfg_cache
+ .entry(target)
+ .or_insert_with(|| rustc_cfg::get(None, Some(target), extra_env)),
+ None => &rustc_cfg,
+ };
+
+ let mut cfg_options = CfgOptions::default();
+ cfg_options.extend(target_cfgs.iter().chain(krate.cfg.iter()).cloned());
+ (
+ crate_id,
+ crate_graph.add_crate_root(
+ file_id,
+ krate.edition,
+ krate.display_name.clone(),
+ krate.version.clone(),
+ cfg_options.clone(),
+ cfg_options,
+ env,
+ proc_macro,
+ krate.is_proc_macro,
+ if krate.display_name.is_some() {
+ CrateOrigin::CratesIo {
+ repo: krate.repository.clone(),
+ name: krate
+ .display_name
+ .clone()
+ .map(|n| n.canonical_name().to_string()),
+ }
+ } else {
+ CrateOrigin::CratesIo { repo: None, name: None }
+ },
+ ),
+ )
+ })
+ .collect();
+
+ for (from, krate) in project.crates() {
+ if let Some(&from) = crates.get(&from) {
+ if let Some((public_deps, libproc_macro)) = &sysroot_deps {
- rustc_cfg: Vec<CfgFlag>,
- override_cfg: &CfgOverrides,
++ public_deps.add_to_crate_graph(&mut crate_graph, from);
+ if krate.is_proc_macro {
+ if let Some(proc_macro) = libproc_macro {
+ add_dep(
+ &mut crate_graph,
+ from,
+ CrateName::new("proc_macro").unwrap(),
+ *proc_macro,
+ );
+ }
+ }
+ }
+
+ for dep in &krate.deps {
+ if let Some(&to) = crates.get(&dep.crate_id) {
+ add_dep(&mut crate_graph, from, dep.name.clone(), to)
+ }
+ }
+ }
+ }
+ crate_graph
+}
+
+fn cargo_to_crate_graph(
- build_scripts: &WorkspaceBuildScripts,
+ load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
+ load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
++ rustc: &Option<CargoWorkspace>,
+ cargo: &CargoWorkspace,
- rustc: &Option<CargoWorkspace>,
+ sysroot: Option<&Sysroot>,
- let mut cfg_options = CfgOptions::default();
- cfg_options.extend(rustc_cfg);
++ rustc_cfg: Vec<CfgFlag>,
++ override_cfg: &CfgOverrides,
++ build_scripts: &WorkspaceBuildScripts,
+) -> CrateGraph {
+ let _p = profile::span("cargo_to_crate_graph");
+ let mut crate_graph = CrateGraph::default();
+ let (public_deps, libproc_macro) = match sysroot {
+ Some(sysroot) => sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load),
+ None => (SysrootPublicDeps::default(), None),
+ };
+
- cfg_options.insert_atom("debug_assertions".into());
-
++ let cfg_options = {
++ let mut cfg_options = CfgOptions::default();
++ cfg_options.extend(rustc_cfg);
++ cfg_options.insert_atom("debug_assertions".into());
++ cfg_options
++ };
+
+ let mut pkg_to_lib_crate = FxHashMap::default();
+
- public_deps.add(from, &mut crate_graph);
+ let mut pkg_crates = FxHashMap::default();
+ // Does any crate signal to rust-analyzer that they need the rustc_private crates?
+ let mut has_private = false;
+ // Next, create crates for each package, target pair
+ for pkg in cargo.packages() {
+ let mut cfg_options = cfg_options.clone();
+
+ let overrides = match override_cfg {
+ CfgOverrides::Wildcard(cfg_diff) => Some(cfg_diff),
+ CfgOverrides::Selective(cfg_overrides) => cfg_overrides.get(&cargo[pkg].name),
+ };
+
+ // Add test cfg for local crates
+ if cargo[pkg].is_local {
+ cfg_options.insert_atom("test".into());
+ }
+
+ if let Some(overrides) = overrides {
+ // FIXME: this is sort of a hack to deal with #![cfg(not(test))] vanishing such as seen
+ // in ed25519_dalek (#7243), and libcore (#9203) (although you only hit that one while
+ // working on rust-lang/rust as that's the only time it appears outside sysroot).
+ //
+ // A more ideal solution might be to reanalyze crates based on where the cursor is and
+ // figure out the set of cfgs that would have to apply to make it active.
+
+ cfg_options.apply_diff(overrides.clone());
+ };
+
+ has_private |= cargo[pkg].metadata.rustc_private;
+ let mut lib_tgt = None;
+ for &tgt in cargo[pkg].targets.iter() {
+ if cargo[tgt].kind != TargetKind::Lib && !cargo[pkg].is_member {
+ // For non-workspace-members, Cargo does not resolve dev-dependencies, so we don't
+ // add any targets except the library target, since those will not work correctly if
+ // they use dev-dependencies.
+ // In fact, they can break quite badly if multiple client workspaces get merged:
+ // https://github.com/rust-lang/rust-analyzer/issues/11300
+ continue;
+ }
+
+ if let Some(file_id) = load(&cargo[tgt].root) {
+ let crate_id = add_target_crate_root(
+ &mut crate_graph,
+ &cargo[pkg],
+ build_scripts.get_output(pkg),
+ cfg_options.clone(),
+ &mut |path| load_proc_macro(&cargo[tgt].name, path),
+ file_id,
+ &cargo[tgt].name,
+ cargo[tgt].is_proc_macro,
+ );
+ if cargo[tgt].kind == TargetKind::Lib {
+ lib_tgt = Some((crate_id, cargo[tgt].name.clone()));
+ pkg_to_lib_crate.insert(pkg, crate_id);
+ }
+ // Even crates that don't set proc-macro = true are allowed to depend on proc_macro
+ // (just none of the APIs work when called outside of a proc macro).
+ if let Some(proc_macro) = libproc_macro {
+ add_dep_with_prelude(
+ &mut crate_graph,
+ crate_id,
+ CrateName::new("proc_macro").unwrap(),
+ proc_macro,
+ cargo[tgt].is_proc_macro,
+ );
+ }
+
+ pkg_crates.entry(pkg).or_insert_with(Vec::new).push((crate_id, cargo[tgt].kind));
+ }
+ }
+
+ // Set deps to the core, std and to the lib target of the current package
+ for &(from, kind) in pkg_crates.get(&pkg).into_iter().flatten() {
+ // Add sysroot deps first so that a lib target named `core` etc. can overwrite them.
- rustc_workspace,
++ public_deps.add_to_crate_graph(&mut crate_graph, from);
+
+ if let Some((to, name)) = lib_tgt.clone() {
+ if to != from && kind != TargetKind::BuildScript {
+ // (build script can not depend on its library target)
+
+ // For root projects with dashes in their name,
+ // cargo metadata does not do any normalization,
+ // so we do it ourselves currently
+ let name = CrateName::normalize_dashes(&name);
+ add_dep(&mut crate_graph, from, name, to);
+ }
+ }
+ }
+ }
+
+ // Now add a dep edge from all targets of upstream to the lib
+ // target of downstream.
+ for pkg in cargo.packages() {
+ for dep in cargo[pkg].dependencies.iter() {
+ let name = CrateName::new(&dep.name).unwrap();
+ if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) {
+ for &(from, kind) in pkg_crates.get(&pkg).into_iter().flatten() {
+ if dep.kind == DepKind::Build && kind != TargetKind::BuildScript {
+ // Only build scripts may depend on build dependencies.
+ continue;
+ }
+ if dep.kind != DepKind::Build && kind == TargetKind::BuildScript {
+ // Build scripts may only depend on build dependencies.
+ continue;
+ }
+
+ add_dep(&mut crate_graph, from, name.clone(), to)
+ }
+ }
+ }
+ }
+
+ if has_private {
+ // If the user provided a path to rustc sources, we add all the rustc_private crates
+ // and create dependencies on them for the crates which opt-in to that
+ if let Some(rustc_workspace) = rustc {
+ handle_rustc_crates(
+ &mut crate_graph,
- &cfg_options,
- override_cfg,
++ &mut pkg_to_lib_crate,
+ load,
- &mut pkg_to_lib_crate,
- &public_deps,
+ load_proc_macro,
- public_deps.add(detached_file_crate, &mut crate_graph);
++ rustc_workspace,
+ cargo,
++ &public_deps,
++ libproc_macro,
+ &pkg_crates,
++ &cfg_options,
++ override_cfg,
+ build_scripts,
+ );
+ }
+ }
+ crate_graph
+}
+
+fn detached_files_to_crate_graph(
+ rustc_cfg: Vec<CfgFlag>,
+ load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+ detached_files: &[AbsPathBuf],
+ sysroot: &Sysroot,
+) -> CrateGraph {
+ let _p = profile::span("detached_files_to_crate_graph");
+ let mut crate_graph = CrateGraph::default();
+ let (public_deps, _libproc_macro) =
+ sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load);
+
+ let mut cfg_options = CfgOptions::default();
+ cfg_options.extend(rustc_cfg);
+
+ for detached_file in detached_files {
+ let file_id = match load(detached_file) {
+ Some(file_id) => file_id,
+ None => {
+ tracing::error!("Failed to load detached file {:?}", detached_file);
+ continue;
+ }
+ };
+ let display_name = detached_file
+ .file_stem()
+ .and_then(|os_str| os_str.to_str())
+ .map(|file_stem| CrateDisplayName::from_canonical_name(file_stem.to_string()));
+ let detached_file_crate = crate_graph.add_crate_root(
+ file_id,
+ Edition::CURRENT,
+ display_name.clone(),
+ None,
+ cfg_options.clone(),
+ cfg_options.clone(),
+ Env::default(),
+ Ok(Vec::new()),
+ false,
+ CrateOrigin::CratesIo {
+ repo: None,
+ name: display_name.map(|n| n.canonical_name().to_string()),
+ },
+ );
+
- rustc_workspace: &CargoWorkspace,
++ public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate);
+ }
+ crate_graph
+}
+
+fn handle_rustc_crates(
+ crate_graph: &mut CrateGraph,
- cfg_options: &CfgOptions,
- override_cfg: &CfgOverrides,
++ pkg_to_lib_crate: &mut FxHashMap<Package, CrateId>,
+ load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
- pkg_to_lib_crate: &mut FxHashMap<Package, CrateId>,
- public_deps: &SysrootPublicDeps,
+ load_proc_macro: &mut dyn FnMut(&str, &AbsPath) -> ProcMacroLoadResult,
- rustc_workspace.packages().find(|package| rustc_workspace[*package].name == "rustc_driver");
++ rustc_workspace: &CargoWorkspace,
+ cargo: &CargoWorkspace,
++ public_deps: &SysrootPublicDeps,
++ libproc_macro: Option<CrateId>,
+ pkg_crates: &FxHashMap<Package, Vec<(CrateId, TargetKind)>>,
++ cfg_options: &CfgOptions,
++ override_cfg: &CfgOverrides,
+ build_scripts: &WorkspaceBuildScripts,
+) {
+ let mut rustc_pkg_crates = FxHashMap::default();
+ // The root package of the rustc-dev component is rustc_driver, so we match that
+ let root_pkg =
- public_deps.add(crate_id, crate_graph);
++ rustc_workspace.packages().find(|&package| rustc_workspace[package].name == "rustc_driver");
+ // The rustc workspace might be incomplete (such as if rustc-dev is not
+ // installed for the current toolchain) and `rustc_source` is set to discover.
+ if let Some(root_pkg) = root_pkg {
+ // Iterate through every crate in the dependency subtree of rustc_driver using BFS
+ let mut queue = VecDeque::new();
+ queue.push_back(root_pkg);
+ while let Some(pkg) = queue.pop_front() {
+ // Don't duplicate packages if they are dependent on a diamond pattern
+ // N.B. if this line is omitted, we try to analyze over 4_800_000 crates
+ // which is not ideal
+ if rustc_pkg_crates.contains_key(&pkg) {
+ continue;
+ }
+ for dep in &rustc_workspace[pkg].dependencies {
+ queue.push_back(dep.pkg);
+ }
+
+ let mut cfg_options = cfg_options.clone();
+
+ let overrides = match override_cfg {
+ CfgOverrides::Wildcard(cfg_diff) => Some(cfg_diff),
+ CfgOverrides::Selective(cfg_overrides) => {
+ cfg_overrides.get(&rustc_workspace[pkg].name)
+ }
+ };
+
+ if let Some(overrides) = overrides {
+ // FIXME: this is sort of a hack to deal with #![cfg(not(test))] vanishing such as seen
+ // in ed25519_dalek (#7243), and libcore (#9203) (although you only hit that one while
+ // working on rust-lang/rust as that's the only time it appears outside sysroot).
+ //
+ // A more ideal solution might be to reanalyze crates based on where the cursor is and
+ // figure out the set of cfgs that would have to apply to make it active.
+
+ cfg_options.apply_diff(overrides.clone());
+ };
+
+ for &tgt in rustc_workspace[pkg].targets.iter() {
+ if rustc_workspace[tgt].kind != TargetKind::Lib {
+ continue;
+ }
+ if let Some(file_id) = load(&rustc_workspace[tgt].root) {
+ let crate_id = add_target_crate_root(
+ crate_graph,
+ &rustc_workspace[pkg],
+ build_scripts.get_output(pkg),
+ cfg_options.clone(),
+ &mut |path| load_proc_macro(&rustc_workspace[tgt].name, path),
+ file_id,
+ &rustc_workspace[tgt].name,
+ rustc_workspace[tgt].is_proc_macro,
+ );
+ pkg_to_lib_crate.insert(pkg, crate_id);
+ // Add dependencies on core / std / alloc for this crate
- fn add(&self, from: CrateId, crate_graph: &mut CrateGraph) {
++ public_deps.add_to_crate_graph(crate_graph, crate_id);
++ if let Some(proc_macro) = libproc_macro {
++ add_dep_with_prelude(
++ crate_graph,
++ crate_id,
++ CrateName::new("proc_macro").unwrap(),
++ proc_macro,
++ rustc_workspace[tgt].is_proc_macro,
++ );
++ }
+ rustc_pkg_crates.entry(pkg).or_insert_with(Vec::new).push(crate_id);
+ }
+ }
+ }
+ }
+ // Now add a dep edge from all targets of upstream to the lib
+ // target of downstream.
+ for pkg in rustc_pkg_crates.keys().copied() {
+ for dep in rustc_workspace[pkg].dependencies.iter() {
+ let name = CrateName::new(&dep.name).unwrap();
+ if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) {
+ for &from in rustc_pkg_crates.get(&pkg).into_iter().flatten() {
+ add_dep(crate_graph, from, name.clone(), to);
+ }
+ }
+ }
+ }
+ // Add a dependency on the rustc_private crates for all targets of each package
+ // which opts in
+ for dep in rustc_workspace.packages() {
+ let name = CrateName::normalize_dashes(&rustc_workspace[dep].name);
+
+ if let Some(&to) = pkg_to_lib_crate.get(&dep) {
+ for pkg in cargo.packages() {
+ let package = &cargo[pkg];
+ if !package.metadata.rustc_private {
+ continue;
+ }
+ for (from, _) in pkg_crates.get(&pkg).into_iter().flatten() {
+ // Avoid creating duplicate dependencies
+ // This avoids the situation where `from` depends on e.g. `arrayvec`, but
+ // `rust_analyzer` thinks that it should use the one from the `rustc_source`
+ // instead of the one from `crates.io`
+ if !crate_graph[*from].dependencies.iter().any(|d| d.name == name) {
+ add_dep(crate_graph, *from, name.clone(), to);
+ }
+ }
+ }
+ }
+ }
+}
+
+fn add_target_crate_root(
+ crate_graph: &mut CrateGraph,
+ pkg: &PackageData,
+ build_data: Option<&BuildScriptOutput>,
+ cfg_options: CfgOptions,
+ load_proc_macro: &mut dyn FnMut(&AbsPath) -> ProcMacroLoadResult,
+ file_id: FileId,
+ cargo_name: &str,
+ is_proc_macro: bool,
+) -> CrateId {
+ let edition = pkg.edition;
+ let mut potential_cfg_options = cfg_options.clone();
+ potential_cfg_options.extend(
+ pkg.features
+ .iter()
+ .map(|feat| CfgFlag::KeyValue { key: "feature".into(), value: feat.0.into() }),
+ );
+ let cfg_options = {
+ let mut opts = cfg_options;
+ for feature in pkg.active_features.iter() {
+ opts.insert_key_value("feature".into(), feature.into());
+ }
+ if let Some(cfgs) = build_data.as_ref().map(|it| &it.cfgs) {
+ opts.extend(cfgs.iter().cloned());
+ }
+ opts
+ };
+
+ let mut env = Env::default();
+ inject_cargo_env(pkg, &mut env);
+
+ if let Some(envs) = build_data.map(|it| &it.envs) {
+ for (k, v) in envs {
+ env.set(k, v.clone());
+ }
+ }
+
+ let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) {
+ Some(Some(it)) => load_proc_macro(it),
+ Some(None) => Err("no proc macro dylib present".into()),
+ None => Err("crate has not (yet) been built".into()),
+ };
+
+ let display_name = CrateDisplayName::from_canonical_name(cargo_name.to_string());
+ crate_graph.add_crate_root(
+ file_id,
+ edition,
+ Some(display_name),
+ Some(pkg.version.to_string()),
+ cfg_options,
+ potential_cfg_options,
+ env,
+ proc_macro,
+ is_proc_macro,
+ CrateOrigin::CratesIo { repo: pkg.repository.clone(), name: Some(pkg.name.clone()) },
+ )
+}
+
+#[derive(Default)]
+struct SysrootPublicDeps {
+ deps: Vec<(CrateName, CrateId, bool)>,
+}
+
+impl SysrootPublicDeps {
+ /// Makes `from` depend on the public sysroot crates.
++ fn add_to_crate_graph(&self, crate_graph: &mut CrateGraph, from: CrateId) {
+ for (name, krate, prelude) in &self.deps {
+ add_dep_with_prelude(crate_graph, from, name.clone(), *krate, *prelude);
+ }
+ }
+}
+
+fn sysroot_to_crate_graph(
+ crate_graph: &mut CrateGraph,
+ sysroot: &Sysroot,
+ rustc_cfg: Vec<CfgFlag>,
+ load: &mut dyn FnMut(&AbsPath) -> Option<FileId>,
+) -> (SysrootPublicDeps, Option<CrateId>) {
+ let _p = profile::span("sysroot_to_crate_graph");
+ let mut cfg_options = CfgOptions::default();
+ cfg_options.extend(rustc_cfg);
+ let sysroot_crates: FxHashMap<SysrootCrate, CrateId> = sysroot
+ .crates()
+ .filter_map(|krate| {
+ let file_id = load(&sysroot[krate].root)?;
+
+ let env = Env::default();
+ let display_name = CrateDisplayName::from_canonical_name(sysroot[krate].name.clone());
+ let crate_id = crate_graph.add_crate_root(
+ file_id,
+ Edition::CURRENT,
+ Some(display_name),
+ None,
+ cfg_options.clone(),
+ cfg_options.clone(),
+ env,
+ Err("no proc macro loaded for sysroot crate".into()),
+ false,
+ CrateOrigin::Lang(LangCrateOrigin::from(&*sysroot[krate].name)),
+ );
+ Some((krate, crate_id))
+ })
+ .collect();
+
+ for from in sysroot.crates() {
+ for &to in sysroot[from].deps.iter() {
+ let name = CrateName::new(&sysroot[to].name).unwrap();
+ if let (Some(&from), Some(&to)) = (sysroot_crates.get(&from), sysroot_crates.get(&to)) {
+ add_dep(crate_graph, from, name, to);
+ }
+ }
+ }
+
+ let public_deps = SysrootPublicDeps {
+ deps: sysroot
+ .public_deps()
+ .map(|(name, idx, prelude)| {
+ (CrateName::new(name).unwrap(), sysroot_crates[&idx], prelude)
+ })
+ .collect::<Vec<_>>(),
+ };
+
+ let libproc_macro = sysroot.proc_macro().and_then(|it| sysroot_crates.get(&it).copied());
+ (public_deps, libproc_macro)
+}
+
+fn add_dep(graph: &mut CrateGraph, from: CrateId, name: CrateName, to: CrateId) {
+ add_dep_inner(graph, from, Dependency::new(name, to))
+}
+
+fn add_dep_with_prelude(
+ graph: &mut CrateGraph,
+ from: CrateId,
+ name: CrateName,
+ to: CrateId,
+ prelude: bool,
+) {
+ add_dep_inner(graph, from, Dependency::with_prelude(name, to, prelude))
+}
+
+fn add_dep_inner(graph: &mut CrateGraph, from: CrateId, dep: Dependency) {
+ if let Err(err) = graph.add_dep(from, dep) {
+ tracing::error!("{}", err)
+ }
+}
+
+/// Recreates the compile-time environment variables that Cargo sets.
+///
+/// Should be synced with
+/// <https://doc.rust-lang.org/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-crates>
+///
+/// FIXME: ask Cargo to provide this data instead of re-deriving.
+fn inject_cargo_env(package: &PackageData, env: &mut Env) {
+ // FIXME: Missing variables:
+ // CARGO_BIN_NAME, CARGO_BIN_EXE_<name>
+
+ let manifest_dir = package.manifest.parent();
+ env.set("CARGO_MANIFEST_DIR", manifest_dir.as_os_str().to_string_lossy().into_owned());
+
+ // Not always right, but works for common cases.
+ env.set("CARGO", "cargo".into());
+
+ env.set("CARGO_PKG_VERSION", package.version.to_string());
+ env.set("CARGO_PKG_VERSION_MAJOR", package.version.major.to_string());
+ env.set("CARGO_PKG_VERSION_MINOR", package.version.minor.to_string());
+ env.set("CARGO_PKG_VERSION_PATCH", package.version.patch.to_string());
+ env.set("CARGO_PKG_VERSION_PRE", package.version.pre.to_string());
+
+ env.set("CARGO_PKG_AUTHORS", String::new());
+
+ env.set("CARGO_PKG_NAME", package.name.clone());
+ // FIXME: This isn't really correct (a package can have many crates with different names), but
+ // it's better than leaving the variable unset.
+ env.set("CARGO_CRATE_NAME", CrateName::normalize_dashes(&package.name).to_string());
+ env.set("CARGO_PKG_DESCRIPTION", String::new());
+ env.set("CARGO_PKG_HOMEPAGE", String::new());
+ env.set("CARGO_PKG_REPOSITORY", String::new());
+ env.set("CARGO_PKG_LICENSE", String::new());
+
+ env.set("CARGO_PKG_LICENSE_FILE", String::new());
+}
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "rust-analyzer"
+version = "0.0.0"
+authors = ["rust-analyzer Team"]
+homepage = "https://github.com/rust-analyzer/rust-analyzer"
+description = "A language server for the Rust programming language"
+documentation = "https://rust-analyzer.github.io/manual.html"
+license = "MIT OR Apache-2.0"
+autobins = false
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[[bin]]
+name = "rust-analyzer"
+path = "src/bin/main.rs"
+
+[dependencies]
+anyhow = "1.0.62"
+crossbeam-channel = "0.5.5"
+dissimilar = "1.0.4"
+itertools = "0.10.5"
+scip = "0.1.1"
+lsp-types = { version = "=0.93.2", features = ["proposed"] }
+parking_lot = "0.12.1"
+xflags = "0.3.0"
+oorandom = "11.1.3"
+rustc-hash = "1.1.0"
+serde = { version = "1.0.137", features = ["derive"] }
+serde_json = { version = "1.0.81", features = ["preserve_order"] }
+threadpool = "1.8.1"
+rayon = "1.5.3"
+num_cpus = "1.13.1"
+mimalloc = { version = "0.1.30", default-features = false, optional = true }
+lsp-server = { version = "0.7.0", path = "../../lib/lsp-server" }
+tracing = "0.1.35"
+tracing-subscriber = { version = "0.3.16", default-features = false, features = [
+ "env-filter",
+ "registry",
+ "fmt",
+ "tracing-log",
+] }
+tracing-log = "0.1.3"
+tracing-tree = "0.2.1"
+always-assert = "0.1.2"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+flycheck = { path = "../flycheck", version = "0.0.0" }
+ide = { path = "../ide", version = "0.0.0" }
+ide-db = { path = "../ide-db", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+project-model = { path = "../project-model", version = "0.0.0" }
+syntax = { path = "../syntax", version = "0.0.0" }
+vfs = { path = "../vfs", version = "0.0.0" }
+vfs-notify = { path = "../vfs-notify", version = "0.0.0" }
+cfg = { path = "../cfg", version = "0.0.0" }
+toolchain = { path = "../toolchain", version = "0.0.0" }
+tt = { path = "../tt", version = "0.0.0" }
+proc-macro-api = { path = "../proc-macro-api", version = "0.0.0" }
+
+# This should only be used in CLI
+ide-ssr = { path = "../ide-ssr", version = "0.0.0" }
+hir = { path = "../hir", version = "0.0.0" }
+hir-def = { path = "../hir-def", version = "0.0.0" }
+hir-ty = { path = "../hir-ty", version = "0.0.0" }
+proc-macro-srv = { path = "../proc-macro-srv", version = "0.0.0" }
+
+[target.'cfg(windows)'.dependencies]
+winapi = "0.3.9"
+
+[target.'cfg(not(target_env = "msvc"))'.dependencies]
+jemallocator = { version = "0.5.0", package = "tikv-jemallocator", optional = true }
+
+[dev-dependencies]
+expect-test = "1.4.0"
+jod-thread = "0.1.2"
+xshell = "0.2.2"
+
+test-utils = { path = "../test-utils" }
+sourcegen = { path = "../sourcegen" }
+mbe = { path = "../mbe" }
+
+[features]
+jemalloc = ["jemallocator", "profile/jemalloc"]
+force-always-assert = ["always-assert/force"]
+in-rust-tree = [
+ "proc-macro-srv/sysroot-abi",
+ "ide/in-rust-tree",
+ "syntax/in-rust-tree",
+]
--- /dev/null
- use ide::{FileId, RunnableKind, TestId};
+//! See `CargoTargetSpec`
+
+use std::mem;
+
+use cfg::{CfgAtom, CfgExpr};
- use crate::{global_state::GlobalStateSnapshot, Result};
++use ide::{Cancellable, FileId, RunnableKind, TestId};
+use project_model::{self, CargoFeatures, ManifestPath, TargetKind};
+use vfs::AbsPathBuf;
+
- ) -> Result<(Vec<String>, Vec<String>)> {
++use crate::global_state::GlobalStateSnapshot;
+
+/// Abstract representation of Cargo target.
+///
+/// We use it to cook up the set of cli args we need to pass to Cargo to
+/// build/test/run the target.
+#[derive(Clone)]
+pub(crate) struct CargoTargetSpec {
+ pub(crate) workspace_root: AbsPathBuf,
+ pub(crate) cargo_toml: ManifestPath,
+ pub(crate) package: String,
+ pub(crate) target: String,
+ pub(crate) target_kind: TargetKind,
+ pub(crate) required_features: Vec<String>,
+}
+
+impl CargoTargetSpec {
+ pub(crate) fn runnable_args(
+ snap: &GlobalStateSnapshot,
+ spec: Option<CargoTargetSpec>,
+ kind: &RunnableKind,
+ cfg: &Option<CfgExpr>,
- Ok((args, extra_args))
++ ) -> (Vec<String>, Vec<String>) {
+ let mut args = Vec::new();
+ let mut extra_args = Vec::new();
+
+ match kind {
+ RunnableKind::Test { test_id, attr } => {
+ args.push("test".to_owned());
+ extra_args.push(test_id.to_string());
+ if let TestId::Path(_) = test_id {
+ extra_args.push("--exact".to_owned());
+ }
+ extra_args.push("--nocapture".to_owned());
+ if attr.ignore {
+ extra_args.push("--ignored".to_owned());
+ }
+ }
+ RunnableKind::TestMod { path } => {
+ args.push("test".to_owned());
+ extra_args.push(path.clone());
+ extra_args.push("--nocapture".to_owned());
+ }
+ RunnableKind::Bench { test_id } => {
+ args.push("bench".to_owned());
+ extra_args.push(test_id.to_string());
+ if let TestId::Path(_) = test_id {
+ extra_args.push("--exact".to_owned());
+ }
+ extra_args.push("--nocapture".to_owned());
+ }
+ RunnableKind::DocTest { test_id } => {
+ args.push("test".to_owned());
+ args.push("--doc".to_owned());
+ extra_args.push(test_id.to_string());
+ extra_args.push("--nocapture".to_owned());
+ }
+ RunnableKind::Bin => {
+ let subcommand = match spec {
+ Some(CargoTargetSpec { target_kind: TargetKind::Test, .. }) => "test",
+ _ => "run",
+ };
+ args.push(subcommand.to_owned());
+ }
+ }
+
+ let target_required_features = if let Some(mut spec) = spec {
+ let required_features = mem::take(&mut spec.required_features);
+ spec.push_to(&mut args, kind);
+ required_features
+ } else {
+ Vec::new()
+ };
+
+ let cargo_config = snap.config.cargo();
+
+ match &cargo_config.features {
+ CargoFeatures::All => {
+ args.push("--all-features".to_owned());
+ for feature in target_required_features {
+ args.push("--features".to_owned());
+ args.push(feature);
+ }
+ }
+ CargoFeatures::Selected { features, no_default_features } => {
+ let mut feats = Vec::new();
+ if let Some(cfg) = cfg.as_ref() {
+ required_features(cfg, &mut feats);
+ }
+
+ feats.extend(features.iter().cloned());
+ feats.extend(target_required_features);
+
+ feats.dedup();
+ for feature in feats {
+ args.push("--features".to_owned());
+ args.push(feature);
+ }
+
+ if *no_default_features {
+ args.push("--no-default-features".to_owned());
+ }
+ }
+ }
- ) -> Result<Option<CargoTargetSpec>> {
++ (args, extra_args)
+ }
+
+ pub(crate) fn for_file(
+ global_state_snapshot: &GlobalStateSnapshot,
+ file_id: FileId,
++ ) -> Cancellable<Option<CargoTargetSpec>> {
+ let crate_id = match &*global_state_snapshot.analysis.crates_for(file_id)? {
+ &[crate_id, ..] => crate_id,
+ _ => return Ok(None),
+ };
+ let (cargo_ws, target) = match global_state_snapshot.cargo_target_for_crate_root(crate_id) {
+ Some(it) => it,
+ None => return Ok(None),
+ };
+
+ let target_data = &cargo_ws[target];
+ let package_data = &cargo_ws[target_data.package];
+ let res = CargoTargetSpec {
+ workspace_root: cargo_ws.workspace_root().to_path_buf(),
+ cargo_toml: package_data.manifest.clone(),
+ package: cargo_ws.package_flag(package_data),
+ target: target_data.name.clone(),
+ target_kind: target_data.kind,
+ required_features: target_data.required_features.clone(),
+ };
+
+ Ok(Some(res))
+ }
+
+ pub(crate) fn push_to(self, buf: &mut Vec<String>, kind: &RunnableKind) {
+ buf.push("--package".to_owned());
+ buf.push(self.package);
+
+ // Can't mix --doc with other target flags
+ if let RunnableKind::DocTest { .. } = kind {
+ return;
+ }
+ match self.target_kind {
+ TargetKind::Bin => {
+ buf.push("--bin".to_owned());
+ buf.push(self.target);
+ }
+ TargetKind::Test => {
+ buf.push("--test".to_owned());
+ buf.push(self.target);
+ }
+ TargetKind::Bench => {
+ buf.push("--bench".to_owned());
+ buf.push(self.target);
+ }
+ TargetKind::Example => {
+ buf.push("--example".to_owned());
+ buf.push(self.target);
+ }
+ TargetKind::Lib => {
+ buf.push("--lib".to_owned());
+ }
+ TargetKind::Other | TargetKind::BuildScript => (),
+ }
+ }
+}
+
+/// Fill minimal features needed
+fn required_features(cfg_expr: &CfgExpr, features: &mut Vec<String>) {
+ match cfg_expr {
+ CfgExpr::Atom(CfgAtom::KeyValue { key, value }) if key == "feature" => {
+ features.push(value.to_string())
+ }
+ CfgExpr::All(preds) => {
+ preds.iter().for_each(|cfg| required_features(cfg, features));
+ }
+ CfgExpr::Any(preds) => {
+ for cfg in preds {
+ let len_features = features.len();
+ required_features(cfg, features);
+ if len_features != features.len() {
+ break;
+ }
+ }
+ }
+ _ => {}
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use cfg::CfgExpr;
+ use mbe::syntax_node_to_token_tree;
+ use syntax::{
+ ast::{self, AstNode},
+ SmolStr,
+ };
+
+ fn check(cfg: &str, expected_features: &[&str]) {
+ let cfg_expr = {
+ let source_file = ast::SourceFile::parse(cfg).ok().unwrap();
+ let tt = source_file.syntax().descendants().find_map(ast::TokenTree::cast).unwrap();
+ let (tt, _) = syntax_node_to_token_tree(tt.syntax());
+ CfgExpr::parse(&tt)
+ };
+
+ let mut features = vec![];
+ required_features(&cfg_expr, &mut features);
+
+ let expected_features =
+ expected_features.iter().map(|&it| SmolStr::new(it)).collect::<Vec<_>>();
+
+ assert_eq!(features, expected_features);
+ }
+
+ #[test]
+ fn test_cfg_expr_minimal_features_needed() {
+ check(r#"#![cfg(feature = "baz")]"#, &["baz"]);
+ check(r#"#![cfg(all(feature = "baz", feature = "foo"))]"#, &["baz", "foo"]);
+ check(r#"#![cfg(any(feature = "baz", feature = "foo", unix))]"#, &["baz"]);
+ check(r#"#![cfg(foo)]"#, &[]);
+ }
+}
--- /dev/null
- let mut path = AbsPathBuf::assert(std::env::current_exe()?);
- let mut args = vec!["proc-macro"];
+//! Loads a Cargo project into a static instance of analysis, without support
+//! for incorporating changes.
+use std::{path::Path, sync::Arc};
+
+use anyhow::Result;
+use crossbeam_channel::{unbounded, Receiver};
+use hir::db::DefDatabase;
+use ide::{AnalysisHost, Change};
+use ide_db::{base_db::CrateGraph, FxHashMap};
+use proc_macro_api::ProcMacroServer;
+use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
+use vfs::{loader::Handle, AbsPath, AbsPathBuf};
+
+use crate::reload::{load_proc_macro, ProjectFolders, SourceRootConfig};
+
+// Note: Since this type is used by external tools that use rust-analyzer as a library
+// what otherwise would be `pub(crate)` has to be `pub` here instead.
+pub struct LoadCargoConfig {
+ pub load_out_dirs_from_check: bool,
+ pub with_proc_macro: bool,
+ pub prefill_caches: bool,
+}
+
+// Note: Since this function is used by external tools that use rust-analyzer as a library
+// what otherwise would be `pub(crate)` has to be `pub` here instead.
+pub fn load_workspace_at(
+ root: &Path,
+ cargo_config: &CargoConfig,
+ load_config: &LoadCargoConfig,
+ progress: &dyn Fn(String),
+) -> Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> {
+ let root = AbsPathBuf::assert(std::env::current_dir()?.join(root));
+ let root = ProjectManifest::discover_single(&root)?;
+ let mut workspace = ProjectWorkspace::load(root, cargo_config, progress)?;
+
+ if load_config.load_out_dirs_from_check {
+ let build_scripts = workspace.run_build_scripts(cargo_config, progress)?;
+ workspace.set_build_scripts(build_scripts)
+ }
+
+ load_workspace(workspace, &cargo_config.extra_env, load_config)
+}
+
+// Note: Since this function is used by external tools that use rust-analyzer as a library
+// what otherwise would be `pub(crate)` has to be `pub` here instead.
+//
+// The reason both, `load_workspace_at` and `load_workspace` are `pub` is that some of
+// these tools need access to `ProjectWorkspace`, too, which `load_workspace_at` hides.
+pub fn load_workspace(
+ ws: ProjectWorkspace,
+ extra_env: &FxHashMap<String, String>,
+ load_config: &LoadCargoConfig,
+) -> Result<(AnalysisHost, vfs::Vfs, Option<ProcMacroServer>)> {
+ let (sender, receiver) = unbounded();
+ let mut vfs = vfs::Vfs::default();
+ let mut loader = {
+ let loader =
+ vfs_notify::NotifyHandle::spawn(Box::new(move |msg| sender.send(msg).unwrap()));
+ Box::new(loader)
+ };
+
+ let proc_macro_client = if load_config.with_proc_macro {
- if let ProjectWorkspace::Cargo { sysroot, .. } | ProjectWorkspace::Json { sysroot, .. } =
- &ws
- {
- if let Some(sysroot) = sysroot.as_ref() {
- let standalone_server_name =
- format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX);
- let server_path = sysroot.root().join("libexec").join(&standalone_server_name);
- if std::fs::metadata(&server_path).is_ok() {
- path = server_path;
- args = vec![];
- }
- }
- }
-
- ProcMacroServer::spawn(path.clone(), args.clone()).map_err(|e| e.to_string())
++ let (server_path, args): (_, &[_]) = match ws.find_sysroot_proc_macro_srv() {
++ Some(server_path) => (server_path, &[]),
++ None => (AbsPathBuf::assert(std::env::current_exe()?), &["proc-macro"]),
++ };
+
++ ProcMacroServer::spawn(server_path, args).map_err(|e| e.to_string())
+ } else {
+ Err("proc macro server disabled".to_owned())
+ };
+
+ let crate_graph = ws.to_crate_graph(
+ &mut |_, path: &AbsPath| {
+ load_proc_macro(proc_macro_client.as_ref().map_err(|e| &**e), path, &[])
+ },
+ &mut |path: &AbsPath| {
+ let contents = loader.load_sync(path);
+ let path = vfs::VfsPath::from(path.to_path_buf());
+ vfs.set_file_contents(path.clone(), contents);
+ vfs.file_id(&path)
+ },
+ extra_env,
+ );
+
+ let project_folders = ProjectFolders::new(&[ws], &[]);
+ loader.set_config(vfs::loader::Config {
+ load: project_folders.load,
+ watch: vec![],
+ version: 0,
+ });
+
+ tracing::debug!("crate graph: {:?}", crate_graph);
+ let host =
+ load_crate_graph(crate_graph, project_folders.source_root_config, &mut vfs, &receiver);
+
+ if load_config.prefill_caches {
+ host.analysis().parallel_prime_caches(1, |_| {})?;
+ }
+ Ok((host, vfs, proc_macro_client.ok()))
+}
+
+fn load_crate_graph(
+ crate_graph: CrateGraph,
+ source_root_config: SourceRootConfig,
+ vfs: &mut vfs::Vfs,
+ receiver: &Receiver<vfs::loader::Message>,
+) -> AnalysisHost {
+ let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::<usize>().ok());
+ let mut host = AnalysisHost::new(lru_cap);
+ let mut analysis_change = Change::new();
+
+ host.raw_database_mut().set_enable_proc_attr_macros(true);
+
+ // wait until Vfs has loaded all roots
+ for task in receiver {
+ match task {
+ vfs::loader::Message::Progress { n_done, n_total, config_version: _ } => {
+ if n_done == n_total {
+ break;
+ }
+ }
+ vfs::loader::Message::Loaded { files } => {
+ for (path, contents) in files {
+ vfs.set_file_contents(path.into(), contents);
+ }
+ }
+ }
+ }
+ let changes = vfs.take_changes();
+ for file in changes {
+ if file.exists() {
+ let contents = vfs.file_contents(file.file_id).to_vec();
+ if let Ok(text) = String::from_utf8(contents) {
+ analysis_change.change_file(file.file_id, Some(Arc::new(text)))
+ }
+ }
+ }
+ let source_roots = source_root_config.partition(vfs);
+ analysis_change.set_roots(source_roots);
+
+ analysis_change.set_crate_graph(crate_graph);
+
+ host.apply_change(analysis_change);
+ host
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ use hir::Crate;
+
+ #[test]
+ fn test_loading_rust_analyzer() {
+ let path = Path::new(env!("CARGO_MANIFEST_DIR")).parent().unwrap().parent().unwrap();
+ let cargo_config = CargoConfig::default();
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: false,
+ with_proc_macro: false,
+ prefill_caches: false,
+ };
+ let (host, _vfs, _proc_macro) =
+ load_workspace_at(path, &cargo_config, &load_cargo_config, &|_| {}).unwrap();
+
+ let n_crates = Crate::all(host.raw_database()).len();
+ // RA has quite a few crates, but the exact count doesn't matter
+ assert!(n_crates > 20);
+ }
+}
--- /dev/null
- let mut index = scip_types::Index {
- metadata: Some(scip_types::Metadata {
- version: scip_types::ProtocolVersion::UnspecifiedProtocolVersion.into(),
- tool_info: Some(scip_types::ToolInfo {
- name: "rust-analyzer".to_owned(),
- version: "0.1".to_owned(),
- arguments: vec![],
- ..Default::default()
- })
- .into(),
- project_root: format!(
- "file://{}",
- path.normalize()
- .as_os_str()
- .to_str()
- .ok_or(anyhow::anyhow!("Unable to normalize project_root path"))?
- .to_string()
- ),
- text_document_encoding: scip_types::TextEncoding::UTF8.into(),
- ..Default::default()
+//! SCIP generator
+
+use std::{
+ collections::{HashMap, HashSet},
+ time::Instant,
+};
+
+use crate::line_index::{LineEndings, LineIndex, PositionEncoding};
+use hir::Name;
+use ide::{
+ LineCol, MonikerDescriptorKind, StaticIndex, StaticIndexedFile, TextRange, TokenId,
+ TokenStaticData,
+};
+use ide_db::LineIndexDatabase;
+use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace};
+use scip::types as scip_types;
+use std::env;
+
+use crate::cli::{
+ flags,
+ load_cargo::{load_workspace, LoadCargoConfig},
+ Result,
+};
+
+impl flags::Scip {
+ pub fn run(self) -> Result<()> {
+ eprintln!("Generating SCIP start...");
+ let now = Instant::now();
+ let cargo_config = CargoConfig::default();
+
+ let no_progress = &|s| (eprintln!("rust-analyzer: Loading {}", s));
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: true,
+ with_proc_macro: true,
+ prefill_caches: true,
+ };
+ let path = vfs::AbsPathBuf::assert(env::current_dir()?.join(&self.path));
+ let rootpath = path.normalize();
+ let manifest = ProjectManifest::discover_single(&path)?;
+
+ let workspace = ProjectWorkspace::load(manifest, &cargo_config, no_progress)?;
+
+ let (host, vfs, _) =
+ load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?;
+ let db = host.raw_database();
+ let analysis = host.analysis();
+
+ let si = StaticIndex::compute(&analysis);
+
- ..Default::default()
++ let metadata = scip_types::Metadata {
++ version: scip_types::ProtocolVersion::UnspecifiedProtocolVersion.into(),
++ tool_info: Some(scip_types::ToolInfo {
++ name: "rust-analyzer".to_owned(),
++ version: "0.1".to_owned(),
++ arguments: vec![],
++ special_fields: Default::default(),
+ })
+ .into(),
- let mut doc = scip_types::Document {
- relative_path,
- language: "rust".to_string(),
- ..Default::default()
- };
++ project_root: format!(
++ "file://{}",
++ path.normalize()
++ .as_os_str()
++ .to_str()
++ .ok_or(anyhow::anyhow!("Unable to normalize project_root path"))?
++ .to_string()
++ ),
++ text_document_encoding: scip_types::TextEncoding::UTF8.into(),
++ special_fields: Default::default(),
+ };
++ let mut documents = Vec::new();
+
+ let mut symbols_emitted: HashSet<TokenId> = HashSet::default();
+ let mut tokens_to_symbol: HashMap<TokenId, String> = HashMap::new();
+
+ for StaticIndexedFile { file_id, tokens, .. } in si.files {
+ let mut local_count = 0;
+ let mut new_local_symbol = || {
+ let new_symbol = scip::types::Symbol::new_local(local_count);
+ local_count += 1;
+
+ new_symbol
+ };
+
+ let relative_path = match get_relative_filepath(&vfs, &rootpath, file_id) {
+ Some(relative_path) => relative_path,
+ None => continue,
+ };
+
+ let line_index = LineIndex {
+ index: db.line_index(file_id),
+ encoding: PositionEncoding::Utf8,
+ endings: LineEndings::Unix,
+ };
+
- tokens.into_iter().for_each(|(range, id)| {
++ let mut occurrences = Vec::new();
++ let mut symbols = Vec::new();
+
- let mut occurrence = scip_types::Occurrence::default();
- occurrence.range = text_range_to_scip_range(&line_index, range);
- occurrence.symbol = tokens_to_symbol
++ tokens.into_iter().for_each(|(text_range, id)| {
+ let token = si.tokens.get(id).unwrap();
+
- if def.range == range {
- occurrence.symbol_roles |= scip_types::SymbolRole::Definition as i32;
++ let range = text_range_to_scip_range(&line_index, text_range);
++ let symbol = tokens_to_symbol
+ .entry(id)
+ .or_insert_with(|| {
+ let symbol = token_to_symbol(&token).unwrap_or_else(&mut new_local_symbol);
+ scip::symbol::format_symbol(symbol)
+ })
+ .clone();
+
++ let mut symbol_roles = Default::default();
++
+ if let Some(def) = token.definition {
- let mut symbol_info = scip_types::SymbolInformation::default();
- symbol_info.symbol = occurrence.symbol.clone();
- if let Some(hover) = &token.hover {
- if !hover.markup.as_str().is_empty() {
- symbol_info.documentation = vec![hover.markup.as_str().to_string()];
- }
- }
-
- doc.symbols.push(symbol_info)
++ if def.range == text_range {
++ symbol_roles |= scip_types::SymbolRole::Definition as i32;
+ }
+
+ if symbols_emitted.insert(id) {
- doc.occurrences.push(occurrence);
++ let documentation = token
++ .hover
++ .as_ref()
++ .map(|hover| hover.markup.as_str())
++ .filter(|it| !it.is_empty())
++ .map(|it| vec![it.to_owned()]);
++ let symbol_info = scip_types::SymbolInformation {
++ symbol: symbol.clone(),
++ documentation: documentation.unwrap_or_default(),
++ relationships: Vec::new(),
++ special_fields: Default::default(),
++ };
++
++ symbols.push(symbol_info)
+ }
+ }
+
- if doc.occurrences.is_empty() {
++ occurrences.push(scip_types::Occurrence {
++ range,
++ symbol,
++ symbol_roles,
++ override_documentation: Vec::new(),
++ syntax_kind: Default::default(),
++ diagnostics: Vec::new(),
++ special_fields: Default::default(),
++ });
+ });
+
- index.documents.push(doc);
++ if occurrences.is_empty() {
+ continue;
+ }
+
- ..Default::default()
++ documents.push(scip_types::Document {
++ relative_path,
++ language: "rust".to_string(),
++ occurrences,
++ symbols,
++ special_fields: Default::default(),
++ });
+ }
+
++ let index = scip_types::Index {
++ metadata: Some(metadata).into(),
++ documents,
++ external_symbols: Vec::new(),
++ special_fields: Default::default(),
++ };
++
+ scip::write_message_to_file("index.scip", index)
+ .map_err(|err| anyhow::anyhow!("Failed to write scip to file: {}", err))?;
+
+ eprintln!("Generating SCIP finished {:?}", now.elapsed());
+ Ok(())
+ }
+}
+
+fn get_relative_filepath(
+ vfs: &vfs::Vfs,
+ rootpath: &vfs::AbsPathBuf,
+ file_id: ide::FileId,
+) -> Option<String> {
+ Some(vfs.file_path(file_id).as_path()?.strip_prefix(&rootpath)?.as_ref().to_str()?.to_string())
+}
+
+// SCIP ranges have a (very significant) optimization: a range that starts and
+// ends on the same line is encoded as a three-element vector
+// [start_line, start_col, end_col] instead of four.
+//
+// This converts a TextRange into the optimized SCIP range, using the line
+// index to translate offsets into line/column positions.
+fn text_range_to_scip_range(line_index: &LineIndex, range: TextRange) -> Vec<i32> {
+ let LineCol { line: start_line, col: start_col } = line_index.index.line_col(range.start());
+ let LineCol { line: end_line, col: end_col } = line_index.index.line_col(range.end());
+
+ if start_line == end_line {
+ vec![start_line as i32, start_col as i32, end_col as i32]
+ } else {
+ vec![start_line as i32, start_col as i32, end_line as i32, end_col as i32]
+ }
+}
+
+fn new_descriptor_str(
+ name: &str,
+ suffix: scip_types::descriptor::Suffix,
+) -> scip_types::Descriptor {
+ scip_types::Descriptor {
+ name: name.to_string(),
+ disambiguator: "".to_string(),
+ suffix: suffix.into(),
- ..Default::default()
++ special_fields: Default::default(),
+ }
+}
+
+fn new_descriptor(name: Name, suffix: scip_types::descriptor::Suffix) -> scip_types::Descriptor {
+ let mut name = name.to_string();
+ if name.contains("'") {
+ name = format!("`{}`", name);
+ }
+
+ new_descriptor_str(name.as_str(), suffix)
+}
+
+/// Loosely based on `def_to_moniker`
+///
+/// Only returns a `Symbol` for non-local symbols; that is, if a token is not
+/// visible outside of its defining document, this returns `None`.
+fn token_to_symbol(token: &TokenStaticData) -> Option<scip_types::Symbol> {
+ use scip_types::descriptor::Suffix::*;
+
+ let moniker = token.moniker.as_ref()?;
+
+ let package_name = moniker.package_information.name.clone();
+ let version = moniker.package_information.version.clone();
+ let descriptors = moniker
+ .identifier
+ .description
+ .iter()
+ .map(|desc| {
+ new_descriptor(
+ desc.name.clone(),
+ match desc.desc {
+ MonikerDescriptorKind::Namespace => Namespace,
+ MonikerDescriptorKind::Type => Type,
+ MonikerDescriptorKind::Term => Term,
+ MonikerDescriptorKind::Method => Method,
+ MonikerDescriptorKind::TypeParameter => TypeParameter,
+ MonikerDescriptorKind::Parameter => Parameter,
+ MonikerDescriptorKind::Macro => Macro,
+ MonikerDescriptorKind::Meta => Meta,
+ },
+ )
+ })
+ .collect();
+
+ Some(scip_types::Symbol {
+ scheme: "rust-analyzer".into(),
+ package: Some(scip_types::Package {
+ manager: "cargo".to_string(),
+ name: package_name,
+ version: version.unwrap_or_else(|| ".".to_string()),
- ..Default::default()
++ special_fields: Default::default(),
+ })
+ .into(),
+ descriptors,
++ special_fields: Default::default(),
+ })
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+ use ide::{AnalysisHost, FilePosition, StaticIndex, TextSize};
+ use ide_db::base_db::fixture::ChangeFixture;
+ use scip::symbol::format_symbol;
+
+ fn position(ra_fixture: &str) -> (AnalysisHost, FilePosition) {
+ let mut host = AnalysisHost::default();
+ let change_fixture = ChangeFixture::parse(ra_fixture);
+ host.raw_database_mut().apply_change(change_fixture.change);
+ let (file_id, range_or_offset) =
+ change_fixture.file_position.expect("expected a marker ($0)");
+ let offset = range_or_offset.expect_offset();
+ (host, FilePosition { file_id, offset })
+ }
+
+ /// If `expected` is empty, assert that no symbol was found (i.e. the token is a local symbol).
+ #[track_caller]
+ fn check_symbol(ra_fixture: &str, expected: &str) {
+ let (host, position) = position(ra_fixture);
+
+ let analysis = host.analysis();
+ let si = StaticIndex::compute(&analysis);
+
+ let FilePosition { file_id, offset } = position;
+
+ let mut found_symbol = None;
+ for file in &si.files {
+ if file.file_id != file_id {
+ continue;
+ }
+ for &(range, id) in &file.tokens {
+ if range.contains(offset - TextSize::from(1)) {
+ let token = si.tokens.get(id).unwrap();
+ found_symbol = token_to_symbol(token);
+ break;
+ }
+ }
+ }
+
+ if expected == "" {
+ assert!(found_symbol.is_none(), "must have no symbols {:?}", found_symbol);
+ return;
+ }
+
+ assert!(found_symbol.is_some(), "must have one symbol {:?}", found_symbol);
+ let res = found_symbol.unwrap();
+ let formatted = format_symbol(res);
+ assert_eq!(formatted, expected);
+ }
+
+ #[test]
+ fn basic() {
+ check_symbol(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::example_mod::func;
+fn main() {
+ func$0();
+}
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod example_mod {
+ pub fn func() {}
+}
+"#,
+ "rust-analyzer cargo foo 0.1.0 example_mod/func().",
+ );
+ }
+
+ #[test]
+ fn symbol_for_trait() {
+ check_symbol(
+ r#"
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub trait MyTrait {
+ pub fn func$0() {}
+ }
+}
+"#,
+ "rust-analyzer cargo foo 0.1.0 module/MyTrait#func().",
+ );
+ }
+
+ #[test]
+ fn symbol_for_trait_constant() {
+ check_symbol(
+ r#"
+ //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+ pub mod module {
+ pub trait MyTrait {
+ const MY_CONST$0: u8;
+ }
+ }
+ "#,
+ "rust-analyzer cargo foo 0.1.0 module/MyTrait#MY_CONST.",
+ );
+ }
+
+ #[test]
+ fn symbol_for_trait_type() {
+ check_symbol(
+ r#"
+ //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+ pub mod module {
+ pub trait MyTrait {
+ type MyType$0;
+ }
+ }
+ "#,
+ // "foo::module::MyTrait::MyType",
+ "rust-analyzer cargo foo 0.1.0 module/MyTrait#[MyType]",
+ );
+ }
+
+ #[test]
+ fn symbol_for_trait_impl_function() {
+ check_symbol(
+ r#"
+ //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+ pub mod module {
+ pub trait MyTrait {
+ pub fn func() {}
+ }
+
+ struct MyStruct {}
+
+ impl MyTrait for MyStruct {
+ pub fn func$0() {}
+ }
+ }
+ "#,
+ // "foo::module::MyStruct::MyTrait::func",
+ "rust-analyzer cargo foo 0.1.0 module/MyStruct#MyTrait#func().",
+ );
+ }
+
+ #[test]
+ fn symbol_for_field() {
+ check_symbol(
+ r#"
+ //- /lib.rs crate:main deps:foo
+ use foo::St;
+ fn main() {
+ let x = St { a$0: 2 };
+ }
+ //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+ pub struct St {
+ pub a: i32,
+ }
+ "#,
+ "rust-analyzer cargo foo 0.1.0 St#a.",
+ );
+ }
+
+ #[test]
+ fn local_symbol_for_local() {
+ check_symbol(
+ r#"
+ //- /lib.rs crate:main deps:foo
+ use foo::module::func;
+ fn main() {
+ func();
+ }
+ //- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+ pub mod module {
+ pub fn func() {
+ let x$0 = 2;
+ }
+ }
+ "#,
+ "",
+ );
+ }
+
+ #[test]
+ fn global_symbol_for_pub_struct() {
+ check_symbol(
+ r#"
+ //- /lib.rs crate:main
+ mod foo;
+
+ fn main() {
+ let _bar = foo::Bar { i: 0 };
+ }
+ //- /foo.rs
+ pub struct Bar$0 {
+ pub i: i32,
+ }
+ "#,
+ "rust-analyzer cargo main . foo/Bar#",
+ );
+ }
+
+ #[test]
+ fn global_symbol_for_pub_struct_reference() {
+ check_symbol(
+ r#"
+ //- /lib.rs crate:main
+ mod foo;
+
+ fn main() {
+ let _bar = foo::Bar$0 { i: 0 };
+ }
+ //- /foo.rs
+ pub struct Bar {
+ pub i: i32,
+ }
+ "#,
+ "rust-analyzer cargo main . foo/Bar#",
+ );
+ }
+}
--- /dev/null
- cargo_unsetTest: Vec<String> = "[\"core\"]",
+//! Config used by the language server.
+//!
+//! We currently get this config from `initialize` LSP request, which is not the
+//! best way to do it, but was the simplest thing we could implement.
+//!
+//! Of particular interest is the `feature_flags` hash map: while other fields
+//! configure the server itself, feature flags are passed into analysis, and
+//! tweak things like automatic insertion of `()` in completions.
+
+use std::{fmt, iter, path::PathBuf};
+
+use flycheck::FlycheckConfig;
+use ide::{
+ AssistConfig, CallableSnippets, CompletionConfig, DiagnosticsConfig, ExprFillDefaultMode,
+ HighlightConfig, HighlightRelatedConfig, HoverConfig, HoverDocFormat, InlayHintsConfig,
+ JoinLinesConfig, Snippet, SnippetScope,
+};
+use ide_db::{
+ imports::insert_use::{ImportGranularity, InsertUseConfig, PrefixKind},
+ SnippetCap,
+};
+use itertools::Itertools;
+use lsp_types::{ClientCapabilities, MarkupKind};
+use project_model::{
+ CargoConfig, CargoFeatures, ProjectJson, ProjectJsonData, ProjectManifest, RustcSource,
+ UnsetTestCrates,
+};
+use rustc_hash::{FxHashMap, FxHashSet};
+use serde::{de::DeserializeOwned, Deserialize};
+use vfs::AbsPathBuf;
+
+use crate::{
+ caps::completion_item_edit_resolve,
+ diagnostics::DiagnosticsMapConfig,
+ line_index::PositionEncoding,
+ lsp_ext::{self, supports_utf8, WorkspaceSymbolSearchKind, WorkspaceSymbolSearchScope},
+};
+
+mod patch_old_style;
+
+// Conventions for configuration keys to preserve maximal extendability without breakage:
+// - Toggles (be it binary true/false or with more options in-between) should almost always suffix as `_enable`
+// This has the benefit of namespaces being extensible, and if the suffix doesn't fit later it can be changed without breakage.
+// - In general be wary of using the namespace of something verbatim, it prevents us from adding subkeys in the future
+// - Don't use abbreviations unless really necessary
+// - foo_command = overrides the subcommand, foo_overrideCommand allows full overwriting, extra args only applies for foo_command
+
+// Defines the server-side configuration of the rust-analyzer. We generate
+// *parts* of VS Code's `package.json` config from this. Run `cargo test` to
+// re-generate that file.
+//
+// However, editor specific config, which the server doesn't know about, should
+// be specified directly in `package.json`.
+//
+// To deprecate an option by replacing it with another name, use `new_name | old_name` so that we
+// keep parsing the old name.
+config_data! {
+ struct ConfigData {
+ /// Whether to insert #[must_use] when generating `as_` methods
+ /// for enum variants.
+ assist_emitMustUse: bool = "false",
+ /// Placeholder expression to use for missing expressions in assists.
+ assist_expressionFillDefault: ExprFillDefaultDef = "\"todo\"",
+
+ /// Warm up caches on project load.
+ cachePriming_enable: bool = "true",
+ /// How many worker threads to handle priming caches. The default `0` means to pick automatically.
+ cachePriming_numThreads: ParallelCachePrimingNumThreads = "0",
+
+ /// Automatically refresh project info via `cargo metadata` on
+ /// `Cargo.toml` or `.cargo/config.toml` changes.
+ cargo_autoreload: bool = "true",
+ /// Run build scripts (`build.rs`) for more precise code analysis.
+ cargo_buildScripts_enable: bool = "true",
+ /// Specifies the working directory for running build scripts.
+ /// - "workspace": run build scripts for a workspace in the workspace's root directory.
+ /// This is incompatible with `#rust-analyzer.cargo.buildScripts.invocationStrategy#` set to `once`.
+ /// - "root": run build scripts in the project's root directory.
+ /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
+ /// is set.
+ cargo_buildScripts_invocationLocation: InvocationLocation = "\"workspace\"",
+ /// Specifies the invocation strategy to use when running the build scripts command.
+ /// If `per_workspace` is set, the command will be executed for each workspace.
+ /// If `once` is set, the command will be executed once.
+ /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
+ /// is set.
+ cargo_buildScripts_invocationStrategy: InvocationStrategy = "\"per_workspace\"",
+ /// Override the command rust-analyzer uses to run build scripts and
+ /// build procedural macros. The command is required to output json
+ /// and should therefore include `--message-format=json` or a similar
+ /// option.
+ ///
+ /// By default, a cargo invocation will be constructed for the configured
+ /// targets and features, with the following base command line:
+ ///
+ /// ```bash
+ /// cargo check --quiet --workspace --message-format=json --all-targets
+ /// ```
+ /// .
+ cargo_buildScripts_overrideCommand: Option<Vec<String>> = "null",
+ /// Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to
+ /// avoid checking unnecessary things.
+ cargo_buildScripts_useRustcWrapper: bool = "true",
+ /// Extra environment variables that will be set when running cargo, rustc
+ /// or other commands within the workspace. Useful for setting RUSTFLAGS.
+ cargo_extraEnv: FxHashMap<String, String> = "{}",
+ /// List of features to activate.
+ ///
+ /// Set this to `"all"` to pass `--all-features` to cargo.
+ cargo_features: CargoFeaturesDef = "[]",
+ /// Whether to pass `--no-default-features` to cargo.
+ cargo_noDefaultFeatures: bool = "false",
+ /// Relative path to the sysroot, or "discover" to try to automatically find it via
+ /// "rustc --print sysroot".
+ ///
+ /// Unsetting this disables sysroot loading.
+ ///
+ /// This option does not take effect until rust-analyzer is restarted.
+ cargo_sysroot: Option<String> = "\"discover\"",
+ /// Compilation target override (target triple).
++ // FIXME(@poliorcetics): move to multiple targets here too, but this will need more work
++ // than `checkOnSave_target`
+ cargo_target: Option<String> = "null",
+ /// Unsets `#[cfg(test)]` for the specified crates.
- /// should therefor include `--message-format=json` or a similar option.
++ cargo_unsetTest: Vec<String> = "[\"core\"]",
+
+ /// Check all targets and tests (`--all-targets`).
+ checkOnSave_allTargets: bool = "true",
+ /// Cargo command to use for `cargo check`.
+ checkOnSave_command: String = "\"check\"",
+ /// Run specified `cargo check` command for diagnostics on save.
+ checkOnSave_enable: bool = "true",
+ /// Extra arguments for `cargo check`.
+ checkOnSave_extraArgs: Vec<String> = "[]",
+ /// Extra environment variables that will be set when running `cargo check`.
+ /// Extends `#rust-analyzer.cargo.extraEnv#`.
+ checkOnSave_extraEnv: FxHashMap<String, String> = "{}",
+ /// List of features to activate. Defaults to
+ /// `#rust-analyzer.cargo.features#`.
+ ///
+ /// Set to `"all"` to pass `--all-features` to Cargo.
+ checkOnSave_features: Option<CargoFeaturesDef> = "null",
+ /// Specifies the working directory for running checks.
+ /// - "workspace": run checks for workspaces in the corresponding workspaces' root directories.
+ // FIXME: Ideally we would support this in some way
+ /// This falls back to "root" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`.
+ /// - "root": run checks in the project's root directory.
+ /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
+ /// is set.
+ checkOnSave_invocationLocation: InvocationLocation = "\"workspace\"",
+ /// Specifies the invocation strategy to use when running the checkOnSave command.
+ /// If `per_workspace` is set, the command will be executed for each workspace.
+ /// If `once` is set, the command will be executed once.
+ /// This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
+ /// is set.
+ checkOnSave_invocationStrategy: InvocationStrategy = "\"per_workspace\"",
+ /// Whether to pass `--no-default-features` to Cargo. Defaults to
+ /// `#rust-analyzer.cargo.noDefaultFeatures#`.
+ checkOnSave_noDefaultFeatures: Option<bool> = "null",
+ /// Override the command rust-analyzer uses instead of `cargo check` for
+ /// diagnostics on save. The command is required to output json and
- /// Check for a specific target. Defaults to
- /// `#rust-analyzer.cargo.target#`.
- checkOnSave_target: Option<String> = "null",
++ /// should therefore include `--message-format=json` or a similar option.
+ ///
+ /// If you're changing this because you're using some tool wrapping
+ /// Cargo, you might also want to change
+ /// `#rust-analyzer.cargo.buildScripts.overrideCommand#`.
+ ///
+ /// If there are multiple linked projects, this command is invoked for
+ /// each of them, with the working directory being the project root
+ /// (i.e., the folder containing the `Cargo.toml`).
+ ///
+ /// An example command would be:
+ ///
+ /// ```bash
+ /// cargo check --workspace --message-format=json --all-targets
+ /// ```
+ /// .
+ checkOnSave_overrideCommand: Option<Vec<String>> = "null",
- /// Whether to show inlay type hints for compiler inserted reborrows.
++ /// Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty.
++ ///
++ /// Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g.
++ /// `["aarch64-apple-darwin", "x86_64-apple-darwin"]`.
++ ///
++ /// Aliased as `"checkOnSave.targets"`.
++ checkOnSave_target | checkOnSave_targets: CheckOnSaveTargets = "[]",
+
+ /// Toggles the additional completions that automatically add imports when completed.
+ /// Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
+ completion_autoimport_enable: bool = "true",
+ /// Toggles the additional completions that automatically show method calls and field accesses
+ /// with `self` prefixed to them when inside a method.
+ completion_autoself_enable: bool = "true",
+ /// Whether to add parenthesis and argument snippets when completing function.
+ completion_callable_snippets: CallableCompletionDef = "\"fill_arguments\"",
+ /// Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
+ completion_postfix_enable: bool = "true",
+ /// Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.
+ completion_privateEditable_enable: bool = "false",
+ /// Custom completion snippets.
+ // NOTE: Keep this list in sync with the feature docs of user snippets.
+ completion_snippets_custom: FxHashMap<String, SnippetDef> = r#"{
+ "Arc::new": {
+ "postfix": "arc",
+ "body": "Arc::new(${receiver})",
+ "requires": "std::sync::Arc",
+ "description": "Put the expression into an `Arc`",
+ "scope": "expr"
+ },
+ "Rc::new": {
+ "postfix": "rc",
+ "body": "Rc::new(${receiver})",
+ "requires": "std::rc::Rc",
+ "description": "Put the expression into an `Rc`",
+ "scope": "expr"
+ },
+ "Box::pin": {
+ "postfix": "pinbox",
+ "body": "Box::pin(${receiver})",
+ "requires": "std::boxed::Box",
+ "description": "Put the expression into a pinned `Box`",
+ "scope": "expr"
+ },
+ "Ok": {
+ "postfix": "ok",
+ "body": "Ok(${receiver})",
+ "description": "Wrap the expression in a `Result::Ok`",
+ "scope": "expr"
+ },
+ "Err": {
+ "postfix": "err",
+ "body": "Err(${receiver})",
+ "description": "Wrap the expression in a `Result::Err`",
+ "scope": "expr"
+ },
+ "Some": {
+ "postfix": "some",
+ "body": "Some(${receiver})",
+ "description": "Wrap the expression in an `Option::Some`",
+ "scope": "expr"
+ }
+ }"#,
+
+ /// List of rust-analyzer diagnostics to disable.
+ diagnostics_disabled: FxHashSet<String> = "[]",
+ /// Whether to show native rust-analyzer diagnostics.
+ diagnostics_enable: bool = "true",
+ /// Whether to show experimental rust-analyzer diagnostics that might
+ /// have more false positives than usual.
+ diagnostics_experimental_enable: bool = "false",
+ /// Map of prefixes to be substituted when parsing diagnostic file paths.
+ /// This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.
+ diagnostics_remapPrefix: FxHashMap<String, String> = "{}",
+ /// List of warnings that should be displayed with hint severity.
+ ///
+ /// The warnings will be indicated by faded text or three dots in code
+ /// and will not show up in the `Problems Panel`.
+ diagnostics_warningsAsHint: Vec<String> = "[]",
+ /// List of warnings that should be displayed with info severity.
+ ///
+ /// The warnings will be indicated by a blue squiggly underline in code
+ /// and a blue icon in the `Problems Panel`.
+ diagnostics_warningsAsInfo: Vec<String> = "[]",
+
+ /// These directories will be ignored by rust-analyzer. They are
+ /// relative to the workspace root, and globs are not supported. You may
+ /// also need to add the folders to Code's `files.watcherExclude`.
+ files_excludeDirs: Vec<PathBuf> = "[]",
+ /// Controls file watching implementation.
+ files_watcher: FilesWatcherDef = "\"client\"",
++
+ /// Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
+ highlightRelated_breakPoints_enable: bool = "true",
+ /// Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).
+ highlightRelated_exitPoints_enable: bool = "true",
+ /// Enables highlighting of related references while the cursor is on any identifier.
+ highlightRelated_references_enable: bool = "true",
+ /// Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords.
+ highlightRelated_yieldPoints_enable: bool = "true",
+
+ /// Whether to show `Debug` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_debug_enable: bool = "true",
+ /// Whether to show HoverActions in Rust files.
+ hover_actions_enable: bool = "true",
+ /// Whether to show `Go to Type Definition` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_gotoTypeDef_enable: bool = "true",
+ /// Whether to show `Implementations` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_implementations_enable: bool = "true",
+ /// Whether to show `References` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_references_enable: bool = "false",
+ /// Whether to show `Run` action. Only applies when
+ /// `#rust-analyzer.hover.actions.enable#` is set.
+ hover_actions_run_enable: bool = "true",
+
+ /// Whether to show documentation on hover.
+ hover_documentation_enable: bool = "true",
+ /// Whether to show keyword hover popups. Only applies when
+ /// `#rust-analyzer.hover.documentation.enable#` is set.
+ hover_documentation_keywords_enable: bool = "true",
+ /// Use markdown syntax for links in hover.
+ hover_links_enable: bool = "true",
+
+ /// Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.
+ imports_granularity_enforce: bool = "false",
+ /// How imports should be grouped into use statements.
+ imports_granularity_group: ImportGranularityDef = "\"crate\"",
+ /// Group inserted imports by the https://rust-analyzer.github.io/manual.html#auto-import[following order]. Groups are separated by newlines.
+ imports_group_enable: bool = "true",
+ /// Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
+ imports_merge_glob: bool = "true",
+ /// Prefer to unconditionally use imports of the core and alloc crate, over the std crate.
+ imports_prefer_no_std: bool = "false",
+ /// The path structure for newly inserted paths to use.
+ imports_prefix: ImportPrefixDef = "\"plain\"",
+
+ /// Whether to show inlay type hints for binding modes.
+ inlayHints_bindingModeHints_enable: bool = "false",
+ /// Whether to show inlay type hints for method chains.
+ inlayHints_chainingHints_enable: bool = "true",
+ /// Whether to show inlay hints after a closing `}` to indicate what item it belongs to.
+ inlayHints_closingBraceHints_enable: bool = "true",
+ /// Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1
+ /// to always show them).
+ inlayHints_closingBraceHints_minLines: usize = "25",
+ /// Whether to show inlay type hints for return types of closures.
+ inlayHints_closureReturnTypeHints_enable: ClosureReturnTypeHintsDef = "\"never\"",
++ /// Whether to show inlay hints for type adjustments.
++ inlayHints_expressionAdjustmentHints_enable: AdjustmentHintsDef = "\"never\"",
+ /// Whether to show inlay type hints for elided lifetimes in function signatures.
+ inlayHints_lifetimeElisionHints_enable: LifetimeElisionDef = "\"never\"",
+ /// Whether to prefer using parameter names as the name for elided lifetime hints if possible.
+ inlayHints_lifetimeElisionHints_useParameterNames: bool = "false",
+ /// Maximum length for inlay hints. Set to null to have an unlimited length.
+ inlayHints_maxLength: Option<usize> = "25",
+ /// Whether to show function parameter name inlay hints at the call
+ /// site.
+ inlayHints_parameterHints_enable: bool = "true",
- target_triple: self
- .data
- .checkOnSave_target
- .clone()
- .or_else(|| self.data.cargo_target.clone()),
++ /// Whether to show inlay hints for compiler inserted reborrows.
++ /// This setting is deprecated in favor of #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#.
+ inlayHints_reborrowHints_enable: ReborrowHintsDef = "\"never\"",
+ /// Whether to render leading colons for type hints, and trailing colons for parameter hints.
+ inlayHints_renderColons: bool = "true",
+ /// Whether to show inlay type hints for variables.
+ inlayHints_typeHints_enable: bool = "true",
+ /// Whether to hide inlay type hints for `let` statements that initialize to a closure.
+ /// Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.
+ inlayHints_typeHints_hideClosureInitialization: bool = "false",
+ /// Whether to hide inlay type hints for constructors.
+ inlayHints_typeHints_hideNamedConstructor: bool = "false",
+
+ /// Join lines merges consecutive declaration and initialization of an assignment.
+ joinLines_joinAssignments: bool = "true",
+ /// Join lines inserts else between consecutive ifs.
+ joinLines_joinElseIf: bool = "true",
+ /// Join lines removes trailing commas.
+ joinLines_removeTrailingComma: bool = "true",
+ /// Join lines unwraps trivial blocks.
+ joinLines_unwrapTrivialBlock: bool = "true",
+
+
+ /// Whether to show `Debug` lens. Only applies when
+ /// `#rust-analyzer.lens.enable#` is set.
+ lens_debug_enable: bool = "true",
+ /// Whether to show CodeLens in Rust files.
+ lens_enable: bool = "true",
+ /// Internal config: use custom client-side commands even when the
+ /// client doesn't set the corresponding capability.
+ lens_forceCustomCommands: bool = "true",
+ /// Whether to show `Implementations` lens. Only applies when
+ /// `#rust-analyzer.lens.enable#` is set.
+ lens_implementations_enable: bool = "true",
+ /// Where to render annotations.
+ lens_location: AnnotationLocation = "\"above_name\"",
+ /// Whether to show `References` lens for Struct, Enum, and Union.
+ /// Only applies when `#rust-analyzer.lens.enable#` is set.
+ lens_references_adt_enable: bool = "false",
+ /// Whether to show `References` lens for Enum Variants.
+ /// Only applies when `#rust-analyzer.lens.enable#` is set.
+ lens_references_enumVariant_enable: bool = "false",
+ /// Whether to show `Method References` lens. Only applies when
+ /// `#rust-analyzer.lens.enable#` is set.
+ lens_references_method_enable: bool = "false",
+ /// Whether to show `References` lens for Trait.
+ /// Only applies when `#rust-analyzer.lens.enable#` is set.
+ lens_references_trait_enable: bool = "false",
+ /// Whether to show `Run` lens. Only applies when
+ /// `#rust-analyzer.lens.enable#` is set.
+ lens_run_enable: bool = "true",
+
+ /// Disable project auto-discovery in favor of explicitly specified set
+ /// of projects.
+ ///
+ /// Elements must be paths pointing to `Cargo.toml`,
+ /// `rust-project.json`, or JSON objects in `rust-project.json` format.
+ linkedProjects: Vec<ManifestOrProjectJson> = "[]",
+
+ /// Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.
+ lru_capacity: Option<usize> = "null",
+
+ /// Whether to show `can't find Cargo.toml` error message.
+ notifications_cargoTomlNotFound: bool = "true",
+
+ /// Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.
+ procMacro_attributes_enable: bool = "true",
+ /// Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.
+ procMacro_enable: bool = "true",
+ /// These proc-macros will be ignored when trying to expand them.
+ ///
+ /// This config takes a map of crate names with the exported proc-macro names to ignore as values.
+ procMacro_ignored: FxHashMap<Box<str>, Box<[Box<str>]>> = "{}",
+ /// Internal config, path to proc-macro server executable (typically,
+ /// this is rust-analyzer itself, but we override this in tests).
+ procMacro_server: Option<PathBuf> = "null",
+
+ /// Exclude imports from find-all-references.
+ references_excludeImports: bool = "false",
+
+ /// Command to be executed instead of 'cargo' for runnables.
+ runnables_command: Option<String> = "null",
+ /// Additional arguments to be passed to cargo for runnables such as
+ /// tests or binaries. For example, it may be `--release`.
+ runnables_extraArgs: Vec<String> = "[]",
+
+ /// Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private
+ /// projects, or "discover" to try to automatically find it if the `rustc-dev` component
+ /// is installed.
+ ///
+ /// Any project which uses rust-analyzer with the rustcPrivate
+ /// crates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.
+ ///
+ /// This option does not take effect until rust-analyzer is restarted.
+ rustc_source: Option<String> = "null",
+
+ /// Additional arguments to `rustfmt`.
+ rustfmt_extraArgs: Vec<String> = "[]",
+ /// Advanced option, fully override the command rust-analyzer uses for
+ /// formatting.
+ rustfmt_overrideCommand: Option<Vec<String>> = "null",
+ /// Enables the use of rustfmt's unstable range formatting command for the
+ /// `textDocument/rangeFormatting` request. The rustfmt option is unstable and only
+ /// available on a nightly build.
+ rustfmt_rangeFormatting_enable: bool = "false",
+
+ /// Inject additional highlighting into doc comments.
+ ///
+ /// When enabled, rust-analyzer will highlight rust source in doc comments as well as intra
+ /// doc links.
+ semanticHighlighting_doc_comment_inject_enable: bool = "true",
+ /// Use semantic tokens for operators.
+ ///
+ /// When disabled, rust-analyzer will emit semantic tokens only for operator tokens when
+ /// they are tagged with modifiers.
+ semanticHighlighting_operator_enable: bool = "true",
+ /// Use specialized semantic tokens for operators.
+ ///
+ /// When enabled, rust-analyzer will emit special token types for operator tokens instead
+ /// of the generic `operator` token type.
+ semanticHighlighting_operator_specialization_enable: bool = "false",
+ /// Use semantic tokens for punctuations.
+ ///
+ /// When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when
+ /// they are tagged with modifiers or have a special role.
+ semanticHighlighting_punctuation_enable: bool = "false",
+ /// When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro
+ /// calls.
+ semanticHighlighting_punctuation_separate_macro_bang: bool = "false",
+ /// Use specialized semantic tokens for punctuations.
+ ///
+ /// When enabled, rust-analyzer will emit special token types for punctuation tokens instead
+ /// of the generic `punctuation` token type.
+ semanticHighlighting_punctuation_specialization_enable: bool = "false",
+ /// Use semantic tokens for strings.
+ ///
+ /// In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
+ /// By disabling semantic tokens for strings, other grammars can be used to highlight
+ /// their contents.
+ semanticHighlighting_strings_enable: bool = "true",
+
+ /// Show full signature of the callable. Only shows parameters if disabled.
+ signatureInfo_detail: SignatureDetail = "\"full\"",
+ /// Show documentation.
+ signatureInfo_documentation_enable: bool = "true",
+
+ /// Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list.
+ typing_autoClosingAngleBrackets_enable: bool = "false",
+
+ /// Workspace symbol search kind.
+ workspace_symbol_search_kind: WorkspaceSymbolSearchKindDef = "\"only_types\"",
+ /// Limits the number of items returned from a workspace symbol search (Defaults to 128).
+ /// Some clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.
+ /// Other clients require all results upfront and might require a higher limit.
+ workspace_symbol_search_limit: usize = "128",
+ /// Workspace symbol search scope.
+ workspace_symbol_search_scope: WorkspaceSymbolSearchScopeDef = "\"workspace\"",
+ }
+}
+
+impl Default for ConfigData {
+ fn default() -> Self {
+ ConfigData::from_json(serde_json::Value::Null, &mut Vec::new())
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct Config {
+ pub discovered_projects: Option<Vec<ProjectManifest>>,
+ caps: lsp_types::ClientCapabilities,
+ root_path: AbsPathBuf,
+ data: ConfigData,
+ detached_files: Vec<AbsPathBuf>,
+ snippets: Vec<Snippet>,
+}
+
+type ParallelCachePrimingNumThreads = u8;
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum LinkedProject {
+ ProjectManifest(ProjectManifest),
+ InlineJsonProject(ProjectJson),
+}
+
+impl From<ProjectManifest> for LinkedProject {
+ fn from(v: ProjectManifest) -> Self {
+ LinkedProject::ProjectManifest(v)
+ }
+}
+
+impl From<ProjectJson> for LinkedProject {
+ fn from(v: ProjectJson) -> Self {
+ LinkedProject::InlineJsonProject(v)
+ }
+}
+
+pub struct CallInfoConfig {
+ pub params_only: bool,
+ pub docs: bool,
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct LensConfig {
+ // runnables
+ pub run: bool,
+ pub debug: bool,
+
+ // implementations
+ pub implementations: bool,
+
+ // references
+ pub method_refs: bool,
+ pub refs_adt: bool, // for Struct, Enum, Union and Trait
+ pub refs_trait: bool, // for Struct, Enum, Union and Trait
+ pub enum_variant_refs: bool,
+
+ // annotations
+ pub location: AnnotationLocation,
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Deserialize)]
+#[serde(rename_all = "snake_case")]
+pub enum AnnotationLocation {
+ AboveName,
+ AboveWholeItem,
+}
+
+impl From<AnnotationLocation> for ide::AnnotationLocation {
+ fn from(location: AnnotationLocation) -> Self {
+ match location {
+ AnnotationLocation::AboveName => ide::AnnotationLocation::AboveName,
+ AnnotationLocation::AboveWholeItem => ide::AnnotationLocation::AboveWholeItem,
+ }
+ }
+}
+
+impl LensConfig {
+ pub fn any(&self) -> bool {
+ self.run
+ || self.debug
+ || self.implementations
+ || self.method_refs
+ || self.refs_adt
+ || self.refs_trait
+ || self.enum_variant_refs
+ }
+
+ pub fn none(&self) -> bool {
+ !self.any()
+ }
+
+ pub fn runnable(&self) -> bool {
+ self.run || self.debug
+ }
+
+ pub fn references(&self) -> bool {
+ self.method_refs || self.refs_adt || self.refs_trait || self.enum_variant_refs
+ }
+}
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct HoverActionsConfig {
+ pub implementations: bool,
+ pub references: bool,
+ pub run: bool,
+ pub debug: bool,
+ pub goto_type_def: bool,
+}
+
+impl HoverActionsConfig {
+ pub const NO_ACTIONS: Self = Self {
+ implementations: false,
+ references: false,
+ run: false,
+ debug: false,
+ goto_type_def: false,
+ };
+
+ pub fn any(&self) -> bool {
+ self.implementations || self.references || self.runnable() || self.goto_type_def
+ }
+
+ pub fn none(&self) -> bool {
+ !self.any()
+ }
+
+ pub fn runnable(&self) -> bool {
+ self.run || self.debug
+ }
+}
+
+#[derive(Debug, Clone)]
+pub struct FilesConfig {
+ pub watcher: FilesWatcher,
+ pub exclude: Vec<AbsPathBuf>,
+}
+
+#[derive(Debug, Clone)]
+pub enum FilesWatcher {
+ Client,
+ Server,
+}
+
+#[derive(Debug, Clone)]
+pub struct NotificationsConfig {
+ pub cargo_toml_not_found: bool,
+}
+
+#[derive(Debug, Clone)]
+pub enum RustfmtConfig {
+ Rustfmt { extra_args: Vec<String>, enable_range_formatting: bool },
+ CustomCommand { command: String, args: Vec<String> },
+}
+
+/// Configuration for runnable items, such as `main` function or tests.
+#[derive(Debug, Clone)]
+pub struct RunnablesConfig {
+ /// Custom command to be executed instead of `cargo` for runnables.
+ pub override_cargo: Option<String>,
+ /// Additional arguments for the `cargo`, e.g. `--release`.
+ pub cargo_extra_args: Vec<String>,
+}
+
+/// Configuration for workspace symbol search requests.
+#[derive(Debug, Clone)]
+pub struct WorkspaceSymbolConfig {
+ /// In what scope should the symbol be searched in.
+ pub search_scope: WorkspaceSymbolSearchScope,
+ /// What kind of symbol is being searched for.
+ pub search_kind: WorkspaceSymbolSearchKind,
+ /// How many items are returned at most.
+ pub search_limit: usize,
+}
+
+pub struct ClientCommandsConfig {
+ pub run_single: bool,
+ pub debug_single: bool,
+ pub show_reference: bool,
+ pub goto_location: bool,
+ pub trigger_parameter_hints: bool,
+}
+
+#[derive(Debug)]
+pub struct ConfigUpdateError {
+ errors: Vec<(String, serde_json::Error)>,
+}
+
+impl fmt::Display for ConfigUpdateError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let errors = self.errors.iter().format_with("\n", |(key, e), f| {
+ f(key)?;
+ f(&": ")?;
+ f(e)
+ });
+ write!(
+ f,
+ "rust-analyzer found {} invalid config value{}:\n{}",
+ self.errors.len(),
+ if self.errors.len() == 1 { "" } else { "s" },
+ errors
+ )
+ }
+}
+
+impl Config {
+ pub fn new(root_path: AbsPathBuf, caps: ClientCapabilities) -> Self {
+ Config {
+ caps,
+ data: ConfigData::default(),
+ detached_files: Vec::new(),
+ discovered_projects: None,
+ root_path,
+ snippets: Default::default(),
+ }
+ }
+
+ pub fn update(&mut self, mut json: serde_json::Value) -> Result<(), ConfigUpdateError> {
+ tracing::info!("updating config from JSON: {:#}", json);
+ if json.is_null() || json.as_object().map_or(false, |it| it.is_empty()) {
+ return Ok(());
+ }
+ let mut errors = Vec::new();
+ self.detached_files =
+ get_field::<Vec<PathBuf>>(&mut json, &mut errors, "detachedFiles", None, "[]")
+ .into_iter()
+ .map(AbsPathBuf::assert)
+ .collect();
+ patch_old_style::patch_json_for_outdated_configs(&mut json);
+ self.data = ConfigData::from_json(json, &mut errors);
+ tracing::debug!("deserialized config data: {:#?}", self.data);
+ self.snippets.clear();
+ for (name, def) in self.data.completion_snippets_custom.iter() {
+ if def.prefix.is_empty() && def.postfix.is_empty() {
+ continue;
+ }
+ let scope = match def.scope {
+ SnippetScopeDef::Expr => SnippetScope::Expr,
+ SnippetScopeDef::Type => SnippetScope::Type,
+ SnippetScopeDef::Item => SnippetScope::Item,
+ };
+ match Snippet::new(
+ &def.prefix,
+ &def.postfix,
+ &def.body,
+ def.description.as_ref().unwrap_or(name),
+ &def.requires,
+ scope,
+ ) {
+ Some(snippet) => self.snippets.push(snippet),
+ None => errors.push((
+ format!("snippet {name} is invalid"),
+ <serde_json::Error as serde::de::Error>::custom(
+ "snippet path is invalid or triggers are missing",
+ ),
+ )),
+ }
+ }
+
+ self.validate(&mut errors);
+
+ if errors.is_empty() {
+ Ok(())
+ } else {
+ Err(ConfigUpdateError { errors })
+ }
+ }
+
+ fn validate(&self, error_sink: &mut Vec<(String, serde_json::Error)>) {
+ use serde::de::Error;
+ if self.data.checkOnSave_command.is_empty() {
+ error_sink.push((
+ "/checkOnSave/command".to_string(),
+ serde_json::Error::custom("expected a non-empty string"),
+ ));
+ }
+ }
+
+ pub fn json_schema() -> serde_json::Value {
+ ConfigData::json_schema()
+ }
+
+ pub fn root_path(&self) -> &AbsPathBuf {
+ &self.root_path
+ }
+
+ pub fn caps(&self) -> &lsp_types::ClientCapabilities {
+ &self.caps
+ }
+
+ pub fn detached_files(&self) -> &[AbsPathBuf] {
+ &self.detached_files
+ }
+}
+
+macro_rules! try_ {
+ ($expr:expr) => {
+ || -> _ { Some($expr) }()
+ };
+}
+macro_rules! try_or {
+ ($expr:expr, $or:expr) => {
+ try_!($expr).unwrap_or($or)
+ };
+}
+
+macro_rules! try_or_def {
+ ($expr:expr) => {
+ try_!($expr).unwrap_or_default()
+ };
+}
+
+impl Config {
+ pub fn linked_projects(&self) -> Vec<LinkedProject> {
+ match self.data.linkedProjects.as_slice() {
+ [] => match self.discovered_projects.as_ref() {
+ Some(discovered_projects) => {
+ let exclude_dirs: Vec<_> = self
+ .data
+ .files_excludeDirs
+ .iter()
+ .map(|p| self.root_path.join(p))
+ .collect();
+ discovered_projects
+ .iter()
+ .filter(|p| {
+ let (ProjectManifest::ProjectJson(path)
+ | ProjectManifest::CargoToml(path)) = p;
+ !exclude_dirs.iter().any(|p| path.starts_with(p))
+ })
+ .cloned()
+ .map(LinkedProject::from)
+ .collect()
+ }
+ None => Vec::new(),
+ },
+ linked_projects => linked_projects
+ .iter()
+ .filter_map(|linked_project| match linked_project {
+ ManifestOrProjectJson::Manifest(it) => {
+ let path = self.root_path.join(it);
+ ProjectManifest::from_manifest_file(path)
+ .map_err(|e| tracing::error!("failed to load linked project: {}", e))
+ .ok()
+ .map(Into::into)
+ }
+ ManifestOrProjectJson::ProjectJson(it) => {
+ Some(ProjectJson::new(&self.root_path, it.clone()).into())
+ }
+ })
+ .collect(),
+ }
+ }
+
+ pub fn did_save_text_document_dynamic_registration(&self) -> bool {
+ let caps = try_or_def!(self.caps.text_document.as_ref()?.synchronization.clone()?);
+ caps.did_save == Some(true) && caps.dynamic_registration == Some(true)
+ }
+
+ pub fn did_change_watched_files_dynamic_registration(&self) -> bool {
+ try_or_def!(
+ self.caps.workspace.as_ref()?.did_change_watched_files.as_ref()?.dynamic_registration?
+ )
+ }
+
+ pub fn prefill_caches(&self) -> bool {
+ self.data.cachePriming_enable
+ }
+
+ pub fn location_link(&self) -> bool {
+ try_or_def!(self.caps.text_document.as_ref()?.definition?.link_support?)
+ }
+
+ pub fn line_folding_only(&self) -> bool {
+ try_or_def!(self.caps.text_document.as_ref()?.folding_range.as_ref()?.line_folding_only?)
+ }
+
+ pub fn hierarchical_symbols(&self) -> bool {
+ try_or_def!(
+ self.caps
+ .text_document
+ .as_ref()?
+ .document_symbol
+ .as_ref()?
+ .hierarchical_document_symbol_support?
+ )
+ }
+
+ pub fn code_action_literals(&self) -> bool {
+ try_!(self
+ .caps
+ .text_document
+ .as_ref()?
+ .code_action
+ .as_ref()?
+ .code_action_literal_support
+ .as_ref()?)
+ .is_some()
+ }
+
+ pub fn work_done_progress(&self) -> bool {
+ try_or_def!(self.caps.window.as_ref()?.work_done_progress?)
+ }
+
+ pub fn will_rename(&self) -> bool {
+ try_or_def!(self.caps.workspace.as_ref()?.file_operations.as_ref()?.will_rename?)
+ }
+
+ pub fn change_annotation_support(&self) -> bool {
+ try_!(self
+ .caps
+ .workspace
+ .as_ref()?
+ .workspace_edit
+ .as_ref()?
+ .change_annotation_support
+ .as_ref()?)
+ .is_some()
+ }
+
+ pub fn code_action_resolve(&self) -> bool {
+ try_or_def!(self
+ .caps
+ .text_document
+ .as_ref()?
+ .code_action
+ .as_ref()?
+ .resolve_support
+ .as_ref()?
+ .properties
+ .as_slice())
+ .iter()
+ .any(|it| it == "edit")
+ }
+
+ pub fn signature_help_label_offsets(&self) -> bool {
+ try_or_def!(
+ self.caps
+ .text_document
+ .as_ref()?
+ .signature_help
+ .as_ref()?
+ .signature_information
+ .as_ref()?
+ .parameter_information
+ .as_ref()?
+ .label_offset_support?
+ )
+ }
+
+ pub fn completion_label_details_support(&self) -> bool {
+ try_!(self
+ .caps
+ .text_document
+ .as_ref()?
+ .completion
+ .as_ref()?
+ .completion_item
+ .as_ref()?
+ .label_details_support
+ .as_ref()?)
+ .is_some()
+ }
+
+ pub fn position_encoding(&self) -> PositionEncoding {
+ if supports_utf8(&self.caps) {
+ PositionEncoding::Utf8
+ } else {
+ PositionEncoding::Utf16
+ }
+ }
+
+ fn experimental(&self, index: &'static str) -> bool {
+ try_or_def!(self.caps.experimental.as_ref()?.get(index)?.as_bool()?)
+ }
+
+ pub fn code_action_group(&self) -> bool {
+ self.experimental("codeActionGroup")
+ }
+
+ pub fn server_status_notification(&self) -> bool {
+ self.experimental("serverStatusNotification")
+ }
+
+ pub fn publish_diagnostics(&self) -> bool {
+ self.data.diagnostics_enable
+ }
+
+ pub fn diagnostics(&self) -> DiagnosticsConfig {
+ DiagnosticsConfig {
+ proc_attr_macros_enabled: self.expand_proc_attr_macros(),
+ proc_macros_enabled: self.data.procMacro_enable,
+ disable_experimental: !self.data.diagnostics_experimental_enable,
+ disabled: self.data.diagnostics_disabled.clone(),
+ expr_fill_default: match self.data.assist_expressionFillDefault {
+ ExprFillDefaultDef::Todo => ExprFillDefaultMode::Todo,
+ ExprFillDefaultDef::Default => ExprFillDefaultMode::Default,
+ },
+ insert_use: self.insert_use_config(),
+ prefer_no_std: self.data.imports_prefer_no_std,
+ }
+ }
+
+ pub fn diagnostics_map(&self) -> DiagnosticsMapConfig {
+ DiagnosticsMapConfig {
+ remap_prefix: self.data.diagnostics_remapPrefix.clone(),
+ warnings_as_info: self.data.diagnostics_warningsAsInfo.clone(),
+ warnings_as_hint: self.data.diagnostics_warningsAsHint.clone(),
+ }
+ }
+
+ pub fn extra_env(&self) -> &FxHashMap<String, String> {
+ &self.data.cargo_extraEnv
+ }
+
+ pub fn check_on_save_extra_env(&self) -> FxHashMap<String, String> {
+ let mut extra_env = self.data.cargo_extraEnv.clone();
+ extra_env.extend(self.data.checkOnSave_extraEnv.clone());
+ extra_env
+ }
+
+ pub fn lru_capacity(&self) -> Option<usize> {
+ self.data.lru_capacity
+ }
+
+ pub fn proc_macro_srv(&self) -> Option<(AbsPathBuf, /* is path explicitly set */ bool)> {
+ if !self.data.procMacro_enable {
+ return None;
+ }
+ Some(match &self.data.procMacro_server {
+ Some(it) => (
+ AbsPathBuf::try_from(it.clone()).unwrap_or_else(|path| self.root_path.join(path)),
+ true,
+ ),
+ None => (AbsPathBuf::assert(std::env::current_exe().ok()?), false),
+ })
+ }
+
+ pub fn dummy_replacements(&self) -> &FxHashMap<Box<str>, Box<[Box<str>]>> {
+ &self.data.procMacro_ignored
+ }
+
+ pub fn expand_proc_attr_macros(&self) -> bool {
+ self.data.procMacro_enable && self.data.procMacro_attributes_enable
+ }
+
+ pub fn files(&self) -> FilesConfig {
+ FilesConfig {
+ watcher: match self.data.files_watcher {
+ FilesWatcherDef::Client if self.did_change_watched_files_dynamic_registration() => {
+ FilesWatcher::Client
+ }
+ _ => FilesWatcher::Server,
+ },
+ exclude: self.data.files_excludeDirs.iter().map(|it| self.root_path.join(it)).collect(),
+ }
+ }
+
+ pub fn notifications(&self) -> NotificationsConfig {
+ NotificationsConfig { cargo_toml_not_found: self.data.notifications_cargoTomlNotFound }
+ }
+
+ pub fn cargo_autoreload(&self) -> bool {
+ self.data.cargo_autoreload
+ }
+
+ pub fn run_build_scripts(&self) -> bool {
+ self.data.cargo_buildScripts_enable || self.data.procMacro_enable
+ }
+
+ pub fn cargo(&self) -> CargoConfig {
+ let rustc_source = self.data.rustc_source.as_ref().map(|rustc_src| {
+ if rustc_src == "discover" {
+ RustcSource::Discover
+ } else {
+ RustcSource::Path(self.root_path.join(rustc_src))
+ }
+ });
+ let sysroot = self.data.cargo_sysroot.as_ref().map(|sysroot| {
+ if sysroot == "discover" {
+ RustcSource::Discover
+ } else {
+ RustcSource::Path(self.root_path.join(sysroot))
+ }
+ });
+
+ CargoConfig {
+ features: match &self.data.cargo_features {
+ CargoFeaturesDef::All => CargoFeatures::All,
+ CargoFeaturesDef::Selected(features) => CargoFeatures::Selected {
+ features: features.clone(),
+ no_default_features: self.data.cargo_noDefaultFeatures,
+ },
+ },
+ target: self.data.cargo_target.clone(),
+ sysroot,
+ rustc_source,
+ unset_test_crates: UnsetTestCrates::Only(self.data.cargo_unsetTest.clone()),
+ wrap_rustc_in_build_scripts: self.data.cargo_buildScripts_useRustcWrapper,
+ invocation_strategy: match self.data.cargo_buildScripts_invocationStrategy {
+ InvocationStrategy::Once => project_model::InvocationStrategy::Once,
+ InvocationStrategy::PerWorkspace => project_model::InvocationStrategy::PerWorkspace,
+ },
+ invocation_location: match self.data.cargo_buildScripts_invocationLocation {
+ InvocationLocation::Root => {
+ project_model::InvocationLocation::Root(self.root_path.clone())
+ }
+ InvocationLocation::Workspace => project_model::InvocationLocation::Workspace,
+ },
+ run_build_script_command: self.data.cargo_buildScripts_overrideCommand.clone(),
+ extra_env: self.data.cargo_extraEnv.clone(),
+ }
+ }
+
+ pub fn rustfmt(&self) -> RustfmtConfig {
+ match &self.data.rustfmt_overrideCommand {
+ Some(args) if !args.is_empty() => {
+ let mut args = args.clone();
+ let command = args.remove(0);
+ RustfmtConfig::CustomCommand { command, args }
+ }
+ Some(_) | None => RustfmtConfig::Rustfmt {
+ extra_args: self.data.rustfmt_extraArgs.clone(),
+ enable_range_formatting: self.data.rustfmt_rangeFormatting_enable,
+ },
+ }
+ }
+
+ pub fn flycheck(&self) -> Option<FlycheckConfig> {
+ if !self.data.checkOnSave_enable {
+ return None;
+ }
+ let flycheck_config = match &self.data.checkOnSave_overrideCommand {
+ Some(args) if !args.is_empty() => {
+ let mut args = args.clone();
+ let command = args.remove(0);
+ FlycheckConfig::CustomCommand {
+ command,
+ args,
+ extra_env: self.check_on_save_extra_env(),
+ invocation_strategy: match self.data.checkOnSave_invocationStrategy {
+ InvocationStrategy::Once => flycheck::InvocationStrategy::Once,
+ InvocationStrategy::PerWorkspace => {
+ flycheck::InvocationStrategy::PerWorkspace
+ }
+ },
+ invocation_location: match self.data.checkOnSave_invocationLocation {
+ InvocationLocation::Root => {
+ flycheck::InvocationLocation::Root(self.root_path.clone())
+ }
+ InvocationLocation::Workspace => flycheck::InvocationLocation::Workspace,
+ },
+ }
+ }
+ Some(_) | None => FlycheckConfig::CargoCommand {
+ command: self.data.checkOnSave_command.clone(),
- reborrow_hints: match self.data.inlayHints_reborrowHints_enable {
- ReborrowHintsDef::Always => ide::ReborrowHints::Always,
- ReborrowHintsDef::Never => ide::ReborrowHints::Never,
- ReborrowHintsDef::Mutable => ide::ReborrowHints::MutableOnly,
++ target_triples: match &self.data.checkOnSave_target.0[..] {
++ [] => self.data.cargo_target.clone().into_iter().collect(),
++ targets => targets.into(),
++ },
+ all_targets: self.data.checkOnSave_allTargets,
+ no_default_features: self
+ .data
+ .checkOnSave_noDefaultFeatures
+ .unwrap_or(self.data.cargo_noDefaultFeatures),
+ all_features: matches!(
+ self.data.checkOnSave_features.as_ref().unwrap_or(&self.data.cargo_features),
+ CargoFeaturesDef::All
+ ),
+ features: match self
+ .data
+ .checkOnSave_features
+ .clone()
+ .unwrap_or_else(|| self.data.cargo_features.clone())
+ {
+ CargoFeaturesDef::All => vec![],
+ CargoFeaturesDef::Selected(it) => it,
+ },
+ extra_args: self.data.checkOnSave_extraArgs.clone(),
+ extra_env: self.check_on_save_extra_env(),
+ },
+ };
+ Some(flycheck_config)
+ }
+
+ pub fn runnables(&self) -> RunnablesConfig {
+ RunnablesConfig {
+ override_cargo: self.data.runnables_command.clone(),
+ cargo_extra_args: self.data.runnables_extraArgs.clone(),
+ }
+ }
+
+ pub fn inlay_hints(&self) -> InlayHintsConfig {
+ InlayHintsConfig {
+ render_colons: self.data.inlayHints_renderColons,
+ type_hints: self.data.inlayHints_typeHints_enable,
+ parameter_hints: self.data.inlayHints_parameterHints_enable,
+ chaining_hints: self.data.inlayHints_chainingHints_enable,
+ closure_return_type_hints: match self.data.inlayHints_closureReturnTypeHints_enable {
+ ClosureReturnTypeHintsDef::Always => ide::ClosureReturnTypeHints::Always,
+ ClosureReturnTypeHintsDef::Never => ide::ClosureReturnTypeHints::Never,
+ ClosureReturnTypeHintsDef::WithBlock => ide::ClosureReturnTypeHints::WithBlock,
+ },
+ lifetime_elision_hints: match self.data.inlayHints_lifetimeElisionHints_enable {
+ LifetimeElisionDef::Always => ide::LifetimeElisionHints::Always,
+ LifetimeElisionDef::Never => ide::LifetimeElisionHints::Never,
+ LifetimeElisionDef::SkipTrivial => ide::LifetimeElisionHints::SkipTrivial,
+ },
+ hide_named_constructor_hints: self.data.inlayHints_typeHints_hideNamedConstructor,
+ hide_closure_initialization_hints: self
+ .data
+ .inlayHints_typeHints_hideClosureInitialization,
++ adjustment_hints: match self.data.inlayHints_expressionAdjustmentHints_enable {
++ AdjustmentHintsDef::Always => ide::AdjustmentHints::Always,
++ AdjustmentHintsDef::Never => match self.data.inlayHints_reborrowHints_enable {
++ ReborrowHintsDef::Always | ReborrowHintsDef::Mutable => {
++ ide::AdjustmentHints::ReborrowOnly
++ }
++ ReborrowHintsDef::Never => ide::AdjustmentHints::Never,
++ },
++ AdjustmentHintsDef::Reborrow => ide::AdjustmentHints::ReborrowOnly,
+ },
+ binding_mode_hints: self.data.inlayHints_bindingModeHints_enable,
+ param_names_for_lifetime_elision_hints: self
+ .data
+ .inlayHints_lifetimeElisionHints_useParameterNames,
+ max_length: self.data.inlayHints_maxLength,
+ closing_brace_hints_min_lines: if self.data.inlayHints_closingBraceHints_enable {
+ Some(self.data.inlayHints_closingBraceHints_minLines)
+ } else {
+ None
+ },
+ }
+ }
+
+ fn insert_use_config(&self) -> InsertUseConfig {
+ InsertUseConfig {
+ granularity: match self.data.imports_granularity_group {
+ ImportGranularityDef::Preserve => ImportGranularity::Preserve,
+ ImportGranularityDef::Item => ImportGranularity::Item,
+ ImportGranularityDef::Crate => ImportGranularity::Crate,
+ ImportGranularityDef::Module => ImportGranularity::Module,
+ },
+ enforce_granularity: self.data.imports_granularity_enforce,
+ prefix_kind: match self.data.imports_prefix {
+ ImportPrefixDef::Plain => PrefixKind::Plain,
+ ImportPrefixDef::ByCrate => PrefixKind::ByCrate,
+ ImportPrefixDef::BySelf => PrefixKind::BySelf,
+ },
+ group: self.data.imports_group_enable,
+ skip_glob_imports: !self.data.imports_merge_glob,
+ }
+ }
+
+ pub fn completion(&self) -> CompletionConfig {
+ CompletionConfig {
+ enable_postfix_completions: self.data.completion_postfix_enable,
+ enable_imports_on_the_fly: self.data.completion_autoimport_enable
+ && completion_item_edit_resolve(&self.caps),
+ enable_self_on_the_fly: self.data.completion_autoself_enable,
+ enable_private_editable: self.data.completion_privateEditable_enable,
+ callable: match self.data.completion_callable_snippets {
+ CallableCompletionDef::FillArguments => Some(CallableSnippets::FillArguments),
+ CallableCompletionDef::AddParentheses => Some(CallableSnippets::AddParentheses),
+ CallableCompletionDef::None => None,
+ },
+ insert_use: self.insert_use_config(),
+ prefer_no_std: self.data.imports_prefer_no_std,
+ snippet_cap: SnippetCap::new(try_or_def!(
+ self.caps
+ .text_document
+ .as_ref()?
+ .completion
+ .as_ref()?
+ .completion_item
+ .as_ref()?
+ .snippet_support?
+ )),
+ snippets: self.snippets.clone(),
+ }
+ }
+
+ pub fn find_all_refs_exclude_imports(&self) -> bool {
+ self.data.references_excludeImports
+ }
+
+ pub fn snippet_cap(&self) -> bool {
+ self.experimental("snippetTextEdit")
+ }
+
+ pub fn assist(&self) -> AssistConfig {
+ AssistConfig {
+ snippet_cap: SnippetCap::new(self.experimental("snippetTextEdit")),
+ allowed: None,
+ insert_use: self.insert_use_config(),
+ prefer_no_std: self.data.imports_prefer_no_std,
+ assist_emit_must_use: self.data.assist_emitMustUse,
+ }
+ }
+
+ pub fn join_lines(&self) -> JoinLinesConfig {
+ JoinLinesConfig {
+ join_else_if: self.data.joinLines_joinElseIf,
+ remove_trailing_comma: self.data.joinLines_removeTrailingComma,
+ unwrap_trivial_blocks: self.data.joinLines_unwrapTrivialBlock,
+ join_assignments: self.data.joinLines_joinAssignments,
+ }
+ }
+
+ pub fn call_info(&self) -> CallInfoConfig {
+ CallInfoConfig {
+ params_only: matches!(self.data.signatureInfo_detail, SignatureDetail::Parameters),
+ docs: self.data.signatureInfo_documentation_enable,
+ }
+ }
+
+ pub fn lens(&self) -> LensConfig {
+ LensConfig {
+ run: self.data.lens_enable && self.data.lens_run_enable,
+ debug: self.data.lens_enable && self.data.lens_debug_enable,
+ implementations: self.data.lens_enable && self.data.lens_implementations_enable,
+ method_refs: self.data.lens_enable && self.data.lens_references_method_enable,
+ refs_adt: self.data.lens_enable && self.data.lens_references_adt_enable,
+ refs_trait: self.data.lens_enable && self.data.lens_references_trait_enable,
+ enum_variant_refs: self.data.lens_enable
+ && self.data.lens_references_enumVariant_enable,
+ location: self.data.lens_location,
+ }
+ }
+
+ pub fn hover_actions(&self) -> HoverActionsConfig {
+ let enable = self.experimental("hoverActions") && self.data.hover_actions_enable;
+ HoverActionsConfig {
+ implementations: enable && self.data.hover_actions_implementations_enable,
+ references: enable && self.data.hover_actions_references_enable,
+ run: enable && self.data.hover_actions_run_enable,
+ debug: enable && self.data.hover_actions_debug_enable,
+ goto_type_def: enable && self.data.hover_actions_gotoTypeDef_enable,
+ }
+ }
+
+ pub fn highlighting_config(&self) -> HighlightConfig {
+ HighlightConfig {
+ strings: self.data.semanticHighlighting_strings_enable,
+ punctuation: self.data.semanticHighlighting_punctuation_enable,
+ specialize_punctuation: self
+ .data
+ .semanticHighlighting_punctuation_specialization_enable,
+ macro_bang: self.data.semanticHighlighting_punctuation_separate_macro_bang,
+ operator: self.data.semanticHighlighting_operator_enable,
+ specialize_operator: self.data.semanticHighlighting_operator_specialization_enable,
+ inject_doc_comment: self.data.semanticHighlighting_doc_comment_inject_enable,
+ syntactic_name_ref_highlighting: false,
+ }
+ }
+
+ pub fn hover(&self) -> HoverConfig {
+ HoverConfig {
+ links_in_hover: self.data.hover_links_enable,
+ documentation: self.data.hover_documentation_enable.then(|| {
+ let is_markdown = try_or_def!(self
+ .caps
+ .text_document
+ .as_ref()?
+ .hover
+ .as_ref()?
+ .content_format
+ .as_ref()?
+ .as_slice())
+ .contains(&MarkupKind::Markdown);
+ if is_markdown {
+ HoverDocFormat::Markdown
+ } else {
+ HoverDocFormat::PlainText
+ }
+ }),
+ keywords: self.data.hover_documentation_keywords_enable,
+ }
+ }
+
+ pub fn workspace_symbol(&self) -> WorkspaceSymbolConfig {
+ WorkspaceSymbolConfig {
+ search_scope: match self.data.workspace_symbol_search_scope {
+ WorkspaceSymbolSearchScopeDef::Workspace => WorkspaceSymbolSearchScope::Workspace,
+ WorkspaceSymbolSearchScopeDef::WorkspaceAndDependencies => {
+ WorkspaceSymbolSearchScope::WorkspaceAndDependencies
+ }
+ },
+ search_kind: match self.data.workspace_symbol_search_kind {
+ WorkspaceSymbolSearchKindDef::OnlyTypes => WorkspaceSymbolSearchKind::OnlyTypes,
+ WorkspaceSymbolSearchKindDef::AllSymbols => WorkspaceSymbolSearchKind::AllSymbols,
+ },
+ search_limit: self.data.workspace_symbol_search_limit,
+ }
+ }
+
+ pub fn semantic_tokens_refresh(&self) -> bool {
+ try_or_def!(self.caps.workspace.as_ref()?.semantic_tokens.as_ref()?.refresh_support?)
+ }
+
+ pub fn code_lens_refresh(&self) -> bool {
+ try_or_def!(self.caps.workspace.as_ref()?.code_lens.as_ref()?.refresh_support?)
+ }
+
+ pub fn insert_replace_support(&self) -> bool {
+ try_or_def!(
+ self.caps
+ .text_document
+ .as_ref()?
+ .completion
+ .as_ref()?
+ .completion_item
+ .as_ref()?
+ .insert_replace_support?
+ )
+ }
+
+ pub fn client_commands(&self) -> ClientCommandsConfig {
+ let commands =
+ try_or!(self.caps.experimental.as_ref()?.get("commands")?, &serde_json::Value::Null);
+ let commands: Option<lsp_ext::ClientCommandOptions> =
+ serde_json::from_value(commands.clone()).ok();
+ let force = commands.is_none() && self.data.lens_forceCustomCommands;
+ let commands = commands.map(|it| it.commands).unwrap_or_default();
+
+ let get = |name: &str| commands.iter().any(|it| it == name) || force;
+
+ ClientCommandsConfig {
+ run_single: get("rust-analyzer.runSingle"),
+ debug_single: get("rust-analyzer.debugSingle"),
+ show_reference: get("rust-analyzer.showReferences"),
+ goto_location: get("rust-analyzer.gotoLocation"),
+ trigger_parameter_hints: get("editor.action.triggerParameterHints"),
+ }
+ }
+
+ pub fn highlight_related(&self) -> HighlightRelatedConfig {
+ HighlightRelatedConfig {
+ references: self.data.highlightRelated_references_enable,
+ break_points: self.data.highlightRelated_breakPoints_enable,
+ exit_points: self.data.highlightRelated_exitPoints_enable,
+ yield_points: self.data.highlightRelated_yieldPoints_enable,
+ }
+ }
+
+ pub fn prime_caches_num_threads(&self) -> u8 {
+ match self.data.cachePriming_numThreads {
+ 0 => num_cpus::get_physical().try_into().unwrap_or(u8::MAX),
+ n => n,
+ }
+ }
+
+ pub fn typing_autoclose_angle(&self) -> bool {
+ self.data.typing_autoClosingAngleBrackets_enable
+ }
+}
+// Deserialization definitions
+
+macro_rules! create_bool_or_string_de {
+ ($ident:ident<$bool:literal, $string:literal>) => {
+ fn $ident<'de, D>(d: D) -> Result<(), D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ struct V;
+ impl<'de> serde::de::Visitor<'de> for V {
+ type Value = ();
+
+ fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
+ formatter.write_str(concat!(
+ stringify!($bool),
+ " or \"",
+ stringify!($string),
+ "\""
+ ))
+ }
+
+ fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ match v {
+ $bool => Ok(()),
+ _ => Err(serde::de::Error::invalid_value(
+ serde::de::Unexpected::Bool(v),
+ &self,
+ )),
+ }
+ }
+
+ fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ match v {
+ $string => Ok(()),
+ _ => Err(serde::de::Error::invalid_value(
+ serde::de::Unexpected::Str(v),
+ &self,
+ )),
+ }
+ }
+
+ fn visit_enum<A>(self, a: A) -> Result<Self::Value, A::Error>
+ where
+ A: serde::de::EnumAccess<'de>,
+ {
+ use serde::de::VariantAccess;
+ let (variant, va) = a.variant::<&'de str>()?;
+ va.unit_variant()?;
+ match variant {
+ $string => Ok(()),
+ _ => Err(serde::de::Error::invalid_value(
+ serde::de::Unexpected::Str(variant),
+ &self,
+ )),
+ }
+ }
+ }
+ d.deserialize_any(V)
+ }
+ };
+}
+create_bool_or_string_de!(true_or_always<true, "always">);
+create_bool_or_string_de!(false_or_never<false, "never">);
+
+macro_rules! named_unit_variant {
+ ($variant:ident) => {
+ pub(super) fn $variant<'de, D>(deserializer: D) -> Result<(), D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ struct V;
+ impl<'de> serde::de::Visitor<'de> for V {
+ type Value = ();
+ fn expecting(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.write_str(concat!("\"", stringify!($variant), "\""))
+ }
+ fn visit_str<E: serde::de::Error>(self, value: &str) -> Result<Self::Value, E> {
+ if value == stringify!($variant) {
+ Ok(())
+ } else {
+ Err(E::invalid_value(serde::de::Unexpected::Str(value), &self))
+ }
+ }
+ }
+ deserializer.deserialize_str(V)
+ }
+ };
+}
+
+mod de_unit_v {
+ named_unit_variant!(all);
+ named_unit_variant!(skip_trivial);
+ named_unit_variant!(mutable);
++ named_unit_variant!(reborrow);
+ named_unit_variant!(with_block);
+}
+
+#[derive(Deserialize, Debug, Clone, Copy)]
+#[serde(rename_all = "snake_case")]
+enum SnippetScopeDef {
+ Expr,
+ Item,
+ Type,
+}
+
+impl Default for SnippetScopeDef {
+ fn default() -> Self {
+ SnippetScopeDef::Expr
+ }
+}
+
+#[derive(Deserialize, Debug, Clone, Default)]
+#[serde(default)]
+struct SnippetDef {
+ #[serde(deserialize_with = "single_or_array")]
+ prefix: Vec<String>,
+ #[serde(deserialize_with = "single_or_array")]
+ postfix: Vec<String>,
+ description: Option<String>,
+ #[serde(deserialize_with = "single_or_array")]
+ body: Vec<String>,
+ #[serde(deserialize_with = "single_or_array")]
+ requires: Vec<String>,
+ scope: SnippetScopeDef,
+}
+
+fn single_or_array<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>
+where
+ D: serde::Deserializer<'de>,
+{
+ struct SingleOrVec;
+
+ impl<'de> serde::de::Visitor<'de> for SingleOrVec {
+ type Value = Vec<String>;
+
+ fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ formatter.write_str("string or array of strings")
+ }
+
+ fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ Ok(vec![value.to_owned()])
+ }
+
+ fn visit_seq<A>(self, seq: A) -> Result<Self::Value, A::Error>
+ where
+ A: serde::de::SeqAccess<'de>,
+ {
+ Deserialize::deserialize(serde::de::value::SeqAccessDeserializer::new(seq))
+ }
+ }
+
+ deserializer.deserialize_any(SingleOrVec)
+}
+
/// Either a path to a project manifest or an inline `rust-project.json` payload.
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged)]
enum ManifestOrProjectJson {
    Manifest(PathBuf),
    ProjectJson(ProjectJsonData),
}

#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum ExprFillDefaultDef {
    Todo,
    Default,
}

#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum ImportGranularityDef {
    Preserve,
    Item,
    Crate,
    Module,
}

#[derive(Deserialize, Debug, Copy, Clone)]
#[serde(rename_all = "snake_case")]
enum CallableCompletionDef {
    FillArguments,
    AddParentheses,
    None,
}

/// Cargo feature selection: either the string `"all"` (maps to `--all-features`)
/// or an explicit list of feature names.
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged)]
enum CargoFeaturesDef {
    #[serde(deserialize_with = "de_unit_v::all")]
    All,
    Selected(Vec<String>),
}

#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum InvocationStrategy {
    Once,
    PerWorkspace,
}

/// Targets for the on-save check; accepts a single string or an array of strings.
#[derive(Deserialize, Debug, Clone)]
struct CheckOnSaveTargets(#[serde(deserialize_with = "single_or_array")] Vec<String>);

#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum InvocationLocation {
    Root,
    Workspace,
}

/// Accepts `true`/`"always"`, `false`/`"never"`, or `"skip_trivial"`.
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged)]
enum LifetimeElisionDef {
    #[serde(deserialize_with = "true_or_always")]
    Always,
    #[serde(deserialize_with = "false_or_never")]
    Never,
    #[serde(deserialize_with = "de_unit_v::skip_trivial")]
    SkipTrivial,
}

/// Accepts `true`/`"always"`, `false`/`"never"`, or `"with_block"`.
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged)]
enum ClosureReturnTypeHintsDef {
    #[serde(deserialize_with = "true_or_always")]
    Always,
    #[serde(deserialize_with = "false_or_never")]
    Never,
    #[serde(deserialize_with = "de_unit_v::with_block")]
    WithBlock,
}

/// Accepts `true`/`"always"`, `false`/`"never"`, or `"mutable"`.
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged)]
enum ReborrowHintsDef {
    #[serde(deserialize_with = "true_or_always")]
    Always,
    #[serde(deserialize_with = "false_or_never")]
    Never,
    #[serde(deserialize_with = "de_unit_v::mutable")]
    Mutable,
}

/// Accepts `true`/`"always"`, `false`/`"never"`, or `"reborrow"`.
#[derive(Deserialize, Debug, Clone)]
#[serde(untagged)]
enum AdjustmentHintsDef {
    #[serde(deserialize_with = "true_or_always")]
    Always,
    #[serde(deserialize_with = "false_or_never")]
    Never,
    #[serde(deserialize_with = "de_unit_v::reborrow")]
    Reborrow,
}

#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum FilesWatcherDef {
    Client,
    Notify,
    Server,
}

#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum ImportPrefixDef {
    Plain,
    #[serde(alias = "self")]
    BySelf,
    #[serde(alias = "crate")]
    ByCrate,
}

#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum WorkspaceSymbolSearchScopeDef {
    Workspace,
    WorkspaceAndDependencies,
}

#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum SignatureDetail {
    Full,
    Parameters,
}

#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "snake_case")]
enum WorkspaceSymbolSearchKindDef {
    OnlyTypes,
    AllSymbols,
}
+
// Generates a config-data struct plus its `from_json`, `json_schema`, and
// (test-only) `manual` constructors, and a test asserting the field list stays
// sorted (which `schema` relies on).
macro_rules! _config_data {
    (struct $name:ident {
        $(
            $(#[doc=$doc:literal])*
            $field:ident $(| $alias:ident)*: $ty:ty = $default:expr,
        )*
    }) => {
        #[allow(non_snake_case)]
        #[derive(Debug, Clone)]
        struct $name { $($field: $ty,)* }
        impl $name {
            fn from_json(mut json: serde_json::Value, error_sink: &mut Vec<(String, serde_json::Error)>) -> $name {
                $name {$(
                    $field: get_field(
                        &mut json,
                        error_sink,
                        stringify!($field),
                        // Fold the optional aliases into a single Option.
                        None$(.or(Some(stringify!($alias))))*,
                        $default,
                    ),
                )*}
            }

            fn json_schema() -> serde_json::Value {
                schema(&[
                    $({
                        let field = stringify!($field);
                        let ty = stringify!($ty);

                        (field, ty, &[$($doc),*], $default)
                    },)*
                ])
            }

            #[cfg(test)]
            fn manual() -> String {
                manual(&[
                    $({
                        let field = stringify!($field);
                        let ty = stringify!($ty);

                        (field, ty, &[$($doc),*], $default)
                    },)*
                ])
            }
        }

        #[test]
        fn fields_are_sorted() {
            [$(stringify!($field)),*].windows(2).for_each(|w| assert!(w[0] <= w[1], "{} <= {} does not hold", w[0], w[1]));
        }
    };
}
use _config_data as config_data;
+
+fn get_field<T: DeserializeOwned>(
+ json: &mut serde_json::Value,
+ error_sink: &mut Vec<(String, serde_json::Error)>,
+ field: &'static str,
+ alias: Option<&'static str>,
+ default: &str,
+) -> T {
+ let default = serde_json::from_str(default).unwrap();
+ // XXX: check alias first, to work-around the VS Code where it pre-fills the
+ // defaults instead of sending an empty object.
+ alias
+ .into_iter()
+ .chain(iter::once(field))
+ .find_map(move |field| {
+ let mut pointer = field.replace('_', "/");
+ pointer.insert(0, '/');
+ json.pointer_mut(&pointer).and_then(|it| match serde_json::from_value(it.take()) {
+ Ok(it) => Some(it),
+ Err(e) => {
+ tracing::warn!("Failed to deserialize config field at {}: {:?}", pointer, e);
+ error_sink.push((pointer, e));
+ None
+ }
+ })
+ })
+ .unwrap_or(default)
+}
+
+fn schema(fields: &[(&'static str, &'static str, &[&str], &str)]) -> serde_json::Value {
+ for ((f1, ..), (f2, ..)) in fields.iter().zip(&fields[1..]) {
+ fn key(f: &str) -> &str {
+ f.splitn(2, '_').next().unwrap()
+ }
+ assert!(key(f1) <= key(f2), "wrong field order: {:?} {:?}", f1, f2);
+ }
+
+ let map = fields
+ .iter()
+ .map(|(field, ty, doc, default)| {
+ let name = field.replace('_', ".");
+ let name = format!("rust-analyzer.{}", name);
+ let props = field_props(field, ty, doc, default);
+ (name, props)
+ })
+ .collect::<serde_json::Map<_, _>>();
+ map.into()
+}
+
/// Builds the JSON-schema property object for a single config field: the
/// markdown description, the default value, and a type schema keyed on the
/// *stringified Rust type* of the field. Unknown types panic so new config
/// types cannot silently ship without a schema entry.
fn field_props(field: &str, ty: &str, doc: &[&str], default: &str) -> serde_json::Value {
    let doc = doc_comment_to_string(doc);
    let doc = doc.trim_end_matches('\n');
    // Enforce doc style: capitalized sentence ending with a period.
    assert!(
        doc.ends_with('.') && doc.starts_with(char::is_uppercase),
        "bad docs for {}: {:?}",
        field,
        doc
    );
    let default = default.parse::<serde_json::Value>().unwrap();

    let mut map = serde_json::Map::default();
    // Shorthand for inserting literal-keyed json values into `map`.
    macro_rules! set {
        ($($key:literal: $value:tt),*$(,)?) => {{$(
            map.insert($key.into(), serde_json::json!($value));
        )*}};
    }
    set!("markdownDescription": doc);
    set!("default": default);

    match ty {
        "bool" => set!("type": "boolean"),
        "usize" => set!("type": "integer", "minimum": 0),
        "String" => set!("type": "string"),
        "Vec<String>" => set! {
            "type": "array",
            "items": { "type": "string" },
        },
        "Vec<PathBuf>" => set! {
            "type": "array",
            "items": { "type": "string" },
        },
        "FxHashSet<String>" => set! {
            "type": "array",
            "items": { "type": "string" },
            "uniqueItems": true,
        },
        "FxHashMap<Box<str>, Box<[Box<str>]>>" => set! {
            "type": "object",
        },
        "FxHashMap<String, SnippetDef>" => set! {
            "type": "object",
        },
        "FxHashMap<String, String>" => set! {
            "type": "object",
        },
        "Option<usize>" => set! {
            "type": ["null", "integer"],
            "minimum": 0,
        },
        "Option<String>" => set! {
            "type": ["null", "string"],
        },
        "Option<PathBuf>" => set! {
            "type": ["null", "string"],
        },
        "Option<bool>" => set! {
            "type": ["null", "boolean"],
        },
        "Option<Vec<String>>" => set! {
            "type": ["null", "array"],
            "items": { "type": "string" },
        },
        "MergeBehaviorDef" => set! {
            "type": "string",
            "enum": ["none", "crate", "module"],
            "enumDescriptions": [
                "Do not merge imports at all.",
                "Merge imports from the same crate into a single `use` statement.",
                "Merge imports from the same module into a single `use` statement."
            ],
        },
        "ExprFillDefaultDef" => set! {
            "type": "string",
            "enum": ["todo", "default"],
            "enumDescriptions": [
                "Fill missing expressions with the `todo` macro",
                "Fill missing expressions with reasonable defaults, `new` or `default` constructors."
            ],
        },
        "ImportGranularityDef" => set! {
            "type": "string",
            "enum": ["preserve", "crate", "module", "item"],
            "enumDescriptions": [
                "Do not change the granularity of any imports and preserve the original structure written by the developer.",
                "Merge imports from the same crate into a single use statement. Conversely, imports from different crates are split into separate statements.",
                "Merge imports from the same module into a single use statement. Conversely, imports from different modules are split into separate statements.",
                "Flatten imports so that each has its own use statement."
            ],
        },
        "ImportPrefixDef" => set! {
            "type": "string",
            "enum": [
                "plain",
                "self",
                "crate"
            ],
            "enumDescriptions": [
                "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item.",
                "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item. Prefixes `self` in front of the path if it starts with a module.",
                "Force import paths to be absolute by always starting them with `crate` or the extern crate name they come from."
            ],
        },
        "Vec<ManifestOrProjectJson>" => set! {
            "type": "array",
            "items": { "type": ["string", "object"] },
        },
        "WorkspaceSymbolSearchScopeDef" => set! {
            "type": "string",
            "enum": ["workspace", "workspace_and_dependencies"],
            "enumDescriptions": [
                "Search in current workspace only.",
                "Search in current workspace and dependencies."
            ],
        },
        "WorkspaceSymbolSearchKindDef" => set! {
            "type": "string",
            "enum": ["only_types", "all_symbols"],
            "enumDescriptions": [
                "Search for types only.",
                "Search for all symbols kinds."
            ],
        },
        "ParallelCachePrimingNumThreads" => set! {
            "type": "number",
            "minimum": 0,
            "maximum": 255
        },
        "LifetimeElisionDef" => set! {
            "type": "string",
            "enum": [
                "always",
                "never",
                "skip_trivial"
            ],
            "enumDescriptions": [
                "Always show lifetime elision hints.",
                "Never show lifetime elision hints.",
                "Only show lifetime elision hints if a return type is involved."
            ]
        },
        "ClosureReturnTypeHintsDef" => set! {
            "type": "string",
            "enum": [
                "always",
                "never",
                "with_block"
            ],
            "enumDescriptions": [
                "Always show type hints for return types of closures.",
                "Never show type hints for return types of closures.",
                "Only show type hints for return types of closures with blocks."
            ]
        },
        "ReborrowHintsDef" => set! {
            "type": "string",
            "enum": [
                "always",
                "never",
                "mutable"
            ],
            "enumDescriptions": [
                "Always show reborrow hints.",
                "Never show reborrow hints.",
                "Only show mutable reborrow hints."
            ]
        },
        "AdjustmentHintsDef" => set! {
            "type": "string",
            "enum": [
                "always",
                "never",
                "reborrow"
            ],
            "enumDescriptions": [
                "Always show all adjustment hints.",
                "Never show adjustment hints.",
                "Only show auto borrow and dereference adjustment hints."
            ]
        },
        "CargoFeaturesDef" => set! {
            "anyOf": [
                {
                    "type": "string",
                    "enum": [
                        "all"
                    ],
                    "enumDescriptions": [
                        "Pass `--all-features` to cargo",
                    ]
                },
                {
                    "type": "array",
                    "items": { "type": "string" }
                }
            ],
        },
        "Option<CargoFeaturesDef>" => set! {
            "anyOf": [
                {
                    "type": "string",
                    "enum": [
                        "all"
                    ],
                    "enumDescriptions": [
                        "Pass `--all-features` to cargo",
                    ]
                },
                {
                    "type": "array",
                    "items": { "type": "string" }
                },
                { "type": "null" }
            ],
        },
        "CallableCompletionDef" => set! {
            "type": "string",
            "enum": [
                "fill_arguments",
                "add_parentheses",
                "none",
            ],
            "enumDescriptions": [
                "Add call parentheses and pre-fill arguments.",
                "Add call parentheses.",
                "Do no snippet completions for callables."
            ]
        },
        "SignatureDetail" => set! {
            "type": "string",
            "enum": ["full", "parameters"],
            "enumDescriptions": [
                "Show the entire signature.",
                "Show only the parameters."
            ],
        },
        "FilesWatcherDef" => set! {
            "type": "string",
            "enum": ["client", "server"],
            "enumDescriptions": [
                "Use the client (editor) to watch files for changes",
                "Use server-side file watching",
            ],
        },
        "AnnotationLocation" => set! {
            "type": "string",
            "enum": ["above_name", "above_whole_item"],
            "enumDescriptions": [
                "Render annotations above the name of the item.",
                "Render annotations above the whole item, including documentation comments and attributes."
            ],
        },
        "InvocationStrategy" => set! {
            "type": "string",
            "enum": ["per_workspace", "once"],
            "enumDescriptions": [
                "The command will be executed for each workspace.",
                "The command will be executed once."
            ],
        },
        "InvocationLocation" => set! {
            "type": "string",
            "enum": ["workspace", "root"],
            "enumDescriptions": [
                "The command will be executed in the corresponding workspace root.",
                "The command will be executed in the project root."
            ],
        },
        "CheckOnSaveTargets" => set! {
            "anyOf": [
                {
                    "type": "string",
                },
                {
                    "type": "array",
                    "items": { "type": "string" }
                },
            ],
        },
        _ => panic!("missing entry for {}: {}", ty, default),
    }

    map.into()
}
+
/// Renders the AsciiDoc manual section for all config fields; used by the
/// `generate_config_documentation` test to regenerate `generated_config.adoc`.
#[cfg(test)]
fn manual(fields: &[(&'static str, &'static str, &[&str], &str)]) -> String {
    fields
        .iter()
        .map(|(field, _ty, doc, default)| {
            let name = format!("rust-analyzer.{}", field.replace('_', "."));
            let doc = doc_comment_to_string(*doc);
            // Multi-line defaults get a listing block; single-line defaults are inlined.
            if default.contains('\n') {
                format!(
                    r#"[[{}]]{}::
+
--
Default:
----
{}
----
{}
--
"#,
                    name, name, default, doc
                )
            } else {
                format!("[[{}]]{} (default: `{}`)::\n+\n--\n{}--\n", name, name, default, doc)
            }
        })
        .collect::<String>()
}
+
/// Joins doc-comment lines into a `\n`-terminated string, dropping the single
/// leading space that `#[doc = " ..."]` attributes carry.
fn doc_comment_to_string(doc: &[&str]) -> String {
    let mut out = String::new();
    for line in doc {
        let line = line.strip_prefix(' ').unwrap_or(line);
        out.push_str(line);
        out.push('\n');
    }
    out
}
+
#[cfg(test)]
mod tests {
    use std::fs;

    use test_utils::{ensure_file_contents, project_root};

    use super::*;

    // Regenerates the `$generated-start`..`$generated-end` region of
    // editors/code/package.json from the config schema, converting AsciiDoc
    // links to markdown on the way.
    #[test]
    fn generate_package_json_config() {
        let s = Config::json_schema();
        let schema = format!("{:#}", s);
        // NOTE(review): the two `replace` calls below look whitespace-mangled
        // (replacing a space with a space is a no-op); upstream re-indents the
        // pretty-printed schema here — verify against the original source.
        let mut schema = schema
            .trim_start_matches('{')
            .trim_end_matches('}')
            .replace(" ", " ")
            .replace('\n', "\n ")
            .trim_start_matches('\n')
            .trim_end()
            .to_string();
        schema.push_str(",\n");

        // Transform the asciidoc form link to markdown style.
        //
        // https://link[text] => [text](https://link)
        let url_matches = schema.match_indices("https://");
        let mut url_offsets = url_matches.map(|(idx, _)| idx).collect::<Vec<usize>>();
        // Process back-to-front so earlier offsets stay valid after edits.
        url_offsets.reverse();
        for idx in url_offsets {
            let link = &schema[idx..];
            // matching on whitespace to ignore normal links
            if let Some(link_end) = link.find(|c| c == ' ' || c == '[') {
                if link.chars().nth(link_end) == Some('[') {
                    if let Some(link_text_end) = link.find(']') {
                        let link_text = link[link_end..(link_text_end + 1)].to_string();

                        schema.replace_range((idx + link_end)..(idx + link_text_end + 1), "");
                        schema.insert(idx, '(');
                        schema.insert(idx + link_end + 1, ')');
                        schema.insert_str(idx, &link_text);
                    }
                }
            }
        }

        let package_json_path = project_root().join("editors/code/package.json");
        let mut package_json = fs::read_to_string(&package_json_path).unwrap();

        let start_marker = " \"$generated-start\": {},\n";
        let end_marker = " \"$generated-end\": {}\n";

        let start = package_json.find(start_marker).unwrap() + start_marker.len();
        let end = package_json.find(end_marker).unwrap();

        // Only rewrite the file when the content differs modulo whitespace,
        // to avoid churn from formatting-only differences.
        let p = remove_ws(&package_json[start..end]);
        let s = remove_ws(&schema);
        if !p.contains(&s) {
            package_json.replace_range(start..end, &schema);
            ensure_file_contents(&package_json_path, &package_json)
        }
    }

    // Regenerates docs/user/generated_config.adoc from the field doc comments.
    #[test]
    fn generate_config_documentation() {
        let docs_path = project_root().join("docs/user/generated_config.adoc");
        let expected = ConfigData::manual();
        ensure_file_contents(&docs_path, &expected);
    }

    fn remove_ws(text: &str) -> String {
        text.replace(char::is_whitespace, "")
    }
}
--- /dev/null
-
- let mut message = message.clone();
- if needs_primary_span_label {
- if let Some(primary_span_label) = &primary_span.label {
- format_to!(message, "\n{}", primary_span_label);
+//! This module provides the functionality needed to convert diagnostics from
+//! `cargo check` json format to the LSP diagnostic format.
+use std::collections::HashMap;
+
+use flycheck::{Applicability, DiagnosticLevel, DiagnosticSpan};
+use itertools::Itertools;
+use stdx::format_to;
+use vfs::{AbsPath, AbsPathBuf};
+
+use crate::{
+ global_state::GlobalStateSnapshot, line_index::PositionEncoding, lsp_ext,
+ to_proto::url_from_abs_path,
+};
+
+use super::{DiagnosticsMapConfig, Fix};
+
+/// Determines the LSP severity from a diagnostic
+fn diagnostic_severity(
+ config: &DiagnosticsMapConfig,
+ level: flycheck::DiagnosticLevel,
+ code: Option<flycheck::DiagnosticCode>,
+) -> Option<lsp_types::DiagnosticSeverity> {
+ let res = match level {
+ DiagnosticLevel::Ice => lsp_types::DiagnosticSeverity::ERROR,
+ DiagnosticLevel::Error => lsp_types::DiagnosticSeverity::ERROR,
+ DiagnosticLevel::Warning => match &code {
+ // HACK: special case for `warnings` rustc lint.
+ Some(code)
+ if config.warnings_as_hint.iter().any(|lint| {
+ lint == "warnings" || ide_db::helpers::lint_eq_or_in_group(&code.code, lint)
+ }) =>
+ {
+ lsp_types::DiagnosticSeverity::HINT
+ }
+ // HACK: special case for `warnings` rustc lint.
+ Some(code)
+ if config.warnings_as_info.iter().any(|lint| {
+ lint == "warnings" || ide_db::helpers::lint_eq_or_in_group(&code.code, lint)
+ }) =>
+ {
+ lsp_types::DiagnosticSeverity::INFORMATION
+ }
+ _ => lsp_types::DiagnosticSeverity::WARNING,
+ },
+ DiagnosticLevel::Note => lsp_types::DiagnosticSeverity::INFORMATION,
+ DiagnosticLevel::Help => lsp_types::DiagnosticSeverity::HINT,
+ _ => return None,
+ };
+ Some(res)
+}
+
/// Checks whether a file name is from macro invocation and does not refer to an actual file.
fn is_dummy_macro_file(file_name: &str) -> bool {
    // FIXME: current rustc does not seem to emit `<macro file>` files anymore?
    // Dummy "files" are rendered as `<...>`; both delimiters must be present.
    let mut chars = file_name.chars();
    chars.next() == Some('<') && chars.next_back() == Some('>')
}
+
+/// Converts a Rust span to a LSP location
+fn location(
+ config: &DiagnosticsMapConfig,
+ workspace_root: &AbsPath,
+ span: &DiagnosticSpan,
+ snap: &GlobalStateSnapshot,
+) -> lsp_types::Location {
+ let file_name = resolve_path(config, workspace_root, &span.file_name);
+ let uri = url_from_abs_path(&file_name);
+
+ let range = {
+ let position_encoding = snap.config.position_encoding();
+ lsp_types::Range::new(
+ position(&position_encoding, span, span.line_start, span.column_start),
+ position(&position_encoding, span, span.line_end, span.column_end),
+ )
+ };
+ lsp_types::Location::new(uri, range)
+}
+
/// Converts a 1-based rustc line/column pair into a 0-based LSP `Position`,
/// widening the column for multi-byte characters per the negotiated encoding.
fn position(
    position_encoding: &PositionEncoding,
    span: &DiagnosticSpan,
    line_offset: usize,
    column_offset: usize,
) -> lsp_types::Position {
    // Index into the span's captured source text for this line.
    let line_index = line_offset - span.line_start;

    let mut true_column_offset = column_offset;
    if let Some(line) = span.text.get(line_index) {
        if line.text.chars().count() == line.text.len() {
            // all one byte utf-8 char
            return lsp_types::Position {
                line: (line_offset as u32).saturating_sub(1),
                character: (column_offset as u32).saturating_sub(1),
            };
        }
        let mut char_offset = 0;
        let len_func = match position_encoding {
            PositionEncoding::Utf8 => char::len_utf8,
            PositionEncoding::Utf16 => char::len_utf16,
        };
        // Add the extra code-unit width of every character *before* the target
        // column (each char already counts as 1, hence `len - 1`).
        for c in line.text.chars() {
            char_offset += 1;
            if char_offset > column_offset {
                break;
            }
            true_column_offset += len_func(c) - 1;
        }
    }

    // `saturating_sub` converts the 1-based values to 0-based LSP positions.
    lsp_types::Position {
        line: (line_offset as u32).saturating_sub(1),
        character: (true_column_offset as u32).saturating_sub(1),
    }
}
+
+/// Extracts a suitable "primary" location from a rustc diagnostic.
+///
+/// This takes locations pointing into the standard library, or generally outside the current
+/// workspace into account and tries to avoid those, in case macros are involved.
+fn primary_location(
+ config: &DiagnosticsMapConfig,
+ workspace_root: &AbsPath,
+ span: &DiagnosticSpan,
+ snap: &GlobalStateSnapshot,
+) -> lsp_types::Location {
+ let span_stack = std::iter::successors(Some(span), |span| Some(&span.expansion.as_ref()?.span));
+ for span in span_stack.clone() {
+ let abs_path = resolve_path(config, workspace_root, &span.file_name);
+ if !is_dummy_macro_file(&span.file_name) && abs_path.starts_with(workspace_root) {
+ return location(config, workspace_root, span, snap);
+ }
+ }
+
+ // Fall back to the outermost macro invocation if no suitable span comes up.
+ let last_span = span_stack.last().unwrap();
+ location(config, workspace_root, last_span, snap)
+}
+
+/// Converts a secondary Rust span to a LSP related information
+///
+/// If the span is unlabelled this will return `None`.
+fn diagnostic_related_information(
+ config: &DiagnosticsMapConfig,
+ workspace_root: &AbsPath,
+ span: &DiagnosticSpan,
+ snap: &GlobalStateSnapshot,
+) -> Option<lsp_types::DiagnosticRelatedInformation> {
+ let message = span.label.clone()?;
+ let location = location(config, workspace_root, span, snap);
+ Some(lsp_types::DiagnosticRelatedInformation { location, message })
+}
+
+/// Resolves paths applying any matching path prefix remappings, and then
+/// joining the path to the workspace root.
+fn resolve_path(
+ config: &DiagnosticsMapConfig,
+ workspace_root: &AbsPath,
+ file_name: &str,
+) -> AbsPathBuf {
+ match config
+ .remap_prefix
+ .iter()
+ .find_map(|(from, to)| file_name.strip_prefix(from).map(|file_name| (to, file_name)))
+ {
+ Some((to, file_name)) => workspace_root.join(format!("{}{}", to, file_name)),
+ None => workspace_root.join(file_name),
+ }
+}
+
/// A secondary diagnostic derived from one child of a rustc diagnostic,
/// optionally carrying a quickfix built from its suggested replacements.
struct SubDiagnostic {
    related: lsp_types::DiagnosticRelatedInformation,
    suggested_fix: Option<Fix>,
}

/// Result of lowering one child diagnostic: either a standalone sub-diagnostic
/// or a plain line of text appended to the primary message.
enum MappedRustChildDiagnostic {
    SubDiagnostic(SubDiagnostic),
    MessageLine(String),
}
+
/// Lowers one child of a rustc diagnostic into either a message line (for
/// spanless children) or a `SubDiagnostic`, building a quickfix from any
/// applicable suggested replacements.
fn map_rust_child_diagnostic(
    config: &DiagnosticsMapConfig,
    workspace_root: &AbsPath,
    rd: &flycheck::Diagnostic,
    snap: &GlobalStateSnapshot,
) -> MappedRustChildDiagnostic {
    let spans: Vec<&DiagnosticSpan> = rd.spans.iter().filter(|s| s.is_primary).collect();
    if spans.is_empty() {
        // `rustc` uses these spanless children as a way to print multi-line
        // messages
        return MappedRustChildDiagnostic::MessageLine(rd.message.clone());
    }

    let mut edit_map: HashMap<lsp_types::Url, Vec<lsp_types::TextEdit>> = HashMap::new();
    let mut suggested_replacements = Vec::new();
    for &span in &spans {
        if let Some(suggested_replacement) = &span.suggested_replacement {
            if !suggested_replacement.is_empty() {
                suggested_replacements.push(suggested_replacement);
            }
            let location = location(config, workspace_root, span, snap);
            let edit = lsp_types::TextEdit::new(location.range, suggested_replacement.clone());

            // Only actually emit a quickfix if the suggestion is "valid enough".
            // We accept both "MaybeIncorrect" and "MachineApplicable". "MaybeIncorrect" means that
            // the suggestion is *complete* (contains no placeholders where code needs to be
            // inserted), but might not be what the user wants, or might need minor adjustments.
            if matches!(
                span.suggestion_applicability,
                None | Some(Applicability::MaybeIncorrect | Applicability::MachineApplicable)
            ) {
                edit_map.entry(location.uri).or_default().push(edit);
            }
        }
    }

    // rustc renders suggestion diagnostics by appending the suggested replacement, so do the same
    // here, otherwise the diagnostic text is missing useful information.
    let mut message = rd.message.clone();
    if !suggested_replacements.is_empty() {
        message.push_str(": ");
        let suggestions =
            suggested_replacements.iter().map(|suggestion| format!("`{}`", suggestion)).join(", ");
        message.push_str(&suggestions);
    }

    if edit_map.is_empty() {
        // No applicable edits: emit a plain related-information entry.
        MappedRustChildDiagnostic::SubDiagnostic(SubDiagnostic {
            related: lsp_types::DiagnosticRelatedInformation {
                location: location(config, workspace_root, spans[0], snap),
                message,
            },
            suggested_fix: None,
        })
    } else {
        MappedRustChildDiagnostic::SubDiagnostic(SubDiagnostic {
            related: lsp_types::DiagnosticRelatedInformation {
                location: location(config, workspace_root, spans[0], snap),
                message: message.clone(),
            },
            suggested_fix: Some(Fix {
                ranges: spans
                    .iter()
                    .map(|&span| location(config, workspace_root, span, snap).range)
                    .collect(),
                action: lsp_ext::CodeAction {
                    title: message,
                    group: None,
                    kind: Some(lsp_types::CodeActionKind::QUICKFIX),
                    edit: Some(lsp_ext::SnippetWorkspaceEdit {
                        // FIXME: there's no good reason to use edit_map here....
                        changes: Some(edit_map),
                        document_changes: None,
                        change_annotations: None,
                    }),
                    is_preferred: Some(true),
                    data: None,
                    command: None,
                },
            }),
        })
    }
}
+
/// One LSP diagnostic produced from a rustc diagnostic, with the file it
/// belongs to and an optional quickfix.
#[derive(Debug)]
pub(crate) struct MappedRustDiagnostic {
    pub(crate) url: lsp_types::Url,
    pub(crate) diagnostic: lsp_types::Diagnostic,
    pub(crate) fix: Option<Fix>,
}
+
/// Converts a Rust root diagnostic to LSP form
///
/// This flattens the Rust diagnostic by:
///
/// 1. Creating a LSP diagnostic with the root message and primary span.
/// 2. Adding any labelled secondary spans to `relatedInformation`
/// 3. Categorising child diagnostics as either `SuggestedFix`es,
///    `relatedInformation` or additional message lines.
///
/// If the diagnostic has no primary span this will return an empty `Vec`.
pub(crate) fn map_rust_diagnostic_to_lsp(
    config: &DiagnosticsMapConfig,
    rd: &flycheck::Diagnostic,
    workspace_root: &AbsPath,
    snap: &GlobalStateSnapshot,
) -> Vec<MappedRustDiagnostic> {
    let primary_spans: Vec<&DiagnosticSpan> = rd.spans.iter().filter(|s| s.is_primary).collect();
    if primary_spans.is_empty() {
        return Vec::new();
    }

    let severity = diagnostic_severity(config, rd.level, rd.code.clone());

    let mut source = String::from("rustc");
    let mut code = rd.code.as_ref().map(|c| c.code.clone());
    if let Some(code_val) = &code {
        // See if this is an RFC #2103 scoped lint (e.g. from Clippy)
        let scoped_code: Vec<&str> = code_val.split("::").collect();
        if scoped_code.len() == 2 {
            source = String::from(scoped_code[0]);
            code = Some(String::from(scoped_code[1]));
        }
    }

    let mut needs_primary_span_label = true;
    let mut subdiagnostics = Vec::new();
    let mut tags = Vec::new();

    // Labelled secondary spans become related information on the primary diagnostic.
    for secondary_span in rd.spans.iter().filter(|s| !s.is_primary) {
        let related = diagnostic_related_information(config, workspace_root, secondary_span, snap);
        if let Some(related) = related {
            subdiagnostics.push(SubDiagnostic { related, suggested_fix: None });
        }
    }

    let mut message = rd.message.clone();
    for child in &rd.children {
        let child = map_rust_child_diagnostic(config, workspace_root, child, snap);
        match child {
            MappedRustChildDiagnostic::SubDiagnostic(sub) => {
                subdiagnostics.push(sub);
            }
            MappedRustChildDiagnostic::MessageLine(message_line) => {
                format_to!(message, "\n{}", message_line);

                // These secondary messages usually duplicate the content of the
                // primary span label.
                needs_primary_span_label = false;
            }
        }
    }

    if let Some(code) = &rd.code {
        let code = code.code.as_str();
        if matches!(
            code,
            "dead_code"
                | "unknown_lints"
                | "unreachable_code"
                | "unused_attributes"
                | "unused_imports"
                | "unused_macros"
                | "unused_variables"
        ) {
            tags.push(lsp_types::DiagnosticTag::UNNECESSARY);
        }

        if matches!(code, "deprecated") {
            tags.push(lsp_types::DiagnosticTag::DEPRECATED);
        }
    }

    let code_description = match source.as_str() {
        "rustc" => rustc_code_description(code.as_deref()),
        "clippy" => clippy_code_description(code.as_deref()),
        _ => None,
    };

    primary_spans
        .iter()
        .flat_map(|primary_span| {
            let primary_location = primary_location(config, workspace_root, primary_span, snap);
            let message = {
                let mut message = message.clone();
                if needs_primary_span_label {
                    if let Some(primary_span_label) = &primary_span.label {
                        format_to!(message, "\n{}", primary_span_label);
                    }
                }
                message
            };
            // Each primary diagnostic span may result in multiple LSP diagnostics.
            let mut diagnostics = Vec::new();

            let mut related_info_macro_calls = vec![];

            // If error occurs from macro expansion, add related info pointing to
            // where the error originated
            // Also, we would generate an additional diagnostic, so that exact place of macro
            // will be highlighted in the error origin place.
            let span_stack = std::iter::successors(Some(*primary_span), |span| {
                Some(&span.expansion.as_ref()?.span)
            });
            for (i, span) in span_stack.enumerate() {
                if is_dummy_macro_file(&span.file_name) {
                    continue;
                }

                // First span is the original diagnostic, others are macro call locations that
                // generated that code.
                let is_in_macro_call = i != 0;

                let secondary_location = location(config, workspace_root, span, snap);
                if secondary_location == primary_location {
                    continue;
                }
                related_info_macro_calls.push(lsp_types::DiagnosticRelatedInformation {
                    location: secondary_location.clone(),
                    message: if is_in_macro_call {
                        "Error originated from macro call here".to_string()
                    } else {
                        "Actual error occurred here".to_string()
                    },
                });
                // For the additional in-macro diagnostic we add the inverse message pointing to the error location in code.
                let information_for_additional_diagnostic =
                    vec![lsp_types::DiagnosticRelatedInformation {
                        location: primary_location.clone(),
                        message: "Exact error occurred here".to_string(),
                    }];

                let diagnostic = lsp_types::Diagnostic {
                    range: secondary_location.range,
                    // downgrade to hint if we're pointing at the macro
                    severity: Some(lsp_types::DiagnosticSeverity::HINT),
                    code: code.clone().map(lsp_types::NumberOrString::String),
                    code_description: code_description.clone(),
                    source: Some(source.clone()),
                    message: message.clone(),
                    related_information: Some(information_for_additional_diagnostic),
                    tags: if tags.is_empty() { None } else { Some(tags.clone()) },
                    // `data` carries rustc's own rendered output for clients that want it.
                    data: Some(serde_json::json!({ "rendered": rd.rendered })),
                };
                diagnostics.push(MappedRustDiagnostic {
                    url: secondary_location.uri,
                    diagnostic,
                    fix: None,
                });
            }

            // Emit the primary diagnostic.
            diagnostics.push(MappedRustDiagnostic {
                url: primary_location.uri.clone(),
                diagnostic: lsp_types::Diagnostic {
                    range: primary_location.range,
                    severity,
                    code: code.clone().map(lsp_types::NumberOrString::String),
                    code_description: code_description.clone(),
                    source: Some(source.clone()),
                    message,
                    related_information: {
                        let info = related_info_macro_calls
                            .iter()
                            .cloned()
                            .chain(subdiagnostics.iter().map(|sub| sub.related.clone()))
                            .collect::<Vec<_>>();
                        if info.is_empty() {
                            None
                        } else {
                            Some(info)
                        }
                    },
                    tags: if tags.is_empty() { None } else { Some(tags.clone()) },
                    data: Some(serde_json::json!({ "rendered": rd.rendered })),
                },
                fix: None,
            });

            // Emit hint-level diagnostics for all `related_information` entries such as "help"s.
            // This is useful because they will show up in the user's editor, unlike
            // `related_information`, which just produces hard-to-read links, at least in VS Code.
            let back_ref = lsp_types::DiagnosticRelatedInformation {
                location: primary_location,
                message: "original diagnostic".to_string(),
            };
            for sub in &subdiagnostics {
                diagnostics.push(MappedRustDiagnostic {
                    url: sub.related.location.uri.clone(),
                    fix: sub.suggested_fix.clone(),
                    diagnostic: lsp_types::Diagnostic {
                        range: sub.related.location.range,
                        severity: Some(lsp_types::DiagnosticSeverity::HINT),
                        code: code.clone().map(lsp_types::NumberOrString::String),
                        code_description: code_description.clone(),
                        source: Some(source.clone()),
                        message: sub.related.message.clone(),
                        related_information: Some(vec![back_ref.clone()]),
                        tags: None, // don't apply modifiers again
                        data: None,
                    },
                });
            }

            diagnostics
        })
        .collect()
}
+
+fn rustc_code_description(code: Option<&str>) -> Option<lsp_types::CodeDescription> {
+ code.filter(|code| {
+ let mut chars = code.chars();
+ chars.next().map_or(false, |c| c == 'E')
+ && chars.by_ref().take(4).all(|c| c.is_ascii_digit())
+ && chars.next().is_none()
+ })
+ .and_then(|code| {
+ lsp_types::Url::parse(&format!("https://doc.rust-lang.org/error-index.html#{}", code))
+ .ok()
+ .map(|href| lsp_types::CodeDescription { href })
+ })
+}
+
+fn clippy_code_description(code: Option<&str>) -> Option<lsp_types::CodeDescription> {
+ code.and_then(|code| {
+ lsp_types::Url::parse(&format!(
+ "https://rust-lang.github.io/rust-clippy/master/index.html#{}",
+ code
+ ))
+ .ok()
+ .map(|href| lsp_types::CodeDescription { href })
+ })
+}
+
+#[cfg(test)]
+#[cfg(not(windows))]
+mod tests {
+ use std::path::Path;
+
+ use crate::{config::Config, global_state::GlobalState};
+
+ use super::*;
+
+ use expect_test::{expect_file, ExpectFile};
+ use lsp_types::ClientCapabilities;
+
+ fn check(diagnostics_json: &str, expect: ExpectFile) {
+ check_with_config(DiagnosticsMapConfig::default(), diagnostics_json, expect)
+ }
+
+ fn check_with_config(config: DiagnosticsMapConfig, diagnostics_json: &str, expect: ExpectFile) {
+ let diagnostic: flycheck::Diagnostic = serde_json::from_str(diagnostics_json).unwrap();
+ let workspace_root: &AbsPath = Path::new("/test/").try_into().unwrap();
+ let (sender, _) = crossbeam_channel::unbounded();
+ let state = GlobalState::new(
+ sender,
+ Config::new(workspace_root.to_path_buf(), ClientCapabilities::default()),
+ );
+ let snap = state.snapshot();
++ let mut actual = map_rust_diagnostic_to_lsp(&config, &diagnostic, workspace_root, &snap);
++ actual.iter_mut().for_each(|diag| diag.diagnostic.data = None);
+ expect.assert_debug_eq(&actual)
+ }
+
+ #[test]
+ fn rustc_incompatible_type_for_trait() {
+ check(
+ r##"{
+ "message": "method `next` has an incompatible type for trait",
+ "code": {
+ "code": "E0053",
+ "explanation": "\nThe parameters of any trait method must match between a trait implementation\nand the trait definition.\n\nHere are a couple examples of this error:\n\n```compile_fail,E0053\ntrait Foo {\n fn foo(x: u16);\n fn bar(&self);\n}\n\nstruct Bar;\n\nimpl Foo for Bar {\n // error, expected u16, found i16\n fn foo(x: i16) { }\n\n // error, types differ in mutability\n fn bar(&mut self) { }\n}\n```\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "compiler/ty/list_iter.rs",
+ "byte_start": 1307,
+ "byte_end": 1350,
+ "line_start": 52,
+ "line_end": 52,
+ "column_start": 5,
+ "column_end": 48,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " fn next(&self) -> Option<&'list ty::Ref<M>> {",
+ "highlight_start": 5,
+ "highlight_end": 48
+ }
+ ],
+ "label": "types differ in mutability",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "expected type `fn(&mut ty::list_iter::ListIterator<'list, M>) -> std::option::Option<&ty::Ref<M>>`\n found type `fn(&ty::list_iter::ListIterator<'list, M>) -> std::option::Option<&'list ty::Ref<M>>`",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "error[E0053]: method `next` has an incompatible type for trait\n --> compiler/ty/list_iter.rs:52:5\n |\n52 | fn next(&self) -> Option<&'list ty::Ref<M>> {\n | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ types differ in mutability\n |\n = note: expected type `fn(&mut ty::list_iter::ListIterator<'list, M>) -> std::option::Option<&ty::Ref<M>>`\n found type `fn(&ty::list_iter::ListIterator<'list, M>) -> std::option::Option<&'list ty::Ref<M>>`\n\n"
+ }
+ "##,
+ expect_file!["./test_data/rustc_incompatible_type_for_trait.txt"],
+ );
+ }
+
+ #[test]
+ fn rustc_unused_variable() {
+ check(
+ r##"{
+ "message": "unused variable: `foo`",
+ "code": {
+ "code": "unused_variables",
+ "explanation": null
+ },
+ "level": "warning",
+ "spans": [
+ {
+ "file_name": "driver/subcommand/repl.rs",
+ "byte_start": 9228,
+ "byte_end": 9231,
+ "line_start": 291,
+ "line_end": 291,
+ "column_start": 9,
+ "column_end": 12,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let foo = 42;",
+ "highlight_start": 9,
+ "highlight_end": 12
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "#[warn(unused_variables)] on by default",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "consider prefixing with an underscore",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "driver/subcommand/repl.rs",
+ "byte_start": 9228,
+ "byte_end": 9231,
+ "line_start": 291,
+ "line_end": 291,
+ "column_start": 9,
+ "column_end": 12,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let foo = 42;",
+ "highlight_start": 9,
+ "highlight_end": 12
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "_foo",
+ "suggestion_applicability": "MachineApplicable",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "warning: unused variable: `foo`\n --> driver/subcommand/repl.rs:291:9\n |\n291 | let foo = 42;\n | ^^^ help: consider prefixing with an underscore: `_foo`\n |\n = note: #[warn(unused_variables)] on by default\n\n"
+ }"##,
+ expect_file!["./test_data/rustc_unused_variable.txt"],
+ );
+ }
+
+ #[test]
+ #[cfg(not(windows))]
+ fn rustc_unused_variable_as_info() {
+ check_with_config(
+ DiagnosticsMapConfig {
+ warnings_as_info: vec!["unused_variables".to_string()],
+ ..DiagnosticsMapConfig::default()
+ },
+ r##"{
+ "message": "unused variable: `foo`",
+ "code": {
+ "code": "unused_variables",
+ "explanation": null
+ },
+ "level": "warning",
+ "spans": [
+ {
+ "file_name": "driver/subcommand/repl.rs",
+ "byte_start": 9228,
+ "byte_end": 9231,
+ "line_start": 291,
+ "line_end": 291,
+ "column_start": 9,
+ "column_end": 12,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let foo = 42;",
+ "highlight_start": 9,
+ "highlight_end": 12
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "#[warn(unused_variables)] on by default",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "consider prefixing with an underscore",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "driver/subcommand/repl.rs",
+ "byte_start": 9228,
+ "byte_end": 9231,
+ "line_start": 291,
+ "line_end": 291,
+ "column_start": 9,
+ "column_end": 12,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let foo = 42;",
+ "highlight_start": 9,
+ "highlight_end": 12
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "_foo",
+ "suggestion_applicability": "MachineApplicable",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "warning: unused variable: `foo`\n --> driver/subcommand/repl.rs:291:9\n |\n291 | let foo = 42;\n | ^^^ help: consider prefixing with an underscore: `_foo`\n |\n = note: #[warn(unused_variables)] on by default\n\n"
+ }"##,
+ expect_file!["./test_data/rustc_unused_variable_as_info.txt"],
+ );
+ }
+
+ #[test]
+ #[cfg(not(windows))]
+ fn rustc_unused_variable_as_hint() {
+ check_with_config(
+ DiagnosticsMapConfig {
+ warnings_as_hint: vec!["unused_variables".to_string()],
+ ..DiagnosticsMapConfig::default()
+ },
+ r##"{
+ "message": "unused variable: `foo`",
+ "code": {
+ "code": "unused_variables",
+ "explanation": null
+ },
+ "level": "warning",
+ "spans": [
+ {
+ "file_name": "driver/subcommand/repl.rs",
+ "byte_start": 9228,
+ "byte_end": 9231,
+ "line_start": 291,
+ "line_end": 291,
+ "column_start": 9,
+ "column_end": 12,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let foo = 42;",
+ "highlight_start": 9,
+ "highlight_end": 12
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "#[warn(unused_variables)] on by default",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "consider prefixing with an underscore",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "driver/subcommand/repl.rs",
+ "byte_start": 9228,
+ "byte_end": 9231,
+ "line_start": 291,
+ "line_end": 291,
+ "column_start": 9,
+ "column_end": 12,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let foo = 42;",
+ "highlight_start": 9,
+ "highlight_end": 12
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "_foo",
+ "suggestion_applicability": "MachineApplicable",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "warning: unused variable: `foo`\n --> driver/subcommand/repl.rs:291:9\n |\n291 | let foo = 42;\n | ^^^ help: consider prefixing with an underscore: `_foo`\n |\n = note: #[warn(unused_variables)] on by default\n\n"
+ }"##,
+ expect_file!["./test_data/rustc_unused_variable_as_hint.txt"],
+ );
+ }
+
+ #[test]
+ fn rustc_wrong_number_of_parameters() {
+ check(
+ r##"{
+ "message": "this function takes 2 parameters but 3 parameters were supplied",
+ "code": {
+ "code": "E0061",
+ "explanation": "\nThe number of arguments passed to a function must match the number of arguments\nspecified in the function signature.\n\nFor example, a function like:\n\n```\nfn f(a: u16, b: &str) {}\n```\n\nMust always be called with exactly two arguments, e.g., `f(2, \"test\")`.\n\nNote that Rust does not have a notion of optional function arguments or\nvariadic functions (except for its C-FFI).\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "compiler/ty/select.rs",
+ "byte_start": 8787,
+ "byte_end": 9241,
+ "line_start": 219,
+ "line_end": 231,
+ "column_start": 5,
+ "column_end": 6,
+ "is_primary": false,
+ "text": [
+ {
+ "text": " pub fn add_evidence(",
+ "highlight_start": 5,
+ "highlight_end": 25
+ },
+ {
+ "text": " &mut self,",
+ "highlight_start": 1,
+ "highlight_end": 19
+ },
+ {
+ "text": " target_poly: &ty::Ref<ty::Poly>,",
+ "highlight_start": 1,
+ "highlight_end": 41
+ },
+ {
+ "text": " evidence_poly: &ty::Ref<ty::Poly>,",
+ "highlight_start": 1,
+ "highlight_end": 43
+ },
+ {
+ "text": " ) {",
+ "highlight_start": 1,
+ "highlight_end": 8
+ },
+ {
+ "text": " match target_poly {",
+ "highlight_start": 1,
+ "highlight_end": 28
+ },
+ {
+ "text": " ty::Ref::Var(tvar, _) => self.add_var_evidence(tvar, evidence_poly),",
+ "highlight_start": 1,
+ "highlight_end": 81
+ },
+ {
+ "text": " ty::Ref::Fixed(target_ty) => {",
+ "highlight_start": 1,
+ "highlight_end": 43
+ },
+ {
+ "text": " let evidence_ty = evidence_poly.resolve_to_ty();",
+ "highlight_start": 1,
+ "highlight_end": 65
+ },
+ {
+ "text": " self.add_evidence_ty(target_ty, evidence_poly, evidence_ty)",
+ "highlight_start": 1,
+ "highlight_end": 76
+ },
+ {
+ "text": " }",
+ "highlight_start": 1,
+ "highlight_end": 14
+ },
+ {
+ "text": " }",
+ "highlight_start": 1,
+ "highlight_end": 10
+ },
+ {
+ "text": " }",
+ "highlight_start": 1,
+ "highlight_end": 6
+ }
+ ],
+ "label": "defined here",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ },
+ {
+ "file_name": "compiler/ty/select.rs",
+ "byte_start": 4045,
+ "byte_end": 4057,
+ "line_start": 104,
+ "line_end": 104,
+ "column_start": 18,
+ "column_end": 30,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " self.add_evidence(target_fixed, evidence_fixed, false);",
+ "highlight_start": 18,
+ "highlight_end": 30
+ }
+ ],
+ "label": "expected 2 parameters",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": "error[E0061]: this function takes 2 parameters but 3 parameters were supplied\n --> compiler/ty/select.rs:104:18\n |\n104 | self.add_evidence(target_fixed, evidence_fixed, false);\n | ^^^^^^^^^^^^ expected 2 parameters\n...\n219 | / pub fn add_evidence(\n220 | | &mut self,\n221 | | target_poly: &ty::Ref<ty::Poly>,\n222 | | evidence_poly: &ty::Ref<ty::Poly>,\n... |\n230 | | }\n231 | | }\n | |_____- defined here\n\n"
+ }"##,
+ expect_file!["./test_data/rustc_wrong_number_of_parameters.txt"],
+ );
+ }
+
+ #[test]
+ fn clippy_pass_by_ref() {
+ check(
+ r##"{
+ "message": "this argument is passed by reference, but would be more efficient if passed by value",
+ "code": {
+ "code": "clippy::trivially_copy_pass_by_ref",
+ "explanation": null
+ },
+ "level": "warning",
+ "spans": [
+ {
+ "file_name": "compiler/mir/tagset.rs",
+ "byte_start": 941,
+ "byte_end": 946,
+ "line_start": 42,
+ "line_end": 42,
+ "column_start": 24,
+ "column_end": 29,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " pub fn is_disjoint(&self, other: Self) -> bool {",
+ "highlight_start": 24,
+ "highlight_end": 29
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "lint level defined here",
+ "code": null,
+ "level": "note",
+ "spans": [
+ {
+ "file_name": "compiler/lib.rs",
+ "byte_start": 8,
+ "byte_end": 19,
+ "line_start": 1,
+ "line_end": 1,
+ "column_start": 9,
+ "column_end": 20,
+ "is_primary": true,
+ "text": [
+ {
+ "text": "#![warn(clippy::all)]",
+ "highlight_start": 9,
+ "highlight_end": 20
+ }
+ ],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "#[warn(clippy::trivially_copy_pass_by_ref)] implied by #[warn(clippy::all)]",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#trivially_copy_pass_by_ref",
+ "code": null,
+ "level": "help",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ },
+ {
+ "message": "consider passing by value instead",
+ "code": null,
+ "level": "help",
+ "spans": [
+ {
+ "file_name": "compiler/mir/tagset.rs",
+ "byte_start": 941,
+ "byte_end": 946,
+ "line_start": 42,
+ "line_end": 42,
+ "column_start": 24,
+ "column_end": 29,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " pub fn is_disjoint(&self, other: Self) -> bool {",
+ "highlight_start": 24,
+ "highlight_end": 29
+ }
+ ],
+ "label": null,
+ "suggested_replacement": "self",
+ "suggestion_applicability": "Unspecified",
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "warning: this argument is passed by reference, but would be more efficient if passed by value\n --> compiler/mir/tagset.rs:42:24\n |\n42 | pub fn is_disjoint(&self, other: Self) -> bool {\n | ^^^^^ help: consider passing by value instead: `self`\n |\nnote: lint level defined here\n --> compiler/lib.rs:1:9\n |\n1 | #![warn(clippy::all)]\n | ^^^^^^^^^^^\n = note: #[warn(clippy::trivially_copy_pass_by_ref)] implied by #[warn(clippy::all)]\n = help: for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#trivially_copy_pass_by_ref\n\n"
+ }"##,
+ expect_file!["./test_data/clippy_pass_by_ref.txt"],
+ );
+ }
+
+ #[test]
+ fn rustc_range_map_lsp_position() {
+ check(
+ r##"{
+ "message": "mismatched types",
+ "code": {
+ "code": "E0308",
+ "explanation": "Expected type did not match the received type.\n\nErroneous code examples:\n\n```compile_fail,E0308\nfn plus_one(x: i32) -> i32 {\n x + 1\n}\n\nplus_one(\"Not a number\");\n// ^^^^^^^^^^^^^^ expected `i32`, found `&str`\n\nif \"Not a bool\" {\n// ^^^^^^^^^^^^ expected `bool`, found `&str`\n}\n\nlet x: f32 = \"Not a float\";\n// --- ^^^^^^^^^^^^^ expected `f32`, found `&str`\n// |\n// expected due to this\n```\n\nThis error occurs when an expression was used in a place where the compiler\nexpected an expression of a different type. It can occur in several cases, the\nmost common being when calling a function and passing an argument which has a\ndifferent type than the matching type in the function declaration.\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "crates/test_diagnostics/src/main.rs",
+ "byte_start": 87,
+ "byte_end": 105,
+ "line_start": 4,
+ "line_end": 4,
+ "column_start": 18,
+ "column_end": 24,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let x: u32 = \"𐐀𐐀𐐀𐐀\"; // 17-23",
+ "highlight_start": 18,
+ "highlight_end": 24
+ }
+ ],
+ "label": "expected `u32`, found `&str`",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ },
+ {
+ "file_name": "crates/test_diagnostics/src/main.rs",
+ "byte_start": 81,
+ "byte_end": 84,
+ "line_start": 4,
+ "line_end": 4,
+ "column_start": 12,
+ "column_end": 15,
+ "is_primary": false,
+ "text": [
+ {
+ "text": " let x: u32 = \"𐐀𐐀𐐀𐐀\"; // 17-23",
+ "highlight_start": 12,
+ "highlight_end": 15
+ }
+ ],
+ "label": "expected due to this",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": "error[E0308]: mismatched types\n --> crates/test_diagnostics/src/main.rs:4:18\n |\n4 | let x: u32 = \"𐐀𐐀𐐀𐐀\"; // 17-23\n | --- ^^^^^^ expected `u32`, found `&str`\n | |\n | expected due to this\n\n"
+ }"##,
+ expect_file!("./test_data/rustc_range_map_lsp_position.txt"),
+ )
+ }
+
+ #[test]
+ fn rustc_mismatched_type() {
+ check(
+ r##"{
+ "message": "mismatched types",
+ "code": {
+ "code": "E0308",
+ "explanation": "\nThis error occurs when the compiler was unable to infer the concrete type of a\nvariable. It can occur for several cases, the most common of which is a\nmismatch in the expected type that the compiler inferred for a variable's\ninitializing expression, and the actual type explicitly assigned to the\nvariable.\n\nFor example:\n\n```compile_fail,E0308\nlet x: i32 = \"I am not a number!\";\n// ~~~ ~~~~~~~~~~~~~~~~~~~~\n// | |\n// | initializing expression;\n// | compiler infers type `&str`\n// |\n// type `i32` assigned to variable `x`\n```\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "runtime/compiler_support.rs",
+ "byte_start": 1589,
+ "byte_end": 1594,
+ "line_start": 48,
+ "line_end": 48,
+ "column_start": 65,
+ "column_end": 70,
+ "is_primary": true,
+ "text": [
+ {
+ "text": " let layout = alloc::Layout::from_size_align_unchecked(size, align);",
+ "highlight_start": 65,
+ "highlight_end": 70
+ }
+ ],
+ "label": "expected usize, found u32",
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [],
+ "rendered": "error[E0308]: mismatched types\n --> runtime/compiler_support.rs:48:65\n |\n48 | let layout = alloc::Layout::from_size_align_unchecked(size, align);\n | ^^^^^ expected usize, found u32\n\n"
+ }"##,
+ expect_file!["./test_data/rustc_mismatched_type.txt"],
+ );
+ }
+
+ #[test]
+ fn handles_macro_location() {
+ check(
+ r##"{
+ "rendered": "error[E0277]: can't compare `{integer}` with `&str`\n --> src/main.rs:2:5\n |\n2 | assert_eq!(1, \"love\");\n | ^^^^^^^^^^^^^^^^^^^^^^ no implementation for `{integer} == &str`\n |\n = help: the trait `std::cmp::PartialEq<&str>` is not implemented for `{integer}`\n = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)\n\n",
+ "children": [
+ {
+ "children": [],
+ "code": null,
+ "level": "help",
+ "message": "the trait `std::cmp::PartialEq<&str>` is not implemented for `{integer}`",
+ "rendered": null,
+ "spans": []
+ }
+ ],
+ "code": {
+ "code": "E0277",
+ "explanation": "\nYou tried to use a type which doesn't implement some trait in a place which\nexpected that trait. Erroneous code example:\n\n```compile_fail,E0277\n// here we declare the Foo trait with a bar method\ntrait Foo {\n fn bar(&self);\n}\n\n// we now declare a function which takes an object implementing the Foo trait\nfn some_func<T: Foo>(foo: T) {\n foo.bar();\n}\n\nfn main() {\n // we now call the method with the i32 type, which doesn't implement\n // the Foo trait\n some_func(5i32); // error: the trait bound `i32 : Foo` is not satisfied\n}\n```\n\nIn order to fix this error, verify that the type you're using does implement\nthe trait. Example:\n\n```\ntrait Foo {\n fn bar(&self);\n}\n\nfn some_func<T: Foo>(foo: T) {\n foo.bar(); // we can now use this method since i32 implements the\n // Foo trait\n}\n\n// we implement the trait on the i32 type\nimpl Foo for i32 {\n fn bar(&self) {}\n}\n\nfn main() {\n some_func(5i32); // ok!\n}\n```\n\nOr in a generic context, an erroneous code example would look like:\n\n```compile_fail,E0277\nfn some_func<T>(foo: T) {\n println!(\"{:?}\", foo); // error: the trait `core::fmt::Debug` is not\n // implemented for the type `T`\n}\n\nfn main() {\n // We now call the method with the i32 type,\n // which *does* implement the Debug trait.\n some_func(5i32);\n}\n```\n\nNote that the error here is in the definition of the generic function: Although\nwe only call it with a parameter that does implement `Debug`, the compiler\nstill rejects the function: It must work with all possible input types. 
In\norder to make this example compile, we need to restrict the generic type we're\naccepting:\n\n```\nuse std::fmt;\n\n// Restrict the input type to types that implement Debug.\nfn some_func<T: fmt::Debug>(foo: T) {\n println!(\"{:?}\", foo);\n}\n\nfn main() {\n // Calling the method is still fine, as i32 implements Debug.\n some_func(5i32);\n\n // This would fail to compile now:\n // struct WithoutDebug;\n // some_func(WithoutDebug);\n}\n```\n\nRust only looks at the signature of the called function, as such it must\nalready specify all requirements that will be used for every type parameter.\n"
+ },
+ "level": "error",
+ "message": "can't compare `{integer}` with `&str`",
+ "spans": [
+ {
+ "byte_end": 155,
+ "byte_start": 153,
+ "column_end": 33,
+ "column_start": 31,
+ "expansion": {
+ "def_site_span": {
+ "byte_end": 940,
+ "byte_start": 0,
+ "column_end": 6,
+ "column_start": 1,
+ "expansion": null,
+ "file_name": "<::core::macros::assert_eq macros>",
+ "is_primary": false,
+ "label": null,
+ "line_end": 36,
+ "line_start": 1,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 35,
+ "highlight_start": 1,
+ "text": "($ left : expr, $ right : expr) =>"
+ },
+ {
+ "highlight_end": 3,
+ "highlight_start": 1,
+ "text": "({"
+ },
+ {
+ "highlight_end": 33,
+ "highlight_start": 1,
+ "text": " match (& $ left, & $ right)"
+ },
+ {
+ "highlight_end": 7,
+ "highlight_start": 1,
+ "text": " {"
+ },
+ {
+ "highlight_end": 34,
+ "highlight_start": 1,
+ "text": " (left_val, right_val) =>"
+ },
+ {
+ "highlight_end": 11,
+ "highlight_start": 1,
+ "text": " {"
+ },
+ {
+ "highlight_end": 46,
+ "highlight_start": 1,
+ "text": " if ! (* left_val == * right_val)"
+ },
+ {
+ "highlight_end": 15,
+ "highlight_start": 1,
+ "text": " {"
+ },
+ {
+ "highlight_end": 25,
+ "highlight_start": 1,
+ "text": " panic !"
+ },
+ {
+ "highlight_end": 57,
+ "highlight_start": 1,
+ "text": " (r#\"assertion failed: `(left == right)`"
+ },
+ {
+ "highlight_end": 16,
+ "highlight_start": 1,
+ "text": " left: `{:?}`,"
+ },
+ {
+ "highlight_end": 18,
+ "highlight_start": 1,
+ "text": " right: `{:?}`\"#,"
+ },
+ {
+ "highlight_end": 47,
+ "highlight_start": 1,
+ "text": " & * left_val, & * right_val)"
+ },
+ {
+ "highlight_end": 15,
+ "highlight_start": 1,
+ "text": " }"
+ },
+ {
+ "highlight_end": 11,
+ "highlight_start": 1,
+ "text": " }"
+ },
+ {
+ "highlight_end": 7,
+ "highlight_start": 1,
+ "text": " }"
+ },
+ {
+ "highlight_end": 42,
+ "highlight_start": 1,
+ "text": " }) ; ($ left : expr, $ right : expr,) =>"
+ },
+ {
+ "highlight_end": 49,
+ "highlight_start": 1,
+ "text": "({ $ crate :: assert_eq ! ($ left, $ right) }) ;"
+ },
+ {
+ "highlight_end": 53,
+ "highlight_start": 1,
+ "text": "($ left : expr, $ right : expr, $ ($ arg : tt) +) =>"
+ },
+ {
+ "highlight_end": 3,
+ "highlight_start": 1,
+ "text": "({"
+ },
+ {
+ "highlight_end": 37,
+ "highlight_start": 1,
+ "text": " match (& ($ left), & ($ right))"
+ },
+ {
+ "highlight_end": 7,
+ "highlight_start": 1,
+ "text": " {"
+ },
+ {
+ "highlight_end": 34,
+ "highlight_start": 1,
+ "text": " (left_val, right_val) =>"
+ },
+ {
+ "highlight_end": 11,
+ "highlight_start": 1,
+ "text": " {"
+ },
+ {
+ "highlight_end": 46,
+ "highlight_start": 1,
+ "text": " if ! (* left_val == * right_val)"
+ },
+ {
+ "highlight_end": 15,
+ "highlight_start": 1,
+ "text": " {"
+ },
+ {
+ "highlight_end": 25,
+ "highlight_start": 1,
+ "text": " panic !"
+ },
+ {
+ "highlight_end": 57,
+ "highlight_start": 1,
+ "text": " (r#\"assertion failed: `(left == right)`"
+ },
+ {
+ "highlight_end": 16,
+ "highlight_start": 1,
+ "text": " left: `{:?}`,"
+ },
+ {
+ "highlight_end": 22,
+ "highlight_start": 1,
+ "text": " right: `{:?}`: {}\"#,"
+ },
+ {
+ "highlight_end": 72,
+ "highlight_start": 1,
+ "text": " & * left_val, & * right_val, $ crate :: format_args !"
+ },
+ {
+ "highlight_end": 33,
+ "highlight_start": 1,
+ "text": " ($ ($ arg) +))"
+ },
+ {
+ "highlight_end": 15,
+ "highlight_start": 1,
+ "text": " }"
+ },
+ {
+ "highlight_end": 11,
+ "highlight_start": 1,
+ "text": " }"
+ },
+ {
+ "highlight_end": 7,
+ "highlight_start": 1,
+ "text": " }"
+ },
+ {
+ "highlight_end": 6,
+ "highlight_start": 1,
+ "text": " }) ;"
+ }
+ ]
+ },
+ "macro_decl_name": "assert_eq!",
+ "span": {
+ "byte_end": 38,
+ "byte_start": 16,
+ "column_end": 27,
+ "column_start": 5,
+ "expansion": null,
+ "file_name": "src/main.rs",
+ "is_primary": false,
+ "label": null,
+ "line_end": 2,
+ "line_start": 2,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 27,
+ "highlight_start": 5,
+ "text": " assert_eq!(1, \"love\");"
+ }
+ ]
+ }
+ },
+ "file_name": "<::core::macros::assert_eq macros>",
+ "is_primary": true,
+ "label": "no implementation for `{integer} == &str`",
+ "line_end": 7,
+ "line_start": 7,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 33,
+ "highlight_start": 31,
+ "text": " if ! (* left_val == * right_val)"
+ }
+ ]
+ }
+ ]
+ }"##,
+ expect_file!["./test_data/handles_macro_location.txt"],
+ );
+ }
+
+ #[test]
+ fn macro_compiler_error() {
+ check(
+ r##"{
+ "rendered": "error: Please register your known path in the path module\n --> crates/hir_def/src/path.rs:265:9\n |\n265 | compile_error!(\"Please register your known path in the path module\")\n | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n | \n ::: crates/hir_def/src/data.rs:80:16\n |\n80 | let path = path![std::future::Future];\n | -------------------------- in this macro invocation\n\n",
+ "children": [],
+ "code": null,
+ "level": "error",
+ "message": "Please register your known path in the path module",
+ "spans": [
+ {
+ "byte_end": 8285,
+ "byte_start": 8217,
+ "column_end": 77,
+ "column_start": 9,
+ "expansion": {
+ "def_site_span": {
+ "byte_end": 8294,
+ "byte_start": 7858,
+ "column_end": 2,
+ "column_start": 1,
+ "expansion": null,
+ "file_name": "crates/hir_def/src/path.rs",
+ "is_primary": false,
+ "label": null,
+ "line_end": 267,
+ "line_start": 254,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 28,
+ "highlight_start": 1,
+ "text": "macro_rules! __known_path {"
+ },
+ {
+ "highlight_end": 37,
+ "highlight_start": 1,
+ "text": " (std::iter::IntoIterator) => {};"
+ },
+ {
+ "highlight_end": 33,
+ "highlight_start": 1,
+ "text": " (std::result::Result) => {};"
+ },
+ {
+ "highlight_end": 29,
+ "highlight_start": 1,
+ "text": " (std::ops::Range) => {};"
+ },
+ {
+ "highlight_end": 33,
+ "highlight_start": 1,
+ "text": " (std::ops::RangeFrom) => {};"
+ },
+ {
+ "highlight_end": 33,
+ "highlight_start": 1,
+ "text": " (std::ops::RangeFull) => {};"
+ },
+ {
+ "highlight_end": 31,
+ "highlight_start": 1,
+ "text": " (std::ops::RangeTo) => {};"
+ },
+ {
+ "highlight_end": 40,
+ "highlight_start": 1,
+ "text": " (std::ops::RangeToInclusive) => {};"
+ },
+ {
+ "highlight_end": 38,
+ "highlight_start": 1,
+ "text": " (std::ops::RangeInclusive) => {};"
+ },
+ {
+ "highlight_end": 27,
+ "highlight_start": 1,
+ "text": " (std::ops::Try) => {};"
+ },
+ {
+ "highlight_end": 22,
+ "highlight_start": 1,
+ "text": " ($path:path) => {"
+ },
+ {
+ "highlight_end": 77,
+ "highlight_start": 1,
+ "text": " compile_error!(\"Please register your known path in the path module\")"
+ },
+ {
+ "highlight_end": 7,
+ "highlight_start": 1,
+ "text": " };"
+ },
+ {
+ "highlight_end": 2,
+ "highlight_start": 1,
+ "text": "}"
+ }
+ ]
+ },
+ "macro_decl_name": "$crate::__known_path!",
+ "span": {
+ "byte_end": 8427,
+ "byte_start": 8385,
+ "column_end": 51,
+ "column_start": 9,
+ "expansion": {
+ "def_site_span": {
+ "byte_end": 8611,
+ "byte_start": 8312,
+ "column_end": 2,
+ "column_start": 1,
+ "expansion": null,
+ "file_name": "crates/hir_def/src/path.rs",
+ "is_primary": false,
+ "label": null,
+ "line_end": 277,
+ "line_start": 270,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 22,
+ "highlight_start": 1,
+ "text": "macro_rules! __path {"
+ },
+ {
+ "highlight_end": 43,
+ "highlight_start": 1,
+ "text": " ($start:ident $(:: $seg:ident)*) => ({"
+ },
+ {
+ "highlight_end": 51,
+ "highlight_start": 1,
+ "text": " $crate::__known_path!($start $(:: $seg)*);"
+ },
+ {
+ "highlight_end": 87,
+ "highlight_start": 1,
+ "text": " $crate::path::ModPath::from_simple_segments($crate::path::PathKind::Abs, vec!["
+ },
+ {
+ "highlight_end": 76,
+ "highlight_start": 1,
+ "text": " $crate::path::__name![$start], $($crate::path::__name![$seg],)*"
+ },
+ {
+ "highlight_end": 11,
+ "highlight_start": 1,
+ "text": " ])"
+ },
+ {
+ "highlight_end": 8,
+ "highlight_start": 1,
+ "text": " });"
+ },
+ {
+ "highlight_end": 2,
+ "highlight_start": 1,
+ "text": "}"
+ }
+ ]
+ },
+ "macro_decl_name": "path!",
+ "span": {
+ "byte_end": 2966,
+ "byte_start": 2940,
+ "column_end": 42,
+ "column_start": 16,
+ "expansion": null,
+ "file_name": "crates/hir_def/src/data.rs",
+ "is_primary": false,
+ "label": null,
+ "line_end": 80,
+ "line_start": 80,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 42,
+ "highlight_start": 16,
+ "text": " let path = path![std::future::Future];"
+ }
+ ]
+ }
+ },
+ "file_name": "crates/hir_def/src/path.rs",
+ "is_primary": false,
+ "label": null,
+ "line_end": 272,
+ "line_start": 272,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 51,
+ "highlight_start": 9,
+ "text": " $crate::__known_path!($start $(:: $seg)*);"
+ }
+ ]
+ }
+ },
+ "file_name": "crates/hir_def/src/path.rs",
+ "is_primary": true,
+ "label": null,
+ "line_end": 265,
+ "line_start": 265,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 77,
+ "highlight_start": 9,
+ "text": " compile_error!(\"Please register your known path in the path module\")"
+ }
+ ]
+ }
+ ]
+ }
+ "##,
+ expect_file!["./test_data/macro_compiler_error.txt"],
+ );
+ }
+
+ #[test]
+ fn snap_multi_line_fix() {
+ check(
+ r##"{
+ "rendered": "warning: returning the result of a let binding from a block\n --> src/main.rs:4:5\n |\n3 | let a = (0..10).collect();\n | -------------------------- unnecessary let binding\n4 | a\n | ^\n |\n = note: `#[warn(clippy::let_and_return)]` on by default\n = help: for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#let_and_return\nhelp: return the expression directly\n |\n3 | \n4 | (0..10).collect()\n |\n\n",
+ "children": [
+ {
+ "children": [],
+ "code": null,
+ "level": "note",
+ "message": "`#[warn(clippy::let_and_return)]` on by default",
+ "rendered": null,
+ "spans": []
+ },
+ {
+ "children": [],
+ "code": null,
+ "level": "help",
+ "message": "for further information visit https://rust-lang.github.io/rust-clippy/master/index.html#let_and_return",
+ "rendered": null,
+ "spans": []
+ },
+ {
+ "children": [],
+ "code": null,
+ "level": "help",
+ "message": "return the expression directly",
+ "rendered": null,
+ "spans": [
+ {
+ "byte_end": 55,
+ "byte_start": 29,
+ "column_end": 31,
+ "column_start": 5,
+ "expansion": null,
+ "file_name": "src/main.rs",
+ "is_primary": true,
+ "label": null,
+ "line_end": 3,
+ "line_start": 3,
+ "suggested_replacement": "",
+ "suggestion_applicability": "MachineApplicable",
+ "text": [
+ {
+ "highlight_end": 31,
+ "highlight_start": 5,
+ "text": " let a = (0..10).collect();"
+ }
+ ]
+ },
+ {
+ "byte_end": 61,
+ "byte_start": 60,
+ "column_end": 6,
+ "column_start": 5,
+ "expansion": null,
+ "file_name": "src/main.rs",
+ "is_primary": true,
+ "label": null,
+ "line_end": 4,
+ "line_start": 4,
+ "suggested_replacement": "(0..10).collect()",
+ "suggestion_applicability": "MachineApplicable",
+ "text": [
+ {
+ "highlight_end": 6,
+ "highlight_start": 5,
+ "text": " a"
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ "code": {
+ "code": "clippy::let_and_return",
+ "explanation": null
+ },
+ "level": "warning",
+ "message": "returning the result of a let binding from a block",
+ "spans": [
+ {
+ "byte_end": 55,
+ "byte_start": 29,
+ "column_end": 31,
+ "column_start": 5,
+ "expansion": null,
+ "file_name": "src/main.rs",
+ "is_primary": false,
+ "label": "unnecessary let binding",
+ "line_end": 3,
+ "line_start": 3,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 31,
+ "highlight_start": 5,
+ "text": " let a = (0..10).collect();"
+ }
+ ]
+ },
+ {
+ "byte_end": 61,
+ "byte_start": 60,
+ "column_end": 6,
+ "column_start": 5,
+ "expansion": null,
+ "file_name": "src/main.rs",
+ "is_primary": true,
+ "label": null,
+ "line_end": 4,
+ "line_start": 4,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "text": [
+ {
+ "highlight_end": 6,
+ "highlight_start": 5,
+ "text": " a"
+ }
+ ]
+ }
+ ]
+ }
+ "##,
+ expect_file!["./test_data/snap_multi_line_fix.txt"],
+ );
+ }
+
+ #[test]
+ fn reasonable_line_numbers_from_empty_file() {
+ check(
+ r##"{
+ "message": "`main` function not found in crate `current`",
+ "code": {
+ "code": "E0601",
+ "explanation": "No `main` function was found in a binary crate.\n\nTo fix this error, add a `main` function:\n\n```\nfn main() {\n // Your program will start here.\n println!(\"Hello world!\");\n}\n```\n\nIf you don't know the basics of Rust, you can look at the\n[Rust Book][rust-book] to get started.\n\n[rust-book]: https://doc.rust-lang.org/book/\n"
+ },
+ "level": "error",
+ "spans": [
+ {
+ "file_name": "src/bin/current.rs",
+ "byte_start": 0,
+ "byte_end": 0,
+ "line_start": 0,
+ "line_end": 0,
+ "column_start": 1,
+ "column_end": 1,
+ "is_primary": true,
+ "text": [],
+ "label": null,
+ "suggested_replacement": null,
+ "suggestion_applicability": null,
+ "expansion": null
+ }
+ ],
+ "children": [
+ {
+ "message": "consider adding a `main` function to `src/bin/current.rs`",
+ "code": null,
+ "level": "note",
+ "spans": [],
+ "children": [],
+ "rendered": null
+ }
+ ],
+ "rendered": "error[E0601]: `main` function not found in crate `current`\n |\n = note: consider adding a `main` function to `src/bin/current.rs`\n\n"
+ }"##,
+ expect_file!["./test_data/reasonable_line_numbers_from_empty_file.txt"],
+ );
+ }
+}
--- /dev/null
- let text_range = TextRange::new(start, end);
- Ok(text_range)
+//! Conversion lsp_types types to rust-analyzer specific ones.
+use anyhow::format_err;
+use ide::{Annotation, AnnotationKind, AssistKind, LineCol, LineColUtf16};
+use ide_db::base_db::{FileId, FilePosition, FileRange};
+use syntax::{TextRange, TextSize};
+use vfs::AbsPathBuf;
+
+use crate::{
+ from_json,
+ global_state::GlobalStateSnapshot,
+ line_index::{LineIndex, PositionEncoding},
+ lsp_ext,
+ lsp_utils::invalid_params_error,
+ Result,
+};
+
+pub(crate) fn abs_path(url: &lsp_types::Url) -> Result<AbsPathBuf> {
+ let path = url.to_file_path().map_err(|()| "url is not a file")?;
+ Ok(AbsPathBuf::try_from(path).unwrap())
+}
+
+pub(crate) fn vfs_path(url: &lsp_types::Url) -> Result<vfs::VfsPath> {
+ abs_path(url).map(vfs::VfsPath::from)
+}
+
+pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> Result<TextSize> {
+ let line_col = match line_index.encoding {
+ PositionEncoding::Utf8 => {
+ LineCol { line: position.line as u32, col: position.character as u32 }
+ }
+ PositionEncoding::Utf16 => {
+ let line_col =
+ LineColUtf16 { line: position.line as u32, col: position.character as u32 };
+ line_index.index.to_utf8(line_col)
+ }
+ };
+ let text_size =
+ line_index.index.offset(line_col).ok_or_else(|| format_err!("Invalid offset"))?;
+ Ok(text_size)
+}
+
+pub(crate) fn text_range(line_index: &LineIndex, range: lsp_types::Range) -> Result<TextRange> {
+ let start = offset(line_index, range.start)?;
+ let end = offset(line_index, range.end)?;
++ match end < start {
++ true => Err(format_err!("Invalid Range").into()),
++ false => Ok(TextRange::new(start, end)),
++ }
+}
+
+pub(crate) fn file_id(snap: &GlobalStateSnapshot, url: &lsp_types::Url) -> Result<FileId> {
+ snap.url_to_file_id(url)
+}
+
+pub(crate) fn file_position(
+ snap: &GlobalStateSnapshot,
+ tdpp: lsp_types::TextDocumentPositionParams,
+) -> Result<FilePosition> {
+ let file_id = file_id(snap, &tdpp.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let offset = offset(&line_index, tdpp.position)?;
+ Ok(FilePosition { file_id, offset })
+}
+
+pub(crate) fn file_range(
+ snap: &GlobalStateSnapshot,
+ text_document_identifier: lsp_types::TextDocumentIdentifier,
+ range: lsp_types::Range,
+) -> Result<FileRange> {
+ let file_id = file_id(snap, &text_document_identifier.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let range = text_range(&line_index, range)?;
+ Ok(FileRange { file_id, range })
+}
+
+pub(crate) fn assist_kind(kind: lsp_types::CodeActionKind) -> Option<AssistKind> {
+ let assist_kind = match &kind {
+ k if k == &lsp_types::CodeActionKind::EMPTY => AssistKind::None,
+ k if k == &lsp_types::CodeActionKind::QUICKFIX => AssistKind::QuickFix,
+ k if k == &lsp_types::CodeActionKind::REFACTOR => AssistKind::Refactor,
+ k if k == &lsp_types::CodeActionKind::REFACTOR_EXTRACT => AssistKind::RefactorExtract,
+ k if k == &lsp_types::CodeActionKind::REFACTOR_INLINE => AssistKind::RefactorInline,
+ k if k == &lsp_types::CodeActionKind::REFACTOR_REWRITE => AssistKind::RefactorRewrite,
+ _ => return None,
+ };
+
+ Some(assist_kind)
+}
+
+pub(crate) fn annotation(
+ snap: &GlobalStateSnapshot,
+ code_lens: lsp_types::CodeLens,
+) -> Result<Annotation> {
+ let data =
+ code_lens.data.ok_or_else(|| invalid_params_error("code lens without data".to_string()))?;
+ let resolve = from_json::<lsp_ext::CodeLensResolveData>("CodeLensResolveData", &data)?;
+
+ match resolve {
+ lsp_ext::CodeLensResolveData::Impls(params) => {
+ let pos @ FilePosition { file_id, .. } =
+ file_position(snap, params.text_document_position_params)?;
+ let line_index = snap.file_line_index(file_id)?;
+
+ Ok(Annotation {
+ range: text_range(&line_index, code_lens.range)?,
+ kind: AnnotationKind::HasImpls { pos, data: None },
+ })
+ }
+ lsp_ext::CodeLensResolveData::References(params) => {
+ let pos @ FilePosition { file_id, .. } = file_position(snap, params)?;
+ let line_index = snap.file_line_index(file_id)?;
+
+ Ok(Annotation {
+ range: text_range(&line_index, code_lens.range)?,
+ kind: AnnotationKind::HasReferences { pos, data: None },
+ })
+ }
+ }
+}
--- /dev/null
- pub(crate) fetch_workspaces_queue: OpQueue<Vec<anyhow::Result<ProjectWorkspace>>>,
+//! The context or environment in which the language server functions. In our
+//! server implementation this is know as the `WorldState`.
+//!
+//! Each tick provides an immutable snapshot of the state as `WorldSnapshot`.
+
+use std::{sync::Arc, time::Instant};
+
+use crossbeam_channel::{unbounded, Receiver, Sender};
+use flycheck::FlycheckHandle;
+use ide::{Analysis, AnalysisHost, Cancellable, Change, FileId};
+use ide_db::base_db::{CrateId, FileLoader, SourceDatabase};
+use lsp_types::{SemanticTokens, Url};
+use parking_lot::{Mutex, RwLock};
+use proc_macro_api::ProcMacroServer;
+use project_model::{CargoWorkspace, ProjectWorkspace, Target, WorkspaceBuildScripts};
+use rustc_hash::FxHashMap;
+use stdx::hash::NoHashHashMap;
+use vfs::AnchoredPathBuf;
+
+use crate::{
+ config::Config,
+ diagnostics::{CheckFixes, DiagnosticCollection},
+ from_proto,
+ line_index::{LineEndings, LineIndex},
+ lsp_ext,
+ main_loop::Task,
+ mem_docs::MemDocs,
+ op_queue::OpQueue,
+ reload::{self, SourceRootConfig},
+ task_pool::TaskPool,
+ to_proto::url_from_abs_path,
+ Result,
+};
+
+// Enforces drop order
+pub(crate) struct Handle<H, C> {
+ pub(crate) handle: H,
+ pub(crate) receiver: C,
+}
+
+pub(crate) type ReqHandler = fn(&mut GlobalState, lsp_server::Response);
+pub(crate) type ReqQueue = lsp_server::ReqQueue<(String, Instant), ReqHandler>;
+
+/// `GlobalState` is the primary mutable state of the language server
+///
+/// The most interesting components are `vfs`, which stores a consistent
+/// snapshot of the file systems, and `analysis_host`, which stores our
+/// incremental salsa database.
+///
+/// Note that this struct has more than one impl in various modules!
+pub(crate) struct GlobalState {
+ sender: Sender<lsp_server::Message>,
+ req_queue: ReqQueue,
+ pub(crate) task_pool: Handle<TaskPool<Task>, Receiver<Task>>,
+ pub(crate) loader: Handle<Box<dyn vfs::loader::Handle>, Receiver<vfs::loader::Message>>,
+ pub(crate) config: Arc<Config>,
+ pub(crate) analysis_host: AnalysisHost,
+ pub(crate) diagnostics: DiagnosticCollection,
+ pub(crate) mem_docs: MemDocs,
+ pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
+ pub(crate) shutdown_requested: bool,
+ pub(crate) proc_macro_changed: bool,
+ pub(crate) last_reported_status: Option<lsp_ext::ServerStatusParams>,
+ pub(crate) source_root_config: SourceRootConfig,
+ pub(crate) proc_macro_clients: Vec<Result<ProcMacroServer, String>>,
+
+ pub(crate) flycheck: Arc<[FlycheckHandle]>,
+ pub(crate) flycheck_sender: Sender<flycheck::Message>,
+ pub(crate) flycheck_receiver: Receiver<flycheck::Message>,
+
+ pub(crate) vfs: Arc<RwLock<(vfs::Vfs, NoHashHashMap<FileId, LineEndings>)>>,
+ pub(crate) vfs_config_version: u32,
+ pub(crate) vfs_progress_config_version: u32,
+ pub(crate) vfs_progress_n_total: usize,
+ pub(crate) vfs_progress_n_done: usize,
+
+ /// `workspaces` field stores the data we actually use, while the `OpQueue`
+ /// stores the result of the last fetch.
+ ///
+ /// If the fetch (partially) fails, we do not update the current value.
+ ///
+ /// The handling of build data is subtle. We fetch workspace in two phases:
+ ///
+ /// *First*, we run `cargo metadata`, which gives us fast results for
+ /// initial analysis.
+ ///
+ /// *Second*, we run `cargo check` which runs build scripts and compiles
+ /// proc macros.
+ ///
+ /// We need both for the precise analysis, but we want rust-analyzer to be
+ /// at least partially available just after the first phase. That's because
+ /// first phase is much faster, and is much less likely to fail.
+ ///
+ /// This creates a complication -- by the time the second phase completes,
+ /// the results of the fist phase could be invalid. That is, while we run
+ /// `cargo check`, the user edits `Cargo.toml`, we notice this, and the new
+ /// `cargo metadata` completes before `cargo check`.
+ ///
+ /// An additional complication is that we want to avoid needless work. When
+ /// the user just adds comments or whitespace to Cargo.toml, we do not want
+ /// to invalidate any salsa caches.
+ pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
++ pub(crate) fetch_workspaces_queue: OpQueue<Option<Vec<anyhow::Result<ProjectWorkspace>>>>,
+ pub(crate) fetch_build_data_queue:
+ OpQueue<(Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)>,
+
+ pub(crate) prime_caches_queue: OpQueue<()>,
+}
+
+/// An immutable snapshot of the world's state at a point in time.
+pub(crate) struct GlobalStateSnapshot {
+ pub(crate) config: Arc<Config>,
+ pub(crate) analysis: Analysis,
+ pub(crate) check_fixes: CheckFixes,
+ mem_docs: MemDocs,
+ pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
+ vfs: Arc<RwLock<(vfs::Vfs, NoHashHashMap<FileId, LineEndings>)>>,
+ pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
+ pub(crate) proc_macros_loaded: bool,
+ pub(crate) flycheck: Arc<[FlycheckHandle]>,
+}
+
+impl std::panic::UnwindSafe for GlobalStateSnapshot {}
+
+impl GlobalState {
+ pub(crate) fn new(sender: Sender<lsp_server::Message>, config: Config) -> GlobalState {
+ let loader = {
+ let (sender, receiver) = unbounded::<vfs::loader::Message>();
+ let handle: vfs_notify::NotifyHandle =
+ vfs::loader::Handle::spawn(Box::new(move |msg| sender.send(msg).unwrap()));
+ let handle = Box::new(handle) as Box<dyn vfs::loader::Handle>;
+ Handle { handle, receiver }
+ };
+
+ let task_pool = {
+ let (sender, receiver) = unbounded();
+ let handle = TaskPool::new(sender);
+ Handle { handle, receiver }
+ };
+
+ let analysis_host = AnalysisHost::new(config.lru_capacity());
+ let (flycheck_sender, flycheck_receiver) = unbounded();
+ let mut this = GlobalState {
+ sender,
+ req_queue: ReqQueue::default(),
+ task_pool,
+ loader,
+ config: Arc::new(config.clone()),
+ analysis_host,
+ diagnostics: Default::default(),
+ mem_docs: MemDocs::default(),
+ semantic_tokens_cache: Arc::new(Default::default()),
+ shutdown_requested: false,
+ proc_macro_changed: false,
+ last_reported_status: None,
+ source_root_config: SourceRootConfig::default(),
+ proc_macro_clients: vec![],
+
+ flycheck: Arc::new([]),
+ flycheck_sender,
+ flycheck_receiver,
+
+ vfs: Arc::new(RwLock::new((vfs::Vfs::default(), NoHashHashMap::default()))),
+ vfs_config_version: 0,
+ vfs_progress_config_version: 0,
+ vfs_progress_n_total: 0,
+ vfs_progress_n_done: 0,
+
+ workspaces: Arc::new(Vec::new()),
+ fetch_workspaces_queue: OpQueue::default(),
+ prime_caches_queue: OpQueue::default(),
+
+ fetch_build_data_queue: OpQueue::default(),
+ };
+ // Apply any required database inputs from the config.
+ this.update_configuration(config);
+ this
+ }
+
+ pub(crate) fn process_changes(&mut self) -> bool {
+ let _p = profile::span("GlobalState::process_changes");
+ // A file was added or deleted
+ let mut has_structure_changes = false;
+ let mut workspace_structure_change = None;
+
+ let (change, changed_files) = {
+ let mut change = Change::new();
+ let (vfs, line_endings_map) = &mut *self.vfs.write();
+ let mut changed_files = vfs.take_changes();
+ if changed_files.is_empty() {
+ return false;
+ }
+
+ // important: this needs to be a stable sort, the order between changes is relevant
+ // for the same file ids
+ changed_files.sort_by_key(|file| file.file_id);
+ // We need to fix up the changed events a bit, if we have a create or modify for a file
+ // id that is followed by a delete we actually no longer observe the file text from the
+ // create or modify which may cause problems later on
+ changed_files.dedup_by(|a, b| {
+ use vfs::ChangeKind::*;
+
+ if a.file_id != b.file_id {
+ return false;
+ }
+
+ match (a.change_kind, b.change_kind) {
+ // duplicate can be merged
+ (Create, Create) | (Modify, Modify) | (Delete, Delete) => true,
+ // just leave the create, modify is irrelevant
+ (Create, Modify) => {
+ std::mem::swap(a, b);
+ true
+ }
+ // modify becomes irrelevant if the file is deleted
+ (Modify, Delete) => true,
+ // we should fully remove this occurrence,
+ // but leaving just a delete works as well
+ (Create, Delete) => true,
+ // this is equivalent to a modify
+ (Delete, Create) => {
+ a.change_kind = Modify;
+ true
+ }
+ // can't really occur
+ (Modify, Create) => false,
+ (Delete, Modify) => false,
+ }
+ });
+
+ for file in &changed_files {
+ if let Some(path) = vfs.file_path(file.file_id).as_path() {
+ let path = path.to_path_buf();
+ if reload::should_refresh_for_change(&path, file.change_kind) {
+ workspace_structure_change = Some(path);
+ }
+ if file.is_created_or_deleted() {
+ has_structure_changes = true;
+ }
+ }
+
+ // Clear native diagnostics when their file gets deleted
+ if !file.exists() {
+ self.diagnostics.clear_native_for(file.file_id);
+ }
+
+ let text = if file.exists() {
+ let bytes = vfs.file_contents(file.file_id).to_vec();
+ String::from_utf8(bytes).ok().and_then(|text| {
+ let (text, line_endings) = LineEndings::normalize(text);
+ line_endings_map.insert(file.file_id, line_endings);
+ Some(Arc::new(text))
+ })
+ } else {
+ None
+ };
+ change.change_file(file.file_id, text);
+ }
+ if has_structure_changes {
+ let roots = self.source_root_config.partition(vfs);
+ change.set_roots(roots);
+ }
+ (change, changed_files)
+ };
+
+ self.analysis_host.apply_change(change);
+
+ {
+ let raw_database = self.analysis_host.raw_database();
+ // FIXME: ideally we should only trigger a workspace fetch for non-library changes
+ // but somethings going wrong with the source root business when we add a new local
+ // crate see https://github.com/rust-lang/rust-analyzer/issues/13029
+ if let Some(path) = workspace_structure_change {
+ self.fetch_workspaces_queue
+ .request_op(format!("workspace vfs file change: {}", path.display()));
+ }
+ self.proc_macro_changed =
+ changed_files.iter().filter(|file| !file.is_created_or_deleted()).any(|file| {
+ let crates = raw_database.relevant_crates(file.file_id);
+ let crate_graph = raw_database.crate_graph();
+
+ crates.iter().any(|&krate| crate_graph[krate].is_proc_macro)
+ });
+ }
+
+ true
+ }
+
+ pub(crate) fn snapshot(&self) -> GlobalStateSnapshot {
+ GlobalStateSnapshot {
+ config: Arc::clone(&self.config),
+ workspaces: Arc::clone(&self.workspaces),
+ analysis: self.analysis_host.analysis(),
+ vfs: Arc::clone(&self.vfs),
+ check_fixes: Arc::clone(&self.diagnostics.check_fixes),
+ mem_docs: self.mem_docs.clone(),
+ semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
+ proc_macros_loaded: !self.fetch_build_data_queue.last_op_result().0.is_empty(),
+ flycheck: self.flycheck.clone(),
+ }
+ }
+
+ pub(crate) fn send_request<R: lsp_types::request::Request>(
+ &mut self,
+ params: R::Params,
+ handler: ReqHandler,
+ ) {
+ let request = self.req_queue.outgoing.register(R::METHOD.to_string(), params, handler);
+ self.send(request.into());
+ }
+
+ pub(crate) fn complete_request(&mut self, response: lsp_server::Response) {
+ let handler = self
+ .req_queue
+ .outgoing
+ .complete(response.id.clone())
+ .expect("received response for unknown request");
+ handler(self, response)
+ }
+
+ pub(crate) fn send_notification<N: lsp_types::notification::Notification>(
+ &mut self,
+ params: N::Params,
+ ) {
+ let not = lsp_server::Notification::new(N::METHOD.to_string(), params);
+ self.send(not.into());
+ }
+
+ pub(crate) fn register_request(
+ &mut self,
+ request: &lsp_server::Request,
+ request_received: Instant,
+ ) {
+ self.req_queue
+ .incoming
+ .register(request.id.clone(), (request.method.clone(), request_received));
+ }
+
+ pub(crate) fn respond(&mut self, response: lsp_server::Response) {
+ if let Some((method, start)) = self.req_queue.incoming.complete(response.id.clone()) {
+ if let Some(err) = &response.error {
+ if err.message.starts_with("server panicked") {
+ self.poke_rust_analyzer_developer(format!("{}, check the log", err.message))
+ }
+ }
+
+ let duration = start.elapsed();
+ tracing::debug!("handled {} - ({}) in {:0.2?}", method, response.id, duration);
+ self.send(response.into());
+ }
+ }
+
+ pub(crate) fn cancel(&mut self, request_id: lsp_server::RequestId) {
+ if let Some(response) = self.req_queue.incoming.cancel(request_id) {
+ self.send(response.into());
+ }
+ }
+
+ pub(crate) fn is_completed(&self, request: &lsp_server::Request) -> bool {
+ self.req_queue.incoming.is_completed(&request.id)
+ }
+
+ fn send(&mut self, message: lsp_server::Message) {
+ self.sender.send(message).unwrap()
+ }
+}
+
+impl Drop for GlobalState {
+ fn drop(&mut self) {
+ self.analysis_host.request_cancellation();
+ }
+}
+
+impl GlobalStateSnapshot {
+ pub(crate) fn url_to_file_id(&self, url: &Url) -> Result<FileId> {
+ url_to_file_id(&self.vfs.read().0, url)
+ }
+
+ pub(crate) fn file_id_to_url(&self, id: FileId) -> Url {
+ file_id_to_url(&self.vfs.read().0, id)
+ }
+
+ pub(crate) fn file_line_index(&self, file_id: FileId) -> Cancellable<LineIndex> {
+ let endings = self.vfs.read().1[&file_id];
+ let index = self.analysis.file_line_index(file_id)?;
+ let res = LineIndex { index, endings, encoding: self.config.position_encoding() };
+ Ok(res)
+ }
+
+ pub(crate) fn url_file_version(&self, url: &Url) -> Option<i32> {
+ let path = from_proto::vfs_path(url).ok()?;
+ Some(self.mem_docs.get(&path)?.version)
+ }
+
+ pub(crate) fn anchored_path(&self, path: &AnchoredPathBuf) -> Url {
+ let mut base = self.vfs.read().0.file_path(path.anchor);
+ base.pop();
+ let path = base.join(&path.path).unwrap();
+ let path = path.as_path().unwrap();
+ url_from_abs_path(path)
+ }
+
+ pub(crate) fn file_id_to_file_path(&self, file_id: FileId) -> vfs::VfsPath {
+ self.vfs.read().0.file_path(file_id)
+ }
+
+ pub(crate) fn cargo_target_for_crate_root(
+ &self,
+ crate_id: CrateId,
+ ) -> Option<(&CargoWorkspace, Target)> {
+ let file_id = self.analysis.crate_root(crate_id).ok()?;
+ let path = self.vfs.read().0.file_path(file_id);
+ let path = path.as_path()?;
+ self.workspaces.iter().find_map(|ws| match ws {
+ ProjectWorkspace::Cargo { cargo, .. } => {
+ cargo.target_by_root(path).map(|it| (cargo, it))
+ }
+ ProjectWorkspace::Json { .. } => None,
+ ProjectWorkspace::DetachedFiles { .. } => None,
+ })
+ }
+}
+
+pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url {
+ let path = vfs.file_path(id);
+ let path = path.as_path().unwrap();
+ url_from_abs_path(path)
+}
+
+pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> Result<FileId> {
+ let path = from_proto::vfs_path(url)?;
+ let res = vfs.file_id(&path).ok_or_else(|| format!("file not found: {}", path))?;
+ Ok(res)
+}
--- /dev/null
- AnnotationConfig, AssistKind, AssistResolveStrategy, FileId, FilePosition, FileRange,
- HoverAction, HoverGotoTypeData, Query, RangeInfo, ReferenceCategory, Runnable, RunnableKind,
- SingleResolve, SourceChange, TextEdit,
+//! This module is responsible for implementing handlers for Language Server
+//! Protocol. The majority of requests are fulfilled by calling into the
+//! `ide` crate.
+
+use std::{
+ io::Write as _,
+ process::{self, Stdio},
+};
+
+use anyhow::Context;
+use ide::{
- to_proto::workspace_edit(&snap, source_change).map(Some)
++ AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FileId, FilePosition,
++ FileRange, HoverAction, HoverGotoTypeData, Query, RangeInfo, ReferenceCategory, Runnable,
++ RunnableKind, SingleResolve, SourceChange, TextEdit,
+};
+use ide_db::SymbolKind;
+use lsp_server::ErrorCode;
+use lsp_types::{
+ CallHierarchyIncomingCall, CallHierarchyIncomingCallsParams, CallHierarchyItem,
+ CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams,
+ CodeLens, CompletionItem, Diagnostic, DiagnosticTag, DocumentFormattingParams, FoldingRange,
+ FoldingRangeParams, HoverContents, InlayHint, InlayHintParams, Location, LocationLink,
+ NumberOrString, Position, PrepareRenameResponse, Range, RenameParams,
+ SemanticTokensDeltaParams, SemanticTokensFullDeltaResult, SemanticTokensParams,
+ SemanticTokensRangeParams, SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation,
+ SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
+};
+use project_model::{ManifestPath, ProjectWorkspace, TargetKind};
+use serde_json::json;
+use stdx::{format_to, never};
+use syntax::{algo, ast, AstNode, TextRange, TextSize, T};
+use vfs::AbsPathBuf;
+
+use crate::{
+ cargo_target_spec::CargoTargetSpec,
+ config::{RustfmtConfig, WorkspaceSymbolConfig},
+ diff::diff,
+ from_proto,
+ global_state::{GlobalState, GlobalStateSnapshot},
+ line_index::LineEndings,
+ lsp_ext::{self, PositionOrRange, ViewCrateGraphParams, WorkspaceSymbolParams},
+ lsp_utils::{all_edits_are_disjoint, invalid_params_error},
+ to_proto, LspError, Result,
+};
+
+pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<()> {
+ state.proc_macro_clients.clear();
+ state.proc_macro_changed = false;
+ state.fetch_workspaces_queue.request_op("reload workspace request".to_string());
+ state.fetch_build_data_queue.request_op("reload workspace request".to_string());
+ Ok(())
+}
+
+pub(crate) fn handle_cancel_flycheck(state: &mut GlobalState, _: ()) -> Result<()> {
+ let _p = profile::span("handle_stop_flycheck");
+ state.flycheck.iter().for_each(|flycheck| flycheck.cancel());
+ Ok(())
+}
+
+pub(crate) fn handle_analyzer_status(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::AnalyzerStatusParams,
+) -> Result<String> {
+ let _p = profile::span("handle_analyzer_status");
+
+ let mut buf = String::new();
+
+ let mut file_id = None;
+ if let Some(tdi) = params.text_document {
+ match from_proto::file_id(&snap, &tdi.uri) {
+ Ok(it) => file_id = Some(it),
+ Err(_) => format_to!(buf, "file {} not found in vfs", tdi.uri),
+ }
+ }
+
+ if snap.workspaces.is_empty() {
+ buf.push_str("No workspaces\n")
+ } else {
+ buf.push_str("Workspaces:\n");
+ format_to!(
+ buf,
+ "Loaded {:?} packages across {} workspace{}.\n",
+ snap.workspaces.iter().map(|w| w.n_packages()).sum::<usize>(),
+ snap.workspaces.len(),
+ if snap.workspaces.len() == 1 { "" } else { "s" }
+ );
+ }
+ buf.push_str("\nAnalysis:\n");
+ buf.push_str(
+ &snap
+ .analysis
+ .status(file_id)
+ .unwrap_or_else(|_| "Analysis retrieval was cancelled".to_owned()),
+ );
+ Ok(buf)
+}
+
+pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> Result<String> {
+ let _p = profile::span("handle_memory_usage");
+ let mut mem = state.analysis_host.per_query_memory_usage();
+ mem.push(("Remaining".into(), profile::memory_usage().allocated));
+
+ let mut out = String::new();
+ for (name, bytes) in mem {
+ format_to!(out, "{:>8} {}\n", bytes, name);
+ }
+ Ok(out)
+}
+
+pub(crate) fn handle_shuffle_crate_graph(state: &mut GlobalState, _: ()) -> Result<()> {
+ state.analysis_host.shuffle_crate_graph();
+ Ok(())
+}
+
+pub(crate) fn handle_syntax_tree(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::SyntaxTreeParams,
+) -> Result<String> {
+ let _p = profile::span("handle_syntax_tree");
+ let id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let line_index = snap.file_line_index(id)?;
+ let text_range = params.range.and_then(|r| from_proto::text_range(&line_index, r).ok());
+ let res = snap.analysis.syntax_tree(id, text_range)?;
+ Ok(res)
+}
+
+pub(crate) fn handle_view_hir(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> Result<String> {
+ let _p = profile::span("handle_view_hir");
+ let position = from_proto::file_position(&snap, params)?;
+ let res = snap.analysis.view_hir(position)?;
+ Ok(res)
+}
+
+pub(crate) fn handle_view_file_text(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentIdentifier,
+) -> Result<String> {
+ let file_id = from_proto::file_id(&snap, ¶ms.uri)?;
+ Ok(snap.analysis.file_text(file_id)?.to_string())
+}
+
+pub(crate) fn handle_view_item_tree(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::ViewItemTreeParams,
+) -> Result<String> {
+ let _p = profile::span("handle_view_item_tree");
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let res = snap.analysis.view_item_tree(file_id)?;
+ Ok(res)
+}
+
+pub(crate) fn handle_view_crate_graph(
+ snap: GlobalStateSnapshot,
+ params: ViewCrateGraphParams,
+) -> Result<String> {
+ let _p = profile::span("handle_view_crate_graph");
+ let dot = snap.analysis.view_crate_graph(params.full)??;
+ Ok(dot)
+}
+
+pub(crate) fn handle_expand_macro(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::ExpandMacroParams,
+) -> Result<Option<lsp_ext::ExpandedMacro>> {
+ let _p = profile::span("handle_expand_macro");
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let offset = from_proto::offset(&line_index, params.position)?;
+
+ let res = snap.analysis.expand_macro(FilePosition { file_id, offset })?;
+ Ok(res.map(|it| lsp_ext::ExpandedMacro { name: it.name, expansion: it.expansion }))
+}
+
+pub(crate) fn handle_selection_range(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::SelectionRangeParams,
+) -> Result<Option<Vec<lsp_types::SelectionRange>>> {
+ let _p = profile::span("handle_selection_range");
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let res: Result<Vec<lsp_types::SelectionRange>> = params
+ .positions
+ .into_iter()
+ .map(|position| {
+ let offset = from_proto::offset(&line_index, position)?;
+ let mut ranges = Vec::new();
+ {
+ let mut range = TextRange::new(offset, offset);
+ loop {
+ ranges.push(range);
+ let frange = FileRange { file_id, range };
+ let next = snap.analysis.extend_selection(frange)?;
+ if next == range {
+ break;
+ } else {
+ range = next
+ }
+ }
+ }
+ let mut range = lsp_types::SelectionRange {
+ range: to_proto::range(&line_index, *ranges.last().unwrap()),
+ parent: None,
+ };
+ for &r in ranges.iter().rev().skip(1) {
+ range = lsp_types::SelectionRange {
+ range: to_proto::range(&line_index, r),
+ parent: Some(Box::new(range)),
+ }
+ }
+ Ok(range)
+ })
+ .collect();
+
+ Ok(Some(res?))
+}
+
+pub(crate) fn handle_matching_brace(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::MatchingBraceParams,
+) -> Result<Vec<Position>> {
+ let _p = profile::span("handle_matching_brace");
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ params
+ .positions
+ .into_iter()
+ .map(|position| {
+ let offset = from_proto::offset(&line_index, position);
+ offset.map(|offset| {
+ let offset = match snap.analysis.matching_brace(FilePosition { file_id, offset }) {
+ Ok(Some(matching_brace_offset)) => matching_brace_offset,
+ Err(_) | Ok(None) => offset,
+ };
+ to_proto::position(&line_index, offset)
+ })
+ })
+ .collect()
+}
+
+pub(crate) fn handle_join_lines(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::JoinLinesParams,
+) -> Result<Vec<lsp_types::TextEdit>> {
+ let _p = profile::span("handle_join_lines");
+
+ let config = snap.config.join_lines();
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+
+ let mut res = TextEdit::default();
+ for range in params.ranges {
+ let range = from_proto::text_range(&line_index, range)?;
+ let edit = snap.analysis.join_lines(&config, FileRange { file_id, range })?;
+ match res.union(edit) {
+ Ok(()) => (),
+ Err(_edit) => {
+ // just ignore overlapping edits
+ }
+ }
+ }
+
+ Ok(to_proto::text_edit_vec(&line_index, res))
+}
+
+pub(crate) fn handle_on_enter(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
+ let _p = profile::span("handle_on_enter");
+ let position = from_proto::file_position(&snap, params)?;
+ let edit = match snap.analysis.on_enter(position)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+ let line_index = snap.file_line_index(position.file_id)?;
+ let edit = to_proto::snippet_text_edit_vec(&line_index, true, edit);
+ Ok(Some(edit))
+}
+
+pub(crate) fn handle_on_type_formatting(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::DocumentOnTypeFormattingParams,
+) -> Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
+ let _p = profile::span("handle_on_type_formatting");
+ let mut position = from_proto::file_position(&snap, params.text_document_position)?;
+ let line_index = snap.file_line_index(position.file_id)?;
+
+ // in `ide`, the `on_type` invariant is that
+ // `text.char_at(position) == typed_char`.
+ position.offset -= TextSize::of('.');
+ let char_typed = params.ch.chars().next().unwrap_or('\0');
+
+ let text = snap.analysis.file_text(position.file_id)?;
+ if stdx::never!(!text[usize::from(position.offset)..].starts_with(char_typed)) {
+ return Ok(None);
+ }
+
+ // We have an assist that inserts ` ` after typing `->` in `fn foo() ->{`,
+ // but it requires precise cursor positioning to work, and one can't
+ // position the cursor with on_type formatting. So, let's just toggle this
+ // feature off here, hoping that we'll enable it one day, 😿.
+ if char_typed == '>' {
+ return Ok(None);
+ }
+
+ let edit =
+ snap.analysis.on_char_typed(position, char_typed, snap.config.typing_autoclose_angle())?;
+ let edit = match edit {
+ Some(it) => it,
+ None => return Ok(None),
+ };
+
+ // This should be a single-file edit
+ let (_, text_edit) = edit.source_file_edits.into_iter().next().unwrap();
+
+ let change = to_proto::snippet_text_edit_vec(&line_index, edit.is_snippet, text_edit);
+ Ok(Some(change))
+}
+
+pub(crate) fn handle_document_symbol(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::DocumentSymbolParams,
+) -> Result<Option<lsp_types::DocumentSymbolResponse>> {
+ let _p = profile::span("handle_document_symbol");
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+
+ let mut parents: Vec<(lsp_types::DocumentSymbol, Option<usize>)> = Vec::new();
+
+ for symbol in snap.analysis.file_structure(file_id)? {
+ let mut tags = Vec::new();
+ if symbol.deprecated {
+ tags.push(SymbolTag::DEPRECATED)
+ };
+
+ #[allow(deprecated)]
+ let doc_symbol = lsp_types::DocumentSymbol {
+ name: symbol.label,
+ detail: symbol.detail,
+ kind: to_proto::structure_node_kind(symbol.kind),
+ tags: Some(tags),
+ deprecated: Some(symbol.deprecated),
+ range: to_proto::range(&line_index, symbol.node_range),
+ selection_range: to_proto::range(&line_index, symbol.navigation_range),
+ children: None,
+ };
+ parents.push((doc_symbol, symbol.parent));
+ }
+
+ // Builds hierarchy from a flat list, in reverse order (so that indices
+ // makes sense)
+ let document_symbols = {
+ let mut acc = Vec::new();
+ while let Some((mut node, parent_idx)) = parents.pop() {
+ if let Some(children) = &mut node.children {
+ children.reverse();
+ }
+ let parent = match parent_idx {
+ None => &mut acc,
+ Some(i) => parents[i].0.children.get_or_insert_with(Vec::new),
+ };
+ parent.push(node);
+ }
+ acc.reverse();
+ acc
+ };
+
+ let res = if snap.config.hierarchical_symbols() {
+ document_symbols.into()
+ } else {
+ let url = to_proto::url(&snap, file_id);
+ let mut symbol_information = Vec::<SymbolInformation>::new();
+ for symbol in document_symbols {
+ flatten_document_symbol(&symbol, None, &url, &mut symbol_information);
+ }
+ symbol_information.into()
+ };
+ return Ok(Some(res));
+
+ fn flatten_document_symbol(
+ symbol: &lsp_types::DocumentSymbol,
+ container_name: Option<String>,
+ url: &Url,
+ res: &mut Vec<SymbolInformation>,
+ ) {
+ let mut tags = Vec::new();
+
+ #[allow(deprecated)]
+ if let Some(true) = symbol.deprecated {
+ tags.push(SymbolTag::DEPRECATED)
+ }
+
+ #[allow(deprecated)]
+ res.push(SymbolInformation {
+ name: symbol.name.clone(),
+ kind: symbol.kind,
+ tags: Some(tags),
+ deprecated: symbol.deprecated,
+ location: Location::new(url.clone(), symbol.range),
+ container_name,
+ });
+
+ for child in symbol.children.iter().flatten() {
+ flatten_document_symbol(child, Some(symbol.name.clone()), url, res);
+ }
+ }
+}
+
+pub(crate) fn handle_workspace_symbol(
+ snap: GlobalStateSnapshot,
+ params: WorkspaceSymbolParams,
+) -> Result<Option<Vec<SymbolInformation>>> {
+ let _p = profile::span("handle_workspace_symbol");
+
+ let config = snap.config.workspace_symbol();
+ let (all_symbols, libs) = decide_search_scope_and_kind(&params, &config);
+ let limit = config.search_limit;
+
+ let query = {
+ let query: String = params.query.chars().filter(|&c| c != '#' && c != '*').collect();
+ let mut q = Query::new(query);
+ if !all_symbols {
+ q.only_types();
+ }
+ if libs {
+ q.libs();
+ }
+ q.limit(limit);
+ q
+ };
+ let mut res = exec_query(&snap, query)?;
+ if res.is_empty() && !all_symbols {
+ let mut query = Query::new(params.query);
+ query.limit(limit);
+ res = exec_query(&snap, query)?;
+ }
+
+ return Ok(Some(res));
+
+ fn decide_search_scope_and_kind(
+ params: &WorkspaceSymbolParams,
+ config: &WorkspaceSymbolConfig,
+ ) -> (bool, bool) {
+ // Support old-style parsing of markers in the query.
+ let mut all_symbols = params.query.contains('#');
+ let mut libs = params.query.contains('*');
+
+ // If no explicit marker was set, check request params. If that's also empty
+ // use global config.
+ if !all_symbols {
+ let search_kind = match params.search_kind {
+ Some(ref search_kind) => search_kind,
+ None => &config.search_kind,
+ };
+ all_symbols = match search_kind {
+ lsp_ext::WorkspaceSymbolSearchKind::OnlyTypes => false,
+ lsp_ext::WorkspaceSymbolSearchKind::AllSymbols => true,
+ }
+ }
+
+ if !libs {
+ let search_scope = match params.search_scope {
+ Some(ref search_scope) => search_scope,
+ None => &config.search_scope,
+ };
+ libs = match search_scope {
+ lsp_ext::WorkspaceSymbolSearchScope::Workspace => false,
+ lsp_ext::WorkspaceSymbolSearchScope::WorkspaceAndDependencies => true,
+ }
+ }
+
+ (all_symbols, libs)
+ }
+
+ fn exec_query(snap: &GlobalStateSnapshot, query: Query) -> Result<Vec<SymbolInformation>> {
+ let mut res = Vec::new();
+ for nav in snap.analysis.symbol_search(query)? {
+ let container_name = nav.container_name.as_ref().map(|v| v.to_string());
+
+ #[allow(deprecated)]
+ let info = SymbolInformation {
+ name: nav.name.to_string(),
+ kind: nav
+ .kind
+ .map(to_proto::symbol_kind)
+ .unwrap_or(lsp_types::SymbolKind::VARIABLE),
+ tags: None,
+ location: to_proto::location_from_nav(snap, nav)?,
+ container_name,
+ deprecated: None,
+ };
+ res.push(info);
+ }
+ Ok(res)
+ }
+}
+
+pub(crate) fn handle_will_rename_files(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::RenameFilesParams,
+) -> Result<Option<lsp_types::WorkspaceEdit>> {
+ let _p = profile::span("handle_will_rename_files");
+
+ let source_changes: Vec<SourceChange> = params
+ .files
+ .into_iter()
+ .filter_map(|file_rename| {
+ let from = Url::parse(&file_rename.old_uri).ok()?;
+ let to = Url::parse(&file_rename.new_uri).ok()?;
+
+ let from_path = from.to_file_path().ok()?;
+ let to_path = to.to_file_path().ok()?;
+
+ // Limit to single-level moves for now.
+ match (from_path.parent(), to_path.parent()) {
+ (Some(p1), Some(p2)) if p1 == p2 => {
+ if from_path.is_dir() {
+ // add '/' to end of url -- from `file://path/to/folder` to `file://path/to/folder/`
+ let mut old_folder_name = from_path.file_stem()?.to_str()?.to_string();
+ old_folder_name.push('/');
+ let from_with_trailing_slash = from.join(&old_folder_name).ok()?;
+
+ let imitate_from_url = from_with_trailing_slash.join("mod.rs").ok()?;
+ let new_file_name = to_path.file_name()?.to_str()?;
+ Some((
+ snap.url_to_file_id(&imitate_from_url).ok()?,
+ new_file_name.to_string(),
+ ))
+ } else {
+ let old_name = from_path.file_stem()?.to_str()?;
+ let new_name = to_path.file_stem()?.to_str()?;
+ match (old_name, new_name) {
+ ("mod", _) => None,
+ (_, "mod") => None,
+ _ => Some((snap.url_to_file_id(&from).ok()?, new_name.to_string())),
+ }
+ }
+ }
+ _ => None,
+ }
+ })
+ .filter_map(|(file_id, new_name)| {
+ snap.analysis.will_rename_file(file_id, &new_name).ok()?
+ })
+ .collect();
+
+ // Drop file system edits since we're just renaming things on the same level
+ let mut source_changes = source_changes.into_iter();
+ let mut source_change = source_changes.next().unwrap_or_default();
+ source_change.file_system_edits.clear();
+ // no collect here because we want to merge text edits on same file ids
+ source_change.extend(source_changes.flat_map(|it| it.source_file_edits));
+ if source_change.source_file_edits.is_empty() {
+ Ok(None)
+ } else {
- to_proto::workspace_edit(&snap, source_change)
++ Ok(Some(to_proto::workspace_edit(&snap, source_change)?))
+ }
+}
+
+pub(crate) fn handle_goto_definition(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::GotoDefinitionParams,
+) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
+ let _p = profile::span("handle_goto_definition");
+ let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let nav_info = match snap.analysis.goto_definition(position)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+ let src = FileRange { file_id: position.file_id, range: nav_info.range };
+ let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_goto_declaration(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::request::GotoDeclarationParams,
+) -> Result<Option<lsp_types::request::GotoDeclarationResponse>> {
+ let _p = profile::span("handle_goto_declaration");
+ let position = from_proto::file_position(&snap, params.text_document_position_params.clone())?;
+ let nav_info = match snap.analysis.goto_declaration(position)? {
+ None => return handle_goto_definition(snap, params),
+ Some(it) => it,
+ };
+ let src = FileRange { file_id: position.file_id, range: nav_info.range };
+ let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_goto_implementation(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::request::GotoImplementationParams,
+) -> Result<Option<lsp_types::request::GotoImplementationResponse>> {
+ let _p = profile::span("handle_goto_implementation");
+ let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let nav_info = match snap.analysis.goto_implementation(position)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+ let src = FileRange { file_id: position.file_id, range: nav_info.range };
+ let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_goto_type_definition(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::request::GotoTypeDefinitionParams,
+) -> Result<Option<lsp_types::request::GotoTypeDefinitionResponse>> {
+ let _p = profile::span("handle_goto_type_definition");
+ let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let nav_info = match snap.analysis.goto_type_definition(position)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+ let src = FileRange { file_id: position.file_id, range: nav_info.range };
+ let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_parent_module(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
+ let _p = profile::span("handle_parent_module");
+ if let Ok(file_path) = &params.text_document.uri.to_file_path() {
+ if file_path.file_name().unwrap_or_default() == "Cargo.toml" {
+ // search workspaces for parent packages or fallback to workspace root
+ let abs_path_buf = match AbsPathBuf::try_from(file_path.to_path_buf()).ok() {
+ Some(abs_path_buf) => abs_path_buf,
+ None => return Ok(None),
+ };
+
+ let manifest_path = match ManifestPath::try_from(abs_path_buf).ok() {
+ Some(manifest_path) => manifest_path,
+ None => return Ok(None),
+ };
+
+ let links: Vec<LocationLink> = snap
+ .workspaces
+ .iter()
+ .filter_map(|ws| match ws {
+ ProjectWorkspace::Cargo { cargo, .. } => cargo.parent_manifests(&manifest_path),
+ _ => None,
+ })
+ .flatten()
+ .map(|parent_manifest_path| LocationLink {
+ origin_selection_range: None,
+ target_uri: to_proto::url_from_abs_path(&parent_manifest_path),
+ target_range: Range::default(),
+ target_selection_range: Range::default(),
+ })
+ .collect::<_>();
+ return Ok(Some(links.into()));
+ }
+
+ // check if invoked at the crate root
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let crate_id = match snap.analysis.crates_for(file_id)?.first() {
+ Some(&crate_id) => crate_id,
+ None => return Ok(None),
+ };
+ let cargo_spec = match CargoTargetSpec::for_file(&snap, file_id)? {
+ Some(it) => it,
+ None => return Ok(None),
+ };
+
+ if snap.analysis.crate_root(crate_id)? == file_id {
+ let cargo_toml_url = to_proto::url_from_abs_path(&cargo_spec.cargo_toml);
+ let res = vec![LocationLink {
+ origin_selection_range: None,
+ target_uri: cargo_toml_url,
+ target_range: Range::default(),
+ target_selection_range: Range::default(),
+ }]
+ .into();
+ return Ok(Some(res));
+ }
+ }
+
+ // locate parent module by semantics
+ let position = from_proto::file_position(&snap, params)?;
+ let navs = snap.analysis.parent_module(position)?;
+ let res = to_proto::goto_definition_response(&snap, None, navs)?;
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_runnables(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::RunnablesParams,
+) -> Result<Vec<lsp_ext::Runnable>> {
+ let _p = profile::span("handle_runnables");
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let offset = params.position.and_then(|it| from_proto::offset(&line_index, it).ok());
+ let cargo_spec = CargoTargetSpec::for_file(&snap, file_id)?;
+
+ let expect_test = match offset {
+ Some(offset) => {
+ let source_file = snap.analysis.parse(file_id)?;
+ algo::find_node_at_offset::<ast::MacroCall>(source_file.syntax(), offset)
+ .and_then(|it| it.path()?.segment()?.name_ref())
+ .map_or(false, |it| it.text() == "expect" || it.text() == "expect_file")
+ }
+ None => false,
+ };
+
+ let mut res = Vec::new();
+ for runnable in snap.analysis.runnables(file_id)? {
+ if should_skip_for_offset(&runnable, offset) {
+ continue;
+ }
+ if should_skip_target(&runnable, cargo_spec.as_ref()) {
+ continue;
+ }
+ let mut runnable = to_proto::runnable(&snap, runnable)?;
+ if expect_test {
+ runnable.label = format!("{} + expect", runnable.label);
+ runnable.args.expect_test = Some(true);
+ }
+ res.push(runnable);
+ }
+
+ // Add `cargo check` and `cargo test` for all targets of the whole package
+ let config = snap.config.runnables();
+ match cargo_spec {
+ Some(spec) => {
+ for cmd in ["check", "test"] {
+ res.push(lsp_ext::Runnable {
+ label: format!("cargo {} -p {} --all-targets", cmd, spec.package),
+ location: None,
+ kind: lsp_ext::RunnableKind::Cargo,
+ args: lsp_ext::CargoRunnable {
+ workspace_root: Some(spec.workspace_root.clone().into()),
+ override_cargo: config.override_cargo.clone(),
+ cargo_args: vec![
+ cmd.to_string(),
+ "--package".to_string(),
+ spec.package.clone(),
+ "--all-targets".to_string(),
+ ],
+ cargo_extra_args: config.cargo_extra_args.clone(),
+ executable_args: Vec::new(),
+ expect_test: None,
+ },
+ })
+ }
+ }
+ None => {
+ if !snap.config.linked_projects().is_empty()
+ || !snap
+ .config
+ .discovered_projects
+ .as_ref()
+ .map(|projects| projects.is_empty())
+ .unwrap_or(true)
+ {
+ res.push(lsp_ext::Runnable {
+ label: "cargo check --workspace".to_string(),
+ location: None,
+ kind: lsp_ext::RunnableKind::Cargo,
+ args: lsp_ext::CargoRunnable {
+ workspace_root: None,
+ override_cargo: config.override_cargo,
+ cargo_args: vec!["check".to_string(), "--workspace".to_string()],
+ cargo_extra_args: config.cargo_extra_args,
+ executable_args: Vec::new(),
+ expect_test: None,
+ },
+ });
+ }
+ }
+ }
+ Ok(res)
+}
+
+fn should_skip_for_offset(runnable: &Runnable, offset: Option<TextSize>) -> bool {
+ match offset {
+ None => false,
+ _ if matches!(&runnable.kind, RunnableKind::TestMod { .. }) => false,
+ Some(offset) => !runnable.nav.full_range.contains_inclusive(offset),
+ }
+}
+
+pub(crate) fn handle_related_tests(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> Result<Vec<lsp_ext::TestInfo>> {
+ let _p = profile::span("handle_related_tests");
+ let position = from_proto::file_position(&snap, params)?;
+
+ let tests = snap.analysis.related_tests(position, None)?;
+ let mut res = Vec::new();
+ for it in tests {
+ if let Ok(runnable) = to_proto::runnable(&snap, it) {
+ res.push(lsp_ext::TestInfo { runnable })
+ }
+ }
+
+ Ok(res)
+}
+
+pub(crate) fn handle_completion(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::CompletionParams,
+) -> Result<Option<lsp_types::CompletionResponse>> {
+ let _p = profile::span("handle_completion");
+ let text_document_position = params.text_document_position.clone();
+ let position = from_proto::file_position(&snap, params.text_document_position)?;
+ let completion_trigger_character =
+ params.context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next());
+
+ if Some(':') == completion_trigger_character {
+ let source_file = snap.analysis.parse(position.file_id)?;
+ let left_token = source_file.syntax().token_at_offset(position.offset).left_biased();
+ let completion_triggered_after_single_colon = match left_token {
+ Some(left_token) => left_token.kind() == T![:],
+ None => true,
+ };
+ if completion_triggered_after_single_colon {
+ return Ok(None);
+ }
+ }
+
+ let completion_config = &snap.config.completion();
+ let items = match snap.analysis.completions(
+ completion_config,
+ position,
+ completion_trigger_character,
+ )? {
+ None => return Ok(None),
+ Some(items) => items,
+ };
+ let line_index = snap.file_line_index(position.file_id)?;
+
+ let items =
+ to_proto::completion_items(&snap.config, &line_index, text_document_position, items);
+
+ let completion_list = lsp_types::CompletionList { is_incomplete: true, items };
+ Ok(Some(completion_list.into()))
+}
+
+pub(crate) fn handle_completion_resolve(
+ snap: GlobalStateSnapshot,
+ mut original_completion: CompletionItem,
+) -> Result<CompletionItem> {
+ let _p = profile::span("handle_completion_resolve");
+
+ if !all_edits_are_disjoint(&original_completion, &[]) {
+ return Err(invalid_params_error(
+ "Received a completion with overlapping edits, this is not LSP-compliant".to_string(),
+ )
+ .into());
+ }
+
+ let data = match original_completion.data.take() {
+ Some(it) => it,
+ None => return Ok(original_completion),
+ };
+
+ let resolve_data: lsp_ext::CompletionResolveData = serde_json::from_value(data)?;
+
+ let file_id = from_proto::file_id(&snap, &resolve_data.position.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let offset = from_proto::offset(&line_index, resolve_data.position.position)?;
+
+ let additional_edits = snap
+ .analysis
+ .resolve_completion_edits(
+ &snap.config.completion(),
+ FilePosition { file_id, offset },
+ resolve_data
+ .imports
+ .into_iter()
+ .map(|import| (import.full_import_path, import.imported_name)),
+ )?
+ .into_iter()
+ .flat_map(|edit| edit.into_iter().map(|indel| to_proto::text_edit(&line_index, indel)))
+ .collect::<Vec<_>>();
+
+ if !all_edits_are_disjoint(&original_completion, &additional_edits) {
+ return Err(LspError::new(
+ ErrorCode::InternalError as i32,
+ "Import edit overlaps with the original completion edits, this is not LSP-compliant"
+ .into(),
+ )
+ .into());
+ }
+
+ if let Some(original_additional_edits) = original_completion.additional_text_edits.as_mut() {
+ original_additional_edits.extend(additional_edits.into_iter())
+ } else {
+ original_completion.additional_text_edits = Some(additional_edits);
+ }
+
+ Ok(original_completion)
+}
+
+pub(crate) fn handle_folding_range(
+ snap: GlobalStateSnapshot,
+ params: FoldingRangeParams,
+) -> Result<Option<Vec<FoldingRange>>> {
+ let _p = profile::span("handle_folding_range");
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let folds = snap.analysis.folding_ranges(file_id)?;
+ let text = snap.analysis.file_text(file_id)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let line_folding_only = snap.config.line_folding_only();
+ let res = folds
+ .into_iter()
+ .map(|it| to_proto::folding_range(&*text, &line_index, line_folding_only, it))
+ .collect();
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_signature_help(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::SignatureHelpParams,
+) -> Result<Option<lsp_types::SignatureHelp>> {
+ let _p = profile::span("handle_signature_help");
+ let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let help = match snap.analysis.signature_help(position)? {
+ Some(it) => it,
+ None => return Ok(None),
+ };
+ let config = snap.config.call_info();
+ let res = to_proto::signature_help(help, config, snap.config.signature_help_label_offsets());
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_hover(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::HoverParams,
+) -> Result<Option<lsp_ext::Hover>> {
+ let _p = profile::span("handle_hover");
+ let range = match params.position {
+ PositionOrRange::Position(position) => Range::new(position, position),
+ PositionOrRange::Range(range) => range,
+ };
+
+ let file_range = from_proto::file_range(&snap, params.text_document, range)?;
+ let info = match snap.analysis.hover(&snap.config.hover(), file_range)? {
+ None => return Ok(None),
+ Some(info) => info,
+ };
+
+ let line_index = snap.file_line_index(file_range.file_id)?;
+ let range = to_proto::range(&line_index, info.range);
+ let markup_kind =
+ snap.config.hover().documentation.map_or(ide::HoverDocFormat::Markdown, |kind| kind);
+ let hover = lsp_ext::Hover {
+ hover: lsp_types::Hover {
+ contents: HoverContents::Markup(to_proto::markup_content(
+ info.info.markup,
+ markup_kind,
+ )),
+ range: Some(range),
+ },
+ actions: if snap.config.hover_actions().none() {
+ Vec::new()
+ } else {
+ prepare_hover_actions(&snap, &info.info.actions)
+ },
+ };
+
+ Ok(Some(hover))
+}
+
+pub(crate) fn handle_prepare_rename(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> Result<Option<PrepareRenameResponse>> {
+ let _p = profile::span("handle_prepare_rename");
+ let position = from_proto::file_position(&snap, params)?;
+
+ let change = snap.analysis.prepare_rename(position)?.map_err(to_proto::rename_error)?;
+
+ let line_index = snap.file_line_index(position.file_id)?;
+ let range = to_proto::range(&line_index, change.range);
+ Ok(Some(PrepareRenameResponse::Range(range)))
+}
+
+pub(crate) fn handle_rename(
+ snap: GlobalStateSnapshot,
+ params: RenameParams,
+) -> Result<Option<WorkspaceEdit>> {
+ let _p = profile::span("handle_rename");
+ let position = from_proto::file_position(&snap, params.text_document_position)?;
+
+ let mut change =
+ snap.analysis.rename(position, &*params.new_name)?.map_err(to_proto::rename_error)?;
+
+ // this is kind of a hack to prevent double edits from happening when moving files
+ // When a module gets renamed by renaming the mod declaration this causes the file to move
+ // which in turn will trigger a WillRenameFiles request to the server for which we reply with a
+ // a second identical set of renames, the client will then apply both edits causing incorrect edits
+ // with this we only emit source_file_edits in the WillRenameFiles response which will do the rename instead
+ // See https://github.com/microsoft/vscode-languageserver-node/issues/752 for more info
+ if !change.file_system_edits.is_empty() && snap.config.will_rename() {
+ change.source_file_edits.clear();
+ }
+ let workspace_edit = to_proto::workspace_edit(&snap, change)?;
+ Ok(Some(workspace_edit))
+}
+
+pub(crate) fn handle_references(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::ReferenceParams,
+) -> Result<Option<Vec<Location>>> {
+ let _p = profile::span("handle_references");
+ let position = from_proto::file_position(&snap, params.text_document_position)?;
+
+ let exclude_imports = snap.config.find_all_refs_exclude_imports();
+
+ let refs = match snap.analysis.find_all_refs(position, None)? {
+ None => return Ok(None),
+ Some(refs) => refs,
+ };
+
+ let include_declaration = params.context.include_declaration;
+ let locations = refs
+ .into_iter()
+ .flat_map(|refs| {
+ let decl = if include_declaration {
+ refs.declaration.map(|decl| FileRange {
+ file_id: decl.nav.file_id,
+ range: decl.nav.focus_or_full_range(),
+ })
+ } else {
+ None
+ };
+ refs.references
+ .into_iter()
+ .flat_map(|(file_id, refs)| {
+ refs.into_iter()
+ .filter(|&(_, category)| {
+ !exclude_imports || category != Some(ReferenceCategory::Import)
+ })
+ .map(move |(range, _)| FileRange { file_id, range })
+ })
+ .chain(decl)
+ })
+ .filter_map(|frange| to_proto::location(&snap, frange).ok())
+ .collect();
+
+ Ok(Some(locations))
+}
+
+pub(crate) fn handle_formatting(
+ snap: GlobalStateSnapshot,
+ params: DocumentFormattingParams,
+) -> Result<Option<Vec<lsp_types::TextEdit>>> {
+ let _p = profile::span("handle_formatting");
+
+ run_rustfmt(&snap, params.text_document, None)
+}
+
+pub(crate) fn handle_range_formatting(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::DocumentRangeFormattingParams,
+) -> Result<Option<Vec<lsp_types::TextEdit>>> {
+ let _p = profile::span("handle_range_formatting");
+
+ run_rustfmt(&snap, params.text_document, Some(params.range))
+}
+
+pub(crate) fn handle_code_action(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::CodeActionParams,
+) -> Result<Option<Vec<lsp_ext::CodeAction>>> {
+ let _p = profile::span("handle_code_action");
+
+ if !snap.config.code_action_literals() {
+ // We intentionally don't support command-based actions, as those either
+ // require either custom client-code or server-initiated edits. Server
+ // initiated edits break causality, so we avoid those.
+ return Ok(None);
+ }
+
+ let line_index =
+ snap.file_line_index(from_proto::file_id(&snap, &params.text_document.uri)?)?;
+ let frange = from_proto::file_range(&snap, params.text_document.clone(), params.range)?;
+
+ let mut assists_config = snap.config.assist();
+ assists_config.allowed = params
+ .context
+ .only
+ .clone()
+ .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect());
+
+ let mut res: Vec<lsp_ext::CodeAction> = Vec::new();
+
+ let code_action_resolve_cap = snap.config.code_action_resolve();
+ let resolve = if code_action_resolve_cap {
+ AssistResolveStrategy::None
+ } else {
+ AssistResolveStrategy::All
+ };
+ let assists = snap.analysis.assists_with_fixes(
+ &assists_config,
+ &snap.config.diagnostics(),
+ resolve,
+ frange,
+ )?;
+ for (index, assist) in assists.into_iter().enumerate() {
+ let resolve_data =
+ if code_action_resolve_cap { Some((index, params.clone())) } else { None };
+ let code_action = to_proto::code_action(&snap, assist, resolve_data)?;
+ res.push(code_action)
+ }
+
+ // Fixes from `cargo check`.
+ for fix in
+ snap.check_fixes.values().filter_map(|it| it.get(&frange.file_id)).into_iter().flatten()
+ {
+ // FIXME: this mapping is awkward and shouldn't exist. Refactor
+ // `snap.check_fixes` to not convert to LSP prematurely.
+ let intersect_fix_range = fix
+ .ranges
+ .iter()
+ .copied()
+ .filter_map(|range| from_proto::text_range(&line_index, range).ok())
+ .any(|fix_range| fix_range.intersect(frange.range).is_some());
+ if intersect_fix_range {
+ res.push(fix.action.clone());
+ }
+ }
+
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_code_action_resolve(
+ snap: GlobalStateSnapshot,
+ mut code_action: lsp_ext::CodeAction,
+) -> Result<lsp_ext::CodeAction> {
+ let _p = profile::span("handle_code_action_resolve");
+ let params = match code_action.data.take() {
+ Some(it) => it,
+ None => return Err(invalid_params_error("code action without data".to_string()).into()),
+ };
+
+ let file_id = from_proto::file_id(&snap, &params.code_action_params.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let range = from_proto::text_range(&line_index, params.code_action_params.range)?;
+ let frange = FileRange { file_id, range };
+
+ let mut assists_config = snap.config.assist();
+ assists_config.allowed = params
+ .code_action_params
+ .context
+ .only
+ .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect());
+
+ let (assist_index, assist_resolve) = match parse_action_id(&params.id) {
+ Ok(parsed_data) => parsed_data,
+ Err(e) => {
+ return Err(invalid_params_error(format!(
+ "Failed to parse action id string '{}': {}",
+ params.id, e
+ ))
+ .into())
+ }
+ };
+
+ let expected_assist_id = assist_resolve.assist_id.clone();
+ let expected_kind = assist_resolve.assist_kind;
+
+ let assists = snap.analysis.assists_with_fixes(
+ &assists_config,
+ &snap.config.diagnostics(),
+ AssistResolveStrategy::Single(assist_resolve),
+ frange,
+ )?;
+
+ let assist = match assists.get(assist_index) {
+ Some(assist) => assist,
+ None => return Err(invalid_params_error(format!(
+ "Failed to find the assist for index {} provided by the resolve request. Resolve request assist id: {}",
+ assist_index, params.id,
+ ))
+ .into())
+ };
+ if assist.id.0 != expected_assist_id || assist.id.1 != expected_kind {
+ return Err(invalid_params_error(format!(
+ "Mismatching assist at index {} for the resolve parameters given. Resolve request assist id: {}, actual id: {:?}.",
+ assist_index, params.id, assist.id
+ ))
+ .into());
+ }
+ let ca = to_proto::code_action(&snap, assist.clone(), None)?;
+ code_action.edit = ca.edit;
+ code_action.command = ca.command;
+ Ok(code_action)
+}
+
+fn parse_action_id(action_id: &str) -> Result<(usize, SingleResolve), String> {
+ let id_parts = action_id.split(':').collect::<Vec<_>>();
+ match id_parts.as_slice() {
+ [assist_id_string, assist_kind_string, index_string] => {
+ let assist_kind: AssistKind = assist_kind_string.parse()?;
+ let index: usize = match index_string.parse() {
+ Ok(index) => index,
+ Err(e) => return Err(format!("Incorrect index string: {}", e)),
+ };
+ Ok((index, SingleResolve { assist_id: assist_id_string.to_string(), assist_kind }))
+ }
+ _ => Err("Action id contains incorrect number of segments".to_string()),
+ }
+}
+
+pub(crate) fn handle_code_lens(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::CodeLensParams,
+) -> Result<Option<Vec<CodeLens>>> {
+ let _p = profile::span("handle_code_lens");
+
+ let lens_config = snap.config.lens();
+ if lens_config.none() {
+ // early return before any db query!
+ return Ok(Some(Vec::default()));
+ }
+
+ let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+ let cargo_target_spec = CargoTargetSpec::for_file(&snap, file_id)?;
+
+ let annotations = snap.analysis.annotations(
+ &AnnotationConfig {
+ binary_target: cargo_target_spec
+ .map(|spec| {
+ matches!(
+ spec.target_kind,
+ TargetKind::Bin | TargetKind::Example | TargetKind::Test
+ )
+ })
+ .unwrap_or(false),
+ annotate_runnables: lens_config.runnable(),
+ annotate_impls: lens_config.implementations,
+ annotate_references: lens_config.refs_adt,
+ annotate_method_references: lens_config.method_refs,
+ annotate_enum_variant_references: lens_config.enum_variant_refs,
+ location: lens_config.location.into(),
+ },
+ file_id,
+ )?;
+
+ let mut res = Vec::new();
+ for a in annotations {
+ to_proto::code_lens(&mut res, &snap, a)?;
+ }
+
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_code_lens_resolve(
+ snap: GlobalStateSnapshot,
+ code_lens: CodeLens,
+) -> Result<CodeLens> {
+ let annotation = from_proto::annotation(&snap, code_lens.clone())?;
+ let annotation = snap.analysis.resolve_annotation(annotation)?;
+
+ let mut acc = Vec::new();
+ to_proto::code_lens(&mut acc, &snap, annotation)?;
+
+ let res = match acc.pop() {
+ Some(it) if acc.is_empty() => it,
+ _ => {
+ never!();
+ code_lens
+ }
+ };
+
+ Ok(res)
+}
+
+pub(crate) fn handle_document_highlight(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::DocumentHighlightParams,
+) -> Result<Option<Vec<lsp_types::DocumentHighlight>>> {
+ let _p = profile::span("handle_document_highlight");
+ let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let line_index = snap.file_line_index(position.file_id)?;
+
+ let refs = match snap.analysis.highlight_related(snap.config.highlight_related(), position)? {
+ None => return Ok(None),
+ Some(refs) => refs,
+ };
+ let res = refs
+ .into_iter()
+ .map(|ide::HighlightedRange { range, category }| lsp_types::DocumentHighlight {
+ range: to_proto::range(&line_index, range),
+ kind: category.and_then(to_proto::document_highlight_kind),
+ })
+ .collect();
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_ssr(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::SsrParams,
+) -> Result<lsp_types::WorkspaceEdit> {
+ let _p = profile::span("handle_ssr");
+ let selections = params
+ .selections
+ .iter()
+ .map(|range| from_proto::file_range(&snap, params.position.text_document.clone(), *range))
+ .collect::<Result<Vec<_>, _>>()?;
+ let position = from_proto::file_position(&snap, params.position)?;
+ let source_change = snap.analysis.structural_search_replace(
+ ¶ms.query,
+ params.parse_only,
+ position,
+ selections,
+ )??;
- let file_id = from_proto::file_id(&snap, document_uri)?;
- let line_index = snap.file_line_index(file_id)?;
- let range = from_proto::file_range(
++ to_proto::workspace_edit(&snap, source_change).map_err(Into::into)
+}
+
+pub(crate) fn publish_diagnostics(
+ snap: &GlobalStateSnapshot,
+ file_id: FileId,
+) -> Result<Vec<Diagnostic>> {
+ let _p = profile::span("publish_diagnostics");
+ let line_index = snap.file_line_index(file_id)?;
+
+ let diagnostics: Vec<Diagnostic> = snap
+ .analysis
+ .diagnostics(&snap.config.diagnostics(), AssistResolveStrategy::None, file_id)?
+ .into_iter()
+ .map(|d| Diagnostic {
+ range: to_proto::range(&line_index, d.range),
+ severity: Some(to_proto::diagnostic_severity(d.severity)),
+ code: Some(NumberOrString::String(d.code.as_str().to_string())),
+ code_description: Some(lsp_types::CodeDescription {
+ href: lsp_types::Url::parse(&format!(
+ "https://rust-analyzer.github.io/manual.html#{}",
+ d.code.as_str()
+ ))
+ .unwrap(),
+ }),
+ source: Some("rust-analyzer".to_string()),
+ message: d.message,
+ related_information: None,
+ tags: if d.unused { Some(vec![DiagnosticTag::UNNECESSARY]) } else { None },
+ data: None,
+ })
+ .collect();
+ Ok(diagnostics)
+}
+
+pub(crate) fn handle_inlay_hints(
+ snap: GlobalStateSnapshot,
+ params: InlayHintParams,
+) -> Result<Option<Vec<InlayHint>>> {
+ let _p = profile::span("handle_inlay_hints");
+ let document_uri = ¶ms.text_document.uri;
- .collect::<Result<Vec<_>>>()?,
++ let FileRange { file_id, range } = from_proto::file_range(
+ &snap,
+ TextDocumentIdentifier::new(document_uri.to_owned()),
+ params.range,
+ )?;
++ let line_index = snap.file_line_index(file_id)?;
+ let inlay_hints_config = snap.config.inlay_hints();
+ Ok(Some(
+ snap.analysis
+ .inlay_hints(&inlay_hints_config, file_id, Some(range))?
+ .into_iter()
+ .map(|it| {
+ to_proto::inlay_hint(&snap, &line_index, inlay_hints_config.render_colons, it)
+ })
- .collect::<Result<Vec<_>>>()?;
++ .collect::<Cancellable<Vec<_>>>()?,
+ ))
+}
+
+pub(crate) fn handle_inlay_hints_resolve(
+ snap: GlobalStateSnapshot,
+ mut hint: InlayHint,
+) -> Result<InlayHint> {
+ let _p = profile::span("handle_inlay_hints_resolve");
+ let data = match hint.data.take() {
+ Some(it) => it,
+ None => return Ok(hint),
+ };
+
+ let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?;
+
+ let file_range = from_proto::file_range(
+ &snap,
+ resolve_data.text_document,
+ match resolve_data.position {
+ PositionOrRange::Position(pos) => Range::new(pos, pos),
+ PositionOrRange::Range(range) => range,
+ },
+ )?;
+ let info = match snap.analysis.hover(&snap.config.hover(), file_range)? {
+ None => return Ok(hint),
+ Some(info) => info,
+ };
+
+ let markup_kind =
+ snap.config.hover().documentation.map_or(ide::HoverDocFormat::Markdown, |kind| kind);
+
+ // FIXME: hover actions?
+ hint.tooltip = Some(lsp_types::InlayHintTooltip::MarkupContent(to_proto::markup_content(
+ info.info.markup,
+ markup_kind,
+ )));
+ Ok(hint)
+}
+
+pub(crate) fn handle_call_hierarchy_prepare(
+ snap: GlobalStateSnapshot,
+ params: CallHierarchyPrepareParams,
+) -> Result<Option<Vec<CallHierarchyItem>>> {
+ let _p = profile::span("handle_call_hierarchy_prepare");
+ let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+
+ let nav_info = match snap.analysis.call_hierarchy(position)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+
+ let RangeInfo { range: _, info: navs } = nav_info;
+ let res = navs
+ .into_iter()
+ .filter(|it| it.kind == Some(SymbolKind::Function))
+ .map(|it| to_proto::call_hierarchy_item(&snap, it))
++ .collect::<Cancellable<Vec<_>>>()?;
+
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_call_hierarchy_incoming(
+ snap: GlobalStateSnapshot,
+ params: CallHierarchyIncomingCallsParams,
+) -> Result<Option<Vec<CallHierarchyIncomingCall>>> {
+ let _p = profile::span("handle_call_hierarchy_incoming");
+ let item = params.item;
+
+ let doc = TextDocumentIdentifier::new(item.uri);
+ let frange = from_proto::file_range(&snap, doc, item.selection_range)?;
+ let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
+
+ let call_items = match snap.analysis.incoming_calls(fpos)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+
+ let mut res = vec![];
+
+ for call_item in call_items.into_iter() {
+ let file_id = call_item.target.file_id;
+ let line_index = snap.file_line_index(file_id)?;
+ let item = to_proto::call_hierarchy_item(&snap, call_item.target)?;
+ res.push(CallHierarchyIncomingCall {
+ from: item,
+ from_ranges: call_item
+ .ranges
+ .into_iter()
+ .map(|it| to_proto::range(&line_index, it))
+ .collect(),
+ });
+ }
+
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_call_hierarchy_outgoing(
+ snap: GlobalStateSnapshot,
+ params: CallHierarchyOutgoingCallsParams,
+) -> Result<Option<Vec<CallHierarchyOutgoingCall>>> {
+ let _p = profile::span("handle_call_hierarchy_outgoing");
+ let item = params.item;
+
+ let doc = TextDocumentIdentifier::new(item.uri);
+ let frange = from_proto::file_range(&snap, doc, item.selection_range)?;
+ let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
+
+ let call_items = match snap.analysis.outgoing_calls(fpos)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+
+ let mut res = vec![];
+
+ for call_item in call_items.into_iter() {
+ let file_id = call_item.target.file_id;
+ let line_index = snap.file_line_index(file_id)?;
+ let item = to_proto::call_hierarchy_item(&snap, call_item.target)?;
+ res.push(CallHierarchyOutgoingCall {
+ to: item,
+ from_ranges: call_item
+ .ranges
+ .into_iter()
+ .map(|it| to_proto::range(&line_index, it))
+ .collect(),
+ });
+ }
+
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_semantic_tokens_full(
+ snap: GlobalStateSnapshot,
+ params: SemanticTokensParams,
+) -> Result<Option<SemanticTokensResult>> {
+ let _p = profile::span("handle_semantic_tokens_full");
+
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let text = snap.analysis.file_text(file_id)?;
+ let line_index = snap.file_line_index(file_id)?;
+
+ let mut highlight_config = snap.config.highlighting_config();
+ // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
+ highlight_config.syntactic_name_ref_highlighting = !snap.proc_macros_loaded;
+
+ let highlights = snap.analysis.highlight(highlight_config, file_id)?;
+ let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+
+ // Unconditionally cache the tokens
+ snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
+
+ Ok(Some(semantic_tokens.into()))
+}
+
+pub(crate) fn handle_semantic_tokens_full_delta(
+ snap: GlobalStateSnapshot,
+ params: SemanticTokensDeltaParams,
+) -> Result<Option<SemanticTokensFullDeltaResult>> {
+ let _p = profile::span("handle_semantic_tokens_full_delta");
+
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let text = snap.analysis.file_text(file_id)?;
+ let line_index = snap.file_line_index(file_id)?;
+
+ let mut highlight_config = snap.config.highlighting_config();
+ // Avoid flashing a bunch of unresolved references when the proc-macro servers haven't been spawned yet.
+ highlight_config.syntactic_name_ref_highlighting = !snap.proc_macros_loaded;
+
+ let highlights = snap.analysis.highlight(highlight_config, file_id)?;
+ let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+
+ let mut cache = snap.semantic_tokens_cache.lock();
+ let cached_tokens = cache.entry(params.text_document.uri).or_default();
+
+ if let Some(prev_id) = &cached_tokens.result_id {
+ if *prev_id == params.previous_result_id {
+ let delta = to_proto::semantic_token_delta(cached_tokens, &semantic_tokens);
+ *cached_tokens = semantic_tokens;
+ return Ok(Some(delta.into()));
+ }
+ }
+
+ *cached_tokens = semantic_tokens.clone();
+
+ Ok(Some(semantic_tokens.into()))
+}
+
+pub(crate) fn handle_semantic_tokens_range(
+ snap: GlobalStateSnapshot,
+ params: SemanticTokensRangeParams,
+) -> Result<Option<SemanticTokensRangeResult>> {
+ let _p = profile::span("handle_semantic_tokens_range");
+
+ let frange = from_proto::file_range(&snap, params.text_document, params.range)?;
+ let text = snap.analysis.file_text(frange.file_id)?;
+ let line_index = snap.file_line_index(frange.file_id)?;
+
+ let highlights = snap.analysis.highlight_range(snap.config.highlighting_config(), frange)?;
+ let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+ Ok(Some(semantic_tokens.into()))
+}
+
+pub(crate) fn handle_open_docs(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> Result<Option<lsp_types::Url>> {
+ let _p = profile::span("handle_open_docs");
+ let position = from_proto::file_position(&snap, params)?;
+
+ let remote = snap.analysis.external_docs(position)?;
+
+ Ok(remote.and_then(|remote| Url::parse(&remote).ok()))
+}
+
+pub(crate) fn handle_open_cargo_toml(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::OpenCargoTomlParams,
+) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
+ let _p = profile::span("handle_open_cargo_toml");
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+
+ let cargo_spec = match CargoTargetSpec::for_file(&snap, file_id)? {
+ Some(it) => it,
+ None => return Ok(None),
+ };
+
+ let cargo_toml_url = to_proto::url_from_abs_path(&cargo_spec.cargo_toml);
+ let res: lsp_types::GotoDefinitionResponse =
+ Location::new(cargo_toml_url, Range::default()).into();
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_move_item(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::MoveItemParams,
+) -> Result<Vec<lsp_ext::SnippetTextEdit>> {
+ let _p = profile::span("handle_move_item");
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let range = from_proto::file_range(&snap, params.text_document, params.range)?;
+
+ let direction = match params.direction {
+ lsp_ext::MoveItemDirection::Up => ide::Direction::Up,
+ lsp_ext::MoveItemDirection::Down => ide::Direction::Down,
+ };
+
+ match snap.analysis.move_item(range, direction)? {
+ Some(text_edit) => {
+ let line_index = snap.file_line_index(file_id)?;
+ Ok(to_proto::snippet_text_edit_vec(&line_index, true, text_edit))
+ }
+ None => Ok(vec![]),
+ }
+}
+
+fn to_command_link(command: lsp_types::Command, tooltip: String) -> lsp_ext::CommandLink {
+ lsp_ext::CommandLink { tooltip: Some(tooltip), command }
+}
+
+fn show_impl_command_link(
+ snap: &GlobalStateSnapshot,
+ position: &FilePosition,
+) -> Option<lsp_ext::CommandLinkGroup> {
+ if snap.config.hover_actions().implementations && snap.config.client_commands().show_reference {
+ if let Some(nav_data) = snap.analysis.goto_implementation(*position).unwrap_or(None) {
+ let uri = to_proto::url(snap, position.file_id);
+ let line_index = snap.file_line_index(position.file_id).ok()?;
+ let position = to_proto::position(&line_index, position.offset);
+ let locations: Vec<_> = nav_data
+ .info
+ .into_iter()
+ .filter_map(|nav| to_proto::location_from_nav(snap, nav).ok())
+ .collect();
+ let title = to_proto::implementation_title(locations.len());
+ let command = to_proto::command::show_references(title, &uri, position, locations);
+
+ return Some(lsp_ext::CommandLinkGroup {
+ commands: vec![to_command_link(command, "Go to implementations".into())],
+ ..Default::default()
+ });
+ }
+ }
+ None
+}
+
+fn show_ref_command_link(
+ snap: &GlobalStateSnapshot,
+ position: &FilePosition,
+) -> Option<lsp_ext::CommandLinkGroup> {
+ if snap.config.hover_actions().references && snap.config.client_commands().show_reference {
+ if let Some(ref_search_res) = snap.analysis.find_all_refs(*position, None).unwrap_or(None) {
+ let uri = to_proto::url(snap, position.file_id);
+ let line_index = snap.file_line_index(position.file_id).ok()?;
+ let position = to_proto::position(&line_index, position.offset);
+ let locations: Vec<_> = ref_search_res
+ .into_iter()
+ .flat_map(|res| res.references)
+ .flat_map(|(file_id, ranges)| {
+ ranges.into_iter().filter_map(move |(range, _)| {
+ to_proto::location(snap, FileRange { file_id, range }).ok()
+ })
+ })
+ .collect();
+ let title = to_proto::reference_title(locations.len());
+ let command = to_proto::command::show_references(title, &uri, position, locations);
+
+ return Some(lsp_ext::CommandLinkGroup {
+ commands: vec![to_command_link(command, "Go to references".into())],
+ ..Default::default()
+ });
+ }
+ }
+ None
+}
+
+fn runnable_action_links(
+ snap: &GlobalStateSnapshot,
+ runnable: Runnable,
+) -> Option<lsp_ext::CommandLinkGroup> {
+ let hover_actions_config = snap.config.hover_actions();
+ if !hover_actions_config.runnable() {
+ return None;
+ }
+
+ let cargo_spec = CargoTargetSpec::for_file(snap, runnable.nav.file_id).ok()?;
+ if should_skip_target(&runnable, cargo_spec.as_ref()) {
+ return None;
+ }
+
+ let client_commands_config = snap.config.client_commands();
+ if !(client_commands_config.run_single || client_commands_config.debug_single) {
+ return None;
+ }
+
+ let title = runnable.title();
+ let r = to_proto::runnable(snap, runnable).ok()?;
+
+ let mut group = lsp_ext::CommandLinkGroup::default();
+
+ if hover_actions_config.run && client_commands_config.run_single {
+ let run_command = to_proto::command::run_single(&r, &title);
+ group.commands.push(to_command_link(run_command, r.label.clone()));
+ }
+
+ if hover_actions_config.debug && client_commands_config.debug_single {
+ let dbg_command = to_proto::command::debug_single(&r);
+ group.commands.push(to_command_link(dbg_command, r.label));
+ }
+
+ Some(group)
+}
+
+fn goto_type_action_links(
+ snap: &GlobalStateSnapshot,
+ nav_targets: &[HoverGotoTypeData],
+) -> Option<lsp_ext::CommandLinkGroup> {
+ if !snap.config.hover_actions().goto_type_def
+ || nav_targets.is_empty()
+ || !snap.config.client_commands().goto_location
+ {
+ return None;
+ }
+
+ Some(lsp_ext::CommandLinkGroup {
+ title: Some("Go to ".into()),
+ commands: nav_targets
+ .iter()
+ .filter_map(|it| {
+ to_proto::command::goto_location(snap, &it.nav)
+ .map(|cmd| to_command_link(cmd, it.mod_path.clone()))
+ })
+ .collect(),
+ })
+}
+
+fn prepare_hover_actions(
+ snap: &GlobalStateSnapshot,
+ actions: &[HoverAction],
+) -> Vec<lsp_ext::CommandLinkGroup> {
+ actions
+ .iter()
+ .filter_map(|it| match it {
+ HoverAction::Implementation(position) => show_impl_command_link(snap, position),
+ HoverAction::Reference(position) => show_ref_command_link(snap, position),
+ HoverAction::Runnable(r) => runnable_action_links(snap, r.clone()),
+ HoverAction::GoToType(targets) => goto_type_action_links(snap, targets),
+ })
+ .collect()
+}
+
+fn should_skip_target(runnable: &Runnable, cargo_spec: Option<&CargoTargetSpec>) -> bool {
+ match runnable.kind {
+ RunnableKind::Bin => {
+ // Do not suggest binary run on other target than binary
+ match &cargo_spec {
+ Some(spec) => !matches!(
+ spec.target_kind,
+ TargetKind::Bin | TargetKind::Example | TargetKind::Test
+ ),
+ None => true,
+ }
+ }
+ _ => false,
+ }
+}
+
+fn run_rustfmt(
+ snap: &GlobalStateSnapshot,
+ text_document: TextDocumentIdentifier,
+ range: Option<lsp_types::Range>,
+) -> Result<Option<Vec<lsp_types::TextEdit>>> {
+ let file_id = from_proto::file_id(snap, &text_document.uri)?;
+ let file = snap.analysis.file_text(file_id)?;
+
+ // find the edition of the package the file belongs to
+ // (if it belongs to multiple we'll just pick the first one and pray)
+ let edition = snap
+ .analysis
+ .relevant_crates_for(file_id)?
+ .into_iter()
+ .find_map(|crate_id| snap.cargo_target_for_crate_root(crate_id))
+ .map(|(ws, target)| ws[ws[target].package].edition);
+
+ let line_index = snap.file_line_index(file_id)?;
+
+ let mut command = match snap.config.rustfmt() {
+ RustfmtConfig::Rustfmt { extra_args, enable_range_formatting } => {
+ let mut cmd = process::Command::new(toolchain::rustfmt());
+ cmd.envs(snap.config.extra_env());
+ cmd.args(extra_args);
+ // try to chdir to the file so we can respect `rustfmt.toml`
+ // FIXME: use `rustfmt --config-path` once
+ // https://github.com/rust-lang/rustfmt/issues/4660 gets fixed
+ match text_document.uri.to_file_path() {
+ Ok(mut path) => {
+ // pop off file name
+ if path.pop() && path.is_dir() {
+ cmd.current_dir(path);
+ }
+ }
+ Err(_) => {
+ tracing::error!(
+ "Unable to get file path for {}, rustfmt.toml might be ignored",
+ text_document.uri
+ );
+ }
+ }
+ if let Some(edition) = edition {
+ cmd.arg("--edition");
+ cmd.arg(edition.to_string());
+ }
+
+ if let Some(range) = range {
+ if !enable_range_formatting {
+ return Err(LspError::new(
+ ErrorCode::InvalidRequest as i32,
+ String::from(
+ "rustfmt range formatting is unstable. \
+ Opt-in by using a nightly build of rustfmt and setting \
+ `rustfmt.rangeFormatting.enable` to true in your LSP configuration",
+ ),
+ )
+ .into());
+ }
+
+ let frange = from_proto::file_range(snap, text_document, range)?;
+ let start_line = line_index.index.line_col(frange.range.start()).line;
+ let end_line = line_index.index.line_col(frange.range.end()).line;
+
+ cmd.arg("--unstable-features");
+ cmd.arg("--file-lines");
+ cmd.arg(
+ json!([{
+ "file": "stdin",
+ "range": [start_line, end_line]
+ }])
+ .to_string(),
+ );
+ }
+
+ cmd
+ }
+ RustfmtConfig::CustomCommand { command, args } => {
+ let mut cmd = process::Command::new(command);
+ cmd.envs(snap.config.extra_env());
+ cmd.args(args);
+ cmd
+ }
+ };
+
+ let mut rustfmt = command
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .spawn()
+ .context(format!("Failed to spawn {:?}", command))?;
+
+ rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?;
+
+ let output = rustfmt.wait_with_output()?;
+ let captured_stdout = String::from_utf8(output.stdout)?;
+ let captured_stderr = String::from_utf8(output.stderr).unwrap_or_default();
+
+ if !output.status.success() {
+ let rustfmt_not_installed =
+ captured_stderr.contains("not installed") || captured_stderr.contains("not available");
+
+ return match output.status.code() {
+ Some(1) if !rustfmt_not_installed => {
+ // While `rustfmt` doesn't have a specific exit code for parse errors this is the
+ // likely cause exiting with 1. Most Language Servers swallow parse errors on
+ // formatting because otherwise an error is surfaced to the user on top of the
+ // syntax error diagnostics they're already receiving. This is especially jarring
+ // if they have format on save enabled.
+ tracing::warn!(
+ ?command,
+ %captured_stderr,
+ "rustfmt exited with status 1"
+ );
+ Ok(None)
+ }
+ _ => {
+ // Something else happened - e.g. `rustfmt` is missing or caught a signal
+ Err(LspError::new(
+ -32900,
+ format!(
+ r#"rustfmt exited with:
+ Status: {}
+ stdout: {}
+ stderr: {}"#,
+ output.status, captured_stdout, captured_stderr,
+ ),
+ )
+ .into())
+ }
+ };
+ }
+
+ let (new_text, new_line_endings) = LineEndings::normalize(captured_stdout);
+
+ if line_index.endings != new_line_endings {
+ // If line endings are different, send the entire file.
+ // Diffing would not work here, as the line endings might be the only
+ // difference.
+ Ok(Some(to_proto::text_edit_vec(
+ &line_index,
+ TextEdit::replace(TextRange::up_to(TextSize::of(&*file)), new_text),
+ )))
+ } else if *file == new_text {
+ // The document is already formatted correctly -- no edits needed.
+ Ok(None)
+ } else {
+ Ok(Some(to_proto::text_edit_vec(&line_index, diff(&file, &new_text))))
+ }
+}
--- /dev/null
- if !src.as_bytes().contains(&b'\r') {
- return (src, LineEndings::Unix);
- }
-
+//! Enhances `ide::LineIndex` with additional info required to convert offsets
+//! into lsp positions.
+//!
+//! We maintain invariant that all internal strings use `\n` as line separator.
+//! This module does line ending conversion and detection (so that we can
+//! convert back to `\r\n` on the way out).
+
+use std::sync::Arc;
+
+pub enum PositionEncoding {
+ Utf8,
+ Utf16,
+}
+
+pub(crate) struct LineIndex {
+ pub(crate) index: Arc<ide::LineIndex>,
+ pub(crate) endings: LineEndings,
+ pub(crate) encoding: PositionEncoding,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub(crate) enum LineEndings {
+ Unix,
+ Dos,
+}
+
+impl LineEndings {
+ /// Replaces `\r\n` with `\n` in-place in `src`.
+ pub(crate) fn normalize(src: String) -> (String, LineEndings) {
- None => tail.len(),
- Some(idx) => idx + gap_len,
+ // We replace `\r\n` with `\n` in-place, which doesn't break utf-8 encoding.
+ // While we *can* call `as_mut_vec` and do surgery on the live string
+ // directly, let's rather steal the contents of `src`. This makes the code
+ // safe even if a panic occurs.
+
+ let mut buf = src.into_bytes();
+ let mut gap_len = 0;
+ let mut tail = buf.as_mut_slice();
++ let mut crlf_seen = false;
++
++ let find_crlf = |src: &[u8]| src.windows(2).position(|it| it == b"\r\n");
++
+ loop {
+ let idx = match find_crlf(&tail[gap_len..]) {
- let new_len = buf.len() - gap_len;
++ None if crlf_seen => tail.len(),
++ // SAFETY: buf is unchanged and therefore still contains utf8 data
++ None => return (unsafe { String::from_utf8_unchecked(buf) }, LineEndings::Unix),
++ Some(idx) => {
++ crlf_seen = true;
++ idx + gap_len
++ }
+ };
+ tail.copy_within(gap_len..idx, 0);
+ tail = &mut tail[idx - gap_len..];
+ if tail.len() == gap_len {
+ break;
+ }
+ gap_len += 1;
+ }
+
+ // Account for removed `\r`.
+ // After `set_len`, `buf` is guaranteed to contain utf-8 again.
- return (src, LineEndings::Dos);
+ let src = unsafe {
++ let new_len = buf.len() - gap_len;
+ buf.set_len(new_len);
+ String::from_utf8_unchecked(buf)
+ };
- fn find_crlf(src: &[u8]) -> Option<usize> {
- src.windows(2).position(|it| it == b"\r\n")
- }
++ (src, LineEndings::Dos)
++ }
++}
+
++#[cfg(test)]
++mod tests {
++ use super::*;
++
++ #[test]
++ fn unix() {
++ let src = "a\nb\nc\n\n\n\n";
++ let (res, endings) = LineEndings::normalize(src.into());
++ assert_eq!(endings, LineEndings::Unix);
++ assert_eq!(res, src);
++ }
++
++ #[test]
++ fn dos() {
++ let src = "\r\na\r\n\r\nb\r\nc\r\n\r\n\r\n\r\n";
++ let (res, endings) = LineEndings::normalize(src.into());
++ assert_eq!(endings, LineEndings::Dos);
++ assert_eq!(res, "\na\n\nb\nc\n\n\n\n");
++ }
++
++ #[test]
++ fn mixed() {
++ let src = "a\r\nb\r\nc\r\n\n\r\n\n";
++ let (res, endings) = LineEndings::normalize(src.into());
++ assert_eq!(endings, LineEndings::Dos);
++ assert_eq!(res, "a\nb\nc\n\n\n\n");
++ }
++
++ #[test]
++ fn none() {
++ let src = "abc";
++ let (res, endings) = LineEndings::normalize(src.into());
++ assert_eq!(endings, LineEndings::Unix);
++ assert_eq!(res, src);
+ }
+}
--- /dev/null
- use std::{ops::Range, sync::Arc};
+//! Utilities for LSP-related boilerplate code.
- old_text: &mut String,
- content_changes: Vec<lsp_types::TextDocumentContentChangeEvent>,
- ) {
++use std::{mem, ops::Range, sync::Arc};
+
+use lsp_server::Notification;
+
+use crate::{
+ from_proto,
+ global_state::GlobalState,
+ line_index::{LineEndings, LineIndex, PositionEncoding},
+ LspError,
+};
+
+pub(crate) fn invalid_params_error(message: String) -> LspError {
+ LspError { code: lsp_server::ErrorCode::InvalidParams as i32, message }
+}
+
+pub(crate) fn notification_is<N: lsp_types::notification::Notification>(
+ notification: &Notification,
+) -> bool {
+ notification.method == N::METHOD
+}
+
+#[derive(Debug, Eq, PartialEq)]
+pub(crate) enum Progress {
+ Begin,
+ Report,
+ End,
+}
+
+impl Progress {
+ pub(crate) fn fraction(done: usize, total: usize) -> f64 {
+ assert!(done <= total);
+ done as f64 / total.max(1) as f64
+ }
+}
+
+impl GlobalState {
+ pub(crate) fn show_message(&mut self, typ: lsp_types::MessageType, message: String) {
+ let message = message;
+ self.send_notification::<lsp_types::notification::ShowMessage>(
+ lsp_types::ShowMessageParams { typ, message },
+ )
+ }
+
+ /// Sends a notification to the client containing the error `message`.
+ /// If `additional_info` is [`Some`], appends a note to the notification telling to check the logs.
+ /// This will always log `message` + `additional_info` to the server's error log.
+ pub(crate) fn show_and_log_error(&mut self, message: String, additional_info: Option<String>) {
+ let mut message = message;
+ match additional_info {
+ Some(additional_info) => {
+ tracing::error!("{}\n\n{}", &message, &additional_info);
+ if tracing::enabled!(tracing::Level::ERROR) {
+ message.push_str("\n\nCheck the server logs for additional info.");
+ }
+ }
+ None => tracing::error!("{}", &message),
+ }
+
+ self.send_notification::<lsp_types::notification::ShowMessage>(
+ lsp_types::ShowMessageParams { typ: lsp_types::MessageType::ERROR, message },
+ )
+ }
+
+ /// rust-analyzer is resilient -- if it fails, this doesn't usually affect
+ /// the user experience. Part of that is that we deliberately hide panics
+ /// from the user.
+ ///
+ /// We do however want to pester rust-analyzer developers with panics and
+ /// other "you really gotta fix that" messages. The current strategy is to
+ /// be noisy for "from source" builds or when profiling is enabled.
+ ///
+ /// It's unclear if making from source `cargo xtask install` builds more
+ /// panicky is a good idea, let's see if we can keep our awesome bleeding
+ /// edge users from being upset!
+ pub(crate) fn poke_rust_analyzer_developer(&mut self, message: String) {
+ let from_source_build = option_env!("POKE_RA_DEVS").is_some();
+ let profiling_enabled = std::env::var("RA_PROFILE").is_ok();
+ if from_source_build || profiling_enabled {
+ self.show_message(lsp_types::MessageType::ERROR, message)
+ }
+ }
+
+ pub(crate) fn report_progress(
+ &mut self,
+ title: &str,
+ state: Progress,
+ message: Option<String>,
+ fraction: Option<f64>,
+ cancel_token: Option<String>,
+ ) {
+ if !self.config.work_done_progress() {
+ return;
+ }
+ let percentage = fraction.map(|f| {
+ assert!((0.0..=1.0).contains(&f));
+ (f * 100.0) as u32
+ });
+ let cancellable = Some(cancel_token.is_some());
+ let token = lsp_types::ProgressToken::String(
+ cancel_token.unwrap_or_else(|| format!("rustAnalyzer/{}", title)),
+ );
+ let work_done_progress = match state {
+ Progress::Begin => {
+ self.send_request::<lsp_types::request::WorkDoneProgressCreate>(
+ lsp_types::WorkDoneProgressCreateParams { token: token.clone() },
+ |_, _| (),
+ );
+
+ lsp_types::WorkDoneProgress::Begin(lsp_types::WorkDoneProgressBegin {
+ title: title.into(),
+ cancellable,
+ message,
+ percentage,
+ })
+ }
+ Progress::Report => {
+ lsp_types::WorkDoneProgress::Report(lsp_types::WorkDoneProgressReport {
+ cancellable,
+ message,
+ percentage,
+ })
+ }
+ Progress::End => {
+ lsp_types::WorkDoneProgress::End(lsp_types::WorkDoneProgressEnd { message })
+ }
+ };
+ self.send_notification::<lsp_types::notification::Progress>(lsp_types::ProgressParams {
+ token,
+ value: lsp_types::ProgressParamsValue::WorkDone(work_done_progress),
+ });
+ }
+}
+
+pub(crate) fn apply_document_changes(
- index: Arc::new(ide::LineIndex::new(old_text)),
++ file_contents: impl FnOnce() -> String,
++ mut content_changes: Vec<lsp_types::TextDocumentContentChangeEvent>,
++) -> String {
++ // Skip to the last full document change, as it invalidates all previous changes anyways.
++ let mut start = content_changes
++ .iter()
++ .rev()
++ .position(|change| change.range.is_none())
++ .map(|idx| content_changes.len() - idx - 1)
++ .unwrap_or(0);
++
++ let mut text: String = match content_changes.get_mut(start) {
++ // peek at the first content change as an optimization
++ Some(lsp_types::TextDocumentContentChangeEvent { range: None, text, .. }) => {
++ let text = mem::take(text);
++ start += 1;
++
++ // The only change is a full document update
++ if start == content_changes.len() {
++ return text;
++ }
++ text
++ }
++ Some(_) => file_contents(),
++ // we received no content changes
++ None => return file_contents(),
++ };
++
+ let mut line_index = LineIndex {
- enum IndexValid {
- All,
- UpToLineExclusive(u32),
- }
-
- impl IndexValid {
- fn covers(&self, line: u32) -> bool {
- match *self {
- IndexValid::UpToLineExclusive(to) => to > line,
- _ => true,
- }
- }
- }
-
- let mut index_valid = IndexValid::All;
++ // the index will be overwritten in the bottom loop's first iteration
++ index: Arc::new(ide::LineIndex::new(&text)),
+ // We don't care about line endings or offset encoding here.
+ endings: LineEndings::Unix,
+ encoding: PositionEncoding::Utf16,
+ };
+
+ // The changes we got must be applied sequentially, but can cross lines so we
+ // have to keep our line index updated.
+ // Some clients (e.g. Code) sort the ranges in reverse. As an optimization, we
+ // remember the last valid line in the index and only rebuild it if needed.
+ // The VFS will normalize the end of lines to `\n`.
- match change.range {
- Some(range) => {
- if !index_valid.covers(range.end.line) {
- line_index.index = Arc::new(ide::LineIndex::new(old_text));
- }
- index_valid = IndexValid::UpToLineExclusive(range.start.line);
- if let Ok(range) = from_proto::text_range(&line_index, range) {
- old_text.replace_range(Range::<usize>::from(range), &change.text);
- }
++ let mut index_valid = !0u32;
+ for change in content_changes {
- None => {
- *old_text = change.text;
- index_valid = IndexValid::UpToLineExclusive(0);
++ // The None case can't happen as we have handled it above already
++ if let Some(range) = change.range {
++ if index_valid <= range.end.line {
++ *Arc::make_mut(&mut line_index.index) = ide::LineIndex::new(&text);
+ }
- let mut text = String::new();
- apply_document_changes(&mut text, vec![]);
++ index_valid = range.start.line;
++ if let Ok(range) = from_proto::text_range(&line_index, range) {
++ text.replace_range(Range::<usize>::from(range), &change.text);
+ }
+ }
+ }
++ text
+}
+
+/// Checks that the edits inside the completion and the additional edits do not overlap.
+/// LSP explicitly forbids the additional edits to overlap both with the main edit and themselves.
+pub(crate) fn all_edits_are_disjoint(
+ completion: &lsp_types::CompletionItem,
+ additional_edits: &[lsp_types::TextEdit],
+) -> bool {
+ let mut edit_ranges = Vec::new();
+ match completion.text_edit.as_ref() {
+ Some(lsp_types::CompletionTextEdit::Edit(edit)) => {
+ edit_ranges.push(edit.range);
+ }
+ Some(lsp_types::CompletionTextEdit::InsertAndReplace(edit)) => {
+ let replace = edit.replace;
+ let insert = edit.insert;
+ if replace.start != insert.start
+ || insert.start > insert.end
+ || insert.end > replace.end
+ {
+ // insert has to be a prefix of replace but it is not
+ return false;
+ }
+ edit_ranges.push(replace);
+ }
+ None => {}
+ }
+ if let Some(additional_changes) = completion.additional_text_edits.as_ref() {
+ edit_ranges.extend(additional_changes.iter().map(|edit| edit.range));
+ };
+ edit_ranges.extend(additional_edits.iter().map(|edit| edit.range));
+ edit_ranges.sort_by_key(|range| (range.start, range.end));
+ edit_ranges
+ .iter()
+ .zip(edit_ranges.iter().skip(1))
+ .all(|(previous, next)| previous.end <= next.start)
+}
+
+#[cfg(test)]
+mod tests {
+ use lsp_types::{
+ CompletionItem, CompletionTextEdit, InsertReplaceEdit, Position, Range,
+ TextDocumentContentChangeEvent,
+ };
+
+ use super::*;
+
+ #[test]
+ fn test_apply_document_changes() {
+ macro_rules! c {
+ [$($sl:expr, $sc:expr; $el:expr, $ec:expr => $text:expr),+] => {
+ vec![$(TextDocumentContentChangeEvent {
+ range: Some(Range {
+ start: Position { line: $sl, character: $sc },
+ end: Position { line: $el, character: $ec },
+ }),
+ range_length: None,
+ text: String::from($text),
+ }),+]
+ };
+ }
+
- apply_document_changes(
- &mut text,
++ let text = apply_document_changes(|| String::new(), vec![]);
+ assert_eq!(text, "");
- apply_document_changes(&mut text, c![0, 3; 0, 3 => " quick"]);
++ let text = apply_document_changes(
++ || text,
+ vec![TextDocumentContentChangeEvent {
+ range: None,
+ range_length: None,
+ text: String::from("the"),
+ }],
+ );
+ assert_eq!(text, "the");
- apply_document_changes(&mut text, c![0, 0; 0, 4 => "", 0, 5; 0, 5 => " foxes"]);
++ let text = apply_document_changes(|| text, c![0, 3; 0, 3 => " quick"]);
+ assert_eq!(text, "the quick");
- apply_document_changes(&mut text, c![0, 11; 0, 11 => "\ndream"]);
++ let text = apply_document_changes(|| text, c![0, 0; 0, 4 => "", 0, 5; 0, 5 => " foxes"]);
+ assert_eq!(text, "quick foxes");
- apply_document_changes(&mut text, c![1, 0; 1, 0 => "have "]);
++ let text = apply_document_changes(|| text, c![0, 11; 0, 11 => "\ndream"]);
+ assert_eq!(text, "quick foxes\ndream");
- apply_document_changes(
- &mut text,
++ let text = apply_document_changes(|| text, c![1, 0; 1, 0 => "have "]);
+ assert_eq!(text, "quick foxes\nhave dream");
- apply_document_changes(&mut text, c![0, 15; 0, 15 => "\n", 2, 17; 2, 17 => "\n"]);
++ let text = apply_document_changes(
++ || text,
+ c![0, 0; 0, 0 => "the ", 1, 4; 1, 4 => " quiet", 1, 16; 1, 16 => "s\n"],
+ );
+ assert_eq!(text, "the quick foxes\nhave quiet dreams\n");
- apply_document_changes(
- &mut text,
++ let text = apply_document_changes(|| text, c![0, 15; 0, 15 => "\n", 2, 17; 2, 17 => "\n"]);
+ assert_eq!(text, "the quick foxes\n\nhave quiet dreams\n\n");
- apply_document_changes(&mut text, c![0, 10; 1, 5 => "", 2, 0; 2, 12 => ""]);
++ let text = apply_document_changes(
++ || text,
+ c![1, 0; 1, 0 => "DREAM", 2, 0; 2, 0 => "they ", 3, 0; 3, 0 => "DON'T THEY?"],
+ );
+ assert_eq!(text, "the quick foxes\nDREAM\nthey have quiet dreams\nDON'T THEY?\n");
- text = String::from("❤️");
- apply_document_changes(&mut text, c![0, 0; 0, 0 => "a"]);
++ let text = apply_document_changes(|| text, c![0, 10; 1, 5 => "", 2, 0; 2, 12 => ""]);
+ assert_eq!(text, "the quick \nthey have quiet dreams\n");
+
- text = String::from("a\nb");
- apply_document_changes(&mut text, c![0, 1; 1, 0 => "\nțc", 0, 1; 1, 1 => "d"]);
++ let text = String::from("❤️");
++ let text = apply_document_changes(|| text, c![0, 0; 0, 0 => "a"]);
+ assert_eq!(text, "a❤️");
+
- text = String::from("a\nb");
- apply_document_changes(&mut text, c![0, 1; 1, 0 => "ț\nc", 0, 2; 0, 2 => "c"]);
++ let text = String::from("a\nb");
++ let text = apply_document_changes(|| text, c![0, 1; 1, 0 => "\nțc", 0, 1; 1, 1 => "d"]);
+ assert_eq!(text, "adcb");
+
++ let text = String::from("a\nb");
++ let text = apply_document_changes(|| text, c![0, 1; 1, 0 => "ț\nc", 0, 2; 0, 2 => "c"]);
+ assert_eq!(text, "ațc\ncb");
+ }
+
+ #[test]
+ fn empty_completion_disjoint_tests() {
+ let empty_completion =
+ CompletionItem::new_simple("label".to_string(), "detail".to_string());
+
+ let disjoint_edit_1 = lsp_types::TextEdit::new(
+ Range::new(Position::new(2, 2), Position::new(3, 3)),
+ "new_text".to_string(),
+ );
+ let disjoint_edit_2 = lsp_types::TextEdit::new(
+ Range::new(Position::new(3, 3), Position::new(4, 4)),
+ "new_text".to_string(),
+ );
+
+ let joint_edit = lsp_types::TextEdit::new(
+ Range::new(Position::new(1, 1), Position::new(5, 5)),
+ "new_text".to_string(),
+ );
+
+ assert!(
+ all_edits_are_disjoint(&empty_completion, &[]),
+ "Empty completion has all its edits disjoint"
+ );
+ assert!(
+ all_edits_are_disjoint(
+ &empty_completion,
+ &[disjoint_edit_1.clone(), disjoint_edit_2.clone()]
+ ),
+ "Empty completion is disjoint to whatever disjoint extra edits added"
+ );
+
+ assert!(
+ !all_edits_are_disjoint(
+ &empty_completion,
+ &[disjoint_edit_1, disjoint_edit_2, joint_edit]
+ ),
+ "Empty completion does not prevent joint extra edits from failing the validation"
+ );
+ }
+
+ #[test]
+ fn completion_with_joint_edits_disjoint_tests() {
+ let disjoint_edit = lsp_types::TextEdit::new(
+ Range::new(Position::new(1, 1), Position::new(2, 2)),
+ "new_text".to_string(),
+ );
+ let disjoint_edit_2 = lsp_types::TextEdit::new(
+ Range::new(Position::new(2, 2), Position::new(3, 3)),
+ "new_text".to_string(),
+ );
+ let joint_edit = lsp_types::TextEdit::new(
+ Range::new(Position::new(1, 1), Position::new(5, 5)),
+ "new_text".to_string(),
+ );
+
+ let mut completion_with_joint_edits =
+ CompletionItem::new_simple("label".to_string(), "detail".to_string());
+ completion_with_joint_edits.additional_text_edits =
+ Some(vec![disjoint_edit.clone(), joint_edit.clone()]);
+ assert!(
+ !all_edits_are_disjoint(&completion_with_joint_edits, &[]),
+ "Completion with disjoint edits fails the validation even with empty extra edits"
+ );
+
+ completion_with_joint_edits.text_edit =
+ Some(CompletionTextEdit::Edit(disjoint_edit.clone()));
+ completion_with_joint_edits.additional_text_edits = Some(vec![joint_edit.clone()]);
+ assert!(
+ !all_edits_are_disjoint(&completion_with_joint_edits, &[]),
+ "Completion with disjoint edits fails the validation even with empty extra edits"
+ );
+
+ completion_with_joint_edits.text_edit =
+ Some(CompletionTextEdit::InsertAndReplace(InsertReplaceEdit {
+ new_text: "new_text".to_string(),
+ insert: disjoint_edit.range,
+ replace: disjoint_edit_2.range,
+ }));
+ completion_with_joint_edits.additional_text_edits = Some(vec![joint_edit]);
+ assert!(
+ !all_edits_are_disjoint(&completion_with_joint_edits, &[]),
+ "Completion with disjoint edits fails the validation even with empty extra edits"
+ );
+ }
+
+ #[test]
+ fn completion_with_disjoint_edits_disjoint_tests() {
+ let disjoint_edit = lsp_types::TextEdit::new(
+ Range::new(Position::new(1, 1), Position::new(2, 2)),
+ "new_text".to_string(),
+ );
+ let disjoint_edit_2 = lsp_types::TextEdit::new(
+ Range::new(Position::new(2, 2), Position::new(3, 3)),
+ "new_text".to_string(),
+ );
+ let joint_edit = lsp_types::TextEdit::new(
+ Range::new(Position::new(1, 1), Position::new(5, 5)),
+ "new_text".to_string(),
+ );
+
+ let mut completion_with_disjoint_edits =
+ CompletionItem::new_simple("label".to_string(), "detail".to_string());
+ completion_with_disjoint_edits.text_edit = Some(CompletionTextEdit::Edit(disjoint_edit));
+ let completion_with_disjoint_edits = completion_with_disjoint_edits;
+
+ assert!(
+ all_edits_are_disjoint(&completion_with_disjoint_edits, &[]),
+ "Completion with disjoint edits is valid"
+ );
+ assert!(
+ !all_edits_are_disjoint(&completion_with_disjoint_edits, &[joint_edit]),
+ "Completion with disjoint edits and joint extra edit is invalid"
+ );
+ assert!(
+ all_edits_are_disjoint(&completion_with_disjoint_edits, &[disjoint_edit_2]),
+ "Completion with disjoint edits and joint extra edit is valid"
+ );
+ }
+}
--- /dev/null
- self.fetch_workspaces_queue.op_completed(workspaces);
+//! The main loop of `rust-analyzer` responsible for dispatching LSP
+//! requests/replies and notifications back to the client.
+use std::{
+ fmt,
+ ops::Deref,
+ sync::Arc,
+ time::{Duration, Instant},
+};
+
+use always_assert::always;
+use crossbeam_channel::{select, Receiver};
+use flycheck::FlycheckHandle;
+use ide_db::base_db::{SourceDatabaseExt, VfsPath};
+use itertools::Itertools;
+use lsp_server::{Connection, Notification, Request};
+use lsp_types::notification::Notification as _;
+use vfs::{ChangeKind, FileId};
+
+use crate::{
+ config::Config,
+ dispatch::{NotificationDispatcher, RequestDispatcher},
+ from_proto,
+ global_state::{file_id_to_url, url_to_file_id, GlobalState},
+ handlers, lsp_ext,
+ lsp_utils::{apply_document_changes, notification_is, Progress},
+ mem_docs::DocumentData,
+ reload::{self, BuildDataProgress, ProjectWorkspaceProgress},
+ Result,
+};
+
+pub fn main_loop(config: Config, connection: Connection) -> Result<()> {
+ tracing::info!("initial config: {:#?}", config);
+
+ // Windows scheduler implements priority boosts: if thread waits for an
+ // event (like a condvar), and event fires, priority of the thread is
+ // temporary bumped. This optimization backfires in our case: each time the
+ // `main_loop` schedules a task to run on a threadpool, the worker threads
+ // gets a higher priority, and (on a machine with fewer cores) displaces the
+ // main loop! We work-around this by marking the main loop as a
+ // higher-priority thread.
+ //
+ // https://docs.microsoft.com/en-us/windows/win32/procthread/scheduling-priorities
+ // https://docs.microsoft.com/en-us/windows/win32/procthread/priority-boosts
+ // https://github.com/rust-lang/rust-analyzer/issues/2835
+ #[cfg(windows)]
+ unsafe {
+ use winapi::um::processthreadsapi::*;
+ let thread = GetCurrentThread();
+ let thread_priority_above_normal = 1;
+ SetThreadPriority(thread, thread_priority_above_normal);
+ }
+
+ GlobalState::new(connection.sender, config).run(connection.receiver)
+}
+
+enum Event {
+ Lsp(lsp_server::Message),
+ Task(Task),
+ Vfs(vfs::loader::Message),
+ Flycheck(flycheck::Message),
+}
+
+#[derive(Debug)]
+pub(crate) enum Task {
+ Response(lsp_server::Response),
+ Retry(lsp_server::Request),
+ Diagnostics(Vec<(FileId, Vec<lsp_types::Diagnostic>)>),
+ PrimeCaches(PrimeCachesProgress),
+ FetchWorkspace(ProjectWorkspaceProgress),
+ FetchBuildData(BuildDataProgress),
+}
+
+#[derive(Debug)]
+pub(crate) enum PrimeCachesProgress {
+ Begin,
+ Report(ide::ParallelPrimeCachesProgress),
+ End { cancelled: bool },
+}
+
+impl fmt::Debug for Event {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let debug_verbose_not = |not: &Notification, f: &mut fmt::Formatter<'_>| {
+ f.debug_struct("Notification").field("method", ¬.method).finish()
+ };
+
+ match self {
+ Event::Lsp(lsp_server::Message::Notification(not)) => {
+ if notification_is::<lsp_types::notification::DidOpenTextDocument>(not)
+ || notification_is::<lsp_types::notification::DidChangeTextDocument>(not)
+ {
+ return debug_verbose_not(not, f);
+ }
+ }
+ Event::Task(Task::Response(resp)) => {
+ return f
+ .debug_struct("Response")
+ .field("id", &resp.id)
+ .field("error", &resp.error)
+ .finish();
+ }
+ _ => (),
+ }
+ match self {
+ Event::Lsp(it) => fmt::Debug::fmt(it, f),
+ Event::Task(it) => fmt::Debug::fmt(it, f),
+ Event::Vfs(it) => fmt::Debug::fmt(it, f),
+ Event::Flycheck(it) => fmt::Debug::fmt(it, f),
+ }
+ }
+}
+
+impl GlobalState {
+ fn run(mut self, inbox: Receiver<lsp_server::Message>) -> Result<()> {
+ if self.config.linked_projects().is_empty()
+ && self.config.detached_files().is_empty()
+ && self.config.notifications().cargo_toml_not_found
+ {
+ self.show_and_log_error("rust-analyzer failed to discover workspace".to_string(), None);
+ };
+
+ if self.config.did_save_text_document_dynamic_registration() {
+ let save_registration_options = lsp_types::TextDocumentSaveRegistrationOptions {
+ include_text: Some(false),
+ text_document_registration_options: lsp_types::TextDocumentRegistrationOptions {
+ document_selector: Some(vec![
+ lsp_types::DocumentFilter {
+ language: None,
+ scheme: None,
+ pattern: Some("**/*.rs".into()),
+ },
+ lsp_types::DocumentFilter {
+ language: None,
+ scheme: None,
+ pattern: Some("**/Cargo.toml".into()),
+ },
+ lsp_types::DocumentFilter {
+ language: None,
+ scheme: None,
+ pattern: Some("**/Cargo.lock".into()),
+ },
+ ]),
+ },
+ };
+
+ let registration = lsp_types::Registration {
+ id: "textDocument/didSave".to_string(),
+ method: "textDocument/didSave".to_string(),
+ register_options: Some(serde_json::to_value(save_registration_options).unwrap()),
+ };
+ self.send_request::<lsp_types::request::RegisterCapability>(
+ lsp_types::RegistrationParams { registrations: vec![registration] },
+ |_, _| (),
+ );
+ }
+
+ self.fetch_workspaces_queue.request_op("startup".to_string());
+ if let Some(cause) = self.fetch_workspaces_queue.should_start_op() {
+ self.fetch_workspaces(cause);
+ }
+
+ while let Some(event) = self.next_event(&inbox) {
+ if let Event::Lsp(lsp_server::Message::Notification(not)) = &event {
+ if not.method == lsp_types::notification::Exit::METHOD {
+ return Ok(());
+ }
+ }
+ self.handle_event(event)?
+ }
+
+ Err("client exited without proper shutdown sequence".into())
+ }
+
+ fn next_event(&self, inbox: &Receiver<lsp_server::Message>) -> Option<Event> {
+ select! {
+ recv(inbox) -> msg =>
+ msg.ok().map(Event::Lsp),
+
+ recv(self.task_pool.receiver) -> task =>
+ Some(Event::Task(task.unwrap())),
+
+ recv(self.loader.receiver) -> task =>
+ Some(Event::Vfs(task.unwrap())),
+
+ recv(self.flycheck_receiver) -> task =>
+ Some(Event::Flycheck(task.unwrap())),
+ }
+ }
+
+ fn handle_event(&mut self, event: Event) -> Result<()> {
+ let loop_start = Instant::now();
+ // NOTE: don't count blocking select! call as a loop-turn time
+ let _p = profile::span("GlobalState::handle_event");
+
+ tracing::debug!("{:?} handle_event({:?})", loop_start, event);
+ let task_queue_len = self.task_pool.handle.len();
+ if task_queue_len > 0 {
+ tracing::info!("task queue len: {}", task_queue_len);
+ }
+
+ let was_quiescent = self.is_quiescent();
+ match event {
+ Event::Lsp(msg) => match msg {
+ lsp_server::Message::Request(req) => self.on_new_request(loop_start, req),
+ lsp_server::Message::Notification(not) => {
+ self.on_notification(not)?;
+ }
+ lsp_server::Message::Response(resp) => self.complete_request(resp),
+ },
+ Event::Task(task) => {
+ let _p = profile::span("GlobalState::handle_event/task");
+ let mut prime_caches_progress = Vec::new();
+
+ self.handle_task(&mut prime_caches_progress, task);
+ // Coalesce multiple task events into one loop turn
+ while let Ok(task) = self.task_pool.receiver.try_recv() {
+ self.handle_task(&mut prime_caches_progress, task);
+ }
+
+ for progress in prime_caches_progress {
+ let (state, message, fraction);
+ match progress {
+ PrimeCachesProgress::Begin => {
+ state = Progress::Begin;
+ message = None;
+ fraction = 0.0;
+ }
+ PrimeCachesProgress::Report(report) => {
+ state = Progress::Report;
+
+ message = match &report.crates_currently_indexing[..] {
+ [crate_name] => Some(format!(
+ "{}/{} ({})",
+ report.crates_done, report.crates_total, crate_name
+ )),
+ [crate_name, rest @ ..] => Some(format!(
+ "{}/{} ({} + {} more)",
+ report.crates_done,
+ report.crates_total,
+ crate_name,
+ rest.len()
+ )),
+ _ => None,
+ };
+
+ fraction = Progress::fraction(report.crates_done, report.crates_total);
+ }
+ PrimeCachesProgress::End { cancelled } => {
+ state = Progress::End;
+ message = None;
+ fraction = 1.0;
+
+ self.prime_caches_queue.op_completed(());
+ if cancelled {
+ self.prime_caches_queue
+ .request_op("restart after cancellation".to_string());
+ }
+ }
+ };
+
+ self.report_progress("Indexing", state, message, Some(fraction), None);
+ }
+ }
+ Event::Vfs(message) => {
+ let _p = profile::span("GlobalState::handle_event/vfs");
+ self.handle_vfs_msg(message);
+ // Coalesce many VFS event into a single loop turn
+ while let Ok(message) = self.loader.receiver.try_recv() {
+ self.handle_vfs_msg(message);
+ }
+ }
+ Event::Flycheck(message) => {
+ let _p = profile::span("GlobalState::handle_event/flycheck");
+ self.handle_flycheck_msg(message);
+ // Coalesce many flycheck updates into a single loop turn
+ while let Ok(message) = self.flycheck_receiver.try_recv() {
+ self.handle_flycheck_msg(message);
+ }
+ }
+ }
+
+ let state_changed = self.process_changes();
+ let memdocs_added_or_removed = self.mem_docs.take_changes();
+
+ if self.is_quiescent() {
+ let became_quiescent = !(was_quiescent
+ || self.fetch_workspaces_queue.op_requested()
+ || self.fetch_build_data_queue.op_requested());
+
+ if became_quiescent {
+ // Project has loaded properly, kick off initial flycheck
+ self.flycheck.iter().for_each(FlycheckHandle::restart);
+ if self.config.prefill_caches() {
+ self.prime_caches_queue.request_op("became quiescent".to_string());
+ }
+ }
+
+ if !was_quiescent || state_changed {
+ // Refresh semantic tokens if the client supports it.
+ if self.config.semantic_tokens_refresh() {
+ self.semantic_tokens_cache.lock().clear();
+ self.send_request::<lsp_types::request::SemanticTokensRefresh>((), |_, _| ());
+ }
+
+ // Refresh code lens if the client supports it.
+ if self.config.code_lens_refresh() {
+ self.send_request::<lsp_types::request::CodeLensRefresh>((), |_, _| ());
+ }
+ }
+
+ if !was_quiescent || state_changed || memdocs_added_or_removed {
+ if self.config.publish_diagnostics() {
+ self.update_diagnostics()
+ }
+ }
+ }
+
+ if let Some(diagnostic_changes) = self.diagnostics.take_changes() {
+ for file_id in diagnostic_changes {
+ let db = self.analysis_host.raw_database();
+ let source_root = db.file_source_root(file_id);
+ if db.source_root(source_root).is_library {
+ // Only publish diagnostics for files in the workspace, not from crates.io deps
+ // or the sysroot.
+ // While theoretically these should never have errors, we have quite a few false
+ // positives particularly in the stdlib, and those diagnostics would stay around
+ // forever if we emitted them here.
+ continue;
+ }
+
+ let uri = file_id_to_url(&self.vfs.read().0, file_id);
+ let mut diagnostics =
+ self.diagnostics.diagnostics_for(file_id).cloned().collect::<Vec<_>>();
+
+ // VSCode assumes diagnostic messages to be non-empty strings, so we need to patch
+ // empty diagnostics. Neither the docs of VSCode nor the LSP spec say whether
+ // diagnostic messages are actually allowed to be empty or not and patching this
+ // in the VSCode client does not work as the assertion happens in the protocol
+ // conversion. So this hack is here to stay, and will be considered a hack
+ // until the LSP decides to state that empty messages are allowed.
+
+ // See https://github.com/rust-lang/rust-analyzer/issues/11404
+ // See https://github.com/rust-lang/rust-analyzer/issues/13130
+ let patch_empty = |message: &mut String| {
+ if message.is_empty() {
+ *message = " ".to_string();
+ }
+ };
+
+ for d in &mut diagnostics {
+ patch_empty(&mut d.message);
+ if let Some(dri) = &mut d.related_information {
+ for dri in dri {
+ patch_empty(&mut dri.message);
+ }
+ }
+ }
+
+ let version = from_proto::vfs_path(&uri)
+ .map(|path| self.mem_docs.get(&path).map(|it| it.version))
+ .unwrap_or_default();
+
+ self.send_notification::<lsp_types::notification::PublishDiagnostics>(
+ lsp_types::PublishDiagnosticsParams { uri, diagnostics, version },
+ );
+ }
+ }
+
+ if self.config.cargo_autoreload() {
+ if let Some(cause) = self.fetch_workspaces_queue.should_start_op() {
+ self.fetch_workspaces(cause);
+ }
+ }
+
+ if !self.fetch_workspaces_queue.op_in_progress() {
+ if let Some(cause) = self.fetch_build_data_queue.should_start_op() {
+ self.fetch_build_data(cause);
+ }
+ }
+
+ if let Some(cause) = self.prime_caches_queue.should_start_op() {
+ tracing::debug!(%cause, "will prime caches");
+ let num_worker_threads = self.config.prime_caches_num_threads();
+
+ self.task_pool.handle.spawn_with_sender({
+ let analysis = self.snapshot().analysis;
+ move |sender| {
+ sender.send(Task::PrimeCaches(PrimeCachesProgress::Begin)).unwrap();
+ let res = analysis.parallel_prime_caches(num_worker_threads, |progress| {
+ let report = PrimeCachesProgress::Report(progress);
+ sender.send(Task::PrimeCaches(report)).unwrap();
+ });
+ sender
+ .send(Task::PrimeCaches(PrimeCachesProgress::End {
+ cancelled: res.is_err(),
+ }))
+ .unwrap();
+ }
+ });
+ }
+
+ let status = self.current_status();
+ if self.last_reported_status.as_ref() != Some(&status) {
+ self.last_reported_status = Some(status.clone());
+
+ if let (lsp_ext::Health::Error, Some(message)) = (status.health, &status.message) {
+ self.show_message(lsp_types::MessageType::ERROR, message.clone());
+ }
+
+ if self.config.server_status_notification() {
+ self.send_notification::<lsp_ext::ServerStatusNotification>(status);
+ }
+ }
+
+ let loop_duration = loop_start.elapsed();
+ if loop_duration > Duration::from_millis(100) && was_quiescent {
+ tracing::warn!("overly long loop turn: {:?}", loop_duration);
+ self.poke_rust_analyzer_developer(format!(
+ "overly long loop turn: {:?}",
+ loop_duration
+ ));
+ }
+ Ok(())
+ }
+
+ fn handle_task(&mut self, prime_caches_progress: &mut Vec<PrimeCachesProgress>, task: Task) {
+ match task {
+ Task::Response(response) => self.respond(response),
+ // Only retry requests that haven't been cancelled. Otherwise we do unnecessary work.
+ Task::Retry(req) if !self.is_completed(&req) => self.on_request(req),
+ Task::Retry(_) => (),
+ Task::Diagnostics(diagnostics_per_file) => {
+ for (file_id, diagnostics) in diagnostics_per_file {
+ self.diagnostics.set_native_diagnostics(file_id, diagnostics)
+ }
+ }
+ Task::PrimeCaches(progress) => match progress {
+ PrimeCachesProgress::Begin => prime_caches_progress.push(progress),
+ PrimeCachesProgress::Report(_) => {
+ match prime_caches_progress.last_mut() {
+ Some(last @ PrimeCachesProgress::Report(_)) => {
+ // Coalesce subsequent update events.
+ *last = progress;
+ }
+ _ => prime_caches_progress.push(progress),
+ }
+ }
+ PrimeCachesProgress::End { .. } => prime_caches_progress.push(progress),
+ },
+ Task::FetchWorkspace(progress) => {
+ let (state, msg) = match progress {
+ ProjectWorkspaceProgress::Begin => (Progress::Begin, None),
+ ProjectWorkspaceProgress::Report(msg) => (Progress::Report, Some(msg)),
+ ProjectWorkspaceProgress::End(workspaces) => {
- let mut text = String::from_utf8(vfs.file_contents(file_id).to_vec()).unwrap();
- apply_document_changes(&mut text, params.content_changes);
++ self.fetch_workspaces_queue.op_completed(Some(workspaces));
+
+ let old = Arc::clone(&self.workspaces);
+ self.switch_workspaces("fetched workspace".to_string());
+ let workspaces_updated = !Arc::ptr_eq(&old, &self.workspaces);
+
+ if self.config.run_build_scripts() && workspaces_updated {
+ self.fetch_build_data_queue.request_op(format!("workspace updated"));
+ }
+
+ (Progress::End, None)
+ }
+ };
+
+ self.report_progress("Fetching", state, msg, None, None);
+ }
+ Task::FetchBuildData(progress) => {
+ let (state, msg) = match progress {
+ BuildDataProgress::Begin => (Some(Progress::Begin), None),
+ BuildDataProgress::Report(msg) => (Some(Progress::Report), Some(msg)),
+ BuildDataProgress::End(build_data_result) => {
+ self.fetch_build_data_queue.op_completed(build_data_result);
+
+ self.switch_workspaces("fetched build data".to_string());
+
+ (Some(Progress::End), None)
+ }
+ };
+
+ if let Some(state) = state {
+ self.report_progress("Loading", state, msg, None, None);
+ }
+ }
+ }
+ }
+
+ fn handle_vfs_msg(&mut self, message: vfs::loader::Message) {
+ match message {
+ vfs::loader::Message::Loaded { files } => {
+ let vfs = &mut self.vfs.write().0;
+ for (path, contents) in files {
+ let path = VfsPath::from(path);
+ if !self.mem_docs.contains(&path) {
+ vfs.set_file_contents(path, contents);
+ }
+ }
+ }
+ vfs::loader::Message::Progress { n_total, n_done, config_version } => {
+ always!(config_version <= self.vfs_config_version);
+
+ self.vfs_progress_config_version = config_version;
+ self.vfs_progress_n_total = n_total;
+ self.vfs_progress_n_done = n_done;
+
+ let state = if n_done == 0 {
+ Progress::Begin
+ } else if n_done < n_total {
+ Progress::Report
+ } else {
+ assert_eq!(n_done, n_total);
+ Progress::End
+ };
+ self.report_progress(
+ "Roots Scanned",
+ state,
+ Some(format!("{}/{}", n_done, n_total)),
+ Some(Progress::fraction(n_done, n_total)),
+ None,
+ )
+ }
+ }
+ }
+
+ fn handle_flycheck_msg(&mut self, message: flycheck::Message) {
+ match message {
+ flycheck::Message::AddDiagnostic { id, workspace_root, diagnostic } => {
+ let snap = self.snapshot();
+ let diagnostics = crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp(
+ &self.config.diagnostics_map(),
+ &diagnostic,
+ &workspace_root,
+ &snap,
+ );
+ for diag in diagnostics {
+ match url_to_file_id(&self.vfs.read().0, &diag.url) {
+ Ok(file_id) => self.diagnostics.add_check_diagnostic(
+ id,
+ file_id,
+ diag.diagnostic,
+ diag.fix,
+ ),
+ Err(err) => {
+ tracing::error!(
+ "flycheck {id}: File with cargo diagnostic not found in VFS: {}",
+ err
+ );
+ }
+ };
+ }
+ }
+
+ flycheck::Message::Progress { id, progress } => {
+ let (state, message) = match progress {
+ flycheck::Progress::DidStart => {
+ self.diagnostics.clear_check(id);
+ (Progress::Begin, None)
+ }
+ flycheck::Progress::DidCheckCrate(target) => (Progress::Report, Some(target)),
+ flycheck::Progress::DidCancel => (Progress::End, None),
+ flycheck::Progress::DidFailToRestart(err) => {
+ self.show_and_log_error(
+ "cargo check failed".to_string(),
+ Some(err.to_string()),
+ );
+ return;
+ }
+ flycheck::Progress::DidFinish(result) => {
+ if let Err(err) = result {
+ self.show_and_log_error(
+ "cargo check failed".to_string(),
+ Some(err.to_string()),
+ );
+ }
+ (Progress::End, None)
+ }
+ };
+
+ // When we're running multiple flychecks, we have to include a disambiguator in
+ // the title, or the editor complains. Note that this is a user-facing string.
+ let title = if self.flycheck.len() == 1 {
+ match self.config.flycheck() {
+ Some(config) => format!("{}", config),
+ None => "cargo check".to_string(),
+ }
+ } else {
+ format!("cargo check (#{})", id + 1)
+ };
+ self.report_progress(
+ &title,
+ state,
+ message,
+ None,
+ Some(format!("rust-analyzer/checkOnSave/{}", id)),
+ );
+ }
+ }
+ }
+
+ /// Registers and handles a request. This should only be called once per incoming request.
+ fn on_new_request(&mut self, request_received: Instant, req: Request) {
+ self.register_request(&req, request_received);
+ self.on_request(req);
+ }
+
+ /// Handles a request.
+ fn on_request(&mut self, req: Request) {
+ let mut dispatcher = RequestDispatcher { req: Some(req), global_state: self };
+ dispatcher.on_sync_mut::<lsp_types::request::Shutdown>(|s, ()| {
+ s.shutdown_requested = true;
+ Ok(())
+ });
+
+ if let RequestDispatcher { req: Some(req), global_state: this } = &mut dispatcher {
+ if this.shutdown_requested {
+ this.respond(lsp_server::Response::new_err(
+ req.id.clone(),
+ lsp_server::ErrorCode::InvalidRequest as i32,
+ "Shutdown already requested.".to_owned(),
+ ));
+ return;
+ }
+
+ // Avoid flashing a bunch of unresolved references during initial load.
+ if this.workspaces.is_empty() && !this.is_quiescent() {
+ this.respond(lsp_server::Response::new_err(
+ req.id.clone(),
+ lsp_server::ErrorCode::ContentModified as i32,
+ "waiting for cargo metadata or cargo check".to_owned(),
+ ));
+ return;
+ }
+ }
+
+ dispatcher
+ .on_sync_mut::<lsp_ext::ReloadWorkspace>(handlers::handle_workspace_reload)
+ .on_sync_mut::<lsp_ext::MemoryUsage>(handlers::handle_memory_usage)
+ .on_sync_mut::<lsp_ext::ShuffleCrateGraph>(handlers::handle_shuffle_crate_graph)
+ .on_sync_mut::<lsp_ext::CancelFlycheck>(handlers::handle_cancel_flycheck)
+ .on_sync::<lsp_ext::JoinLines>(handlers::handle_join_lines)
+ .on_sync::<lsp_ext::OnEnter>(handlers::handle_on_enter)
+ .on_sync::<lsp_types::request::SelectionRangeRequest>(handlers::handle_selection_range)
+ .on_sync::<lsp_ext::MatchingBrace>(handlers::handle_matching_brace)
+ .on::<lsp_ext::AnalyzerStatus>(handlers::handle_analyzer_status)
+ .on::<lsp_ext::SyntaxTree>(handlers::handle_syntax_tree)
+ .on::<lsp_ext::ViewHir>(handlers::handle_view_hir)
+ .on::<lsp_ext::ViewFileText>(handlers::handle_view_file_text)
+ .on::<lsp_ext::ViewCrateGraph>(handlers::handle_view_crate_graph)
+ .on::<lsp_ext::ViewItemTree>(handlers::handle_view_item_tree)
+ .on::<lsp_ext::ExpandMacro>(handlers::handle_expand_macro)
+ .on::<lsp_ext::ParentModule>(handlers::handle_parent_module)
+ .on::<lsp_ext::Runnables>(handlers::handle_runnables)
+ .on::<lsp_ext::RelatedTests>(handlers::handle_related_tests)
+ .on::<lsp_ext::CodeActionRequest>(handlers::handle_code_action)
+ .on::<lsp_ext::CodeActionResolveRequest>(handlers::handle_code_action_resolve)
+ .on::<lsp_ext::HoverRequest>(handlers::handle_hover)
+ .on::<lsp_ext::ExternalDocs>(handlers::handle_open_docs)
+ .on::<lsp_ext::OpenCargoToml>(handlers::handle_open_cargo_toml)
+ .on::<lsp_ext::MoveItem>(handlers::handle_move_item)
+ .on::<lsp_ext::WorkspaceSymbol>(handlers::handle_workspace_symbol)
+ .on::<lsp_ext::OnTypeFormatting>(handlers::handle_on_type_formatting)
+ .on::<lsp_types::request::DocumentSymbolRequest>(handlers::handle_document_symbol)
+ .on::<lsp_types::request::GotoDefinition>(handlers::handle_goto_definition)
+ .on::<lsp_types::request::GotoDeclaration>(handlers::handle_goto_declaration)
+ .on::<lsp_types::request::GotoImplementation>(handlers::handle_goto_implementation)
+ .on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition)
+ .on::<lsp_types::request::InlayHintRequest>(handlers::handle_inlay_hints)
+ .on::<lsp_types::request::InlayHintResolveRequest>(handlers::handle_inlay_hints_resolve)
+ .on::<lsp_types::request::Completion>(handlers::handle_completion)
+ .on::<lsp_types::request::ResolveCompletionItem>(handlers::handle_completion_resolve)
+ .on::<lsp_types::request::CodeLensRequest>(handlers::handle_code_lens)
+ .on::<lsp_types::request::CodeLensResolve>(handlers::handle_code_lens_resolve)
+ .on::<lsp_types::request::FoldingRangeRequest>(handlers::handle_folding_range)
+ .on::<lsp_types::request::SignatureHelpRequest>(handlers::handle_signature_help)
+ .on::<lsp_types::request::PrepareRenameRequest>(handlers::handle_prepare_rename)
+ .on::<lsp_types::request::Rename>(handlers::handle_rename)
+ .on::<lsp_types::request::References>(handlers::handle_references)
+ .on::<lsp_types::request::Formatting>(handlers::handle_formatting)
+ .on::<lsp_types::request::RangeFormatting>(handlers::handle_range_formatting)
+ .on::<lsp_types::request::DocumentHighlightRequest>(handlers::handle_document_highlight)
+ .on::<lsp_types::request::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare)
+ .on::<lsp_types::request::CallHierarchyIncomingCalls>(
+ handlers::handle_call_hierarchy_incoming,
+ )
+ .on::<lsp_types::request::CallHierarchyOutgoingCalls>(
+ handlers::handle_call_hierarchy_outgoing,
+ )
+ .on::<lsp_types::request::SemanticTokensFullRequest>(
+ handlers::handle_semantic_tokens_full,
+ )
+ .on::<lsp_types::request::SemanticTokensFullDeltaRequest>(
+ handlers::handle_semantic_tokens_full_delta,
+ )
+ .on::<lsp_types::request::SemanticTokensRangeRequest>(
+ handlers::handle_semantic_tokens_range,
+ )
+ .on::<lsp_types::request::WillRenameFiles>(handlers::handle_will_rename_files)
+ .on::<lsp_ext::Ssr>(handlers::handle_ssr)
+ .finish();
+ }
+
+ /// Handles an incoming notification.
+ fn on_notification(&mut self, not: Notification) -> Result<()> {
+ NotificationDispatcher { not: Some(not), global_state: self }
+ .on::<lsp_types::notification::Cancel>(|this, params| {
+ let id: lsp_server::RequestId = match params.id {
+ lsp_types::NumberOrString::Number(id) => id.into(),
+ lsp_types::NumberOrString::String(id) => id.into(),
+ };
+ this.cancel(id);
+ Ok(())
+ })?
+ .on::<lsp_types::notification::WorkDoneProgressCancel>(|this, params| {
+ if let lsp_types::NumberOrString::String(s) = ¶ms.token {
+ if let Some(id) = s.strip_prefix("rust-analyzer/checkOnSave/") {
+ if let Ok(id) = u32::from_str_radix(id, 10) {
+ if let Some(flycheck) = this.flycheck.get(id as usize) {
+ flycheck.cancel();
+ }
+ }
+ }
+ }
+ // Just ignore this. It is OK to continue sending progress
+ // notifications for this token, as the client can't know when
+ // we accepted notification.
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidOpenTextDocument>(|this, params| {
+ if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) {
+ let already_exists = this
+ .mem_docs
+ .insert(path.clone(), DocumentData::new(params.text_document.version))
+ .is_err();
+ if already_exists {
+ tracing::error!("duplicate DidOpenTextDocument: {}", path);
+ }
+ this.vfs
+ .write()
+ .0
+ .set_file_contents(path, Some(params.text_document.text.into_bytes()));
+ }
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidChangeTextDocument>(|this, params| {
+ if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) {
+ match this.mem_docs.get_mut(&path) {
+ Some(doc) => {
+ // The version passed in DidChangeTextDocument is the version after all edits are applied
+ // so we should apply it before the vfs is notified.
+ doc.version = params.text_document.version;
+ }
+ None => {
+ tracing::error!("unexpected DidChangeTextDocument: {}", path);
+ return Ok(());
+ }
+ };
+
+ let vfs = &mut this.vfs.write().0;
+ let file_id = vfs.file_id(&path).unwrap();
++ let text = apply_document_changes(
++ || std::str::from_utf8(vfs.file_contents(file_id)).unwrap().into(),
++ params.content_changes,
++ );
+
+ vfs.set_file_contents(path, Some(text.into_bytes()));
+ }
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidCloseTextDocument>(|this, params| {
+ if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) {
+ if this.mem_docs.remove(&path).is_err() {
+ tracing::error!("orphan DidCloseTextDocument: {}", path);
+ }
+
+ this.semantic_tokens_cache.lock().remove(¶ms.text_document.uri);
+
+ if let Some(path) = path.as_path() {
+ this.loader.handle.invalidate(path.to_path_buf());
+ }
+ }
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidSaveTextDocument>(|this, params| {
+ if let Ok(vfs_path) = from_proto::vfs_path(¶ms.text_document.uri) {
+ // Re-fetch workspaces if a workspace related file has changed
+ if let Some(abs_path) = vfs_path.as_path() {
+ if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) {
+ this.fetch_workspaces_queue
+ .request_op(format!("DidSaveTextDocument {}", abs_path.display()));
+ }
+ }
+
+ let file_id = this.vfs.read().0.file_id(&vfs_path);
+ if let Some(file_id) = file_id {
+ let world = this.snapshot();
+ let mut updated = false;
+ let task = move || -> std::result::Result<(), ide::Cancelled> {
+ // Trigger flychecks for all workspaces that depend on the saved file
+ // Crates containing or depending on the saved file
+ let crate_ids: Vec<_> = world
+ .analysis
+ .crates_for(file_id)?
+ .into_iter()
+ .flat_map(|id| world.analysis.transitive_rev_deps(id))
+ .flatten()
+ .sorted()
+ .unique()
+ .collect();
+
+ let crate_root_paths: Vec<_> = crate_ids
+ .iter()
+ .filter_map(|&crate_id| {
+ world
+ .analysis
+ .crate_root(crate_id)
+ .map(|file_id| {
+ world
+ .file_id_to_file_path(file_id)
+ .as_path()
+ .map(ToOwned::to_owned)
+ })
+ .transpose()
+ })
+ .collect::<ide::Cancellable<_>>()?;
+ let crate_root_paths: Vec<_> =
+ crate_root_paths.iter().map(Deref::deref).collect();
+
+ // Find all workspaces that have at least one target containing the saved file
+ let workspace_ids =
+ world.workspaces.iter().enumerate().filter(|(_, ws)| match ws {
+ project_model::ProjectWorkspace::Cargo { cargo, .. } => {
+ cargo.packages().any(|pkg| {
+ cargo[pkg].targets.iter().any(|&it| {
+ crate_root_paths.contains(&cargo[it].root.as_path())
+ })
+ })
+ }
+ project_model::ProjectWorkspace::Json { project, .. } => {
+ project.crates().any(|(c, _)| {
+ crate_ids.iter().any(|&crate_id| crate_id == c)
+ })
+ }
+ project_model::ProjectWorkspace::DetachedFiles { .. } => false,
+ });
+
+ // Find and trigger corresponding flychecks
+ for flycheck in world.flycheck.iter() {
+ for (id, _) in workspace_ids.clone() {
+ if id == flycheck.id() {
+ updated = true;
+ flycheck.restart();
+ continue;
+ }
+ }
+ }
+ // No specific flycheck was triggered, so let's trigger all of them.
+ if !updated {
+ for flycheck in world.flycheck.iter() {
+ flycheck.restart();
+ }
+ }
+ Ok(())
+ };
+ this.task_pool.handle.spawn_with_sender(move |_| {
+ if let Err(e) = std::panic::catch_unwind(task) {
+ tracing::error!("DidSaveTextDocument flycheck task panicked: {e:?}")
+ }
+ });
+ return Ok(());
+ }
+ }
+
+ // No specific flycheck was triggered, so let's trigger all of them.
+ for flycheck in this.flycheck.iter() {
+ flycheck.restart();
+ }
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidChangeConfiguration>(|this, _params| {
+ // As stated in https://github.com/microsoft/language-server-protocol/issues/676,
+ // this notification's parameters should be ignored and the actual config queried separately.
+ this.send_request::<lsp_types::request::WorkspaceConfiguration>(
+ lsp_types::ConfigurationParams {
+ items: vec![lsp_types::ConfigurationItem {
+ scope_uri: None,
+ section: Some("rust-analyzer".to_string()),
+ }],
+ },
+ |this, resp| {
+ tracing::debug!("config update response: '{:?}'", resp);
+ let lsp_server::Response { error, result, .. } = resp;
+
+ match (error, result) {
+ (Some(err), _) => {
+ tracing::error!("failed to fetch the server settings: {:?}", err)
+ }
+ (None, Some(mut configs)) => {
+ if let Some(json) = configs.get_mut(0) {
+ // Note that json can be null according to the spec if the client can't
+ // provide a configuration. This is handled in Config::update below.
+ let mut config = Config::clone(&*this.config);
+ if let Err(error) = config.update(json.take()) {
+ this.show_message(
+ lsp_types::MessageType::WARNING,
+ error.to_string(),
+ );
+ }
+ this.update_configuration(config);
+ }
+ }
+ (None, None) => tracing::error!(
+ "received empty server settings response from the client"
+ ),
+ }
+ },
+ );
+
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidChangeWatchedFiles>(|this, params| {
+ for change in params.changes {
+ if let Ok(path) = from_proto::abs_path(&change.uri) {
+ this.loader.handle.invalidate(path);
+ }
+ }
+ Ok(())
+ })?
+ .finish();
+ Ok(())
+ }
+
+ fn update_diagnostics(&mut self) {
+ let subscriptions = self
+ .mem_docs
+ .iter()
+ .map(|path| self.vfs.read().0.file_id(path).unwrap())
+ .collect::<Vec<_>>();
+
+ tracing::trace!("updating notifications for {:?}", subscriptions);
+
+ let snapshot = self.snapshot();
+ self.task_pool.handle.spawn(move || {
+ let diagnostics = subscriptions
+ .into_iter()
+ .filter_map(|file_id| {
+ handlers::publish_diagnostics(&snapshot, file_id)
+ .ok()
+ .map(|diags| (file_id, diags))
+ })
+ .collect::<Vec<_>>();
+ Task::Diagnostics(diagnostics)
+ })
+ }
+}
--- /dev/null
- /// For these document, there true contents is maintained by the client. It
+//! In-memory document information.
+
+use std::mem;
+
+use rustc_hash::FxHashMap;
+use vfs::VfsPath;
+
+/// Holds the set of in-memory documents.
+///
++/// For these documents, their true contents are maintained by the client. It
+/// might be different from what's on disk.
+#[derive(Default, Clone)]
+pub(crate) struct MemDocs {
+ mem_docs: FxHashMap<VfsPath, DocumentData>,
+ added_or_removed: bool,
+}
+
+impl MemDocs {
+ pub(crate) fn contains(&self, path: &VfsPath) -> bool {
+ self.mem_docs.contains_key(path)
+ }
++
+ pub(crate) fn insert(&mut self, path: VfsPath, data: DocumentData) -> Result<(), ()> {
+ self.added_or_removed = true;
+ match self.mem_docs.insert(path, data) {
+ Some(_) => Err(()),
+ None => Ok(()),
+ }
+ }
++
+ pub(crate) fn remove(&mut self, path: &VfsPath) -> Result<(), ()> {
+ self.added_or_removed = true;
+ match self.mem_docs.remove(path) {
+ Some(_) => Ok(()),
+ None => Err(()),
+ }
+ }
++
+ pub(crate) fn get(&self, path: &VfsPath) -> Option<&DocumentData> {
+ self.mem_docs.get(path)
+ }
++
+ pub(crate) fn get_mut(&mut self, path: &VfsPath) -> Option<&mut DocumentData> {
+ // NB: don't set `self.added_or_removed` here, as that purposefully only
+ // tracks changes to the key set.
+ self.mem_docs.get_mut(path)
+ }
++
+ pub(crate) fn iter(&self) -> impl Iterator<Item = &VfsPath> {
+ self.mem_docs.keys()
+ }
++
+ pub(crate) fn take_changes(&mut self) -> bool {
+ mem::replace(&mut self.added_or_removed, false)
+ }
+}
+
+/// Information about a document that the Language Client
+/// knows about.
+/// Its lifetime is driven by the textDocument/didOpen and textDocument/didClose
+/// client notifications.
+#[derive(Debug, Clone)]
+pub(crate) struct DocumentData {
+ pub(crate) version: i32,
+}
+
+impl DocumentData {
+ pub(crate) fn new(version: i32) -> Self {
+ DocumentData { version }
+ }
+}
--- /dev/null
- let workspaces = self
- .fetch_workspaces_queue
- .last_op_result()
- .iter()
- .filter_map(|res| res.as_ref().ok().cloned())
- .collect::<Vec<_>>();
+//! Project loading & configuration updates.
+//!
+//! This is quite tricky. The main problem is time and changes -- there's no
+//! fixed "project" rust-analyzer is working with, "current project" is itself
+//! mutable state. For example, when the user edits `Cargo.toml` by adding a new
+//! dependency, project model changes. What's more, switching project model is
+//! not instantaneous -- it takes time to run `cargo metadata` and (for proc
+//! macros) `cargo check`.
+//!
+//! The main guiding principle here is, as elsewhere in rust-analyzer,
+//! robustness. We try not to assume that the project model exists or is
+//! correct. Instead, we try to provide a best-effort service. Even if the
+//! project is currently loading and we don't have a full project model, we
+//! still want to respond to various requests.
+use std::{mem, sync::Arc};
+
+use flycheck::{FlycheckConfig, FlycheckHandle};
+use hir::db::DefDatabase;
+use ide::Change;
+use ide_db::base_db::{
+ CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
+ ProcMacroLoadResult, SourceRoot, VfsPath,
+};
+use proc_macro_api::{MacroDylib, ProcMacroServer};
+use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
+use syntax::SmolStr;
+use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};
+
+use crate::{
+ config::{Config, FilesWatcher, LinkedProject},
+ global_state::GlobalState,
+ lsp_ext,
+ main_loop::Task,
+ op_queue::Cause,
+};
+
+#[derive(Debug)]
+pub(crate) enum ProjectWorkspaceProgress {
+ Begin,
+ Report(String),
+ End(Vec<anyhow::Result<ProjectWorkspace>>),
+}
+
+#[derive(Debug)]
+pub(crate) enum BuildDataProgress {
+ Begin,
+ Report(String),
+ End((Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)),
+}
+
+impl GlobalState {
+ pub(crate) fn is_quiescent(&self) -> bool {
+ !(self.fetch_workspaces_queue.op_in_progress()
+ || self.fetch_build_data_queue.op_in_progress()
+ || self.vfs_progress_config_version < self.vfs_config_version
+ || self.vfs_progress_n_done < self.vfs_progress_n_total)
+ }
+
+ pub(crate) fn update_configuration(&mut self, config: Config) {
+ let _p = profile::span("GlobalState::update_configuration");
+ let old_config = mem::replace(&mut self.config, Arc::new(config));
+ if self.config.lru_capacity() != old_config.lru_capacity() {
+ self.analysis_host.update_lru_capacity(self.config.lru_capacity());
+ }
+ if self.config.linked_projects() != old_config.linked_projects() {
+ self.fetch_workspaces_queue.request_op("linked projects changed".to_string())
+ } else if self.config.flycheck() != old_config.flycheck() {
+ self.reload_flycheck();
+ }
+
+ if self.analysis_host.raw_database().enable_proc_attr_macros()
+ != self.config.expand_proc_attr_macros()
+ {
+ self.analysis_host
+ .raw_database_mut()
+ .set_enable_proc_attr_macros(self.config.expand_proc_attr_macros());
+ }
+ }
+
+ pub(crate) fn current_status(&self) -> lsp_ext::ServerStatusParams {
+ let mut status = lsp_ext::ServerStatusParams {
+ health: lsp_ext::Health::Ok,
+ quiescent: self.is_quiescent(),
+ message: None,
+ };
+
+ if self.proc_macro_changed {
+ status.health = lsp_ext::Health::Warning;
+ status.message =
+ Some("Reload required due to source changes of a procedural macro.".into())
+ }
+ if let Err(_) = self.fetch_build_data_error() {
+ status.health = lsp_ext::Health::Warning;
+ status.message =
+ Some("Failed to run build scripts of some packages, check the logs.".to_string());
+ }
+ if !self.config.cargo_autoreload()
+ && self.is_quiescent()
+ && self.fetch_workspaces_queue.op_requested()
+ {
+ status.health = lsp_ext::Health::Warning;
+ status.message = Some("Workspace reload required".to_string())
+ }
+
+ if let Err(error) = self.fetch_workspace_error() {
+ status.health = lsp_ext::Health::Error;
+ status.message = Some(error)
+ }
++
++ if self.config.linked_projects().is_empty()
++ && self.config.detached_files().is_empty()
++ && self.config.notifications().cargo_toml_not_found
++ {
++ status.health = lsp_ext::Health::Warning;
++ status.message = Some("Workspace reload required".to_string())
++ }
+ status
+ }
+
+ pub(crate) fn fetch_workspaces(&mut self, cause: Cause) {
+ tracing::info!(%cause, "will fetch workspaces");
+
+ self.task_pool.handle.spawn_with_sender({
+ let linked_projects = self.config.linked_projects();
+ let detached_files = self.config.detached_files().to_vec();
+ let cargo_config = self.config.cargo();
+
+ move |sender| {
+ let progress = {
+ let sender = sender.clone();
+ move |msg| {
+ sender
+ .send(Task::FetchWorkspace(ProjectWorkspaceProgress::Report(msg)))
+ .unwrap()
+ }
+ };
+
+ sender.send(Task::FetchWorkspace(ProjectWorkspaceProgress::Begin)).unwrap();
+
+ let mut workspaces = linked_projects
+ .iter()
+ .map(|project| match project {
+ LinkedProject::ProjectManifest(manifest) => {
+ project_model::ProjectWorkspace::load(
+ manifest.clone(),
+ &cargo_config,
+ &progress,
+ )
+ }
+ LinkedProject::InlineJsonProject(it) => {
+ project_model::ProjectWorkspace::load_inline(
+ it.clone(),
+ cargo_config.target.as_deref(),
+ &cargo_config.extra_env,
+ )
+ }
+ })
+ .collect::<Vec<_>>();
+
+ if !detached_files.is_empty() {
+ workspaces
+ .push(project_model::ProjectWorkspace::load_detached_files(detached_files));
+ }
+
+ tracing::info!("did fetch workspaces {:?}", workspaces);
+ sender
+ .send(Task::FetchWorkspace(ProjectWorkspaceProgress::End(workspaces)))
+ .unwrap();
+ }
+ });
+ }
+
+ pub(crate) fn fetch_build_data(&mut self, cause: Cause) {
+ tracing::info!(%cause, "will fetch build data");
+ let workspaces = Arc::clone(&self.workspaces);
+ let config = self.config.cargo();
+ self.task_pool.handle.spawn_with_sender(move |sender| {
+ sender.send(Task::FetchBuildData(BuildDataProgress::Begin)).unwrap();
+
+ let progress = {
+ let sender = sender.clone();
+ move |msg| {
+ sender.send(Task::FetchBuildData(BuildDataProgress::Report(msg))).unwrap()
+ }
+ };
+ let res = ProjectWorkspace::run_all_build_scripts(&workspaces, &config, &progress);
+
+ sender.send(Task::FetchBuildData(BuildDataProgress::End((workspaces, res)))).unwrap();
+ });
+ }
+
+ pub(crate) fn switch_workspaces(&mut self, cause: Cause) {
+ let _p = profile::span("GlobalState::switch_workspaces");
+ tracing::info!(%cause, "will switch workspaces");
+
+ if let Err(error_message) = self.fetch_workspace_error() {
+ self.show_and_log_error(error_message, None);
+ if !self.workspaces.is_empty() {
+ // It only makes sense to switch to a partially broken workspace
+ // if we don't have any workspace at all yet.
+ return;
+ }
+ }
+
+ if let Err(error) = self.fetch_build_data_error() {
+ self.show_and_log_error("failed to run build scripts".to_string(), Some(error));
+ }
+
- let standalone_server_name =
- format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX);
-
++ let Some(workspaces) = self.fetch_workspaces_queue.last_op_result() else { return; };
++ let workspaces =
++ workspaces.iter().filter_map(|res| res.as_ref().ok().cloned()).collect::<Vec<_>>();
+
+ fn eq_ignore_build_data<'a>(
+ left: &'a ProjectWorkspace,
+ right: &'a ProjectWorkspace,
+ ) -> bool {
+ let key = |p: &'a ProjectWorkspace| match p {
+ ProjectWorkspace::Cargo {
+ cargo,
+ sysroot,
+ rustc,
+ rustc_cfg,
+ cfg_overrides,
+
+ build_scripts: _,
+ toolchain: _,
+ } => Some((cargo, sysroot, rustc, rustc_cfg, cfg_overrides)),
+ _ => None,
+ };
+ match (key(left), key(right)) {
+ (Some(lk), Some(rk)) => lk == rk,
+ _ => left == right,
+ }
+ }
+
+ let same_workspaces = workspaces.len() == self.workspaces.len()
+ && workspaces
+ .iter()
+ .zip(self.workspaces.iter())
+ .all(|(l, r)| eq_ignore_build_data(l, r));
+
+ if same_workspaces {
+ let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result();
+ if Arc::ptr_eq(workspaces, &self.workspaces) {
+ tracing::debug!("set build scripts to workspaces");
+
+ let workspaces = workspaces
+ .iter()
+ .cloned()
+ .zip(build_scripts)
+ .map(|(mut ws, bs)| {
+ ws.set_build_scripts(bs.as_ref().ok().cloned().unwrap_or_default());
+ ws
+ })
+ .collect::<Vec<_>>();
+
+ // Workspaces are the same, but we've updated build data.
+ self.workspaces = Arc::new(workspaces);
+ } else {
+ tracing::info!("build scripts do not match the version of the active workspace");
+ // Current build scripts do not match the version of the active
+ // workspace, so there's nothing for us to update.
+ return;
+ }
+ } else {
+ tracing::debug!("abandon build scripts for workspaces");
+
+ // Here, we completely changed the workspace (Cargo.toml edit), so
+ // we don't care about build-script results, they are stale.
+ self.workspaces = Arc::new(workspaces)
+ }
+
+ if let FilesWatcher::Client = self.config.files().watcher {
+ let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
+ watchers: self
+ .workspaces
+ .iter()
+ .flat_map(|ws| ws.to_roots())
+ .filter(|it| it.is_local)
+ .flat_map(|root| {
+ root.include.into_iter().flat_map(|it| {
+ [
+ format!("{}/**/*.rs", it.display()),
+ format!("{}/**/Cargo.toml", it.display()),
+ format!("{}/**/Cargo.lock", it.display()),
+ ]
+ })
+ })
+ .map(|glob_pattern| lsp_types::FileSystemWatcher { glob_pattern, kind: None })
+ .collect(),
+ };
+ let registration = lsp_types::Registration {
+ id: "workspace/didChangeWatchedFiles".to_string(),
+ method: "workspace/didChangeWatchedFiles".to_string(),
+ register_options: Some(serde_json::to_value(registration_options).unwrap()),
+ };
+ self.send_request::<lsp_types::request::RegisterCapability>(
+ lsp_types::RegistrationParams { registrations: vec![registration] },
+ |_, _| (),
+ );
+ }
+
+ let mut change = Change::new();
+
+ let files_config = self.config.files();
+ let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude);
+
- let (path, args) = if path_manually_set {
+ if self.proc_macro_clients.is_empty() {
+ if let Some((path, path_manually_set)) = self.config.proc_macro_srv() {
+ tracing::info!("Spawning proc-macro servers");
+ self.proc_macro_clients = self
+ .workspaces
+ .iter()
+ .map(|ws| {
- (path.clone(), vec![])
++ let (path, args): (_, &[_]) = if path_manually_set {
+ tracing::debug!(
+ "Proc-macro server path explicitly set: {}",
+ path.display()
+ );
- let mut sysroot_server = None;
- if let ProjectWorkspace::Cargo { sysroot, .. }
- | ProjectWorkspace::Json { sysroot, .. } = ws
- {
- if let Some(sysroot) = sysroot.as_ref() {
- let server_path = sysroot
- .root()
- .join("libexec")
- .join(&standalone_server_name);
- if std::fs::metadata(&server_path).is_ok() {
- tracing::debug!(
- "Sysroot proc-macro server exists at {}",
- server_path.display()
- );
- sysroot_server = Some(server_path);
- } else {
- tracing::debug!(
- "Sysroot proc-macro server does not exist at {}",
- server_path.display()
- );
- }
- }
++ (path.clone(), &[])
+ } else {
- sysroot_server.map_or_else(
- || (path.clone(), vec!["proc-macro".to_owned()]),
- |path| (path, vec![]),
- )
++ match ws.find_sysroot_proc_macro_srv() {
++ Some(server_path) => (server_path, &[]),
++ None => (path.clone(), &["proc-macro"]),
+ }
- for ws in self.fetch_workspaces_queue.last_op_result() {
- if let Err(err) = ws {
- stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err);
+ };
+
+ tracing::info!(?args, "Using proc-macro server at {}", path.display(),);
+ ProcMacroServer::spawn(path.clone(), args).map_err(|err| {
+ let error = format!(
+ "Failed to run proc-macro server from path {}, error: {:?}",
+ path.display(),
+ err
+ );
+ tracing::error!(error);
+ error
+ })
+ })
+ .collect()
+ };
+ }
+
+ let watch = match files_config.watcher {
+ FilesWatcher::Client => vec![],
+ FilesWatcher::Server => project_folders.watch,
+ };
+ self.vfs_config_version += 1;
+ self.loader.handle.set_config(vfs::loader::Config {
+ load: project_folders.load,
+ watch,
+ version: self.vfs_config_version,
+ });
+
+ // Create crate graph from all the workspaces
+ let crate_graph = {
+ let dummy_replacements = self.config.dummy_replacements();
+
+ let vfs = &mut self.vfs.write().0;
+ let loader = &mut self.loader;
+ let mem_docs = &self.mem_docs;
+ let mut load = move |path: &AbsPath| {
+ let _p = profile::span("GlobalState::load");
+ let vfs_path = vfs::VfsPath::from(path.to_path_buf());
+ if !mem_docs.contains(&vfs_path) {
+ let contents = loader.handle.load_sync(path);
+ vfs.set_file_contents(vfs_path.clone(), contents);
+ }
+ let res = vfs.file_id(&vfs_path);
+ if res.is_none() {
+ tracing::warn!("failed to load {}", path.display())
+ }
+ res
+ };
+
+ let mut crate_graph = CrateGraph::default();
+ for (idx, ws) in self.workspaces.iter().enumerate() {
+ let proc_macro_client = match self.proc_macro_clients.get(idx) {
+ Some(res) => res.as_ref().map_err(|e| &**e),
+ None => Err("Proc macros are disabled"),
+ };
+ let mut load_proc_macro = move |crate_name: &str, path: &AbsPath| {
+ load_proc_macro(
+ proc_macro_client,
+ path,
+ dummy_replacements.get(crate_name).map(|v| &**v).unwrap_or_default(),
+ )
+ };
+ crate_graph.extend(ws.to_crate_graph(
+ &mut load_proc_macro,
+ &mut load,
+ &self.config.cargo().extra_env,
+ ));
+ }
+ crate_graph
+ };
+ change.set_crate_graph(crate_graph);
+
+ self.source_root_config = project_folders.source_root_config;
+
+ self.analysis_host.apply_change(change);
+ self.process_changes();
+ self.reload_flycheck();
+ tracing::info!("did switch workspaces");
+ }
+
+ fn fetch_workspace_error(&self) -> Result<(), String> {
+ let mut buf = String::new();
+
++ let Some(last_op_result) = self.fetch_workspaces_queue.last_op_result() else { return Ok(()) };
++ if last_op_result.is_empty() {
++ stdx::format_to!(buf, "rust-analyzer failed to discover workspace");
++ } else {
++ for ws in last_op_result {
++ if let Err(err) = ws {
++ stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err);
++ }
+ }
+ }
+
+ if buf.is_empty() {
+ return Ok(());
+ }
+
+ Err(buf)
+ }
+
+ fn fetch_build_data_error(&self) -> Result<(), String> {
+ let mut buf = String::new();
+
+ for ws in &self.fetch_build_data_queue.last_op_result().1 {
+ match ws {
+ Ok(data) => match data.error() {
+ Some(stderr) => stdx::format_to!(buf, "{:#}\n", stderr),
+ _ => (),
+ },
+ // io errors
+ Err(err) => stdx::format_to!(buf, "{:#}\n", err),
+ }
+ }
+
+ if buf.is_empty() {
+ Ok(())
+ } else {
+ Err(buf)
+ }
+ }
+
+ fn reload_flycheck(&mut self) {
+ let _p = profile::span("GlobalState::reload_flycheck");
+ let config = match self.config.flycheck() {
+ Some(it) => it,
+ None => {
+ self.flycheck = Arc::new([]);
+ self.diagnostics.clear_check_all();
+ return;
+ }
+ };
+
+ let sender = self.flycheck_sender.clone();
+ let invocation_strategy = match config {
+ FlycheckConfig::CargoCommand { .. } => flycheck::InvocationStrategy::PerWorkspace,
+ FlycheckConfig::CustomCommand { invocation_strategy, .. } => invocation_strategy,
+ };
+
+ self.flycheck = match invocation_strategy {
+ flycheck::InvocationStrategy::Once => vec![FlycheckHandle::spawn(
+ 0,
+ Box::new(move |msg| sender.send(msg).unwrap()),
+ config.clone(),
+ self.config.root_path().clone(),
+ )],
+ flycheck::InvocationStrategy::PerWorkspace => {
+ self.workspaces
+ .iter()
+ .enumerate()
+ .filter_map(|(id, w)| match w {
+ ProjectWorkspace::Cargo { cargo, .. } => Some((id, cargo.workspace_root())),
+ ProjectWorkspace::Json { project, .. } => {
+ // Enable flychecks for json projects if a custom flycheck command was supplied
+ // in the workspace configuration.
+ match config {
+ FlycheckConfig::CustomCommand { .. } => Some((id, project.path())),
+ _ => None,
+ }
+ }
+ ProjectWorkspace::DetachedFiles { .. } => None,
+ })
+ .map(|(id, root)| {
+ let sender = sender.clone();
+ FlycheckHandle::spawn(
+ id,
+ Box::new(move |msg| sender.send(msg).unwrap()),
+ config.clone(),
+ root.to_path_buf(),
+ )
+ })
+ .collect()
+ }
+ }
+ .into();
+ }
+}
+
+#[derive(Default)]
+pub(crate) struct ProjectFolders {
+ pub(crate) load: Vec<vfs::loader::Entry>,
+ pub(crate) watch: Vec<usize>,
+ pub(crate) source_root_config: SourceRootConfig,
+}
+
+impl ProjectFolders {
+ pub(crate) fn new(
+ workspaces: &[ProjectWorkspace],
+ global_excludes: &[AbsPathBuf],
+ ) -> ProjectFolders {
+ let mut res = ProjectFolders::default();
+ let mut fsc = FileSetConfig::builder();
+ let mut local_filesets = vec![];
+
+ for root in workspaces.iter().flat_map(|ws| ws.to_roots()) {
+ let file_set_roots: Vec<VfsPath> =
+ root.include.iter().cloned().map(VfsPath::from).collect();
+
+ let entry = {
+ let mut dirs = vfs::loader::Directories::default();
+ dirs.extensions.push("rs".into());
+ dirs.include.extend(root.include);
+ dirs.exclude.extend(root.exclude);
+ for excl in global_excludes {
+ if dirs
+ .include
+ .iter()
+ .any(|incl| incl.starts_with(excl) || excl.starts_with(incl))
+ {
+ dirs.exclude.push(excl.clone());
+ }
+ }
+
+ vfs::loader::Entry::Directories(dirs)
+ };
+
+ if root.is_local {
+ res.watch.push(res.load.len());
+ }
+ res.load.push(entry);
+
+ if root.is_local {
+ local_filesets.push(fsc.len());
+ }
+ fsc.add_file_set(file_set_roots)
+ }
+
+ let fsc = fsc.build();
+ res.source_root_config = SourceRootConfig { fsc, local_filesets };
+
+ res
+ }
+}
+
+#[derive(Default, Debug)]
+pub(crate) struct SourceRootConfig {
+ pub(crate) fsc: FileSetConfig,
+ pub(crate) local_filesets: Vec<usize>,
+}
+
+impl SourceRootConfig {
+ pub(crate) fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> {
+ let _p = profile::span("SourceRootConfig::partition");
+ self.fsc
+ .partition(vfs)
+ .into_iter()
+ .enumerate()
+ .map(|(idx, file_set)| {
+ let is_local = self.local_filesets.contains(&idx);
+ if is_local {
+ SourceRoot::new_local(file_set)
+ } else {
+ SourceRoot::new_library(file_set)
+ }
+ })
+ .collect()
+ }
+}
+
+/// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace`
+/// with an identity dummy expander.
+pub(crate) fn load_proc_macro(
+ server: Result<&ProcMacroServer, &str>,
+ path: &AbsPath,
+ dummy_replace: &[Box<str>],
+) -> ProcMacroLoadResult {
+ let res: Result<Vec<_>, String> = (|| {
+ let dylib = MacroDylib::new(path.to_path_buf())
+ .map_err(|io| format!("Proc-macro dylib loading failed: {io}"))?;
+ let server = server.map_err(ToOwned::to_owned)?;
+ let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
+ if vec.is_empty() {
+ return Err("proc macro library returned no proc macros".to_string());
+ }
+ Ok(vec
+ .into_iter()
+ .map(|expander| expander_to_proc_macro(expander, dummy_replace))
+ .collect())
+ })();
+ return match res {
+ Ok(proc_macros) => {
+ tracing::info!(
+ "Loaded proc-macros for {}: {:?}",
+ path.display(),
+ proc_macros.iter().map(|it| it.name.clone()).collect::<Vec<_>>()
+ );
+ Ok(proc_macros)
+ }
+ Err(e) => {
+ tracing::warn!("proc-macro loading for {} failed: {e}", path.display());
+ Err(e)
+ }
+ };
+
+ fn expander_to_proc_macro(
+ expander: proc_macro_api::ProcMacro,
+ dummy_replace: &[Box<str>],
+ ) -> ProcMacro {
+ let name = SmolStr::from(expander.name());
+ let kind = match expander.kind() {
+ proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive,
+ proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike,
+ proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr,
+ };
+ let expander: Arc<dyn ProcMacroExpander> =
+ if dummy_replace.iter().any(|replace| &**replace == name) {
+ match kind {
+ ProcMacroKind::Attr => Arc::new(IdentityExpander),
+ _ => Arc::new(EmptyExpander),
+ }
+ } else {
+ Arc::new(Expander(expander))
+ };
+ ProcMacro { name, kind, expander }
+ }
+
+ #[derive(Debug)]
+ struct Expander(proc_macro_api::ProcMacro);
+
+ impl ProcMacroExpander for Expander {
+ fn expand(
+ &self,
+ subtree: &tt::Subtree,
+ attrs: Option<&tt::Subtree>,
+ env: &Env,
+ ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+ let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
+ match self.0.expand(subtree, attrs, env) {
+ Ok(Ok(subtree)) => Ok(subtree),
+ Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
+ Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
+ }
+ }
+ }
+
+ /// Dummy identity expander, used for attribute proc-macros that are deliberately ignored by the user.
+ #[derive(Debug)]
+ struct IdentityExpander;
+
+ impl ProcMacroExpander for IdentityExpander {
+ fn expand(
+ &self,
+ subtree: &tt::Subtree,
+ _: Option<&tt::Subtree>,
+ _: &Env,
+ ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+ Ok(subtree.clone())
+ }
+ }
+
+ /// Empty expander, used for proc-macros that are deliberately ignored by the user.
+ #[derive(Debug)]
+ struct EmptyExpander;
+
+ impl ProcMacroExpander for EmptyExpander {
+ fn expand(
+ &self,
+ _: &tt::Subtree,
+ _: Option<&tt::Subtree>,
+ _: &Env,
+ ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+ Ok(tt::Subtree::default())
+ }
+ }
+}
+
+pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
+ const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
+ const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];
+
+ let file_name = match path.file_name().unwrap_or_default().to_str() {
+ Some(it) => it,
+ None => return false,
+ };
+
+ if let "Cargo.toml" | "Cargo.lock" = file_name {
+ return true;
+ }
+ if change_kind == ChangeKind::Modify {
+ return false;
+ }
+
+ // .cargo/config{.toml}
+ if path.extension().unwrap_or_default() != "rs" {
+ let is_cargo_config = matches!(file_name, "config.toml" | "config")
+ && path.parent().map(|parent| parent.as_ref().ends_with(".cargo")).unwrap_or(false);
+ return is_cargo_config;
+ }
+
+ if IMPLICIT_TARGET_FILES.iter().any(|it| path.as_ref().ends_with(it)) {
+ return true;
+ }
+ let parent = match path.parent() {
+ Some(it) => it,
+ None => return false,
+ };
+ if IMPLICIT_TARGET_DIRS.iter().any(|it| parent.as_ref().ends_with(it)) {
+ return true;
+ }
+ if file_name == "main.rs" {
+ let grand_parent = match parent.parent() {
+ Some(it) => it,
+ None => return false,
+ };
+ if IMPLICIT_TARGET_DIRS.iter().any(|it| grand_parent.as_ref().ends_with(it)) {
+ return true;
+ }
+ }
+ false
+}
--- /dev/null
- semantic_tokens, Result,
+//! Conversion of rust-analyzer specific types to lsp_types equivalents.
+use std::{
+ iter::once,
+ path,
+ sync::atomic::{AtomicU32, Ordering},
+};
+
+use ide::{
+ Annotation, AnnotationKind, Assist, AssistKind, Cancellable, CompletionItem,
+ CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange, FileSystemEdit,
+ Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel, InlayHint,
+ InlayHintLabel, InlayKind, Markup, NavigationTarget, ReferenceCategory, RenameError, Runnable,
+ Severity, SignatureHelp, SourceChange, StructureNodeKind, SymbolKind, TextEdit, TextRange,
+ TextSize,
+};
+use itertools::Itertools;
+use serde_json::to_value;
+use vfs::AbsPath;
+
+use crate::{
+ cargo_target_spec::CargoTargetSpec,
+ config::{CallInfoConfig, Config},
+ global_state::GlobalStateSnapshot,
+ line_index::{LineEndings, LineIndex, PositionEncoding},
+ lsp_ext,
+ lsp_utils::invalid_params_error,
- ) -> Result<lsp_types::InlayHint> {
++ semantic_tokens,
+};
+
+pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
+ let line_col = line_index.index.line_col(offset);
+ match line_index.encoding {
+ PositionEncoding::Utf8 => lsp_types::Position::new(line_col.line, line_col.col),
+ PositionEncoding::Utf16 => {
+ let line_col = line_index.index.to_utf16(line_col);
+ lsp_types::Position::new(line_col.line, line_col.col)
+ }
+ }
+}
+
+pub(crate) fn range(line_index: &LineIndex, range: TextRange) -> lsp_types::Range {
+ let start = position(line_index, range.start());
+ let end = position(line_index, range.end());
+ lsp_types::Range::new(start, end)
+}
+
+pub(crate) fn symbol_kind(symbol_kind: SymbolKind) -> lsp_types::SymbolKind {
+ match symbol_kind {
+ SymbolKind::Function => lsp_types::SymbolKind::FUNCTION,
+ SymbolKind::Struct => lsp_types::SymbolKind::STRUCT,
+ SymbolKind::Enum => lsp_types::SymbolKind::ENUM,
+ SymbolKind::Variant => lsp_types::SymbolKind::ENUM_MEMBER,
+ SymbolKind::Trait => lsp_types::SymbolKind::INTERFACE,
+ SymbolKind::Macro
+ | SymbolKind::BuiltinAttr
+ | SymbolKind::Attribute
+ | SymbolKind::Derive
+ | SymbolKind::DeriveHelper => lsp_types::SymbolKind::FUNCTION,
+ SymbolKind::Module | SymbolKind::ToolModule => lsp_types::SymbolKind::MODULE,
+ SymbolKind::TypeAlias | SymbolKind::TypeParam | SymbolKind::SelfType => {
+ lsp_types::SymbolKind::TYPE_PARAMETER
+ }
+ SymbolKind::Field => lsp_types::SymbolKind::FIELD,
+ SymbolKind::Static => lsp_types::SymbolKind::CONSTANT,
+ SymbolKind::Const => lsp_types::SymbolKind::CONSTANT,
+ SymbolKind::ConstParam => lsp_types::SymbolKind::CONSTANT,
+ SymbolKind::Impl => lsp_types::SymbolKind::OBJECT,
+ SymbolKind::Local
+ | SymbolKind::SelfParam
+ | SymbolKind::LifetimeParam
+ | SymbolKind::ValueParam
+ | SymbolKind::Label => lsp_types::SymbolKind::VARIABLE,
+ SymbolKind::Union => lsp_types::SymbolKind::STRUCT,
+ }
+}
+
+pub(crate) fn structure_node_kind(kind: StructureNodeKind) -> lsp_types::SymbolKind {
+ match kind {
+ StructureNodeKind::SymbolKind(symbol) => symbol_kind(symbol),
+ StructureNodeKind::Region => lsp_types::SymbolKind::NAMESPACE,
+ }
+}
+
+pub(crate) fn document_highlight_kind(
+ category: ReferenceCategory,
+) -> Option<lsp_types::DocumentHighlightKind> {
+ match category {
+ ReferenceCategory::Read => Some(lsp_types::DocumentHighlightKind::READ),
+ ReferenceCategory::Write => Some(lsp_types::DocumentHighlightKind::WRITE),
+ ReferenceCategory::Import => None,
+ }
+}
+
+pub(crate) fn diagnostic_severity(severity: Severity) -> lsp_types::DiagnosticSeverity {
+ match severity {
+ Severity::Error => lsp_types::DiagnosticSeverity::ERROR,
+ Severity::WeakWarning => lsp_types::DiagnosticSeverity::HINT,
+ }
+}
+
+pub(crate) fn documentation(documentation: Documentation) -> lsp_types::Documentation {
+ let value = crate::markdown::format_docs(documentation.as_str());
+ let markup_content = lsp_types::MarkupContent { kind: lsp_types::MarkupKind::Markdown, value };
+ lsp_types::Documentation::MarkupContent(markup_content)
+}
+
+pub(crate) fn completion_item_kind(
+ completion_item_kind: CompletionItemKind,
+) -> lsp_types::CompletionItemKind {
+ match completion_item_kind {
+ CompletionItemKind::Binding => lsp_types::CompletionItemKind::VARIABLE,
+ CompletionItemKind::BuiltinType => lsp_types::CompletionItemKind::STRUCT,
+ CompletionItemKind::InferredType => lsp_types::CompletionItemKind::SNIPPET,
+ CompletionItemKind::Keyword => lsp_types::CompletionItemKind::KEYWORD,
+ CompletionItemKind::Method => lsp_types::CompletionItemKind::METHOD,
+ CompletionItemKind::Snippet => lsp_types::CompletionItemKind::SNIPPET,
+ CompletionItemKind::UnresolvedReference => lsp_types::CompletionItemKind::REFERENCE,
+ CompletionItemKind::SymbolKind(symbol) => match symbol {
+ SymbolKind::Attribute => lsp_types::CompletionItemKind::FUNCTION,
+ SymbolKind::Const => lsp_types::CompletionItemKind::CONSTANT,
+ SymbolKind::ConstParam => lsp_types::CompletionItemKind::TYPE_PARAMETER,
+ SymbolKind::Derive => lsp_types::CompletionItemKind::FUNCTION,
+ SymbolKind::DeriveHelper => lsp_types::CompletionItemKind::FUNCTION,
+ SymbolKind::Enum => lsp_types::CompletionItemKind::ENUM,
+ SymbolKind::Field => lsp_types::CompletionItemKind::FIELD,
+ SymbolKind::Function => lsp_types::CompletionItemKind::FUNCTION,
+ SymbolKind::Impl => lsp_types::CompletionItemKind::TEXT,
+ SymbolKind::Label => lsp_types::CompletionItemKind::VARIABLE,
+ SymbolKind::LifetimeParam => lsp_types::CompletionItemKind::TYPE_PARAMETER,
+ SymbolKind::Local => lsp_types::CompletionItemKind::VARIABLE,
+ SymbolKind::Macro => lsp_types::CompletionItemKind::FUNCTION,
+ SymbolKind::Module => lsp_types::CompletionItemKind::MODULE,
+ SymbolKind::SelfParam => lsp_types::CompletionItemKind::VALUE,
+ SymbolKind::SelfType => lsp_types::CompletionItemKind::TYPE_PARAMETER,
+ SymbolKind::Static => lsp_types::CompletionItemKind::VALUE,
+ SymbolKind::Struct => lsp_types::CompletionItemKind::STRUCT,
+ SymbolKind::Trait => lsp_types::CompletionItemKind::INTERFACE,
+ SymbolKind::TypeAlias => lsp_types::CompletionItemKind::STRUCT,
+ SymbolKind::TypeParam => lsp_types::CompletionItemKind::TYPE_PARAMETER,
+ SymbolKind::Union => lsp_types::CompletionItemKind::STRUCT,
+ SymbolKind::ValueParam => lsp_types::CompletionItemKind::VALUE,
+ SymbolKind::Variant => lsp_types::CompletionItemKind::ENUM_MEMBER,
+ SymbolKind::BuiltinAttr => lsp_types::CompletionItemKind::FUNCTION,
+ SymbolKind::ToolModule => lsp_types::CompletionItemKind::MODULE,
+ },
+ }
+}
+
+pub(crate) fn text_edit(line_index: &LineIndex, indel: Indel) -> lsp_types::TextEdit {
+ let range = range(line_index, indel.delete);
+ let new_text = match line_index.endings {
+ LineEndings::Unix => indel.insert,
+ LineEndings::Dos => indel.insert.replace('\n', "\r\n"),
+ };
+ lsp_types::TextEdit { range, new_text }
+}
+
+pub(crate) fn completion_text_edit(
+ line_index: &LineIndex,
+ insert_replace_support: Option<lsp_types::Position>,
+ indel: Indel,
+) -> lsp_types::CompletionTextEdit {
+ let text_edit = text_edit(line_index, indel);
+ match insert_replace_support {
+ Some(cursor_pos) => lsp_types::InsertReplaceEdit {
+ new_text: text_edit.new_text,
+ insert: lsp_types::Range { start: text_edit.range.start, end: cursor_pos },
+ replace: text_edit.range,
+ }
+ .into(),
+ None => text_edit.into(),
+ }
+}
+
+pub(crate) fn snippet_text_edit(
+ line_index: &LineIndex,
+ is_snippet: bool,
+ indel: Indel,
+) -> lsp_ext::SnippetTextEdit {
+ let text_edit = text_edit(line_index, indel);
+ let insert_text_format =
+ if is_snippet { Some(lsp_types::InsertTextFormat::SNIPPET) } else { None };
+ lsp_ext::SnippetTextEdit {
+ range: text_edit.range,
+ new_text: text_edit.new_text,
+ insert_text_format,
+ annotation_id: None,
+ }
+}
+
+pub(crate) fn text_edit_vec(
+ line_index: &LineIndex,
+ text_edit: TextEdit,
+) -> Vec<lsp_types::TextEdit> {
+ text_edit.into_iter().map(|indel| self::text_edit(line_index, indel)).collect()
+}
+
+pub(crate) fn snippet_text_edit_vec(
+ line_index: &LineIndex,
+ is_snippet: bool,
+ text_edit: TextEdit,
+) -> Vec<lsp_ext::SnippetTextEdit> {
+ text_edit
+ .into_iter()
+ .map(|indel| self::snippet_text_edit(line_index, is_snippet, indel))
+ .collect()
+}
+
+pub(crate) fn completion_items(
+ config: &Config,
+ line_index: &LineIndex,
+ tdpp: lsp_types::TextDocumentPositionParams,
+ items: Vec<CompletionItem>,
+) -> Vec<lsp_types::CompletionItem> {
+ let max_relevance = items.iter().map(|it| it.relevance().score()).max().unwrap_or_default();
+ let mut res = Vec::with_capacity(items.len());
+ for item in items {
+ completion_item(&mut res, config, line_index, &tdpp, max_relevance, item)
+ }
+ res
+}
+
+fn completion_item(
+ acc: &mut Vec<lsp_types::CompletionItem>,
+ config: &Config,
+ line_index: &LineIndex,
+ tdpp: &lsp_types::TextDocumentPositionParams,
+ max_relevance: u32,
+ item: CompletionItem,
+) {
+ let insert_replace_support = config.insert_replace_support().then(|| tdpp.position);
+ let mut additional_text_edits = Vec::new();
+
+ // LSP does not allow arbitrary edits in completion, so we have to do a
+ // non-trivial mapping here.
+ let text_edit = {
+ let mut text_edit = None;
+ let source_range = item.source_range();
+ for indel in item.text_edit().iter() {
+ if indel.delete.contains_range(source_range) {
+ text_edit = Some(if indel.delete == source_range {
+ self::completion_text_edit(line_index, insert_replace_support, indel.clone())
+ } else {
+ assert!(source_range.end() == indel.delete.end());
+ let range1 = TextRange::new(indel.delete.start(), source_range.start());
+ let range2 = source_range;
+ let indel1 = Indel::replace(range1, String::new());
+ let indel2 = Indel::replace(range2, indel.insert.clone());
+ additional_text_edits.push(self::text_edit(line_index, indel1));
+ self::completion_text_edit(line_index, insert_replace_support, indel2)
+ })
+ } else {
+ assert!(source_range.intersect(indel.delete).is_none());
+ let text_edit = self::text_edit(line_index, indel.clone());
+ additional_text_edits.push(text_edit);
+ }
+ }
+ text_edit.unwrap()
+ };
+
+ let insert_text_format = item.is_snippet().then(|| lsp_types::InsertTextFormat::SNIPPET);
+ let tags = item.deprecated().then(|| vec![lsp_types::CompletionItemTag::DEPRECATED]);
+ let command = if item.trigger_call_info() && config.client_commands().trigger_parameter_hints {
+ Some(command::trigger_parameter_hints())
+ } else {
+ None
+ };
+
+ let mut lsp_item = lsp_types::CompletionItem {
+ label: item.label().to_string(),
+ detail: item.detail().map(|it| it.to_string()),
+ filter_text: Some(item.lookup().to_string()),
+ kind: Some(completion_item_kind(item.kind())),
+ text_edit: Some(text_edit),
+ additional_text_edits: Some(additional_text_edits),
+ documentation: item.documentation().map(documentation),
+ deprecated: Some(item.deprecated()),
+ tags,
+ command,
+ insert_text_format,
+ ..Default::default()
+ };
+
+ if config.completion_label_details_support() {
+ lsp_item.label_details = Some(lsp_types::CompletionItemLabelDetails {
+ detail: None,
+ description: lsp_item.detail.clone(),
+ });
+ }
+
+ set_score(&mut lsp_item, max_relevance, item.relevance());
+
+ if config.completion().enable_imports_on_the_fly {
+ if let imports @ [_, ..] = item.imports_to_add() {
+ let imports: Vec<_> = imports
+ .iter()
+ .filter_map(|import_edit| {
+ let import_path = &import_edit.import_path;
+ let import_name = import_path.segments().last()?;
+ Some(lsp_ext::CompletionImport {
+ full_import_path: import_path.to_string(),
+ imported_name: import_name.to_string(),
+ })
+ })
+ .collect();
+ if !imports.is_empty() {
+ let data = lsp_ext::CompletionResolveData { position: tdpp.clone(), imports };
+ lsp_item.data = Some(to_value(data).unwrap());
+ }
+ }
+ }
+
+ if let Some((mutability, offset, relevance)) = item.ref_match() {
+ let mut lsp_item_with_ref = lsp_item.clone();
+ set_score(&mut lsp_item_with_ref, max_relevance, relevance);
+ lsp_item_with_ref.label =
+ format!("&{}{}", mutability.as_keyword_for_ref(), lsp_item_with_ref.label);
+ lsp_item_with_ref.additional_text_edits.get_or_insert_with(Default::default).push(
+ self::text_edit(
+ line_index,
+ Indel::insert(offset, format!("&{}", mutability.as_keyword_for_ref())),
+ ),
+ );
+
+ acc.push(lsp_item_with_ref);
+ };
+
+ acc.push(lsp_item);
+
+ fn set_score(
+ res: &mut lsp_types::CompletionItem,
+ max_relevance: u32,
+ relevance: CompletionRelevance,
+ ) {
+ if relevance.is_relevant() && relevance.score() == max_relevance {
+ res.preselect = Some(true);
+ }
+ // The relevance needs to be inverted to come up with a sort score
+ // because the client will sort ascending.
+ let sort_score = relevance.score() ^ 0xFF_FF_FF_FF;
+ // Zero pad the string to ensure values can be properly sorted
+ // by the client. Hex format is used because it is easier to
+ // visually compare very large values, which the sort text
+ // tends to be since it is the opposite of the score.
+ res.sort_text = Some(format!("{:08x}", sort_score));
+ }
+}
+
+pub(crate) fn signature_help(
+ call_info: SignatureHelp,
+ config: CallInfoConfig,
+ label_offsets: bool,
+) -> lsp_types::SignatureHelp {
+ let (label, parameters) = match (config.params_only, label_offsets) {
+ (concise, false) => {
+ let params = call_info
+ .parameter_labels()
+ .map(|label| lsp_types::ParameterInformation {
+ label: lsp_types::ParameterLabel::Simple(label.to_string()),
+ documentation: None,
+ })
+ .collect::<Vec<_>>();
+ let label =
+ if concise { call_info.parameter_labels().join(", ") } else { call_info.signature };
+ (label, params)
+ }
+ (false, true) => {
+ let params = call_info
+ .parameter_ranges()
+ .iter()
+ .map(|it| {
+ let start = call_info.signature[..it.start().into()].chars().count() as u32;
+ let end = call_info.signature[..it.end().into()].chars().count() as u32;
+ [start, end]
+ })
+ .map(|label_offsets| lsp_types::ParameterInformation {
+ label: lsp_types::ParameterLabel::LabelOffsets(label_offsets),
+ documentation: None,
+ })
+ .collect::<Vec<_>>();
+ (call_info.signature, params)
+ }
+ (true, true) => {
+ let mut params = Vec::new();
+ let mut label = String::new();
+ let mut first = true;
+ for param in call_info.parameter_labels() {
+ if !first {
+ label.push_str(", ");
+ }
+ first = false;
+ let start = label.chars().count() as u32;
+ label.push_str(param);
+ let end = label.chars().count() as u32;
+ params.push(lsp_types::ParameterInformation {
+ label: lsp_types::ParameterLabel::LabelOffsets([start, end]),
+ documentation: None,
+ });
+ }
+
+ (label, params)
+ }
+ };
+
+ let documentation = call_info.doc.filter(|_| config.docs).map(|doc| {
+ lsp_types::Documentation::MarkupContent(lsp_types::MarkupContent {
+ kind: lsp_types::MarkupKind::Markdown,
+ value: doc,
+ })
+ });
+
+ let active_parameter = call_info.active_parameter.map(|it| it as u32);
+
+ let signature = lsp_types::SignatureInformation {
+ label,
+ documentation,
+ parameters: Some(parameters),
+ active_parameter,
+ };
+ lsp_types::SignatureHelp {
+ signatures: vec![signature],
+ active_signature: Some(0),
+ active_parameter,
+ }
+}
+
+pub(crate) fn inlay_hint(
+ snap: &GlobalStateSnapshot,
+ line_index: &LineIndex,
+ render_colons: bool,
+ mut inlay_hint: InlayHint,
- InlayKind::ParameterHint
- | InlayKind::ImplicitReborrowHint
- | InlayKind::BindingModeHint => position(line_index, inlay_hint.range.start()),
++) -> Cancellable<lsp_types::InlayHint> {
+ match inlay_hint.kind {
+ InlayKind::ParameterHint if render_colons => inlay_hint.label.append_str(":"),
+ InlayKind::TypeHint if render_colons => inlay_hint.label.prepend_str(": "),
+ InlayKind::ClosureReturnTypeHint => inlay_hint.label.prepend_str(" -> "),
+ _ => {}
+ }
+
+ Ok(lsp_types::InlayHint {
+ position: match inlay_hint.kind {
+ // before annotated thing
- InlayKind::BindingModeHint
++ InlayKind::ParameterHint | InlayKind::AdjustmentHint | InlayKind::BindingModeHint => {
++ position(line_index, inlay_hint.range.start())
++ }
+ // after annotated thing
+ InlayKind::ClosureReturnTypeHint
+ | InlayKind::TypeHint
+ | InlayKind::ChainingHint
+ | InlayKind::GenericParamListHint
++ | InlayKind::AdjustmentHintClosingParenthesis
+ | InlayKind::LifetimeHint
+ | InlayKind::ClosingBraceHint => position(line_index, inlay_hint.range.end()),
+ },
+ padding_left: Some(match inlay_hint.kind {
+ InlayKind::TypeHint => !render_colons,
+ InlayKind::ChainingHint | InlayKind::ClosingBraceHint => true,
- | InlayKind::ImplicitReborrowHint
++ InlayKind::AdjustmentHintClosingParenthesis
++ | InlayKind::BindingModeHint
+ | InlayKind::ClosureReturnTypeHint
+ | InlayKind::GenericParamListHint
- InlayKind::ChainingHint
++ | InlayKind::AdjustmentHint
+ | InlayKind::LifetimeHint
+ | InlayKind::ParameterHint => false,
+ }),
+ padding_right: Some(match inlay_hint.kind {
- | InlayKind::ImplicitReborrowHint
++ InlayKind::AdjustmentHintClosingParenthesis
++ | InlayKind::ChainingHint
+ | InlayKind::ClosureReturnTypeHint
+ | InlayKind::GenericParamListHint
- InlayKind::BindingModeHint
++ | InlayKind::AdjustmentHint
+ | InlayKind::TypeHint
+ | InlayKind::ClosingBraceHint => false,
+ InlayKind::BindingModeHint => inlay_hint.label.as_simple_str() != Some("&"),
+ InlayKind::ParameterHint | InlayKind::LifetimeHint => true,
+ }),
+ kind: match inlay_hint.kind {
+ InlayKind::ParameterHint => Some(lsp_types::InlayHintKind::PARAMETER),
+ InlayKind::ClosureReturnTypeHint | InlayKind::TypeHint | InlayKind::ChainingHint => {
+ Some(lsp_types::InlayHintKind::TYPE)
+ }
- | InlayKind::ImplicitReborrowHint
++ InlayKind::AdjustmentHintClosingParenthesis
++ | InlayKind::BindingModeHint
+ | InlayKind::GenericParamListHint
+ | InlayKind::LifetimeHint
- ) -> Result<lsp_types::InlayHintLabel> {
++ | InlayKind::AdjustmentHint
+ | InlayKind::ClosingBraceHint => None,
+ },
+ text_edits: None,
+ data: (|| match inlay_hint.tooltip {
+ Some(ide::InlayTooltip::HoverOffset(file_id, offset)) => {
+ let uri = url(snap, file_id);
+ let line_index = snap.file_line_index(file_id).ok()?;
+
+ let text_document = lsp_types::TextDocumentIdentifier { uri };
+ to_value(lsp_ext::InlayHintResolveData {
+ text_document,
+ position: lsp_ext::PositionOrRange::Position(position(&line_index, offset)),
+ })
+ .ok()
+ }
+ Some(ide::InlayTooltip::HoverRanged(file_id, text_range)) => {
+ let uri = url(snap, file_id);
+ let text_document = lsp_types::TextDocumentIdentifier { uri };
+ let line_index = snap.file_line_index(file_id).ok()?;
+ to_value(lsp_ext::InlayHintResolveData {
+ text_document,
+ position: lsp_ext::PositionOrRange::Range(range(&line_index, text_range)),
+ })
+ .ok()
+ }
+ _ => None,
+ })(),
+ tooltip: Some(match inlay_hint.tooltip {
+ Some(ide::InlayTooltip::String(s)) => lsp_types::InlayHintTooltip::String(s),
+ _ => lsp_types::InlayHintTooltip::String(inlay_hint.label.to_string()),
+ }),
+ label: inlay_hint_label(snap, inlay_hint.label)?,
+ })
+}
+
+fn inlay_hint_label(
+ snap: &GlobalStateSnapshot,
+ label: InlayHintLabel,
- .collect::<Result<Vec<_>>>()?,
++) -> Cancellable<lsp_types::InlayHintLabel> {
+ Ok(match label.as_simple_str() {
+ Some(s) => lsp_types::InlayHintLabel::String(s.into()),
+ None => lsp_types::InlayHintLabel::LabelParts(
+ label
+ .parts
+ .into_iter()
+ .map(|part| {
+ Ok(lsp_types::InlayHintLabelPart {
+ value: part.text,
+ tooltip: None,
+ location: part
+ .linked_location
+ .map(|range| location(snap, range))
+ .transpose()?,
+ command: None,
+ })
+ })
- ) -> Result<lsp_types::Location> {
++ .collect::<Cancellable<Vec<_>>>()?,
+ ),
+ })
+}
+
+static TOKEN_RESULT_COUNTER: AtomicU32 = AtomicU32::new(1);
+
+pub(crate) fn semantic_tokens(
+ text: &str,
+ line_index: &LineIndex,
+ highlights: Vec<HlRange>,
+) -> lsp_types::SemanticTokens {
+ let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
+ let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);
+
+ for highlight_range in highlights {
+ if highlight_range.highlight.is_empty() {
+ continue;
+ }
+
+ let (ty, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
+ let token_index = semantic_tokens::type_index(ty);
+ let modifier_bitset = mods.0;
+
+ for mut text_range in line_index.index.lines(highlight_range.range) {
+ if text[text_range].ends_with('\n') {
+ text_range =
+ TextRange::new(text_range.start(), text_range.end() - TextSize::of('\n'));
+ }
+ let range = range(line_index, text_range);
+ builder.push(range, token_index, modifier_bitset);
+ }
+ }
+
+ builder.build()
+}
+
+pub(crate) fn semantic_token_delta(
+ previous: &lsp_types::SemanticTokens,
+ current: &lsp_types::SemanticTokens,
+) -> lsp_types::SemanticTokensDelta {
+ let result_id = current.result_id.clone();
+ let edits = semantic_tokens::diff_tokens(&previous.data, ¤t.data);
+ lsp_types::SemanticTokensDelta { result_id, edits }
+}
+
+fn semantic_token_type_and_modifiers(
+ highlight: Highlight,
+) -> (lsp_types::SemanticTokenType, semantic_tokens::ModifierSet) {
+ let mut mods = semantic_tokens::ModifierSet::default();
+ let type_ = match highlight.tag {
+ HlTag::Symbol(symbol) => match symbol {
+ SymbolKind::Attribute => semantic_tokens::DECORATOR,
+ SymbolKind::Derive => semantic_tokens::DERIVE,
+ SymbolKind::DeriveHelper => semantic_tokens::DERIVE_HELPER,
+ SymbolKind::Module => semantic_tokens::NAMESPACE,
+ SymbolKind::Impl => semantic_tokens::TYPE_ALIAS,
+ SymbolKind::Field => semantic_tokens::PROPERTY,
+ SymbolKind::TypeParam => semantic_tokens::TYPE_PARAMETER,
+ SymbolKind::ConstParam => semantic_tokens::CONST_PARAMETER,
+ SymbolKind::LifetimeParam => semantic_tokens::LIFETIME,
+ SymbolKind::Label => semantic_tokens::LABEL,
+ SymbolKind::ValueParam => semantic_tokens::PARAMETER,
+ SymbolKind::SelfParam => semantic_tokens::SELF_KEYWORD,
+ SymbolKind::SelfType => semantic_tokens::SELF_TYPE_KEYWORD,
+ SymbolKind::Local => semantic_tokens::VARIABLE,
+ SymbolKind::Function => {
+ if highlight.mods.contains(HlMod::Associated) {
+ semantic_tokens::METHOD
+ } else {
+ semantic_tokens::FUNCTION
+ }
+ }
+ SymbolKind::Const => {
+ mods |= semantic_tokens::CONSTANT;
+ mods |= semantic_tokens::STATIC;
+ semantic_tokens::VARIABLE
+ }
+ SymbolKind::Static => {
+ mods |= semantic_tokens::STATIC;
+ semantic_tokens::VARIABLE
+ }
+ SymbolKind::Struct => semantic_tokens::STRUCT,
+ SymbolKind::Enum => semantic_tokens::ENUM,
+ SymbolKind::Variant => semantic_tokens::ENUM_MEMBER,
+ SymbolKind::Union => semantic_tokens::UNION,
+ SymbolKind::TypeAlias => semantic_tokens::TYPE_ALIAS,
+ SymbolKind::Trait => semantic_tokens::INTERFACE,
+ SymbolKind::Macro => semantic_tokens::MACRO,
+ SymbolKind::BuiltinAttr => semantic_tokens::BUILTIN_ATTRIBUTE,
+ SymbolKind::ToolModule => semantic_tokens::TOOL_MODULE,
+ },
+ HlTag::AttributeBracket => semantic_tokens::ATTRIBUTE_BRACKET,
+ HlTag::BoolLiteral => semantic_tokens::BOOLEAN,
+ HlTag::BuiltinType => semantic_tokens::BUILTIN_TYPE,
+ HlTag::ByteLiteral | HlTag::NumericLiteral => semantic_tokens::NUMBER,
+ HlTag::CharLiteral => semantic_tokens::CHAR,
+ HlTag::Comment => semantic_tokens::COMMENT,
+ HlTag::EscapeSequence => semantic_tokens::ESCAPE_SEQUENCE,
+ HlTag::FormatSpecifier => semantic_tokens::FORMAT_SPECIFIER,
+ HlTag::Keyword => semantic_tokens::KEYWORD,
+ HlTag::None => semantic_tokens::GENERIC,
+ HlTag::Operator(op) => match op {
+ HlOperator::Bitwise => semantic_tokens::BITWISE,
+ HlOperator::Arithmetic => semantic_tokens::ARITHMETIC,
+ HlOperator::Logical => semantic_tokens::LOGICAL,
+ HlOperator::Comparison => semantic_tokens::COMPARISON,
+ HlOperator::Other => semantic_tokens::OPERATOR,
+ },
+ HlTag::StringLiteral => semantic_tokens::STRING,
+ HlTag::UnresolvedReference => semantic_tokens::UNRESOLVED_REFERENCE,
+ HlTag::Punctuation(punct) => match punct {
+ HlPunct::Bracket => semantic_tokens::BRACKET,
+ HlPunct::Brace => semantic_tokens::BRACE,
+ HlPunct::Parenthesis => semantic_tokens::PARENTHESIS,
+ HlPunct::Angle => semantic_tokens::ANGLE,
+ HlPunct::Comma => semantic_tokens::COMMA,
+ HlPunct::Dot => semantic_tokens::DOT,
+ HlPunct::Colon => semantic_tokens::COLON,
+ HlPunct::Semi => semantic_tokens::SEMICOLON,
+ HlPunct::Other => semantic_tokens::PUNCTUATION,
+ HlPunct::MacroBang => semantic_tokens::MACRO_BANG,
+ },
+ };
+
+ for modifier in highlight.mods.iter() {
+ let modifier = match modifier {
+ HlMod::Associated => continue,
+ HlMod::Async => semantic_tokens::ASYNC,
+ HlMod::Attribute => semantic_tokens::ATTRIBUTE_MODIFIER,
+ HlMod::Callable => semantic_tokens::CALLABLE,
+ HlMod::Consuming => semantic_tokens::CONSUMING,
+ HlMod::ControlFlow => semantic_tokens::CONTROL_FLOW,
+ HlMod::CrateRoot => semantic_tokens::CRATE_ROOT,
+ HlMod::DefaultLibrary => semantic_tokens::DEFAULT_LIBRARY,
+ HlMod::Definition => semantic_tokens::DECLARATION,
+ HlMod::Documentation => semantic_tokens::DOCUMENTATION,
+ HlMod::Injected => semantic_tokens::INJECTED,
+ HlMod::IntraDocLink => semantic_tokens::INTRA_DOC_LINK,
+ HlMod::Library => semantic_tokens::LIBRARY,
+ HlMod::Mutable => semantic_tokens::MUTABLE,
+ HlMod::Public => semantic_tokens::PUBLIC,
+ HlMod::Reference => semantic_tokens::REFERENCE,
+ HlMod::Static => semantic_tokens::STATIC,
+ HlMod::Trait => semantic_tokens::TRAIT_MODIFIER,
+ HlMod::Unsafe => semantic_tokens::UNSAFE,
+ };
+ mods |= modifier;
+ }
+
+ (type_, mods)
+}
+
+pub(crate) fn folding_range(
+ text: &str,
+ line_index: &LineIndex,
+ line_folding_only: bool,
+ fold: Fold,
+) -> lsp_types::FoldingRange {
+ let kind = match fold.kind {
+ FoldKind::Comment => Some(lsp_types::FoldingRangeKind::Comment),
+ FoldKind::Imports => Some(lsp_types::FoldingRangeKind::Imports),
+ FoldKind::Region => Some(lsp_types::FoldingRangeKind::Region),
+ FoldKind::Mods
+ | FoldKind::Block
+ | FoldKind::ArgList
+ | FoldKind::Consts
+ | FoldKind::Statics
+ | FoldKind::WhereClause
+ | FoldKind::ReturnType
+ | FoldKind::Array
+ | FoldKind::MatchArm => None,
+ };
+
+ let range = range(line_index, fold.range);
+
+ if line_folding_only {
+ // Clients with line_folding_only == true (such as VSCode) will fold the whole end line
+ // even if it contains text not in the folding range. To prevent that we exclude
+ // range.end.line from the folding region if there is more text after range.end
+ // on the same line.
+ let has_more_text_on_end_line = text[TextRange::new(fold.range.end(), TextSize::of(text))]
+ .chars()
+ .take_while(|it| *it != '\n')
+ .any(|it| !it.is_whitespace());
+
+ let end_line = if has_more_text_on_end_line {
+ range.end.line.saturating_sub(1)
+ } else {
+ range.end.line
+ };
+
+ lsp_types::FoldingRange {
+ start_line: range.start.line,
+ start_character: None,
+ end_line,
+ end_character: None,
+ kind,
+ }
+ } else {
+ lsp_types::FoldingRange {
+ start_line: range.start.line,
+ start_character: Some(range.start.character),
+ end_line: range.end.line,
+ end_character: Some(range.end.character),
+ kind,
+ }
+ }
+}
+
+pub(crate) fn url(snap: &GlobalStateSnapshot, file_id: FileId) -> lsp_types::Url {
+ snap.file_id_to_url(file_id)
+}
+
+/// Returns a `Url` object from a given path, will lowercase drive letters if present.
+/// This will only happen when processing windows paths.
+///
+/// When processing non-windows path, this is essentially the same as `Url::from_file_path`.
+pub(crate) fn url_from_abs_path(path: &AbsPath) -> lsp_types::Url {
+ let url = lsp_types::Url::from_file_path(path).unwrap();
+ match path.as_ref().components().next() {
+ Some(path::Component::Prefix(prefix))
+ if matches!(prefix.kind(), path::Prefix::Disk(_) | path::Prefix::VerbatimDisk(_)) =>
+ {
+ // Need to lowercase driver letter
+ }
+ _ => return url,
+ }
+
+ let driver_letter_range = {
+ let (scheme, drive_letter, _rest) = match url.as_str().splitn(3, ':').collect_tuple() {
+ Some(it) => it,
+ None => return url,
+ };
+ let start = scheme.len() + ':'.len_utf8();
+ start..(start + drive_letter.len())
+ };
+
+ // Note: lowercasing the `path` itself doesn't help, the `Url::parse`
+ // machinery *also* canonicalizes the drive letter. So, just massage the
+ // string in place.
+ let mut url: String = url.into();
+ url[driver_letter_range].make_ascii_lowercase();
+ lsp_types::Url::parse(&url).unwrap()
+}
+
+pub(crate) fn optional_versioned_text_document_identifier(
+ snap: &GlobalStateSnapshot,
+ file_id: FileId,
+) -> lsp_types::OptionalVersionedTextDocumentIdentifier {
+ let url = url(snap, file_id);
+ let version = snap.url_file_version(&url);
+ lsp_types::OptionalVersionedTextDocumentIdentifier { uri: url, version }
+}
+
+pub(crate) fn location(
+ snap: &GlobalStateSnapshot,
+ frange: FileRange,
- ) -> Result<lsp_types::Location> {
++) -> Cancellable<lsp_types::Location> {
+ let url = url(snap, frange.file_id);
+ let line_index = snap.file_line_index(frange.file_id)?;
+ let range = range(&line_index, frange.range);
+ let loc = lsp_types::Location::new(url, range);
+ Ok(loc)
+}
+
+/// Prefer using `location_link`, if the client has the cap.
+pub(crate) fn location_from_nav(
+ snap: &GlobalStateSnapshot,
+ nav: NavigationTarget,
- ) -> Result<lsp_types::LocationLink> {
++) -> Cancellable<lsp_types::Location> {
+ let url = url(snap, nav.file_id);
+ let line_index = snap.file_line_index(nav.file_id)?;
+ let range = range(&line_index, nav.full_range);
+ let loc = lsp_types::Location::new(url, range);
+ Ok(loc)
+}
+
+pub(crate) fn location_link(
+ snap: &GlobalStateSnapshot,
+ src: Option<FileRange>,
+ target: NavigationTarget,
- ) -> Result<(lsp_types::Url, lsp_types::Range, lsp_types::Range)> {
++) -> Cancellable<lsp_types::LocationLink> {
+ let origin_selection_range = match src {
+ Some(src) => {
+ let line_index = snap.file_line_index(src.file_id)?;
+ let range = range(&line_index, src.range);
+ Some(range)
+ }
+ None => None,
+ };
+ let (target_uri, target_range, target_selection_range) = location_info(snap, target)?;
+ let res = lsp_types::LocationLink {
+ origin_selection_range,
+ target_uri,
+ target_range,
+ target_selection_range,
+ };
+ Ok(res)
+}
+
+fn location_info(
+ snap: &GlobalStateSnapshot,
+ target: NavigationTarget,
- ) -> Result<lsp_types::GotoDefinitionResponse> {
++) -> Cancellable<(lsp_types::Url, lsp_types::Range, lsp_types::Range)> {
+ let line_index = snap.file_line_index(target.file_id)?;
+
+ let target_uri = url(snap, target.file_id);
+ let target_range = range(&line_index, target.full_range);
+ let target_selection_range =
+ target.focus_range.map(|it| range(&line_index, it)).unwrap_or(target_range);
+ Ok((target_uri, target_range, target_selection_range))
+}
+
+pub(crate) fn goto_definition_response(
+ snap: &GlobalStateSnapshot,
+ src: Option<FileRange>,
+ targets: Vec<NavigationTarget>,
- .collect::<Result<Vec<_>>>()?;
++) -> Cancellable<lsp_types::GotoDefinitionResponse> {
+ if snap.config.location_link() {
+ let links = targets
+ .into_iter()
+ .map(|nav| location_link(snap, src, nav))
- .collect::<Result<Vec<_>>>()?;
++ .collect::<Cancellable<Vec<_>>>()?;
+ Ok(links.into())
+ } else {
+ let locations = targets
+ .into_iter()
+ .map(|nav| {
+ location(snap, FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
+ })
- ) -> Result<lsp_ext::SnippetTextDocumentEdit> {
++ .collect::<Cancellable<Vec<_>>>()?;
+ Ok(locations.into())
+ }
+}
+
+fn outside_workspace_annotation_id() -> String {
+ String::from("OutsideWorkspace")
+}
+
+pub(crate) fn snippet_text_document_edit(
+ snap: &GlobalStateSnapshot,
+ is_snippet: bool,
+ file_id: FileId,
+ edit: TextEdit,
- ) -> Result<lsp_ext::SnippetWorkspaceEdit> {
++) -> Cancellable<lsp_ext::SnippetTextDocumentEdit> {
+ let text_document = optional_versioned_text_document_identifier(snap, file_id);
+ let line_index = snap.file_line_index(file_id)?;
+ let mut edits: Vec<_> =
+ edit.into_iter().map(|it| snippet_text_edit(&line_index, is_snippet, it)).collect();
+
+ if snap.analysis.is_library_file(file_id)? && snap.config.change_annotation_support() {
+ for edit in &mut edits {
+ edit.annotation_id = Some(outside_workspace_annotation_id())
+ }
+ }
+ Ok(lsp_ext::SnippetTextDocumentEdit { text_document, edits })
+}
+
+pub(crate) fn snippet_text_document_ops(
+ snap: &GlobalStateSnapshot,
+ file_system_edit: FileSystemEdit,
+) -> Cancellable<Vec<lsp_ext::SnippetDocumentChangeOperation>> {
+ let mut ops = Vec::new();
+ match file_system_edit {
+ FileSystemEdit::CreateFile { dst, initial_contents } => {
+ let uri = snap.anchored_path(&dst);
+ let create_file = lsp_types::ResourceOp::Create(lsp_types::CreateFile {
+ uri: uri.clone(),
+ options: None,
+ annotation_id: None,
+ });
+ ops.push(lsp_ext::SnippetDocumentChangeOperation::Op(create_file));
+ if !initial_contents.is_empty() {
+ let text_document =
+ lsp_types::OptionalVersionedTextDocumentIdentifier { uri, version: None };
+ let text_edit = lsp_ext::SnippetTextEdit {
+ range: lsp_types::Range::default(),
+ new_text: initial_contents,
+ insert_text_format: Some(lsp_types::InsertTextFormat::PLAIN_TEXT),
+ annotation_id: None,
+ };
+ let edit_file =
+ lsp_ext::SnippetTextDocumentEdit { text_document, edits: vec![text_edit] };
+ ops.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit_file));
+ }
+ }
+ FileSystemEdit::MoveFile { src, dst } => {
+ let old_uri = snap.file_id_to_url(src);
+ let new_uri = snap.anchored_path(&dst);
+ let mut rename_file =
+ lsp_types::RenameFile { old_uri, new_uri, options: None, annotation_id: None };
+ if snap.analysis.is_library_file(src).ok() == Some(true)
+ && snap.config.change_annotation_support()
+ {
+ rename_file.annotation_id = Some(outside_workspace_annotation_id())
+ }
+ ops.push(lsp_ext::SnippetDocumentChangeOperation::Op(lsp_types::ResourceOp::Rename(
+ rename_file,
+ )))
+ }
+ FileSystemEdit::MoveDir { src, src_id, dst } => {
+ let old_uri = snap.anchored_path(&src);
+ let new_uri = snap.anchored_path(&dst);
+ let mut rename_file =
+ lsp_types::RenameFile { old_uri, new_uri, options: None, annotation_id: None };
+ if snap.analysis.is_library_file(src_id).ok() == Some(true)
+ && snap.config.change_annotation_support()
+ {
+ rename_file.annotation_id = Some(outside_workspace_annotation_id())
+ }
+ ops.push(lsp_ext::SnippetDocumentChangeOperation::Op(lsp_types::ResourceOp::Rename(
+ rename_file,
+ )))
+ }
+ }
+ Ok(ops)
+}
+
+pub(crate) fn snippet_workspace_edit(
+ snap: &GlobalStateSnapshot,
+ source_change: SourceChange,
- ) -> Result<lsp_types::WorkspaceEdit> {
++) -> Cancellable<lsp_ext::SnippetWorkspaceEdit> {
+ let mut document_changes: Vec<lsp_ext::SnippetDocumentChangeOperation> = Vec::new();
+
+ for op in source_change.file_system_edits {
+ let ops = snippet_text_document_ops(snap, op)?;
+ document_changes.extend_from_slice(&ops);
+ }
+ for (file_id, edit) in source_change.source_file_edits {
+ let edit = snippet_text_document_edit(snap, source_change.is_snippet, file_id, edit)?;
+ document_changes.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit));
+ }
+ let mut workspace_edit = lsp_ext::SnippetWorkspaceEdit {
+ changes: None,
+ document_changes: Some(document_changes),
+ change_annotations: None,
+ };
+ if snap.config.change_annotation_support() {
+ workspace_edit.change_annotations = Some(
+ once((
+ outside_workspace_annotation_id(),
+ lsp_types::ChangeAnnotation {
+ label: String::from("Edit outside of the workspace"),
+ needs_confirmation: Some(true),
+ description: Some(String::from(
+ "This edit lies outside of the workspace and may affect dependencies",
+ )),
+ },
+ ))
+ .collect(),
+ )
+ }
+ Ok(workspace_edit)
+}
+
+pub(crate) fn workspace_edit(
+ snap: &GlobalStateSnapshot,
+ source_change: SourceChange,
- ) -> Result<lsp_types::CallHierarchyItem> {
++) -> Cancellable<lsp_types::WorkspaceEdit> {
+ assert!(!source_change.is_snippet);
+ snippet_workspace_edit(snap, source_change).map(|it| it.into())
+}
+
+impl From<lsp_ext::SnippetWorkspaceEdit> for lsp_types::WorkspaceEdit {
+ fn from(snippet_workspace_edit: lsp_ext::SnippetWorkspaceEdit) -> lsp_types::WorkspaceEdit {
+ lsp_types::WorkspaceEdit {
+ changes: None,
+ document_changes: snippet_workspace_edit.document_changes.map(|changes| {
+ lsp_types::DocumentChanges::Operations(
+ changes
+ .into_iter()
+ .map(|change| match change {
+ lsp_ext::SnippetDocumentChangeOperation::Op(op) => {
+ lsp_types::DocumentChangeOperation::Op(op)
+ }
+ lsp_ext::SnippetDocumentChangeOperation::Edit(edit) => {
+ lsp_types::DocumentChangeOperation::Edit(
+ lsp_types::TextDocumentEdit {
+ text_document: edit.text_document,
+ edits: edit.edits.into_iter().map(From::from).collect(),
+ },
+ )
+ }
+ })
+ .collect(),
+ )
+ }),
+ change_annotations: snippet_workspace_edit.change_annotations,
+ }
+ }
+}
+
+impl From<lsp_ext::SnippetTextEdit>
+ for lsp_types::OneOf<lsp_types::TextEdit, lsp_types::AnnotatedTextEdit>
+{
+ fn from(
+ lsp_ext::SnippetTextEdit { annotation_id, insert_text_format:_, new_text, range }: lsp_ext::SnippetTextEdit,
+ ) -> Self {
+ match annotation_id {
+ Some(annotation_id) => lsp_types::OneOf::Right(lsp_types::AnnotatedTextEdit {
+ text_edit: lsp_types::TextEdit { range, new_text },
+ annotation_id,
+ }),
+ None => lsp_types::OneOf::Left(lsp_types::TextEdit { range, new_text }),
+ }
+ }
+}
+
+pub(crate) fn call_hierarchy_item(
+ snap: &GlobalStateSnapshot,
+ target: NavigationTarget,
- ) -> Result<lsp_ext::CodeAction> {
++) -> Cancellable<lsp_types::CallHierarchyItem> {
+ let name = target.name.to_string();
+ let detail = target.description.clone();
+ let kind = target.kind.map(symbol_kind).unwrap_or(lsp_types::SymbolKind::FUNCTION);
+ let (uri, range, selection_range) = location_info(snap, target)?;
+ Ok(lsp_types::CallHierarchyItem {
+ name,
+ kind,
+ tags: None,
+ detail,
+ uri,
+ range,
+ selection_range,
+ data: None,
+ })
+}
+
+pub(crate) fn code_action_kind(kind: AssistKind) -> lsp_types::CodeActionKind {
+ match kind {
+ AssistKind::None | AssistKind::Generate => lsp_types::CodeActionKind::EMPTY,
+ AssistKind::QuickFix => lsp_types::CodeActionKind::QUICKFIX,
+ AssistKind::Refactor => lsp_types::CodeActionKind::REFACTOR,
+ AssistKind::RefactorExtract => lsp_types::CodeActionKind::REFACTOR_EXTRACT,
+ AssistKind::RefactorInline => lsp_types::CodeActionKind::REFACTOR_INLINE,
+ AssistKind::RefactorRewrite => lsp_types::CodeActionKind::REFACTOR_REWRITE,
+ }
+}
+
+pub(crate) fn code_action(
+ snap: &GlobalStateSnapshot,
+ assist: Assist,
+ resolve_data: Option<(usize, lsp_types::CodeActionParams)>,
- ) -> Result<lsp_ext::Runnable> {
++) -> Cancellable<lsp_ext::CodeAction> {
+ let mut res = lsp_ext::CodeAction {
+ title: assist.label.to_string(),
+ group: assist.group.filter(|_| snap.config.code_action_group()).map(|gr| gr.0),
+ kind: Some(code_action_kind(assist.id.1)),
+ edit: None,
+ is_preferred: None,
+ data: None,
+ command: None,
+ };
+
+ if assist.trigger_signature_help && snap.config.client_commands().trigger_parameter_hints {
+ res.command = Some(command::trigger_parameter_hints());
+ }
+
+ match (assist.source_change, resolve_data) {
+ (Some(it), _) => res.edit = Some(snippet_workspace_edit(snap, it)?),
+ (None, Some((index, code_action_params))) => {
+ res.data = Some(lsp_ext::CodeActionData {
+ id: format!("{}:{}:{}", assist.id.0, assist.id.1.name(), index),
+ code_action_params,
+ });
+ }
+ (None, None) => {
+ stdx::never!("assist should always be resolved if client can't do lazy resolving")
+ }
+ };
+ Ok(res)
+}
+
+pub(crate) fn runnable(
+ snap: &GlobalStateSnapshot,
+ runnable: Runnable,
- CargoTargetSpec::runnable_args(snap, spec, &runnable.kind, &runnable.cfg)?;
++) -> Cancellable<lsp_ext::Runnable> {
+ let config = snap.config.runnables();
+ let spec = CargoTargetSpec::for_file(snap, runnable.nav.file_id)?;
+ let workspace_root = spec.as_ref().map(|it| it.workspace_root.clone());
+ let target = spec.as_ref().map(|s| s.target.clone());
+ let (cargo_args, executable_args) =
- ) -> Result<()> {
++ CargoTargetSpec::runnable_args(snap, spec, &runnable.kind, &runnable.cfg);
+ let label = runnable.label(target);
+ let location = location_link(snap, None, runnable.nav)?;
+
+ Ok(lsp_ext::Runnable {
+ label,
+ location: Some(location),
+ kind: lsp_ext::RunnableKind::Cargo,
+ args: lsp_ext::CargoRunnable {
+ workspace_root: workspace_root.map(|it| it.into()),
+ override_cargo: config.override_cargo,
+ cargo_args,
+ cargo_extra_args: config.cargo_extra_args,
+ executable_args,
+ expect_test: None,
+ },
+ })
+}
+
+pub(crate) fn code_lens(
+ acc: &mut Vec<lsp_types::CodeLens>,
+ snap: &GlobalStateSnapshot,
+ annotation: Annotation,
++) -> Cancellable<()> {
+ let client_commands_config = snap.config.client_commands();
+ match annotation.kind {
+ AnnotationKind::Runnable(run) => {
+ let line_index = snap.file_line_index(run.nav.file_id)?;
+ let annotation_range = range(&line_index, annotation.range);
+
+ let title = run.title();
+ let can_debug = match run.kind {
+ ide::RunnableKind::DocTest { .. } => false,
+ ide::RunnableKind::TestMod { .. }
+ | ide::RunnableKind::Test { .. }
+ | ide::RunnableKind::Bench { .. }
+ | ide::RunnableKind::Bin => true,
+ };
+ let r = runnable(snap, run)?;
+
+ let lens_config = snap.config.lens();
+ if lens_config.run && client_commands_config.run_single {
+ let command = command::run_single(&r, &title);
+ acc.push(lsp_types::CodeLens {
+ range: annotation_range,
+ command: Some(command),
+ data: None,
+ })
+ }
+ if lens_config.debug && can_debug && client_commands_config.debug_single {
+ let command = command::debug_single(&r);
+ acc.push(lsp_types::CodeLens {
+ range: annotation_range,
+ command: Some(command),
+ data: None,
+ })
+ }
+ }
+ AnnotationKind::HasImpls { pos: file_range, data } => {
+ if !client_commands_config.show_reference {
+ return Ok(());
+ }
+ let line_index = snap.file_line_index(file_range.file_id)?;
+ let annotation_range = range(&line_index, annotation.range);
+ let url = url(snap, file_range.file_id);
+
+ let id = lsp_types::TextDocumentIdentifier { uri: url.clone() };
+
+ let doc_pos = lsp_types::TextDocumentPositionParams::new(id, annotation_range.start);
+
+ let goto_params = lsp_types::request::GotoImplementationParams {
+ text_document_position_params: doc_pos,
+ work_done_progress_params: Default::default(),
+ partial_result_params: Default::default(),
+ };
+
+ let command = data.map(|ranges| {
+ let locations: Vec<lsp_types::Location> = ranges
+ .into_iter()
+ .filter_map(|target| {
+ location(
+ snap,
+ FileRange { file_id: target.file_id, range: target.full_range },
+ )
+ .ok()
+ })
+ .collect();
+
+ command::show_references(
+ implementation_title(locations.len()),
+ &url,
+ annotation_range.start,
+ locations,
+ )
+ });
+
+ acc.push(lsp_types::CodeLens {
+ range: annotation_range,
+ command,
+ data: Some(to_value(lsp_ext::CodeLensResolveData::Impls(goto_params)).unwrap()),
+ })
+ }
+ AnnotationKind::HasReferences { pos: file_range, data } => {
+ if !client_commands_config.show_reference {
+ return Ok(());
+ }
+ let line_index = snap.file_line_index(file_range.file_id)?;
+ let annotation_range = range(&line_index, annotation.range);
+ let url = url(snap, file_range.file_id);
+
+ let id = lsp_types::TextDocumentIdentifier { uri: url.clone() };
+
+ let doc_pos = lsp_types::TextDocumentPositionParams::new(id, annotation_range.start);
+
+ let command = data.map(|ranges| {
+ let locations: Vec<lsp_types::Location> =
+ ranges.into_iter().filter_map(|range| location(snap, range).ok()).collect();
+
+ command::show_references(
+ reference_title(locations.len()),
+ &url,
+ annotation_range.start,
+ locations,
+ )
+ });
+
+ acc.push(lsp_types::CodeLens {
+ range: annotation_range,
+ command,
+ data: Some(to_value(lsp_ext::CodeLensResolveData::References(doc_pos)).unwrap()),
+ })
+ }
+ }
+ Ok(())
+}
+
+pub(crate) mod command {
+ use ide::{FileRange, NavigationTarget};
+ use serde_json::to_value;
+
+ use crate::{
+ global_state::GlobalStateSnapshot,
+ lsp_ext,
+ to_proto::{location, location_link},
+ };
+
+ pub(crate) fn show_references(
+ title: String,
+ uri: &lsp_types::Url,
+ position: lsp_types::Position,
+ locations: Vec<lsp_types::Location>,
+ ) -> lsp_types::Command {
+ // We cannot use the 'editor.action.showReferences' command directly
+ // because that command requires vscode types which we convert in the handler
+ // on the client side.
+
+ lsp_types::Command {
+ title,
+ command: "rust-analyzer.showReferences".into(),
+ arguments: Some(vec![
+ to_value(uri).unwrap(),
+ to_value(position).unwrap(),
+ to_value(locations).unwrap(),
+ ]),
+ }
+ }
+
+ pub(crate) fn run_single(runnable: &lsp_ext::Runnable, title: &str) -> lsp_types::Command {
+ lsp_types::Command {
+ title: title.to_string(),
+ command: "rust-analyzer.runSingle".into(),
+ arguments: Some(vec![to_value(runnable).unwrap()]),
+ }
+ }
+
+ pub(crate) fn debug_single(runnable: &lsp_ext::Runnable) -> lsp_types::Command {
+ lsp_types::Command {
+ title: "Debug".into(),
+ command: "rust-analyzer.debugSingle".into(),
+ arguments: Some(vec![to_value(runnable).unwrap()]),
+ }
+ }
+
+ pub(crate) fn goto_location(
+ snap: &GlobalStateSnapshot,
+ nav: &NavigationTarget,
+ ) -> Option<lsp_types::Command> {
+ let value = if snap.config.location_link() {
+ let link = location_link(snap, None, nav.clone()).ok()?;
+ to_value(link).ok()?
+ } else {
+ let range = FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() };
+ let location = location(snap, range).ok()?;
+ to_value(location).ok()?
+ };
+
+ Some(lsp_types::Command {
+ title: nav.name.to_string(),
+ command: "rust-analyzer.gotoLocation".into(),
+ arguments: Some(vec![value]),
+ })
+ }
+
+ pub(crate) fn trigger_parameter_hints() -> lsp_types::Command {
+ lsp_types::Command {
+ title: "triggerParameterHints".into(),
+ command: "editor.action.triggerParameterHints".into(),
+ arguments: None,
+ }
+ }
+}
+
+pub(crate) fn implementation_title(count: usize) -> String {
+ if count == 1 {
+ "1 implementation".into()
+ } else {
+ format!("{} implementations", count)
+ }
+}
+
+pub(crate) fn reference_title(count: usize) -> String {
+ if count == 1 {
+ "1 reference".into()
+ } else {
+ format!("{} references", count)
+ }
+}
+
+pub(crate) fn markup_content(
+ markup: Markup,
+ kind: ide::HoverDocFormat,
+) -> lsp_types::MarkupContent {
+ let kind = match kind {
+ ide::HoverDocFormat::Markdown => lsp_types::MarkupKind::Markdown,
+ ide::HoverDocFormat::PlainText => lsp_types::MarkupKind::PlainText,
+ };
+ let value = crate::markdown::format_docs(markup.as_str());
+ lsp_types::MarkupContent { kind, value }
+}
+
+pub(crate) fn rename_error(err: RenameError) -> crate::LspError {
+ // This is wrong, but we don't have a better alternative I suppose?
+ // https://github.com/microsoft/language-server-protocol/issues/1341
+ invalid_params_error(err.to_string())
+}
+
+#[cfg(test)]
+mod tests {
+ use std::sync::Arc;
+
+ use ide::Analysis;
+
+ use super::*;
+
+ #[test]
+ fn conv_fold_line_folding_only_fixup() {
+ let text = r#"mod a;
+mod b;
+mod c;
+
+fn main() {
+ if cond {
+ a::do_a();
+ } else {
+ b::do_b();
+ }
+}"#;
+
+ let (analysis, file_id) = Analysis::from_single_file(text.to_string());
+ let folds = analysis.folding_ranges(file_id).unwrap();
+ assert_eq!(folds.len(), 4);
+
+ let line_index = LineIndex {
+ index: Arc::new(ide::LineIndex::new(text)),
+ endings: LineEndings::Unix,
+ encoding: PositionEncoding::Utf16,
+ };
+ let converted: Vec<lsp_types::FoldingRange> =
+ folds.into_iter().map(|it| folding_range(text, &line_index, true, it)).collect();
+
+ let expected_lines = [(0, 2), (4, 10), (5, 6), (7, 9)];
+ assert_eq!(converted.len(), expected_lines.len());
+ for (folding_range, (start_line, end_line)) in converted.iter().zip(expected_lines.iter()) {
+ assert_eq!(folding_range.start_line, *start_line);
+ assert_eq!(folding_range.start_character, None);
+ assert_eq!(folding_range.end_line, *end_line);
+ assert_eq!(folding_range.end_character, None);
+ }
+ }
+
+ // `Url` is not able to parse windows paths on unix machines.
+ #[test]
+ #[cfg(target_os = "windows")]
+ fn test_lowercase_drive_letter() {
+ use std::path::Path;
+
+ let url = url_from_abs_path(Path::new("C:\\Test").try_into().unwrap());
+ assert_eq!(url.to_string(), "file:///c:/Test");
+
+ let url = url_from_abs_path(Path::new(r#"\\localhost\C$\my_dir"#).try_into().unwrap());
+ assert_eq!(url.to_string(), "file://localhost/C$/my_dir");
+ }
+}
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "sourcegen"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+xshell = "0.2.2"
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "stdx"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
- miow = "0.4.0"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+libc = "0.2.135"
+backtrace = { version = "0.3.65", optional = true }
+always-assert = { version = "0.1.2", features = ["log"] }
+# Think twice before adding anything here
+
+[target.'cfg(windows)'.dependencies]
++miow = "0.5.0"
+winapi = { version = "0.3.9", features = ["winerror"] }
+
+[features]
+# Uncomment to enable for the whole crate graph
+# default = [ "backtrace" ]
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "syntax"
+version = "0.0.0"
+description = "Comment and whitespace preserving parser for the Rust language"
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/rust-lang/rust-analyzer"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+cov-mark = "2.0.0-pre.1"
+itertools = "0.10.5"
+rowan = "0.15.10"
+rustc_lexer = { version = "725.0.0", package = "rustc-ap-rustc_lexer" }
+rustc-hash = "1.1.0"
+once_cell = "1.15.0"
+indexmap = "1.9.1"
+smol_str = "0.1.23"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+text-edit = { path = "../text-edit", version = "0.0.0" }
+parser = { path = "../parser", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
+
+[dev-dependencies]
+rayon = "1.5.3"
+expect-test = "1.4.0"
+proc-macro2 = "1.0.47"
+quote = "1.0.20"
+ungrammar = "1.16.1"
+
+test-utils = { path = "../test-utils" }
+sourcegen = { path = "../sourcegen" }
+
+[features]
+in-rust-tree = []
--- /dev/null
- rust-version = "1.57"
+
+[package]
+name = "syntax-fuzz"
+version = "0.0.1"
+publish = false
+edition = "2021"
++rust-version = "1.65"
+
+[package.metadata]
+cargo-fuzz = true
+
+[dependencies]
+syntax = { path = "..", version = "0.0.0" }
+text-edit = { path = "../../text-edit", version = "0.0.0" }
+libfuzzer-sys = { git = "https://github.com/rust-fuzz/libfuzzer-sys.git" }
+
+# Prevent this from interfering with workspaces
+[workspace]
+members = ["."]
+
+[[bin]]
+name = "parser"
+path = "fuzz_targets/parser.rs"
+
+[[bin]]
+name = "reparse"
+path = "fuzz_targets/reparse.rs"
--- /dev/null
- 'trait' Name GenericParamList? (':' TypeBoundList?)? WhereClause?
- AssocItemList
+// Rust Un-Grammar.
+//
+// This grammar specifies the structure of Rust's concrete syntax tree.
+// It does not specify parsing rules (ambiguities, precedence, etc are out of scope).
+// Tokens are processed -- contextual keywords are recognised, compound operators glued.
+//
+// Legend:
+//
+// // -- comment
+// Name = -- non-terminal definition
+// 'ident' -- token (terminal)
+// A B -- sequence
+// A | B -- alternation
+// A* -- zero or more repetition
+// A? -- zero or one repetition
+// (A) -- same as A
+// label:A -- suggested name for field of AST node
+
+//*************************//
+// Names, Paths and Macros //
+//*************************//
+
+Name =
+ 'ident' | 'self'
+
+NameRef =
+ 'ident' | 'int_number' | 'self' | 'super' | 'crate' | 'Self'
+
+Lifetime =
+ 'lifetime_ident'
+
+Path =
+ (qualifier:Path '::')? segment:PathSegment
+
+PathSegment =
+ '::'? NameRef
+| NameRef GenericArgList?
+| NameRef ParamList RetType?
+| '<' PathType ('as' PathType)? '>'
+
+GenericArgList =
+ '::'? '<' (GenericArg (',' GenericArg)* ','?)? '>'
+
+GenericArg =
+ TypeArg
+| AssocTypeArg
+| LifetimeArg
+| ConstArg
+
+TypeArg =
+ Type
+
+AssocTypeArg =
+ NameRef GenericArgList? (':' TypeBoundList | ('=' Type | ConstArg))
+
+LifetimeArg =
+ Lifetime
+
+ConstArg =
+ Expr
+
+MacroCall =
+ Attr* Path '!' TokenTree ';'?
+
+TokenTree =
+ '(' ')'
+| '{' '}'
+| '[' ']'
+
+MacroItems =
+ Item*
+
+MacroStmts =
+ statements:Stmt*
+ Expr?
+
+//*************************//
+// Items //
+//*************************//
+
+SourceFile =
+ 'shebang'?
+ Attr*
+ Item*
+
+Item =
+ Const
+| Enum
+| ExternBlock
+| ExternCrate
+| Fn
+| Impl
+| MacroCall
+| MacroRules
+| MacroDef
+| Module
+| Static
+| Struct
+| Trait
+| TypeAlias
+| Union
+| Use
+
+MacroRules =
+ Attr* Visibility?
+ 'macro_rules' '!' Name
+ TokenTree
+
+MacroDef =
+ Attr* Visibility?
+ 'macro' Name args:TokenTree?
+ body:TokenTree
+
+Module =
+ Attr* Visibility?
+ 'mod' Name
+ (ItemList | ';')
+
+ItemList =
+ '{' Attr* Item* '}'
+
+ExternCrate =
+ Attr* Visibility?
+ 'extern' 'crate' NameRef Rename? ';'
+
+Rename =
+ 'as' (Name | '_')
+
+Use =
+ Attr* Visibility?
+ 'use' UseTree ';'
+
+UseTree =
+ (Path? '::')? ('*' | UseTreeList)
+| Path Rename?
+
+UseTreeList =
+ '{' (UseTree (',' UseTree)* ','?)? '}'
+
+Fn =
+ Attr* Visibility?
+ 'default'? 'const'? 'async'? 'unsafe'? Abi?
+ 'fn' Name GenericParamList? ParamList RetType? WhereClause?
+ (body:BlockExpr | ';')
+
+Abi =
+ 'extern' 'string'?
+
+ParamList =
+ '('(
+ SelfParam
+ | (SelfParam ',')? (Param (',' Param)* ','?)?
+ )')'
+| '|' (Param (',' Param)* ','?)? '|'
+
+SelfParam =
+ Attr* (
+ ('&' Lifetime?)? 'mut'? Name
+ | 'mut'? Name ':' Type
+ )
+
+Param =
+ Attr* (
+ Pat (':' Type)?
+ | Type
+ | '...'
+ )
+
+RetType =
+ '->' Type
+
+TypeAlias =
+ Attr* Visibility?
+ 'default'?
+ 'type' Name GenericParamList? (':' TypeBoundList?)? WhereClause?
+ ('=' Type)? ';'
+
+Struct =
+ Attr* Visibility?
+ 'struct' Name GenericParamList? (
+ WhereClause? (RecordFieldList | ';')
+ | TupleFieldList WhereClause? ';'
+ )
+
+RecordFieldList =
+ '{' fields:(RecordField (',' RecordField)* ','?)? '}'
+
+RecordField =
+ Attr* Visibility?
+ Name ':' Type
+
+TupleFieldList =
+ '(' fields:(TupleField (',' TupleField)* ','?)? ')'
+
+TupleField =
+ Attr* Visibility?
+ Type
+
+FieldList =
+ RecordFieldList
+| TupleFieldList
+
+Enum =
+ Attr* Visibility?
+ 'enum' Name GenericParamList? WhereClause?
+ VariantList
+
+VariantList =
+ '{' (Variant (',' Variant)* ','?)? '}'
+
+Variant =
+ Attr* Visibility?
+ Name FieldList? ('=' Expr)?
+
+Union =
+ Attr* Visibility?
+ 'union' Name GenericParamList? WhereClause?
+ RecordFieldList
+
+// A Data Type.
+//
+// Not used directly in the grammar, but handy to have anyway.
+Adt =
+ Enum
+| Struct
+| Union
+
+Const =
+ Attr* Visibility?
+ 'default'?
+ 'const' (Name | '_') ':' Type
+ ('=' body:Expr)? ';'
+
+Static =
+ Attr* Visibility?
+ 'static' 'mut'? Name ':' Type
+ ('=' body:Expr)? ';'
+
+Trait =
+ Attr* Visibility?
+ 'unsafe'? 'auto'?
++ 'trait' Name GenericParamList?
++ (
++ (':' TypeBoundList?)? WhereClause? AssocItemList
++ | '=' TypeBoundList? WhereClause? ';'
++ )
+
+AssocItemList =
+ '{' Attr* AssocItem* '}'
+
+AssocItem =
+ Const
+| Fn
+| MacroCall
+| TypeAlias
+
+Impl =
+ Attr* Visibility?
+ 'default'? 'unsafe'?
+ 'impl' GenericParamList? ('const'? '!'? trait:Type 'for')? self_ty:Type WhereClause?
+ AssocItemList
+
+ExternBlock =
+ Attr* 'unsafe'? Abi ExternItemList
+
+ExternItemList =
+ '{' Attr* ExternItem* '}'
+
+ExternItem =
+ Fn
+| MacroCall
+| Static
+| TypeAlias
+
+GenericParamList =
+ '<' (GenericParam (',' GenericParam)* ','?)? '>'
+
+GenericParam =
+ ConstParam
+| LifetimeParam
+| TypeParam
+
+TypeParam =
+ Attr* Name (':' TypeBoundList?)?
+ ('=' default_type:Type)?
+
+ConstParam =
+ Attr* 'const' Name ':' Type
+ ('=' default_val:Expr)?
+
+LifetimeParam =
+ Attr* Lifetime (':' TypeBoundList?)?
+
+WhereClause =
+ 'where' predicates:(WherePred (',' WherePred)* ','?)
+
+WherePred =
+ ('for' GenericParamList)? (Lifetime | Type) ':' TypeBoundList?
+
+Visibility =
+ 'pub' ('(' 'in'? Path ')')?
+
+Attr =
+ '#' '!'? '[' Meta ']'
+
+Meta =
+ Path ('=' Expr | TokenTree)?
+
+//****************************//
+// Statements and Expressions //
+//****************************//
+
+Stmt =
+ ';'
+| ExprStmt
+| Item
+| LetStmt
+
+LetStmt =
+ Attr* 'let' Pat (':' Type)?
+ '=' initializer:Expr
+ LetElse?
+ ';'
+
+LetElse =
+ 'else' BlockExpr
+
+ExprStmt =
+ Expr ';'?
+
+Expr =
+ ArrayExpr
+| AwaitExpr
+| BinExpr
+| BlockExpr
+| BoxExpr
+| BreakExpr
+| CallExpr
+| CastExpr
+| ClosureExpr
+| ContinueExpr
+| FieldExpr
+| ForExpr
+| IfExpr
+| IndexExpr
+| Literal
+| LoopExpr
+| MacroExpr
+| MatchExpr
+| MethodCallExpr
+| ParenExpr
+| PathExpr
+| PrefixExpr
+| RangeExpr
+| RecordExpr
+| RefExpr
+| ReturnExpr
+| TryExpr
+| TupleExpr
+| WhileExpr
+| YieldExpr
+| LetExpr
+| UnderscoreExpr
+
+MacroExpr =
+ MacroCall
+
+Literal =
+ Attr* value:(
+ 'int_number' | 'float_number'
+ | 'string' | 'raw_string'
+ | 'byte_string' | 'raw_byte_string'
+ | 'true' | 'false'
+ | 'char' | 'byte'
+ )
+
+PathExpr =
+ Attr* Path
+
+StmtList =
+ '{'
+ Attr*
+ statements:Stmt*
+ tail_expr:Expr?
+ '}'
+
+RefExpr =
+ Attr* '&' ('raw' | 'mut' | 'const') Expr
+
+TryExpr =
+ Attr* Expr '?'
+
+BlockExpr =
+ Attr* Label? ('try' | 'unsafe' | 'async' | 'const') StmtList
+
+PrefixExpr =
+ Attr* op:('-' | '!' | '*') Expr
+
+BinExpr =
+ Attr*
+ lhs:Expr
+ op:(
+ '||' | '&&'
+ | '==' | '!=' | '<=' | '>=' | '<' | '>'
+ | '+' | '*' | '-' | '/' | '%' | '<<' | '>>' | '^' | '|' | '&'
+ | '=' | '+=' | '/=' | '*=' | '%=' | '>>=' | '<<=' | '-=' | '|=' | '&=' | '^='
+ )
+ rhs:Expr
+
+CastExpr =
+ Attr* Expr 'as' Type
+
+ParenExpr =
+ Attr* '(' Attr* Expr ')'
+
+ArrayExpr =
+ Attr* '[' Attr* (
+ (Expr (',' Expr)* ','?)?
+ | Expr ';' Expr
+ ) ']'
+
+IndexExpr =
+ Attr* base:Expr '[' index:Expr ']'
+
+TupleExpr =
+ Attr* '(' Attr* fields:(Expr (',' Expr)* ','?)? ')'
+
+RecordExpr =
+ Path RecordExprFieldList
+
+RecordExprFieldList =
+ '{'
+ Attr*
+ fields:(RecordExprField (',' RecordExprField)* ','?)?
+ ('..' spread:Expr?)?
+ '}'
+
+RecordExprField =
+ Attr* (NameRef ':')? Expr
+
+CallExpr =
+ Attr* Expr ArgList
+
+ArgList =
+ '(' args:(Expr (',' Expr)* ','?)? ')'
+
+MethodCallExpr =
+ Attr* receiver:Expr '.' NameRef GenericArgList? ArgList
+
+FieldExpr =
+ Attr* Expr '.' NameRef
+
+ClosureExpr =
+ Attr* ('for' GenericParamList)? 'static'? 'async'? 'move'? ParamList RetType?
+ body:Expr
+
+IfExpr =
+ Attr* 'if' condition:Expr then_branch:BlockExpr
+ ('else' else_branch:(IfExpr | BlockExpr))?
+
+LoopExpr =
+ Attr* Label? 'loop'
+ loop_body:BlockExpr
+
+ForExpr =
+ Attr* Label? 'for' Pat 'in' iterable:Expr
+ loop_body:BlockExpr
+
+WhileExpr =
+ Attr* Label? 'while' condition:Expr
+ loop_body:BlockExpr
+
+Label =
+ Lifetime ':'
+
+BreakExpr =
+ Attr* 'break' Lifetime? Expr?
+
+ContinueExpr =
+ Attr* 'continue' Lifetime?
+
+RangeExpr =
+ Attr* start:Expr? op:('..' | '..=') end:Expr?
+
+MatchExpr =
+ Attr* 'match' Expr MatchArmList
+
+MatchArmList =
+ '{'
+ Attr*
+ arms:MatchArm*
+ '}'
+
+MatchArm =
+ Attr* Pat guard:MatchGuard? '=>' Expr ','?
+
+MatchGuard =
+ 'if' condition:Expr
+
+ReturnExpr =
+ Attr* 'return' Expr?
+
+YieldExpr =
+ Attr* 'yield' Expr?
+
+LetExpr =
+ Attr* 'let' Pat '=' Expr
+
+UnderscoreExpr =
+ Attr* '_'
+
+AwaitExpr =
+ Attr* Expr '.' 'await'
+
+BoxExpr =
+ Attr* 'box' Expr
+
+//*************************//
+// Types //
+//*************************//
+
+Type =
+ ArrayType
+| DynTraitType
+| FnPtrType
+| ForType
+| ImplTraitType
+| InferType
+| MacroType
+| NeverType
+| ParenType
+| PathType
+| PtrType
+| RefType
+| SliceType
+| TupleType
+
+ParenType =
+ '(' Type ')'
+
+NeverType =
+ '!'
+
+MacroType =
+ MacroCall
+
+PathType =
+ Path
+
+TupleType =
+ '(' fields:(Type (',' Type)* ','?)? ')'
+
+PtrType =
+ '*' ('const' | 'mut') Type
+
+RefType =
+ '&' Lifetime? 'mut'? Type
+
+ArrayType =
+ '[' Type ';' Expr ']'
+
+SliceType =
+ '[' Type ']'
+
+InferType =
+ '_'
+
+FnPtrType =
+ 'const'? 'async'? 'unsafe'? Abi? 'fn' ParamList RetType?
+
+ForType =
+ 'for' GenericParamList Type
+
+ImplTraitType =
+ 'impl' TypeBoundList
+
+DynTraitType =
+ 'dyn' TypeBoundList
+
+TypeBoundList =
+ bounds:(TypeBound ('+' TypeBound)* '+'?)
+
+TypeBound =
+ Lifetime
+| ('?' | '~' 'const')? Type
+
+//************************//
+// Patterns //
+//************************//
+
+Pat =
+ IdentPat
+| BoxPat
+| RestPat
+| LiteralPat
+| MacroPat
+| OrPat
+| ParenPat
+| PathPat
+| WildcardPat
+| RangePat
+| RecordPat
+| RefPat
+| SlicePat
+| TuplePat
+| TupleStructPat
+| ConstBlockPat
+
+LiteralPat =
+ Literal
+
+IdentPat =
+ Attr* 'ref'? 'mut'? Name ('@' Pat)?
+
+WildcardPat =
+ '_'
+
+RangePat =
+ // 1..
+ start:Pat op:('..' | '..=')
+ // 1..2
+ | start:Pat op:('..' | '..=') end:Pat
+ // ..2
+ | op:('..' | '..=') end:Pat
+
+RefPat =
+ '&' 'mut'? Pat
+
+RecordPat =
+ Path RecordPatFieldList
+
+RecordPatFieldList =
+ '{'
+ fields:(RecordPatField (',' RecordPatField)* ','?)?
+ RestPat?
+ '}'
+
+RecordPatField =
+ Attr* (NameRef ':')? Pat
+
+TupleStructPat =
+ Path '(' fields:(Pat (',' Pat)* ','?)? ')'
+
+TuplePat =
+ '(' fields:(Pat (',' Pat)* ','?)? ')'
+
+ParenPat =
+ '(' Pat ')'
+
+SlicePat =
+ '[' (Pat (',' Pat)* ','?)? ']'
+
+PathPat =
+ Path
+
+OrPat =
+ (Pat ('|' Pat)* '|'?)
+
+BoxPat =
+ 'box' Pat
+
+RestPat =
+ Attr* '..'
+
+MacroPat =
+ MacroCall
+
+ConstBlockPat =
+ 'const' BlockExpr
--- /dev/null
+//! Generated by `sourcegen_ast`, do not edit by hand.
+
+#![allow(non_snake_case)]
+use crate::{
+ ast::{self, support, AstChildren, AstNode},
+ SyntaxKind::{self, *},
+ SyntaxNode, SyntaxToken, T,
+};
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Name {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Name {
+ pub fn ident_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ident]) }
+ pub fn self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![self]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct NameRef {
+ pub(crate) syntax: SyntaxNode,
+}
+impl NameRef {
+ pub fn ident_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ident]) }
+ pub fn self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![self]) }
+ pub fn super_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![super]) }
+ pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) }
+ pub fn Self_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![Self]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Lifetime {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Lifetime {
+ pub fn lifetime_ident_token(&self) -> Option<SyntaxToken> {
+ support::token(&self.syntax, T![lifetime_ident])
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Path {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Path {
+ pub fn qualifier(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+ pub fn segment(&self) -> Option<PathSegment> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathSegment {
+ pub(crate) syntax: SyntaxNode,
+}
+impl PathSegment {
+ pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn generic_arg_list(&self) -> Option<GenericArgList> { support::child(&self.syntax) }
+ pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+ pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+ pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
+ pub fn path_type(&self) -> Option<PathType> { support::child(&self.syntax) }
+ pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
+ pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct GenericArgList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl GenericArgList {
+ pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+ pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
+ pub fn generic_args(&self) -> AstChildren<GenericArg> { support::children(&self.syntax) }
+ pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParamList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ParamList {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn self_param(&self) -> Option<SelfParam> { support::child(&self.syntax) }
+ pub fn comma_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![,]) }
+ pub fn params(&self) -> AstChildren<Param> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+ pub fn pipe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![|]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RetType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RetType {
+ pub fn thin_arrow_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![->]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl PathType {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeArg {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TypeArg {
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AssocTypeArg {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasTypeBounds for AssocTypeArg {}
+impl AssocTypeArg {
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn generic_arg_list(&self) -> Option<GenericArgList> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn const_arg(&self) -> Option<ConstArg> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LifetimeArg {
+ pub(crate) syntax: SyntaxNode,
+}
+impl LifetimeArg {
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ConstArg {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ConstArg {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeBoundList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TypeBoundList {
+ pub fn bounds(&self) -> AstChildren<TypeBound> { support::children(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroCall {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MacroCall {}
+impl ast::HasDocComments for MacroCall {}
+impl MacroCall {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+ pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Attr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Attr {
+ pub fn pound_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![#]) }
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn meta(&self) -> Option<Meta> { support::child(&self.syntax) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TokenTree {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TokenTree {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroItems {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasModuleItem for MacroItems {}
+impl MacroItems {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroStmts {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MacroStmts {
+ pub fn statements(&self) -> AstChildren<Stmt> { support::children(&self.syntax) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SourceFile {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for SourceFile {}
+impl ast::HasModuleItem for SourceFile {}
+impl ast::HasDocComments for SourceFile {}
+impl SourceFile {
+ pub fn shebang_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![shebang]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Const {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Const {}
+impl ast::HasName for Const {}
+impl ast::HasVisibility for Const {}
+impl ast::HasDocComments for Const {}
+impl Const {
+ pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Enum {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Enum {}
+impl ast::HasName for Enum {}
+impl ast::HasVisibility for Enum {}
+impl ast::HasGenericParams for Enum {}
+impl ast::HasDocComments for Enum {}
+impl Enum {
+ pub fn enum_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![enum]) }
+ pub fn variant_list(&self) -> Option<VariantList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExternBlock {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ExternBlock {}
+impl ast::HasDocComments for ExternBlock {}
+impl ExternBlock {
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn abi(&self) -> Option<Abi> { support::child(&self.syntax) }
+ pub fn extern_item_list(&self) -> Option<ExternItemList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExternCrate {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ExternCrate {}
+impl ast::HasVisibility for ExternCrate {}
+impl ast::HasDocComments for ExternCrate {}
+impl ExternCrate {
+ pub fn extern_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![extern]) }
+ pub fn crate_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![crate]) }
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn rename(&self) -> Option<Rename> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Fn {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Fn {}
+impl ast::HasName for Fn {}
+impl ast::HasVisibility for Fn {}
+impl ast::HasGenericParams for Fn {}
+impl ast::HasDocComments for Fn {}
+impl Fn {
+ pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn abi(&self) -> Option<Abi> { support::child(&self.syntax) }
+ pub fn fn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![fn]) }
+ pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+ pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+ pub fn body(&self) -> Option<BlockExpr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Impl {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Impl {}
+impl ast::HasVisibility for Impl {}
+impl ast::HasGenericParams for Impl {}
+impl ast::HasDocComments for Impl {}
+impl Impl {
+ pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn impl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![impl]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn assoc_item_list(&self) -> Option<AssocItemList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroRules {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MacroRules {}
+impl ast::HasName for MacroRules {}
+impl ast::HasVisibility for MacroRules {}
+impl ast::HasDocComments for MacroRules {}
+impl MacroRules {
+ pub fn macro_rules_token(&self) -> Option<SyntaxToken> {
+ support::token(&self.syntax, T![macro_rules])
+ }
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+ pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroDef {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MacroDef {}
+impl ast::HasName for MacroDef {}
+impl ast::HasVisibility for MacroDef {}
+impl ast::HasDocComments for MacroDef {}
+impl MacroDef {
+ pub fn macro_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![macro]) }
+ pub fn args(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+ pub fn body(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Module {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Module {}
+impl ast::HasName for Module {}
+impl ast::HasVisibility for Module {}
+impl ast::HasDocComments for Module {}
+impl Module {
+ pub fn mod_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mod]) }
+ pub fn item_list(&self) -> Option<ItemList> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Static {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Static {}
+impl ast::HasName for Static {}
+impl ast::HasVisibility for Static {}
+impl ast::HasDocComments for Static {}
+impl Static {
+ pub fn static_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![static]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Struct {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Struct {}
+impl ast::HasName for Struct {}
+impl ast::HasVisibility for Struct {}
+impl ast::HasGenericParams for Struct {}
+impl ast::HasDocComments for Struct {}
+impl Struct {
+ pub fn struct_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![struct]) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+ pub fn field_list(&self) -> Option<FieldList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Trait {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Trait {}
+impl ast::HasName for Trait {}
+impl ast::HasVisibility for Trait {}
+impl ast::HasGenericParams for Trait {}
+impl ast::HasTypeBounds for Trait {}
+impl ast::HasDocComments for Trait {}
+impl Trait {
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn auto_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![auto]) }
+ pub fn trait_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![trait]) }
+ pub fn assoc_item_list(&self) -> Option<AssocItemList> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeAlias {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TypeAlias {}
+impl ast::HasName for TypeAlias {}
+impl ast::HasVisibility for TypeAlias {}
+impl ast::HasGenericParams for TypeAlias {}
+impl ast::HasTypeBounds for TypeAlias {}
+impl ast::HasDocComments for TypeAlias {}
+impl TypeAlias {
+ pub fn default_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![default]) }
+ pub fn type_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![type]) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Union {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Union {}
+impl ast::HasName for Union {}
+impl ast::HasVisibility for Union {}
+impl ast::HasGenericParams for Union {}
+impl ast::HasDocComments for Union {}
+impl Union {
+ pub fn union_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![union]) }
+ pub fn record_field_list(&self) -> Option<RecordFieldList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Use {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Use {}
+impl ast::HasVisibility for Use {}
+impl ast::HasDocComments for Use {}
+impl Use {
+ pub fn use_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![use]) }
+ pub fn use_tree(&self) -> Option<UseTree> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Visibility {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Visibility {
+ pub fn pub_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![pub]) }
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn in_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![in]) }
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ItemList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ItemList {}
+impl ast::HasModuleItem for ItemList {}
+impl ItemList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Rename {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasName for Rename {}
+impl Rename {
+ pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct UseTree {
+ pub(crate) syntax: SyntaxNode,
+}
+impl UseTree {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn coloncolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![::]) }
+ pub fn star_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![*]) }
+ pub fn use_tree_list(&self) -> Option<UseTreeList> { support::child(&self.syntax) }
+ pub fn rename(&self) -> Option<Rename> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct UseTreeList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl UseTreeList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn use_trees(&self) -> AstChildren<UseTree> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Abi {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Abi {
+ pub fn extern_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![extern]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct GenericParamList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl GenericParamList {
+ pub fn l_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![<]) }
+ pub fn generic_params(&self) -> AstChildren<GenericParam> { support::children(&self.syntax) }
+ pub fn r_angle_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![>]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WhereClause {
+ pub(crate) syntax: SyntaxNode,
+}
+impl WhereClause {
+ pub fn where_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![where]) }
+ pub fn predicates(&self) -> AstChildren<WherePred> { support::children(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BlockExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for BlockExpr {}
+impl BlockExpr {
+ pub fn label(&self) -> Option<Label> { support::child(&self.syntax) }
+ pub fn try_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![try]) }
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn stmt_list(&self) -> Option<StmtList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SelfParam {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for SelfParam {}
+impl ast::HasName for SelfParam {}
+impl SelfParam {
+ pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Param {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Param {}
+impl Param {
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn dotdotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![...]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordFieldList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RecordFieldList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn fields(&self) -> AstChildren<RecordField> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleFieldList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TupleFieldList {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<TupleField> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordField {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RecordField {}
+impl ast::HasName for RecordField {}
+impl ast::HasVisibility for RecordField {}
+impl ast::HasDocComments for RecordField {}
+impl RecordField {
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleField {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TupleField {}
+impl ast::HasVisibility for TupleField {}
+impl ast::HasDocComments for TupleField {}
+impl TupleField {
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct VariantList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl VariantList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn variants(&self) -> AstChildren<Variant> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Variant {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Variant {}
+impl ast::HasName for Variant {}
+impl ast::HasVisibility for Variant {}
+impl ast::HasDocComments for Variant {}
+impl Variant {
+ pub fn field_list(&self) -> Option<FieldList> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AssocItemList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for AssocItemList {}
+impl AssocItemList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn assoc_items(&self) -> AstChildren<AssocItem> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExternItemList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ExternItemList {}
+impl ExternItemList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn extern_items(&self) -> AstChildren<ExternItem> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ConstParam {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ConstParam {}
+impl ast::HasName for ConstParam {}
+impl ConstParam {
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn default_val(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LifetimeParam {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for LifetimeParam {}
+impl ast::HasTypeBounds for LifetimeParam {}
+impl LifetimeParam {
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeParam {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TypeParam {}
+impl ast::HasName for TypeParam {}
+impl ast::HasTypeBounds for TypeParam {}
+impl TypeParam {
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn default_type(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WherePred {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasTypeBounds for WherePred {}
+impl WherePred {
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Meta {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Meta {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn token_tree(&self) -> Option<TokenTree> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ExprStmt {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ExprStmt {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LetStmt {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for LetStmt {}
+impl LetStmt {
+ pub fn let_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![let]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn initializer(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn let_else(&self) -> Option<LetElse> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LetElse {
+ pub(crate) syntax: SyntaxNode,
+}
+impl LetElse {
+ pub fn else_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![else]) }
+ pub fn block_expr(&self) -> Option<BlockExpr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ArrayExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ArrayExpr {}
+impl ArrayExpr {
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn exprs(&self) -> AstChildren<Expr> { support::children(&self.syntax) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AwaitExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for AwaitExpr {}
+impl AwaitExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![.]) }
+ pub fn await_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![await]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BinExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for BinExpr {}
+impl BinExpr {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BoxExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for BoxExpr {}
+impl BoxExpr {
+ pub fn box_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![box]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BreakExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for BreakExpr {}
+impl BreakExpr {
+ pub fn break_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![break]) }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct CallExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for CallExpr {}
+impl ast::HasArgList for CallExpr {}
+impl CallExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct CastExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for CastExpr {}
+impl CastExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn as_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![as]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ClosureExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ClosureExpr {}
+impl ClosureExpr {
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+ pub fn static_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![static]) }
+ pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+ pub fn move_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![move]) }
+ pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+ pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+ pub fn body(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ContinueExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ContinueExpr {}
+impl ContinueExpr {
+ pub fn continue_token(&self) -> Option<SyntaxToken> {
+ support::token(&self.syntax, T![continue])
+ }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct FieldExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for FieldExpr {}
+impl FieldExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![.]) }
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ForExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ForExpr {}
+impl ForExpr {
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn in_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![in]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct IfExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for IfExpr {}
+impl IfExpr {
+ pub fn if_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![if]) }
+ pub fn else_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![else]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct IndexExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for IndexExpr {}
+impl IndexExpr {
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Literal {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for Literal {}
+impl Literal {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LoopExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for LoopExpr {}
+impl ast::HasLoopBody for LoopExpr {}
+impl LoopExpr {
+ pub fn loop_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![loop]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MacroExpr {
+ pub fn macro_call(&self) -> Option<MacroCall> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MatchExpr {}
+impl MatchExpr {
+ pub fn match_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![match]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn match_arm_list(&self) -> Option<MatchArmList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MethodCallExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MethodCallExpr {}
+impl ast::HasArgList for MethodCallExpr {}
+impl MethodCallExpr {
+ pub fn receiver(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn dot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![.]) }
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn generic_arg_list(&self) -> Option<GenericArgList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParenExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ParenExpr {}
+impl ParenExpr {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for PathExpr {}
+impl PathExpr {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PrefixExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for PrefixExpr {}
+impl PrefixExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RangeExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RangeExpr {}
+impl RangeExpr {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RecordExpr {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn record_expr_field_list(&self) -> Option<RecordExprFieldList> {
+ support::child(&self.syntax)
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RefExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RefExpr {}
+impl RefExpr {
+ pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+ pub fn raw_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![raw]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ReturnExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for ReturnExpr {}
+impl ReturnExpr {
+ pub fn return_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![return]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TryExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TryExpr {}
+impl TryExpr {
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for TupleExpr {}
+impl TupleExpr {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<Expr> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WhileExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for WhileExpr {}
+impl WhileExpr {
+ pub fn while_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![while]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct YieldExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for YieldExpr {}
+impl YieldExpr {
+ pub fn yield_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![yield]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LetExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for LetExpr {}
+impl LetExpr {
+ pub fn let_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![let]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn eq_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct UnderscoreExpr {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for UnderscoreExpr {}
+impl UnderscoreExpr {
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct StmtList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for StmtList {}
+impl StmtList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn statements(&self) -> AstChildren<Stmt> { support::children(&self.syntax) }
+ pub fn tail_expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct Label {
+ pub(crate) syntax: SyntaxNode,
+}
+impl Label {
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordExprFieldList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RecordExprFieldList {}
+impl RecordExprFieldList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn fields(&self) -> AstChildren<RecordExprField> { support::children(&self.syntax) }
+ pub fn dotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![..]) }
+ pub fn spread(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordExprField {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RecordExprField {}
+impl RecordExprField {
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ArgList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ArgList {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn args(&self) -> AstChildren<Expr> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchArmList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MatchArmList {}
+impl MatchArmList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn arms(&self) -> AstChildren<MatchArm> { support::children(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchArm {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for MatchArm {}
+impl MatchArm {
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn guard(&self) -> Option<MatchGuard> { support::child(&self.syntax) }
+ pub fn fat_arrow_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![=>]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn comma_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![,]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MatchGuard {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MatchGuard {
+ pub fn if_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![if]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ArrayType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ArrayType {
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn semicolon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![;]) }
+ pub fn expr(&self) -> Option<Expr> { support::child(&self.syntax) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct DynTraitType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl DynTraitType {
+ pub fn dyn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![dyn]) }
+ pub fn type_bound_list(&self) -> Option<TypeBoundList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct FnPtrType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl FnPtrType {
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn async_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![async]) }
+ pub fn unsafe_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![unsafe]) }
+ pub fn abi(&self) -> Option<Abi> { support::child(&self.syntax) }
+ pub fn fn_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![fn]) }
+ pub fn param_list(&self) -> Option<ParamList> { support::child(&self.syntax) }
+ pub fn ret_type(&self) -> Option<RetType> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ForType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ForType {
+ pub fn for_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![for]) }
+ pub fn generic_param_list(&self) -> Option<GenericParamList> { support::child(&self.syntax) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ImplTraitType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ImplTraitType {
+ pub fn impl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![impl]) }
+ pub fn type_bound_list(&self) -> Option<TypeBoundList> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct InferType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl InferType {
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MacroType {
+ pub fn macro_call(&self) -> Option<MacroCall> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct NeverType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl NeverType {
+ pub fn excl_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![!]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParenType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ParenType {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PtrType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl PtrType {
+ pub fn star_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![*]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RefType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RefType {
+ pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SliceType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl SliceType {
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleType {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TupleType {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<Type> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TypeBound {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TypeBound {
+ pub fn lifetime(&self) -> Option<Lifetime> { support::child(&self.syntax) }
+ pub fn question_mark_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![?]) }
+ pub fn tilde_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![~]) }
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn ty(&self) -> Option<Type> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct IdentPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for IdentPat {}
+impl ast::HasName for IdentPat {}
+impl IdentPat {
+ pub fn ref_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![ref]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn at_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![@]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct BoxPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl BoxPat {
+ pub fn box_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![box]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RestPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RestPat {}
+impl RestPat {
+ pub fn dotdot_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![..]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct LiteralPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl LiteralPat {
+ pub fn literal(&self) -> Option<Literal> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MacroPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl MacroPat {
+ pub fn macro_call(&self) -> Option<MacroCall> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct OrPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl OrPat {
+ pub fn pats(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ParenPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ParenPat {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PathPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl PathPat {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct WildcardPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl WildcardPat {
+ pub fn underscore_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![_]) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RangePat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RangePat {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RecordPat {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn record_pat_field_list(&self) -> Option<RecordPatFieldList> {
+ support::child(&self.syntax)
+ }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RefPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RefPat {
+ pub fn amp_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![&]) }
+ pub fn mut_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![mut]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct SlicePat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl SlicePat {
+ pub fn l_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['[']) }
+ pub fn pats(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+ pub fn r_brack_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![']']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TuplePat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TuplePat {
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct TupleStructPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl TupleStructPat {
+ pub fn path(&self) -> Option<Path> { support::child(&self.syntax) }
+ pub fn l_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['(']) }
+ pub fn fields(&self) -> AstChildren<Pat> { support::children(&self.syntax) }
+ pub fn r_paren_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![')']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ConstBlockPat {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ConstBlockPat {
+ pub fn const_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![const]) }
+ pub fn block_expr(&self) -> Option<BlockExpr> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordPatFieldList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl RecordPatFieldList {
+ pub fn l_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['{']) }
+ pub fn fields(&self) -> AstChildren<RecordPatField> { support::children(&self.syntax) }
+ pub fn rest_pat(&self) -> Option<RestPat> { support::child(&self.syntax) }
+ pub fn r_curly_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T!['}']) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct RecordPatField {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for RecordPatField {}
+impl RecordPatField {
+ pub fn name_ref(&self) -> Option<NameRef> { support::child(&self.syntax) }
+ pub fn colon_token(&self) -> Option<SyntaxToken> { support::token(&self.syntax, T![:]) }
+ pub fn pat(&self) -> Option<Pat> { support::child(&self.syntax) }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum GenericArg {
+ TypeArg(TypeArg),
+ AssocTypeArg(AssocTypeArg),
+ LifetimeArg(LifetimeArg),
+ ConstArg(ConstArg),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Type {
+ ArrayType(ArrayType),
+ DynTraitType(DynTraitType),
+ FnPtrType(FnPtrType),
+ ForType(ForType),
+ ImplTraitType(ImplTraitType),
+ InferType(InferType),
+ MacroType(MacroType),
+ NeverType(NeverType),
+ ParenType(ParenType),
+ PathType(PathType),
+ PtrType(PtrType),
+ RefType(RefType),
+ SliceType(SliceType),
+ TupleType(TupleType),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Expr {
+ ArrayExpr(ArrayExpr),
+ AwaitExpr(AwaitExpr),
+ BinExpr(BinExpr),
+ BlockExpr(BlockExpr),
+ BoxExpr(BoxExpr),
+ BreakExpr(BreakExpr),
+ CallExpr(CallExpr),
+ CastExpr(CastExpr),
+ ClosureExpr(ClosureExpr),
+ ContinueExpr(ContinueExpr),
+ FieldExpr(FieldExpr),
+ ForExpr(ForExpr),
+ IfExpr(IfExpr),
+ IndexExpr(IndexExpr),
+ Literal(Literal),
+ LoopExpr(LoopExpr),
+ MacroExpr(MacroExpr),
+ MatchExpr(MatchExpr),
+ MethodCallExpr(MethodCallExpr),
+ ParenExpr(ParenExpr),
+ PathExpr(PathExpr),
+ PrefixExpr(PrefixExpr),
+ RangeExpr(RangeExpr),
+ RecordExpr(RecordExpr),
+ RefExpr(RefExpr),
+ ReturnExpr(ReturnExpr),
+ TryExpr(TryExpr),
+ TupleExpr(TupleExpr),
+ WhileExpr(WhileExpr),
+ YieldExpr(YieldExpr),
+ LetExpr(LetExpr),
+ UnderscoreExpr(UnderscoreExpr),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Item {
+ Const(Const),
+ Enum(Enum),
+ ExternBlock(ExternBlock),
+ ExternCrate(ExternCrate),
+ Fn(Fn),
+ Impl(Impl),
+ MacroCall(MacroCall),
+ MacroRules(MacroRules),
+ MacroDef(MacroDef),
+ Module(Module),
+ Static(Static),
+ Struct(Struct),
+ Trait(Trait),
+ TypeAlias(TypeAlias),
+ Union(Union),
+ Use(Use),
+}
+impl ast::HasAttrs for Item {}
+impl ast::HasDocComments for Item {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Stmt {
+ ExprStmt(ExprStmt),
+ Item(Item),
+ LetStmt(LetStmt),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Pat {
+ IdentPat(IdentPat),
+ BoxPat(BoxPat),
+ RestPat(RestPat),
+ LiteralPat(LiteralPat),
+ MacroPat(MacroPat),
+ OrPat(OrPat),
+ ParenPat(ParenPat),
+ PathPat(PathPat),
+ WildcardPat(WildcardPat),
+ RangePat(RangePat),
+ RecordPat(RecordPat),
+ RefPat(RefPat),
+ SlicePat(SlicePat),
+ TuplePat(TuplePat),
+ TupleStructPat(TupleStructPat),
+ ConstBlockPat(ConstBlockPat),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum FieldList {
+ RecordFieldList(RecordFieldList),
+ TupleFieldList(TupleFieldList),
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum Adt {
+ Enum(Enum),
+ Struct(Struct),
+ Union(Union),
+}
+impl ast::HasAttrs for Adt {}
+impl ast::HasDocComments for Adt {}
+impl ast::HasGenericParams for Adt {}
+impl ast::HasName for Adt {}
+impl ast::HasVisibility for Adt {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum AssocItem {
+ Const(Const),
+ Fn(Fn),
+ MacroCall(MacroCall),
+ TypeAlias(TypeAlias),
+}
+impl ast::HasAttrs for AssocItem {}
+impl ast::HasDocComments for AssocItem {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum ExternItem {
+ Fn(Fn),
+ MacroCall(MacroCall),
+ Static(Static),
+ TypeAlias(TypeAlias),
+}
+impl ast::HasAttrs for ExternItem {}
+impl ast::HasDocComments for ExternItem {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub enum GenericParam {
+ ConstParam(ConstParam),
+ LifetimeParam(LifetimeParam),
+ TypeParam(TypeParam),
+}
+impl ast::HasAttrs for GenericParam {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasArgList {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasArgList for AnyHasArgList {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasAttrs {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasAttrs for AnyHasAttrs {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasDocComments {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasDocComments for AnyHasDocComments {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasGenericParams {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasGenericParams for AnyHasGenericParams {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasLoopBody {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasLoopBody for AnyHasLoopBody {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasModuleItem {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasModuleItem for AnyHasModuleItem {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasName {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasName for AnyHasName {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasTypeBounds {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasTypeBounds for AnyHasTypeBounds {}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct AnyHasVisibility {
+ pub(crate) syntax: SyntaxNode,
+}
+impl ast::HasVisibility for AnyHasVisibility {}
+impl AstNode for Name {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == NAME }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for NameRef {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == NAME_REF }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Lifetime {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Path {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PATH }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathSegment {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_SEGMENT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for GenericArgList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == GENERIC_ARG_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParamList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PARAM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RetType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RET_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeArg {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_ARG }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for AssocTypeArg {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ASSOC_TYPE_ARG }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LifetimeArg {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME_ARG }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ConstArg {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_ARG }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeBoundList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_BOUND_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroCall {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_CALL }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Attr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ATTR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TokenTree {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TOKEN_TREE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroItems {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_ITEMS }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroStmts {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_STMTS }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SourceFile {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == SOURCE_FILE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Const {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CONST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Enum {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ENUM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExternBlock {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_BLOCK }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExternCrate {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_CRATE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Fn {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FN }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Impl {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == IMPL }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroRules {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_RULES }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroDef {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_DEF }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Module {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MODULE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Static {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == STATIC }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Struct {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == STRUCT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Trait {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TRAIT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeAlias {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_ALIAS }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Union {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == UNION }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Use {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == USE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Visibility {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == VISIBILITY }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ItemList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ITEM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Rename {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RENAME }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for UseTree {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == USE_TREE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for UseTreeList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == USE_TREE_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Abi {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ABI }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for GenericParamList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == GENERIC_PARAM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WhereClause {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == WHERE_CLAUSE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BlockExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BLOCK_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SelfParam {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == SELF_PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Param {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordFieldList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_FIELD_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleFieldList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_FIELD_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordField {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_FIELD }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleField {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_FIELD }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for VariantList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == VARIANT_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Variant {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == VARIANT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for AssocItemList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ASSOC_ITEM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExternItemList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == EXTERN_ITEM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ConstParam {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LifetimeParam {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LIFETIME_PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeParam {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_PARAM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WherePred {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == WHERE_PRED }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Meta {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == META }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ExprStmt {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == EXPR_STMT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LetStmt {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LET_STMT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LetElse {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LET_ELSE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ArrayExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ARRAY_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for AwaitExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == AWAIT_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BinExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BIN_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BoxExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BOX_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BreakExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BREAK_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for CallExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CALL_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for CastExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CAST_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ClosureExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CLOSURE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ContinueExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CONTINUE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for FieldExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FIELD_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ForExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for IfExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == IF_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for IndexExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == INDEX_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Literal {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LITERAL }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LoopExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LOOP_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MethodCallExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == METHOD_CALL_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParenExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PrefixExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PREFIX_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RangeExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RANGE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RefExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == REF_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ReturnExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RETURN_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TryExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TRY_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WhileExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == WHILE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for YieldExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == YIELD_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LetExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LET_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for UnderscoreExpr {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == UNDERSCORE_EXPR }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for StmtList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == STMT_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for Label {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LABEL }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordExprFieldList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR_FIELD_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordExprField {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_EXPR_FIELD }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ArgList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ARG_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchArmList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_ARM_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchArm {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_ARM }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MatchGuard {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MATCH_GUARD }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ArrayType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == ARRAY_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for DynTraitType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == DYN_TRAIT_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for FnPtrType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FN_PTR_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ForType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == FOR_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ImplTraitType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == IMPL_TRAIT_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for InferType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == INFER_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for NeverType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == NEVER_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParenType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PtrType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PTR_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RefType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == REF_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SliceType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == SLICE_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleType {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_TYPE }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TypeBound {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TYPE_BOUND }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for IdentPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == IDENT_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for BoxPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == BOX_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RestPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == REST_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for LiteralPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == LITERAL_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for MacroPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == MACRO_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for OrPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == OR_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ParenPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PAREN_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for PathPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == PATH_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for WildcardPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == WILDCARD_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RangePat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RANGE_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RefPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == REF_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for SlicePat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == SLICE_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TuplePat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for TupleStructPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == TUPLE_STRUCT_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for ConstBlockPat {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == CONST_BLOCK_PAT }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordPatFieldList {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT_FIELD_LIST }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AstNode for RecordPatField {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == RECORD_PAT_FIELD }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) {
+ Some(Self { syntax })
+ } else {
+ None
+ }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl From<TypeArg> for GenericArg {
+ fn from(node: TypeArg) -> GenericArg { GenericArg::TypeArg(node) }
+}
+impl From<AssocTypeArg> for GenericArg {
+ fn from(node: AssocTypeArg) -> GenericArg { GenericArg::AssocTypeArg(node) }
+}
+impl From<LifetimeArg> for GenericArg {
+ fn from(node: LifetimeArg) -> GenericArg { GenericArg::LifetimeArg(node) }
+}
+impl From<ConstArg> for GenericArg {
+ fn from(node: ConstArg) -> GenericArg { GenericArg::ConstArg(node) }
+}
+impl AstNode for GenericArg {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(kind, TYPE_ARG | ASSOC_TYPE_ARG | LIFETIME_ARG | CONST_ARG)
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ TYPE_ARG => GenericArg::TypeArg(TypeArg { syntax }),
+ ASSOC_TYPE_ARG => GenericArg::AssocTypeArg(AssocTypeArg { syntax }),
+ LIFETIME_ARG => GenericArg::LifetimeArg(LifetimeArg { syntax }),
+ CONST_ARG => GenericArg::ConstArg(ConstArg { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ GenericArg::TypeArg(it) => &it.syntax,
+ GenericArg::AssocTypeArg(it) => &it.syntax,
+ GenericArg::LifetimeArg(it) => &it.syntax,
+ GenericArg::ConstArg(it) => &it.syntax,
+ }
+ }
+}
+impl From<ArrayType> for Type {
+ fn from(node: ArrayType) -> Type { Type::ArrayType(node) }
+}
+impl From<DynTraitType> for Type {
+ fn from(node: DynTraitType) -> Type { Type::DynTraitType(node) }
+}
+impl From<FnPtrType> for Type {
+ fn from(node: FnPtrType) -> Type { Type::FnPtrType(node) }
+}
+impl From<ForType> for Type {
+ fn from(node: ForType) -> Type { Type::ForType(node) }
+}
+impl From<ImplTraitType> for Type {
+ fn from(node: ImplTraitType) -> Type { Type::ImplTraitType(node) }
+}
+impl From<InferType> for Type {
+ fn from(node: InferType) -> Type { Type::InferType(node) }
+}
+impl From<MacroType> for Type {
+ fn from(node: MacroType) -> Type { Type::MacroType(node) }
+}
+impl From<NeverType> for Type {
+ fn from(node: NeverType) -> Type { Type::NeverType(node) }
+}
+impl From<ParenType> for Type {
+ fn from(node: ParenType) -> Type { Type::ParenType(node) }
+}
+impl From<PathType> for Type {
+ fn from(node: PathType) -> Type { Type::PathType(node) }
+}
+impl From<PtrType> for Type {
+ fn from(node: PtrType) -> Type { Type::PtrType(node) }
+}
+impl From<RefType> for Type {
+ fn from(node: RefType) -> Type { Type::RefType(node) }
+}
+impl From<SliceType> for Type {
+ fn from(node: SliceType) -> Type { Type::SliceType(node) }
+}
+impl From<TupleType> for Type {
+ fn from(node: TupleType) -> Type { Type::TupleType(node) }
+}
+impl AstNode for Type {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(
+ kind,
+ ARRAY_TYPE
+ | DYN_TRAIT_TYPE
+ | FN_PTR_TYPE
+ | FOR_TYPE
+ | IMPL_TRAIT_TYPE
+ | INFER_TYPE
+ | MACRO_TYPE
+ | NEVER_TYPE
+ | PAREN_TYPE
+ | PATH_TYPE
+ | PTR_TYPE
+ | REF_TYPE
+ | SLICE_TYPE
+ | TUPLE_TYPE
+ )
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ ARRAY_TYPE => Type::ArrayType(ArrayType { syntax }),
+ DYN_TRAIT_TYPE => Type::DynTraitType(DynTraitType { syntax }),
+ FN_PTR_TYPE => Type::FnPtrType(FnPtrType { syntax }),
+ FOR_TYPE => Type::ForType(ForType { syntax }),
+ IMPL_TRAIT_TYPE => Type::ImplTraitType(ImplTraitType { syntax }),
+ INFER_TYPE => Type::InferType(InferType { syntax }),
+ MACRO_TYPE => Type::MacroType(MacroType { syntax }),
+ NEVER_TYPE => Type::NeverType(NeverType { syntax }),
+ PAREN_TYPE => Type::ParenType(ParenType { syntax }),
+ PATH_TYPE => Type::PathType(PathType { syntax }),
+ PTR_TYPE => Type::PtrType(PtrType { syntax }),
+ REF_TYPE => Type::RefType(RefType { syntax }),
+ SLICE_TYPE => Type::SliceType(SliceType { syntax }),
+ TUPLE_TYPE => Type::TupleType(TupleType { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Type::ArrayType(it) => &it.syntax,
+ Type::DynTraitType(it) => &it.syntax,
+ Type::FnPtrType(it) => &it.syntax,
+ Type::ForType(it) => &it.syntax,
+ Type::ImplTraitType(it) => &it.syntax,
+ Type::InferType(it) => &it.syntax,
+ Type::MacroType(it) => &it.syntax,
+ Type::NeverType(it) => &it.syntax,
+ Type::ParenType(it) => &it.syntax,
+ Type::PathType(it) => &it.syntax,
+ Type::PtrType(it) => &it.syntax,
+ Type::RefType(it) => &it.syntax,
+ Type::SliceType(it) => &it.syntax,
+ Type::TupleType(it) => &it.syntax,
+ }
+ }
+}
+impl From<ArrayExpr> for Expr {
+ fn from(node: ArrayExpr) -> Expr { Expr::ArrayExpr(node) }
+}
+impl From<AwaitExpr> for Expr {
+ fn from(node: AwaitExpr) -> Expr { Expr::AwaitExpr(node) }
+}
+impl From<BinExpr> for Expr {
+ fn from(node: BinExpr) -> Expr { Expr::BinExpr(node) }
+}
+impl From<BlockExpr> for Expr {
+ fn from(node: BlockExpr) -> Expr { Expr::BlockExpr(node) }
+}
+impl From<BoxExpr> for Expr {
+ fn from(node: BoxExpr) -> Expr { Expr::BoxExpr(node) }
+}
+impl From<BreakExpr> for Expr {
+ fn from(node: BreakExpr) -> Expr { Expr::BreakExpr(node) }
+}
+impl From<CallExpr> for Expr {
+ fn from(node: CallExpr) -> Expr { Expr::CallExpr(node) }
+}
+impl From<CastExpr> for Expr {
+ fn from(node: CastExpr) -> Expr { Expr::CastExpr(node) }
+}
+impl From<ClosureExpr> for Expr {
+ fn from(node: ClosureExpr) -> Expr { Expr::ClosureExpr(node) }
+}
+impl From<ContinueExpr> for Expr {
+ fn from(node: ContinueExpr) -> Expr { Expr::ContinueExpr(node) }
+}
+impl From<FieldExpr> for Expr {
+ fn from(node: FieldExpr) -> Expr { Expr::FieldExpr(node) }
+}
+impl From<ForExpr> for Expr {
+ fn from(node: ForExpr) -> Expr { Expr::ForExpr(node) }
+}
+impl From<IfExpr> for Expr {
+ fn from(node: IfExpr) -> Expr { Expr::IfExpr(node) }
+}
+impl From<IndexExpr> for Expr {
+ fn from(node: IndexExpr) -> Expr { Expr::IndexExpr(node) }
+}
+impl From<Literal> for Expr {
+ fn from(node: Literal) -> Expr { Expr::Literal(node) }
+}
+impl From<LoopExpr> for Expr {
+ fn from(node: LoopExpr) -> Expr { Expr::LoopExpr(node) }
+}
+impl From<MacroExpr> for Expr {
+ fn from(node: MacroExpr) -> Expr { Expr::MacroExpr(node) }
+}
+impl From<MatchExpr> for Expr {
+ fn from(node: MatchExpr) -> Expr { Expr::MatchExpr(node) }
+}
+impl From<MethodCallExpr> for Expr {
+ fn from(node: MethodCallExpr) -> Expr { Expr::MethodCallExpr(node) }
+}
+impl From<ParenExpr> for Expr {
+ fn from(node: ParenExpr) -> Expr { Expr::ParenExpr(node) }
+}
+impl From<PathExpr> for Expr {
+ fn from(node: PathExpr) -> Expr { Expr::PathExpr(node) }
+}
+impl From<PrefixExpr> for Expr {
+ fn from(node: PrefixExpr) -> Expr { Expr::PrefixExpr(node) }
+}
+impl From<RangeExpr> for Expr {
+ fn from(node: RangeExpr) -> Expr { Expr::RangeExpr(node) }
+}
+impl From<RecordExpr> for Expr {
+ fn from(node: RecordExpr) -> Expr { Expr::RecordExpr(node) }
+}
+impl From<RefExpr> for Expr {
+ fn from(node: RefExpr) -> Expr { Expr::RefExpr(node) }
+}
+impl From<ReturnExpr> for Expr {
+ fn from(node: ReturnExpr) -> Expr { Expr::ReturnExpr(node) }
+}
+impl From<TryExpr> for Expr {
+ fn from(node: TryExpr) -> Expr { Expr::TryExpr(node) }
+}
+impl From<TupleExpr> for Expr {
+ fn from(node: TupleExpr) -> Expr { Expr::TupleExpr(node) }
+}
+impl From<WhileExpr> for Expr {
+ fn from(node: WhileExpr) -> Expr { Expr::WhileExpr(node) }
+}
+impl From<YieldExpr> for Expr {
+ fn from(node: YieldExpr) -> Expr { Expr::YieldExpr(node) }
+}
+impl From<LetExpr> for Expr {
+ fn from(node: LetExpr) -> Expr { Expr::LetExpr(node) }
+}
+impl From<UnderscoreExpr> for Expr {
+ fn from(node: UnderscoreExpr) -> Expr { Expr::UnderscoreExpr(node) }
+}
+impl AstNode for Expr {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(
+ kind,
+ ARRAY_EXPR
+ | AWAIT_EXPR
+ | BIN_EXPR
+ | BLOCK_EXPR
+ | BOX_EXPR
+ | BREAK_EXPR
+ | CALL_EXPR
+ | CAST_EXPR
+ | CLOSURE_EXPR
+ | CONTINUE_EXPR
+ | FIELD_EXPR
+ | FOR_EXPR
+ | IF_EXPR
+ | INDEX_EXPR
+ | LITERAL
+ | LOOP_EXPR
+ | MACRO_EXPR
+ | MATCH_EXPR
+ | METHOD_CALL_EXPR
+ | PAREN_EXPR
+ | PATH_EXPR
+ | PREFIX_EXPR
+ | RANGE_EXPR
+ | RECORD_EXPR
+ | REF_EXPR
+ | RETURN_EXPR
+ | TRY_EXPR
+ | TUPLE_EXPR
+ | WHILE_EXPR
+ | YIELD_EXPR
+ | LET_EXPR
+ | UNDERSCORE_EXPR
+ )
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ ARRAY_EXPR => Expr::ArrayExpr(ArrayExpr { syntax }),
+ AWAIT_EXPR => Expr::AwaitExpr(AwaitExpr { syntax }),
+ BIN_EXPR => Expr::BinExpr(BinExpr { syntax }),
+ BLOCK_EXPR => Expr::BlockExpr(BlockExpr { syntax }),
+ BOX_EXPR => Expr::BoxExpr(BoxExpr { syntax }),
+ BREAK_EXPR => Expr::BreakExpr(BreakExpr { syntax }),
+ CALL_EXPR => Expr::CallExpr(CallExpr { syntax }),
+ CAST_EXPR => Expr::CastExpr(CastExpr { syntax }),
+ CLOSURE_EXPR => Expr::ClosureExpr(ClosureExpr { syntax }),
+ CONTINUE_EXPR => Expr::ContinueExpr(ContinueExpr { syntax }),
+ FIELD_EXPR => Expr::FieldExpr(FieldExpr { syntax }),
+ FOR_EXPR => Expr::ForExpr(ForExpr { syntax }),
+ IF_EXPR => Expr::IfExpr(IfExpr { syntax }),
+ INDEX_EXPR => Expr::IndexExpr(IndexExpr { syntax }),
+ LITERAL => Expr::Literal(Literal { syntax }),
+ LOOP_EXPR => Expr::LoopExpr(LoopExpr { syntax }),
+ MACRO_EXPR => Expr::MacroExpr(MacroExpr { syntax }),
+ MATCH_EXPR => Expr::MatchExpr(MatchExpr { syntax }),
+ METHOD_CALL_EXPR => Expr::MethodCallExpr(MethodCallExpr { syntax }),
+ PAREN_EXPR => Expr::ParenExpr(ParenExpr { syntax }),
+ PATH_EXPR => Expr::PathExpr(PathExpr { syntax }),
+ PREFIX_EXPR => Expr::PrefixExpr(PrefixExpr { syntax }),
+ RANGE_EXPR => Expr::RangeExpr(RangeExpr { syntax }),
+ RECORD_EXPR => Expr::RecordExpr(RecordExpr { syntax }),
+ REF_EXPR => Expr::RefExpr(RefExpr { syntax }),
+ RETURN_EXPR => Expr::ReturnExpr(ReturnExpr { syntax }),
+ TRY_EXPR => Expr::TryExpr(TryExpr { syntax }),
+ TUPLE_EXPR => Expr::TupleExpr(TupleExpr { syntax }),
+ WHILE_EXPR => Expr::WhileExpr(WhileExpr { syntax }),
+ YIELD_EXPR => Expr::YieldExpr(YieldExpr { syntax }),
+ LET_EXPR => Expr::LetExpr(LetExpr { syntax }),
+ UNDERSCORE_EXPR => Expr::UnderscoreExpr(UnderscoreExpr { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Expr::ArrayExpr(it) => &it.syntax,
+ Expr::AwaitExpr(it) => &it.syntax,
+ Expr::BinExpr(it) => &it.syntax,
+ Expr::BlockExpr(it) => &it.syntax,
+ Expr::BoxExpr(it) => &it.syntax,
+ Expr::BreakExpr(it) => &it.syntax,
+ Expr::CallExpr(it) => &it.syntax,
+ Expr::CastExpr(it) => &it.syntax,
+ Expr::ClosureExpr(it) => &it.syntax,
+ Expr::ContinueExpr(it) => &it.syntax,
+ Expr::FieldExpr(it) => &it.syntax,
+ Expr::ForExpr(it) => &it.syntax,
+ Expr::IfExpr(it) => &it.syntax,
+ Expr::IndexExpr(it) => &it.syntax,
+ Expr::Literal(it) => &it.syntax,
+ Expr::LoopExpr(it) => &it.syntax,
+ Expr::MacroExpr(it) => &it.syntax,
+ Expr::MatchExpr(it) => &it.syntax,
+ Expr::MethodCallExpr(it) => &it.syntax,
+ Expr::ParenExpr(it) => &it.syntax,
+ Expr::PathExpr(it) => &it.syntax,
+ Expr::PrefixExpr(it) => &it.syntax,
+ Expr::RangeExpr(it) => &it.syntax,
+ Expr::RecordExpr(it) => &it.syntax,
+ Expr::RefExpr(it) => &it.syntax,
+ Expr::ReturnExpr(it) => &it.syntax,
+ Expr::TryExpr(it) => &it.syntax,
+ Expr::TupleExpr(it) => &it.syntax,
+ Expr::WhileExpr(it) => &it.syntax,
+ Expr::YieldExpr(it) => &it.syntax,
+ Expr::LetExpr(it) => &it.syntax,
+ Expr::UnderscoreExpr(it) => &it.syntax,
+ }
+ }
+}
+impl From<Const> for Item {
+ fn from(node: Const) -> Item { Item::Const(node) }
+}
+impl From<Enum> for Item {
+ fn from(node: Enum) -> Item { Item::Enum(node) }
+}
+impl From<ExternBlock> for Item {
+ fn from(node: ExternBlock) -> Item { Item::ExternBlock(node) }
+}
+impl From<ExternCrate> for Item {
+ fn from(node: ExternCrate) -> Item { Item::ExternCrate(node) }
+}
+impl From<Fn> for Item {
+ fn from(node: Fn) -> Item { Item::Fn(node) }
+}
+impl From<Impl> for Item {
+ fn from(node: Impl) -> Item { Item::Impl(node) }
+}
+impl From<MacroCall> for Item {
+ fn from(node: MacroCall) -> Item { Item::MacroCall(node) }
+}
+impl From<MacroRules> for Item {
+ fn from(node: MacroRules) -> Item { Item::MacroRules(node) }
+}
+impl From<MacroDef> for Item {
+ fn from(node: MacroDef) -> Item { Item::MacroDef(node) }
+}
+impl From<Module> for Item {
+ fn from(node: Module) -> Item { Item::Module(node) }
+}
+impl From<Static> for Item {
+ fn from(node: Static) -> Item { Item::Static(node) }
+}
+impl From<Struct> for Item {
+ fn from(node: Struct) -> Item { Item::Struct(node) }
+}
+impl From<Trait> for Item {
+ fn from(node: Trait) -> Item { Item::Trait(node) }
+}
+impl From<TypeAlias> for Item {
+ fn from(node: TypeAlias) -> Item { Item::TypeAlias(node) }
+}
+impl From<Union> for Item {
+ fn from(node: Union) -> Item { Item::Union(node) }
+}
+impl From<Use> for Item {
+ fn from(node: Use) -> Item { Item::Use(node) }
+}
+impl AstNode for Item {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(
+ kind,
+ CONST
+ | ENUM
+ | EXTERN_BLOCK
+ | EXTERN_CRATE
+ | FN
+ | IMPL
+ | MACRO_CALL
+ | MACRO_RULES
+ | MACRO_DEF
+ | MODULE
+ | STATIC
+ | STRUCT
+ | TRAIT
+ | TYPE_ALIAS
+ | UNION
+ | USE
+ )
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ CONST => Item::Const(Const { syntax }),
+ ENUM => Item::Enum(Enum { syntax }),
+ EXTERN_BLOCK => Item::ExternBlock(ExternBlock { syntax }),
+ EXTERN_CRATE => Item::ExternCrate(ExternCrate { syntax }),
+ FN => Item::Fn(Fn { syntax }),
+ IMPL => Item::Impl(Impl { syntax }),
+ MACRO_CALL => Item::MacroCall(MacroCall { syntax }),
+ MACRO_RULES => Item::MacroRules(MacroRules { syntax }),
+ MACRO_DEF => Item::MacroDef(MacroDef { syntax }),
+ MODULE => Item::Module(Module { syntax }),
+ STATIC => Item::Static(Static { syntax }),
+ STRUCT => Item::Struct(Struct { syntax }),
+ TRAIT => Item::Trait(Trait { syntax }),
+ TYPE_ALIAS => Item::TypeAlias(TypeAlias { syntax }),
+ UNION => Item::Union(Union { syntax }),
+ USE => Item::Use(Use { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Item::Const(it) => &it.syntax,
+ Item::Enum(it) => &it.syntax,
+ Item::ExternBlock(it) => &it.syntax,
+ Item::ExternCrate(it) => &it.syntax,
+ Item::Fn(it) => &it.syntax,
+ Item::Impl(it) => &it.syntax,
+ Item::MacroCall(it) => &it.syntax,
+ Item::MacroRules(it) => &it.syntax,
+ Item::MacroDef(it) => &it.syntax,
+ Item::Module(it) => &it.syntax,
+ Item::Static(it) => &it.syntax,
+ Item::Struct(it) => &it.syntax,
+ Item::Trait(it) => &it.syntax,
+ Item::TypeAlias(it) => &it.syntax,
+ Item::Union(it) => &it.syntax,
+ Item::Use(it) => &it.syntax,
+ }
+ }
+}
+impl From<ExprStmt> for Stmt {
+ fn from(node: ExprStmt) -> Stmt { Stmt::ExprStmt(node) }
+}
+impl From<Item> for Stmt {
+ fn from(node: Item) -> Stmt { Stmt::Item(node) }
+}
+impl From<LetStmt> for Stmt {
+ fn from(node: LetStmt) -> Stmt { Stmt::LetStmt(node) }
+}
+impl From<IdentPat> for Pat {
+ fn from(node: IdentPat) -> Pat { Pat::IdentPat(node) }
+}
+impl From<BoxPat> for Pat {
+ fn from(node: BoxPat) -> Pat { Pat::BoxPat(node) }
+}
+impl From<RestPat> for Pat {
+ fn from(node: RestPat) -> Pat { Pat::RestPat(node) }
+}
+impl From<LiteralPat> for Pat {
+ fn from(node: LiteralPat) -> Pat { Pat::LiteralPat(node) }
+}
+impl From<MacroPat> for Pat {
+ fn from(node: MacroPat) -> Pat { Pat::MacroPat(node) }
+}
+impl From<OrPat> for Pat {
+ fn from(node: OrPat) -> Pat { Pat::OrPat(node) }
+}
+impl From<ParenPat> for Pat {
+ fn from(node: ParenPat) -> Pat { Pat::ParenPat(node) }
+}
+impl From<PathPat> for Pat {
+ fn from(node: PathPat) -> Pat { Pat::PathPat(node) }
+}
+impl From<WildcardPat> for Pat {
+ fn from(node: WildcardPat) -> Pat { Pat::WildcardPat(node) }
+}
+impl From<RangePat> for Pat {
+ fn from(node: RangePat) -> Pat { Pat::RangePat(node) }
+}
+impl From<RecordPat> for Pat {
+ fn from(node: RecordPat) -> Pat { Pat::RecordPat(node) }
+}
+impl From<RefPat> for Pat {
+ fn from(node: RefPat) -> Pat { Pat::RefPat(node) }
+}
+impl From<SlicePat> for Pat {
+ fn from(node: SlicePat) -> Pat { Pat::SlicePat(node) }
+}
+impl From<TuplePat> for Pat {
+ fn from(node: TuplePat) -> Pat { Pat::TuplePat(node) }
+}
+impl From<TupleStructPat> for Pat {
+ fn from(node: TupleStructPat) -> Pat { Pat::TupleStructPat(node) }
+}
+impl From<ConstBlockPat> for Pat {
+ fn from(node: ConstBlockPat) -> Pat { Pat::ConstBlockPat(node) }
+}
+impl AstNode for Pat {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(
+ kind,
+ IDENT_PAT
+ | BOX_PAT
+ | REST_PAT
+ | LITERAL_PAT
+ | MACRO_PAT
+ | OR_PAT
+ | PAREN_PAT
+ | PATH_PAT
+ | WILDCARD_PAT
+ | RANGE_PAT
+ | RECORD_PAT
+ | REF_PAT
+ | SLICE_PAT
+ | TUPLE_PAT
+ | TUPLE_STRUCT_PAT
+ | CONST_BLOCK_PAT
+ )
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ IDENT_PAT => Pat::IdentPat(IdentPat { syntax }),
+ BOX_PAT => Pat::BoxPat(BoxPat { syntax }),
+ REST_PAT => Pat::RestPat(RestPat { syntax }),
+ LITERAL_PAT => Pat::LiteralPat(LiteralPat { syntax }),
+ MACRO_PAT => Pat::MacroPat(MacroPat { syntax }),
+ OR_PAT => Pat::OrPat(OrPat { syntax }),
+ PAREN_PAT => Pat::ParenPat(ParenPat { syntax }),
+ PATH_PAT => Pat::PathPat(PathPat { syntax }),
+ WILDCARD_PAT => Pat::WildcardPat(WildcardPat { syntax }),
+ RANGE_PAT => Pat::RangePat(RangePat { syntax }),
+ RECORD_PAT => Pat::RecordPat(RecordPat { syntax }),
+ REF_PAT => Pat::RefPat(RefPat { syntax }),
+ SLICE_PAT => Pat::SlicePat(SlicePat { syntax }),
+ TUPLE_PAT => Pat::TuplePat(TuplePat { syntax }),
+ TUPLE_STRUCT_PAT => Pat::TupleStructPat(TupleStructPat { syntax }),
+ CONST_BLOCK_PAT => Pat::ConstBlockPat(ConstBlockPat { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Pat::IdentPat(it) => &it.syntax,
+ Pat::BoxPat(it) => &it.syntax,
+ Pat::RestPat(it) => &it.syntax,
+ Pat::LiteralPat(it) => &it.syntax,
+ Pat::MacroPat(it) => &it.syntax,
+ Pat::OrPat(it) => &it.syntax,
+ Pat::ParenPat(it) => &it.syntax,
+ Pat::PathPat(it) => &it.syntax,
+ Pat::WildcardPat(it) => &it.syntax,
+ Pat::RangePat(it) => &it.syntax,
+ Pat::RecordPat(it) => &it.syntax,
+ Pat::RefPat(it) => &it.syntax,
+ Pat::SlicePat(it) => &it.syntax,
+ Pat::TuplePat(it) => &it.syntax,
+ Pat::TupleStructPat(it) => &it.syntax,
+ Pat::ConstBlockPat(it) => &it.syntax,
+ }
+ }
+}
+impl From<RecordFieldList> for FieldList {
+ fn from(node: RecordFieldList) -> FieldList { FieldList::RecordFieldList(node) }
+}
+impl From<TupleFieldList> for FieldList {
+ fn from(node: TupleFieldList) -> FieldList { FieldList::TupleFieldList(node) }
+}
+impl AstNode for FieldList {
+ fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, RECORD_FIELD_LIST | TUPLE_FIELD_LIST) }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ RECORD_FIELD_LIST => FieldList::RecordFieldList(RecordFieldList { syntax }),
+ TUPLE_FIELD_LIST => FieldList::TupleFieldList(TupleFieldList { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ FieldList::RecordFieldList(it) => &it.syntax,
+ FieldList::TupleFieldList(it) => &it.syntax,
+ }
+ }
+}
+impl From<Enum> for Adt {
+ fn from(node: Enum) -> Adt { Adt::Enum(node) }
+}
+impl From<Struct> for Adt {
+ fn from(node: Struct) -> Adt { Adt::Struct(node) }
+}
+impl From<Union> for Adt {
+ fn from(node: Union) -> Adt { Adt::Union(node) }
+}
+impl AstNode for Adt {
+ fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, ENUM | STRUCT | UNION) }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ ENUM => Adt::Enum(Enum { syntax }),
+ STRUCT => Adt::Struct(Struct { syntax }),
+ UNION => Adt::Union(Union { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ Adt::Enum(it) => &it.syntax,
+ Adt::Struct(it) => &it.syntax,
+ Adt::Union(it) => &it.syntax,
+ }
+ }
+}
+impl From<Const> for AssocItem {
+ fn from(node: Const) -> AssocItem { AssocItem::Const(node) }
+}
+impl From<Fn> for AssocItem {
+ fn from(node: Fn) -> AssocItem { AssocItem::Fn(node) }
+}
+impl From<MacroCall> for AssocItem {
+ fn from(node: MacroCall) -> AssocItem { AssocItem::MacroCall(node) }
+}
+impl From<TypeAlias> for AssocItem {
+ fn from(node: TypeAlias) -> AssocItem { AssocItem::TypeAlias(node) }
+}
+impl AstNode for AssocItem {
+ fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, CONST | FN | MACRO_CALL | TYPE_ALIAS) }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ CONST => AssocItem::Const(Const { syntax }),
+ FN => AssocItem::Fn(Fn { syntax }),
+ MACRO_CALL => AssocItem::MacroCall(MacroCall { syntax }),
+ TYPE_ALIAS => AssocItem::TypeAlias(TypeAlias { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ AssocItem::Const(it) => &it.syntax,
+ AssocItem::Fn(it) => &it.syntax,
+ AssocItem::MacroCall(it) => &it.syntax,
+ AssocItem::TypeAlias(it) => &it.syntax,
+ }
+ }
+}
+impl From<Fn> for ExternItem {
+ fn from(node: Fn) -> ExternItem { ExternItem::Fn(node) }
+}
+impl From<MacroCall> for ExternItem {
+ fn from(node: MacroCall) -> ExternItem { ExternItem::MacroCall(node) }
+}
+impl From<Static> for ExternItem {
+ fn from(node: Static) -> ExternItem { ExternItem::Static(node) }
+}
+impl From<TypeAlias> for ExternItem {
+ fn from(node: TypeAlias) -> ExternItem { ExternItem::TypeAlias(node) }
+}
+impl AstNode for ExternItem {
+ fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, FN | MACRO_CALL | STATIC | TYPE_ALIAS) }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ FN => ExternItem::Fn(Fn { syntax }),
+ MACRO_CALL => ExternItem::MacroCall(MacroCall { syntax }),
+ STATIC => ExternItem::Static(Static { syntax }),
+ TYPE_ALIAS => ExternItem::TypeAlias(TypeAlias { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ ExternItem::Fn(it) => &it.syntax,
+ ExternItem::MacroCall(it) => &it.syntax,
+ ExternItem::Static(it) => &it.syntax,
+ ExternItem::TypeAlias(it) => &it.syntax,
+ }
+ }
+}
+impl From<ConstParam> for GenericParam {
+ fn from(node: ConstParam) -> GenericParam { GenericParam::ConstParam(node) }
+}
+impl From<LifetimeParam> for GenericParam {
+ fn from(node: LifetimeParam) -> GenericParam { GenericParam::LifetimeParam(node) }
+}
+impl From<TypeParam> for GenericParam {
+ fn from(node: TypeParam) -> GenericParam { GenericParam::TypeParam(node) }
+}
+impl AstNode for GenericParam {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(kind, CONST_PARAM | LIFETIME_PARAM | TYPE_PARAM)
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ CONST_PARAM => GenericParam::ConstParam(ConstParam { syntax }),
+ LIFETIME_PARAM => GenericParam::LifetimeParam(LifetimeParam { syntax }),
+ TYPE_PARAM => GenericParam::TypeParam(TypeParam { syntax }),
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ GenericParam::ConstParam(it) => &it.syntax,
+ GenericParam::LifetimeParam(it) => &it.syntax,
+ GenericParam::TypeParam(it) => &it.syntax,
+ }
+ }
+}
+impl AnyHasArgList {
+ #[inline]
+ pub fn new<T: ast::HasArgList>(node: T) -> AnyHasArgList {
+ AnyHasArgList { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasArgList {
+ fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, CALL_EXPR | METHOD_CALL_EXPR) }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasArgList { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasAttrs {
+ #[inline]
+ pub fn new<T: ast::HasAttrs>(node: T) -> AnyHasAttrs {
+ AnyHasAttrs { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasAttrs {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(
+ kind,
+ MACRO_CALL
+ | SOURCE_FILE
+ | CONST
+ | ENUM
+ | EXTERN_BLOCK
+ | EXTERN_CRATE
+ | FN
+ | IMPL
+ | MACRO_RULES
+ | MACRO_DEF
+ | MODULE
+ | STATIC
+ | STRUCT
+ | TRAIT
+ | TYPE_ALIAS
+ | UNION
+ | USE
+ | ITEM_LIST
+ | BLOCK_EXPR
+ | SELF_PARAM
+ | PARAM
+ | RECORD_FIELD
+ | TUPLE_FIELD
+ | VARIANT
+ | ASSOC_ITEM_LIST
+ | EXTERN_ITEM_LIST
+ | CONST_PARAM
+ | LIFETIME_PARAM
+ | TYPE_PARAM
+ | LET_STMT
+ | ARRAY_EXPR
+ | AWAIT_EXPR
+ | BIN_EXPR
+ | BOX_EXPR
+ | BREAK_EXPR
+ | CALL_EXPR
+ | CAST_EXPR
+ | CLOSURE_EXPR
+ | CONTINUE_EXPR
+ | FIELD_EXPR
+ | FOR_EXPR
+ | IF_EXPR
+ | INDEX_EXPR
+ | LITERAL
+ | LOOP_EXPR
+ | MATCH_EXPR
+ | METHOD_CALL_EXPR
+ | PAREN_EXPR
+ | PATH_EXPR
+ | PREFIX_EXPR
+ | RANGE_EXPR
+ | REF_EXPR
+ | RETURN_EXPR
+ | TRY_EXPR
+ | TUPLE_EXPR
+ | WHILE_EXPR
+ | YIELD_EXPR
+ | LET_EXPR
+ | UNDERSCORE_EXPR
+ | STMT_LIST
+ | RECORD_EXPR_FIELD_LIST
+ | RECORD_EXPR_FIELD
+ | MATCH_ARM_LIST
+ | MATCH_ARM
+ | IDENT_PAT
+ | REST_PAT
+ | RECORD_PAT_FIELD
+ )
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasAttrs { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasDocComments {
+ #[inline]
+ pub fn new<T: ast::HasDocComments>(node: T) -> AnyHasDocComments {
+ AnyHasDocComments { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasDocComments {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(
+ kind,
+ MACRO_CALL
+ | SOURCE_FILE
+ | CONST
+ | ENUM
+ | EXTERN_BLOCK
+ | EXTERN_CRATE
+ | FN
+ | IMPL
+ | MACRO_RULES
+ | MACRO_DEF
+ | MODULE
+ | STATIC
+ | STRUCT
+ | TRAIT
+ | TYPE_ALIAS
+ | UNION
+ | USE
+ | RECORD_FIELD
+ | TUPLE_FIELD
+ | VARIANT
+ )
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasDocComments { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasGenericParams {
+ #[inline]
+ pub fn new<T: ast::HasGenericParams>(node: T) -> AnyHasGenericParams {
+ AnyHasGenericParams { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasGenericParams {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(kind, ENUM | FN | IMPL | STRUCT | TRAIT | TYPE_ALIAS | UNION)
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasGenericParams { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasLoopBody {
+ #[inline]
+ pub fn new<T: ast::HasLoopBody>(node: T) -> AnyHasLoopBody {
+ AnyHasLoopBody { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasLoopBody {
+ fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, FOR_EXPR | LOOP_EXPR | WHILE_EXPR) }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasLoopBody { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasModuleItem {
+ #[inline]
+ pub fn new<T: ast::HasModuleItem>(node: T) -> AnyHasModuleItem {
+ AnyHasModuleItem { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasModuleItem {
+ fn can_cast(kind: SyntaxKind) -> bool { matches!(kind, MACRO_ITEMS | SOURCE_FILE | ITEM_LIST) }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasModuleItem { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasName {
+ #[inline]
+ pub fn new<T: ast::HasName>(node: T) -> AnyHasName {
+ AnyHasName { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasName {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(
+ kind,
+ CONST
+ | ENUM
+ | FN
+ | MACRO_RULES
+ | MACRO_DEF
+ | MODULE
+ | STATIC
+ | STRUCT
+ | TRAIT
+ | TYPE_ALIAS
+ | UNION
+ | RENAME
+ | SELF_PARAM
+ | RECORD_FIELD
+ | VARIANT
+ | CONST_PARAM
+ | TYPE_PARAM
+ | IDENT_PAT
+ )
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasName { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasTypeBounds {
+ #[inline]
+ pub fn new<T: ast::HasTypeBounds>(node: T) -> AnyHasTypeBounds {
+ AnyHasTypeBounds { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasTypeBounds {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(
+ kind,
+ ASSOC_TYPE_ARG | TRAIT | TYPE_ALIAS | LIFETIME_PARAM | TYPE_PARAM | WHERE_PRED
+ )
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasTypeBounds { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl AnyHasVisibility {
+ #[inline]
+ pub fn new<T: ast::HasVisibility>(node: T) -> AnyHasVisibility {
+ AnyHasVisibility { syntax: node.syntax().clone() }
+ }
+}
+impl AstNode for AnyHasVisibility {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(
+ kind,
+ CONST
+ | ENUM
+ | EXTERN_CRATE
+ | FN
+ | IMPL
+ | MACRO_RULES
+ | MACRO_DEF
+ | MODULE
+ | STATIC
+ | STRUCT
+ | TRAIT
+ | TYPE_ALIAS
+ | UNION
+ | USE
+ | RECORD_FIELD
+ | TUPLE_FIELD
+ | VARIANT
+ )
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| AnyHasVisibility { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+impl std::fmt::Display for GenericArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Type {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Expr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Item {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Stmt {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Pat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for FieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Adt {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for AssocItem {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExternItem {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for GenericParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Name {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for NameRef {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Lifetime {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Path {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PathSegment {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for GenericArgList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ParamList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RetType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PathType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for AssocTypeArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LifetimeArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ConstArg {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeBoundList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroCall {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Attr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TokenTree {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroItems {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroStmts {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for SourceFile {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Const {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Enum {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExternBlock {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExternCrate {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Fn {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Impl {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroRules {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroDef {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Module {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Static {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Struct {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Trait {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeAlias {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Union {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Use {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Visibility {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ItemList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Rename {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for UseTree {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for UseTreeList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Abi {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for GenericParamList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for WhereClause {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BlockExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for SelfParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Param {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordFieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleFieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordField {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleField {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for VariantList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Variant {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for AssocItemList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExternItemList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ConstParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LifetimeParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeParam {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for WherePred {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Meta {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ExprStmt {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LetStmt {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LetElse {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ArrayExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for AwaitExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BinExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BoxExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BreakExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for CallExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for CastExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ClosureExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ContinueExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for FieldExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ForExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for IfExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for IndexExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Literal {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LoopExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MatchExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MethodCallExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ParenExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PathExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PrefixExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RangeExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RefExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ReturnExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TryExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for WhileExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for YieldExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LetExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for UnderscoreExpr {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for StmtList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for Label {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordExprFieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordExprField {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ArgList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MatchArmList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MatchArm {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MatchGuard {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ArrayType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for DynTraitType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for FnPtrType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ForType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ImplTraitType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for InferType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for NeverType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ParenType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PtrType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RefType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for SliceType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleType {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TypeBound {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for IdentPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for BoxPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RestPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for LiteralPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for MacroPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for OrPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ParenPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for PathPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for WildcardPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RangePat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RefPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for SlicePat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TuplePat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for TupleStructPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for ConstBlockPat {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordPatFieldList {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
+impl std::fmt::Display for RecordPatField {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+}
--- /dev/null
- let mut text_iter = text.chars();
+//! There are many AstNodes, but only a few tokens, so we hand-write them here.
+
+use std::borrow::Cow;
+
+use rustc_lexer::unescape::{unescape_byte, unescape_char, unescape_literal, Mode};
+
+use crate::{
+ ast::{self, AstToken},
+ TextRange, TextSize,
+};
+
+impl ast::Comment {
+ pub fn kind(&self) -> CommentKind {
+ CommentKind::from_text(self.text())
+ }
+
+ pub fn is_doc(&self) -> bool {
+ self.kind().doc.is_some()
+ }
+
+ pub fn is_inner(&self) -> bool {
+ self.kind().doc == Some(CommentPlacement::Inner)
+ }
+
+ pub fn is_outer(&self) -> bool {
+ self.kind().doc == Some(CommentPlacement::Outer)
+ }
+
+ pub fn prefix(&self) -> &'static str {
+ let &(prefix, _kind) = CommentKind::BY_PREFIX
+ .iter()
+ .find(|&(prefix, kind)| self.kind() == *kind && self.text().starts_with(prefix))
+ .unwrap();
+ prefix
+ }
+
+ /// Returns the textual content of a doc comment node as a single string with prefix and suffix
+ /// removed.
+ pub fn doc_comment(&self) -> Option<&str> {
+ let kind = self.kind();
+ match kind {
+ CommentKind { shape, doc: Some(_) } => {
+ let prefix = kind.prefix();
+ let text = &self.text()[prefix.len()..];
+ let text = if shape == CommentShape::Block {
+ text.strip_suffix("*/").unwrap_or(text)
+ } else {
+ text
+ };
+ Some(text)
+ }
+ _ => None,
+ }
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub struct CommentKind {
+ pub shape: CommentShape,
+ pub doc: Option<CommentPlacement>,
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum CommentShape {
+ Line,
+ Block,
+}
+
+impl CommentShape {
+ pub fn is_line(self) -> bool {
+ self == CommentShape::Line
+ }
+
+ pub fn is_block(self) -> bool {
+ self == CommentShape::Block
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Clone, Copy)]
+pub enum CommentPlacement {
+ Inner,
+ Outer,
+}
+
+impl CommentKind {
+ const BY_PREFIX: [(&'static str, CommentKind); 9] = [
+ ("/**/", CommentKind { shape: CommentShape::Block, doc: None }),
+ ("/***", CommentKind { shape: CommentShape::Block, doc: None }),
+ ("////", CommentKind { shape: CommentShape::Line, doc: None }),
+ ("///", CommentKind { shape: CommentShape::Line, doc: Some(CommentPlacement::Outer) }),
+ ("//!", CommentKind { shape: CommentShape::Line, doc: Some(CommentPlacement::Inner) }),
+ ("/**", CommentKind { shape: CommentShape::Block, doc: Some(CommentPlacement::Outer) }),
+ ("/*!", CommentKind { shape: CommentShape::Block, doc: Some(CommentPlacement::Inner) }),
+ ("//", CommentKind { shape: CommentShape::Line, doc: None }),
+ ("/*", CommentKind { shape: CommentShape::Block, doc: None }),
+ ];
+
+ pub(crate) fn from_text(text: &str) -> CommentKind {
+ let &(_prefix, kind) = CommentKind::BY_PREFIX
+ .iter()
+ .find(|&(prefix, _kind)| text.starts_with(prefix))
+ .unwrap();
+ kind
+ }
+
+ pub fn prefix(&self) -> &'static str {
+ let &(prefix, _) =
+ CommentKind::BY_PREFIX.iter().rev().find(|(_, kind)| kind == self).unwrap();
+ prefix
+ }
+}
+
+impl ast::Whitespace {
+ pub fn spans_multiple_lines(&self) -> bool {
+ let text = self.text();
+ text.find('\n').map_or(false, |idx| text[idx + 1..].contains('\n'))
+ }
+}
+
+pub struct QuoteOffsets {
+ pub quotes: (TextRange, TextRange),
+ pub contents: TextRange,
+}
+
+impl QuoteOffsets {
+ fn new(literal: &str) -> Option<QuoteOffsets> {
+ let left_quote = literal.find('"')?;
+ let right_quote = literal.rfind('"')?;
+ if left_quote == right_quote {
+ // `literal` only contains one quote
+ return None;
+ }
+
+ let start = TextSize::from(0);
+ let left_quote = TextSize::try_from(left_quote).unwrap() + TextSize::of('"');
+ let right_quote = TextSize::try_from(right_quote).unwrap();
+ let end = TextSize::of(literal);
+
+ let res = QuoteOffsets {
+ quotes: (TextRange::new(start, left_quote), TextRange::new(right_quote, end)),
+ contents: TextRange::new(left_quote, right_quote),
+ };
+ Some(res)
+ }
+}
+
+pub trait IsString: AstToken {
+ fn quote_offsets(&self) -> Option<QuoteOffsets> {
+ let text = self.text();
+ let offsets = QuoteOffsets::new(text)?;
+ let o = self.syntax().text_range().start();
+ let offsets = QuoteOffsets {
+ quotes: (offsets.quotes.0 + o, offsets.quotes.1 + o),
+ contents: offsets.contents + o,
+ };
+ Some(offsets)
+ }
+ fn text_range_between_quotes(&self) -> Option<TextRange> {
+ self.quote_offsets().map(|it| it.contents)
+ }
+ fn open_quote_text_range(&self) -> Option<TextRange> {
+ self.quote_offsets().map(|it| it.quotes.0)
+ }
+ fn close_quote_text_range(&self) -> Option<TextRange> {
+ self.quote_offsets().map(|it| it.quotes.1)
+ }
+ fn escaped_char_ranges(
+ &self,
+ cb: &mut dyn FnMut(TextRange, Result<char, rustc_lexer::unescape::EscapeError>),
+ ) {
+ let text_range_no_quotes = match self.text_range_between_quotes() {
+ Some(it) => it,
+ None => return,
+ };
+
+ let start = self.syntax().text_range().start();
+ let text = &self.text()[text_range_no_quotes - start];
+ let offset = text_range_no_quotes.start() - start;
+
+ unescape_literal(text, Mode::Str, &mut |range, unescaped_char| {
+ let text_range =
+ TextRange::new(range.start.try_into().unwrap(), range.end.try_into().unwrap());
+ cb(text_range + offset, unescaped_char);
+ });
+ }
+}
+
+impl IsString for ast::String {}
+
+impl ast::String {
+ pub fn is_raw(&self) -> bool {
+ self.text().starts_with('r')
+ }
+ pub fn map_range_up(&self, range: TextRange) -> Option<TextRange> {
+ let contents_range = self.text_range_between_quotes()?;
+ assert!(TextRange::up_to(contents_range.len()).contains_range(range));
+ Some(range + contents_range.start())
+ }
+
+ pub fn value(&self) -> Option<Cow<'_, str>> {
+ if self.is_raw() {
+ let text = self.text();
+ let text =
+ &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
+ return Some(Cow::Borrowed(text));
+ }
+
+ let text = self.text();
+ let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
+
+ let mut buf = String::new();
- (Ok(c), true) if char_range.len() == 1 && Some(c) == text_iter.next() => (),
++ let mut prev_end = 0;
+ let mut has_error = false;
+ unescape_literal(text, Mode::Str, &mut |char_range, unescaped_char| match (
+ unescaped_char,
+ buf.capacity() == 0,
+ ) {
+ (Ok(c), false) => buf.push(c),
- buf.push_str(&text[..char_range.start]);
++ (Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
++ prev_end = char_range.end
++ }
+ (Ok(c), true) => {
+ buf.reserve_exact(text.len());
- let mut text_iter = text.chars();
++ buf.push_str(&text[..prev_end]);
+ buf.push(c);
+ }
+ (Err(_), _) => has_error = true,
+ });
+
+ match (has_error, buf.capacity() == 0) {
+ (true, _) => None,
+ (false, true) => Some(Cow::Borrowed(text)),
+ (false, false) => Some(Cow::Owned(buf)),
+ }
+ }
+}
+
+impl IsString for ast::ByteString {}
+
+impl ast::ByteString {
+ pub fn is_raw(&self) -> bool {
+ self.text().starts_with("br")
+ }
+
+ pub fn value(&self) -> Option<Cow<'_, [u8]>> {
+ if self.is_raw() {
+ let text = self.text();
+ let text =
+ &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
+ return Some(Cow::Borrowed(text.as_bytes()));
+ }
+
+ let text = self.text();
+ let text = &text[self.text_range_between_quotes()? - self.syntax().text_range().start()];
+
+ let mut buf: Vec<u8> = Vec::new();
- (Ok(c), true) if char_range.len() == 1 && Some(c) == text_iter.next() => (),
++ let mut prev_end = 0;
+ let mut has_error = false;
+ unescape_literal(text, Mode::ByteStr, &mut |char_range, unescaped_char| match (
+ unescaped_char,
+ buf.capacity() == 0,
+ ) {
+ (Ok(c), false) => buf.push(c as u8),
- buf.extend_from_slice(text[..char_range.start].as_bytes());
++ (Ok(_), true) if char_range.len() == 1 && char_range.start == prev_end => {
++ prev_end = char_range.end
++ }
+ (Ok(c), true) => {
+ buf.reserve_exact(text.len());
++ buf.extend_from_slice(text[..prev_end].as_bytes());
+ buf.push(c as u8);
+ }
+ (Err(_), _) => has_error = true,
+ });
+
+ match (has_error, buf.capacity() == 0) {
+ (true, _) => None,
+ (false, true) => Some(Cow::Borrowed(text.as_bytes())),
+ (false, false) => Some(Cow::Owned(buf)),
+ }
+ }
+}
+
+impl ast::IntNumber {
+ pub fn radix(&self) -> Radix {
+ match self.text().get(..2).unwrap_or_default() {
+ "0b" => Radix::Binary,
+ "0o" => Radix::Octal,
+ "0x" => Radix::Hexadecimal,
+ _ => Radix::Decimal,
+ }
+ }
+
+ pub fn split_into_parts(&self) -> (&str, &str, &str) {
+ let radix = self.radix();
+ let (prefix, mut text) = self.text().split_at(radix.prefix_len());
+
+ let is_suffix_start: fn(&(usize, char)) -> bool = match radix {
+ Radix::Hexadecimal => |(_, c)| matches!(c, 'g'..='z' | 'G'..='Z'),
+ _ => |(_, c)| c.is_ascii_alphabetic(),
+ };
+
+ let mut suffix = "";
+ if let Some((suffix_start, _)) = text.char_indices().find(is_suffix_start) {
+ let (text2, suffix2) = text.split_at(suffix_start);
+ text = text2;
+ suffix = suffix2;
+ };
+
+ (prefix, text, suffix)
+ }
+
+ pub fn value(&self) -> Option<u128> {
+ let (_, text, _) = self.split_into_parts();
+ let value = u128::from_str_radix(&text.replace('_', ""), self.radix() as u32).ok()?;
+ Some(value)
+ }
+
+ pub fn suffix(&self) -> Option<&str> {
+ let (_, _, suffix) = self.split_into_parts();
+ if suffix.is_empty() {
+ None
+ } else {
+ Some(suffix)
+ }
+ }
+
+ pub fn float_value(&self) -> Option<f64> {
+ let (_, text, _) = self.split_into_parts();
+ text.replace('_', "").parse::<f64>().ok()
+ }
+}
+
+impl ast::FloatNumber {
+ pub fn split_into_parts(&self) -> (&str, &str) {
+ let text = self.text();
+ let mut float_text = self.text();
+ let mut suffix = "";
+ let mut indices = text.char_indices();
+ if let Some((mut suffix_start, c)) = indices.by_ref().find(|(_, c)| c.is_ascii_alphabetic())
+ {
+ if c == 'e' || c == 'E' {
+ if let Some(suffix_start_tuple) = indices.find(|(_, c)| c.is_ascii_alphabetic()) {
+ suffix_start = suffix_start_tuple.0;
+
+ float_text = &text[..suffix_start];
+ suffix = &text[suffix_start..];
+ }
+ } else {
+ float_text = &text[..suffix_start];
+ suffix = &text[suffix_start..];
+ }
+ }
+
+ (float_text, suffix)
+ }
+
+ pub fn suffix(&self) -> Option<&str> {
+ let (_, suffix) = self.split_into_parts();
+ if suffix.is_empty() {
+ None
+ } else {
+ Some(suffix)
+ }
+ }
+
+ pub fn value(&self) -> Option<f64> {
+ let (text, _) = self.split_into_parts();
+ text.replace('_', "").parse::<f64>().ok()
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Copy, Clone)]
+pub enum Radix {
+ Binary = 2,
+ Octal = 8,
+ Decimal = 10,
+ Hexadecimal = 16,
+}
+
+impl Radix {
+ pub const ALL: &'static [Radix] =
+ &[Radix::Binary, Radix::Octal, Radix::Decimal, Radix::Hexadecimal];
+
+ const fn prefix_len(self) -> usize {
+ match self {
+ Self::Decimal => 0,
+ _ => 2,
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::ast::{self, make, FloatNumber, IntNumber};
+
+ fn check_float_suffix<'a>(lit: &str, expected: impl Into<Option<&'a str>>) {
+ assert_eq!(FloatNumber { syntax: make::tokens::literal(lit) }.suffix(), expected.into());
+ }
+
+ fn check_int_suffix<'a>(lit: &str, expected: impl Into<Option<&'a str>>) {
+ assert_eq!(IntNumber { syntax: make::tokens::literal(lit) }.suffix(), expected.into());
+ }
+
+ fn check_float_value(lit: &str, expected: impl Into<Option<f64>> + Copy) {
+ assert_eq!(FloatNumber { syntax: make::tokens::literal(lit) }.value(), expected.into());
+ assert_eq!(IntNumber { syntax: make::tokens::literal(lit) }.float_value(), expected.into());
+ }
+
+ fn check_int_value(lit: &str, expected: impl Into<Option<u128>>) {
+ assert_eq!(IntNumber { syntax: make::tokens::literal(lit) }.value(), expected.into());
+ }
+
+ #[test]
+ fn test_float_number_suffix() {
+ check_float_suffix("123.0", None);
+ check_float_suffix("123f32", "f32");
+ check_float_suffix("123.0e", None);
+ check_float_suffix("123.0e4", None);
+ check_float_suffix("123.0ef32", "f32");
+ check_float_suffix("123.0E4f32", "f32");
+ check_float_suffix("1_2_3.0_f32", "f32");
+ }
+
+ #[test]
+ fn test_int_number_suffix() {
+ check_int_suffix("123", None);
+ check_int_suffix("123i32", "i32");
+ check_int_suffix("1_0_1_l_o_l", "l_o_l");
+ check_int_suffix("0b11", None);
+ check_int_suffix("0o11", None);
+ check_int_suffix("0xff", None);
+ check_int_suffix("0b11u32", "u32");
+ check_int_suffix("0o11u32", "u32");
+ check_int_suffix("0xffu32", "u32");
+ }
+
+ fn check_string_value<'a>(lit: &str, expected: impl Into<Option<&'a str>>) {
+ assert_eq!(
+ ast::String { syntax: make::tokens::literal(&format!("\"{}\"", lit)) }
+ .value()
+ .as_deref(),
+ expected.into()
+ );
+ }
+
+ #[test]
+ fn test_string_escape() {
+ check_string_value(r"foobar", "foobar");
+ check_string_value(r"\foobar", None);
+ check_string_value(r"\nfoobar", "\nfoobar");
+ check_string_value(r"C:\\Windows\\System32\\", "C:\\Windows\\System32\\");
++ check_string_value(r"\x61bcde", "abcde");
++ check_string_value(
++ r"a\
++bcde", "abcde",
++ );
++ }
++
++ fn check_byte_string_value<'a, const N: usize>(
++ lit: &str,
++ expected: impl Into<Option<&'a [u8; N]>>,
++ ) {
++ assert_eq!(
++ ast::ByteString { syntax: make::tokens::literal(&format!("b\"{}\"", lit)) }
++ .value()
++ .as_deref(),
++ expected.into().map(|value| &value[..])
++ );
++ }
++
++ #[test]
++ fn test_byte_string_escape() {
++ check_byte_string_value(r"foobar", b"foobar");
++ check_byte_string_value(r"\foobar", None::<&[u8; 0]>);
++ check_byte_string_value(r"\nfoobar", b"\nfoobar");
++ check_byte_string_value(r"C:\\Windows\\System32\\", b"C:\\Windows\\System32\\");
++ check_byte_string_value(r"\x61bcde", b"abcde");
++ check_byte_string_value(
++ r"a\
++bcde", b"abcde",
++ );
+ }
+
+ #[test]
+ fn test_value_underscores() {
+ check_float_value("3.141592653589793_f64", 3.141592653589793_f64);
+ check_float_value("1__0.__0__f32", 10.0);
+ check_int_value("0b__1_0_", 2);
+ check_int_value("1_1_1_1_1_1", 111111);
+ }
+}
+
+impl ast::Char {
+ pub fn value(&self) -> Option<char> {
+ let mut text = self.text();
+ if text.starts_with('\'') {
+ text = &text[1..];
+ } else {
+ return None;
+ }
+ if text.ends_with('\'') {
+ text = &text[0..text.len() - 1];
+ }
+
+ unescape_char(text).ok()
+ }
+}
+
+impl ast::Byte {
+ pub fn value(&self) -> Option<u8> {
+ let mut text = self.text();
+ if text.starts_with("b\'") {
+ text = &text[2..];
+ } else {
+ return None;
+ }
+ if text.ends_with('\'') {
+ text = &text[0..text.len() - 1];
+ }
+
+ unescape_byte(text).ok()
+ }
+}
--- /dev/null
- // Loops have two expressions so this might collide, therefor manual impl it
+//! This module generates AST datatype used by rust-analyzer.
+//!
+//! Specifically, it generates the `SyntaxKind` enum and a number of newtype
+//! wrappers around `SyntaxNode` which implement `syntax::AstNode`.
+
+use std::{
+ collections::{BTreeSet, HashSet},
+ fmt::Write,
+};
+
+use itertools::Itertools;
+use proc_macro2::{Punct, Spacing};
+use quote::{format_ident, quote};
+use ungrammar::{Grammar, Rule};
+
+use crate::tests::ast_src::{
+ AstEnumSrc, AstNodeSrc, AstSrc, Cardinality, Field, KindsSrc, KINDS_SRC,
+};
+
+#[test]
+fn sourcegen_ast() {
+ let syntax_kinds = generate_syntax_kinds(KINDS_SRC);
+ let syntax_kinds_file =
+ sourcegen::project_root().join("crates/parser/src/syntax_kind/generated.rs");
+ sourcegen::ensure_file_contents(syntax_kinds_file.as_path(), &syntax_kinds);
+
+ let grammar =
+ include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/rust.ungram")).parse().unwrap();
+ let ast = lower(&grammar);
+
+ let ast_tokens = generate_tokens(&ast);
+ let ast_tokens_file =
+ sourcegen::project_root().join("crates/syntax/src/ast/generated/tokens.rs");
+ sourcegen::ensure_file_contents(ast_tokens_file.as_path(), &ast_tokens);
+
+ let ast_nodes = generate_nodes(KINDS_SRC, &ast);
+ let ast_nodes_file = sourcegen::project_root().join("crates/syntax/src/ast/generated/nodes.rs");
+ sourcegen::ensure_file_contents(ast_nodes_file.as_path(), &ast_nodes);
+}
+
+fn generate_tokens(grammar: &AstSrc) -> String {
+ let tokens = grammar.tokens.iter().map(|token| {
+ let name = format_ident!("{}", token);
+ let kind = format_ident!("{}", to_upper_snake_case(token));
+ quote! {
+ #[derive(Debug, Clone, PartialEq, Eq, Hash)]
+ pub struct #name {
+ pub(crate) syntax: SyntaxToken,
+ }
+ impl std::fmt::Display for #name {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(&self.syntax, f)
+ }
+ }
+ impl AstToken for #name {
+ fn can_cast(kind: SyntaxKind) -> bool { kind == #kind }
+ fn cast(syntax: SyntaxToken) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
+ }
+ fn syntax(&self) -> &SyntaxToken { &self.syntax }
+ }
+ }
+ });
+
+ sourcegen::add_preamble(
+ "sourcegen_ast",
+ sourcegen::reformat(
+ quote! {
+ use crate::{SyntaxKind::{self, *}, SyntaxToken, ast::AstToken};
+ #(#tokens)*
+ }
+ .to_string(),
+ ),
+ )
+ .replace("#[derive", "\n#[derive")
+}
+
+fn generate_nodes(kinds: KindsSrc<'_>, grammar: &AstSrc) -> String {
+ let (node_defs, node_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
+ .nodes
+ .iter()
+ .map(|node| {
+ let name = format_ident!("{}", node.name);
+ let kind = format_ident!("{}", to_upper_snake_case(&node.name));
+ let traits = node
+ .traits
+ .iter()
+ .filter(|trait_name| {
++ // Loops have two expressions so this might collide, therefore manual impl it
+ node.name != "ForExpr" && node.name != "WhileExpr"
+ || trait_name.as_str() != "HasLoopBody"
+ })
+ .map(|trait_name| {
+ let trait_name = format_ident!("{}", trait_name);
+ quote!(impl ast::#trait_name for #name {})
+ });
+
+ let methods = node.fields.iter().map(|field| {
+ let method_name = field.method_name();
+ let ty = field.ty();
+
+ if field.is_many() {
+ quote! {
+ pub fn #method_name(&self) -> AstChildren<#ty> {
+ support::children(&self.syntax)
+ }
+ }
+ } else if let Some(token_kind) = field.token_kind() {
+ quote! {
+ pub fn #method_name(&self) -> Option<#ty> {
+ support::token(&self.syntax, #token_kind)
+ }
+ }
+ } else {
+ quote! {
+ pub fn #method_name(&self) -> Option<#ty> {
+ support::child(&self.syntax)
+ }
+ }
+ }
+ });
+ (
+ quote! {
+ #[pretty_doc_comment_placeholder_workaround]
+ #[derive(Debug, Clone, PartialEq, Eq, Hash)]
+ pub struct #name {
+ pub(crate) syntax: SyntaxNode,
+ }
+
+ #(#traits)*
+
+ impl #name {
+ #(#methods)*
+ }
+ },
+ quote! {
+ impl AstNode for #name {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ kind == #kind
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None }
+ }
+ fn syntax(&self) -> &SyntaxNode { &self.syntax }
+ }
+ },
+ )
+ })
+ .unzip();
+
+ let (enum_defs, enum_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
+ .enums
+ .iter()
+ .map(|en| {
+ let variants: Vec<_> = en.variants.iter().map(|var| format_ident!("{}", var)).collect();
+ let name = format_ident!("{}", en.name);
+ let kinds: Vec<_> = variants
+ .iter()
+ .map(|name| format_ident!("{}", to_upper_snake_case(&name.to_string())))
+ .collect();
+ let traits = en.traits.iter().map(|trait_name| {
+ let trait_name = format_ident!("{}", trait_name);
+ quote!(impl ast::#trait_name for #name {})
+ });
+
+ let ast_node = if en.name == "Stmt" {
+ quote! {}
+ } else {
+ quote! {
+ impl AstNode for #name {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(kind, #(#kinds)|*)
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ let res = match syntax.kind() {
+ #(
+ #kinds => #name::#variants(#variants { syntax }),
+ )*
+ _ => return None,
+ };
+ Some(res)
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ match self {
+ #(
+ #name::#variants(it) => &it.syntax,
+ )*
+ }
+ }
+ }
+ }
+ };
+
+ (
+ quote! {
+ #[pretty_doc_comment_placeholder_workaround]
+ #[derive(Debug, Clone, PartialEq, Eq, Hash)]
+ pub enum #name {
+ #(#variants(#variants),)*
+ }
+
+ #(#traits)*
+ },
+ quote! {
+ #(
+ impl From<#variants> for #name {
+ fn from(node: #variants) -> #name {
+ #name::#variants(node)
+ }
+ }
+ )*
+ #ast_node
+ },
+ )
+ })
+ .unzip();
+
+ let (any_node_defs, any_node_boilerplate_impls): (Vec<_>, Vec<_>) = grammar
+ .nodes
+ .iter()
+ .flat_map(|node| node.traits.iter().map(move |t| (t, node)))
+ .into_group_map()
+ .into_iter()
+ .sorted_by_key(|(k, _)| *k)
+ .map(|(trait_name, nodes)| {
+ let name = format_ident!("Any{}", trait_name);
+ let trait_name = format_ident!("{}", trait_name);
+ let kinds: Vec<_> = nodes
+ .iter()
+ .map(|name| format_ident!("{}", to_upper_snake_case(&name.name.to_string())))
+ .collect();
+
+ (
+ quote! {
+ #[pretty_doc_comment_placeholder_workaround]
+ #[derive(Debug, Clone, PartialEq, Eq, Hash)]
+ pub struct #name {
+ pub(crate) syntax: SyntaxNode,
+ }
+ impl ast::#trait_name for #name {}
+ },
+ quote! {
+ impl #name {
+ #[inline]
+ pub fn new<T: ast::#trait_name>(node: T) -> #name {
+ #name {
+ syntax: node.syntax().clone()
+ }
+ }
+ }
+ impl AstNode for #name {
+ fn can_cast(kind: SyntaxKind) -> bool {
+ matches!(kind, #(#kinds)|*)
+ }
+ fn cast(syntax: SyntaxNode) -> Option<Self> {
+ Self::can_cast(syntax.kind()).then(|| #name { syntax })
+ }
+ fn syntax(&self) -> &SyntaxNode {
+ &self.syntax
+ }
+ }
+ },
+ )
+ })
+ .unzip();
+
+ let enum_names = grammar.enums.iter().map(|it| &it.name);
+ let node_names = grammar.nodes.iter().map(|it| &it.name);
+
+ let display_impls =
+ enum_names.chain(node_names.clone()).map(|it| format_ident!("{}", it)).map(|name| {
+ quote! {
+ impl std::fmt::Display for #name {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ std::fmt::Display::fmt(self.syntax(), f)
+ }
+ }
+ }
+ });
+
+ let defined_nodes: HashSet<_> = node_names.collect();
+
+ for node in kinds
+ .nodes
+ .iter()
+ .map(|kind| to_pascal_case(kind))
+ .filter(|name| !defined_nodes.iter().any(|&it| it == name))
+ {
+ drop(node)
+ // FIXME: restore this
+ // eprintln!("Warning: node {} not defined in ast source", node);
+ }
+
+ let ast = quote! {
+ #![allow(non_snake_case)]
+ use crate::{
+ SyntaxNode, SyntaxToken, SyntaxKind::{self, *},
+ ast::{self, AstNode, AstChildren, support},
+ T,
+ };
+
+ #(#node_defs)*
+ #(#enum_defs)*
+ #(#any_node_defs)*
+ #(#node_boilerplate_impls)*
+ #(#enum_boilerplate_impls)*
+ #(#any_node_boilerplate_impls)*
+ #(#display_impls)*
+ };
+
+ let ast = ast.to_string().replace("T ! [", "T![");
+
+ let mut res = String::with_capacity(ast.len() * 2);
+
+ let mut docs =
+ grammar.nodes.iter().map(|it| &it.doc).chain(grammar.enums.iter().map(|it| &it.doc));
+
+ for chunk in ast.split("# [pretty_doc_comment_placeholder_workaround] ") {
+ res.push_str(chunk);
+ if let Some(doc) = docs.next() {
+ write_doc_comment(doc, &mut res);
+ }
+ }
+
+ let res = sourcegen::add_preamble("sourcegen_ast", sourcegen::reformat(res));
+ res.replace("#[derive", "\n#[derive")
+}
+
+fn write_doc_comment(contents: &[String], dest: &mut String) {
+ for line in contents {
+ writeln!(dest, "///{}", line).unwrap();
+ }
+}
+
+fn generate_syntax_kinds(grammar: KindsSrc<'_>) -> String {
+ let (single_byte_tokens_values, single_byte_tokens): (Vec<_>, Vec<_>) = grammar
+ .punct
+ .iter()
+ .filter(|(token, _name)| token.len() == 1)
+ .map(|(token, name)| (token.chars().next().unwrap(), format_ident!("{}", name)))
+ .unzip();
+
+ let punctuation_values = grammar.punct.iter().map(|(token, _name)| {
+ if "{}[]()".contains(token) {
+ let c = token.chars().next().unwrap();
+ quote! { #c }
+ } else {
+ let cs = token.chars().map(|c| Punct::new(c, Spacing::Joint));
+ quote! { #(#cs)* }
+ }
+ });
+ let punctuation =
+ grammar.punct.iter().map(|(_token, name)| format_ident!("{}", name)).collect::<Vec<_>>();
+
+ let x = |&name| match name {
+ "Self" => format_ident!("SELF_TYPE_KW"),
+ name => format_ident!("{}_KW", to_upper_snake_case(name)),
+ };
+ let full_keywords_values = grammar.keywords;
+ let full_keywords = full_keywords_values.iter().map(x);
+
+ let contextual_keywords_values = &grammar.contextual_keywords;
+ let contextual_keywords = contextual_keywords_values.iter().map(x);
+
+ let all_keywords_values = grammar
+ .keywords
+ .iter()
+ .chain(grammar.contextual_keywords.iter())
+ .copied()
+ .collect::<Vec<_>>();
+ let all_keywords_idents = all_keywords_values.iter().map(|kw| format_ident!("{}", kw));
+ let all_keywords = all_keywords_values.iter().map(x).collect::<Vec<_>>();
+
+ let literals =
+ grammar.literals.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
+
+ let tokens = grammar.tokens.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
+
+ let nodes = grammar.nodes.iter().map(|name| format_ident!("{}", name)).collect::<Vec<_>>();
+
+ let ast = quote! {
+ #![allow(bad_style, missing_docs, unreachable_pub)]
+ /// The kind of syntax node, e.g. `IDENT`, `USE_KW`, or `STRUCT`.
+ #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
+ #[repr(u16)]
+ pub enum SyntaxKind {
+ // Technical SyntaxKinds: they appear temporarily during parsing,
+ // but never end up in the final tree
+ #[doc(hidden)]
+ TOMBSTONE,
+ #[doc(hidden)]
+ EOF,
+ #(#punctuation,)*
+ #(#all_keywords,)*
+ #(#literals,)*
+ #(#tokens,)*
+ #(#nodes,)*
+
+ // Technical kind so that we can cast from u16 safely
+ #[doc(hidden)]
+ __LAST,
+ }
+ use self::SyntaxKind::*;
+
+ impl SyntaxKind {
+ pub fn is_keyword(self) -> bool {
+ matches!(self, #(#all_keywords)|*)
+ }
+
+ pub fn is_punct(self) -> bool {
+
+ matches!(self, #(#punctuation)|*)
+
+ }
+
+ pub fn is_literal(self) -> bool {
+ matches!(self, #(#literals)|*)
+ }
+
+ pub fn from_keyword(ident: &str) -> Option<SyntaxKind> {
+ let kw = match ident {
+ #(#full_keywords_values => #full_keywords,)*
+ _ => return None,
+ };
+ Some(kw)
+ }
+
+ pub fn from_contextual_keyword(ident: &str) -> Option<SyntaxKind> {
+ let kw = match ident {
+ #(#contextual_keywords_values => #contextual_keywords,)*
+ _ => return None,
+ };
+ Some(kw)
+ }
+
+ pub fn from_char(c: char) -> Option<SyntaxKind> {
+ let tok = match c {
+ #(#single_byte_tokens_values => #single_byte_tokens,)*
+ _ => return None,
+ };
+ Some(tok)
+ }
+ }
+
+ #[macro_export]
+ macro_rules! T {
+ #([#punctuation_values] => { $crate::SyntaxKind::#punctuation };)*
+ #([#all_keywords_idents] => { $crate::SyntaxKind::#all_keywords };)*
+ [lifetime_ident] => { $crate::SyntaxKind::LIFETIME_IDENT };
+ [ident] => { $crate::SyntaxKind::IDENT };
+ [shebang] => { $crate::SyntaxKind::SHEBANG };
+ }
+ pub use T;
+ };
+
+ sourcegen::add_preamble("sourcegen_ast", sourcegen::reformat(ast.to_string()))
+}
+
+fn to_upper_snake_case(s: &str) -> String {
+ let mut buf = String::with_capacity(s.len());
+ let mut prev = false;
+ for c in s.chars() {
+ if c.is_ascii_uppercase() && prev {
+ buf.push('_')
+ }
+ prev = true;
+
+ buf.push(c.to_ascii_uppercase());
+ }
+ buf
+}
+
+fn to_lower_snake_case(s: &str) -> String {
+ let mut buf = String::with_capacity(s.len());
+ let mut prev = false;
+ for c in s.chars() {
+ if c.is_ascii_uppercase() && prev {
+ buf.push('_')
+ }
+ prev = true;
+
+ buf.push(c.to_ascii_lowercase());
+ }
+ buf
+}
+
+fn to_pascal_case(s: &str) -> String {
+ let mut buf = String::with_capacity(s.len());
+ let mut prev_is_underscore = true;
+ for c in s.chars() {
+ if c == '_' {
+ prev_is_underscore = true;
+ } else if prev_is_underscore {
+ buf.push(c.to_ascii_uppercase());
+ prev_is_underscore = false;
+ } else {
+ buf.push(c.to_ascii_lowercase());
+ }
+ }
+ buf
+}
+
+fn pluralize(s: &str) -> String {
+ format!("{}s", s)
+}
+
+impl Field {
+ fn is_many(&self) -> bool {
+ matches!(self, Field::Node { cardinality: Cardinality::Many, .. })
+ }
+ fn token_kind(&self) -> Option<proc_macro2::TokenStream> {
+ match self {
+ Field::Token(token) => {
+ let token: proc_macro2::TokenStream = token.parse().unwrap();
+ Some(quote! { T![#token] })
+ }
+ _ => None,
+ }
+ }
+ fn method_name(&self) -> proc_macro2::Ident {
+ match self {
+ Field::Token(name) => {
+ let name = match name.as_str() {
+ ";" => "semicolon",
+ "->" => "thin_arrow",
+ "'{'" => "l_curly",
+ "'}'" => "r_curly",
+ "'('" => "l_paren",
+ "')'" => "r_paren",
+ "'['" => "l_brack",
+ "']'" => "r_brack",
+ "<" => "l_angle",
+ ">" => "r_angle",
+ "=" => "eq",
+ "!" => "excl",
+ "*" => "star",
+ "&" => "amp",
+ "_" => "underscore",
+ "." => "dot",
+ ".." => "dotdot",
+ "..." => "dotdotdot",
+ "..=" => "dotdoteq",
+ "=>" => "fat_arrow",
+ "@" => "at",
+ ":" => "colon",
+ "::" => "coloncolon",
+ "#" => "pound",
+ "?" => "question_mark",
+ "," => "comma",
+ "|" => "pipe",
+ "~" => "tilde",
+ _ => name,
+ };
+ format_ident!("{}_token", name)
+ }
+ Field::Node { name, .. } => {
+ if name == "type" {
+ format_ident!("ty")
+ } else {
+ format_ident!("{}", name)
+ }
+ }
+ }
+ }
+ fn ty(&self) -> proc_macro2::Ident {
+ match self {
+ Field::Token(_) => format_ident!("SyntaxToken"),
+ Field::Node { ty, .. } => format_ident!("{}", ty),
+ }
+ }
+}
+
+fn lower(grammar: &Grammar) -> AstSrc {
+ let mut res = AstSrc {
+ tokens: "Whitespace Comment String ByteString IntNumber FloatNumber Char Byte Ident"
+ .split_ascii_whitespace()
+ .map(|it| it.to_string())
+ .collect::<Vec<_>>(),
+ ..Default::default()
+ };
+
+ let nodes = grammar.iter().collect::<Vec<_>>();
+
+ for &node in &nodes {
+ let name = grammar[node].name.clone();
+ let rule = &grammar[node].rule;
+ match lower_enum(grammar, rule) {
+ Some(variants) => {
+ let enum_src = AstEnumSrc { doc: Vec::new(), name, traits: Vec::new(), variants };
+ res.enums.push(enum_src);
+ }
+ None => {
+ let mut fields = Vec::new();
+ lower_rule(&mut fields, grammar, None, rule);
+ res.nodes.push(AstNodeSrc { doc: Vec::new(), name, traits: Vec::new(), fields });
+ }
+ }
+ }
+
+ deduplicate_fields(&mut res);
+ extract_enums(&mut res);
+ extract_struct_traits(&mut res);
+ extract_enum_traits(&mut res);
+ res
+}
+
+fn lower_enum(grammar: &Grammar, rule: &Rule) -> Option<Vec<String>> {
+ let alternatives = match rule {
+ Rule::Alt(it) => it,
+ _ => return None,
+ };
+ let mut variants = Vec::new();
+ for alternative in alternatives {
+ match alternative {
+ Rule::Node(it) => variants.push(grammar[*it].name.clone()),
+ Rule::Token(it) if grammar[*it].name == ";" => (),
+ _ => return None,
+ }
+ }
+ Some(variants)
+}
+
+fn lower_rule(acc: &mut Vec<Field>, grammar: &Grammar, label: Option<&String>, rule: &Rule) {
+ if lower_comma_list(acc, grammar, label, rule) {
+ return;
+ }
+
+ match rule {
+ Rule::Node(node) => {
+ let ty = grammar[*node].name.clone();
+ let name = label.cloned().unwrap_or_else(|| to_lower_snake_case(&ty));
+ let field = Field::Node { name, ty, cardinality: Cardinality::Optional };
+ acc.push(field);
+ }
+ Rule::Token(token) => {
+ assert!(label.is_none());
+ let mut name = grammar[*token].name.clone();
+ if name != "int_number" && name != "string" {
+ if "[]{}()".contains(&name) {
+ name = format!("'{}'", name);
+ }
+ let field = Field::Token(name);
+ acc.push(field);
+ }
+ }
+ Rule::Rep(inner) => {
+ if let Rule::Node(node) = &**inner {
+ let ty = grammar[*node].name.clone();
+ let name = label.cloned().unwrap_or_else(|| pluralize(&to_lower_snake_case(&ty)));
+ let field = Field::Node { name, ty, cardinality: Cardinality::Many };
+ acc.push(field);
+ return;
+ }
+ panic!("unhandled rule: {:?}", rule)
+ }
+ Rule::Labeled { label: l, rule } => {
+ assert!(label.is_none());
+ let manually_implemented = matches!(
+ l.as_str(),
+ "lhs"
+ | "rhs"
+ | "then_branch"
+ | "else_branch"
+ | "start"
+ | "end"
+ | "op"
+ | "index"
+ | "base"
+ | "value"
+ | "trait"
+ | "self_ty"
+ | "iterable"
+ | "condition"
+ );
+ if manually_implemented {
+ return;
+ }
+ lower_rule(acc, grammar, Some(l), rule);
+ }
+ Rule::Seq(rules) | Rule::Alt(rules) => {
+ for rule in rules {
+ lower_rule(acc, grammar, label, rule)
+ }
+ }
+ Rule::Opt(rule) => lower_rule(acc, grammar, label, rule),
+ }
+}
+
+// (T (',' T)* ','?)
+fn lower_comma_list(
+ acc: &mut Vec<Field>,
+ grammar: &Grammar,
+ label: Option<&String>,
+ rule: &Rule,
+) -> bool {
+ let rule = match rule {
+ Rule::Seq(it) => it,
+ _ => return false,
+ };
+ let (node, repeat, trailing_comma) = match rule.as_slice() {
+ [Rule::Node(node), Rule::Rep(repeat), Rule::Opt(trailing_comma)] => {
+ (node, repeat, trailing_comma)
+ }
+ _ => return false,
+ };
+ let repeat = match &**repeat {
+ Rule::Seq(it) => it,
+ _ => return false,
+ };
+ match repeat.as_slice() {
+ [comma, Rule::Node(n)] if comma == &**trailing_comma && n == node => (),
+ _ => return false,
+ }
+ let ty = grammar[*node].name.clone();
+ let name = label.cloned().unwrap_or_else(|| pluralize(&to_lower_snake_case(&ty)));
+ let field = Field::Node { name, ty, cardinality: Cardinality::Many };
+ acc.push(field);
+ true
+}
+
+fn deduplicate_fields(ast: &mut AstSrc) {
+ for node in &mut ast.nodes {
+ let mut i = 0;
+ 'outer: while i < node.fields.len() {
+ for j in 0..i {
+ let f1 = &node.fields[i];
+ let f2 = &node.fields[j];
+ if f1 == f2 {
+ node.fields.remove(i);
+ continue 'outer;
+ }
+ }
+ i += 1;
+ }
+ }
+}
+
+fn extract_enums(ast: &mut AstSrc) {
+ for node in &mut ast.nodes {
+ for enm in &ast.enums {
+ let mut to_remove = Vec::new();
+ for (i, field) in node.fields.iter().enumerate() {
+ let ty = field.ty().to_string();
+ if enm.variants.iter().any(|it| it == &ty) {
+ to_remove.push(i);
+ }
+ }
+ if to_remove.len() == enm.variants.len() {
+ node.remove_field(to_remove);
+ let ty = enm.name.clone();
+ let name = to_lower_snake_case(&ty);
+ node.fields.push(Field::Node { name, ty, cardinality: Cardinality::Optional });
+ }
+ }
+ }
+}
+
+fn extract_struct_traits(ast: &mut AstSrc) {
+ let traits: &[(&str, &[&str])] = &[
+ ("HasAttrs", &["attrs"]),
+ ("HasName", &["name"]),
+ ("HasVisibility", &["visibility"]),
+ ("HasGenericParams", &["generic_param_list", "where_clause"]),
+ ("HasTypeBounds", &["type_bound_list", "colon_token"]),
+ ("HasModuleItem", &["items"]),
+ ("HasLoopBody", &["label", "loop_body"]),
+ ("HasArgList", &["arg_list"]),
+ ];
+
+ for node in &mut ast.nodes {
+ for (name, methods) in traits {
+ extract_struct_trait(node, name, methods);
+ }
+ }
+
+ let nodes_with_doc_comments = [
+ "SourceFile",
+ "Fn",
+ "Struct",
+ "Union",
+ "RecordField",
+ "TupleField",
+ "Enum",
+ "Variant",
+ "Trait",
+ "Module",
+ "Static",
+ "Const",
+ "TypeAlias",
+ "Impl",
+ "ExternBlock",
+ "ExternCrate",
+ "MacroCall",
+ "MacroRules",
+ "MacroDef",
+ "Use",
+ ];
+
+ for node in &mut ast.nodes {
+ if nodes_with_doc_comments.contains(&&*node.name) {
+ node.traits.push("HasDocComments".into());
+ }
+ }
+}
+
+fn extract_struct_trait(node: &mut AstNodeSrc, trait_name: &str, methods: &[&str]) {
+ let mut to_remove = Vec::new();
+ for (i, field) in node.fields.iter().enumerate() {
+ let method_name = field.method_name().to_string();
+ if methods.iter().any(|&it| it == method_name) {
+ to_remove.push(i);
+ }
+ }
+ if to_remove.len() == methods.len() {
+ node.traits.push(trait_name.to_string());
+ node.remove_field(to_remove);
+ }
+}
+
+fn extract_enum_traits(ast: &mut AstSrc) {
+ for enm in &mut ast.enums {
+ if enm.name == "Stmt" {
+ continue;
+ }
+ let nodes = &ast.nodes;
+ let mut variant_traits = enm
+ .variants
+ .iter()
+ .map(|var| nodes.iter().find(|it| &it.name == var).unwrap())
+ .map(|node| node.traits.iter().cloned().collect::<BTreeSet<_>>());
+
+ let mut enum_traits = match variant_traits.next() {
+ Some(it) => it,
+ None => continue,
+ };
+ for traits in variant_traits {
+ enum_traits = enum_traits.intersection(&traits).cloned().collect();
+ }
+ enm.traits = enum_traits.into_iter().collect();
+ }
+}
+
+impl AstNodeSrc {
+ fn remove_field(&mut self, to_remove: Vec<usize>) {
+ to_remove.into_iter().rev().for_each(|idx| {
+ self.fields.remove(idx);
+ });
+ }
+}
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "test-utils"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+# Avoid adding deps here, this crate is widely used in tests it should compile fast!
+dissimilar = "1.0.4"
+text-size = "1.1.0"
+rustc-hash = "1.1.0"
+
+stdx = { path = "../stdx", version = "0.0.0" }
+profile = { path = "../profile", version = "0.0.0" }
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "text-edit"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+itertools = "0.10.5"
+text-size = "1.1.0"
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "toolchain"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+home = "0.5.4"
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "tt"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+smol_str = "0.1.23"
+
+stdx = { path = "../stdx", version = "0.0.0" }
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "vfs-notify"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+tracing = "0.1.35"
+jod-thread = "0.1.2"
+walkdir = "2.3.2"
+crossbeam-channel = "0.5.5"
+notify = "5.0"
+
+vfs = { path = "../vfs", version = "0.0.0" }
+paths = { path = "../paths", version = "0.0.0" }
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "vfs"
+version = "0.0.0"
+description = "TBD"
+license = "MIT OR Apache-2.0"
+edition = "2021"
++rust-version = "1.65"
+
+[lib]
+doctest = false
+
+[dependencies]
+rustc-hash = "1.1.0"
+fst = "0.4.7"
+indexmap = "1.9.1"
+
+paths = { path = "../paths", version = "0.0.0" }
+stdx = { path = "../stdx", version = "0.0.0" }
--- /dev/null
- Enabling them by default is a matter of discoverability, as many users end up don't know about some features even though they are presented in the manual.
+# Architecture
+
+This document describes the high-level architecture of rust-analyzer.
+If you want to familiarize yourself with the code base, you are just in the right place!
+
+You might also enjoy ["Explaining Rust Analyzer"](https://www.youtube.com/playlist?list=PLhb66M_x9UmrqXhQuIpWC5VgTdrGxMx3y) series on YouTube.
+It goes deeper than what is covered in this document, but will take some time to watch.
+
+See also these implementation-related blog posts:
+
+* https://rust-analyzer.github.io/blog/2019/11/13/find-usages.html
+* https://rust-analyzer.github.io/blog/2020/07/20/three-architectures-for-responsive-ide.html
+* https://rust-analyzer.github.io/blog/2020/09/16/challeging-LR-parsing.html
+* https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html
+* https://rust-analyzer.github.io/blog/2020/10/24/introducing-ungrammar.html
+
+For older, by now mostly outdated stuff, see the [guide](./guide.md) and [another playlist](https://www.youtube.com/playlist?list=PL85XCvVPmGQho7MZkdW-wtPtuJcFpzycE).
+
+
+## Bird's Eye View
+
+![](https://user-images.githubusercontent.com/4789492/107129398-0ab70f00-687a-11eb-9bfc-d4eb023aec06.png)
+
+On the highest level, rust-analyzer is a thing which accepts input source code from the client and produces a structured semantic model of the code.
+
+More specifically, input data consists of a set of text files (`(PathBuf, String)` pairs) and information about project structure, captured in the so-called `CrateGraph`.
+The crate graph specifies which files are crate roots, which cfg flags are specified for each crate and what dependencies exist between the crates.
+This is the input (ground) state.
+The analyzer keeps all this input data in memory and never does any IO.
+Because the input data is source code, which typically measures in tens of megabytes at most, keeping everything in memory is OK.
+
+A "structured semantic model" is basically an object-oriented representation of modules, functions and types which appear in the source code.
+This representation is fully "resolved": all expressions have types, all references are bound to declarations, etc.
+This is derived state.
+
+The client can submit a small delta of input data (typically, a change to a single file) and get a fresh code model which accounts for changes.
+
+The underlying engine makes sure that model is computed lazily (on-demand) and can be quickly updated for small modifications.
+
+## Entry Points
+
+`crates/rust-analyzer/src/bin/main.rs` contains the main function which spawns LSP.
+This is *the* entry point, but it front-loads a lot of complexity, so it's fine to just skim through it.
+
+`crates/rust-analyzer/src/handlers.rs` implements all LSP requests and is a great place to start if you are already familiar with LSP.
+
+`Analysis` and `AnalysisHost` types define the main API for consumers of IDE services.
+
+## Code Map
+
+This section talks briefly about various important directories and data structures.
+Pay attention to the **Architecture Invariant** sections.
+They often talk about things which are deliberately absent in the source code.
+
+Note also which crates are **API Boundaries**.
+Remember, [rules at the boundary are different](https://www.tedinski.com/2018/02/06/system-boundaries.html).
+
+### `xtask`
+
+This is rust-analyzer's "build system".
+We use cargo to compile rust code, but there are also various other tasks, like release management or local installation.
+They are handled by Rust code in the xtask directory.
+
+### `editors/code`
+
+VS Code plugin.
+
+### `lib/`
+
+rust-analyzer independent libraries which we publish to crates.io.
+It's not heavily utilized at the moment.
+
+### `crates/parser`
+
+It is a hand-written recursive descent parser, which produces a sequence of events like "start node X", "finish node Y".
+It works similarly to
+[kotlin's parser](https://github.com/JetBrains/kotlin/blob/4d951de616b20feca92f3e9cc9679b2de9e65195/compiler/frontend/src/org/jetbrains/kotlin/parsing/KotlinParsing.java),
+which is a good source of inspiration for dealing with syntax errors and incomplete input.
+Original [libsyntax parser](https://github.com/rust-lang/rust/blob/6b99adeb11313197f409b4f7c4083c2ceca8a4fe/src/libsyntax/parse/parser.rs) is what we use for the definition of the Rust language.
+`TreeSink` and `TokenSource` traits bridge the tree-agnostic parser from `grammar` with `rowan` trees.
+
+**Architecture Invariant:** the parser is independent of the particular tree structure and particular representation of the tokens.
+It transforms one flat stream of events into another flat stream of events.
+Token independence allows us to parse out both text-based source code and `tt`-based macro input.
+Tree independence allows us to more easily vary the syntax tree implementation.
+It should also unlock efficient light-parsing approaches.
+For example, you can extract the set of names defined in a file (for typo correction) without building a syntax tree.
+
+**Architecture Invariant:** parsing never fails, the parser produces `(T, Vec<Error>)` rather than `Result<T, Error>`.
+
+### `crates/syntax`
+
+Rust syntax tree structure and parser.
+See [RFC](https://github.com/rust-lang/rfcs/pull/2256) and [./syntax.md](./syntax.md) for some design notes.
+
+- [rowan](https://github.com/rust-analyzer/rowan) library is used for constructing syntax trees.
+- `ast` provides a type safe API on top of the raw `rowan` tree.
+- `ungrammar` description of the grammar, which is used to generate `syntax_kinds` and `ast` modules, using `cargo test -p xtask` command.
+
+Tests for ra_syntax are mostly data-driven.
+`test_data/parser` contains subdirectories with a bunch of `.rs` (test vectors) and `.txt` files with corresponding syntax trees.
+During testing, we check `.rs` against `.txt`.
+If the `.txt` file is missing, it is created (this is how you update tests).
+Additionally, running the xtask test suite with `cargo test -p xtask` will walk the grammar module and collect all `// test test_name` comments into files inside `test_data/parser/inline` directory.
+
+To update test data, run with `UPDATE_EXPECT` variable:
+
+```bash
+env UPDATE_EXPECT=1 cargo qt
+```
+
+After adding a new inline test you need to run `cargo test -p xtask` and also update the test data as described above.
+
+Note [`api_walkthrough`](https://github.com/rust-lang/rust-analyzer/blob/2fb6af89eb794f775de60b82afe56b6f986c2a40/crates/ra_syntax/src/lib.rs#L190-L348)
+in particular: it shows off various methods of working with syntax tree.
+
+See [#93](https://github.com/rust-lang/rust-analyzer/pull/93) for an example PR which fixes a bug in the grammar.
+
+**Architecture Invariant:** `syntax` crate is completely independent from the rest of rust-analyzer. It knows nothing about salsa or LSP.
+This is important because it is possible to make useful tooling using only the syntax tree.
+Without semantic information, you don't need to be able to _build_ code, which makes the tooling more robust.
+See also https://web.stanford.edu/~mlfbrown/paper.pdf.
+You can view the `syntax` crate as an entry point to rust-analyzer.
+`syntax` crate is an **API Boundary**.
+
+**Architecture Invariant:** syntax tree is a value type.
+The tree is fully determined by the contents of its syntax nodes, it doesn't need global context (like an interner) and doesn't store semantic info.
+Using the tree as a store for semantic info is convenient in traditional compilers, but doesn't work nicely in the IDE.
+Specifically, assists and refactors require transforming syntax trees, and that becomes awkward if you need to do something with the semantic info.
+
+**Architecture Invariant:** syntax tree is built for a single file.
+This is to enable parallel parsing of all files.
+
+**Architecture Invariant:** Syntax trees are by design incomplete and do not enforce well-formedness.
+If an AST method returns an `Option`, it *can* be `None` at runtime, even if this is forbidden by the grammar.
+
+### `crates/base_db`
+
+We use the [salsa](https://github.com/salsa-rs/salsa) crate for incremental and on-demand computation.
+Roughly, you can think of salsa as a key-value store, but it can also compute derived values using specified functions.
+The `base_db` crate provides basic infrastructure for interacting with salsa.
+Crucially, it defines most of the "input" queries: facts supplied by the client of the analyzer.
+Reading the docs of the `base_db::input` module should be useful: everything else is strictly derived from those inputs.
+
+**Architecture Invariant:** particularities of the build system are *not* the part of the ground state.
+In particular, `base_db` knows nothing about cargo.
+For example, `cfg` flags are a part of `base_db`, but `feature`s are not.
+A `foo` feature is a Cargo-level concept, which is lowered by Cargo to `--cfg feature=foo` argument on the command line.
+The `CrateGraph` structure is used to represent the dependencies between the crates abstractly.
+
+**Architecture Invariant:** `base_db` doesn't know about file system and file paths.
+Files are represented with opaque `FileId`, there's no operation to get an `std::path::Path` out of the `FileId`.
+
+### `crates/hir_expand`, `crates/hir_def`, `crates/hir_ty`
+
+These crates are the *brain* of rust-analyzer.
+This is the compiler part of the IDE.
+
+`hir_xxx` crates have a strong [ECS](https://en.wikipedia.org/wiki/Entity_component_system) flavor, in that they work with raw ids and directly query the database.
+There's little abstraction here.
+These crates integrate deeply with salsa and chalk.
+
+Name resolution, macro expansion and type inference all happen here.
+These crates also define various intermediate representations of the core.
+
+`ItemTree` condenses a single `SyntaxTree` into a "summary" data structure, which is stable over modifications to function bodies.
+
+`DefMap` contains the module tree of a crate and stores module scopes.
+
+`Body` stores information about expressions.
+
+**Architecture Invariant:** these crates are not, and will never be, an api boundary.
+
+**Architecture Invariant:** these crates explicitly care about being incremental.
+The core invariant we maintain is "typing inside a function's body never invalidates global derived data".
+i.e., if you change the body of `foo`, all facts about `bar` should remain intact.
+
+**Architecture Invariant:** hir exists only in context of particular crate instance with specific CFG flags.
+The same syntax may produce several instances of HIR if the crate participates in the crate graph more than once.
+
+### `crates/hir`
+
+The top-level `hir` crate is an **API Boundary**.
+If you think about "using rust-analyzer as a library", `hir` crate is most likely the façade you'll be talking to.
+
+It wraps ECS-style internal API into a more OO-flavored API (with an extra `db` argument for each call).
+
+**Architecture Invariant:** `hir` provides a static, fully resolved view of the code.
+While internal `hir_*` crates _compute_ things, `hir`, from the outside, looks like an inert data structure.
+
+`hir` also handles the delicate task of going from syntax to the corresponding `hir`.
+Remember that the mapping here is one-to-many.
+See `Semantics` type and `source_to_def` module.
+
+Note in particular a curious recursive structure in `source_to_def`.
+We first resolve the parent _syntax_ node to the parent _hir_ element.
+Then we ask the _hir_ parent what _syntax_ children it has.
+Then we look for our node in the set of children.
+
+This is the heart of many IDE features, like goto definition, which start with figuring out the hir node at the cursor.
+This is some kind of (yet unnamed) uber-IDE pattern, as it is present in Roslyn and Kotlin as well.
+
+### `crates/ide`
+
+The `ide` crate builds on top of `hir` semantic model to provide high-level IDE features like completion or goto definition.
+It is an **API Boundary**.
+If you want to use IDE parts of rust-analyzer via LSP, custom flatbuffers-based protocol or just as a library in your text editor, this is the right API.
+
+**Architecture Invariant:** `ide` crate's API is built out of POD types with public fields.
+The API uses editor's terminology, it talks about offsets and string labels rather than in terms of definitions or types.
+It is effectively the view in MVC and viewmodel in [MVVM](https://en.wikipedia.org/wiki/Model%E2%80%93view%E2%80%93viewmodel).
+All arguments and return types are conceptually serializable.
+In particular, syntax trees and hir types are generally absent from the API (but are used heavily in the implementation).
+Shout outs to LSP developers for popularizing the idea that "UI" is a good place to draw a boundary at.
+
+`ide` is also the first crate which has the notion of change over time.
+`AnalysisHost` is a state to which you can transactionally `apply_change`.
+`Analysis` is an immutable snapshot of the state.
+
+Internally, `ide` is split across several crates. `ide_assists`, `ide_completion` and `ide_ssr` implement large isolated features.
+`ide_db` implements common IDE functionality (notably, reference search is implemented here).
+The `ide` contains a public API/façade, as well as implementation for a plethora of smaller features.
+
+**Architecture Invariant:** `ide` crate strives to provide a _perfect_ API.
+Although at the moment it has only one consumer, the LSP server, LSP *does not* influence its API design.
+Instead, we keep in mind a hypothetical _ideal_ client -- an IDE tailored specifically for rust, every nook and cranny of which is packed with Rust-specific goodies.
+
+### `crates/rust-analyzer`
+
+This crate defines the `rust-analyzer` binary, so it is the **entry point**.
+It implements the language server.
+
+**Architecture Invariant:** `rust-analyzer` is the only crate that knows about LSP and JSON serialization.
+If you want to expose a data structure `X` from ide to LSP, don't make it serializable.
+Instead, create a serializable counterpart in `rust-analyzer` crate and manually convert between the two.
+
+`GlobalState` is the state of the server.
+The `main_loop` defines the server event loop which accepts requests and sends responses.
+Requests that modify the state or might block user's typing are handled on the main thread.
+All other requests are processed in background.
+
+**Architecture Invariant:** the server is stateless, a-la HTTP.
+Sometimes state needs to be preserved between requests.
+For example, "what is the `edit` for the fifth completion item of the last completion edit?".
+For this, the second request should include enough info to re-create the context from scratch.
+This generally means including all the parameters of the original request.
+
+`reload` module contains the code that handles configuration and Cargo.toml changes.
+This is a tricky business.
+
+**Architecture Invariant:** `rust-analyzer` should be partially available even when the build is broken.
+Reloading process should not prevent IDE features from working.
+
+### `crates/toolchain`, `crates/project_model`, `crates/flycheck`
+
+These crates deal with invoking `cargo` to learn about project structure and get compiler errors for the "check on save" feature.
+
+They use `crates/paths` heavily instead of `std::path`.
+A single `rust-analyzer` process can serve many projects, so it is important that server's current directory does not leak.
+
+### `crates/mbe`, `crates/tt`, `crates/proc_macro_api`, `crates/proc_macro_srv`
+
+These crates implement macros as token tree -> token tree transforms.
+They are independent from the rest of the code.
+
+`tt` crate defines `TokenTree`, a single token or a delimited sequence of token trees.
+`mbe` crate contains tools for transforming between syntax trees and token tree.
+And it also handles the actual parsing and expansion of declarative macro (a-la "Macros By Example" or mbe).
+
+For proc macros, a client-server model is used.
+We pass an argument `--proc-macro` to the `rust-analyzer` binary to start a separate process (`proc_macro_srv`).
+The client (`proc_macro_api`) provides an interface to talk to that server separately.
+
+Token trees are passed from the client, and the server loads the corresponding dynamic library (which is built by `cargo`).
+Because the API for getting results from proc macros is perpetually unstable in `rustc`,
+we maintain our own copy (and paste) of that part of the code to allow us to build the whole thing in stable Rust.
+
+**Architecture Invariant:**
+Bad proc macros may panic or segfault accidentally, so we run them in another process and recover from fatal errors.
+They may also be non-deterministic, which conflicts with how `salsa` works, so special attention is required.
+
+### `crates/cfg`
+
+This crate is responsible for parsing, evaluation and general definition of `cfg` attributes.
+
+### `crates/vfs`, `crates/vfs-notify`
+
+These crates implement a virtual file system.
+They provide consistent snapshots of the underlying file system and insulate messy OS paths.
+
+**Architecture Invariant:** vfs doesn't assume a single unified file system.
+i.e., a single rust-analyzer process can act as a remote server for two different machines, where the same `/tmp/foo.rs` path points to different files.
+For this reason, all path APIs generally take some existing path as a "file system witness".
+
+### `crates/stdx`
+
+This crate contains various non-rust-analyzer specific utils, which could have been in std, as well
+as copies of unstable std items we would like to make use of already, like `std::str::split_once`.
+
+### `crates/profile`
+
+This crate contains utilities for CPU and memory profiling.
+
+
+## Cross-Cutting Concerns
+
+This section talks about the things which are everywhere and nowhere in particular.
+
+### Stability Guarantees
+
+One of the reasons rust-analyzer moves relatively fast is that we don't introduce new stability guarantees.
+Instead, as much as possible we leverage existing ones.
+
+Examples:
+
+* The `ide` API of rust-analyzer are explicitly unstable, but the LSP interface is stable, and here we just implement a stable API managed by someone else.
+* Rust language and Cargo are stable, and they are the primary inputs to rust-analyzer.
+* The `rowan` library is published to crates.io, but it is deliberately kept under `1.0` and always makes semver-incompatible upgrades
+
+Another important example is that rust-analyzer isn't run on CI, so, unlike `rustc` and `clippy`, it is actually ok for us to change runtime behavior.
+
+At some point we might consider opening up APIs or allowing crates.io libraries to include rust-analyzer specific annotations, but that's going to be a big commitment on our side.
+
+Exceptions:
+
+* `rust-project.json` is a de-facto stable format for non-cargo build systems.
+ It is probably ok enough, but was definitely stabilized implicitly.
+ Lesson for the future: when designing API which could become a stability boundary, don't wait for the first users until you stabilize it.
+ By the time you have first users, it is already de-facto stable.
+ And the users will first use the thing, and *then* inform you that now you have users.
+ The sad thing is that stuff should be stable before someone uses it for the first time, or it should contain explicit opt-in.
+* We ship some LSP extensions, and we try to keep those somewhat stable.
+ Here, we need to work with a finite set of editor maintainers, so not providing rock-solid guarantees works.
+
+### Code generation
+
+Some components in this repository are generated through automatic processes.
+Generated code is updated automatically on `cargo test`.
+Generated code is generally committed to the git repository.
+
+In particular, we generate:
+
+* API for working with syntax trees (`syntax::ast`, the [`ungrammar`](https://github.com/rust-analyzer/ungrammar) crate).
+* Various sections of the manual:
+
+ * features
+ * assists
+ * config
+
+* Documentation tests for assists
+
+See the `sourcegen` crate for details.
+
+**Architecture Invariant:** we avoid bootstrapping.
+For codegen we need to parse Rust code.
+Using rust-analyzer for that would work and would be fun, but it would also complicate the build process a lot.
+For that reason, we use syn and manual string parsing.
+
+### Cancellation
+
+Let's say that the IDE is in the process of computing syntax highlighting, when the user types `foo`.
+What should happen?
+`rust-analyzer`s answer is that the highlighting process should be cancelled -- its results are now stale, and it also blocks modification of the inputs.
+
+The salsa database maintains a global revision counter.
+When applying a change, salsa bumps this counter and waits until all other threads using salsa finish.
+If a thread does salsa-based computation and notices that the counter is incremented, it panics with a special value (see `Canceled::throw`).
+That is, rust-analyzer requires unwinding.
+
+`ide` is the boundary where the panic is caught and transformed into a `Result<T, Cancelled>`.
+
+### Testing
+
+rust-analyzer has three interesting [system boundaries](https://www.tedinski.com/2018/04/10/making-tests-a-positive-influence-on-design.html) to concentrate tests on.
+
+The outermost boundary is the `rust-analyzer` crate, which defines an LSP interface in terms of stdio.
+We do integration testing of this component, by feeding it with a stream of LSP requests and checking responses.
+These tests are known as "heavy", because they interact with Cargo and read real files from disk.
+For this reason, we try to avoid writing too many tests on this boundary: in a statically typed language, it's hard to make an error in the protocol itself if messages are themselves typed.
+Heavy tests are only run when `RUN_SLOW_TESTS` env var is set.
+
+The middle, and most important, boundary is `ide`.
+Unlike `rust-analyzer`, which exposes API, `ide` uses Rust API and is intended for use by various tools.
+A typical test creates an `AnalysisHost`, calls some `Analysis` functions and compares the results against expectation.
+
+The innermost and most elaborate boundary is `hir`.
+It has a much richer vocabulary of types than `ide`, but the basic testing setup is the same: we create a database, run some queries, assert result.
+
+For comparisons, we use the `expect` crate for snapshot testing.
+
+To test various analysis corner cases and avoid forgetting about old tests, we use so-called marks.
+See the `marks` module in the `test_utils` crate for more.
+
+**Architecture Invariant:** rust-analyzer tests do not use libcore or libstd.
+All required library code must be a part of the tests.
+This ensures fast test execution.
+
+**Architecture Invariant:** tests are data driven and do not test the API.
+Tests which directly call various API functions are a liability, because they make refactoring the API significantly more complicated.
+So most of the tests look like this:
+
+```rust
+#[track_caller]
+fn check(input: &str, expect: expect_test::Expect) {
+ // The single place that actually exercises a particular API
+}
+
+#[test]
+fn foo() {
+ check("foo", expect![["bar"]]);
+}
+
+#[test]
+fn spam() {
+ check("spam", expect![["eggs"]]);
+}
+// ...and a hundred more tests that don't care about the specific API at all.
+```
+
+To specify input data, we use a single string literal in a special format, which can describe a set of rust files.
+See the `Fixture` type and its module for fixture examples and documentation.
+
+**Architecture Invariant:** all code invariants are tested by `#[test]` tests.
+There are no additional checks in CI; formatting and tidy tests are run with `cargo test`.
+
+**Architecture Invariant:** tests do not depend on any kind of external resources, they are perfectly reproducible.
+
+
+### Performance Testing
+
+TBA, take a look at the `metrics` xtask and `#[test] fn benchmark_xxx()` functions.
+
+### Error Handling
+
+**Architecture Invariant:** core parts of rust-analyzer (`ide`/`hir`) don't interact with the outside world and thus can't fail.
+Only parts touching LSP are allowed to do IO.
+
+Internals of rust-analyzer need to deal with broken code, but this is not an error condition.
+rust-analyzer is robust: various analysis compute `(T, Vec<Error>)` rather than `Result<T, Error>`.
+
+rust-analyzer is a complex long-running process.
+It will always have bugs and panics.
+But a panic in an isolated feature should not bring down the whole process.
+Each LSP-request is protected by a `catch_unwind`.
+We use `always` and `never` macros instead of `assert` to gracefully recover from impossible conditions.
+
+### Observability
+
+rust-analyzer is a long-running process, so it is important to understand what's going on inside.
+We have several instruments for that.
+
+The event loop that runs rust-analyzer is very explicit.
+Rather than spawning futures or scheduling callbacks (open), the event loop accepts an `enum` of possible events (closed).
+It's easy to see all the things that trigger rust-analyzer processing, together with their performance.
+
+rust-analyzer includes a simple hierarchical profiler (`hprof`).
+It is enabled with `RA_PROFILE='*>50'` env var (log all (`*`) actions which take more than `50` ms) and produces output like:
+
+```
+85ms - handle_completion
+ 68ms - import_on_the_fly
+ 67ms - import_assets::search_for_relative_paths
+ 0ms - crate_def_map:wait (804 calls)
+ 0ms - find_path (16 calls)
+ 2ms - find_similar_imports (1 calls)
+ 0ms - generic_params_query (334 calls)
+ 59ms - trait_solve_query (186 calls)
+ 0ms - Semantics::analyze_impl (1 calls)
+ 1ms - render_resolution (8 calls)
+ 0ms - Semantics::analyze_impl (5 calls)
+```
+
+This is cheap enough to enable in production.
+
+
+Similarly, we save live object counting (`RA_COUNT=1`).
+It is not cheap enough to enable in prod, and this is a bug which should be fixed.
+
+### Configurability
+
+rust-analyzer strives to be as configurable as possible while offering reasonable defaults where no configuration exists yet.
+The rule of thumb is to enable most features by default unless they are buggy or degrade performance too much.
+There will always be features that some people find more annoying than helpful, so giving the users the ability to tweak or disable these is a big part of offering a good user experience.
- If such types need to cross an IPC boundary, then the client of rust-analyzer needs to provide custom, client-specific serialization format.
++Enabling them by default is a matter of discoverability, as many users don't know about some features even though they are presented in the manual.
+Mind the code--architecture gap: at the moment, we are using fewer feature flags than we really should.
+
+### Serialization
+
+In Rust, it is easy (often too easy) to add serialization to any type by adding `#[derive(Serialize)]`.
+This easiness is misleading -- serializable types impose significant backwards compatibility constraints.
+If a type is serializable, then it is a part of some IPC boundary.
+You often don't control the other side of this boundary, so changing serializable types is hard.
+
+For this reason, the types in `ide`, `base_db` and below are not serializable by design.
- For example, `rust-project.json` is it's own format -- it doesn't include `CrateGraph` as is.
++If such types need to cross an IPC boundary, then the client of rust-analyzer needs to provide a custom, client-specific serialization format.
+This isolates backwards compatibility and migration concerns to a specific client.
+
++For example, `rust-project.json` is its own format -- it doesn't include `CrateGraph` as is.
+Instead, it creates a `CrateGraph` by calling appropriate constructing functions.
--- /dev/null
- should therefor include `--message-format=json` or a similar option.
+[[rust-analyzer.assist.emitMustUse]]rust-analyzer.assist.emitMustUse (default: `false`)::
++
+--
+Whether to insert #[must_use] when generating `as_` methods
+for enum variants.
+--
+[[rust-analyzer.assist.expressionFillDefault]]rust-analyzer.assist.expressionFillDefault (default: `"todo"`)::
++
+--
+Placeholder expression to use for missing expressions in assists.
+--
+[[rust-analyzer.cachePriming.enable]]rust-analyzer.cachePriming.enable (default: `true`)::
++
+--
+Warm up caches on project load.
+--
+[[rust-analyzer.cachePriming.numThreads]]rust-analyzer.cachePriming.numThreads (default: `0`)::
++
+--
+How many worker threads to handle priming caches. The default `0` means to pick automatically.
+--
+[[rust-analyzer.cargo.autoreload]]rust-analyzer.cargo.autoreload (default: `true`)::
++
+--
+Automatically refresh project info via `cargo metadata` on
+`Cargo.toml` or `.cargo/config.toml` changes.
+--
+[[rust-analyzer.cargo.buildScripts.enable]]rust-analyzer.cargo.buildScripts.enable (default: `true`)::
++
+--
+Run build scripts (`build.rs`) for more precise code analysis.
+--
+[[rust-analyzer.cargo.buildScripts.invocationLocation]]rust-analyzer.cargo.buildScripts.invocationLocation (default: `"workspace"`)::
++
+--
+Specifies the working directory for running build scripts.
+- "workspace": run build scripts for a workspace in the workspace's root directory.
+ This is incompatible with `#rust-analyzer.cargo.buildScripts.invocationStrategy#` set to `once`.
+- "root": run build scripts in the project's root directory.
+This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
+is set.
+--
+[[rust-analyzer.cargo.buildScripts.invocationStrategy]]rust-analyzer.cargo.buildScripts.invocationStrategy (default: `"per_workspace"`)::
++
+--
+Specifies the invocation strategy to use when running the build scripts command.
+If `per_workspace` is set, the command will be executed for each workspace.
+If `once` is set, the command will be executed once.
+This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
+is set.
+--
+[[rust-analyzer.cargo.buildScripts.overrideCommand]]rust-analyzer.cargo.buildScripts.overrideCommand (default: `null`)::
++
+--
+Override the command rust-analyzer uses to run build scripts and
+build procedural macros. The command is required to output json
+and should therefore include `--message-format=json` or a similar
+option.
+
+By default, a cargo invocation will be constructed for the configured
+targets and features, with the following base command line:
+
+```bash
+cargo check --quiet --workspace --message-format=json --all-targets
+```
+.
+--
+[[rust-analyzer.cargo.buildScripts.useRustcWrapper]]rust-analyzer.cargo.buildScripts.useRustcWrapper (default: `true`)::
++
+--
+Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to
+avoid checking unnecessary things.
+--
+[[rust-analyzer.cargo.extraEnv]]rust-analyzer.cargo.extraEnv (default: `{}`)::
++
+--
+Extra environment variables that will be set when running cargo, rustc
+or other commands within the workspace. Useful for setting RUSTFLAGS.
+--
+[[rust-analyzer.cargo.features]]rust-analyzer.cargo.features (default: `[]`)::
++
+--
+List of features to activate.
+
+Set this to `"all"` to pass `--all-features` to cargo.
+--
+[[rust-analyzer.cargo.noDefaultFeatures]]rust-analyzer.cargo.noDefaultFeatures (default: `false`)::
++
+--
+Whether to pass `--no-default-features` to cargo.
+--
+[[rust-analyzer.cargo.sysroot]]rust-analyzer.cargo.sysroot (default: `"discover"`)::
++
+--
+Relative path to the sysroot, or "discover" to try to automatically find it via
+"rustc --print sysroot".
+
+Unsetting this disables sysroot loading.
+
+This option does not take effect until rust-analyzer is restarted.
+--
+[[rust-analyzer.cargo.target]]rust-analyzer.cargo.target (default: `null`)::
++
+--
+Compilation target override (target triple).
+--
+[[rust-analyzer.cargo.unsetTest]]rust-analyzer.cargo.unsetTest (default: `["core"]`)::
++
+--
+Unsets `#[cfg(test)]` for the specified crates.
+--
+[[rust-analyzer.checkOnSave.allTargets]]rust-analyzer.checkOnSave.allTargets (default: `true`)::
++
+--
+Check all targets and tests (`--all-targets`).
+--
+[[rust-analyzer.checkOnSave.command]]rust-analyzer.checkOnSave.command (default: `"check"`)::
++
+--
+Cargo command to use for `cargo check`.
+--
+[[rust-analyzer.checkOnSave.enable]]rust-analyzer.checkOnSave.enable (default: `true`)::
++
+--
+Run specified `cargo check` command for diagnostics on save.
+--
+[[rust-analyzer.checkOnSave.extraArgs]]rust-analyzer.checkOnSave.extraArgs (default: `[]`)::
++
+--
+Extra arguments for `cargo check`.
+--
+[[rust-analyzer.checkOnSave.extraEnv]]rust-analyzer.checkOnSave.extraEnv (default: `{}`)::
++
+--
+Extra environment variables that will be set when running `cargo check`.
+Extends `#rust-analyzer.cargo.extraEnv#`.
+--
+[[rust-analyzer.checkOnSave.features]]rust-analyzer.checkOnSave.features (default: `null`)::
++
+--
+List of features to activate. Defaults to
+`#rust-analyzer.cargo.features#`.
+
+Set to `"all"` to pass `--all-features` to Cargo.
+--
+[[rust-analyzer.checkOnSave.invocationLocation]]rust-analyzer.checkOnSave.invocationLocation (default: `"workspace"`)::
++
+--
+Specifies the working directory for running checks.
+- "workspace": run checks for workspaces in the corresponding workspaces' root directories.
+ This falls back to "root" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`.
+- "root": run checks in the project's root directory.
+This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
+is set.
+--
+[[rust-analyzer.checkOnSave.invocationStrategy]]rust-analyzer.checkOnSave.invocationStrategy (default: `"per_workspace"`)::
++
+--
+Specifies the invocation strategy to use when running the checkOnSave command.
+If `per_workspace` is set, the command will be executed for each workspace.
+If `once` is set, the command will be executed once.
+This config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`
+is set.
+--
+[[rust-analyzer.checkOnSave.noDefaultFeatures]]rust-analyzer.checkOnSave.noDefaultFeatures (default: `null`)::
++
+--
+Whether to pass `--no-default-features` to Cargo. Defaults to
+`#rust-analyzer.cargo.noDefaultFeatures#`.
+--
+[[rust-analyzer.checkOnSave.overrideCommand]]rust-analyzer.checkOnSave.overrideCommand (default: `null`)::
++
+--
+Override the command rust-analyzer uses instead of `cargo check` for
+diagnostics on save. The command is required to output json and
- [[rust-analyzer.checkOnSave.target]]rust-analyzer.checkOnSave.target (default: `null`)::
++should therefore include `--message-format=json` or a similar option.
+
+If you're changing this because you're using some tool wrapping
+Cargo, you might also want to change
+`#rust-analyzer.cargo.buildScripts.overrideCommand#`.
+
+If there are multiple linked projects, this command is invoked for
+each of them, with the working directory being the project root
+(i.e., the folder containing the `Cargo.toml`).
+
+An example command would be:
+
+```bash
+cargo check --workspace --message-format=json --all-targets
+```
+.
+--
- Check for a specific target. Defaults to
- `#rust-analyzer.cargo.target#`.
++[[rust-analyzer.checkOnSave.target]]rust-analyzer.checkOnSave.target (default: `[]`)::
++
+--
- Whether to show inlay type hints for compiler inserted reborrows.
++Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty.
++
++Can be a single target, e.g. `"x86_64-unknown-linux-gnu"` or a list of targets, e.g.
++`["aarch64-apple-darwin", "x86_64-apple-darwin"]`.
++
++Aliased as `"checkOnSave.targets"`.
+--
+[[rust-analyzer.completion.autoimport.enable]]rust-analyzer.completion.autoimport.enable (default: `true`)::
++
+--
+Toggles the additional completions that automatically add imports when completed.
+Note that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.
+--
+[[rust-analyzer.completion.autoself.enable]]rust-analyzer.completion.autoself.enable (default: `true`)::
++
+--
+Toggles the additional completions that automatically show method calls and field accesses
+with `self` prefixed to them when inside a method.
+--
+[[rust-analyzer.completion.callable.snippets]]rust-analyzer.completion.callable.snippets (default: `"fill_arguments"`)::
++
+--
+Whether to add parentheses and argument snippets when completing functions.
+--
+[[rust-analyzer.completion.postfix.enable]]rust-analyzer.completion.postfix.enable (default: `true`)::
++
+--
+Whether to show postfix snippets like `dbg`, `if`, `not`, etc.
+--
+[[rust-analyzer.completion.privateEditable.enable]]rust-analyzer.completion.privateEditable.enable (default: `false`)::
++
+--
+Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.
+--
+[[rust-analyzer.completion.snippets.custom]]rust-analyzer.completion.snippets.custom::
++
+--
+Default:
+----
+{
+ "Arc::new": {
+ "postfix": "arc",
+ "body": "Arc::new(${receiver})",
+ "requires": "std::sync::Arc",
+ "description": "Put the expression into an `Arc`",
+ "scope": "expr"
+ },
+ "Rc::new": {
+ "postfix": "rc",
+ "body": "Rc::new(${receiver})",
+ "requires": "std::rc::Rc",
+ "description": "Put the expression into an `Rc`",
+ "scope": "expr"
+ },
+ "Box::pin": {
+ "postfix": "pinbox",
+ "body": "Box::pin(${receiver})",
+ "requires": "std::boxed::Box",
+ "description": "Put the expression into a pinned `Box`",
+ "scope": "expr"
+ },
+ "Ok": {
+ "postfix": "ok",
+ "body": "Ok(${receiver})",
+ "description": "Wrap the expression in a `Result::Ok`",
+ "scope": "expr"
+ },
+ "Err": {
+ "postfix": "err",
+ "body": "Err(${receiver})",
+ "description": "Wrap the expression in a `Result::Err`",
+ "scope": "expr"
+ },
+ "Some": {
+ "postfix": "some",
+ "body": "Some(${receiver})",
+ "description": "Wrap the expression in an `Option::Some`",
+ "scope": "expr"
+ }
+ }
+----
+Custom completion snippets.
+
+--
+[[rust-analyzer.diagnostics.disabled]]rust-analyzer.diagnostics.disabled (default: `[]`)::
++
+--
+List of rust-analyzer diagnostics to disable.
+--
+[[rust-analyzer.diagnostics.enable]]rust-analyzer.diagnostics.enable (default: `true`)::
++
+--
+Whether to show native rust-analyzer diagnostics.
+--
+[[rust-analyzer.diagnostics.experimental.enable]]rust-analyzer.diagnostics.experimental.enable (default: `false`)::
++
+--
+Whether to show experimental rust-analyzer diagnostics that might
+have more false positives than usual.
+--
+[[rust-analyzer.diagnostics.remapPrefix]]rust-analyzer.diagnostics.remapPrefix (default: `{}`)::
++
+--
+Map of prefixes to be substituted when parsing diagnostic file paths.
+This should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.
+--
+[[rust-analyzer.diagnostics.warningsAsHint]]rust-analyzer.diagnostics.warningsAsHint (default: `[]`)::
++
+--
+List of warnings that should be displayed with hint severity.
+
+The warnings will be indicated by faded text or three dots in code
+and will not show up in the `Problems Panel`.
+--
+[[rust-analyzer.diagnostics.warningsAsInfo]]rust-analyzer.diagnostics.warningsAsInfo (default: `[]`)::
++
+--
+List of warnings that should be displayed with info severity.
+
+The warnings will be indicated by a blue squiggly underline in code
+and a blue icon in the `Problems Panel`.
+--
+[[rust-analyzer.files.excludeDirs]]rust-analyzer.files.excludeDirs (default: `[]`)::
++
+--
+These directories will be ignored by rust-analyzer. They are
+relative to the workspace root, and globs are not supported. You may
+also need to add the folders to Code's `files.watcherExclude`.
+--
+[[rust-analyzer.files.watcher]]rust-analyzer.files.watcher (default: `"client"`)::
++
+--
+Controls file watching implementation.
+--
+[[rust-analyzer.highlightRelated.breakPoints.enable]]rust-analyzer.highlightRelated.breakPoints.enable (default: `true`)::
++
+--
+Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.
+--
+[[rust-analyzer.highlightRelated.exitPoints.enable]]rust-analyzer.highlightRelated.exitPoints.enable (default: `true`)::
++
+--
+Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).
+--
+[[rust-analyzer.highlightRelated.references.enable]]rust-analyzer.highlightRelated.references.enable (default: `true`)::
++
+--
+Enables highlighting of related references while the cursor is on any identifier.
+--
+[[rust-analyzer.highlightRelated.yieldPoints.enable]]rust-analyzer.highlightRelated.yieldPoints.enable (default: `true`)::
++
+--
+Enables highlighting of all yield points while the cursor is on any `async` or `await` keywords.
+--
+[[rust-analyzer.hover.actions.debug.enable]]rust-analyzer.hover.actions.debug.enable (default: `true`)::
++
+--
+Whether to show `Debug` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+--
+[[rust-analyzer.hover.actions.enable]]rust-analyzer.hover.actions.enable (default: `true`)::
++
+--
+Whether to show HoverActions in Rust files.
+--
+[[rust-analyzer.hover.actions.gotoTypeDef.enable]]rust-analyzer.hover.actions.gotoTypeDef.enable (default: `true`)::
++
+--
+Whether to show `Go to Type Definition` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+--
+[[rust-analyzer.hover.actions.implementations.enable]]rust-analyzer.hover.actions.implementations.enable (default: `true`)::
++
+--
+Whether to show `Implementations` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+--
+[[rust-analyzer.hover.actions.references.enable]]rust-analyzer.hover.actions.references.enable (default: `false`)::
++
+--
+Whether to show `References` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+--
+[[rust-analyzer.hover.actions.run.enable]]rust-analyzer.hover.actions.run.enable (default: `true`)::
++
+--
+Whether to show `Run` action. Only applies when
+`#rust-analyzer.hover.actions.enable#` is set.
+--
+[[rust-analyzer.hover.documentation.enable]]rust-analyzer.hover.documentation.enable (default: `true`)::
++
+--
+Whether to show documentation on hover.
+--
+[[rust-analyzer.hover.documentation.keywords.enable]]rust-analyzer.hover.documentation.keywords.enable (default: `true`)::
++
+--
+Whether to show keyword hover popups. Only applies when
+`#rust-analyzer.hover.documentation.enable#` is set.
+--
+[[rust-analyzer.hover.links.enable]]rust-analyzer.hover.links.enable (default: `true`)::
++
+--
+Use markdown syntax for links in hover.
+--
+[[rust-analyzer.imports.granularity.enforce]]rust-analyzer.imports.granularity.enforce (default: `false`)::
++
+--
+Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.
+--
+[[rust-analyzer.imports.granularity.group]]rust-analyzer.imports.granularity.group (default: `"crate"`)::
++
+--
+How imports should be grouped into use statements.
+--
+[[rust-analyzer.imports.group.enable]]rust-analyzer.imports.group.enable (default: `true`)::
++
+--
+Group inserted imports by the https://rust-analyzer.github.io/manual.html#auto-import[following order]. Groups are separated by newlines.
+--
+[[rust-analyzer.imports.merge.glob]]rust-analyzer.imports.merge.glob (default: `true`)::
++
+--
+Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.
+--
+[[rust-analyzer.imports.prefer.no.std]]rust-analyzer.imports.prefer.no.std (default: `false`)::
++
+--
+Prefer to unconditionally use imports of the core and alloc crate, over the std crate.
+--
+[[rust-analyzer.imports.prefix]]rust-analyzer.imports.prefix (default: `"plain"`)::
++
+--
+The path structure for newly inserted paths to use.
+--
+[[rust-analyzer.inlayHints.bindingModeHints.enable]]rust-analyzer.inlayHints.bindingModeHints.enable (default: `false`)::
++
+--
+Whether to show inlay type hints for binding modes.
+--
+[[rust-analyzer.inlayHints.chainingHints.enable]]rust-analyzer.inlayHints.chainingHints.enable (default: `true`)::
++
+--
+Whether to show inlay type hints for method chains.
+--
+[[rust-analyzer.inlayHints.closingBraceHints.enable]]rust-analyzer.inlayHints.closingBraceHints.enable (default: `true`)::
++
+--
+Whether to show inlay hints after a closing `}` to indicate what item it belongs to.
+--
+[[rust-analyzer.inlayHints.closingBraceHints.minLines]]rust-analyzer.inlayHints.closingBraceHints.minLines (default: `25`)::
++
+--
+Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1
+to always show them).
+--
+[[rust-analyzer.inlayHints.closureReturnTypeHints.enable]]rust-analyzer.inlayHints.closureReturnTypeHints.enable (default: `"never"`)::
++
+--
+Whether to show inlay type hints for return types of closures.
+--
+[[rust-analyzer.inlayHints.expressionAdjustmentHints.enable]]rust-analyzer.inlayHints.expressionAdjustmentHints.enable (default: `"never"`)::
++
+--
+Whether to show inlay hints for type adjustments.
+--
+[[rust-analyzer.inlayHints.lifetimeElisionHints.enable]]rust-analyzer.inlayHints.lifetimeElisionHints.enable (default: `"never"`)::
++
+--
+Whether to show inlay type hints for elided lifetimes in function signatures.
+--
+[[rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames]]rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames (default: `false`)::
++
+--
+Whether to prefer using parameter names as the name for elided lifetime hints if possible.
+--
+[[rust-analyzer.inlayHints.maxLength]]rust-analyzer.inlayHints.maxLength (default: `25`)::
++
+--
+Maximum length for inlay hints. Set to null to have an unlimited length.
+--
+[[rust-analyzer.inlayHints.parameterHints.enable]]rust-analyzer.inlayHints.parameterHints.enable (default: `true`)::
++
+--
+Whether to show function parameter name inlay hints at the call
+site.
+--
+[[rust-analyzer.inlayHints.reborrowHints.enable]]rust-analyzer.inlayHints.reborrowHints.enable (default: `"never"`)::
++
+--
+Whether to show inlay hints for compiler inserted reborrows.
+This setting is deprecated in favor of `#rust-analyzer.inlayHints.expressionAdjustmentHints.enable#`.
+--
+[[rust-analyzer.inlayHints.renderColons]]rust-analyzer.inlayHints.renderColons (default: `true`)::
++
+--
+Whether to render leading colons for type hints, and trailing colons for parameter hints.
+--
+[[rust-analyzer.inlayHints.typeHints.enable]]rust-analyzer.inlayHints.typeHints.enable (default: `true`)::
++
+--
+Whether to show inlay type hints for variables.
+--
+[[rust-analyzer.inlayHints.typeHints.hideClosureInitialization]]rust-analyzer.inlayHints.typeHints.hideClosureInitialization (default: `false`)::
++
+--
+Whether to hide inlay type hints for `let` statements that initialize to a closure.
+Only applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.
+--
+[[rust-analyzer.inlayHints.typeHints.hideNamedConstructor]]rust-analyzer.inlayHints.typeHints.hideNamedConstructor (default: `false`)::
++
+--
+Whether to hide inlay type hints for constructors.
+--
+[[rust-analyzer.joinLines.joinAssignments]]rust-analyzer.joinLines.joinAssignments (default: `true`)::
++
+--
+Join lines merges consecutive declaration and initialization of an assignment.
+--
+[[rust-analyzer.joinLines.joinElseIf]]rust-analyzer.joinLines.joinElseIf (default: `true`)::
++
+--
+Join lines inserts else between consecutive ifs.
+--
+[[rust-analyzer.joinLines.removeTrailingComma]]rust-analyzer.joinLines.removeTrailingComma (default: `true`)::
++
+--
+Join lines removes trailing commas.
+--
+[[rust-analyzer.joinLines.unwrapTrivialBlock]]rust-analyzer.joinLines.unwrapTrivialBlock (default: `true`)::
++
+--
+Join lines unwraps trivial blocks.
+--
+[[rust-analyzer.lens.debug.enable]]rust-analyzer.lens.debug.enable (default: `true`)::
++
+--
+Whether to show `Debug` lens. Only applies when
+`#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.lens.enable]]rust-analyzer.lens.enable (default: `true`)::
++
+--
+Whether to show CodeLens in Rust files.
+--
+[[rust-analyzer.lens.forceCustomCommands]]rust-analyzer.lens.forceCustomCommands (default: `true`)::
++
+--
+Internal config: use custom client-side commands even when the
+client doesn't set the corresponding capability.
+--
+[[rust-analyzer.lens.implementations.enable]]rust-analyzer.lens.implementations.enable (default: `true`)::
++
+--
+Whether to show `Implementations` lens. Only applies when
+`#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.lens.location]]rust-analyzer.lens.location (default: `"above_name"`)::
++
+--
+Where to render annotations.
+--
+[[rust-analyzer.lens.references.adt.enable]]rust-analyzer.lens.references.adt.enable (default: `false`)::
++
+--
+Whether to show `References` lens for Struct, Enum, and Union.
+Only applies when `#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.lens.references.enumVariant.enable]]rust-analyzer.lens.references.enumVariant.enable (default: `false`)::
++
+--
+Whether to show `References` lens for Enum Variants.
+Only applies when `#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.lens.references.method.enable]]rust-analyzer.lens.references.method.enable (default: `false`)::
++
+--
+Whether to show `Method References` lens. Only applies when
+`#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.lens.references.trait.enable]]rust-analyzer.lens.references.trait.enable (default: `false`)::
++
+--
+Whether to show `References` lens for Trait.
+Only applies when `#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.lens.run.enable]]rust-analyzer.lens.run.enable (default: `true`)::
++
+--
+Whether to show `Run` lens. Only applies when
+`#rust-analyzer.lens.enable#` is set.
+--
+[[rust-analyzer.linkedProjects]]rust-analyzer.linkedProjects (default: `[]`)::
++
+--
+Disable project auto-discovery in favor of explicitly specified set
+of projects.
+
+Elements must be paths pointing to `Cargo.toml`,
+`rust-project.json`, or JSON objects in `rust-project.json` format.
+--
+[[rust-analyzer.lru.capacity]]rust-analyzer.lru.capacity (default: `null`)::
++
+--
+Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.
+--
+[[rust-analyzer.notifications.cargoTomlNotFound]]rust-analyzer.notifications.cargoTomlNotFound (default: `true`)::
++
+--
+Whether to show `can't find Cargo.toml` error message.
+--
+[[rust-analyzer.procMacro.attributes.enable]]rust-analyzer.procMacro.attributes.enable (default: `true`)::
++
+--
+Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.
+--
+[[rust-analyzer.procMacro.enable]]rust-analyzer.procMacro.enable (default: `true`)::
++
+--
+Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.
+--
+[[rust-analyzer.procMacro.ignored]]rust-analyzer.procMacro.ignored (default: `{}`)::
++
+--
+These proc-macros will be ignored when trying to expand them.
+
+This config takes a map of crate names with the exported proc-macro names to ignore as values.
+--
+[[rust-analyzer.procMacro.server]]rust-analyzer.procMacro.server (default: `null`)::
++
+--
+Internal config, path to proc-macro server executable (typically,
+this is rust-analyzer itself, but we override this in tests).
+--
+[[rust-analyzer.references.excludeImports]]rust-analyzer.references.excludeImports (default: `false`)::
++
+--
+Exclude imports from find-all-references.
+--
+[[rust-analyzer.runnables.command]]rust-analyzer.runnables.command (default: `null`)::
++
+--
+Command to be executed instead of 'cargo' for runnables.
+--
+[[rust-analyzer.runnables.extraArgs]]rust-analyzer.runnables.extraArgs (default: `[]`)::
++
+--
+Additional arguments to be passed to cargo for runnables such as
+tests or binaries. For example, it may be `--release`.
+--
+[[rust-analyzer.rustc.source]]rust-analyzer.rustc.source (default: `null`)::
++
+--
+Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private
+projects, or "discover" to try to automatically find it if the `rustc-dev` component
+is installed.
+
+Any project which uses rust-analyzer with the rustcPrivate
+crates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.
+
+This option does not take effect until rust-analyzer is restarted.
+--
+[[rust-analyzer.rustfmt.extraArgs]]rust-analyzer.rustfmt.extraArgs (default: `[]`)::
++
+--
+Additional arguments to `rustfmt`.
+--
+[[rust-analyzer.rustfmt.overrideCommand]]rust-analyzer.rustfmt.overrideCommand (default: `null`)::
++
+--
+Advanced option, fully override the command rust-analyzer uses for
+formatting.
+--
+[[rust-analyzer.rustfmt.rangeFormatting.enable]]rust-analyzer.rustfmt.rangeFormatting.enable (default: `false`)::
++
+--
+Enables the use of rustfmt's unstable range formatting command for the
+`textDocument/rangeFormatting` request. The rustfmt option is unstable and only
+available on a nightly build.
+--
+[[rust-analyzer.semanticHighlighting.doc.comment.inject.enable]]rust-analyzer.semanticHighlighting.doc.comment.inject.enable (default: `true`)::
++
+--
+Inject additional highlighting into doc comments.
+
+When enabled, rust-analyzer will highlight rust source in doc comments as well as intra
+doc links.
+--
+[[rust-analyzer.semanticHighlighting.operator.enable]]rust-analyzer.semanticHighlighting.operator.enable (default: `true`)::
++
+--
+Use semantic tokens for operators.
+
+When disabled, rust-analyzer will emit semantic tokens only for operator tokens when
+they are tagged with modifiers.
+--
+[[rust-analyzer.semanticHighlighting.operator.specialization.enable]]rust-analyzer.semanticHighlighting.operator.specialization.enable (default: `false`)::
++
+--
+Use specialized semantic tokens for operators.
+
+When enabled, rust-analyzer will emit special token types for operator tokens instead
+of the generic `operator` token type.
+--
+[[rust-analyzer.semanticHighlighting.punctuation.enable]]rust-analyzer.semanticHighlighting.punctuation.enable (default: `false`)::
++
+--
+Use semantic tokens for punctuations.
+
+When disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when
+they are tagged with modifiers or have a special role.
+--
+[[rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang]]rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang (default: `false`)::
++
+--
+When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro
+calls.
+--
+[[rust-analyzer.semanticHighlighting.punctuation.specialization.enable]]rust-analyzer.semanticHighlighting.punctuation.specialization.enable (default: `false`)::
++
+--
+Use specialized semantic tokens for punctuations.
+
+When enabled, rust-analyzer will emit special token types for punctuation tokens instead
+of the generic `punctuation` token type.
+--
+[[rust-analyzer.semanticHighlighting.strings.enable]]rust-analyzer.semanticHighlighting.strings.enable (default: `true`)::
++
+--
+Use semantic tokens for strings.
+
+In some editors (e.g. vscode) semantic tokens override other highlighting grammars.
+By disabling semantic tokens for strings, other grammars can be used to highlight
+their contents.
+--
+[[rust-analyzer.signatureInfo.detail]]rust-analyzer.signatureInfo.detail (default: `"full"`)::
++
+--
+Show full signature of the callable. Only shows parameters if disabled.
+--
+[[rust-analyzer.signatureInfo.documentation.enable]]rust-analyzer.signatureInfo.documentation.enable (default: `true`)::
++
+--
+Show documentation.
+--
+[[rust-analyzer.typing.autoClosingAngleBrackets.enable]]rust-analyzer.typing.autoClosingAngleBrackets.enable (default: `false`)::
++
+--
+Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list.
+--
+[[rust-analyzer.workspace.symbol.search.kind]]rust-analyzer.workspace.symbol.search.kind (default: `"only_types"`)::
++
+--
+Workspace symbol search kind.
+--
+[[rust-analyzer.workspace.symbol.search.limit]]rust-analyzer.workspace.symbol.search.limit (default: `128`)::
++
+--
+Limits the number of items returned from a workspace symbol search (Defaults to 128).
+Some clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.
+Other clients require all results upfront and might require a higher limit.
+--
+[[rust-analyzer.workspace.symbol.search.scope]]rust-analyzer.workspace.symbol.search.scope (default: `"workspace"`)::
++
+--
+Workspace symbol search scope.
+--
--- /dev/null
- There is no dedicated UI for the server configuration, so you would need to send any options as a value of the `initialization_options` field, as described in the <<_configuration,Configuration>> section.
+= User Manual
+:toc: preamble
+:sectanchors:
+:page-layout: post
+:icons: font
+:source-highlighter: rouge
+:experimental:
+
+////
+IMPORTANT: the master copy of this document lives in the https://github.com/rust-lang/rust-analyzer repository
+////
+
+At its core, rust-analyzer is a *library* for semantic analysis of Rust code as it changes over time.
+This manual focuses on a specific usage of the library -- running it as part of a server that implements the
+https://microsoft.github.io/language-server-protocol/[Language Server Protocol] (LSP).
+The LSP allows various code editors, like VS Code, Emacs or Vim, to implement semantic features like completion or goto definition by talking to an external language server process.
+
+[TIP]
+====
+[.lead]
+To improve this document, send a pull request: +
+https://github.com/rust-lang/rust-analyzer/blob/master/docs/user/manual.adoc[https://github.com/rust-lang/.../manual.adoc]
+
+The manual is written in https://asciidoc.org[AsciiDoc] and includes some extra files which are generated from the source code. Run `cargo test` and `cargo test -p xtask` to create these and then `asciidoctor manual.adoc` to create an HTML copy.
+====
+
+If you have questions about using rust-analyzer, please ask them in the https://users.rust-lang.org/c/ide/14["`IDEs and Editors`"] topic of Rust users forum.
+
+== Installation
+
+In theory, one should be able to just install the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>> and have it automatically work with any editor.
+We are not there yet, so some editor specific setup is required.
+
+Additionally, rust-analyzer needs the sources of the standard library.
+If the source code is not present, rust-analyzer will attempt to install it automatically.
+
+To add the sources manually, run the following command:
+
+```bash
+$ rustup component add rust-src
+```
+
+=== Toolchain
+
+Only the latest stable standard library source is officially supported for use with rust-analyzer.
+If you are using an older toolchain or have an override set, rust-analyzer may fail to understand the Rust source.
+You will either need to update your toolchain or use an older version of rust-analyzer that is compatible with your toolchain.
+
+If you are using an override in your project, you can still force rust-analyzer to use the stable toolchain via the environment variable `RUSTUP_TOOLCHAIN`.
+For example, with VS Code or coc-rust-analyzer:
+
+[source,json]
+----
+{ "rust-analyzer.server.extraEnv": { "RUSTUP_TOOLCHAIN": "stable" } }
+----
+
+=== VS Code
+
+This is the best supported editor at the moment.
+The rust-analyzer plugin for VS Code is maintained
+https://github.com/rust-lang/rust-analyzer/tree/master/editors/code[in tree].
+
+You can install the latest release of the plugin from
+https://marketplace.visualstudio.com/items?itemName=rust-lang.rust-analyzer[the marketplace].
+
+Note that the plugin may cause conflicts with the
+https://marketplace.visualstudio.com/items?itemName=rust-lang.rust[official Rust plugin].
+It is recommended to disable the Rust plugin when using the rust-analyzer extension.
+
+By default, the plugin will prompt you to download the matching version of the server as well:
+
+image::https://user-images.githubusercontent.com/9021944/75067008-17502500-54ba-11ea-835a-f92aac50e866.png[]
+
+[NOTE]
+====
+To disable this notification put the following to `settings.json`
+
+[source,json]
+----
+{ "rust-analyzer.updates.askBeforeDownload": false }
+----
+====
+
+The server binary is stored in the extension install directory, which starts with `rust-lang.rust-analyzer-` and is located under:
+
+* Linux: `~/.vscode/extensions`
+* Linux (Remote, such as WSL): `~/.vscode-server/extensions`
+* macOS: `~/.vscode/extensions`
+* Windows: `%USERPROFILE%\.vscode\extensions`
+
+As an exception, on NixOS, the extension makes a copy of the server and stores it under `~/.config/Code/User/globalStorage/rust-lang.rust-analyzer`.
+
+Note that we only support the two most recent versions of VS Code.
+
+==== Updates
+
+The extension will be updated automatically as new versions become available.
+It will ask your permission to download the matching language server version binary if needed.
+
+===== Nightly
+
+We ship nightly releases for VS Code.
+To help us out by testing the newest code, you can enable pre-release versions in the Code extension page.
+
+==== Manual installation
+
+Alternatively, download a VSIX corresponding to your platform from the
+https://github.com/rust-lang/rust-analyzer/releases[releases] page.
+
+Install the extension with the `Extensions: Install from VSIX` command within VS Code, or from the command line via:
+[source]
+----
+$ code --install-extension /path/to/rust-analyzer.vsix
+----
+
+If you are running an unsupported platform, you can install `rust-analyzer-no-server.vsix` and compile or obtain a server binary.
+Copy the server anywhere, then add the path to your settings.json, for example:
+[source,json]
+----
+{ "rust-analyzer.server.path": "~/.local/bin/rust-analyzer-linux" }
+----
+
+==== Building From Source
+
+Both the server and the Code plugin can be installed from source:
+
+[source]
+----
+$ git clone https://github.com/rust-lang/rust-analyzer.git && cd rust-analyzer
+$ cargo xtask install
+----
+
+You'll need Cargo, nodejs (matching a supported version of VS Code) and npm for this.
+
+Note that installing via `xtask install` does not work for VS Code Remote, instead you'll need to install the `.vsix` manually.
+
+If you're not using Code, you can compile and install only the LSP server:
+
+[source]
+----
+$ cargo xtask install --server
+----
+
+=== rust-analyzer Language Server Binary
+
+Other editors generally require the `rust-analyzer` binary to be in `$PATH`.
+You can download pre-built binaries from the https://github.com/rust-lang/rust-analyzer/releases[releases] page.
+You will need to uncompress and rename the binary for your platform, e.g. from `rust-analyzer-aarch64-apple-darwin.gz` on Mac OS to `rust-analyzer`, make it executable, then move it into a directory in your `$PATH`.
+
+On Linux to install the `rust-analyzer` binary into `~/.local/bin`, these commands should work:
+
+[source,bash]
+----
+$ mkdir -p ~/.local/bin
+$ curl -L https://github.com/rust-lang/rust-analyzer/releases/latest/download/rust-analyzer-x86_64-unknown-linux-gnu.gz | gunzip -c - > ~/.local/bin/rust-analyzer
+$ chmod +x ~/.local/bin/rust-analyzer
+----
+
+Make sure that `~/.local/bin` is listed in the `$PATH` variable and use the appropriate URL if you're not on a `x86-64` system.
+
+You don't have to use `~/.local/bin`, any other path like `~/.cargo/bin` or `/usr/local/bin` will work just as well.
+
+Alternatively, you can install it from source using the command below.
+You'll need the latest stable version of the Rust toolchain.
+
+[source,bash]
+----
+$ git clone https://github.com/rust-lang/rust-analyzer.git && cd rust-analyzer
+$ cargo xtask install --server
+----
+
+If your editor can't find the binary even though the binary is on your `$PATH`, the likely explanation is that it doesn't see the same `$PATH` as the shell, see https://github.com/rust-lang/rust-analyzer/issues/1811[this issue].
+On Unix, running the editor from a shell or changing the `.desktop` file to set the environment should help.
+
+==== `rustup`
+
+`rust-analyzer` is available in `rustup`:
+
+[source,bash]
+----
+$ rustup component add rust-analyzer
+----
+
+However, in contrast to `component add clippy` or `component add rustfmt`, this does not actually place a `rust-analyzer` binary in `~/.cargo/bin`, see https://github.com/rust-lang/rustup/issues/2411[this issue]. You can find the path to the binary using:
+[source,bash]
+----
+$ rustup which --toolchain stable rust-analyzer
+----
+You can link to there from `~/.cargo/bin` or configure your editor to use the full path.
+
+Alternatively you might be able to configure your editor to start `rust-analyzer` using the command:
+[source,bash]
+----
+$ rustup run stable rust-analyzer
+----
+
+==== Arch Linux
+
+The `rust-analyzer` binary can be installed from the repos or AUR (Arch User Repository):
+
+- https://www.archlinux.org/packages/community/x86_64/rust-analyzer/[`rust-analyzer`] (built from latest tagged source)
+- https://aur.archlinux.org/packages/rust-analyzer-git[`rust-analyzer-git`] (latest Git version)
+
+Install it with pacman, for example:
+
+[source,bash]
+----
+$ pacman -S rust-analyzer
+----
+
+==== Gentoo Linux
+
+`rust-analyzer` is available in the GURU repository:
+
+- https://gitweb.gentoo.org/repo/proj/guru.git/tree/dev-util/rust-analyzer?id=9895cea62602cfe599bd48e0fb02127411ca6e81[`dev-util/rust-analyzer`] builds from source
+- https://gitweb.gentoo.org/repo/proj/guru.git/tree/dev-util/rust-analyzer-bin?id=9895cea62602cfe599bd48e0fb02127411ca6e81[`dev-util/rust-analyzer-bin`] installs an official binary release
+
+If not already, GURU must be enabled (e.g. using `app-eselect/eselect-repository`) and sync'd before running `emerge`:
+
+[source,bash]
+----
+$ eselect repository enable guru && emaint sync -r guru
+$ emerge rust-analyzer-bin
+----
+
+==== macOS
+
+The `rust-analyzer` binary can be installed via https://brew.sh/[Homebrew].
+
+[source,bash]
+----
+$ brew install rust-analyzer
+----
+
+=== Emacs
+
+Note this excellent https://robert.kra.hn/posts/2021-02-07_rust-with-emacs/[guide] from https://github.com/rksm[@rksm].
+
+Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
+
+Emacs support is maintained as part of the https://github.com/emacs-lsp/lsp-mode[Emacs-LSP] package in https://github.com/emacs-lsp/lsp-mode/blob/master/lsp-rust.el[lsp-rust.el].
+
+1. Install the most recent version of `emacs-lsp` package by following the https://github.com/emacs-lsp/lsp-mode[Emacs-LSP instructions].
+2. Set `lsp-rust-server` to `'rust-analyzer`.
+3. Run `lsp` in a Rust buffer.
+4. (Optionally) bind commands like `lsp-rust-analyzer-join-lines`, `lsp-extend-selection` and `lsp-rust-analyzer-expand-macro` to keys.
+
+=== Vim/NeoVim
+
+Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
+Not needed if the extension can install/update it on its own; coc-rust-analyzer is one example.
+
+There are several LSP client implementations for vim or neovim:
+
+==== coc-rust-analyzer
+
+1. Install coc.nvim by following the instructions at
+ https://github.com/neoclide/coc.nvim[coc.nvim]
+ (Node.js required)
+2. Run `:CocInstall coc-rust-analyzer` to install
+ https://github.com/fannheyward/coc-rust-analyzer[coc-rust-analyzer],
+ this extension implements _most_ of the features supported in the VSCode extension:
+ * automatically install and upgrade stable/nightly releases
+ * same configurations as VSCode extension, `rust-analyzer.server.path`, `rust-analyzer.cargo.features` etc.
+ * same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.ssr` etc.
+ * inlay hints for variables and method chaining, _Neovim Only_
+
+Note: for code actions, use `coc-codeaction-cursor` and `coc-codeaction-selected`; `coc-codeaction` and `coc-codeaction-line` are unlikely to be useful.
+
+==== LanguageClient-neovim
+
+1. Install LanguageClient-neovim by following the instructions
+ https://github.com/autozimu/LanguageClient-neovim[here]
+ * The GitHub project wiki has extra tips on configuration
+
+2. Configure by adding this to your vim/neovim config file (replacing the existing Rust-specific line if it exists):
++
+[source,vim]
+----
+let g:LanguageClient_serverCommands = {
+\ 'rust': ['rust-analyzer'],
+\ }
+----
+
+==== YouCompleteMe
+
+Install YouCompleteMe by following the instructions
+ https://github.com/ycm-core/YouCompleteMe#installation[here].
+
+rust-analyzer is the default in ycm, it should work out of the box.
+
+==== ALE
+
+To use the LSP server in https://github.com/dense-analysis/ale[ale]:
+
+[source,vim]
+----
+let g:ale_linters = {'rust': ['analyzer']}
+----
+
+==== nvim-lsp
+
+NeoVim 0.5 has built-in language server support.
+For a quick start configuration of rust-analyzer, use https://github.com/neovim/nvim-lspconfig#rust_analyzer[neovim/nvim-lspconfig].
+Once `neovim/nvim-lspconfig` is installed, use `+lua require'lspconfig'.rust_analyzer.setup({})+` in your `init.vim`.
+
+You can also pass LSP settings to the server:
+
+[source,vim]
+----
+lua << EOF
+local nvim_lsp = require'lspconfig'
+
+local on_attach = function(client)
+ require'completion'.on_attach(client)
+end
+
+nvim_lsp.rust_analyzer.setup({
+ on_attach=on_attach,
+ settings = {
+ ["rust-analyzer"] = {
+ imports = {
+ granularity = {
+ group = "module",
+ },
+ prefix = "self",
+ },
+ cargo = {
+ buildScripts = {
+ enable = true,
+ },
+ },
+ procMacro = {
+ enable = true
+ },
+ }
+ }
+})
+EOF
+----
+
+See https://sharksforarms.dev/posts/neovim-rust/ for more tips on getting started.
+
+Check out https://github.com/simrat39/rust-tools.nvim for a batteries included rust-analyzer setup for neovim.
+
+==== vim-lsp
+
+vim-lsp is installed by following https://github.com/prabirshrestha/vim-lsp[the plugin instructions].
+It can be as simple as adding this line to your `.vimrc`:
+
+[source,vim]
+----
+Plug 'prabirshrestha/vim-lsp'
+----
+
+Next you need to register the `rust-analyzer` binary.
+If it is available in `$PATH`, you may want to add this to your `.vimrc`:
+
+[source,vim]
+----
+if executable('rust-analyzer')
+ au User lsp_setup call lsp#register_server({
+ \ 'name': 'Rust Language Server',
+ \ 'cmd': {server_info->['rust-analyzer']},
+ \ 'whitelist': ['rust'],
+ \ })
+endif
+----
+
+There is no dedicated UI for the server configuration, so you would need to send any options as a value of the `initialization_options` field, as described in the <<configuration,Configuration>> section.
+Here is an example of how to enable the proc-macro support:
+
+[source,vim]
+----
+if executable('rust-analyzer')
+ au User lsp_setup call lsp#register_server({
+ \ 'name': 'Rust Language Server',
+ \ 'cmd': {server_info->['rust-analyzer']},
+ \ 'whitelist': ['rust'],
+ \ 'initialization_options': {
+ \ 'cargo': {
+ \ 'buildScripts': {
+ \ 'enable': v:true,
+ \ },
+ \ },
+ \ 'procMacro': {
+ \ 'enable': v:true,
+ \ },
+ \ },
+ \ })
+endif
+----
+
+=== Sublime Text
+
+==== Sublime Text 4:
+* Follow the instructions in link:https://github.com/sublimelsp/LSP-rust-analyzer[LSP-rust-analyzer].
+
+NOTE: Install link:https://packagecontrol.io/packages/LSP-file-watcher-chokidar[LSP-file-watcher-chokidar] to enable file watching (`workspace/didChangeWatchedFiles`).
+
+==== Sublime Text 3:
+* Install the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
+* Install the link:https://packagecontrol.io/packages/LSP[LSP package].
+* From the command palette, run `LSP: Enable Language Server Globally` and select `rust-analyzer`.
+
+If it worked, you should see "rust-analyzer, Line X, Column Y" on the left side of the status bar, and after waiting a bit, functionalities like tooltips on hovering over variables should become available.
+
+If you get an error saying `No such file or directory: 'rust-analyzer'`, see the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>> section on installing the language server binary.
+
+=== GNOME Builder
+
+GNOME Builder 3.37.1 and newer has native `rust-analyzer` support.
+If the LSP binary is not available, GNOME Builder can install it when opening a Rust file.
+
+
+=== Eclipse IDE
+
+Support for Rust development in the Eclipse IDE is provided by link:https://github.com/eclipse/corrosion[Eclipse Corrosion].
+If available in PATH or in some standard location, `rust-analyzer` is detected and powers editing of Rust files without further configuration.
+If `rust-analyzer` is not detected, Corrosion will prompt you for configuration of your Rust toolchain and language server with a link to the __Window > Preferences > Rust__ preference page; from here a button allows to download and configure `rust-analyzer`, but you can also reference another installation.
+You'll need to close and reopen all .rs and Cargo files, or to restart the IDE, for this change to take effect.
+
+=== Kate Text Editor
+
+Support for the language server protocol is built into Kate through the LSP plugin, which is included by default.
+It is preconfigured to use rust-analyzer for Rust sources since Kate 21.12.
+
+Earlier versions allow you to use rust-analyzer through a simple settings change.
+In the LSP Client settings of Kate, copy the content of the third tab "default parameters" to the second tab "server configuration".
+Then in the configuration replace:
+[source,json]
+----
+ "rust": {
+ "command": ["rls"],
+ "rootIndicationFileNames": ["Cargo.lock", "Cargo.toml"],
+ "url": "https://github.com/rust-lang/rls",
+ "highlightingModeRegex": "^Rust$"
+ },
+----
+With
+[source,json]
+----
+ "rust": {
+ "command": ["rust-analyzer"],
+ "rootIndicationFileNames": ["Cargo.lock", "Cargo.toml"],
+ "url": "https://github.com/rust-lang/rust-analyzer",
+ "highlightingModeRegex": "^Rust$"
+ },
+----
+Then click on apply, and restart the LSP server for your rust project.
+
+=== juCi++
+
+https://gitlab.com/cppit/jucipp[juCi++] has built-in support for the language server protocol, and since version 1.7.0 offers installation of both Rust and rust-analyzer when opening a Rust file.
+
+=== Kakoune
+
+https://kakoune.org/[Kakoune] supports LSP with the help of https://github.com/kak-lsp/kak-lsp[`kak-lsp`].
+Follow the https://github.com/kak-lsp/kak-lsp#installation[instructions] to install `kak-lsp`.
+To configure `kak-lsp`, refer to the https://github.com/kak-lsp/kak-lsp#configuring-kak-lsp[configuration section] which is basically about copying the https://github.com/kak-lsp/kak-lsp/blob/master/kak-lsp.toml[configuration file] in the right place (latest versions should use `rust-analyzer` by default).
+
+Finally, you need to configure Kakoune to talk to `kak-lsp` (see https://github.com/kak-lsp/kak-lsp#usage[Usage section]).
+A basic configuration will only get you LSP but you can also activate inlay diagnostics and auto-formatting on save.
+The following might help you get all of this.
+
+[source,txt]
+----
+eval %sh{kak-lsp --kakoune -s $kak_session} # Not needed if you load it with plug.kak.
+hook global WinSetOption filetype=rust %{
+ # Enable LSP
+ lsp-enable-window
+
+ # Auto-formatting on save
+ hook window BufWritePre .* lsp-formatting-sync
+
+ # Configure inlay hints (only on save)
+ hook window -group rust-inlay-hints BufWritePost .* rust-analyzer-inlay-hints
+ hook -once -always window WinSetOption filetype=.* %{
+ remove-hooks window rust-inlay-hints
+ }
+}
+----
+
+=== Helix
+
+https://docs.helix-editor.com/[Helix] supports LSP by default.
+However, it won't install `rust-analyzer` automatically.
+You can follow instructions for installing <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
+
+=== Crates
+
+There is a package named `ra_ap_rust_analyzer` available on https://crates.io/crates/ra_ap_rust-analyzer[crates.io], for someone who wants to use it programmatically.
+
+For more details, see https://github.com/rust-lang/rust-analyzer/blob/master/.github/workflows/publish.yml[the publish workflow].
+
+== Troubleshooting
+
+Start with looking at the rust-analyzer version.
+Try **rust-analyzer: Show RA Version** in VS Code (using **Command Palette** feature typically activated by Ctrl+Shift+P) or `rust-analyzer --version` in the command line.
+If the date is more than a week ago, it's better to update your rust-analyzer version.
+
+The next thing to check would be panic messages in rust-analyzer's log.
+Log messages are printed to stderr; in VS Code you can see them in the `Output > Rust Analyzer Language Server` tab of the panel.
+To see more logs, set the `RA_LOG=info` environment variable, this can be done either by setting the environment variable manually or by using `rust-analyzer.server.extraEnv`, note that both of these approaches require the server to be restarted.
+
+To fully capture LSP messages between the editor and the server, set `"rust-analyzer.trace.server": "verbose"` config and check
+`Output > Rust Analyzer Language Server Trace`.
+
+The root cause for many "`nothing works`" problems is that rust-analyzer fails to understand the project structure.
+To debug that, first note the `rust-analyzer` section in the status bar.
+If it has an error icon and is red, that's the problem (hovering will show a somewhat helpful error message).
+**rust-analyzer: Status** prints dependency information for the current file.
+Finally, `RA_LOG=project_model=debug` enables verbose logs during project loading.
+
+If rust-analyzer outright crashes, try running `rust-analyzer analysis-stats /path/to/project/directory/` on the command line.
+This command type checks the whole project in batch mode bypassing LSP machinery.
+
+When filing issues, it is useful (but not necessary) to try to minimize examples.
+An ideal bug reproduction looks like this:
+
+```bash
+$ git clone https://github.com/username/repo.git && cd repo && git switch --detach commit-hash
+$ rust-analyzer --version
+rust-analyzer dd12184e4 2021-05-08 dev
+$ rust-analyzer analysis-stats .
+💀 💀 💀
+```
+
+It is especially useful when the `repo` doesn't use external crates or the standard library.
+
+If you want to go as far as to modify the source code to debug the problem, be sure to take a look at the
+https://github.com/rust-lang/rust-analyzer/tree/master/docs/dev[dev docs]!
+
+== Configuration
+
+**Source:** https://github.com/rust-lang/rust-analyzer/blob/master/crates/rust-analyzer/src/config.rs[config.rs]
+
+The <<_installation,Installation>> section contains details on configuration for some of the editors.
+In general `rust-analyzer` is configured via LSP messages, which means that it's up to the editor to decide on the exact format and location of configuration files.
+
+Some clients, such as <<vs-code,VS Code>> or <<coc-rust-analyzer,COC plugin in Vim>> provide `rust-analyzer` specific configuration UIs. Others may require you to know a bit more about the interaction with `rust-analyzer`.
+
+For the latter category, it might help to know that the initial configuration is specified as a value of the `initializationOptions` field of the https://microsoft.github.io/language-server-protocol/specifications/specification-current/#initialize[`InitializeParams` message, in the LSP protocol].
+The spec says that the field type is `any?`, but `rust-analyzer` is looking for a JSON object that is constructed using settings from the list below.
+The name of the setting, ignoring the `rust-analyzer.` prefix, is used as a path, and the value of the setting becomes the JSON property value.
+
+For example, a very common configuration is to enable proc-macro support; this can be achieved by sending this JSON:
+
+[source,json]
+----
+{
+ "cargo": {
+ "buildScripts": {
+ "enable": true
+ }
+ },
+ "procMacro": {
+ "enable": true
+ }
+}
+----
+
+Please consult your editor's documentation to learn more about how to configure https://microsoft.github.io/language-server-protocol/[LSP servers].
+
+To verify which configuration is actually used by `rust-analyzer`, set `RA_LOG` environment variable to `rust_analyzer=info` and look for config-related messages.
+Logs should show both the JSON that `rust-analyzer` sees as well as the updated config.
+
+This is the list of config options `rust-analyzer` supports:
+
+include::./generated_config.adoc[]
+
+== Non-Cargo Based Projects
+
+rust-analyzer does not require Cargo.
+However, if you use some other build system, you'll have to describe the structure of your project for rust-analyzer in the `rust-project.json` format:
+
+[source,TypeScript]
+----
+interface JsonProject {
+ /// Path to the directory with *source code* of
+ /// sysroot crates.
+ ///
+ /// It should point to the directory where std,
+ /// core, and friends can be found:
+ ///
+ /// https://github.com/rust-lang/rust/tree/master/library.
+ ///
+ /// If provided, rust-analyzer automatically adds
+ /// dependencies on sysroot crates. Conversely,
+ /// if you omit this path, you can specify sysroot
+ /// dependencies yourself and, for example, have
+ /// several different "sysroots" in one graph of
+ /// crates.
+ sysroot_src?: string;
+ /// The set of crates comprising the current
+ /// project. Must include all transitive
+ /// dependencies as well as sysroot crate (libstd,
+ /// libcore and such).
+ crates: Crate[];
+}
+
+interface Crate {
+ /// Optional crate name used for display purposes,
+ /// without affecting semantics. See the `deps`
+ /// key for semantically-significant crate names.
+ display_name?: string;
+ /// Path to the root module of the crate.
+ root_module: string;
+ /// Edition of the crate.
+ edition: "2015" | "2018" | "2021";
+ /// Dependencies
+ deps: Dep[];
+ /// Should this crate be treated as a member of
+ /// current "workspace".
+ ///
+ /// By default, inferred from the `root_module`
+ /// (members are the crates which reside inside
+ /// the directory opened in the editor).
+ ///
+ /// Set this to `false` for things like standard
+ /// library and 3rd party crates to enable
+ /// performance optimizations (rust-analyzer
+ /// assumes that non-member crates don't change).
+ is_workspace_member?: boolean;
+ /// Optionally specify the (super)set of `.rs`
+ /// files comprising this crate.
+ ///
+ /// By default, rust-analyzer assumes that only
+ /// files under `root_module.parent` can belong
+ /// to a crate. `include_dirs` are included
+ /// recursively, unless a subdirectory is in
+ /// `exclude_dirs`.
+ ///
+ /// Different crates can share the same `source`.
+ ///
+ /// If two crates share an `.rs` file in common,
+ /// they *must* have the same `source`.
+ /// rust-analyzer assumes that files from one
+ /// source can't refer to files in another source.
+ source?: {
+ include_dirs: string[],
+ exclude_dirs: string[],
+ },
+ /// The set of cfgs activated for a given crate, like
+ /// `["unix", "feature=\"foo\"", "feature=\"bar\""]`.
+ cfg: string[];
+ /// Target triple for this Crate.
+ ///
+ /// Used when running `rustc --print cfg`
+ /// to get target-specific cfgs.
+ target?: string;
+ /// Environment variables, used for
+ /// the `env!` macro
+ env: { [key: string]: string; },
+
+ /// Whether the crate is a proc-macro crate.
+ is_proc_macro: boolean;
+ /// For proc-macro crates, path to compiled
+ /// proc-macro (.so file).
+ proc_macro_dylib_path?: string;
+}
+
+interface Dep {
+ /// Index of a crate in the `crates` array.
+ crate: number,
+ /// Name as should appear in the (implicit)
+ /// `extern crate name` declaration.
+ name: string,
+}
+----
+
+This format is provisional and subject to change.
+Specifically, the `roots` setup will be different eventually.
+
+There are three ways to feed `rust-project.json` to rust-analyzer:
+
+* Place `rust-project.json` file at the root of the project, and rust-analyzer will discover it.
+* Specify `"rust-analyzer.linkedProjects": [ "path/to/rust-project.json" ]` in the settings (and make sure that your LSP client sends settings as a part of initialize request).
+* Specify `"rust-analyzer.linkedProjects": [ { "roots": [...], "crates": [...] }]` inline.
+
+Relative paths are interpreted relative to `rust-project.json` file location or (for inline JSON) relative to `rootUri`.
+
+See https://github.com/rust-analyzer/rust-project.json-example for a small example.
+
+You can set the `RA_LOG` environment variable to `rust_analyzer=info` to inspect how rust-analyzer handles config and project loading.
+
+Note that calls to `cargo check` are disabled when using `rust-project.json` by default, so compilation errors and warnings will no longer be sent to your LSP client. To enable these compilation errors you will need to specify explicitly what command rust-analyzer should run to perform the checks using the `checkOnSave.overrideCommand` configuration. As an example, the following configuration explicitly sets `cargo check` as the `checkOnSave` command.
+
+[source,json]
+----
+{ "rust-analyzer.checkOnSave.overrideCommand": ["cargo", "check", "--message-format=json"] }
+----
+
+The `checkOnSave.overrideCommand` requires the command specified to output json error messages for rust-analyzer to consume. The `--message-format=json` flag does this for `cargo check` so whichever command you use must also output errors in this format. See the <<Configuration>> section for more information.
+
+== Security
+
+At the moment, rust-analyzer assumes that all code is trusted.
+Here is a **non-exhaustive** list of ways to make rust-analyzer execute arbitrary code:
+
+* proc macros and build scripts are executed by default
+* `.cargo/config` can override `rustc` with an arbitrary executable
+* `rust-toolchain.toml` can override `rustc` with an arbitrary executable
+* VS Code plugin reads configuration from project directory, and that can be used to override paths to various executables, like `rustfmt` or `rust-analyzer` itself.
+* rust-analyzer's syntax trees library uses a lot of `unsafe` and hasn't been properly audited for memory safety.
+
+== Privacy
+
+The LSP server performs no network access in itself, but runs `cargo metadata` which will update or download the crate registry and the source code of the project dependencies.
+If enabled (the default), build scripts and procedural macros can do anything.
+
+The Code extension does not access the network.
+
+Any other editor plugins are not under the control of the `rust-analyzer` developers. For any privacy concerns, you should check with their respective developers.
+
+For `rust-analyzer` developers, `cargo xtask release` uses the GitHub API to put together the release notes.
+
+== Features
+
+include::./generated_features.adoc[]
+
+== Assists (Code Actions)
+
+Assists, or code actions, are small local refactorings, available in a particular context.
+They are usually triggered by a shortcut or by clicking a light bulb icon in the editor.
+Cursor position or selection is signified by `┃` character.
+
+include::./generated_assists.adoc[]
+
+== Diagnostics
+
+While most errors and warnings provided by rust-analyzer come from the `cargo check` integration, there's a growing number of diagnostics implemented using rust-analyzer's own analysis.
+Some of these diagnostics don't respect `\#[allow]` or `\#[deny]` attributes yet, but can be turned off using the `rust-analyzer.diagnostics.enable`, `rust-analyzer.diagnostics.experimental.enable` or `rust-analyzer.diagnostics.disabled` settings.
+
+include::./generated_diagnostic.adoc[]
+
+== Editor Features
+=== VS Code
+
+==== Color configurations
+
+It is possible to change the foreground/background color and font family/size of inlay hints.
+Just add this to your `settings.json`:
+
+[source,jsonc]
+----
+{
+ "editor.inlayHints.fontFamily": "Courier New",
+ "editor.inlayHints.fontSize": 11,
+
+ "workbench.colorCustomizations": {
+ // Name of the theme you are currently using
+ "[Default Dark+]": {
+ "editorInlayHint.foreground": "#868686f0",
+ "editorInlayHint.background": "#3d3d3d48",
+
+ // Overrides for specific kinds of inlay hints
+ "editorInlayHint.typeForeground": "#fdb6fdf0",
+ "editorInlayHint.parameterForeground": "#fdb6fdf0",
+ }
+ }
+}
+----
+
+==== Semantic style customizations
+
+You can customize the look of different semantic elements in the source code.
+For example, mutable bindings are underlined by default and you can override this behavior by adding the following section to your `settings.json`:
+
+[source,jsonc]
+----
+{
+ "editor.semanticTokenColorCustomizations": {
+ "rules": {
+ "*.mutable": {
+ "fontStyle": "", // underline is the default
+ },
+ }
+ },
+}
+----
+
+Most themes don't support styling unsafe operations differently yet. You can fix this by adding overrides for the rules `operator.unsafe`, `function.unsafe`, and `method.unsafe`:
+
+[source,jsonc]
+----
+{
+ "editor.semanticTokenColorCustomizations": {
+ "rules": {
+ "operator.unsafe": "#ff6600",
+ "function.unsafe": "#ff6600",
+ "method.unsafe": "#ff6600"
+ }
+ },
+}
+----
+
+In addition to the top-level rules you can specify overrides for specific themes. For example, if you wanted to use a darker text color on a specific light theme, you might write:
+
+[source,jsonc]
+----
+{
+ "editor.semanticTokenColorCustomizations": {
+ "rules": {
+ "operator.unsafe": "#ff6600"
+ },
+ "[Ayu Light]": {
+ "rules": {
+ "operator.unsafe": "#572300"
+ }
+ }
+ },
+}
+----
+
+Make sure you include the brackets around the theme name. For example, use `"[Ayu Light]"` to customize the theme Ayu Light.
+
+==== Special `when` clause context for keybindings.
+You may use `inRustProject` context to configure keybindings for rust projects only.
+For example:
+
+[source,json]
+----
+{
+ "key": "ctrl+alt+d",
+ "command": "rust-analyzer.openDocs",
+ "when": "inRustProject"
+}
+----
+More about `when` clause contexts https://code.visualstudio.com/docs/getstarted/keybindings#_when-clause-contexts[here].
+
+==== Setting runnable environment variables
+You can use "rust-analyzer.runnableEnv" setting to define runnable environment-specific substitution variables.
+The simplest way for all runnables in a bunch:
+```jsonc
+"rust-analyzer.runnableEnv": {
+ "RUN_SLOW_TESTS": "1"
+}
+```
+
+Or it is possible to specify vars more granularly:
+```jsonc
+"rust-analyzer.runnableEnv": [
+ {
+ // "mask": null, // null mask means that this rule will be applied for all runnables
+ "env": {
+ "APP_ID": "1",
+ "APP_DATA": "asdf"
+ }
+ },
+ {
+ "mask": "test_name",
+ "env": {
+ "APP_ID": "2", // overwrites only APP_ID
+ }
+ }
+]
+```
+
+You can use any valid regular expression as a mask.
+Also note that a full runnable name is something like *run bin_or_example_name*, *test some::mod::test_name* or *test-mod some::mod*, so it is possible to distinguish binaries, single tests, and test modules with these masks: `"^run"`, `"^test "` (the trailing space matters!), and `"^test-mod"` respectively.
+
+==== Compiler feedback from external commands
+
+Instead of relying on the built-in `cargo check`, you can configure Code to run a command in the background and use the `$rustc-watch` problem matcher to generate inline error markers from its output.
+
+To do this you need to create a new https://code.visualstudio.com/docs/editor/tasks[VS Code Task] and set `rust-analyzer.checkOnSave.enable: false` in preferences.
+
+For example, if you want to run https://crates.io/crates/cargo-watch[`cargo watch`] instead, you might add the following to `.vscode/tasks.json`:
+
+```json
+{
+ "label": "Watch",
+ "group": "build",
+ "type": "shell",
+ "command": "cargo watch",
+ "problemMatcher": "$rustc-watch",
+ "isBackground": true
+}
+```
+
+==== Live Share
+
+VS Code Live Share has partial support for rust-analyzer.
+
+Live Share _requires_ the official Microsoft build of VS Code, OSS builds will not work correctly.
+
+The host's rust-analyzer instance will be shared with all guests joining the session.
+The guests do not have to have the rust-analyzer extension installed for this to work.
+
+If you are joining a Live Share session and _do_ have rust-analyzer installed locally, commands from the command palette will not work correctly since they will attempt to communicate with the local server.
--- /dev/null
- "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefor include `--message-format=json` or a similar option.\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects, this command is invoked for\neach of them, with the working directory being the project root\n(i.e., the folder containing the `Cargo.toml`).\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.",
+{
+ "name": "rust-analyzer",
+ "displayName": "rust-analyzer",
+ "description": "Rust language support for Visual Studio Code",
+ "private": true,
+ "icon": "icon.png",
+ "version": "0.5.0-dev",
+ "releaseTag": null,
+ "publisher": "rust-lang",
+ "repository": {
+ "url": "https://github.com/rust-lang/rust-analyzer.git",
+ "type": "git"
+ },
+ "homepage": "https://rust-analyzer.github.io/",
+ "license": "MIT OR Apache-2.0",
+ "keywords": [
+ "rust"
+ ],
+ "categories": [
+ "Programming Languages"
+ ],
+ "engines": {
+ "vscode": "^1.66.0"
+ },
+ "enabledApiProposals": [],
+ "scripts": {
+ "vscode:prepublish": "npm run build-base -- --minify",
+ "package": "vsce package -o rust-analyzer.vsix",
+ "build-base": "esbuild ./src/main.ts --bundle --outfile=out/main.js --external:vscode --format=cjs --platform=node --target=node16",
+ "build": "npm run build-base -- --sourcemap",
+ "watch": "npm run build-base -- --sourcemap --watch",
+ "lint": "prettier --check . && eslint -c .eslintrc.js --ext ts ./src ./tests",
+ "fix": "prettier --write . && eslint -c .eslintrc.js --ext ts ./src ./tests --fix",
+ "pretest": "tsc && npm run build",
+ "test": "cross-env TEST_VARIABLE=test node ./out/tests/runTests.js"
+ },
+ "dependencies": {
+ "d3": "^7.6.1",
+ "d3-graphviz": "^4.1.1",
+ "vscode-languageclient": "^8.0.2"
+ },
+ "devDependencies": {
+ "@types/node": "~16.11.7",
+ "@types/vscode": "~1.66.0",
+ "@typescript-eslint/eslint-plugin": "^5.30.5",
+ "@typescript-eslint/parser": "^5.30.5",
+ "@vscode/test-electron": "^2.1.5",
+ "cross-env": "^7.0.3",
+ "esbuild": "^0.14.48",
+ "eslint": "^8.19.0",
+ "eslint-config-prettier": "^8.5.0",
+ "ovsx": "^0.5.2",
+ "prettier": "^2.7.1",
+ "tslib": "^2.4.0",
+ "typescript": "^4.7.4",
+ "vsce": "^2.9.2"
+ },
+ "activationEvents": [
+ "onLanguage:rust",
+ "onCommand:rust-analyzer.analyzerStatus",
+ "onCommand:rust-analyzer.memoryUsage",
+ "onCommand:rust-analyzer.reloadWorkspace",
+ "onCommand:rust-analyzer.startServer",
+ "workspaceContains:*/Cargo.toml",
+ "workspaceContains:*/rust-project.json"
+ ],
+ "main": "./out/main",
+ "contributes": {
+ "taskDefinitions": [
+ {
+ "type": "cargo",
+ "required": [
+ "command"
+ ],
+ "properties": {
+ "label": {
+ "type": "string"
+ },
+ "command": {
+ "type": "string"
+ },
+ "args": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "env": {
+ "type": "object",
+ "patternProperties": {
+ ".+": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ }
+ ],
+ "commands": [
+ {
+ "command": "rust-analyzer.syntaxTree",
+ "title": "Show Syntax Tree",
+ "category": "rust-analyzer (debug command)"
+ },
+ {
+ "command": "rust-analyzer.viewHir",
+ "title": "View Hir",
+ "category": "rust-analyzer (debug command)"
+ },
+ {
+ "command": "rust-analyzer.viewFileText",
+ "title": "View File Text (as seen by the server)",
+ "category": "rust-analyzer (debug command)"
+ },
+ {
+ "command": "rust-analyzer.viewItemTree",
+ "title": "Debug ItemTree",
+ "category": "rust-analyzer (debug command)"
+ },
+ {
+ "command": "rust-analyzer.shuffleCrateGraph",
+ "title": "Shuffle Crate Graph",
+ "category": "rust-analyzer (debug command)"
+ },
+ {
+ "command": "rust-analyzer.memoryUsage",
+ "title": "Memory Usage (Clears Database)",
+ "category": "rust-analyzer (debug command)"
+ },
+ {
+ "command": "rust-analyzer.viewCrateGraph",
+ "title": "View Crate Graph",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.viewFullCrateGraph",
+ "title": "View Crate Graph (Full)",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.expandMacro",
+ "title": "Expand macro recursively",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.matchingBrace",
+ "title": "Find matching brace",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.parentModule",
+ "title": "Locate parent module",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.joinLines",
+ "title": "Join lines",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.run",
+ "title": "Run",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.copyRunCommandLine",
+ "title": "Copy Run Command Line",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.debug",
+ "title": "Debug",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.newDebugConfig",
+ "title": "Generate launch configuration",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.analyzerStatus",
+ "title": "Status",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.reloadWorkspace",
+ "title": "Reload workspace",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.reload",
+ "title": "Restart server",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.startServer",
+ "title": "Start server",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.stopServer",
+ "title": "Stop server",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.onEnter",
+ "title": "Enhanced enter key",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.ssr",
+ "title": "Structural Search Replace",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.serverVersion",
+ "title": "Show RA Version",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.openDocs",
+ "title": "Open docs under cursor",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.openCargoToml",
+ "title": "Open Cargo.toml",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.peekTests",
+ "title": "Peek related tests",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.moveItemUp",
+ "title": "Move item up",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.moveItemDown",
+ "title": "Move item down",
+ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.cancelFlycheck",
+ "title": "Cancel running flychecks",
+ "category": "rust-analyzer"
+ }
+ ],
+ "keybindings": [
+ {
+ "command": "rust-analyzer.parentModule",
+ "key": "ctrl+shift+u",
+ "when": "editorTextFocus && editorLangId == rust"
+ },
+ {
+ "command": "rust-analyzer.matchingBrace",
+ "key": "ctrl+shift+m",
+ "when": "editorTextFocus && editorLangId == rust"
+ },
+ {
+ "command": "rust-analyzer.joinLines",
+ "key": "ctrl+shift+j",
+ "when": "editorTextFocus && editorLangId == rust"
+ }
+ ],
+ "configuration": {
+ "type": "object",
+ "title": "rust-analyzer",
+ "properties": {
+ "rust-analyzer.cargoRunner": {
+ "type": [
+ "null",
+ "string"
+ ],
+ "default": null,
+ "description": "Custom cargo runner extension ID."
+ },
+ "rust-analyzer.runnableEnv": {
+ "anyOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "mask": {
+ "type": "string",
+ "description": "Runnable name mask"
+ },
+ "env": {
+ "type": "object",
+ "description": "Variables in form of { \"key\": \"value\"}"
+ }
+ }
+ }
+ },
+ {
+ "type": "object",
+ "description": "Variables in form of { \"key\": \"value\"}"
+ }
+ ],
+ "default": null,
+ "markdownDescription": "Environment variables passed to the runnable launched using `Test` or `Debug` lens or `rust-analyzer.run` command."
+ },
+ "rust-analyzer.server.path": {
+ "type": [
+ "null",
+ "string"
+ ],
+ "scope": "machine-overridable",
+ "default": null,
+ "markdownDescription": "Path to rust-analyzer executable (points to bundled binary by default)."
+ },
+ "rust-analyzer.server.extraEnv": {
+ "type": [
+ "null",
+ "object"
+ ],
+ "additionalProperties": {
+ "type": [
+ "string",
+ "number"
+ ]
+ },
+ "default": null,
+ "markdownDescription": "Extra environment variables that will be passed to the rust-analyzer executable. Useful for passing e.g. `RA_LOG` for debugging."
+ },
+ "rust-analyzer.trace.server": {
+ "type": "string",
+ "scope": "window",
+ "enum": [
+ "off",
+ "messages",
+ "verbose"
+ ],
+ "enumDescriptions": [
+ "No traces",
+ "Error only",
+ "Full log"
+ ],
+ "default": "off",
+ "description": "Trace requests to the rust-analyzer (this is usually overly verbose and not recommended for regular users)."
+ },
+ "rust-analyzer.trace.extension": {
+ "description": "Enable logging of VS Code extensions itself.",
+ "type": "boolean",
+ "default": false
+ },
+ "rust-analyzer.debug.engine": {
+ "type": "string",
+ "enum": [
+ "auto",
+ "vadimcn.vscode-lldb",
+ "ms-vscode.cpptools"
+ ],
+ "default": "auto",
+ "description": "Preferred debug engine.",
+ "markdownEnumDescriptions": [
+ "First try to use [CodeLLDB](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb), if it's not installed try to use [MS C++ tools](https://marketplace.visualstudio.com/items?itemName=ms-vscode.cpptools).",
+ "Use [CodeLLDB](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb)",
+ "Use [MS C++ tools](https://marketplace.visualstudio.com/items?itemName=ms-vscode.cpptools)"
+ ]
+ },
+ "rust-analyzer.debug.sourceFileMap": {
+ "type": [
+ "object",
+ "string"
+ ],
+ "const": "auto",
+ "description": "Optional source file mappings passed to the debug engine.",
+ "default": {
+ "/rustc/<id>": "${env:USERPROFILE}/.rustup/toolchains/<toolchain-id>/lib/rustlib/src/rust"
+ }
+ },
+ "rust-analyzer.debug.openDebugPane": {
+ "markdownDescription": "Whether to open up the `Debug Panel` on debugging start.",
+ "type": "boolean",
+ "default": false
+ },
+ "rust-analyzer.debug.engineSettings": {
+ "type": "object",
+ "default": {},
+ "markdownDescription": "Optional settings passed to the debug engine. Example: `{ \"lldb\": { \"terminal\":\"external\"} }`"
+ },
+ "rust-analyzer.restartServerOnConfigChange": {
+ "markdownDescription": "Whether to restart the server automatically when certain settings that require a restart are changed.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.typing.continueCommentsOnNewline": {
+ "markdownDescription": "Whether to prefix newlines after comments with the corresponding comment prefix.",
+ "default": true,
+ "type": "boolean"
+ },
++ "rust-analyzer.diagnostics.previewRustcOutput": {
++ "markdownDescription": "Whether to show the main part of the rendered rustc output of a diagnostic message.",
++ "default": false,
++ "type": "boolean"
++ },
+ "$generated-start": {},
+ "rust-analyzer.assist.emitMustUse": {
+ "markdownDescription": "Whether to insert #[must_use] when generating `as_` methods\nfor enum variants.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.assist.expressionFillDefault": {
+ "markdownDescription": "Placeholder expression to use for missing expressions in assists.",
+ "default": "todo",
+ "type": "string",
+ "enum": [
+ "todo",
+ "default"
+ ],
+ "enumDescriptions": [
+ "Fill missing expressions with the `todo` macro",
+ "Fill missing expressions with reasonable defaults, `new` or `default` constructors."
+ ]
+ },
+ "rust-analyzer.cachePriming.enable": {
+ "markdownDescription": "Warm up caches on project load.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.cachePriming.numThreads": {
+ "markdownDescription": "How many worker threads to handle priming caches. The default `0` means to pick automatically.",
+ "default": 0,
+ "type": "number",
+ "minimum": 0,
+ "maximum": 255
+ },
+ "rust-analyzer.cargo.autoreload": {
+ "markdownDescription": "Automatically refresh project info via `cargo metadata` on\n`Cargo.toml` or `.cargo/config.toml` changes.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.cargo.buildScripts.enable": {
+ "markdownDescription": "Run build scripts (`build.rs`) for more precise code analysis.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.cargo.buildScripts.invocationLocation": {
+ "markdownDescription": "Specifies the working directory for running build scripts.\n- \"workspace\": run build scripts for a workspace in the workspace's root directory.\n This is incompatible with `#rust-analyzer.cargo.buildScripts.invocationStrategy#` set to `once`.\n- \"root\": run build scripts in the project's root directory.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.",
+ "default": "workspace",
+ "type": "string",
+ "enum": [
+ "workspace",
+ "root"
+ ],
+ "enumDescriptions": [
+ "The command will be executed in the corresponding workspace root.",
+ "The command will be executed in the project root."
+ ]
+ },
+ "rust-analyzer.cargo.buildScripts.invocationStrategy": {
+ "markdownDescription": "Specifies the invocation strategy to use when running the build scripts command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.",
+ "default": "per_workspace",
+ "type": "string",
+ "enum": [
+ "per_workspace",
+ "once"
+ ],
+ "enumDescriptions": [
+ "The command will be executed for each workspace.",
+ "The command will be executed once."
+ ]
+ },
+ "rust-analyzer.cargo.buildScripts.overrideCommand": {
+ "markdownDescription": "Override the command rust-analyzer uses to run build scripts and\nbuild procedural macros. The command is required to output json\nand should therefore include `--message-format=json` or a similar\noption.\n\nBy default, a cargo invocation will be constructed for the configured\ntargets and features, with the following base command line:\n\n```bash\ncargo check --quiet --workspace --message-format=json --all-targets\n```\n.",
+ "default": null,
+ "type": [
+ "null",
+ "array"
+ ],
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.cargo.buildScripts.useRustcWrapper": {
+ "markdownDescription": "Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to\navoid checking unnecessary things.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.cargo.extraEnv": {
+ "markdownDescription": "Extra environment variables that will be set when running cargo, rustc\nor other commands within the workspace. Useful for setting RUSTFLAGS.",
+ "default": {},
+ "type": "object"
+ },
+ "rust-analyzer.cargo.features": {
+ "markdownDescription": "List of features to activate.\n\nSet this to `\"all\"` to pass `--all-features` to cargo.",
+ "default": [],
+ "anyOf": [
+ {
+ "type": "string",
+ "enum": [
+ "all"
+ ],
+ "enumDescriptions": [
+ "Pass `--all-features` to cargo"
+ ]
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ ]
+ },
+ "rust-analyzer.cargo.noDefaultFeatures": {
+ "markdownDescription": "Whether to pass `--no-default-features` to cargo.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.cargo.sysroot": {
+ "markdownDescription": "Relative path to the sysroot, or \"discover\" to try to automatically find it via\n\"rustc --print sysroot\".\n\nUnsetting this disables sysroot loading.\n\nThis option does not take effect until rust-analyzer is restarted.",
+ "default": "discover",
+ "type": [
+ "null",
+ "string"
+ ]
+ },
+ "rust-analyzer.cargo.target": {
+ "markdownDescription": "Compilation target override (target triple).",
+ "default": null,
+ "type": [
+ "null",
+ "string"
+ ]
+ },
+ "rust-analyzer.cargo.unsetTest": {
+ "markdownDescription": "Unsets `#[cfg(test)]` for the specified crates.",
+ "default": [
+ "core"
+ ],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.checkOnSave.allTargets": {
+ "markdownDescription": "Check all targets and tests (`--all-targets`).",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.checkOnSave.command": {
+ "markdownDescription": "Cargo command to use for `cargo check`.",
+ "default": "check",
+ "type": "string"
+ },
+ "rust-analyzer.checkOnSave.enable": {
+ "markdownDescription": "Run specified `cargo check` command for diagnostics on save.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.checkOnSave.extraArgs": {
+ "markdownDescription": "Extra arguments for `cargo check`.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.checkOnSave.extraEnv": {
+ "markdownDescription": "Extra environment variables that will be set when running `cargo check`.\nExtends `#rust-analyzer.cargo.extraEnv#`.",
+ "default": {},
+ "type": "object"
+ },
+ "rust-analyzer.checkOnSave.features": {
+ "markdownDescription": "List of features to activate. Defaults to\n`#rust-analyzer.cargo.features#`.\n\nSet to `\"all\"` to pass `--all-features` to Cargo.",
+ "default": null,
+ "anyOf": [
+ {
+ "type": "string",
+ "enum": [
+ "all"
+ ],
+ "enumDescriptions": [
+ "Pass `--all-features` to cargo"
+ ]
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ {
+ "type": "null"
+ }
+ ]
+ },
+ "rust-analyzer.checkOnSave.invocationLocation": {
+ "markdownDescription": "Specifies the working directory for running checks.\n- \"workspace\": run checks for workspaces in the corresponding workspaces' root directories.\n This falls back to \"root\" if `#rust-analyzer.cargo.checkOnSave.invocationStrategy#` is set to `once`.\n- \"root\": run checks in the project's root directory.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.",
+ "default": "workspace",
+ "type": "string",
+ "enum": [
+ "workspace",
+ "root"
+ ],
+ "enumDescriptions": [
+ "The command will be executed in the corresponding workspace root.",
+ "The command will be executed in the project root."
+ ]
+ },
+ "rust-analyzer.checkOnSave.invocationStrategy": {
+ "markdownDescription": "Specifies the invocation strategy to use when running the checkOnSave command.\nIf `per_workspace` is set, the command will be executed for each workspace.\nIf `once` is set, the command will be executed once.\nThis config only has an effect when `#rust-analyzer.cargo.buildScripts.overrideCommand#`\nis set.",
+ "default": "per_workspace",
+ "type": "string",
+ "enum": [
+ "per_workspace",
+ "once"
+ ],
+ "enumDescriptions": [
+ "The command will be executed for each workspace.",
+ "The command will be executed once."
+ ]
+ },
+ "rust-analyzer.checkOnSave.noDefaultFeatures": {
+ "markdownDescription": "Whether to pass `--no-default-features` to Cargo. Defaults to\n`#rust-analyzer.cargo.noDefaultFeatures#`.",
+ "default": null,
+ "type": [
+ "null",
+ "boolean"
+ ]
+ },
+ "rust-analyzer.checkOnSave.overrideCommand": {
- "markdownDescription": "Check for a specific target. Defaults to\n`#rust-analyzer.cargo.target#`.",
- "default": null,
- "type": [
- "null",
- "string"
++ "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option.\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects, this command is invoked for\neach of them, with the working directory being the project root\n(i.e., the folder containing the `Cargo.toml`).\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.",
+ "default": null,
+ "type": [
+ "null",
+ "array"
+ ],
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.checkOnSave.target": {
- "markdownDescription": "Whether to show inlay type hints for compiler inserted reborrows.",
++ "markdownDescription": "Check for specific targets. Defaults to `#rust-analyzer.cargo.target#` if empty.\n\nCan be a single target, e.g. `\"x86_64-unknown-linux-gnu\"` or a list of targets, e.g.\n`[\"aarch64-apple-darwin\", \"x86_64-apple-darwin\"]`.\n\nAliased as `\"checkOnSave.targets\"`.",
++ "default": [],
++ "anyOf": [
++ {
++ "type": "string"
++ },
++ {
++ "type": "array",
++ "items": {
++ "type": "string"
++ }
++ }
+ ]
+ },
+ "rust-analyzer.completion.autoimport.enable": {
+ "markdownDescription": "Toggles the additional completions that automatically add imports when completed.\nNote that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.completion.autoself.enable": {
+ "markdownDescription": "Toggles the additional completions that automatically show method calls and field accesses\nwith `self` prefixed to them when inside a method.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.completion.callable.snippets": {
+ "markdownDescription": "Whether to add parenthesis and argument snippets when completing function.",
+ "default": "fill_arguments",
+ "type": "string",
+ "enum": [
+ "fill_arguments",
+ "add_parentheses",
+ "none"
+ ],
+ "enumDescriptions": [
+ "Add call parentheses and pre-fill arguments.",
+ "Add call parentheses.",
+ "Do no snippet completions for callables."
+ ]
+ },
+ "rust-analyzer.completion.postfix.enable": {
+ "markdownDescription": "Whether to show postfix snippets like `dbg`, `if`, `not`, etc.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.completion.privateEditable.enable": {
+ "markdownDescription": "Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.completion.snippets.custom": {
+ "markdownDescription": "Custom completion snippets.",
+ "default": {
+ "Arc::new": {
+ "postfix": "arc",
+ "body": "Arc::new(${receiver})",
+ "requires": "std::sync::Arc",
+ "description": "Put the expression into an `Arc`",
+ "scope": "expr"
+ },
+ "Rc::new": {
+ "postfix": "rc",
+ "body": "Rc::new(${receiver})",
+ "requires": "std::rc::Rc",
+ "description": "Put the expression into an `Rc`",
+ "scope": "expr"
+ },
+ "Box::pin": {
+ "postfix": "pinbox",
+ "body": "Box::pin(${receiver})",
+ "requires": "std::boxed::Box",
+ "description": "Put the expression into a pinned `Box`",
+ "scope": "expr"
+ },
+ "Ok": {
+ "postfix": "ok",
+ "body": "Ok(${receiver})",
+ "description": "Wrap the expression in a `Result::Ok`",
+ "scope": "expr"
+ },
+ "Err": {
+ "postfix": "err",
+ "body": "Err(${receiver})",
+ "description": "Wrap the expression in a `Result::Err`",
+ "scope": "expr"
+ },
+ "Some": {
+ "postfix": "some",
+ "body": "Some(${receiver})",
+ "description": "Wrap the expression in an `Option::Some`",
+ "scope": "expr"
+ }
+ },
+ "type": "object"
+ },
+ "rust-analyzer.diagnostics.disabled": {
+ "markdownDescription": "List of rust-analyzer diagnostics to disable.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "uniqueItems": true
+ },
+ "rust-analyzer.diagnostics.enable": {
+ "markdownDescription": "Whether to show native rust-analyzer diagnostics.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.diagnostics.experimental.enable": {
+ "markdownDescription": "Whether to show experimental rust-analyzer diagnostics that might\nhave more false positives than usual.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.diagnostics.remapPrefix": {
+ "markdownDescription": "Map of prefixes to be substituted when parsing diagnostic file paths.\nThis should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.",
+ "default": {},
+ "type": "object"
+ },
+ "rust-analyzer.diagnostics.warningsAsHint": {
+ "markdownDescription": "List of warnings that should be displayed with hint severity.\n\nThe warnings will be indicated by faded text or three dots in code\nand will not show up in the `Problems Panel`.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.diagnostics.warningsAsInfo": {
+ "markdownDescription": "List of warnings that should be displayed with info severity.\n\nThe warnings will be indicated by a blue squiggly underline in code\nand a blue icon in the `Problems Panel`.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.files.excludeDirs": {
+ "markdownDescription": "These directories will be ignored by rust-analyzer. They are\nrelative to the workspace root, and globs are not supported. You may\nalso need to add the folders to Code's `files.watcherExclude`.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.files.watcher": {
+ "markdownDescription": "Controls file watching implementation.",
+ "default": "client",
+ "type": "string",
+ "enum": [
+ "client",
+ "server"
+ ],
+ "enumDescriptions": [
+ "Use the client (editor) to watch files for changes",
+ "Use server-side file watching"
+ ]
+ },
+ "rust-analyzer.highlightRelated.breakPoints.enable": {
+ "markdownDescription": "Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.highlightRelated.exitPoints.enable": {
+ "markdownDescription": "Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.highlightRelated.references.enable": {
+ "markdownDescription": "Enables highlighting of related references while the cursor is on any identifier.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.highlightRelated.yieldPoints.enable": {
+ "markdownDescription": "Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.hover.actions.debug.enable": {
+ "markdownDescription": "Whether to show `Debug` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.hover.actions.enable": {
+ "markdownDescription": "Whether to show HoverActions in Rust files.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.hover.actions.gotoTypeDef.enable": {
+ "markdownDescription": "Whether to show `Go to Type Definition` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.hover.actions.implementations.enable": {
+ "markdownDescription": "Whether to show `Implementations` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.hover.actions.references.enable": {
+ "markdownDescription": "Whether to show `References` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.hover.actions.run.enable": {
+ "markdownDescription": "Whether to show `Run` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.hover.documentation.enable": {
+ "markdownDescription": "Whether to show documentation on hover.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.hover.documentation.keywords.enable": {
+ "markdownDescription": "Whether to show keyword hover popups. Only applies when\n`#rust-analyzer.hover.documentation.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.hover.links.enable": {
+ "markdownDescription": "Use markdown syntax for links in hover.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.imports.granularity.enforce": {
+ "markdownDescription": "Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.imports.granularity.group": {
+ "markdownDescription": "How imports should be grouped into use statements.",
+ "default": "crate",
+ "type": "string",
+ "enum": [
+ "preserve",
+ "crate",
+ "module",
+ "item"
+ ],
+ "enumDescriptions": [
+ "Do not change the granularity of any imports and preserve the original structure written by the developer.",
+ "Merge imports from the same crate into a single use statement. Conversely, imports from different crates are split into separate statements.",
+ "Merge imports from the same module into a single use statement. Conversely, imports from different modules are split into separate statements.",
+ "Flatten imports so that each has its own use statement."
+ ]
+ },
+ "rust-analyzer.imports.group.enable": {
+ "markdownDescription": "Group inserted imports by the [following order](https://rust-analyzer.github.io/manual.html#auto-import). Groups are separated by newlines.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.imports.merge.glob": {
+ "markdownDescription": "Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.imports.prefer.no.std": {
+ "markdownDescription": "Prefer to unconditionally use imports of the core and alloc crate, over the std crate.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.imports.prefix": {
+ "markdownDescription": "The path structure for newly inserted paths to use.",
+ "default": "plain",
+ "type": "string",
+ "enum": [
+ "plain",
+ "self",
+ "crate"
+ ],
+ "enumDescriptions": [
+ "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item.",
+ "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item. Prefixes `self` in front of the path if it starts with a module.",
+ "Force import paths to be absolute by always starting them with `crate` or the extern crate name they come from."
+ ]
+ },
+ "rust-analyzer.inlayHints.bindingModeHints.enable": {
+ "markdownDescription": "Whether to show inlay type hints for binding modes.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.inlayHints.chainingHints.enable": {
+ "markdownDescription": "Whether to show inlay type hints for method chains.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.inlayHints.closingBraceHints.enable": {
+ "markdownDescription": "Whether to show inlay hints after a closing `}` to indicate what item it belongs to.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.inlayHints.closingBraceHints.minLines": {
+ "markdownDescription": "Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1\nto always show them).",
+ "default": 25,
+ "type": "integer",
+ "minimum": 0
+ },
+ "rust-analyzer.inlayHints.closureReturnTypeHints.enable": {
+ "markdownDescription": "Whether to show inlay type hints for return types of closures.",
+ "default": "never",
+ "type": "string",
+ "enum": [
+ "always",
+ "never",
+ "with_block"
+ ],
+ "enumDescriptions": [
+ "Always show type hints for return types of closures.",
+ "Never show type hints for return types of closures.",
+ "Only show type hints for return types of closures with blocks."
+ ]
+ },
++ "rust-analyzer.inlayHints.expressionAdjustmentHints.enable": {
++ "markdownDescription": "Whether to show inlay hints for type adjustments.",
++ "default": "never",
++ "type": "string",
++ "enum": [
++ "always",
++ "never",
++ "reborrow"
++ ],
++ "enumDescriptions": [
++ "Always show all adjustment hints.",
++ "Never show adjustment hints.",
++ "Only show auto borrow and dereference adjustment hints."
++ ]
++ },
+ "rust-analyzer.inlayHints.lifetimeElisionHints.enable": {
+ "markdownDescription": "Whether to show inlay type hints for elided lifetimes in function signatures.",
+ "default": "never",
+ "type": "string",
+ "enum": [
+ "always",
+ "never",
+ "skip_trivial"
+ ],
+ "enumDescriptions": [
+ "Always show lifetime elision hints.",
+ "Never show lifetime elision hints.",
+ "Only show lifetime elision hints if a return type is involved."
+ ]
+ },
+ "rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames": {
+ "markdownDescription": "Whether to prefer using parameter names as the name for elided lifetime hints if possible.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.inlayHints.maxLength": {
+ "markdownDescription": "Maximum length for inlay hints. Set to null to have an unlimited length.",
+ "default": 25,
+ "type": [
+ "null",
+ "integer"
+ ],
+ "minimum": 0
+ },
+ "rust-analyzer.inlayHints.parameterHints.enable": {
+ "markdownDescription": "Whether to show function parameter name inlay hints at the call\nsite.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.inlayHints.reborrowHints.enable": {
++ "markdownDescription": "Whether to show inlay hints for compiler inserted reborrows.\nThis setting is deprecated in favor of #rust-analyzer.inlayHints.expressionAdjustmentHints.enable#.",
+ "default": "never",
+ "type": "string",
+ "enum": [
+ "always",
+ "never",
+ "mutable"
+ ],
+ "enumDescriptions": [
+ "Always show reborrow hints.",
+ "Never show reborrow hints.",
+ "Only show mutable reborrow hints."
+ ]
+ },
+ "rust-analyzer.inlayHints.renderColons": {
+ "markdownDescription": "Whether to render leading colons for type hints, and trailing colons for parameter hints.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.inlayHints.typeHints.enable": {
+ "markdownDescription": "Whether to show inlay type hints for variables.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.inlayHints.typeHints.hideClosureInitialization": {
+ "markdownDescription": "Whether to hide inlay type hints for `let` statements that initialize to a closure.\nOnly applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.inlayHints.typeHints.hideNamedConstructor": {
+ "markdownDescription": "Whether to hide inlay type hints for constructors.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.joinLines.joinAssignments": {
+ "markdownDescription": "Join lines merges consecutive declaration and initialization of an assignment.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.joinLines.joinElseIf": {
+ "markdownDescription": "Join lines inserts else between consecutive ifs.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.joinLines.removeTrailingComma": {
+ "markdownDescription": "Join lines removes trailing commas.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.joinLines.unwrapTrivialBlock": {
+ "markdownDescription": "Join lines unwraps trivial blocks.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.lens.debug.enable": {
+ "markdownDescription": "Whether to show `Debug` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.lens.enable": {
+ "markdownDescription": "Whether to show CodeLens in Rust files.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.lens.forceCustomCommands": {
+ "markdownDescription": "Internal config: use custom client-side commands even when the\nclient doesn't set the corresponding capability.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.lens.implementations.enable": {
+ "markdownDescription": "Whether to show `Implementations` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.lens.location": {
+ "markdownDescription": "Where to render annotations.",
+ "default": "above_name",
+ "type": "string",
+ "enum": [
+ "above_name",
+ "above_whole_item"
+ ],
+ "enumDescriptions": [
+ "Render annotations above the name of the item.",
+ "Render annotations above the whole item, including documentation comments and attributes."
+ ]
+ },
+ "rust-analyzer.lens.references.adt.enable": {
+ "markdownDescription": "Whether to show `References` lens for Struct, Enum, and Union.\nOnly applies when `#rust-analyzer.lens.enable#` is set.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.lens.references.enumVariant.enable": {
+ "markdownDescription": "Whether to show `References` lens for Enum Variants.\nOnly applies when `#rust-analyzer.lens.enable#` is set.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.lens.references.method.enable": {
+ "markdownDescription": "Whether to show `Method References` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.lens.references.trait.enable": {
+ "markdownDescription": "Whether to show `References` lens for Trait.\nOnly applies when `#rust-analyzer.lens.enable#` is set.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.lens.run.enable": {
+ "markdownDescription": "Whether to show `Run` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.linkedProjects": {
+ "markdownDescription": "Disable project auto-discovery in favor of explicitly specified set\nof projects.\n\nElements must be paths pointing to `Cargo.toml`,\n`rust-project.json`, or JSON objects in `rust-project.json` format.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": [
+ "string",
+ "object"
+ ]
+ }
+ },
+ "rust-analyzer.lru.capacity": {
+ "markdownDescription": "Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.",
+ "default": null,
+ "type": [
+ "null",
+ "integer"
+ ],
+ "minimum": 0
+ },
+ "rust-analyzer.notifications.cargoTomlNotFound": {
+ "markdownDescription": "Whether to show `can't find Cargo.toml` error message.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.procMacro.attributes.enable": {
+ "markdownDescription": "Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.procMacro.enable": {
+ "markdownDescription": "Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.procMacro.ignored": {
+ "markdownDescription": "These proc-macros will be ignored when trying to expand them.\n\nThis config takes a map of crate names with the exported proc-macro names to ignore as values.",
+ "default": {},
+ "type": "object"
+ },
+ "rust-analyzer.procMacro.server": {
+ "markdownDescription": "Internal config, path to proc-macro server executable (typically,\nthis is rust-analyzer itself, but we override this in tests).",
+ "default": null,
+ "type": [
+ "null",
+ "string"
+ ]
+ },
+ "rust-analyzer.references.excludeImports": {
+ "markdownDescription": "Exclude imports from find-all-references.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.runnables.command": {
+ "markdownDescription": "Command to be executed instead of 'cargo' for runnables.",
+ "default": null,
+ "type": [
+ "null",
+ "string"
+ ]
+ },
+ "rust-analyzer.runnables.extraArgs": {
+ "markdownDescription": "Additional arguments to be passed to cargo for runnables such as\ntests or binaries. For example, it may be `--release`.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.rustc.source": {
+ "markdownDescription": "Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private\nprojects, or \"discover\" to try to automatically find it if the `rustc-dev` component\nis installed.\n\nAny project which uses rust-analyzer with the rustcPrivate\ncrates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.\n\nThis option does not take effect until rust-analyzer is restarted.",
+ "default": null,
+ "type": [
+ "null",
+ "string"
+ ]
+ },
+ "rust-analyzer.rustfmt.extraArgs": {
+ "markdownDescription": "Additional arguments to `rustfmt`.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.rustfmt.overrideCommand": {
+ "markdownDescription": "Advanced option, fully override the command rust-analyzer uses for\nformatting.",
+ "default": null,
+ "type": [
+ "null",
+ "array"
+ ],
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.rustfmt.rangeFormatting.enable": {
+ "markdownDescription": "Enables the use of rustfmt's unstable range formatting command for the\n`textDocument/rangeFormatting` request. The rustfmt option is unstable and only\navailable on a nightly build.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.semanticHighlighting.doc.comment.inject.enable": {
+ "markdownDescription": "Inject additional highlighting into doc comments.\n\nWhen enabled, rust-analyzer will highlight rust source in doc comments as well as intra\ndoc links.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.semanticHighlighting.operator.enable": {
+ "markdownDescription": "Use semantic tokens for operators.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for operator tokens when\nthey are tagged with modifiers.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.semanticHighlighting.operator.specialization.enable": {
+ "markdownDescription": "Use specialized semantic tokens for operators.\n\nWhen enabled, rust-analyzer will emit special token types for operator tokens instead\nof the generic `operator` token type.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.semanticHighlighting.punctuation.enable": {
+ "markdownDescription": "Use semantic tokens for punctuations.\n\nWhen disabled, rust-analyzer will emit semantic tokens only for punctuation tokens when\nthey are tagged with modifiers or have a special role.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.semanticHighlighting.punctuation.separate.macro.bang": {
+ "markdownDescription": "When enabled, rust-analyzer will emit a punctuation semantic token for the `!` of macro\ncalls.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.semanticHighlighting.punctuation.specialization.enable": {
+ "markdownDescription": "Use specialized semantic tokens for punctuations.\n\nWhen enabled, rust-analyzer will emit special token types for punctuation tokens instead\nof the generic `punctuation` token type.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.semanticHighlighting.strings.enable": {
+ "markdownDescription": "Use semantic tokens for strings.\n\nIn some editors (e.g. vscode) semantic tokens override other highlighting grammars.\nBy disabling semantic tokens for strings, other grammars can be used to highlight\ntheir contents.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.signatureInfo.detail": {
+ "markdownDescription": "Show full signature of the callable. Only shows parameters if disabled.",
+ "default": "full",
+ "type": "string",
+ "enum": [
+ "full",
+ "parameters"
+ ],
+ "enumDescriptions": [
+ "Show the entire signature.",
+ "Show only the parameters."
+ ]
+ },
+ "rust-analyzer.signatureInfo.documentation.enable": {
+ "markdownDescription": "Show documentation.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.typing.autoClosingAngleBrackets.enable": {
+ "markdownDescription": "Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.workspace.symbol.search.kind": {
+ "markdownDescription": "Workspace symbol search kind.",
+ "default": "only_types",
+ "type": "string",
+ "enum": [
+ "only_types",
+ "all_symbols"
+ ],
+ "enumDescriptions": [
+ "Search for types only.",
+ "Search for all symbols kinds."
+ ]
+ },
+ "rust-analyzer.workspace.symbol.search.limit": {
+ "markdownDescription": "Limits the number of items returned from a workspace symbol search (Defaults to 128).\nSome clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.\nOther clients requires all results upfront and might require a higher limit.",
+ "default": 128,
+ "type": "integer",
+ "minimum": 0
+ },
+ "rust-analyzer.workspace.symbol.search.scope": {
+ "markdownDescription": "Workspace symbol search scope.",
+ "default": "workspace",
+ "type": "string",
+ "enum": [
+ "workspace",
+ "workspace_and_dependencies"
+ ],
+ "enumDescriptions": [
+ "Search in current workspace only.",
+ "Search in current workspace and dependencies."
+ ]
+ },
+ "$generated-end": {}
+ }
+ },
++ "configurationDefaults": {
++ "explorer.fileNesting.patterns": {
++ "Cargo.toml": "Cargo.lock"
++ }
++ },
+ "problemPatterns": [
+ {
+ "name": "rustc",
+ "patterns": [
+ {
+ "regexp": "^(warning|warn|error)(?:\\[(.*?)\\])?: (.*)$",
+ "severity": 1,
+ "code": 2,
+ "message": 3
+ },
+ {
+ "regexp": "^[\\s->=]*(.*?):(\\d*):(\\d*)\\s*$",
+ "file": 1,
+ "line": 2,
+ "column": 3
+ }
+ ]
+ },
+ {
+ "name": "rustc-json",
+ "patterns": [
+ {
+ "regexp": "^.*\"message\":{\"message\":\"([^\"]*).*?\"file_name\":\"([^\"]+).*?\"line_start\":(\\d+).*?\"line_end\":(\\d+).*?\"column_start\":(\\d+).*?\"column_end\":(\\d+).*}$",
+ "message": 1,
+ "file": 2,
+ "line": 3,
+ "endLine": 4,
+ "column": 5,
+ "endColumn": 6
+ }
+ ]
+ }
+ ],
+ "languages": [
+ {
+ "id": "ra_syntax_tree",
+ "extensions": [
+ ".rast"
+ ]
+ },
+ {
+ "id": "rust",
+ "extensions": [
+ ".rs"
+ ],
+ "aliases": [
+ "Rust",
+ "rs"
+ ],
+ "configuration": "language-configuration.json"
+ }
+ ],
+ "grammars": [
+ {
+ "language": "ra_syntax_tree",
+ "scopeName": "source.ra_syntax_tree",
+ "path": "ra_syntax_tree.tmGrammar.json"
+ }
+ ],
+ "problemMatchers": [
+ {
+ "name": "rustc",
+ "owner": "rustc",
+ "source": "rustc",
+ "fileLocation": [
+ "autoDetect",
+ "${workspaceRoot}"
+ ],
+ "pattern": "$rustc"
+ },
+ {
+ "name": "rustc-json",
+ "owner": "rustc",
+ "source": "rustc",
+ "fileLocation": [
+ "autoDetect",
+ "${workspaceRoot}"
+ ],
+ "pattern": "$rustc-json"
+ },
+ {
+ "name": "rustc-watch",
+ "owner": "rustc",
+ "source": "rustc",
+ "fileLocation": [
+ "autoDetect",
+ "${workspaceRoot}"
+ ],
+ "background": {
+ "beginsPattern": "^\\[Running\\b",
+ "endsPattern": "^\\[Finished running\\b"
+ },
+ "pattern": "$rustc"
+ }
+ ],
+ "colors": [
+ {
+ "id": "rust_analyzer.syntaxTreeBorder",
+ "description": "Color of the border displayed in the Rust source code for the selected syntax node (see \"Show Syntax Tree\" command)",
+ "defaults": {
+ "dark": "#ffffff",
+ "light": "#b700ff",
+ "highContrast": "#b700ff"
+ }
+ }
+ ],
+ "semanticTokenTypes": [
+ {
+ "id": "angle",
+ "description": "Style for < or >",
+ "superType": "punctuation"
+ },
+ {
+ "id": "arithmetic",
+ "description": "Style for arithmetic operators",
+ "superType": "operator"
+ },
+ {
+ "id": "attribute",
+ "description": "Style for attributes"
+ },
+ {
+ "id": "attributeBracket",
+ "description": "Style for attribute invocation brackets, that is the `#[` and `]` tokens",
+ "superType": "punctuation"
+ },
+ {
+ "id": "bitwise",
+ "description": "Style for bitwise operators",
+ "superType": "operator"
+ },
+ {
+ "id": "boolean",
+ "description": "Style for boolean literals",
+ "superType": "keyword"
+ },
+ {
+ "id": "brace",
+ "description": "Style for { or }",
+ "superType": "punctuation"
+ },
+ {
+ "id": "bracket",
+ "description": "Style for [ or ]",
+ "superType": "punctuation"
+ },
+ {
+ "id": "builtinAttribute",
+ "description": "Style for builtin attributes",
+ "superType": "attribute"
+ },
+ {
+ "id": "builtinType",
+ "description": "Style for builtin types",
+ "superType": "type"
+ },
+ {
+ "id": "character",
+ "description": "Style for character literals",
+ "superType": "string"
+ },
+ {
+ "id": "colon",
+ "description": "Style for :",
+ "superType": "punctuation"
+ },
+ {
+ "id": "comma",
+ "description": "Style for ,",
+ "superType": "punctuation"
+ },
+ {
+ "id": "comparison",
+ "description": "Style for comparison operators",
+ "superType": "operator"
+ },
+ {
+ "id": "constParameter",
+ "description": "Style for const generics"
+ },
+ {
+ "id": "derive",
+ "description": "Style for derives",
+ "superType": "attribute"
+ },
+ {
+ "id": "dot",
+ "description": "Style for .",
+ "superType": "punctuation"
+ },
+ {
+ "id": "escapeSequence",
+ "description": "Style for char escapes in strings"
+ },
+ {
+ "id": "formatSpecifier",
+ "description": "Style for {} placeholders in format strings"
+ },
+ {
+ "id": "label",
+ "description": "Style for labels"
+ },
+ {
+ "id": "lifetime",
+ "description": "Style for lifetimes"
+ },
+ {
+ "id": "logical",
+ "description": "Style for logic operators",
+ "superType": "operator"
+ },
+ {
+ "id": "macroBang",
+ "description": "Style for the ! token of macro calls",
+ "superType": "punctuation"
+ },
+ {
+ "id": "operator",
+ "description": "Style for operators",
+ "superType": "punctuation"
+ },
+ {
+ "id": "parenthesis",
+ "description": "Style for ( or )",
+ "superType": "punctuation"
+ },
+ {
+ "id": "punctuation",
+ "description": "Style for generic punctuation"
+ },
+ {
+ "id": "selfKeyword",
+ "description": "Style for the self keyword",
+ "superType": "keyword"
+ },
+ {
+ "id": "selfTypeKeyword",
+ "description": "Style for the self type keyword",
+ "superType": "keyword"
+ },
+ {
+ "id": "semicolon",
+ "description": "Style for ;",
+ "superType": "punctuation"
+ },
+ {
+ "id": "typeAlias",
+ "description": "Style for type aliases",
+ "superType": "type"
+ },
+ {
+ "id": "union",
+ "description": "Style for C-style untagged unions",
+ "superType": "type"
+ },
+ {
+ "id": "unresolvedReference",
+ "description": "Style for names which cannot be resolved due to compilation errors"
+ }
+ ],
+ "semanticTokenModifiers": [
+ {
+ "id": "async",
+ "description": "Style for async functions and the `async` and `await` keywords"
+ },
+ {
+ "id": "attribute",
+ "description": "Style for elements within attributes"
+ },
+ {
+ "id": "callable",
+ "description": "Style for locals whose types implements one of the `Fn*` traits"
+ },
+ {
+ "id": "constant",
+ "description": "Style for compile-time constants"
+ },
+ {
+ "id": "consuming",
+ "description": "Style for locals that are being consumed when used in a function call"
+ },
+ {
+ "id": "controlFlow",
+ "description": "Style for control-flow related tokens, this includes the `?` operator"
+ },
+ {
+ "id": "crateRoot",
+ "description": "Style for names resolving to a crate root"
+ },
+ {
+ "id": "injected",
+ "description": "Style for doc-string injected highlighting like rust source blocks in documentation"
+ },
+ {
+ "id": "intraDocLink",
+ "description": "Style for intra doc links in doc-strings"
+ },
+ {
+ "id": "library",
+ "description": "Style for items that are defined outside of the current crate"
+ },
+ {
+ "id": "mutable",
+ "description": "Style for mutable locals and statics as well as functions taking `&mut self`"
+ },
+ {
+ "id": "public",
+ "description": "Style for items that are from the current crate and are `pub`"
+ },
+ {
+ "id": "reference",
+ "description": "Style for locals behind a reference and functions taking `self` by reference"
+ },
+ {
+ "id": "trait",
+ "description": "Style for associated trait items"
+ },
+ {
+ "id": "unsafe",
+ "description": "Style for unsafe operations, like unsafe function calls, as well as the `unsafe` token"
+ }
+ ],
+ "semanticTokenScopes": [
+ {
+ "language": "rust",
+ "scopes": {
+ "attribute": [
+ "meta.attribute.rust"
+ ],
+ "boolean": [
+ "constant.language.boolean.rust"
+ ],
+ "builtinType": [
+ "support.type.primitive.rust"
+ ],
+ "constParameter": [
+ "constant.other.caps.rust"
+ ],
+ "enum": [
+ "entity.name.type.enum.rust"
+ ],
+ "formatSpecifier": [
+ "punctuation.section.embedded.rust"
+ ],
+ "function": [
+ "entity.name.function.rust"
+ ],
+ "interface": [
+ "entity.name.type.trait.rust"
+ ],
+ "keyword": [
+ "keyword.other.rust"
+ ],
+ "keyword.controlFlow": [
+ "keyword.control.rust"
+ ],
+ "lifetime": [
+ "storage.modifier.lifetime.rust"
+ ],
+ "macroBang": [
+ "entity.name.function.macro.rust"
+ ],
+ "method": [
+ "entity.name.function.rust"
+ ],
+ "struct": [
+ "entity.name.type.struct.rust"
+ ],
+ "typeAlias": [
+ "entity.name.type.declaration.rust"
+ ],
+ "union": [
+ "entity.name.type.union.rust"
+ ],
+ "variable": [
+ "variable.other.rust"
+ ],
+ "variable.constant": [
+ "variable.other.constant.rust"
+ ],
+ "*.mutable": [
+ "markup.underline"
+ ]
+ }
+ }
+ ],
+ "menus": {
+ "commandPalette": [
+ {
+ "command": "rust-analyzer.syntaxTree",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.viewHir",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.viewFileText",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.expandMacro",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.matchingBrace",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.parentModule",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.joinLines",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.run",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.debug",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.newDebugConfig",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.analyzerStatus",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.memoryUsage",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.reloadWorkspace",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.reload",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.onEnter",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.ssr",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.serverVersion",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.openDocs",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.openCargoToml",
+ "when": "inRustProject"
+ }
+ ],
+ "editor/context": [
+ {
+ "command": "rust-analyzer.peekTests",
+ "when": "inRustProject",
+ "group": "navigation@1000"
+ }
+ ]
+ }
+ }
+}
--- /dev/null
- import { substituteVSCodeVariables } from "./config";
+import * as lc from "vscode-languageclient/node";
+import * as vscode from "vscode";
+import * as ra from "../src/lsp_ext";
+import * as Is from "vscode-languageclient/lib/common/utils/is";
+import { assert } from "./util";
+import { WorkspaceEdit } from "vscode";
- serverOptions: lc.ServerOptions
++import { Config, substituteVSCodeVariables } from "./config";
+import { randomUUID } from "crypto";
+
+export interface Env {
+ [name: string]: string;
+}
+
+// Command URIs have a form of command:command-name?arguments, where
+// arguments is a percent-encoded array of data we want to pass along to
+// the command function. For "Show References" this is a list of all file
+// URIs with locations of every reference, and it can get quite long.
+//
+// To work around it we use an intermediary linkToCommand command. When
+// we render a command link, a reference to a command with all its arguments
+// is stored in a map, and instead a linkToCommand link is rendered
+// with the key to that map.
+export const LINKED_COMMANDS = new Map<string, ra.CommandLink>();
+
+// For now the map is cleaned up periodically (I've set it to every
+// 10 minutes). In general case we'll probably need to introduce TTLs or
+// flags to denote ephemeral links (like these in hover popups) and
+// persistent links and clean those separately. But for now simply keeping
+// the last few links in the map should be good enough. Likewise, we could
+// add code to remove a target command from the map after the link is
+// clicked, but assuming most links in hover sheets won't be clicked anyway
+// this code won't change the overall memory use much.
+setInterval(function cleanupOlderCommandLinks() {
+ // keys are returned in insertion order, we'll keep a few
+ // of recent keys available, and clean the rest
+ const keys = [...LINKED_COMMANDS.keys()];
+ const keysToRemove = keys.slice(0, keys.length - 10);
+ for (const key of keysToRemove) {
+ LINKED_COMMANDS.delete(key);
+ }
+}, 10 * 60 * 1000);
+
+function renderCommand(cmd: ra.CommandLink): string {
+ const commandId = randomUUID();
+ LINKED_COMMANDS.set(commandId, cmd);
+ return `[${cmd.title}](command:rust-analyzer.linkToCommand?${encodeURIComponent(
+ JSON.stringify([commandId])
+ )} '${cmd.tooltip}')`;
+}
+
+function renderHoverActions(actions: ra.CommandLinkGroup[]): vscode.MarkdownString {
+ const text = actions
+ .map(
+ (group) =>
+ (group.title ? group.title + " " : "") +
+ group.commands.map(renderCommand).join(" | ")
+ )
+ .join("___");
+
+ const result = new vscode.MarkdownString(text);
+ result.isTrusted = true;
+ return result;
+}
+
+export async function createClient(
+ traceOutputChannel: vscode.OutputChannel,
+ outputChannel: vscode.OutputChannel,
+ initializationOptions: vscode.WorkspaceConfiguration,
++ serverOptions: lc.ServerOptions,
++ config: Config
+): Promise<lc.LanguageClient> {
+ const clientOptions: lc.LanguageClientOptions = {
+ documentSelector: [{ scheme: "file", language: "rust" }],
+ initializationOptions,
+ diagnosticCollectionName: "rustc",
+ traceOutputChannel,
+ outputChannel,
+ middleware: {
+ workspace: {
+ // HACK: This is a workaround, when the client has been disposed, VSCode
+ // continues to emit events to the client and the default one for this event
+ // attempts to restart the client for no reason
+ async didChangeWatchedFile(event, next) {
+ if (client.isRunning()) {
+ await next(event);
+ }
+ },
+ async configuration(
+ params: lc.ConfigurationParams,
+ token: vscode.CancellationToken,
+ next: lc.ConfigurationRequest.HandlerSignature
+ ) {
+ const resp = await next(params, token);
+ if (resp && Array.isArray(resp)) {
+ return resp.map((val) => {
+ return substituteVSCodeVariables(val);
+ });
+ } else {
+ return resp;
+ }
+ },
+ },
++ async handleDiagnostics(
++ uri: vscode.Uri,
++ diagnostics: vscode.Diagnostic[],
++ next: lc.HandleDiagnosticsSignature
++ ) {
++ const preview = config.previewRustcOutput;
++ diagnostics.forEach((diag, idx) => {
++ // Abuse the fact that VSCode leaks the LSP diagnostics data field through the
++ // Diagnostic class, if they ever break this we are out of luck and have to go
++ // back to the worst diagnostics experience ever:)
++
++ // We encode the rendered output of a rustc diagnostic in the rendered field of
++ // the data payload of the lsp diagnostic. If that field exists, overwrite the
++ // diagnostic code such that clicking it opens the diagnostic in a readonly
++ // text editor for easy inspection
++ const rendered = (diag as unknown as { data?: { rendered?: string } }).data
++ ?.rendered;
++ if (rendered) {
++ if (preview) {
++ const index = rendered.match(/^(note|help):/m)?.index || 0;
++ diag.message = rendered
++ .substring(0, index)
++ .replace(/^ -->[^\n]+\n/m, "");
++ }
++ diag.code = {
++ target: vscode.Uri.from({
++ scheme: "rust-analyzer-diagnostics-view",
++ path: "/diagnostic message",
++ fragment: uri.toString(),
++ query: idx.toString(),
++ }),
++ value: "Click for full compiler diagnostic",
++ };
++ }
++ });
++ return next(uri, diagnostics);
++ },
+ async provideHover(
+ document: vscode.TextDocument,
+ position: vscode.Position,
+ token: vscode.CancellationToken,
+ _next: lc.ProvideHoverSignature
+ ) {
+ const editor = vscode.window.activeTextEditor;
+ const positionOrRange = editor?.selection?.contains(position)
+ ? client.code2ProtocolConverter.asRange(editor.selection)
+ : client.code2ProtocolConverter.asPosition(position);
+ return client
+ .sendRequest(
+ ra.hover,
+ {
+ textDocument:
+ client.code2ProtocolConverter.asTextDocumentIdentifier(document),
+ position: positionOrRange,
+ },
+ token
+ )
+ .then(
+ (result) => {
+ const hover = client.protocol2CodeConverter.asHover(result);
+ if (hover) {
+ const actions = (<any>result).actions;
+ if (actions) {
+ hover.contents.push(renderHoverActions(actions));
+ }
+ }
+ return hover;
+ },
+ (error) => {
+ client.handleFailedRequest(lc.HoverRequest.type, token, error, null);
+ return Promise.resolve(null);
+ }
+ );
+ },
+ // Using custom handling of CodeActions to support action groups and snippet edits.
+ // Note that this means we have to re-implement lazy edit resolving ourselves as well.
+ async provideCodeActions(
+ document: vscode.TextDocument,
+ range: vscode.Range,
+ context: vscode.CodeActionContext,
+ token: vscode.CancellationToken,
+ _next: lc.ProvideCodeActionsSignature
+ ) {
+ const params: lc.CodeActionParams = {
+ textDocument: client.code2ProtocolConverter.asTextDocumentIdentifier(document),
+ range: client.code2ProtocolConverter.asRange(range),
+ context: await client.code2ProtocolConverter.asCodeActionContext(
+ context,
+ token
+ ),
+ };
+ return client.sendRequest(lc.CodeActionRequest.type, params, token).then(
+ async (values) => {
+ if (values === null) return undefined;
+ const result: (vscode.CodeAction | vscode.Command)[] = [];
+ const groups = new Map<
+ string,
+ { index: number; items: vscode.CodeAction[] }
+ >();
+ for (const item of values) {
+ // In our case we expect to get code edits only from diagnostics
+ if (lc.CodeAction.is(item)) {
+ assert(
+ !item.command,
+ "We don't expect to receive commands in CodeActions"
+ );
+ const action = await client.protocol2CodeConverter.asCodeAction(
+ item,
+ token
+ );
+ result.push(action);
+ continue;
+ }
+ assert(
+ isCodeActionWithoutEditsAndCommands(item),
+ "We don't expect edits or commands here"
+ );
+ const kind = client.protocol2CodeConverter.asCodeActionKind(
+ (item as any).kind
+ );
+ const action = new vscode.CodeAction(item.title, kind);
+ const group = (item as any).group;
+ action.command = {
+ command: "rust-analyzer.resolveCodeAction",
+ title: item.title,
+ arguments: [item],
+ };
+
+ // Set a dummy edit, so that VS Code doesn't try to resolve this.
+ action.edit = new WorkspaceEdit();
+
+ if (group) {
+ let entry = groups.get(group);
+ if (!entry) {
+ entry = { index: result.length, items: [] };
+ groups.set(group, entry);
+ result.push(action);
+ }
+ entry.items.push(action);
+ } else {
+ result.push(action);
+ }
+ }
+ for (const [group, { index, items }] of groups) {
+ if (items.length === 1) {
+ result[index] = items[0];
+ } else {
+ const action = new vscode.CodeAction(group);
+ action.kind = items[0].kind;
+ action.command = {
+ command: "rust-analyzer.applyActionGroup",
+ title: "",
+ arguments: [
+ items.map((item) => {
+ return {
+ label: item.title,
+ arguments: item.command!.arguments![0],
+ };
+ }),
+ ],
+ };
+
+ // Set a dummy edit, so that VS Code doesn't try to resolve this.
+ action.edit = new WorkspaceEdit();
+
+ result[index] = action;
+ }
+ }
+ return result;
+ },
+ (_error) => undefined
+ );
+ },
+ },
+ markdown: {
+ supportHtml: true,
+ },
+ };
+
+ const client = new lc.LanguageClient(
+ "rust-analyzer",
+ "Rust Analyzer Language Server",
+ serverOptions,
+ clientOptions
+ );
+
+ // To turn on all proposed features use: client.registerProposedFeatures();
+ client.registerFeature(new ExperimentalFeatures());
+
+ return client;
+}
+
+class ExperimentalFeatures implements lc.StaticFeature {
+ getState(): lc.FeatureState {
+ return { kind: "static" };
+ }
+ fillClientCapabilities(capabilities: lc.ClientCapabilities): void {
+ const caps: any = capabilities.experimental ?? {};
+ caps.snippetTextEdit = true;
+ caps.codeActionGroup = true;
+ caps.hoverActions = true;
+ caps.serverStatusNotification = true;
+ caps.commands = {
+ commands: [
+ "rust-analyzer.runSingle",
+ "rust-analyzer.debugSingle",
+ "rust-analyzer.showReferences",
+ "rust-analyzer.gotoLocation",
+ "editor.action.triggerParameterHints",
+ ],
+ };
+ capabilities.experimental = caps;
+ }
+ initialize(
+ _capabilities: lc.ServerCapabilities<any>,
+ _documentSelector: lc.DocumentSelector | undefined
+ ): void {}
+ dispose(): void {}
+}
+
+function isCodeActionWithoutEditsAndCommands(value: any): boolean {
+ const candidate: lc.CodeAction = value;
+ return (
+ candidate &&
+ Is.string(candidate.title) &&
+ (candidate.diagnostics === void 0 ||
+ Is.typedArray(candidate.diagnostics, lc.Diagnostic.is)) &&
+ (candidate.kind === void 0 || Is.string(candidate.kind)) &&
+ candidate.edit === void 0 &&
+ candidate.command === void 0
+ );
+}
--- /dev/null
+import * as path from "path";
+import * as os from "os";
+import * as vscode from "vscode";
+import { Env } from "./client";
+import { log } from "./util";
+
+export type RunnableEnvCfg =
+ | undefined
+ | Record<string, string>
+ | { mask?: string; env: Record<string, string> }[];
+
+export class Config {
+ readonly extensionId = "rust-lang.rust-analyzer";
+ configureLang: vscode.Disposable | undefined;
+
+ readonly rootSection = "rust-analyzer";
+ private readonly requiresReloadOpts = [
+ "cargo",
+ "procMacro",
+ "serverPath",
+ "server",
+ "files",
+ "lens", // works as lens.*
+ ].map((opt) => `${this.rootSection}.${opt}`);
+
+ readonly package: {
+ version: string;
+ releaseTag: string | null;
+ enableProposedApi: boolean | undefined;
+ } = vscode.extensions.getExtension(this.extensionId)!.packageJSON;
+
+ readonly globalStorageUri: vscode.Uri;
+
+ constructor(ctx: vscode.ExtensionContext) {
+ this.globalStorageUri = ctx.globalStorageUri;
+ vscode.workspace.onDidChangeConfiguration(
+ this.onDidChangeConfiguration,
+ this,
+ ctx.subscriptions
+ );
+ this.refreshLogging();
+ this.configureLanguage();
+ }
+
+ dispose() {
+ this.configureLang?.dispose();
+ }
+
+ private refreshLogging() {
+ log.setEnabled(this.traceExtension);
+ log.info("Extension version:", this.package.version);
+
+ const cfg = Object.entries(this.cfg).filter(([_, val]) => !(val instanceof Function));
+ log.info("Using configuration", Object.fromEntries(cfg));
+ }
+
+ private async onDidChangeConfiguration(event: vscode.ConfigurationChangeEvent) {
+ this.refreshLogging();
+
+ this.configureLanguage();
+
+ const requiresReloadOpt = this.requiresReloadOpts.find((opt) =>
+ event.affectsConfiguration(opt)
+ );
+
+ if (!requiresReloadOpt) return;
+
+ if (this.restartServerOnConfigChange) {
+ await vscode.commands.executeCommand("rust-analyzer.reload");
+ return;
+ }
+
+ const message = `Changing "${requiresReloadOpt}" requires a server restart`;
+ const userResponse = await vscode.window.showInformationMessage(message, "Restart now");
+
+ if (userResponse) {
+ const command = "rust-analyzer.reload";
+ await vscode.commands.executeCommand(command);
+ }
+ }
+
+ /**
+ * Sets up additional language configuration that's impossible to do via a
+ * separate language-configuration.json file. See [1] for more information.
+ *
+ * [1]: https://github.com/Microsoft/vscode/issues/11514#issuecomment-244707076
+ */
+ private configureLanguage() {
+ if (this.typingContinueCommentsOnNewline && !this.configureLang) {
+ const indentAction = vscode.IndentAction.None;
+
+ this.configureLang = vscode.languages.setLanguageConfiguration("rust", {
+ onEnterRules: [
+ {
+ // Doc single-line comment
+ // e.g. ///|
+ beforeText: /^\s*\/{3}.*$/,
+ action: { indentAction, appendText: "/// " },
+ },
+ {
+ // Parent doc single-line comment
+ // e.g. //!|
+ beforeText: /^\s*\/{2}\!.*$/,
+ action: { indentAction, appendText: "//! " },
+ },
+ {
+ // Begins an auto-closed multi-line comment (standard or parent doc)
+ // e.g. /** | */ or /*! | */
+ beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/,
+ afterText: /^\s*\*\/$/,
+ action: {
+ indentAction: vscode.IndentAction.IndentOutdent,
+ appendText: " * ",
+ },
+ },
+ {
+ // Begins a multi-line comment (standard or parent doc)
+ // e.g. /** ...| or /*! ...|
+ beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/,
+ action: { indentAction, appendText: " * " },
+ },
+ {
+ // Continues a multi-line comment
+ // e.g. * ...|
+ beforeText: /^(\ \ )*\ \*(\ ([^\*]|\*(?!\/))*)?$/,
+ action: { indentAction, appendText: "* " },
+ },
+ {
+ // Dedents after closing a multi-line comment
+ // e.g. */|
+ beforeText: /^(\ \ )*\ \*\/\s*$/,
+ action: { indentAction, removeText: 1 },
+ },
+ ],
+ });
+ }
+ if (!this.typingContinueCommentsOnNewline && this.configureLang) {
+ this.configureLang.dispose();
+ this.configureLang = undefined;
+ }
+ }
+
+ // We don't do runtime config validation here for simplicity. More on stackoverflow:
+ // https://stackoverflow.com/questions/60135780/what-is-the-best-way-to-type-check-the-configuration-for-vscode-extension
+
+ private get cfg(): vscode.WorkspaceConfiguration {
+ return vscode.workspace.getConfiguration(this.rootSection);
+ }
+
+ /**
+ * Beware that postfix `!` operator erases both `null` and `undefined`.
+ * This is why the following doesn't work as expected:
+ *
+ * ```ts
+ * const nullableNum = vscode
+ * .workspace
+ * .getConfiguration
+ * .getConfiguration("rust-analyzer")
+ * .get<number | null>(path)!;
+ *
+ * // What happens is that type of `nullableNum` is `number` but not `null | number`:
+ * const fullFledgedNum: number = nullableNum;
+ * ```
+ * So this getter handles this quirk by not requiring the caller to use postfix `!`
+ */
+ private get<T>(path: string): T {
+ return this.cfg.get<T>(path)!;
+ }
+
+ get serverPath() {
+ return this.get<null | string>("server.path") ?? this.get<null | string>("serverPath");
+ }
+ get serverExtraEnv(): Env {
+ const extraEnv =
+ this.get<{ [key: string]: string | number } | null>("server.extraEnv") ?? {};
+ return Object.fromEntries(
+ Object.entries(extraEnv).map(([k, v]) => [k, typeof v !== "string" ? v.toString() : v])
+ );
+ }
+ get traceExtension() {
+ return this.get<boolean>("trace.extension");
+ }
+
+ get cargoRunner() {
+ return this.get<string | undefined>("cargoRunner");
+ }
+
+ get runnableEnv() {
+ const item = this.get<any>("runnableEnv");
+ if (!item) return item;
+ const fixRecord = (r: Record<string, any>) => {
+ for (const key in r) {
+ if (typeof r[key] !== "string") {
+ r[key] = String(r[key]);
+ }
+ }
+ };
+ if (item instanceof Array) {
+ item.forEach((x) => fixRecord(x.env));
+ } else {
+ fixRecord(item);
+ }
+ return item;
+ }
+
+ get restartServerOnConfigChange() {
+ return this.get<boolean>("restartServerOnConfigChange");
+ }
+
+ get typingContinueCommentsOnNewline() {
+ return this.get<boolean>("typing.continueCommentsOnNewline");
+ }
+
+ get debug() {
+ let sourceFileMap = this.get<Record<string, string> | "auto">("debug.sourceFileMap");
+ if (sourceFileMap !== "auto") {
+ // "/rustc/<id>" used by suggestions only.
+ const { ["/rustc/<id>"]: _, ...trimmed } =
+ this.get<Record<string, string>>("debug.sourceFileMap");
+ sourceFileMap = trimmed;
+ }
+
+ return {
+ engine: this.get<string>("debug.engine"),
+ engineSettings: this.get<object>("debug.engineSettings"),
+ openDebugPane: this.get<boolean>("debug.openDebugPane"),
+ sourceFileMap: sourceFileMap,
+ };
+ }
+
+ get hoverActions() {
+ return {
+ enable: this.get<boolean>("hover.actions.enable"),
+ implementations: this.get<boolean>("hover.actions.implementations.enable"),
+ references: this.get<boolean>("hover.actions.references.enable"),
+ run: this.get<boolean>("hover.actions.run.enable"),
+ debug: this.get<boolean>("hover.actions.debug.enable"),
+ gotoTypeDef: this.get<boolean>("hover.actions.gotoTypeDef.enable"),
+ };
+ }
++ get previewRustcOutput() {
++ return this.get<boolean>("diagnostics.previewRustcOutput");
++ }
+}
+
+const VarRegex = new RegExp(/\$\{(.+?)\}/g);
+
+export function substituteVSCodeVariableInString(val: string): string {
+ return val.replace(VarRegex, (substring: string, varName) => {
+ if (typeof varName === "string") {
+ return computeVscodeVar(varName) || substring;
+ } else {
+ return substring;
+ }
+ });
+}
+
+export function substituteVSCodeVariables(resp: any): any {
+ if (typeof resp === "string") {
+ return substituteVSCodeVariableInString(resp);
+ } else if (resp && Array.isArray(resp)) {
+ return resp.map((val) => {
+ return substituteVSCodeVariables(val);
+ });
+ } else if (resp && typeof resp === "object") {
+ const res: { [key: string]: any } = {};
+ for (const key in resp) {
+ const val = resp[key];
+ res[key] = substituteVSCodeVariables(val);
+ }
+ return res;
+ } else if (typeof resp === "function") {
+ return null;
+ }
+ return resp;
+}
+export function substituteVariablesInEnv(env: Env): Env {
+ const missingDeps = new Set<string>();
+ // vscode uses `env:ENV_NAME` for env vars resolution, and it's easier
+ // to follow the same convention for our dependency tracking
+ const definedEnvKeys = new Set(Object.keys(env).map((key) => `env:${key}`));
+ const envWithDeps = Object.fromEntries(
+ Object.entries(env).map(([key, value]) => {
+ const deps = new Set<string>();
+ const depRe = new RegExp(/\${(?<depName>.+?)}/g);
+ let match = undefined;
+ while ((match = depRe.exec(value))) {
+ const depName = match.groups!.depName;
+ deps.add(depName);
+ // `depName` at this point can have a form of `expression` or
+ // `prefix:expression`
+ if (!definedEnvKeys.has(depName)) {
+ missingDeps.add(depName);
+ }
+ }
+ return [`env:${key}`, { deps: [...deps], value }];
+ })
+ );
+
+ const resolved = new Set<string>();
+ for (const dep of missingDeps) {
+ const match = /(?<prefix>.*?):(?<body>.+)/.exec(dep);
+ if (match) {
+ const { prefix, body } = match.groups!;
+ if (prefix === "env") {
+ const envName = body;
+ envWithDeps[dep] = {
+ value: process.env[envName] ?? "",
+ deps: [],
+ };
+ resolved.add(dep);
+ } else {
+ // we can't handle other prefixes at the moment
+ // leave values as is, but still mark them as resolved
+ envWithDeps[dep] = {
+ value: "${" + dep + "}",
+ deps: [],
+ };
+ resolved.add(dep);
+ }
+ } else {
+ envWithDeps[dep] = {
+ value: computeVscodeVar(dep) || "${" + dep + "}",
+ deps: [],
+ };
+ }
+ }
+ const toResolve = new Set(Object.keys(envWithDeps));
+
+ let leftToResolveSize;
+ do {
+ leftToResolveSize = toResolve.size;
+ for (const key of toResolve) {
+ if (envWithDeps[key].deps.every((dep) => resolved.has(dep))) {
+ envWithDeps[key].value = envWithDeps[key].value.replace(
+ /\${(?<depName>.+?)}/g,
+ (_wholeMatch, depName) => {
+ return envWithDeps[depName].value;
+ }
+ );
+ resolved.add(key);
+ toResolve.delete(key);
+ }
+ }
+ } while (toResolve.size > 0 && toResolve.size < leftToResolveSize);
+
+ const resolvedEnv: Env = {};
+ for (const key of Object.keys(env)) {
+ resolvedEnv[key] = envWithDeps[`env:${key}`].value;
+ }
+ return resolvedEnv;
+}
+
+function computeVscodeVar(varName: string): string | null {
+ const workspaceFolder = () => {
+ const folders = vscode.workspace.workspaceFolders ?? [];
+ if (folders.length === 1) {
+ // TODO: support for remote workspaces?
+ return folders[0].uri.fsPath;
+ } else if (folders.length > 1) {
+ // could use currently opened document to detect the correct
+ // workspace. However, that would be determined by the document
+ // user has opened on Editor startup. Could lead to
+ // unpredictable workspace selection in practice.
+ // It's better to pick the first one
+ return folders[0].uri.fsPath;
+ } else {
+ // no workspace opened
+ return "";
+ }
+ };
+ // https://code.visualstudio.com/docs/editor/variables-reference
+ const supportedVariables: { [k: string]: () => string } = {
+ workspaceFolder,
+
+ workspaceFolderBasename: () => {
+ return path.basename(workspaceFolder());
+ },
+
+ cwd: () => process.cwd(),
+ userHome: () => os.homedir(),
+
+ // see
+ // https://github.com/microsoft/vscode/blob/08ac1bb67ca2459496b272d8f4a908757f24f56f/src/vs/workbench/api/common/extHostVariableResolverService.ts#L81
+ // or
+ // https://github.com/microsoft/vscode/blob/29eb316bb9f154b7870eb5204ec7f2e7cf649bec/src/vs/server/node/remoteTerminalChannel.ts#L56
+ execPath: () => process.env.VSCODE_EXEC_PATH ?? process.execPath,
+
+ pathSeparator: () => path.sep,
+ };
+
+ if (varName in supportedVariables) {
+ return supportedVariables[varName]();
+ } else {
+ // return "${" + varName + "}";
+ return null;
+ }
+}
--- /dev/null
- serverOptions
+import * as vscode from "vscode";
+import * as lc from "vscode-languageclient/node";
+import * as ra from "./lsp_ext";
+
+import { Config, substituteVariablesInEnv, substituteVSCodeVariables } from "./config";
+import { createClient } from "./client";
+import { isRustDocument, isRustEditor, log, RustEditor } from "./util";
+import { ServerStatusParams } from "./lsp_ext";
+import { PersistentState } from "./persistent_state";
+import { bootstrap } from "./bootstrap";
+
+// We only support local folders, not eg. Live Share (`vlsl:` scheme), so don't activate if
+// only those are in use. We use "Empty" to represent these scenarios
+// (r-a still somewhat works with Live Share, because commands are tunneled to the host)
+
+export type Workspace =
+ | { kind: "Empty" }
+ | {
+ kind: "Workspace Folder";
+ }
+ | {
+ kind: "Detached Files";
+ files: vscode.TextDocument[];
+ };
+
+export function fetchWorkspace(): Workspace {
+ const folders = (vscode.workspace.workspaceFolders || []).filter(
+ (folder) => folder.uri.scheme === "file"
+ );
+ const rustDocuments = vscode.workspace.textDocuments.filter((document) =>
+ isRustDocument(document)
+ );
+
+ return folders.length === 0
+ ? rustDocuments.length === 0
+ ? { kind: "Empty" }
+ : {
+ kind: "Detached Files",
+ files: rustDocuments,
+ }
+ : { kind: "Workspace Folder" };
+}
+
+export type CommandFactory = {
+ enabled: (ctx: CtxInit) => Cmd;
+ disabled?: (ctx: Ctx) => Cmd;
+};
+
+export type CtxInit = Ctx & {
+ readonly client: lc.LanguageClient;
+};
+
+export class Ctx {
+ readonly statusBar: vscode.StatusBarItem;
+ readonly config: Config;
+ readonly workspace: Workspace;
+
+ private _client: lc.LanguageClient | undefined;
+ private _serverPath: string | undefined;
+ private traceOutputChannel: vscode.OutputChannel | undefined;
+ private outputChannel: vscode.OutputChannel | undefined;
+ private clientSubscriptions: Disposable[];
+ private state: PersistentState;
+ private commandFactories: Record<string, CommandFactory>;
+ private commandDisposables: Disposable[];
+
+ get client() {
+ return this._client;
+ }
+
+ constructor(
+ readonly extCtx: vscode.ExtensionContext,
+ commandFactories: Record<string, CommandFactory>,
+ workspace: Workspace
+ ) {
+ extCtx.subscriptions.push(this);
+ this.statusBar = vscode.window.createStatusBarItem(vscode.StatusBarAlignment.Left);
+ this.statusBar.show();
+ this.workspace = workspace;
+ this.clientSubscriptions = [];
+ this.commandDisposables = [];
+ this.commandFactories = commandFactories;
+
+ this.state = new PersistentState(extCtx.globalState);
+ this.config = new Config(extCtx);
+
+ this.updateCommands("disable");
+ this.setServerStatus({
+ health: "stopped",
+ });
+ }
+
+ dispose() {
+ this.config.dispose();
+ this.statusBar.dispose();
+ void this.disposeClient();
+ this.commandDisposables.forEach((disposable) => disposable.dispose());
+ }
+
+ async onWorkspaceFolderChanges() {
+ const workspace = fetchWorkspace();
+ if (workspace.kind === "Detached Files" && this.workspace.kind === "Detached Files") {
+ if (workspace.files !== this.workspace.files) {
+ if (this.client?.isRunning()) {
+ // Ideally we wouldn't need to tear down the server here, but currently detached files
+ // are only specified at server start
+ await this.stopAndDispose();
+ await this.start();
+ }
+ return;
+ }
+ }
+ if (workspace.kind === "Workspace Folder" && this.workspace.kind === "Workspace Folder") {
+ return;
+ }
+ if (workspace.kind === "Empty") {
+ await this.stopAndDispose();
+ return;
+ }
+ if (this.client?.isRunning()) {
+ await this.restart();
+ }
+ }
+
+ private async getOrCreateClient() {
+ if (this.workspace.kind === "Empty") {
+ return;
+ }
+
+ if (!this.traceOutputChannel) {
+ this.traceOutputChannel = vscode.window.createOutputChannel(
+ "Rust Analyzer Language Server Trace"
+ );
+ this.pushExtCleanup(this.traceOutputChannel);
+ }
+ if (!this.outputChannel) {
+ this.outputChannel = vscode.window.createOutputChannel("Rust Analyzer Language Server");
+ this.pushExtCleanup(this.outputChannel);
+ }
+
+ if (!this._client) {
+ this._serverPath = await bootstrap(this.extCtx, this.config, this.state).catch(
+ (err) => {
+ let message = "bootstrap error. ";
+
+ message +=
+ 'See the logs in "OUTPUT > Rust Analyzer Client" (should open automatically). ';
+ message +=
+ 'To enable verbose logs use { "rust-analyzer.trace.extension": true }';
+
+ log.error("Bootstrap error", err);
+ throw new Error(message);
+ }
+ );
+ const newEnv = substituteVariablesInEnv(
+ Object.assign({}, process.env, this.config.serverExtraEnv)
+ );
+ const run: lc.Executable = {
+ command: this._serverPath,
+ options: { env: newEnv },
+ };
+ const serverOptions = {
+ run,
+ debug: run,
+ };
+
+ let rawInitializationOptions = vscode.workspace.getConfiguration("rust-analyzer");
+
+ if (this.workspace.kind === "Detached Files") {
+ rawInitializationOptions = {
+ detachedFiles: this.workspace.files.map((file) => file.uri.fsPath),
+ ...rawInitializationOptions,
+ };
+ }
+
+ const initializationOptions = substituteVSCodeVariables(rawInitializationOptions);
+
+ this._client = await createClient(
+ this.traceOutputChannel,
+ this.outputChannel,
+ initializationOptions,
+ serverOptions,
+ this.config
+ );
+ this.pushClientCleanup(
+ this._client.onNotification(ra.serverStatus, (params) =>
+ this.setServerStatus(params)
+ )
+ );
+ }
+ return this._client;
+ }
+
+ async start() {
+ log.info("Starting language client");
+ const client = await this.getOrCreateClient();
+ if (!client) {
+ return;
+ }
+ await client.start();
+ this.updateCommands();
+ }
+
+ async restart() {
+ // FIXME: We should re-use the client, that is ctx.deactivate() if none of the configs have changed
+ await this.stopAndDispose();
+ await this.start();
+ }
+
+ async stop() {
+ if (!this._client) {
+ return;
+ }
+ log.info("Stopping language client");
+ this.updateCommands("disable");
+ await this._client.stop();
+ }
+
+ async stopAndDispose() {
+ if (!this._client) {
+ return;
+ }
+ log.info("Disposing language client");
+ this.updateCommands("disable");
+ await this.disposeClient();
+ }
+
+ private async disposeClient() {
+ this.clientSubscriptions?.forEach((disposable) => disposable.dispose());
+ this.clientSubscriptions = [];
+ await this._client?.dispose();
+ this._serverPath = undefined;
+ this._client = undefined;
+ }
+
+ get activeRustEditor(): RustEditor | undefined {
+ const editor = vscode.window.activeTextEditor;
+ return editor && isRustEditor(editor) ? editor : undefined;
+ }
+
+ get extensionPath(): string {
+ return this.extCtx.extensionPath;
+ }
+
+ get subscriptions(): Disposable[] {
+ return this.extCtx.subscriptions;
+ }
+
+ get serverPath(): string | undefined {
+ return this._serverPath;
+ }
+
+ private updateCommands(forceDisable?: "disable") {
+ this.commandDisposables.forEach((disposable) => disposable.dispose());
+ this.commandDisposables = [];
+
+ const clientRunning = (!forceDisable && this._client?.isRunning()) ?? false;
+ const isClientRunning = function (_ctx: Ctx): _ctx is CtxInit {
+ return clientRunning;
+ };
+
+ for (const [name, factory] of Object.entries(this.commandFactories)) {
+ const fullName = `rust-analyzer.${name}`;
+ let callback;
+ if (isClientRunning(this)) {
+ // we asserted that `client` is defined
+ callback = factory.enabled(this);
+ } else if (factory.disabled) {
+ callback = factory.disabled(this);
+ } else {
+ callback = () =>
+ vscode.window.showErrorMessage(
+ `command ${fullName} failed: rust-analyzer server is not running`
+ );
+ }
+
+ this.commandDisposables.push(vscode.commands.registerCommand(fullName, callback));
+ }
+ }
+
+ setServerStatus(status: ServerStatusParams | { health: "stopped" }) {
+ let icon = "";
+ const statusBar = this.statusBar;
+ switch (status.health) {
+ case "ok":
+ statusBar.tooltip = (status.message ?? "Ready") + "\nClick to stop server.";
+ statusBar.command = "rust-analyzer.stopServer";
+ statusBar.color = undefined;
+ statusBar.backgroundColor = undefined;
+ break;
+ case "warning":
+ statusBar.tooltip =
+ (status.message ? status.message + "\n" : "") + "Click to reload.";
+
+ statusBar.command = "rust-analyzer.reloadWorkspace";
+ statusBar.color = new vscode.ThemeColor("statusBarItem.warningForeground");
+ statusBar.backgroundColor = new vscode.ThemeColor(
+ "statusBarItem.warningBackground"
+ );
+ icon = "$(warning) ";
+ break;
+ case "error":
+ statusBar.tooltip =
+ (status.message ? status.message + "\n" : "") + "Click to reload.";
+
+ statusBar.command = "rust-analyzer.reloadWorkspace";
+ statusBar.color = new vscode.ThemeColor("statusBarItem.errorForeground");
+ statusBar.backgroundColor = new vscode.ThemeColor("statusBarItem.errorBackground");
+ icon = "$(error) ";
+ break;
+ case "stopped":
+ statusBar.tooltip = "Server is stopped.\nClick to start.";
+ statusBar.command = "rust-analyzer.startServer";
+ statusBar.color = undefined;
+ statusBar.backgroundColor = undefined;
+ statusBar.text = `$(stop-circle) rust-analyzer`;
+ return;
+ }
+ if (!status.quiescent) icon = "$(sync~spin) ";
+ statusBar.text = `${icon}rust-analyzer`;
+ }
+
+ pushExtCleanup(d: Disposable) {
+ this.extCtx.subscriptions.push(d);
+ }
+
+ private pushClientCleanup(d: Disposable) {
+ this.clientSubscriptions.push(d);
+ }
+}
+
+export interface Disposable {
+ dispose(): void;
+}
+export type Cmd = (...args: any[]) => unknown;
--- /dev/null
+import * as vscode from "vscode";
+import * as lc from "vscode-languageclient/node";
+
+import * as commands from "./commands";
+import { CommandFactory, Ctx, fetchWorkspace } from "./ctx";
+import { activateTaskProvider } from "./tasks";
+import { setContextValue } from "./util";
+
+const RUST_PROJECT_CONTEXT_NAME = "inRustProject";
+
+export interface RustAnalyzerExtensionApi {
+ readonly client?: lc.LanguageClient;
+}
+
+export async function deactivate() {
+ await setContextValue(RUST_PROJECT_CONTEXT_NAME, undefined);
+}
+
+export async function activate(
+ context: vscode.ExtensionContext
+): Promise<RustAnalyzerExtensionApi> {
+ if (vscode.extensions.getExtension("rust-lang.rust")) {
+ vscode.window
+ .showWarningMessage(
+ `You have both the rust-analyzer (rust-lang.rust-analyzer) and Rust (rust-lang.rust) ` +
+ "plugins enabled. These are known to conflict and cause various functions of " +
+ "both plugins to not work correctly. You should disable one of them.",
+ "Got it"
+ )
+ .then(() => {}, console.error);
+ }
+
+ const ctx = new Ctx(context, createCommands(), fetchWorkspace());
+ // VS Code doesn't show a notification when an extension fails to activate
+ // so we do it ourselves.
+ const api = await activateServer(ctx).catch((err) => {
+ void vscode.window.showErrorMessage(
+ `Cannot activate rust-analyzer extension: ${err.message}`
+ );
+ throw err;
+ });
+ await setContextValue(RUST_PROJECT_CONTEXT_NAME, true);
+ return api;
+}
+
+async function activateServer(ctx: Ctx): Promise<RustAnalyzerExtensionApi> {
+ if (ctx.workspace.kind === "Workspace Folder") {
+ ctx.pushExtCleanup(activateTaskProvider(ctx.config));
+ }
+
+ ctx.pushExtCleanup(
+ vscode.workspace.registerTextDocumentContentProvider(
+ "rust-analyzer-diagnostics-view",
+ new (class implements vscode.TextDocumentContentProvider {
+ async provideTextDocumentContent(uri: vscode.Uri): Promise<string> {
+ const diags = ctx.client?.diagnostics?.get(
+ vscode.Uri.parse(uri.fragment, true)
+ );
+ if (!diags) {
+ return "Unable to find original rustc diagnostic";
+ }
+
+ const diag = diags[parseInt(uri.query)];
+ if (!diag) {
+ return "Unable to find original rustc diagnostic";
+ }
+ const rendered = (diag as unknown as { data?: { rendered?: string } }).data
+ ?.rendered;
+ return rendered ?? "Unable to find original rustc diagnostic";
+ }
+ })()
+ )
+ );
+
+ vscode.workspace.onDidChangeWorkspaceFolders(
+ async (_) => ctx.onWorkspaceFolderChanges(),
+ null,
+ ctx.subscriptions
+ );
+ vscode.workspace.onDidChangeConfiguration(
+ async (_) => {
+ await ctx.client?.sendNotification("workspace/didChangeConfiguration", {
+ settings: "",
+ });
+ },
+ null,
+ ctx.subscriptions
+ );
+
+ await ctx.start();
+ return ctx;
+}
+
+function createCommands(): Record<string, CommandFactory> {
+ return {
+ onEnter: {
+ enabled: commands.onEnter,
+ disabled: (_) => () => vscode.commands.executeCommand("default:type", { text: "\n" }),
+ },
+ reload: {
+ enabled: (ctx) => async () => {
+ void vscode.window.showInformationMessage("Reloading rust-analyzer...");
+ await ctx.restart();
+ },
+ disabled: (ctx) => async () => {
+ void vscode.window.showInformationMessage("Reloading rust-analyzer...");
+ await ctx.start();
+ },
+ },
+ startServer: {
+ enabled: (ctx) => async () => {
+ await ctx.start();
+ },
+ disabled: (ctx) => async () => {
+ await ctx.start();
+ },
+ },
+ stopServer: {
+ enabled: (ctx) => async () => {
+ // FIXME: We should re-use the client, that is ctx.deactivate() if none of the configs have changed
+ await ctx.stopAndDispose();
+ ctx.setServerStatus({
+ health: "stopped",
+ });
+ },
+ disabled: (_) => async () => {},
+ },
+
+ analyzerStatus: { enabled: commands.analyzerStatus },
+ memoryUsage: { enabled: commands.memoryUsage },
+ shuffleCrateGraph: { enabled: commands.shuffleCrateGraph },
+ reloadWorkspace: { enabled: commands.reloadWorkspace },
+ matchingBrace: { enabled: commands.matchingBrace },
+ joinLines: { enabled: commands.joinLines },
+ parentModule: { enabled: commands.parentModule },
+ syntaxTree: { enabled: commands.syntaxTree },
+ viewHir: { enabled: commands.viewHir },
+ viewFileText: { enabled: commands.viewFileText },
+ viewItemTree: { enabled: commands.viewItemTree },
+ viewCrateGraph: { enabled: commands.viewCrateGraph },
+ viewFullCrateGraph: { enabled: commands.viewFullCrateGraph },
+ expandMacro: { enabled: commands.expandMacro },
+ run: { enabled: commands.run },
+ copyRunCommandLine: { enabled: commands.copyRunCommandLine },
+ debug: { enabled: commands.debug },
+ newDebugConfig: { enabled: commands.newDebugConfig },
+ openDocs: { enabled: commands.openDocs },
+ openCargoToml: { enabled: commands.openCargoToml },
+ peekTests: { enabled: commands.peekTests },
+ moveItemUp: { enabled: commands.moveItemUp },
+ moveItemDown: { enabled: commands.moveItemDown },
+ cancelFlycheck: { enabled: commands.cancelFlycheck },
+ ssr: { enabled: commands.ssr },
+ serverVersion: { enabled: commands.serverVersion },
+ // Internal commands which are invoked by the server.
+ applyActionGroup: { enabled: commands.applyActionGroup },
+ applySnippetWorkspaceEdit: { enabled: commands.applySnippetWorkspaceEditCommand },
+ debugSingle: { enabled: commands.debugSingle },
+ gotoLocation: { enabled: commands.gotoLocation },
+ linkToCommand: { enabled: commands.linkToCommand },
+ resolveCodeAction: { enabled: commands.resolveCodeAction },
+ runSingle: { enabled: commands.runSingle },
+ showReferences: { enabled: commands.showReferences },
+ };
+}
--- /dev/null
- rust-version = "1.57"
+[package]
+name = "xtask"
+version = "0.1.0"
+publish = false
+license = "MIT OR Apache-2.0"
+edition = "2021"
+rust-version = "1.65"
+
+[dependencies]
+anyhow = "1.0.62"
+flate2 = "1.0.24"
+write-json = "0.1.2"
+xshell = "0.2.2"
+xflags = "0.3.0"
+# Avoid adding more dependencies to this crate