cache: cargo
before_cache:
- find ./target/debug -type f -maxdepth 1 -delete
- - rm -fr ./target/debug/{deps,.fingerprint}/{*ra_*,*test*}
+ - rm -fr ./target/debug/{deps,.fingerprint}/{*ra_*,*test*,*tools*,*gen_lsp*}
+ - rm -f ./target/.rustc_info.json
env:
- CARGO_INCREMENTAL=0
"arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-utils 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"nodrop 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)",
"scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
"arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
"cfg-if 0.1.6 (registry+https://github.com/rust-lang/crates.io-index)",
"crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"scopeguard 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "derive-new"
-version = "0.5.5"
+version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.18 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
dependencies = [
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.17 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.18 (registry+https://github.com/rust-lang/crates.io-index)",
"synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "flexi_logger"
-version = "0.9.3"
+version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"chrono 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
dependencies = [
"crossbeam-channel 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "languageserver-types 0.51.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "languageserver-types 0.51.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
[[package]]
name = "languageserver-types"
-version = "0.51.0"
+version = "0.51.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.32 (registry+https://github.com/rust-lang/crates.io-index)",
- "url 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
"url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "lazy_static"
-version = "1.1.0"
+version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
-]
[[package]]
name = "libc"
"num-traits 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.17 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.18 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_editor 0.1.0",
"ra_syntax 0.1.0",
- "rayon 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rayon 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"salsa 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"drop_bomb 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"failure_derive 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
- "flexi_logger 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "flexi_logger 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
"gen_lsp_server 0.1.0",
"im 12.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "languageserver-types 0.51.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "languageserver-types 0.51.1 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"ra_analysis 0.1.0",
"ra_editor 0.1.0",
"ra_syntax 0.1.0",
- "rayon 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rayon 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
"relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-hash 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
name = "ra_syntax"
version = "0.1.0"
dependencies = [
+ "arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)",
"drop_bomb 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"itertools 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)",
"parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
[[package]]
name = "rayon"
-version = "1.0.2"
+version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crossbeam-deque 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"crossbeam-deque 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "regex"
-version = "1.0.5"
+version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"aho-corasick 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)",
version = "0.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "derive-new 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "derive-new 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)",
"indexmap 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"lock_api 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
dependencies = [
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.17 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.18 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
[[package]]
name = "syn"
-version = "0.15.17"
+version = "0.15.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
dependencies = [
"proc-macro2 0.4.20 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.15.17 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.18 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
"error-chain 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)",
"glob 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)",
"humansize 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"pest 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"pest_derive 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "regex 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "regex 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.32 (registry+https://github.com/rust-lang/crates.io-index)",
"slug 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"unic-segment 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
- "url 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
[[package]]
name = "url"
-version = "1.7.1"
+version = "1.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"idna 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
- "url 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
"checksum crossbeam-epoch 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9c90f1474584f38e270b5b613e898c8c328aa4f3dea85e0a27ac2e642f009416"
"checksum crossbeam-utils 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2760899e32a1d58d5abb31129f8fae5de75220bc2176e77ff7c627ae45c918d9"
"checksum crossbeam-utils 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "677d453a17e8bd2b913fa38e8b9cf04bcdbb5be790aa294f2389661d72036015"
-"checksum derive-new 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "899ec79626c14e00ccc9729b4d750bbe67fe76a8f436824c16e0233bbd9d7daa"
+"checksum derive-new 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "6ca414e896ae072546f4d789f452daaecf60ddee4c9df5dc6d5936d769e3d87c"
"checksum deunicode 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "850878694b7933ca4c9569d30a34b55031b9b139ee1fc7b94a527c4ef960d690"
"checksum difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198"
"checksum digest 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "03b072242a8cbaf9c145665af9d250c59af3b958f83ed6824e13533cf76d5b90"
"checksum failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6dd377bcc1b1b7ce911967e3ec24fa19c3224394ec05b54aa7b083d498341ac7"
"checksum failure_derive 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "64c2d913fe8ed3b6c6518eedf4538255b989945c14c2a7d5cbff62a5e2120596"
"checksum fake-simd 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed"
-"checksum flexi_logger 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7992096ba2290bd35b86b282e72edae518a25aa9a067ff417bc017ae63ac5e22"
+"checksum flexi_logger 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "553854ebfebeae44ba699a9dc7d53a4036ccc01cd1e144aea0e3054c54383733"
"checksum fst 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9b0408ab57c1bf7c634b2ac6a165d14f642dc3335a43203090a7f8c78b54577b"
"checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
"checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
"checksum itertools 0.7.8 (registry+https://github.com/rust-lang/crates.io-index)" = "f58856976b776fedd95533137617a02fb25719f40e7d9b01c7043cd65474f450"
"checksum itoa 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1306f3464951f30e30d12373d31c79fbd52d236e5e896fd92f96ec7babbbe60b"
"checksum join_to_string 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7bddc885f3fd69dd4b5d747c2efe6dd2c36d795ea9938281ed50910e32c95e31"
-"checksum languageserver-types 0.51.0 (registry+https://github.com/rust-lang/crates.io-index)" = "caecadd973c43c93f5ce96fa457da310113d867af28808a8ed74023e9887a39e"
-"checksum lazy_static 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca488b89a5657b0a2ecd45b95609b3e848cf1755da332a0da46e2b2b1cb371a7"
+"checksum languageserver-types 0.51.1 (registry+https://github.com/rust-lang/crates.io-index)" = "68de833188ada4e175d04a028f03f244f6370eedbcc75a05604d47d925933f69"
+"checksum lazy_static 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a374c89b9db55895453a74c1e38861d9deec0b01b405a82516e9d5de4820dea1"
"checksum libc 0.2.43 (registry+https://github.com/rust-lang/crates.io-index)" = "76e3a3ef172f1a0b9a9ff0dd1491ae5e6c948b94479a3021819ba7d860c8645d"
"checksum lock_api 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "775751a3e69bde4df9b38dd00a1b5d6ac13791e4223d4a0506577f0dd27cfb7a"
"checksum log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c84ec4b527950aa83a329754b01dbe3f58361d1c5efacd1f6d68c494d08a17c6"
"checksum rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e464cd887e869cddcae8792a4ee31d23c7edd516700695608f5b98c67ee0131c"
"checksum rand_core 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "1961a422c4d189dfb50ffa9320bf1f2a9bd54ecb92792fb9477f99a1045f3372"
"checksum rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0905b6b7079ec73b314d4c748701f6931eb79fd97c668caa3f1899b22b32c6db"
-"checksum rayon 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "df7a791f788cb4c516f0e091301a29c2b71ef680db5e644a7d68835c8ae6dbfa"
+"checksum rayon 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "373814f27745b2686b350dd261bfd24576a6fb0e2c5919b3a2b6005f820b0473"
"checksum rayon-core 1.4.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b055d1e92aba6877574d8fe604a63c8b5df60f60e5982bf7ccbb1338ea527356"
"checksum redox_syscall 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)" = "c214e91d3ecf43e9a4e41e578973adeb14b474f2bee858742d127af75a0112b1"
"checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76"
-"checksum regex 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "2069749032ea3ec200ca51e4a31df41759190a88edca0d2d86ee8bedf7073341"
+"checksum regex 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ee84f70c8c08744ea9641a731c7fadb475bf2ecc52d7f627feb833e0b3990467"
"checksum regex-syntax 0.6.2 (registry+https://github.com/rust-lang/crates.io-index)" = "747ba3b235651f6e2f67dfa8bcdcd073ddb7c243cb21c442fc12395dfcac212d"
"checksum relative-path 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0e7790c7f1cc73d831d28dc5a7deb316a006e7848e6a7f467cdb10a0a9e0fb1c"
"checksum remove_dir_all 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3488ba1b9a2084d38645c4c08276a1752dcbf2c7130d74f1569681ad5d2799c5"
"checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
"checksum superslice 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b50b13d42370e0f5fc62eafdd5c2d20065eaf5458dab215ff3e20e63eea96b30"
"checksum syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)" = "261ae9ecaa397c42b960649561949d69311f08eeaea86a65696e6e46517cf741"
-"checksum syn 0.15.17 (registry+https://github.com/rust-lang/crates.io-index)" = "3391038ebc3e4ab24eb028cb0ef2f2dc4ba0cbf72ee895ed6a6fad730640b5bc"
+"checksum syn 0.15.18 (registry+https://github.com/rust-lang/crates.io-index)" = "90c39a061e2f412a9f869540471ab679e85e50c6b05604daf28bc3060f75c430"
"checksum synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "73687139bf99285483c96ac0add482c3776528beac1d97d444f6e91f203a2015"
"checksum tempdir 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8"
"checksum tera 0.11.19 (registry+https://github.com/rust-lang/crates.io-index)" = "6ac6d8ad623a7efcfb4367ce2a36f84ef849d5aa3c7bcf2e0324c4cbcc57ebaf"
"checksum unicode-width 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "882386231c45df4700b275c7ff55b6f3698780a650026380e72dabe76fa46526"
"checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
"checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56"
-"checksum url 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2a321979c09843d272956e73700d12c4e7d3d92b2ee112b31548aef0d4efc5a6"
+"checksum url 1.7.2 (registry+https://github.com/rust-lang/crates.io-index)" = "dd4e7c0d531266369519a4aa4f399d748bd37043b00bde1e4ff1f60a120b355a"
"checksum url_serde 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "74e7d099f1ee52f823d4bdd60c93c3602043c728f5db3b97bdb548467f7bddea"
"checksum utf8-ranges 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "796f7e48bef87609f7ade7e06495a87d5cd06c7866e6a5cbfceffc558a243737"
"checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a"
--- /dev/null
+# Rust Analyzer Roadmap 01
+
+Written on 2018-11-06, extends approximately to February 2019.
+After that, we should coordinate with the compiler/rls developers to align goals and share code and experience.
+
+
+# Overall Goals
+
+The mission is:
+ * Provide an excellent "code analyzed as you type" IDE experience for the Rust language,
+ * Implement the bulk of the features in Rust itself.
+
+
+High-level architecture constraints:
+ * Long-term, replace the current rustc frontend.
+ It's *obvious* that the code should be shared, but OTOH, all great IDEs started as from-scratch rewrites.
+ * Don't hard-code a particular protocol or mode of operation.
+ Produce a library which could be used for implementing an LSP server, or for in-process embedding.
+ * As long as possible, stick with stable Rust (NB: we currently use beta for 2018 edition and salsa).
+
+
+# Current Goals
+
+Ideally, we would be coordinating with the compiler/rls teams, but they are busy working on making Rust 2018 at the moment.
+The sync-up point will happen some time after the edition, probably early 2019.
+In the meantime, the goal is to **experiment**, specifically, to figure out what a from-scratch written RLS might look like.
+
+
+## Data Storage and Protocol implementation
+
+The fundamental part of any architecture is who owns which data, how the data is mutated and how the data is exposed to the user.
+For storage we use the [salsa](http://github.com/salsa-rs/salsa) library, which provides a solid model that seems to be the way to go.
+
+Modification to source files is mostly driven by the language client, but we also should support watching the file system. The current
+file watching implementation is a stub.
+
+**Action Item:** implement reliable file watching service.
+
+We also should extract LSP bits as a reusable library. There's already `gen_lsp_server`, but it is pretty limited.
+
+**Action Item:** try using `gen_lsp_server` in more than one language server, for example for TOML and Nix.
+
+The ideal architecture for `gen_lsp_server` is still unclear. I'd rather avoid futures: they bring significant runtime complexity
+(call stacks become insane) and the performance benefits are negligible for our use case (one thread per request is perfectly OK given
+the low amount of requests a language server receives). The current interface is based on crossbeam-channel, but it's not clear
+if that is the best choice.
+
+
+## Low-effort, high payoff features
+
+Implementing 20% of type inference will give us 80% of completion.
+Thus it makes sense to partially implement name resolution, type inference and trait matching, even though there is a chance that
+this code will be replaced later on when we integrate with the compiler.
+
+Specifically, we need to:
+
+* **Action Item:** implement path resolution, so that we get completion in imports and such.
+* **Action Item:** implement simple type inference, so that we get completion for inherent methods.
+* **Action Item:** implement nicer completion infrastructure, so that we have icons, snippets, doc comments, after insert callbacks, ...
+
+
+## Dragons to kill
+
+To make experiments most effective, we should try to prototype solutions for the hardest problems.
+In the case of Rust, the two hardest problems are:
+ * Conditional compilation and source/model mismatch.
+ A single source file might correspond to several entities in the semantic model.
+ For example, different cfg flags produce effectively different crates from the same source.
+ * Macros are intertwined with name resolution in a single fix-point iteration algorithm.
+ This is just plain hard to implement, but also interacts poorly with on-demand.
+
+
+For the first bullet point, we need to design descriptors infra and explicit mapping step between sources and semantic model, which is intentionally fuzzy in one direction.
+The **action item** here is basically "write code, see what works, keep high-level picture in mind".
+
+For the second bullet point, there's hope that salsa with its deep memoization will result in a fast enough solution even without being fully on-demand.
+Again, the **action item** is to write the code and see what works. Salsa itself uses macros heavily, so it should be a great test.
-//! A language server scaffold, exposing synchroneous crossbeam-channel based API.
+//! A language server scaffold, exposing a synchronous crossbeam-channel based API.
//! This crate handles protocol handshaking and parsing messages, while you
//! control the message dispatch loop yourself.
//!
+++ /dev/null
-use ra_editor::find_node_at_offset;
-use ra_syntax::{
- algo::visit::{visitor, visitor_ctx, Visitor, VisitorCtx},
- ast::{self, AstChildren, LoopBodyOwner, ModuleItemOwner},
- AstNode, AtomEdit, File,
- SyntaxKind::*,
- SyntaxNodeRef, TextUnit,
-};
-use rustc_hash::{FxHashMap, FxHashSet};
-
-use crate::{
- db::{self, SyntaxDatabase},
- descriptors::function::FnScopes,
- descriptors::module::{ModuleId, ModuleScope, ModuleTree, ModuleSource},
- descriptors::DescriptorDatabase,
- input::FilesDatabase,
- Cancelable, FilePosition,
-};
-
-#[derive(Debug)]
-pub struct CompletionItem {
- /// What user sees in pop-up
- pub label: String,
- /// What string is used for filtering, defaults to label
- pub lookup: Option<String>,
- /// What is inserted, defaults to label
- pub snippet: Option<String>,
-}
-
-pub(crate) fn resolve_based_completion(
- db: &db::RootDatabase,
- position: FilePosition,
-) -> Cancelable<Option<Vec<CompletionItem>>> {
- let source_root_id = db.file_source_root(position.file_id);
- let file = db.file_syntax(position.file_id);
- let module_tree = db.module_tree(source_root_id)?;
- let module_id = match module_tree.any_module_for_source(ModuleSource::File(position.file_id)) {
- None => return Ok(None),
- Some(it) => it,
- };
- let file = {
- let edit = AtomEdit::insert(position.offset, "intellijRulezz".to_string());
- file.reparse(&edit)
- };
- let target_module_id = match find_target_module(&module_tree, module_id, &file, position.offset)
- {
- None => return Ok(None),
- Some(it) => it,
- };
- let module_scope = db.module_scope(source_root_id, target_module_id)?;
- let res: Vec<_> = module_scope
- .entries()
- .iter()
- .map(|entry| CompletionItem {
- label: entry.name().to_string(),
- lookup: None,
- snippet: None,
- })
- .collect();
- Ok(Some(res))
-}
-
-pub(crate) fn find_target_module(
- module_tree: &ModuleTree,
- module_id: ModuleId,
- file: &File,
- offset: TextUnit,
-) -> Option<ModuleId> {
- let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), offset)?;
- let mut crate_path = crate_path(name_ref)?;
-
- crate_path.pop();
- let mut target_module = module_id.root(&module_tree);
- for name in crate_path {
- target_module = target_module.child(module_tree, name.text().as_str())?;
- }
- Some(target_module)
-}
-
-fn crate_path(name_ref: ast::NameRef) -> Option<Vec<ast::NameRef>> {
- let mut path = name_ref
- .syntax()
- .parent()
- .and_then(ast::PathSegment::cast)?
- .parent_path();
- let mut res = Vec::new();
- loop {
- let segment = path.segment()?;
- match segment.kind()? {
- ast::PathSegmentKind::Name(name) => res.push(name),
- ast::PathSegmentKind::CrateKw => break,
- ast::PathSegmentKind::SelfKw | ast::PathSegmentKind::SuperKw => return None,
- }
- path = path.qualifier()?;
- }
- res.reverse();
- Some(res)
-}
-
-pub(crate) fn scope_completion(
- db: &db::RootDatabase,
- position: FilePosition,
-) -> Option<Vec<CompletionItem>> {
- let original_file = db.file_syntax(position.file_id);
- // Insert a fake ident to get a valid parse tree
- let file = {
- let edit = AtomEdit::insert(position.offset, "intellijRulezz".to_string());
- original_file.reparse(&edit)
- };
- let mut has_completions = false;
- let mut res = Vec::new();
- if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset) {
- has_completions = true;
- complete_name_ref(&file, name_ref, &mut res);
- // special case, `trait T { fn foo(i_am_a_name_ref) {} }`
- if is_node::<ast::Param>(name_ref.syntax()) {
- param_completions(name_ref.syntax(), &mut res);
- }
- let name_range = name_ref.syntax().range();
- let top_node = name_ref
- .syntax()
- .ancestors()
- .take_while(|it| it.range() == name_range)
- .last()
- .unwrap();
- match top_node.parent().map(|it| it.kind()) {
- Some(ROOT) | Some(ITEM_LIST) => complete_mod_item_snippets(&mut res),
- _ => (),
- }
- }
- if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), position.offset) {
- if is_node::<ast::Param>(name.syntax()) {
- has_completions = true;
- param_completions(name.syntax(), &mut res);
- }
- }
- if has_completions {
- Some(res)
- } else {
- None
- }
-}
-
-fn complete_module_items(
- file: &File,
- items: AstChildren<ast::ModuleItem>,
- this_item: Option<ast::NameRef>,
- acc: &mut Vec<CompletionItem>,
-) {
- let scope = ModuleScope::new(items); // FIXME
- acc.extend(
- scope
- .entries()
- .iter()
- .filter(|entry| {
- let syntax = entry.ptr().resolve(file);
- Some(syntax.borrowed()) != this_item.map(|it| it.syntax())
- })
- .map(|entry| CompletionItem {
- label: entry.name().to_string(),
- lookup: None,
- snippet: None,
- }),
- );
-}
-
-fn complete_name_ref(file: &File, name_ref: ast::NameRef, acc: &mut Vec<CompletionItem>) {
- if !is_node::<ast::Path>(name_ref.syntax()) {
- return;
- }
- let mut visited_fn = false;
- for node in name_ref.syntax().ancestors() {
- if let Some(items) = visitor()
- .visit::<ast::Root, _>(|it| Some(it.items()))
- .visit::<ast::Module, _>(|it| Some(it.item_list()?.items()))
- .accept(node)
- {
- if let Some(items) = items {
- complete_module_items(file, items, Some(name_ref), acc);
- }
- break;
- } else if !visited_fn {
- if let Some(fn_def) = ast::FnDef::cast(node) {
- visited_fn = true;
- complete_expr_keywords(&file, fn_def, name_ref, acc);
- complete_expr_snippets(acc);
- let scopes = FnScopes::new(fn_def);
- complete_fn(name_ref, &scopes, acc);
- }
- }
- }
-}
-
-fn param_completions(ctx: SyntaxNodeRef, acc: &mut Vec<CompletionItem>) {
- let mut params = FxHashMap::default();
- for node in ctx.ancestors() {
- let _ = visitor_ctx(&mut params)
- .visit::<ast::Root, _>(process)
- .visit::<ast::ItemList, _>(process)
- .accept(node);
- }
- params
- .into_iter()
- .filter_map(|(label, (count, param))| {
- let lookup = param.pat()?.syntax().text().to_string();
- if count < 2 {
- None
- } else {
- Some((label, lookup))
- }
- })
- .for_each(|(label, lookup)| {
- acc.push(CompletionItem {
- label,
- lookup: Some(lookup),
- snippet: None,
- })
- });
-
- fn process<'a, N: ast::FnDefOwner<'a>>(
- node: N,
- params: &mut FxHashMap<String, (u32, ast::Param<'a>)>,
- ) {
- node.functions()
- .filter_map(|it| it.param_list())
- .flat_map(|it| it.params())
- .for_each(|param| {
- let text = param.syntax().text().to_string();
- params.entry(text).or_insert((0, param)).0 += 1;
- })
- }
-}
-
-fn is_node<'a, N: AstNode<'a>>(node: SyntaxNodeRef<'a>) -> bool {
- match node.ancestors().filter_map(N::cast).next() {
- None => false,
- Some(n) => n.syntax().range() == node.range(),
- }
-}
-
-fn complete_expr_keywords(
- file: &File,
- fn_def: ast::FnDef,
- name_ref: ast::NameRef,
- acc: &mut Vec<CompletionItem>,
-) {
- acc.push(keyword("if", "if $0 {}"));
- acc.push(keyword("match", "match $0 {}"));
- acc.push(keyword("while", "while $0 {}"));
- acc.push(keyword("loop", "loop {$0}"));
-
- if let Some(off) = name_ref.syntax().range().start().checked_sub(2.into()) {
- if let Some(if_expr) = find_node_at_offset::<ast::IfExpr>(file.syntax(), off) {
- if if_expr.syntax().range().end() < name_ref.syntax().range().start() {
- acc.push(keyword("else", "else {$0}"));
- acc.push(keyword("else if", "else if $0 {}"));
- }
- }
- }
- if is_in_loop_body(name_ref) {
- acc.push(keyword("continue", "continue"));
- acc.push(keyword("break", "break"));
- }
- acc.extend(complete_return(fn_def, name_ref));
-}
-
-fn is_in_loop_body(name_ref: ast::NameRef) -> bool {
- for node in name_ref.syntax().ancestors() {
- if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
- break;
- }
- let loop_body = visitor()
- .visit::<ast::ForExpr, _>(LoopBodyOwner::loop_body)
- .visit::<ast::WhileExpr, _>(LoopBodyOwner::loop_body)
- .visit::<ast::LoopExpr, _>(LoopBodyOwner::loop_body)
- .accept(node);
- if let Some(Some(body)) = loop_body {
- if name_ref
- .syntax()
- .range()
- .is_subrange(&body.syntax().range())
- {
- return true;
- }
- }
- }
- false
-}
-
-fn complete_return(fn_def: ast::FnDef, name_ref: ast::NameRef) -> Option<CompletionItem> {
- // let is_last_in_block = name_ref.syntax().ancestors().filter_map(ast::Expr::cast)
- // .next()
- // .and_then(|it| it.syntax().parent())
- // .and_then(ast::Block::cast)
- // .is_some();
-
- // if is_last_in_block {
- // return None;
- // }
-
- let is_stmt = match name_ref
- .syntax()
- .ancestors()
- .filter_map(ast::ExprStmt::cast)
- .next()
- {
- None => false,
- Some(expr_stmt) => expr_stmt.syntax().range() == name_ref.syntax().range(),
- };
- let snip = match (is_stmt, fn_def.ret_type().is_some()) {
- (true, true) => "return $0;",
- (true, false) => "return;",
- (false, true) => "return $0",
- (false, false) => "return",
- };
- Some(keyword("return", snip))
-}
-
-fn keyword(kw: &str, snip: &str) -> CompletionItem {
- CompletionItem {
- label: kw.to_string(),
- lookup: None,
- snippet: Some(snip.to_string()),
- }
-}
-
-fn complete_expr_snippets(acc: &mut Vec<CompletionItem>) {
- acc.push(CompletionItem {
- label: "pd".to_string(),
- lookup: None,
- snippet: Some("eprintln!(\"$0 = {:?}\", $0);".to_string()),
- });
- acc.push(CompletionItem {
- label: "ppd".to_string(),
- lookup: None,
- snippet: Some("eprintln!(\"$0 = {:#?}\", $0);".to_string()),
- });
-}
-
-fn complete_mod_item_snippets(acc: &mut Vec<CompletionItem>) {
- acc.push(CompletionItem {
- label: "tfn".to_string(),
- lookup: None,
- snippet: Some("#[test]\nfn $1() {\n $0\n}".to_string()),
- });
- acc.push(CompletionItem {
- label: "pub(crate)".to_string(),
- lookup: None,
- snippet: Some("pub(crate) $0".to_string()),
- })
-}
-
-fn complete_fn(name_ref: ast::NameRef, scopes: &FnScopes, acc: &mut Vec<CompletionItem>) {
- let mut shadowed = FxHashSet::default();
- acc.extend(
- scopes
- .scope_chain(name_ref.syntax())
- .flat_map(|scope| scopes.entries(scope).iter())
- .filter(|entry| shadowed.insert(entry.name()))
- .map(|entry| CompletionItem {
- label: entry.name().to_string(),
- lookup: None,
- snippet: None,
- }),
- );
- if scopes.self_param.is_some() {
- acc.push(CompletionItem {
- label: "self".to_string(),
- lookup: None,
- snippet: None,
- })
- }
-}
-
-#[cfg(test)]
-mod tests {
- use test_utils::assert_eq_dbg;
-
- use crate::mock_analysis::single_file_with_position;
-
- use super::*;
-
- fn check_scope_completion(code: &str, expected_completions: &str) {
- let (analysis, position) = single_file_with_position(code);
- let completions = scope_completion(&analysis.imp.db, position)
- .unwrap()
- .into_iter()
- .filter(|c| c.snippet.is_none())
- .collect::<Vec<_>>();
- assert_eq_dbg(expected_completions, &completions);
- }
-
- fn check_snippet_completion(code: &str, expected_completions: &str) {
- let (analysis, position) = single_file_with_position(code);
- let completions = scope_completion(&analysis.imp.db, position)
- .unwrap()
- .into_iter()
- .filter(|c| c.snippet.is_some())
- .collect::<Vec<_>>();
- assert_eq_dbg(expected_completions, &completions);
- }
-
- #[test]
- fn test_completion_let_scope() {
- check_scope_completion(
- r"
- fn quux(x: i32) {
- let y = 92;
- 1 + <|>;
- let z = ();
- }
- ",
- r#"[CompletionItem { label: "y", lookup: None, snippet: None },
- CompletionItem { label: "x", lookup: None, snippet: None },
- CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
- );
- }
-
- #[test]
- fn test_completion_if_let_scope() {
- check_scope_completion(
- r"
- fn quux() {
- if let Some(x) = foo() {
- let y = 92;
- };
- if let Some(a) = bar() {
- let b = 62;
- 1 + <|>
- }
- }
- ",
- r#"[CompletionItem { label: "b", lookup: None, snippet: None },
- CompletionItem { label: "a", lookup: None, snippet: None },
- CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
- );
- }
-
- #[test]
- fn test_completion_for_scope() {
- check_scope_completion(
- r"
- fn quux() {
- for x in &[1, 2, 3] {
- <|>
- }
- }
- ",
- r#"[CompletionItem { label: "x", lookup: None, snippet: None },
- CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
- );
- }
-
- #[test]
- fn test_completion_mod_scope() {
- check_scope_completion(
- r"
- struct Foo;
- enum Baz {}
- fn quux() {
- <|>
- }
- ",
- r#"[CompletionItem { label: "Foo", lookup: None, snippet: None },
- CompletionItem { label: "Baz", lookup: None, snippet: None },
- CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
- );
- }
-
- #[test]
- fn test_completion_mod_scope_no_self_use() {
- check_scope_completion(
- r"
- use foo<|>;
- ",
- r#"[]"#,
- );
- }
-
- #[test]
- fn test_completion_mod_scope_nested() {
- check_scope_completion(
- r"
- struct Foo;
- mod m {
- struct Bar;
- fn quux() { <|> }
- }
- ",
- r#"[CompletionItem { label: "Bar", lookup: None, snippet: None },
- CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
- );
- }
-
- #[test]
- fn test_complete_type() {
- check_scope_completion(
- r"
- struct Foo;
- fn x() -> <|>
- ",
- r#"[CompletionItem { label: "Foo", lookup: None, snippet: None },
- CompletionItem { label: "x", lookup: None, snippet: None }]"#,
- )
- }
-
- #[test]
- fn test_complete_shadowing() {
- check_scope_completion(
- r"
- fn foo() -> {
- let bar = 92;
- {
- let bar = 62;
- <|>
- }
- }
- ",
- r#"[CompletionItem { label: "bar", lookup: None, snippet: None },
- CompletionItem { label: "foo", lookup: None, snippet: None }]"#,
- )
- }
-
- #[test]
- fn test_complete_self() {
- check_scope_completion(
- r"
- impl S { fn foo(&self) { <|> } }
- ",
- r#"[CompletionItem { label: "self", lookup: None, snippet: None }]"#,
- )
- }
-
- #[test]
- fn test_completion_kewords() {
- check_snippet_completion(r"
- fn quux() {
- <|>
- }
- ", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
- CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
- CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
- CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
- CompletionItem { label: "return", lookup: None, snippet: Some("return") },
- CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
- CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
- }
-
- #[test]
- fn test_completion_else() {
- check_snippet_completion(r"
- fn quux() {
- if true {
- ()
- } <|>
- }
- ", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
- CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
- CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
- CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
- CompletionItem { label: "else", lookup: None, snippet: Some("else {$0}") },
- CompletionItem { label: "else if", lookup: None, snippet: Some("else if $0 {}") },
- CompletionItem { label: "return", lookup: None, snippet: Some("return") },
- CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
- CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
- }
-
- #[test]
- fn test_completion_return_value() {
- check_snippet_completion(r"
- fn quux() -> i32 {
- <|>
- 92
- }
- ", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
- CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
- CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
- CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
- CompletionItem { label: "return", lookup: None, snippet: Some("return $0;") },
- CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
- CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
- check_snippet_completion(r"
- fn quux() {
- <|>
- 92
- }
- ", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
- CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
- CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
- CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
- CompletionItem { label: "return", lookup: None, snippet: Some("return;") },
- CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
- CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
- }
-
- #[test]
- fn test_completion_return_no_stmt() {
- check_snippet_completion(r"
- fn quux() -> i32 {
- match () {
- () => <|>
- }
- }
- ", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
- CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
- CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
- CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
- CompletionItem { label: "return", lookup: None, snippet: Some("return $0") },
- CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
- CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
- }
-
- #[test]
- fn test_continue_break_completion() {
- check_snippet_completion(r"
- fn quux() -> i32 {
- loop { <|> }
- }
- ", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
- CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
- CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
- CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
- CompletionItem { label: "continue", lookup: None, snippet: Some("continue") },
- CompletionItem { label: "break", lookup: None, snippet: Some("break") },
- CompletionItem { label: "return", lookup: None, snippet: Some("return $0") },
- CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
- CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
- check_snippet_completion(r"
- fn quux() -> i32 {
- loop { || { <|> } }
- }
- ", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
- CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
- CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
- CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
- CompletionItem { label: "return", lookup: None, snippet: Some("return $0") },
- CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
- CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
- }
-
- #[test]
- fn test_param_completion_last_param() {
- check_scope_completion(r"
- fn foo(file_id: FileId) {}
- fn bar(file_id: FileId) {}
- fn baz(file<|>) {}
- ", r#"[CompletionItem { label: "file_id: FileId", lookup: Some("file_id"), snippet: None }]"#);
- }
-
- #[test]
- fn test_param_completion_nth_param() {
- check_scope_completion(r"
- fn foo(file_id: FileId) {}
- fn bar(file_id: FileId) {}
- fn baz(file<|>, x: i32) {}
- ", r#"[CompletionItem { label: "file_id: FileId", lookup: Some("file_id"), snippet: None }]"#);
- }
-
- #[test]
- fn test_param_completion_trait_param() {
- check_scope_completion(r"
- pub(crate) trait SourceRoot {
- pub fn contains(&self, file_id: FileId) -> bool;
- pub fn module_map(&self) -> &ModuleMap;
- pub fn lines(&self, file_id: FileId) -> &LineIndex;
- pub fn syntax(&self, file<|>)
- }
- ", r#"[CompletionItem { label: "self", lookup: None, snippet: None },
- CompletionItem { label: "SourceRoot", lookup: None, snippet: None },
- CompletionItem { label: "file_id: FileId", lookup: Some("file_id"), snippet: None }]"#);
- }
-
- #[test]
- fn test_item_snippets() {
- // check_snippet_completion(r"
- // <|>
- // ",
- // r##"[CompletionItem { label: "tfn", lookup: None, snippet: Some("#[test]\nfn $1() {\n $0\n}") }]"##,
- // );
- check_snippet_completion(r"
- #[cfg(test)]
- mod tests {
- <|>
- }
- ",
- r##"[CompletionItem { label: "tfn", lookup: None, snippet: Some("#[test]\nfn $1() {\n $0\n}") },
- CompletionItem { label: "pub(crate)", lookup: None, snippet: Some("pub(crate) $0") }]"##,
- );
- }
-}
--- /dev/null
+mod reference_completion;
+
+use ra_editor::find_node_at_offset;
+use ra_syntax::{
+ algo::find_leaf_at_offset,
+ algo::visit::{visitor_ctx, VisitorCtx},
+ ast,
+ AstNode, AtomEdit,
+ SyntaxNodeRef,
+};
+use rustc_hash::{FxHashMap};
+
+use crate::{
+ db::{self, SyntaxDatabase},
+ descriptors::{DescriptorDatabase, module::ModuleSource},
+ input::{FilesDatabase},
+ Cancelable, FilePosition
+};
+
+#[derive(Debug)]
+pub struct CompletionItem {
+ /// What user sees in pop-up
+ pub label: String,
+ /// What string is used for filtering, defaults to label
+ pub lookup: Option<String>,
+ /// What is inserted, defaults to label
+ pub snippet: Option<String>,
+}
+
+pub(crate) fn completions(
+ db: &db::RootDatabase,
+ position: FilePosition,
+) -> Cancelable<Option<Vec<CompletionItem>>> {
+ let original_file = db.file_syntax(position.file_id);
+ // Insert a fake ident to get a valid parse tree
+ let file = {
+ let edit = AtomEdit::insert(position.offset, "intellijRulezz".to_string());
+ original_file.reparse(&edit)
+ };
+
+ let leaf = match find_leaf_at_offset(original_file.syntax(), position.offset).left_biased() {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+ let source_root_id = db.file_source_root(position.file_id);
+ let module_tree = db.module_tree(source_root_id)?;
+ let module_source = ModuleSource::for_node(position.file_id, leaf);
+ let module_id = match module_tree.any_module_for_source(module_source) {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+
+ let mut res = Vec::new();
+ let mut has_completions = false;
+ // First, let's try to complete a reference to some declaration.
+ if let Some(name_ref) = find_node_at_offset::<ast::NameRef>(file.syntax(), position.offset) {
+ has_completions = true;
+ reference_completion::completions(
+ &mut res,
+ db,
+ source_root_id,
+ &module_tree,
+ module_id,
+ &file,
+ name_ref,
+ )?;
+ // special case, `trait T { fn foo(i_am_a_name_ref) {} }`
+ if is_node::<ast::Param>(name_ref.syntax()) {
+ param_completions(name_ref.syntax(), &mut res);
+ }
+ }
+
+ // Otherwise, if this is a declaration, use heuristics to suggest a name.
+ if let Some(name) = find_node_at_offset::<ast::Name>(file.syntax(), position.offset) {
+ if is_node::<ast::Param>(name.syntax()) {
+ has_completions = true;
+ param_completions(name.syntax(), &mut res);
+ }
+ }
+ let res = if has_completions { Some(res) } else { None };
+ Ok(res)
+}
+
+fn param_completions(ctx: SyntaxNodeRef, acc: &mut Vec<CompletionItem>) {
+ let mut params = FxHashMap::default();
+ for node in ctx.ancestors() {
+ let _ = visitor_ctx(&mut params)
+ .visit::<ast::SourceFile, _>(process)
+ .visit::<ast::ItemList, _>(process)
+ .accept(node);
+ }
+ params
+ .into_iter()
+ .filter_map(|(label, (count, param))| {
+ let lookup = param.pat()?.syntax().text().to_string();
+ if count < 2 {
+ None
+ } else {
+ Some((label, lookup))
+ }
+ })
+ .for_each(|(label, lookup)| {
+ acc.push(CompletionItem {
+ label,
+ lookup: Some(lookup),
+ snippet: None,
+ })
+ });
+
+ fn process<'a, N: ast::FnDefOwner<'a>>(
+ node: N,
+ params: &mut FxHashMap<String, (u32, ast::Param<'a>)>,
+ ) {
+ node.functions()
+ .filter_map(|it| it.param_list())
+ .flat_map(|it| it.params())
+ .for_each(|param| {
+ let text = param.syntax().text().to_string();
+ params.entry(text).or_insert((0, param)).0 += 1;
+ })
+ }
+}
+
+fn is_node<'a, N: AstNode<'a>>(node: SyntaxNodeRef<'a>) -> bool {
+ match node.ancestors().filter_map(N::cast).next() {
+ None => false,
+ Some(n) => n.syntax().range() == node.range(),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use test_utils::assert_eq_dbg;
+
+ use crate::mock_analysis::single_file_with_position;
+
+ use super::*;
+
+ fn check_scope_completion(code: &str, expected_completions: &str) {
+ let (analysis, position) = single_file_with_position(code);
+ let completions = completions(&analysis.imp.db, position)
+ .unwrap()
+ .unwrap()
+ .into_iter()
+ .filter(|c| c.snippet.is_none())
+ .collect::<Vec<_>>();
+ assert_eq_dbg(expected_completions, &completions);
+ }
+
+ fn check_snippet_completion(code: &str, expected_completions: &str) {
+ let (analysis, position) = single_file_with_position(code);
+ let completions = completions(&analysis.imp.db, position)
+ .unwrap()
+ .unwrap()
+ .into_iter()
+ .filter(|c| c.snippet.is_some())
+ .collect::<Vec<_>>();
+ assert_eq_dbg(expected_completions, &completions);
+ }
+
+ #[test]
+ fn test_completion_let_scope() {
+ check_scope_completion(
+ r"
+ fn quux(x: i32) {
+ let y = 92;
+ 1 + <|>;
+ let z = ();
+ }
+ ",
+ r#"[CompletionItem { label: "y", lookup: None, snippet: None },
+ CompletionItem { label: "x", lookup: None, snippet: None },
+ CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
+ );
+ }
+
+ #[test]
+ fn test_completion_if_let_scope() {
+ check_scope_completion(
+ r"
+ fn quux() {
+ if let Some(x) = foo() {
+ let y = 92;
+ };
+ if let Some(a) = bar() {
+ let b = 62;
+ 1 + <|>
+ }
+ }
+ ",
+ r#"[CompletionItem { label: "b", lookup: None, snippet: None },
+ CompletionItem { label: "a", lookup: None, snippet: None },
+ CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
+ );
+ }
+
+ #[test]
+ fn test_completion_for_scope() {
+ check_scope_completion(
+ r"
+ fn quux() {
+ for x in &[1, 2, 3] {
+ <|>
+ }
+ }
+ ",
+ r#"[CompletionItem { label: "x", lookup: None, snippet: None },
+ CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
+ );
+ }
+
+ #[test]
+ fn test_completion_mod_scope() {
+ check_scope_completion(
+ r"
+ struct Foo;
+ enum Baz {}
+ fn quux() {
+ <|>
+ }
+ ",
+ r#"[CompletionItem { label: "Foo", lookup: None, snippet: None },
+ CompletionItem { label: "Baz", lookup: None, snippet: None },
+ CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
+ );
+ }
+
+ #[test]
+ fn test_completion_mod_scope_no_self_use() {
+ check_scope_completion(
+ r"
+ use foo<|>;
+ ",
+ r#"[]"#,
+ );
+ }
+
+ #[test]
+ fn test_completion_mod_scope_nested() {
+ check_scope_completion(
+ r"
+ struct Foo;
+ mod m {
+ struct Bar;
+ fn quux() { <|> }
+ }
+ ",
+ r#"[CompletionItem { label: "Bar", lookup: None, snippet: None },
+ CompletionItem { label: "quux", lookup: None, snippet: None }]"#,
+ );
+ }
+
+ #[test]
+ fn test_complete_type() {
+ check_scope_completion(
+ r"
+ struct Foo;
+ fn x() -> <|>
+ ",
+ r#"[CompletionItem { label: "Foo", lookup: None, snippet: None },
+ CompletionItem { label: "x", lookup: None, snippet: None }]"#,
+ )
+ }
+
+ #[test]
+ fn test_complete_shadowing() {
+ check_scope_completion(
+ r"
+ fn foo() -> {
+ let bar = 92;
+ {
+ let bar = 62;
+ <|>
+ }
+ }
+ ",
+ r#"[CompletionItem { label: "bar", lookup: None, snippet: None },
+ CompletionItem { label: "foo", lookup: None, snippet: None }]"#,
+ )
+ }
+
+ #[test]
+ fn test_complete_self() {
+ check_scope_completion(
+ r"
+ impl S { fn foo(&self) { <|> } }
+ ",
+ r#"[CompletionItem { label: "self", lookup: None, snippet: None }]"#,
+ )
+ }
+
+ #[test]
+ fn test_completion_kewords() {
+ check_snippet_completion(r"
+ fn quux() {
+ <|>
+ }
+ ", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
+ CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
+ CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
+ CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
+ CompletionItem { label: "return", lookup: None, snippet: Some("return") },
+ CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
+ CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
+ }
+
+ #[test]
+ fn test_completion_else() {
+ check_snippet_completion(r"
+ fn quux() {
+ if true {
+ ()
+ } <|>
+ }
+ ", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
+ CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
+ CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
+ CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
+ CompletionItem { label: "else", lookup: None, snippet: Some("else {$0}") },
+ CompletionItem { label: "else if", lookup: None, snippet: Some("else if $0 {}") },
+ CompletionItem { label: "return", lookup: None, snippet: Some("return") },
+ CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
+ CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
+ }
+
+ #[test]
+ fn test_completion_return_value() {
+ check_snippet_completion(r"
+ fn quux() -> i32 {
+ <|>
+ 92
+ }
+ ", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
+ CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
+ CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
+ CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
+ CompletionItem { label: "return", lookup: None, snippet: Some("return $0;") },
+ CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
+ CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
+ check_snippet_completion(r"
+ fn quux() {
+ <|>
+ 92
+ }
+ ", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
+ CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
+ CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
+ CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
+ CompletionItem { label: "return", lookup: None, snippet: Some("return;") },
+ CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
+ CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
+ }
+
+ #[test]
+ fn test_completion_return_no_stmt() {
+ check_snippet_completion(r"
+ fn quux() -> i32 {
+ match () {
+ () => <|>
+ }
+ }
+ ", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
+ CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
+ CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
+ CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
+ CompletionItem { label: "return", lookup: None, snippet: Some("return $0") },
+ CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
+ CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
+ }
+
+ #[test]
+ fn test_continue_break_completion() {
+ check_snippet_completion(r"
+ fn quux() -> i32 {
+ loop { <|> }
+ }
+ ", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
+ CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
+ CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
+ CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
+ CompletionItem { label: "continue", lookup: None, snippet: Some("continue") },
+ CompletionItem { label: "break", lookup: None, snippet: Some("break") },
+ CompletionItem { label: "return", lookup: None, snippet: Some("return $0") },
+ CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
+ CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
+ check_snippet_completion(r"
+ fn quux() -> i32 {
+ loop { || { <|> } }
+ }
+ ", r#"[CompletionItem { label: "if", lookup: None, snippet: Some("if $0 {}") },
+ CompletionItem { label: "match", lookup: None, snippet: Some("match $0 {}") },
+ CompletionItem { label: "while", lookup: None, snippet: Some("while $0 {}") },
+ CompletionItem { label: "loop", lookup: None, snippet: Some("loop {$0}") },
+ CompletionItem { label: "return", lookup: None, snippet: Some("return $0") },
+ CompletionItem { label: "pd", lookup: None, snippet: Some("eprintln!(\"$0 = {:?}\", $0);") },
+ CompletionItem { label: "ppd", lookup: None, snippet: Some("eprintln!(\"$0 = {:#?}\", $0);") }]"#);
+ }
+
+ #[test]
+ fn test_param_completion_last_param() {
+ check_scope_completion(r"
+ fn foo(file_id: FileId) {}
+ fn bar(file_id: FileId) {}
+ fn baz(file<|>) {}
+ ", r#"[CompletionItem { label: "file_id: FileId", lookup: Some("file_id"), snippet: None }]"#);
+ }
+
+ #[test]
+ fn test_param_completion_nth_param() {
+ check_scope_completion(r"
+ fn foo(file_id: FileId) {}
+ fn bar(file_id: FileId) {}
+ fn baz(file<|>, x: i32) {}
+ ", r#"[CompletionItem { label: "file_id: FileId", lookup: Some("file_id"), snippet: None }]"#);
+ }
+
+ #[test]
+ fn test_param_completion_trait_param() {
+ check_scope_completion(r"
+ pub(crate) trait SourceRoot {
+ pub fn contains(&self, file_id: FileId) -> bool;
+ pub fn module_map(&self) -> &ModuleMap;
+ pub fn lines(&self, file_id: FileId) -> &LineIndex;
+ pub fn syntax(&self, file<|>)
+ }
+ ", r#"[CompletionItem { label: "self", lookup: None, snippet: None },
+ CompletionItem { label: "SourceRoot", lookup: None, snippet: None },
+ CompletionItem { label: "file_id: FileId", lookup: Some("file_id"), snippet: None }]"#);
+ }
+
+ #[test]
+ fn test_item_snippets() {
+ // check_snippet_completion(r"
+ // <|>
+ // ",
+ // r##"[CompletionItem { label: "tfn", lookup: None, snippet: Some("#[test]\nfn $1() {\n $0\n}") }]"##,
+ // );
+ check_snippet_completion(r"
+ #[cfg(test)]
+ mod tests {
+ <|>
+ }
+ ",
+ r##"[CompletionItem { label: "tfn", lookup: None, snippet: Some("#[test]\nfn $1() {\n $0\n}") },
+ CompletionItem { label: "pub(crate)", lookup: None, snippet: Some("pub(crate) $0") }]"##,
+ );
+ }
+}
--- /dev/null
+use rustc_hash::{FxHashSet};
+use ra_editor::find_node_at_offset;
+use ra_syntax::{
+ algo::visit::{visitor, Visitor},
+ SourceFileNode, AstNode,
+ ast::{self, LoopBodyOwner},
+ SyntaxKind::*,
+};
+
+use crate::{
+ db::RootDatabase,
+ input::{SourceRootId},
+ completion::CompletionItem,
+ descriptors::module::{ModuleId, ModuleTree},
+ descriptors::function::FnScopes,
+ descriptors::DescriptorDatabase,
+ Cancelable
+};
+
+pub(super) fn completions(
+ acc: &mut Vec<CompletionItem>,
+ db: &RootDatabase,
+ source_root_id: SourceRootId,
+ module_tree: &ModuleTree,
+ module_id: ModuleId,
+ file: &SourceFileNode,
+ name_ref: ast::NameRef,
+) -> Cancelable<()> {
+ let kind = match classify_name_ref(name_ref) {
+ Some(it) => it,
+ None => return Ok(()),
+ };
+
+ match kind {
+ NameRefKind::LocalRef { enclosing_fn } => {
+ if let Some(fn_def) = enclosing_fn {
+ let scopes = FnScopes::new(fn_def);
+ complete_fn(name_ref, &scopes, acc);
+ complete_expr_keywords(&file, fn_def, name_ref, acc);
+ complete_expr_snippets(acc);
+ }
+
+ let module_scope = db.module_scope(source_root_id, module_id)?;
+ acc.extend(
+ module_scope
+ .entries()
+ .iter()
+ .filter(|entry| {
+ // Don't expose this item
+ !entry.ptr().range().is_subrange(&name_ref.syntax().range())
+ })
+ .map(|entry| CompletionItem {
+ label: entry.name().to_string(),
+ lookup: None,
+ snippet: None,
+ }),
+ );
+ }
+ NameRefKind::CratePath(path) => {
+ complete_path(acc, db, source_root_id, module_tree, module_id, path)?
+ }
+ NameRefKind::BareIdentInMod => {
+ let name_range = name_ref.syntax().range();
+ let top_node = name_ref
+ .syntax()
+ .ancestors()
+ .take_while(|it| it.range() == name_range)
+ .last()
+ .unwrap();
+ match top_node.parent().map(|it| it.kind()) {
+ Some(SOURCE_FILE) | Some(ITEM_LIST) => complete_mod_item_snippets(acc),
+ _ => (),
+ }
+ }
+ }
+ Ok(())
+}
+
+enum NameRefKind<'a> {
+ /// NameRef is a part of single-segment path, for example, a reference to a
+ /// local variable.
+ LocalRef {
+ enclosing_fn: Option<ast::FnDef<'a>>,
+ },
+ /// NameRef is the last segment in crate:: path
+ CratePath(Vec<ast::NameRef<'a>>),
+ /// NameRef is a bare identifier at the module's root.
+ /// Used for keyword completion.
+ BareIdentInMod,
+}
+
+fn classify_name_ref(name_ref: ast::NameRef) -> Option<NameRefKind> {
+ let name_range = name_ref.syntax().range();
+ let top_node = name_ref
+ .syntax()
+ .ancestors()
+ .take_while(|it| it.range() == name_range)
+ .last()
+ .unwrap();
+ match top_node.parent().map(|it| it.kind()) {
+ Some(SOURCE_FILE) | Some(ITEM_LIST) => return Some(NameRefKind::BareIdentInMod),
+ _ => (),
+ }
+
+ let parent = name_ref.syntax().parent()?;
+ if let Some(segment) = ast::PathSegment::cast(parent) {
+ let path = segment.parent_path();
+ if let Some(crate_path) = crate_path(path) {
+ return Some(NameRefKind::CratePath(crate_path));
+ }
+ if path.qualifier().is_none() {
+ let enclosing_fn = name_ref
+ .syntax()
+ .ancestors()
+ .take_while(|it| it.kind() != SOURCE_FILE && it.kind() != MODULE)
+ .find_map(ast::FnDef::cast);
+ return Some(NameRefKind::LocalRef { enclosing_fn });
+ }
+ }
+ None
+}
+
+fn crate_path(mut path: ast::Path) -> Option<Vec<ast::NameRef>> {
+ let mut res = Vec::new();
+ loop {
+ let segment = path.segment()?;
+ match segment.kind()? {
+ ast::PathSegmentKind::Name(name) => res.push(name),
+ ast::PathSegmentKind::CrateKw => break,
+ ast::PathSegmentKind::SelfKw | ast::PathSegmentKind::SuperKw => return None,
+ }
+ path = qualifier(path)?;
+ }
+ res.reverse();
+ return Some(res);
+
+ fn qualifier(path: ast::Path) -> Option<ast::Path> {
+ if let Some(q) = path.qualifier() {
+ return Some(q);
+ }
+ // TODO: this bottom-up traversal is not too precise.
+ // Should we instead do a top-down analysis, recording the results?
+ let use_tree_list = path.syntax().ancestors().find_map(ast::UseTreeList::cast)?;
+ let use_tree = use_tree_list.parent_use_tree();
+ use_tree.path()
+ }
+}
+
+fn complete_fn(name_ref: ast::NameRef, scopes: &FnScopes, acc: &mut Vec<CompletionItem>) {
+ let mut shadowed = FxHashSet::default();
+ acc.extend(
+ scopes
+ .scope_chain(name_ref.syntax())
+ .flat_map(|scope| scopes.entries(scope).iter())
+ .filter(|entry| shadowed.insert(entry.name()))
+ .map(|entry| CompletionItem {
+ label: entry.name().to_string(),
+ lookup: None,
+ snippet: None,
+ }),
+ );
+ if scopes.self_param.is_some() {
+ acc.push(CompletionItem {
+ label: "self".to_string(),
+ lookup: None,
+ snippet: None,
+ })
+ }
+}
+
+fn complete_path(
+ acc: &mut Vec<CompletionItem>,
+ db: &RootDatabase,
+ source_root_id: SourceRootId,
+ module_tree: &ModuleTree,
+ module_id: ModuleId,
+ crate_path: Vec<ast::NameRef>,
+) -> Cancelable<()> {
+ let target_module_id = match find_target_module(module_tree, module_id, crate_path) {
+ None => return Ok(()),
+ Some(it) => it,
+ };
+ let module_scope = db.module_scope(source_root_id, target_module_id)?;
+ let completions = module_scope.entries().iter().map(|entry| CompletionItem {
+ label: entry.name().to_string(),
+ lookup: None,
+ snippet: None,
+ });
+ acc.extend(completions);
+ Ok(())
+}
+
+fn find_target_module(
+ module_tree: &ModuleTree,
+ module_id: ModuleId,
+ mut crate_path: Vec<ast::NameRef>,
+) -> Option<ModuleId> {
+ crate_path.pop();
+ let mut target_module = module_id.root(&module_tree);
+ for name in crate_path {
+ target_module = target_module.child(module_tree, name.text().as_str())?;
+ }
+ Some(target_module)
+}
+
+fn complete_mod_item_snippets(acc: &mut Vec<CompletionItem>) {
+ acc.push(CompletionItem {
+ label: "tfn".to_string(),
+ lookup: None,
+ snippet: Some("#[test]\nfn $1() {\n $0\n}".to_string()),
+ });
+ acc.push(CompletionItem {
+ label: "pub(crate)".to_string(),
+ lookup: None,
+ snippet: Some("pub(crate) $0".to_string()),
+ })
+}
+
+fn complete_expr_keywords(
+ file: &SourceFileNode,
+ fn_def: ast::FnDef,
+ name_ref: ast::NameRef,
+ acc: &mut Vec<CompletionItem>,
+) {
+ acc.push(keyword("if", "if $0 {}"));
+ acc.push(keyword("match", "match $0 {}"));
+ acc.push(keyword("while", "while $0 {}"));
+ acc.push(keyword("loop", "loop {$0}"));
+
+ if let Some(off) = name_ref.syntax().range().start().checked_sub(2.into()) {
+ if let Some(if_expr) = find_node_at_offset::<ast::IfExpr>(file.syntax(), off) {
+ if if_expr.syntax().range().end() < name_ref.syntax().range().start() {
+ acc.push(keyword("else", "else {$0}"));
+ acc.push(keyword("else if", "else if $0 {}"));
+ }
+ }
+ }
+ if is_in_loop_body(name_ref) {
+ acc.push(keyword("continue", "continue"));
+ acc.push(keyword("break", "break"));
+ }
+ acc.extend(complete_return(fn_def, name_ref));
+}
+
+fn is_in_loop_body(name_ref: ast::NameRef) -> bool {
+ for node in name_ref.syntax().ancestors() {
+ if node.kind() == FN_DEF || node.kind() == LAMBDA_EXPR {
+ break;
+ }
+ let loop_body = visitor()
+ .visit::<ast::ForExpr, _>(LoopBodyOwner::loop_body)
+ .visit::<ast::WhileExpr, _>(LoopBodyOwner::loop_body)
+ .visit::<ast::LoopExpr, _>(LoopBodyOwner::loop_body)
+ .accept(node);
+ if let Some(Some(body)) = loop_body {
+ if name_ref
+ .syntax()
+ .range()
+ .is_subrange(&body.syntax().range())
+ {
+ return true;
+ }
+ }
+ }
+ false
+}
+
+fn complete_return(fn_def: ast::FnDef, name_ref: ast::NameRef) -> Option<CompletionItem> {
+ // let is_last_in_block = name_ref.syntax().ancestors().filter_map(ast::Expr::cast)
+ // .next()
+ // .and_then(|it| it.syntax().parent())
+ // .and_then(ast::Block::cast)
+ // .is_some();
+
+ // if is_last_in_block {
+ // return None;
+ // }
+
+ let is_stmt = match name_ref
+ .syntax()
+ .ancestors()
+ .filter_map(ast::ExprStmt::cast)
+ .next()
+ {
+ None => false,
+ Some(expr_stmt) => expr_stmt.syntax().range() == name_ref.syntax().range(),
+ };
+ let snip = match (is_stmt, fn_def.ret_type().is_some()) {
+ (true, true) => "return $0;",
+ (true, false) => "return;",
+ (false, true) => "return $0",
+ (false, false) => "return",
+ };
+ Some(keyword("return", snip))
+}
+
+fn keyword(kw: &str, snip: &str) -> CompletionItem {
+ CompletionItem {
+ label: kw.to_string(),
+ lookup: None,
+ snippet: Some(snip.to_string()),
+ }
+}
+
+fn complete_expr_snippets(acc: &mut Vec<CompletionItem>) {
+ acc.push(CompletionItem {
+ label: "pd".to_string(),
+ lookup: None,
+ snippet: Some("eprintln!(\"$0 = {:?}\", $0);".to_string()),
+ });
+ acc.push(CompletionItem {
+ label: "ppd".to_string(),
+ lookup: None,
+ snippet: Some("eprintln!(\"$0 = {:#?}\", $0);".to_string()),
+ });
+}
use std::sync::Arc;
use ra_editor::LineIndex;
-use ra_syntax::{File, SyntaxNode};
+use ra_syntax::{SourceFileNode, SyntaxNode};
use salsa::{self, Database};
use crate::{
salsa::query_group! {
pub(crate) trait SyntaxDatabase: crate::input::FilesDatabase {
- fn file_syntax(file_id: FileId) -> File {
+ fn file_syntax(file_id: FileId) -> SourceFileNode {
type FileSyntaxQuery;
}
fn file_lines(file_id: FileId) -> Arc<LineIndex> {
}
}
-fn file_syntax(db: &impl SyntaxDatabase, file_id: FileId) -> File {
+fn file_syntax(db: &impl SyntaxDatabase, file_id: FileId) -> SourceFileNode {
let text = db.file_text(file_id);
- File::parse(&*text)
+ SourceFileNode::parse(&*text)
}
fn file_lines(db: &impl SyntaxDatabase, file_id: FileId) -> Arc<LineIndex> {
let text = db.file_text(file_id);
#[cfg(test)]
mod tests {
use ra_editor::find_node_at_offset;
- use ra_syntax::File;
+ use ra_syntax::SourceFileNode;
use test_utils::extract_offset;
use super::*;
buf.push_str(&code[off..]);
buf
};
- let file = File::parse(&code);
+ let file = SourceFileNode::parse(&code);
let marker: ast::PathExpr = find_node_at_offset(file.syntax(), off).unwrap();
let fn_def: ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
let scopes = FnScopes::new(fn_def);
fn do_check_local_name(code: &str, expected_offset: u32) {
let (off, code) = extract_offset(code);
- let file = File::parse(&code);
+ let file = SourceFileNode::parse(&code);
let fn_def: ast::FnDef = find_node_at_offset(file.syntax(), off).unwrap();
let name_ref: ast::NameRef = find_node_at_offset(file.syntax(), off).unwrap();
db::check_canceled(db)?;
let file_id = source.file_id();
let submodules = match source.resolve(db) {
- ModuleSourceNode::Root(it) => collect_submodules(file_id, it.borrowed()),
- ModuleSourceNode::Inline(it) => it
+ ModuleSourceNode::SourceFile(it) => collect_submodules(file_id, it.borrowed()),
+ ModuleSourceNode::Module(it) => it
.borrowed()
.item_list()
.map(|it| collect_submodules(file_id, it))
let tree = db.module_tree(source_root_id)?;
let source = module_id.source(&tree).resolve(db);
let res = match source {
- ModuleSourceNode::Root(root) => ModuleScope::new(root.borrowed().items()),
- ModuleSourceNode::Inline(inline) => match inline.borrowed().item_list() {
+ ModuleSourceNode::SourceFile(it) => ModuleScope::new(it.borrowed().items()),
+ ModuleSourceNode::Module(it) => match it.borrowed().item_list() {
Some(items) => ModuleScope::new(items.items()),
None => ModuleScope::new(std::iter::empty()),
},
let source_root = db.source_root(source_root);
for &file_id in source_root.files.iter() {
- let source = ModuleSource::File(file_id);
+ let source = ModuleSource::SourceFile(file_id);
if visited.contains(&source) {
continue; // TODO: use explicit crate_roots here
}
visited,
roots,
Some(link),
- ModuleSource::File(file_id),
+ ModuleSource::SourceFile(file_id),
),
})
.collect::<Cancelable<Vec<_>>>()?;
file_resolver: &FileResolverImp,
) -> (Vec<FileId>, Option<Problem>) {
let file_id = match source {
- ModuleSource::File(it) => it,
- ModuleSource::Inline(..) => {
+ ModuleSource::SourceFile(it) => it,
+ ModuleSource::Module(..) => {
// TODO
return (Vec::new(), None);
}
use ra_syntax::{
ast::{self, AstNode, NameOwner},
- SmolStr, SyntaxNode,
+ SmolStr, SyntaxNode, SyntaxNodeRef,
};
use relative_path::RelativePathBuf;
/// `ModuleSource` is the syntax tree element that produced this module:
/// either a file, or an inlinde module.
-/// TODO: we don't produce Inline modules yet
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub(crate) enum ModuleSource {
- File(FileId),
+ SourceFile(FileId),
#[allow(dead_code)]
- Inline(SyntaxPtr),
+ Module(SyntaxPtr),
}
/// An owned syntax node for a module. Unlike `ModuleSource`,
/// this holds onto the AST for the whole file.
enum ModuleSourceNode {
- Root(ast::RootNode),
- Inline(ast::ModuleNode),
+ SourceFile(ast::SourceFileNode),
+ Module(ast::ModuleNode),
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)]
) -> ast::ModuleNode {
let owner = self.owner(tree);
match owner.source(tree).resolve(db) {
- ModuleSourceNode::Root(root) => {
+ ModuleSourceNode::SourceFile(root) => {
let ast = imp::modules(root.borrowed())
.find(|(name, _)| name == &tree.link(self).name)
.unwrap()
.1;
ast.owned()
}
- ModuleSourceNode::Inline(it) => it,
+ ModuleSourceNode::Module(it) => it,
}
}
}
}
impl ModuleSource {
+ pub(crate) fn for_node(file_id: FileId, node: SyntaxNodeRef) -> ModuleSource {
+ for node in node.ancestors() {
+ if let Some(m) = ast::Module::cast(node) {
+ if !m.has_semi() {
+ return ModuleSource::new_inline(file_id, m);
+ }
+ }
+ }
+ ModuleSource::SourceFile(file_id)
+ }
pub(crate) fn new_inline(file_id: FileId, module: ast::Module) -> ModuleSource {
assert!(!module.has_semi());
let ptr = SyntaxPtr::new(file_id, module.syntax());
- ModuleSource::Inline(ptr)
+ ModuleSource::Module(ptr)
}
pub(crate) fn as_file(self) -> Option<FileId> {
match self {
- ModuleSource::File(f) => Some(f),
- ModuleSource::Inline(..) => None,
+ ModuleSource::SourceFile(f) => Some(f),
+ ModuleSource::Module(..) => None,
}
}
pub(crate) fn file_id(self) -> FileId {
match self {
- ModuleSource::File(f) => f,
- ModuleSource::Inline(ptr) => ptr.file_id(),
+ ModuleSource::SourceFile(f) => f,
+ ModuleSource::Module(ptr) => ptr.file_id(),
}
}
fn resolve(self, db: &impl SyntaxDatabase) -> ModuleSourceNode {
match self {
- ModuleSource::File(file_id) => {
+ ModuleSource::SourceFile(file_id) => {
let syntax = db.file_syntax(file_id);
- ModuleSourceNode::Root(syntax.ast().owned())
+ ModuleSourceNode::SourceFile(syntax.ast().owned())
}
- ModuleSource::Inline(ptr) => {
+ ModuleSource::Module(ptr) => {
let syntax = db.resolve_syntax_ptr(ptr);
let syntax = syntax.borrowed();
let module = ast::Module::cast(syntax).unwrap();
- ModuleSourceNode::Inline(module.owned())
+ ModuleSourceNode::Module(module.owned())
}
}
}
}
impl ModuleScope {
- pub(crate) fn new<'a>(items: impl Iterator<Item = ast::ModuleItem<'a>>) -> ModuleScope {
+ pub(super) fn new<'a>(items: impl Iterator<Item = ast::ModuleItem<'a>>) -> ModuleScope {
let mut entries = Vec::new();
for item in items {
let entry = match item {
#[cfg(test)]
mod tests {
use super::*;
- use ra_syntax::{ast::ModuleItemOwner, File};
+ use ra_syntax::{ast::ModuleItemOwner, SourceFileNode};
fn do_check(code: &str, expected: &[&str]) {
- let file = File::parse(&code);
+ let file = SourceFileNode::parse(&code);
let scope = ModuleScope::new(file.ast().items());
let actual = scope.entries.iter().map(|it| it.name()).collect::<Vec<_>>();
assert_eq!(expected, actual.as_slice());
use ra_editor::{self, find_node_at_offset, FileSymbol, LineIndex, LocalEdit};
use ra_syntax::{
ast::{self, ArgListOwner, Expr, NameOwner},
- AstNode, File, SmolStr,
+ AstNode, SourceFileNode, SmolStr,
SyntaxKind::*,
SyntaxNodeRef, TextRange, TextUnit,
};
use salsa::{Database, ParallelDatabase};
use crate::{
- completion::{resolve_based_completion, scope_completion, CompletionItem},
+ completion::{completions, CompletionItem},
db::{self, FileSyntaxQuery, SyntaxDatabase},
descriptors::{
function::{FnDescriptor, FnId},
input::{FilesDatabase, SourceRoot, SourceRootId, WORKSPACE},
symbol_index::SymbolIndex,
AnalysisChange, Cancelable, CrateGraph, CrateId, Diagnostic, FileId, FileResolver,
- FileSystemEdit, FilePosition, Query, SourceChange, SourceFileEdit,
+ FileSystemEdit, FilePosition, Query, SourceChange, SourceFileNodeEdit,
};
#[derive(Clone, Debug)]
}
impl AnalysisImpl {
- pub fn file_syntax(&self, file_id: FileId) -> File {
+ pub fn file_syntax(&self, file_id: FileId) -> SourceFileNode {
self.db.file_syntax(file_id)
}
pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> {
let module_source = match find_node_at_offset::<ast::Module>(file.syntax(), position.offset)
{
Some(m) if !m.has_semi() => ModuleSource::new_inline(position.file_id, m),
- _ => ModuleSource::File(position.file_id),
+ _ => ModuleSource::SourceFile(position.file_id),
};
let res = module_tree
let module_tree = self.module_tree(file_id)?;
let crate_graph = self.db.crate_graph();
let res = module_tree
- .modules_for_source(ModuleSource::File(file_id))
+ .modules_for_source(ModuleSource::SourceFile(file_id))
.into_iter()
.map(|it| it.root(&module_tree))
.filter_map(|it| it.source(&module_tree).as_file())
self.db.crate_graph().crate_roots[&crate_id]
}
pub fn completions(&self, position: FilePosition) -> Cancelable<Option<Vec<CompletionItem>>> {
- let mut res = Vec::new();
- let mut has_completions = false;
- if let Some(scope_based) = scope_completion(&self.db, position) {
- res.extend(scope_based);
- has_completions = true;
- }
- if let Some(scope_based) = resolve_based_completion(&self.db, position)? {
- res.extend(scope_based);
- has_completions = true;
- }
- let res = if has_completions { Some(res) } else { None };
- Ok(res)
+ completions(&self.db, position)
}
pub fn approximately_resolve_symbol(
&self,
ret
}
+ pub fn doc_comment_for(
+ &self,
+ file_id: FileId,
+ symbol: FileSymbol,
+ ) -> Cancelable<Option<String>> {
+ let file = self.db.file_syntax(file_id);
+
+ Ok(symbol.docs(&file))
+ }
+
pub fn diagnostics(&self, file_id: FileId) -> Cancelable<Vec<Diagnostic>> {
let module_tree = self.module_tree(file_id)?;
let syntax = self.db.file_syntax(file_id);
fix: None,
})
.collect::<Vec<_>>();
- if let Some(m) = module_tree.any_module_for_source(ModuleSource::File(file_id)) {
+ if let Some(m) = module_tree.any_module_for_source(ModuleSource::SourceFile(file_id)) {
for (name_node, problem) in m.problems(&module_tree, &*self.db) {
let diag = match problem {
Problem::UnresolvedModule { candidate } => {
Some(name) => name.text(),
None => return Vec::new(),
};
- let module_id = match module_tree.any_module_for_source(ModuleSource::File(file_id)) {
+ let module_id = match module_tree.any_module_for_source(ModuleSource::SourceFile(file_id)) {
Some(id) => id,
None => return Vec::new(),
};
impl SourceChange {
pub(crate) fn from_local_edit(file_id: FileId, label: &str, edit: LocalEdit) -> SourceChange {
- let file_edit = SourceFileEdit {
+ let file_edit = SourceFileNodeEdit {
file_id,
edits: edit.edit.into_atoms(),
};
//! ra_analyzer crate is the brain of Rust analyzer. It relies on the `salsa`
-//! crate, which provides and incremental on-deman database of facts.
+//! crate, which provides an incremental on-demand database of facts.
extern crate fst;
extern crate ra_editor;
use std::{fmt, sync::Arc};
-use ra_syntax::{AtomEdit, File, TextRange, TextUnit};
+use ra_syntax::{AtomEdit, SourceFileNode, TextRange, TextUnit};
use rayon::prelude::*;
use relative_path::RelativePathBuf;
#[derive(Debug)]
pub struct SourceChange {
pub label: String,
- pub source_file_edits: Vec<SourceFileEdit>,
+ pub source_file_edits: Vec<SourceFileNodeEdit>,
pub file_system_edits: Vec<FileSystemEdit>,
pub cursor_position: Option<FilePosition>,
}
#[derive(Debug)]
-pub struct SourceFileEdit {
+pub struct SourceFileNodeEdit {
pub file_id: FileId,
pub edits: Vec<AtomEdit>,
}
}
impl Analysis {
- pub fn file_syntax(&self, file_id: FileId) -> File {
+ pub fn file_syntax(&self, file_id: FileId) -> SourceFileNode {
self.imp.file_syntax(file_id).clone()
}
pub fn file_line_index(&self, file_id: FileId) -> Arc<LineIndex> {
self.imp.file_line_index(file_id)
}
- pub fn extend_selection(&self, file: &File, range: TextRange) -> TextRange {
+ pub fn extend_selection(&self, file: &SourceFileNode, range: TextRange) -> TextRange {
ra_editor::extend_selection(file, range).unwrap_or(range)
}
- pub fn matching_brace(&self, file: &File, offset: TextUnit) -> Option<TextUnit> {
+ pub fn matching_brace(&self, file: &SourceFileNode, offset: TextUnit) -> Option<TextUnit> {
ra_editor::matching_brace(file, offset)
}
pub fn syntax_tree(&self, file_id: FileId) -> String {
pub fn find_all_refs(&self, position: FilePosition) -> Cancelable<Vec<(FileId, TextRange)>> {
Ok(self.imp.find_all_refs(position))
}
+ pub fn doc_comment_for(
+ &self,
+ file_id: FileId,
+ symbol: FileSymbol,
+ ) -> Cancelable<Option<String>> {
+ self.imp.doc_comment_for(file_id, symbol)
+ }
pub fn parent_module(&self, position: FilePosition) -> Cancelable<Vec<(FileId, FileSymbol)>> {
self.imp.parent_module(position)
}
impl LibraryData {
pub fn prepare(files: Vec<(FileId, String)>, file_resolver: Arc<FileResolver>) -> LibraryData {
let symbol_index = SymbolIndex::for_files(files.par_iter().map(|(file_id, text)| {
- let file = File::parse(text);
+ let file = SourceFileNode::parse(text);
(*file_id, file)
}));
LibraryData {
use fst::{self, Streamer};
use ra_editor::{file_symbols, FileSymbol};
use ra_syntax::{
- File,
+ SourceFileNode,
SyntaxKind::{self, *},
};
use rayon::prelude::*;
}
impl SymbolIndex {
- pub(crate) fn for_files(files: impl ParallelIterator<Item = (FileId, File)>) -> SymbolIndex {
+ pub(crate) fn for_files(
+ files: impl ParallelIterator<Item = (FileId, SourceFileNode)>,
+ ) -> SymbolIndex {
let mut symbols = files
.flat_map(|(file_id, file)| {
file_symbols(&file)
SymbolIndex { symbols, map }
}
- pub(crate) fn for_file(file_id: FileId, file: File) -> SymbolIndex {
+ pub(crate) fn for_file(file_id: FileId, file: SourceFileNode) -> SymbolIndex {
SymbolIndex::for_files(rayon::iter::once((file_id, file)))
}
}
-use ra_syntax::{File, SyntaxKind, SyntaxNode, SyntaxNodeRef, TextRange};
+use ra_syntax::{SourceFileNode, SyntaxKind, SyntaxNode, SyntaxNodeRef, TextRange};
use crate::db::SyntaxDatabase;
use crate::FileId;
}
}
- pub(crate) fn resolve(self, file: &File) -> SyntaxNode {
+ pub(crate) fn resolve(self, file: &SourceFileNode) -> SyntaxNode {
let mut curr = file.syntax();
loop {
if curr.range() == self.range && curr.kind() == self.kind {
local: self,
}
}
+
+ // Seems unfortunate to expose
+ pub(crate) fn range(self) -> TextRange {
+ self.range
+ }
}
#[test]
fn test_local_syntax_ptr() {
use ra_syntax::{ast, AstNode};
- let file = File::parse("struct Foo { f: u32, }");
+ let file = SourceFileNode::parse("struct Foo { f: u32, }");
let field = file
.syntax()
.descendants()
&completions,
);
}
+
+#[test]
+fn test_complete_crate_path_with_braces() {
+ let (analysis, position) = analysis_and_position(
+ "
+ //- /lib.rs
+ mod foo;
+ struct Spam;
+ //- /foo.rs
+ use crate::{Sp<|>};
+ ",
+ );
+ let completions = analysis.completions(position).unwrap().unwrap();
+ assert_eq_dbg(
+ r#"[CompletionItem { label: "foo", lookup: None, snippet: None },
+ CompletionItem { label: "Spam", lookup: None, snippet: None }]"#,
+ &completions,
+ );
+}
+
+#[test]
+fn test_complete_crate_path_in_nested_tree() {
+ let (analysis, position) = analysis_and_position(
+ "
+ //- /lib.rs
+ mod foo;
+ pub mod bar {
+ pub mod baz {
+ pub struct Spam;
+ }
+ }
+ //- /foo.rs
+ use crate::{bar::{baz::Sp<|>}};
+ ",
+ );
+ let completions = analysis.completions(position).unwrap().unwrap();
+ assert_eq_dbg(
+ r#"[CompletionItem { label: "Spam", lookup: None, snippet: None }]"#,
+ &completions,
+ );
+}
use clap::{App, Arg, SubCommand};
use join_to_string::join;
use ra_editor::{extend_selection, file_structure, syntax_tree};
-use ra_syntax::{File, TextRange};
+use ra_syntax::{SourceFileNode, TextRange};
use tools::collect_tests;
type Result<T> = ::std::result::Result<T, failure::Error>;
Ok(())
}
-fn file() -> Result<File> {
+fn file() -> Result<SourceFileNode> {
let text = read_stdin()?;
- Ok(File::parse(&text))
+ Ok(SourceFileNode::parse(&text))
}
fn read_stdin() -> Result<String> {
None => bail!("No test found at line {} at {}", line, file.display()),
Some((_start_line, test)) => test,
};
- let file = File::parse(&test.text);
+ let file = SourceFileNode::parse(&test.text);
let tree = syntax_tree(&file);
Ok((test.text, tree))
}
-fn selections(file: &File, start: u32, end: u32) -> String {
+fn selections(file: &SourceFileNode, start: u32, end: u32) -> String {
let mut ranges = Vec::new();
let mut cur = Some(TextRange::from_to((start - 1).into(), (end - 1).into()));
while let Some(r) = cur {
use ra_syntax::{
algo::{find_covering_node, find_leaf_at_offset},
ast::{self, AstNode, AttrsOwner, NameOwner, TypeParamsOwner},
- Direction, File,
+ Direction, SourceFileNode,
SyntaxKind::{COMMA, WHITESPACE},
SyntaxNodeRef, TextRange, TextUnit,
};
pub cursor_position: Option<TextUnit>,
}
-pub fn flip_comma<'a>(file: &'a File, offset: TextUnit) -> Option<impl FnOnce() -> LocalEdit + 'a> {
+pub fn flip_comma<'a>(
+ file: &'a SourceFileNode,
+ offset: TextUnit,
+) -> Option<impl FnOnce() -> LocalEdit + 'a> {
let syntax = file.syntax();
let comma = find_leaf_at_offset(syntax, offset).find(|leaf| leaf.kind() == COMMA)?;
})
}
-pub fn add_derive<'a>(file: &'a File, offset: TextUnit) -> Option<impl FnOnce() -> LocalEdit + 'a> {
+pub fn add_derive<'a>(
+ file: &'a SourceFileNode,
+ offset: TextUnit,
+) -> Option<impl FnOnce() -> LocalEdit + 'a> {
let nominal = find_node_at_offset::<ast::NominalDef>(file.syntax(), offset)?;
Some(move || {
let derive_attr = nominal
})
}
-pub fn add_impl<'a>(file: &'a File, offset: TextUnit) -> Option<impl FnOnce() -> LocalEdit + 'a> {
+pub fn add_impl<'a>(
+ file: &'a SourceFileNode,
+ offset: TextUnit,
+) -> Option<impl FnOnce() -> LocalEdit + 'a> {
let nominal = find_node_at_offset::<ast::NominalDef>(file.syntax(), offset)?;
let name = nominal.name()?;
}
pub fn introduce_variable<'a>(
- file: &'a File,
+ file: &'a SourceFileNode,
range: TextRange,
) -> Option<impl FnOnce() -> LocalEdit + 'a> {
let node = find_covering_node(file.syntax(), range);
use ra_syntax::{
algo::{find_covering_node, find_leaf_at_offset, LeafAtOffset},
- Direction, File,
+ Direction, SourceFileNode,
SyntaxKind::*,
SyntaxNodeRef, TextRange, TextUnit,
};
-pub fn extend_selection(file: &File, range: TextRange) -> Option<TextRange> {
+pub fn extend_selection(file: &SourceFileNode, range: TextRange) -> Option<TextRange> {
let syntax = file.syntax();
extend(syntax.borrowed(), range)
}
fn do_check(before: &str, afters: &[&str]) {
let (cursor, before) = extract_offset(before);
- let file = File::parse(&before);
+ let file = SourceFileNode::parse(&before);
let mut range = TextRange::offset_len(cursor, 0.into());
for &after in afters {
range = extend_selection(&file, range).unwrap();
use rustc_hash::FxHashSet;
use ra_syntax::{
- ast, AstNode, Direction, File,
+ ast, AstNode, Direction, SourceFileNode,
SyntaxKind::{self, *},
SyntaxNodeRef, TextRange,
};
pub kind: FoldKind,
}
-pub fn folding_ranges(file: &File) -> Vec<Fold> {
+pub fn folding_ranges(file: &SourceFileNode) -> Vec<Fold> {
let mut res = vec![];
let mut visited_comments = FxHashSet::default();
let mut visited_imports = FxHashSet::default();
fn do_check(text: &str, fold_kinds: &[FoldKind]) {
let (ranges, text) = extract_ranges(text);
- let file = File::parse(&text);
+ let file = SourceFileNode::parse(&text);
let folds = folding_ranges(&file);
assert_eq!(
use ra_syntax::{
algo::find_leaf_at_offset,
ast::{self, AstNode, NameOwner},
- File,
+ SourceFileNode,
Location,
SyntaxKind::{self, *},
SyntaxNodeRef, TextRange, TextUnit,
Bin,
}
-pub fn matching_brace(file: &File, offset: TextUnit) -> Option<TextUnit> {
+pub fn matching_brace(file: &SourceFileNode, offset: TextUnit) -> Option<TextUnit> {
const BRACES: &[SyntaxKind] = &[
L_CURLY, R_CURLY, L_BRACK, R_BRACK, L_PAREN, R_PAREN, L_ANGLE, R_ANGLE,
];
Some(matching_node.range().start())
}
-pub fn highlight(file: &File) -> Vec<HighlightedRange> {
+pub fn highlight(file: &SourceFileNode) -> Vec<HighlightedRange> {
let mut res = Vec::new();
for node in file.syntax().descendants() {
let tag = match node.kind() {
res
}
-pub fn diagnostics(file: &File) -> Vec<Diagnostic> {
+pub fn diagnostics(file: &SourceFileNode) -> Vec<Diagnostic> {
fn location_to_range(location: Location) -> TextRange {
match location {
Location::Offset(offset) => TextRange::offset_len(offset, 1.into()),
.collect()
}
-pub fn syntax_tree(file: &File) -> String {
+pub fn syntax_tree(file: &SourceFileNode) -> String {
::ra_syntax::utils::dump_tree(file.syntax())
}
-pub fn runnables(file: &File) -> Vec<Runnable> {
+pub fn runnables(file: &SourceFileNode) -> Vec<Runnable> {
file.syntax()
.descendants()
.filter_map(ast::FnDef::cast)
#[test]
fn test_highlighting() {
- let file = File::parse(
+ let file = SourceFileNode::parse(
r#"
// comment
fn main() {}
#[test]
fn test_runnables() {
- let file = File::parse(
+ let file = SourceFileNode::parse(
r#"
fn main() {}
fn test_matching_brace() {
fn do_check(before: &str, after: &str) {
let (pos, before) = extract_offset(before);
- let file = File::parse(&before);
+ let file = SourceFileNode::parse(&before);
let new_pos = match matching_brace(&file, pos) {
None => pos,
Some(pos) => pos,
use crate::TextUnit;
+use rustc_hash::FxHashMap;
use superslice::Ext;
-#[derive(Clone, Debug, Hash, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq)]
pub struct LineIndex {
newlines: Vec<TextUnit>,
+ utf16_lines: FxHashMap<u32, Vec<Utf16Char>>,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct LineCol {
pub line: u32,
- pub col: TextUnit,
+ pub col_utf16: u32,
+}
+
+#[derive(Clone, Debug, Hash, PartialEq, Eq)]
+struct Utf16Char {
+ start: TextUnit,
+ end: TextUnit,
+}
+
+impl Utf16Char {
+ fn len(&self) -> TextUnit {
+ self.end - self.start
+ }
}
impl LineIndex {
pub fn new(text: &str) -> LineIndex {
+ let mut utf16_lines = FxHashMap::default();
+ let mut utf16_chars = Vec::new();
+
let mut newlines = vec![0.into()];
- let mut curr = 0.into();
+ let mut curr_row = 0.into();
+ let mut curr_col = 0.into();
+ let mut line = 0;
for c in text.chars() {
- curr += TextUnit::of_char(c);
+ curr_row += TextUnit::of_char(c);
if c == '\n' {
- newlines.push(curr);
+ newlines.push(curr_row);
+
+ // Save any utf-16 characters seen in the previous line
+ if utf16_chars.len() > 0 {
+ utf16_lines.insert(line, utf16_chars);
+ utf16_chars = Vec::new();
+ }
+
+ // Prepare for processing the next line
+ curr_col = 0.into();
+ line += 1;
+ continue;
}
+
+ let char_len = TextUnit::of_char(c);
+ if char_len.to_usize() > 1 {
+ utf16_chars.push(Utf16Char {
+ start: curr_col,
+ end: curr_col + char_len,
+ });
+ }
+
+ curr_col += char_len;
+ }
+ LineIndex {
+ newlines,
+ utf16_lines,
}
- LineIndex { newlines }
}
pub fn line_col(&self, offset: TextUnit) -> LineCol {
let line = self.newlines.upper_bound(&offset) - 1;
let line_start_offset = self.newlines[line];
let col = offset - line_start_offset;
+
LineCol {
line: line as u32,
- col,
+ col_utf16: self.utf8_to_utf16_col(line as u32, col) as u32,
}
}
pub fn offset(&self, line_col: LineCol) -> TextUnit {
//TODO: return Result
- self.newlines[line_col.line as usize] + line_col.col
+ let col = self.utf16_to_utf8_col(line_col.line, line_col.col_utf16);
+ self.newlines[line_col.line as usize] + col
+ }
+
+ fn utf8_to_utf16_col(&self, line: u32, mut col: TextUnit) -> usize {
+ if let Some(utf16_chars) = self.utf16_lines.get(&line) {
+ let mut correction = TextUnit::from_usize(0);
+ for c in utf16_chars {
+ if col >= c.end {
+ correction += c.len() - TextUnit::from_usize(1);
+ } else {
+ // From here on, all utf16 characters come *after* the character we are mapping,
+ // so we don't need to take them into account
+ break;
+ }
+ }
+
+ col -= correction;
+ }
+
+ col.to_usize()
+ }
+
+ fn utf16_to_utf8_col(&self, line: u32, col: u32) -> TextUnit {
+ let mut col: TextUnit = col.into();
+ if let Some(utf16_chars) = self.utf16_lines.get(&line) {
+ for c in utf16_chars {
+ if col >= c.start {
+ col += c.len() - TextUnit::from_usize(1);
+ } else {
+ // From here on, all utf16 characters come *after* the character we are mapping,
+ // so we don't need to take them into account
+ break;
+ }
+ }
+ }
+
+ col
}
}
index.line_col(0.into()),
LineCol {
line: 0,
- col: 0.into()
+ col_utf16: 0
}
);
assert_eq!(
index.line_col(1.into()),
LineCol {
line: 0,
- col: 1.into()
+ col_utf16: 1
}
);
assert_eq!(
index.line_col(5.into()),
LineCol {
line: 0,
- col: 5.into()
+ col_utf16: 5
}
);
assert_eq!(
index.line_col(6.into()),
LineCol {
line: 1,
- col: 0.into()
+ col_utf16: 0
}
);
assert_eq!(
index.line_col(7.into()),
LineCol {
line: 1,
- col: 1.into()
+ col_utf16: 1
}
);
assert_eq!(
index.line_col(8.into()),
LineCol {
line: 1,
- col: 2.into()
+ col_utf16: 2
}
);
assert_eq!(
index.line_col(10.into()),
LineCol {
line: 1,
- col: 4.into()
+ col_utf16: 4
}
);
assert_eq!(
index.line_col(11.into()),
LineCol {
line: 1,
- col: 5.into()
+ col_utf16: 5
}
);
assert_eq!(
index.line_col(12.into()),
LineCol {
line: 1,
- col: 6.into()
+ col_utf16: 6
}
);
index.line_col(0.into()),
LineCol {
line: 0,
- col: 0.into()
+ col_utf16: 0
}
);
assert_eq!(
index.line_col(1.into()),
LineCol {
line: 1,
- col: 0.into()
+ col_utf16: 0
}
);
assert_eq!(
index.line_col(2.into()),
LineCol {
line: 1,
- col: 1.into()
+ col_utf16: 1
}
);
assert_eq!(
index.line_col(6.into()),
LineCol {
line: 1,
- col: 5.into()
+ col_utf16: 5
}
);
assert_eq!(
index.line_col(7.into()),
LineCol {
line: 2,
- col: 0.into()
+ col_utf16: 0
}
);
}
+
+#[cfg(test)]
+mod test_utf8_utf16_conv {
+ use super::*;
+
+ #[test]
+ fn test_char_len() {
+ assert_eq!('メ'.len_utf8(), 3);
+ assert_eq!('メ'.len_utf16(), 1);
+ }
+
+ #[test]
+ fn test_empty_index() {
+ let col_index = LineIndex::new(
+ "
+const C: char = 'x';
+",
+ );
+ assert_eq!(col_index.utf16_lines.len(), 0);
+ }
+
+ #[test]
+ fn test_single_char() {
+ let col_index = LineIndex::new(
+ "
+const C: char = 'メ';
+",
+ );
+
+ assert_eq!(col_index.utf16_lines.len(), 1);
+ assert_eq!(col_index.utf16_lines[&1].len(), 1);
+ assert_eq!(
+ col_index.utf16_lines[&1][0],
+ Utf16Char {
+ start: 17.into(),
+ end: 20.into()
+ }
+ );
+
+ // UTF-8 to UTF-16, no changes
+ assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);
+
+ // UTF-8 to UTF-16
+ assert_eq!(col_index.utf8_to_utf16_col(1, 22.into()), 20);
+
+ // UTF-16 to UTF-8, no changes
+ assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextUnit::from(15));
+
+ // UTF-16 to UTF-8
+ assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextUnit::from(21));
+ }
+
+ #[test]
+ fn test_string() {
+ let col_index = LineIndex::new(
+ "
+const C: char = \"メ メ\";
+",
+ );
+
+ assert_eq!(col_index.utf16_lines.len(), 1);
+ assert_eq!(col_index.utf16_lines[&1].len(), 2);
+ assert_eq!(
+ col_index.utf16_lines[&1][0],
+ Utf16Char {
+ start: 17.into(),
+ end: 20.into()
+ }
+ );
+ assert_eq!(
+ col_index.utf16_lines[&1][1],
+ Utf16Char {
+ start: 21.into(),
+ end: 24.into()
+ }
+ );
+
+ // UTF-8 to UTF-16
+ assert_eq!(col_index.utf8_to_utf16_col(1, 15.into()), 15);
+
+ assert_eq!(col_index.utf8_to_utf16_col(1, 21.into()), 19);
+ assert_eq!(col_index.utf8_to_utf16_col(1, 25.into()), 21);
+
+ assert!(col_index.utf8_to_utf16_col(2, 15.into()) == 15);
+
+ // UTF-16 to UTF-8
+ assert_eq!(col_index.utf16_to_utf8_col(1, 15), TextUnit::from_usize(15));
+
+ assert_eq!(col_index.utf16_to_utf8_col(1, 18), TextUnit::from_usize(20));
+ assert_eq!(col_index.utf16_to_utf8_col(1, 19), TextUnit::from_usize(23));
+
+ assert_eq!(col_index.utf16_to_utf8_col(2, 15), TextUnit::from_usize(15));
+ }
+}
use ra_syntax::{
algo::visit::{visitor, Visitor},
- ast::{self, NameOwner},
- AstNode, File, SmolStr, SyntaxKind, SyntaxNodeRef, WalkEvent,
+ ast::{self, DocCommentsOwner, NameOwner},
+ AstNode, SourceFileNode, SmolStr, SyntaxKind, SyntaxNodeRef, WalkEvent,
};
#[derive(Debug, Clone)]
pub kind: SyntaxKind,
}
-pub fn file_symbols(file: &File) -> Vec<FileSymbol> {
+impl FileSymbol {
+ pub fn docs(&self, file: &SourceFileNode) -> Option<String> {
+ file.syntax()
+ .descendants()
+ .filter(|node| node.kind() == self.kind && node.range() == self.node_range)
+ .filter_map(|node: SyntaxNodeRef| {
+ fn doc_comments<'a, N: DocCommentsOwner<'a>>(node: N) -> Option<String> {
+ let comments = node.doc_comment_text();
+ if comments.is_empty() {
+ None
+ } else {
+ Some(comments)
+ }
+ }
+
+ visitor()
+ .visit(doc_comments::<ast::FnDef>)
+ .visit(doc_comments::<ast::StructDef>)
+ .visit(doc_comments::<ast::EnumDef>)
+ .visit(doc_comments::<ast::TraitDef>)
+ .visit(doc_comments::<ast::Module>)
+ .visit(doc_comments::<ast::TypeDef>)
+ .visit(doc_comments::<ast::ConstDef>)
+ .visit(doc_comments::<ast::StaticDef>)
+ .accept(node)?
+ })
+ .nth(0)
+ }
+}
+
+pub fn file_symbols(file: &SourceFileNode) -> Vec<FileSymbol> {
file.syntax().descendants().filter_map(to_symbol).collect()
}
.accept(node)?
}
-pub fn file_structure(file: &File) -> Vec<StructureNode> {
+pub fn file_structure(file: &SourceFileNode) -> Vec<StructureNode> {
let mut res = Vec::new();
let mut stack = Vec::new();
#[test]
fn test_file_structure() {
- let file = File::parse(
+ let file = SourceFileNode::parse(
r#"
struct Foo {
x: i32
use crate::LocalEdit;
pub use crate::_test_utils::*;
-use ra_syntax::{File, TextRange, TextUnit};
+use ra_syntax::{SourceFileNode, TextRange, TextUnit};
-pub fn check_action<F: Fn(&File, TextUnit) -> Option<LocalEdit>>(before: &str, after: &str, f: F) {
+pub fn check_action<F: Fn(&SourceFileNode, TextUnit) -> Option<LocalEdit>>(
+ before: &str,
+ after: &str,
+ f: F,
+) {
let (before_cursor_pos, before) = extract_offset(before);
- let file = File::parse(&before);
+ let file = SourceFileNode::parse(&before);
let result = f(&file, before_cursor_pos).expect("code action is not applicable");
let actual = result.edit.apply(&before);
let actual_cursor_pos = match result.cursor_position {
assert_eq_text!(after, &actual);
}
-pub fn check_action_range<F: Fn(&File, TextRange) -> Option<LocalEdit>>(
+pub fn check_action_range<F: Fn(&SourceFileNode, TextRange) -> Option<LocalEdit>>(
before: &str,
after: &str,
f: F,
) {
let (range, before) = extract_range(before);
- let file = File::parse(&before);
+ let file = SourceFileNode::parse(&before);
let result = f(&file, range).expect("code action is not applicable");
let actual = result.edit.apply(&before);
let actual_cursor_pos = match result.cursor_position {
algo::{find_covering_node, find_leaf_at_offset, LeafAtOffset},
ast,
text_utils::{contains_offset_nonstrict, intersect},
- AstNode, File, SyntaxKind,
+ AstNode, SourceFileNode, SyntaxKind,
SyntaxKind::*,
SyntaxNodeRef, TextRange, TextUnit,
};
use crate::{find_node_at_offset, EditBuilder, LocalEdit};
-pub fn join_lines(file: &File, range: TextRange) -> LocalEdit {
+pub fn join_lines(file: &SourceFileNode, range: TextRange) -> LocalEdit {
let range = if range.is_empty() {
let syntax = file.syntax();
let text = syntax.text().slice(range.start()..);
}
}
-pub fn on_enter(file: &File, offset: TextUnit) -> Option<LocalEdit> {
+pub fn on_enter(file: &SourceFileNode, offset: TextUnit) -> Option<LocalEdit> {
let comment = find_leaf_at_offset(file.syntax(), offset)
.left_biased()
.and_then(ast::Comment::cast)?;
})
}
-fn node_indent<'a>(file: &'a File, node: SyntaxNodeRef) -> Option<&'a str> {
+fn node_indent<'a>(file: &'a SourceFileNode, node: SyntaxNodeRef) -> Option<&'a str> {
let ws = match find_leaf_at_offset(file.syntax(), node.range().start()) {
LeafAtOffset::Between(l, r) => {
assert!(r == node);
Some(&text[pos..])
}
-pub fn on_eq_typed(file: &File, offset: TextUnit) -> Option<LocalEdit> {
+pub fn on_eq_typed(file: &SourceFileNode, offset: TextUnit) -> Option<LocalEdit> {
let let_stmt: ast::LetStmt = find_node_at_offset(file.syntax(), offset)?;
if let_stmt.has_semi() {
return None;
fn check_join_lines_sel(before: &str, after: &str) {
let (sel, before) = extract_range(before);
- let file = File::parse(&before);
+ let file = SourceFileNode::parse(&before);
let result = join_lines(&file, sel);
let actual = result.edit.apply(&before);
assert_eq_text!(after, &actual);
fn test_on_eq_typed() {
fn do_check(before: &str, after: &str) {
let (offset, before) = extract_offset(before);
- let file = File::parse(&before);
+ let file = SourceFileNode::parse(&before);
let result = on_eq_typed(&file, offset).unwrap();
let actual = result.edit.apply(&before);
assert_eq_text!(after, &actual);
fn test_on_enter() {
fn apply_on_enter(before: &str) -> Option<String> {
let (offset, before) = extract_offset(before);
- let file = File::parse(&before);
+ let file = SourceFileNode::parse(&before);
let result = on_enter(&file, offset)?;
let actual = result.edit.apply(&before);
let actual = add_cursor(&actual, result.cursor_position.unwrap());
serde_derive = "1.0.71"
drop_bomb = "0.1.0"
crossbeam-channel = "0.2.4"
-flexi_logger = "0.9.1"
+flexi_logger = "0.10.0"
log = "0.4.3"
url_serde = "0.2.0"
languageserver-types = "0.51.0"
save: None,
},
)),
- hover_provider: None,
+ hover_provider: Some(true),
completion_provider: Some(CompletionOptions {
resolve_provider: None,
trigger_characters: None,
Location, Position, Range, SymbolKind, TextDocumentEdit, TextDocumentIdentifier,
TextDocumentItem, TextDocumentPositionParams, TextEdit, Url, VersionedTextDocumentIdentifier,
};
-use ra_analysis::{FileId, FileSystemEdit, SourceChange, SourceFileEdit, FilePosition};
+use ra_analysis::{FileId, FileSystemEdit, SourceChange, SourceFileNodeEdit, FilePosition};
use ra_editor::{AtomEdit, Edit, LineCol, LineIndex};
use ra_syntax::{SyntaxKind, TextRange, TextUnit};
type Output = TextUnit;
fn conv_with(self, line_index: &LineIndex) -> TextUnit {
- // TODO: UTF-16
let line_col = LineCol {
line: self.line as u32,
- col: (self.character as u32).into(),
+ col_utf16: self.character as u32,
};
line_index.offset(line_col)
}
fn conv_with(self, line_index: &LineIndex) -> Position {
let line_col = line_index.line_col(self);
- // TODO: UTF-16
- Position::new(u64::from(line_col.line), u64::from(u32::from(line_col.col)))
+ Position::new(
+ u64::from(line_col.line),
+ u64::from(u32::from(line_col.col_utf16)),
+ )
}
}
.map(|it| it.edits.as_slice())
.unwrap_or(&[]);
let line_col = translate_offset_with_edit(&*line_index, pos.offset, edits);
- let position =
- Position::new(u64::from(line_col.line), u64::from(u32::from(line_col.col)));
+ let position = Position::new(
+ u64::from(line_col.line),
+ u64::from(u32::from(line_col.col_utf16)),
+ );
Some(TextDocumentPositionParams {
text_document: TextDocumentIdentifier::new(pos.file_id.try_conv_with(world)?),
position,
if in_edit_line_col.line == 0 {
LineCol {
line: edit_line_col.line,
- col: edit_line_col.col + in_edit_line_col.col,
+ col_utf16: edit_line_col.col_utf16 + in_edit_line_col.col_utf16,
}
} else {
LineCol {
line: edit_line_col.line + in_edit_line_col.line,
- col: in_edit_line_col.col,
+ col_utf16: in_edit_line_col.col_utf16,
}
}
}
-impl TryConvWith for SourceFileEdit {
+impl TryConvWith for SourceFileNodeEdit {
type Ctx = ServerWorld;
type Output = TextDocumentEdit;
fn try_conv_with(self, world: &ServerWorld) -> Result<TextDocumentEdit> {
extern crate log;
#[macro_use]
extern crate failure;
+#[macro_use]
+extern crate serde_derive;
+extern crate serde;
extern crate flexi_logger;
extern crate gen_lsp_server;
extern crate ra_lsp_server;
+use serde::Deserialize;
use flexi_logger::{Duplicate, Logger};
use gen_lsp_server::{run_server, stdio_transport};
use ra_lsp_server::Result;
}
}
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct InitializationOptions {
+ publish_decorations: bool,
+}
+
fn main_inner() -> Result<()> {
let (receiver, sender, threads) = stdio_transport();
let cwd = ::std::env::current_dir()?;
.root_uri
.and_then(|it| it.to_file_path().ok())
.unwrap_or(cwd);
- ra_lsp_server::main_loop(false, root, r, s)
+ let publish_decorations = params
+ .initialization_options
+ .and_then(|v| InitializationOptions::deserialize(v).ok())
+ .map(|it| it.publish_decorations)
+ == Some(true);
+ ra_lsp_server::main_loop(false, root, publish_decorations, r, s)
},
)?;
info!("shutting down IO...");
info!("... IO is down");
Ok(())
}
+
+/*
+ (let ((backend (eglot-xref-backend)))
+ (mapcar
+ (lambda (xref)
+ (let ((loc (xref-item-location xref)))
+ (propertize
+ (concat
+ (when (xref-file-location-p loc)
+ (with-slots (file line column) loc
+ (format "%s:%s:%s:"
+ (propertize (file-relative-name file)
+ 'face 'compilation-info)
+ (propertize (format "%s" line)
+ 'face 'compilation-line
+ )
+ column)))
+ (xref-item-summary xref))
+ 'xref xref)))
+ (xref-backend-apropos backend "Analysis"))
+ )
+
+
+*/
use languageserver_types::{
CodeActionResponse, Command, CompletionItem, CompletionItemKind, Diagnostic,
DiagnosticSeverity, DocumentSymbol, Documentation, FoldingRange, FoldingRangeKind,
- FoldingRangeParams, InsertTextFormat, Location, MarkupContent, MarkupKind, Position,
+ FoldingRangeParams, InsertTextFormat, Location, MarkupContent, MarkupKind, MarkedString, Position,
PrepareRenameResponse, RenameParams, SymbolInformation, TextDocumentIdentifier, TextEdit,
- WorkspaceEdit, ParameterInformation, SignatureInformation,
+ WorkspaceEdit, ParameterInformation, SignatureInformation, Hover, HoverContents,
};
use ra_analysis::{FileId, FoldKind, Query, RunnableKind, FilePosition};
use ra_syntax::text_utils::contains_offset_nonstrict;
}
}
+pub fn handle_hover(
+    world: ServerWorld,
+    params: req::TextDocumentPositionParams,
+) -> Result<Option<Hover>> {
+    // Resolve the symbol under the cursor; if it carries a doc comment,
+    // surface that comment as LSP hover text over the symbol's range.
+    let position = params.try_conv_with(&world)?;
+    let line_index = world.analysis().file_line_index(position.file_id);
+
+    for (file_id, symbol) in world.analysis().approximately_resolve_symbol(position)? {
+        let range = symbol.node_range.conv_with(&line_index);
+        // NOTE(review): was `world.analysis` (direct field access); every other
+        // call site in this handler goes through the `analysis()` accessor —
+        // made consistent here.
+        let comment = world.analysis().doc_comment_for(file_id, symbol)?;
+
+        // `if let` instead of `is_some()` + `unwrap()`: same behavior
+        // (first symbol with a comment wins), no panic path.
+        if let Some(comment) = comment {
+            let contents = HoverContents::Scalar(MarkedString::String(comment));
+
+            return Ok(Some(Hover {
+                contents,
+                range: Some(range),
+            }));
+        }
+    }
+
+    Ok(None)
+}
+
pub fn handle_prepare_rename(
world: ServerWorld,
params: req::TextDocumentPositionParams,
pub fn main_loop(
internal_mode: bool,
root: PathBuf,
+ publish_decorations: bool,
msg_receiver: &Receiver<RawMessage>,
msg_sender: &Sender<RawMessage>,
) -> Result<()> {
let mut subs = Subscriptions::new();
let main_res = main_loop_inner(
internal_mode,
+ publish_decorations,
root,
&pool,
msg_sender,
fn main_loop_inner(
internal_mode: bool,
+ publish_decorations: bool,
ws_root: PathBuf,
pool: &ThreadPool,
msg_sender: &Sender<RawMessage>,
update_file_notifications_on_threadpool(
pool,
state.snapshot(),
+ publish_decorations,
task_sender.clone(),
subs.subscriptions(),
)
.on::<req::CodeActionRequest>(handlers::handle_code_action)?
.on::<req::FoldingRangeRequest>(handlers::handle_folding_range)?
.on::<req::SignatureHelpRequest>(handlers::handle_signature_help)?
+ .on::<req::HoverRequest>(handlers::handle_hover)?
.on::<req::PrepareRenameRequest>(handlers::handle_prepare_rename)?
.on::<req::Rename>(handlers::handle_rename)?
.on::<req::References>(handlers::handle_references)?
fn update_file_notifications_on_threadpool(
pool: &ThreadPool,
world: ServerWorld,
+ publish_decorations: bool,
sender: Sender<Task>,
subscriptions: Vec<FileId>,
) {
sender.send(Task::Notify(not));
}
}
- match handlers::publish_decorations(&world, file_id) {
- Err(e) => {
- if !is_canceled(&e) {
- error!("failed to compute decorations: {:?}", e);
+ if publish_decorations {
+ match handlers::publish_decorations(&world, file_id) {
+ Err(e) => {
+ if !is_canceled(&e) {
+ error!("failed to compute decorations: {:?}", e);
+ }
+ }
+ Ok(params) => {
+ let not = RawNotification::new::<req::PublishDecorations>(¶ms);
+ sender.send(Task::Notify(not))
}
- }
- Ok(params) => {
- let not = RawNotification::new::<req::PublishDecorations>(¶ms);
- sender.send(Task::Notify(not))
}
}
}
"test server",
128,
move |mut msg_receiver, mut msg_sender| {
- main_loop(true, path, &mut msg_receiver, &mut msg_sender).unwrap()
+ main_loop(true, path, true, &mut msg_receiver, &mut msg_sender).unwrap()
},
);
let res = Server {
repository = "https://github.com/rust-analyzer/rust-analyzer"
[dependencies]
+arrayvec = "0.4.7"
unicode-xid = "0.1.0"
itertools = "0.7.8"
drop_bomb = "0.1.4"
#![cfg_attr(rustfmt, rustfmt_skip)]
+use std::hash::{Hash, Hasher};
+
use crate::{
ast,
SyntaxNode, SyntaxNodeRef, AstNode,
};
// ArgList
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ArgListNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type ArgList<'a> = ArgListNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ArgListNode<R1>> for ArgListNode<R2> {
+ fn eq(&self, other: &ArgListNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ArgListNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ArgListNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for ArgList<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// ArrayExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ArrayExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type ArrayExpr<'a> = ArrayExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ArrayExprNode<R1>> for ArrayExprNode<R2> {
+ fn eq(&self, other: &ArrayExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ArrayExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ArrayExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for ArrayExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> ArrayExpr<'a> {}
// ArrayType
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ArrayTypeNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type ArrayType<'a> = ArrayTypeNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ArrayTypeNode<R1>> for ArrayTypeNode<R2> {
+ fn eq(&self, other: &ArrayTypeNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ArrayTypeNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ArrayTypeNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for ArrayType<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> ArrayType<'a> {}
// Attr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct AttrNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type Attr<'a> = AttrNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<AttrNode<R1>> for AttrNode<R2> {
+ fn eq(&self, other: &AttrNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for AttrNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for AttrNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for Attr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// BinExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct BinExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type BinExpr<'a> = BinExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<BinExprNode<R1>> for BinExprNode<R2> {
+ fn eq(&self, other: &BinExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for BinExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for BinExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for BinExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> BinExpr<'a> {}
// BindPat
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct BindPatNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type BindPat<'a> = BindPatNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<BindPatNode<R1>> for BindPatNode<R2> {
+ fn eq(&self, other: &BindPatNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for BindPatNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for BindPatNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for BindPat<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> BindPat<'a> {}
// Block
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct BlockNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type Block<'a> = BlockNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<BlockNode<R1>> for BlockNode<R2> {
+ fn eq(&self, other: &BlockNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for BlockNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for BlockNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for Block<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// BlockExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct BlockExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type BlockExpr<'a> = BlockExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<BlockExprNode<R1>> for BlockExprNode<R2> {
+ fn eq(&self, other: &BlockExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for BlockExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for BlockExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for BlockExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// BreakExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct BreakExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type BreakExpr<'a> = BreakExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<BreakExprNode<R1>> for BreakExprNode<R2> {
+ fn eq(&self, other: &BreakExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for BreakExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for BreakExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for BreakExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> BreakExpr<'a> {}
+// Byte
+#[derive(Debug, Clone, Copy,)]
+pub struct ByteNode<R: TreeRoot<RaTypes> = OwnedRoot> {
+ pub(crate) syntax: SyntaxNode<R>,
+}
+pub type Byte<'a> = ByteNode<RefRoot<'a>>;
+
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ByteNode<R1>> for ByteNode<R2> {
+ fn eq(&self, other: &ByteNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ByteNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ByteNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
+impl<'a> AstNode<'a> for Byte<'a> {
+ fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
+ match syntax.kind() {
+ BYTE => Some(Byte { syntax }),
+ _ => None,
+ }
+ }
+ fn syntax(self) -> SyntaxNodeRef<'a> { self.syntax }
+}
+
+impl<R: TreeRoot<RaTypes>> ByteNode<R> {
+ pub fn borrowed(&self) -> Byte {
+ ByteNode { syntax: self.syntax.borrowed() }
+ }
+ pub fn owned(&self) -> ByteNode {
+ ByteNode { syntax: self.syntax.owned() }
+ }
+}
+
+
+impl<'a> Byte<'a> {}
+
+// ByteString
+#[derive(Debug, Clone, Copy,)]
+pub struct ByteStringNode<R: TreeRoot<RaTypes> = OwnedRoot> {
+ pub(crate) syntax: SyntaxNode<R>,
+}
+pub type ByteString<'a> = ByteStringNode<RefRoot<'a>>;
+
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ByteStringNode<R1>> for ByteStringNode<R2> {
+ fn eq(&self, other: &ByteStringNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ByteStringNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ByteStringNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
+impl<'a> AstNode<'a> for ByteString<'a> {
+ fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
+ match syntax.kind() {
+ BYTE_STRING => Some(ByteString { syntax }),
+ _ => None,
+ }
+ }
+ fn syntax(self) -> SyntaxNodeRef<'a> { self.syntax }
+}
+
+impl<R: TreeRoot<RaTypes>> ByteStringNode<R> {
+ pub fn borrowed(&self) -> ByteString {
+ ByteStringNode { syntax: self.syntax.borrowed() }
+ }
+ pub fn owned(&self) -> ByteStringNode {
+ ByteStringNode { syntax: self.syntax.owned() }
+ }
+}
+
+
+impl<'a> ByteString<'a> {}
+
// CallExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct CallExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type CallExpr<'a> = CallExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<CallExprNode<R1>> for CallExprNode<R2> {
+ fn eq(&self, other: &CallExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for CallExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for CallExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for CallExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// CastExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct CastExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type CastExpr<'a> = CastExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<CastExprNode<R1>> for CastExprNode<R2> {
+ fn eq(&self, other: &CastExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for CastExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for CastExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for CastExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> CastExpr<'a> {}
// Char
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct CharNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type Char<'a> = CharNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<CharNode<R1>> for CharNode<R2> {
+ fn eq(&self, other: &CharNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for CharNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for CharNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for Char<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> Char<'a> {}
// Comment
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct CommentNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type Comment<'a> = CommentNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<CommentNode<R1>> for CommentNode<R2> {
+ fn eq(&self, other: &CommentNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for CommentNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for CommentNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for Comment<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> Comment<'a> {}
// Condition
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ConditionNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type Condition<'a> = ConditionNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ConditionNode<R1>> for ConditionNode<R2> {
+ fn eq(&self, other: &ConditionNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ConditionNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ConditionNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for Condition<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// ConstDef
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ConstDefNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type ConstDef<'a> = ConstDefNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ConstDefNode<R1>> for ConstDefNode<R2> {
+ fn eq(&self, other: &ConstDefNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ConstDefNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ConstDefNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for ConstDef<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> ast::NameOwner<'a> for ConstDef<'a> {}
impl<'a> ast::TypeParamsOwner<'a> for ConstDef<'a> {}
impl<'a> ast::AttrsOwner<'a> for ConstDef<'a> {}
+impl<'a> ast::DocCommentsOwner<'a> for ConstDef<'a> {}
impl<'a> ConstDef<'a> {}
// ContinueExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ContinueExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type ContinueExpr<'a> = ContinueExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ContinueExprNode<R1>> for ContinueExprNode<R2> {
+ fn eq(&self, other: &ContinueExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ContinueExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ContinueExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for ContinueExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> ContinueExpr<'a> {}
// DynTraitType
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct DynTraitTypeNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type DynTraitType<'a> = DynTraitTypeNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<DynTraitTypeNode<R1>> for DynTraitTypeNode<R2> {
+ fn eq(&self, other: &DynTraitTypeNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for DynTraitTypeNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for DynTraitTypeNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for DynTraitType<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> DynTraitType<'a> {}
// EnumDef
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct EnumDefNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type EnumDef<'a> = EnumDefNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<EnumDefNode<R1>> for EnumDefNode<R2> {
+ fn eq(&self, other: &EnumDefNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for EnumDefNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for EnumDefNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for EnumDef<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> ast::NameOwner<'a> for EnumDef<'a> {}
impl<'a> ast::TypeParamsOwner<'a> for EnumDef<'a> {}
impl<'a> ast::AttrsOwner<'a> for EnumDef<'a> {}
+impl<'a> ast::DocCommentsOwner<'a> for EnumDef<'a> {}
impl<'a> EnumDef<'a> {}
// Expr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Expr<'a> {
TupleExpr(TupleExpr<'a>),
ArrayExpr(ArrayExpr<'a>),
impl<'a> Expr<'a> {}
// ExprStmt
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ExprStmtNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type ExprStmt<'a> = ExprStmtNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ExprStmtNode<R1>> for ExprStmtNode<R2> {
+ fn eq(&self, other: &ExprStmtNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ExprStmtNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ExprStmtNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for ExprStmt<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// ExternCrateItem
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ExternCrateItemNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type ExternCrateItem<'a> = ExternCrateItemNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ExternCrateItemNode<R1>> for ExternCrateItemNode<R2> {
+ fn eq(&self, other: &ExternCrateItemNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ExternCrateItemNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ExternCrateItemNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for ExternCrateItem<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> ExternCrateItem<'a> {}
// FieldExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct FieldExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type FieldExpr<'a> = FieldExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<FieldExprNode<R1>> for FieldExprNode<R2> {
+ fn eq(&self, other: &FieldExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for FieldExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for FieldExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for FieldExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> FieldExpr<'a> {}
// FieldPatList
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct FieldPatListNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type FieldPatList<'a> = FieldPatListNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<FieldPatListNode<R1>> for FieldPatListNode<R2> {
+ fn eq(&self, other: &FieldPatListNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for FieldPatListNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for FieldPatListNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for FieldPatList<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> FieldPatList<'a> {}
// FnDef
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct FnDefNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type FnDef<'a> = FnDefNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<FnDefNode<R1>> for FnDefNode<R2> {
+ fn eq(&self, other: &FnDefNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for FnDefNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for FnDefNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for FnDef<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// FnPointerType
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct FnPointerTypeNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type FnPointerType<'a> = FnPointerTypeNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<FnPointerTypeNode<R1>> for FnPointerTypeNode<R2> {
+ fn eq(&self, other: &FnPointerTypeNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for FnPointerTypeNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for FnPointerTypeNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for FnPointerType<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> FnPointerType<'a> {}
// ForExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ForExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type ForExpr<'a> = ForExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ForExprNode<R1>> for ForExprNode<R2> {
+ fn eq(&self, other: &ForExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ForExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ForExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for ForExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// ForType
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ForTypeNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type ForType<'a> = ForTypeNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ForTypeNode<R1>> for ForTypeNode<R2> {
+ fn eq(&self, other: &ForTypeNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ForTypeNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ForTypeNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for ForType<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> ForType<'a> {}
// IfExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct IfExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type IfExpr<'a> = IfExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<IfExprNode<R1>> for IfExprNode<R2> {
+ fn eq(&self, other: &IfExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for IfExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for IfExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for IfExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// ImplItem
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ImplItemNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type ImplItem<'a> = ImplItemNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ImplItemNode<R1>> for ImplItemNode<R2> {
+ fn eq(&self, other: &ImplItemNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ImplItemNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ImplItemNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for ImplItem<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> ImplItem<'a> {}
// ImplTraitType
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ImplTraitTypeNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type ImplTraitType<'a> = ImplTraitTypeNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ImplTraitTypeNode<R1>> for ImplTraitTypeNode<R2> {
+ fn eq(&self, other: &ImplTraitTypeNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ImplTraitTypeNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ImplTraitTypeNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for ImplTraitType<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> ImplTraitType<'a> {}
// IndexExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct IndexExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type IndexExpr<'a> = IndexExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<IndexExprNode<R1>> for IndexExprNode<R2> {
+ fn eq(&self, other: &IndexExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for IndexExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for IndexExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for IndexExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> IndexExpr<'a> {}
// ItemList
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ItemListNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type ItemList<'a> = ItemListNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ItemListNode<R1>> for ItemListNode<R2> {
+ fn eq(&self, other: &ItemListNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ItemListNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ItemListNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for ItemList<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> ItemList<'a> {}
// Label
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct LabelNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type Label<'a> = LabelNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<LabelNode<R1>> for LabelNode<R2> {
+ fn eq(&self, other: &LabelNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for LabelNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for LabelNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for Label<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> Label<'a> {}
// LambdaExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct LambdaExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type LambdaExpr<'a> = LambdaExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<LambdaExprNode<R1>> for LambdaExprNode<R2> {
+ fn eq(&self, other: &LambdaExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for LambdaExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for LambdaExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for LambdaExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// LetStmt
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct LetStmtNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type LetStmt<'a> = LetStmtNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<LetStmtNode<R1>> for LetStmtNode<R2> {
+ fn eq(&self, other: &LetStmtNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for LetStmtNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for LetStmtNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for LetStmt<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// Lifetime
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct LifetimeNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type Lifetime<'a> = LifetimeNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<LifetimeNode<R1>> for LifetimeNode<R2> {
+ fn eq(&self, other: &LifetimeNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for LifetimeNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for LifetimeNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for Lifetime<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> Lifetime<'a> {}
// LifetimeParam
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct LifetimeParamNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type LifetimeParam<'a> = LifetimeParamNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<LifetimeParamNode<R1>> for LifetimeParamNode<R2> {
+ fn eq(&self, other: &LifetimeParamNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for LifetimeParamNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for LifetimeParamNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for LifetimeParam<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// Literal
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct LiteralNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type Literal<'a> = LiteralNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<LiteralNode<R1>> for LiteralNode<R2> {
+ fn eq(&self, other: &LiteralNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for LiteralNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for LiteralNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for Literal<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> Literal<'a> {}
// LoopExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct LoopExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type LoopExpr<'a> = LoopExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<LoopExprNode<R1>> for LoopExprNode<R2> {
+ fn eq(&self, other: &LoopExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for LoopExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for LoopExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for LoopExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> LoopExpr<'a> {}
// MatchArm
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct MatchArmNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type MatchArm<'a> = MatchArmNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<MatchArmNode<R1>> for MatchArmNode<R2> {
+ fn eq(&self, other: &MatchArmNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for MatchArmNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for MatchArmNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for MatchArm<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// MatchArmList
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct MatchArmListNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type MatchArmList<'a> = MatchArmListNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<MatchArmListNode<R1>> for MatchArmListNode<R2> {
+ fn eq(&self, other: &MatchArmListNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for MatchArmListNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for MatchArmListNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for MatchArmList<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// MatchExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct MatchExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type MatchExpr<'a> = MatchExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<MatchExprNode<R1>> for MatchExprNode<R2> {
+ fn eq(&self, other: &MatchExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for MatchExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for MatchExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for MatchExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// MatchGuard
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct MatchGuardNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type MatchGuard<'a> = MatchGuardNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<MatchGuardNode<R1>> for MatchGuardNode<R2> {
+ fn eq(&self, other: &MatchGuardNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for MatchGuardNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for MatchGuardNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for MatchGuard<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> MatchGuard<'a> {}
// MethodCallExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct MethodCallExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type MethodCallExpr<'a> = MethodCallExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<MethodCallExprNode<R1>> for MethodCallExprNode<R2> {
+ fn eq(&self, other: &MethodCallExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for MethodCallExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for MethodCallExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for MethodCallExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// Module
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ModuleNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type Module<'a> = ModuleNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ModuleNode<R1>> for ModuleNode<R2> {
+ fn eq(&self, other: &ModuleNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ModuleNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ModuleNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for Module<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> ast::NameOwner<'a> for Module<'a> {}
impl<'a> ast::AttrsOwner<'a> for Module<'a> {}
+impl<'a> ast::DocCommentsOwner<'a> for Module<'a> {}
impl<'a> Module<'a> {
pub fn item_list(self) -> Option<ItemList<'a>> {
super::child_opt(self)
}
// ModuleItem
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ModuleItem<'a> {
StructDef(StructDef<'a>),
EnumDef(EnumDef<'a>),
impl<'a> ModuleItem<'a> {}
// Name
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct NameNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type Name<'a> = NameNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<NameNode<R1>> for NameNode<R2> {
+ fn eq(&self, other: &NameNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for NameNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for NameNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for Name<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> Name<'a> {}
// NameRef
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct NameRefNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type NameRef<'a> = NameRefNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<NameRefNode<R1>> for NameRefNode<R2> {
+ fn eq(&self, other: &NameRefNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for NameRefNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for NameRefNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for NameRef<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> NameRef<'a> {}
// NamedField
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct NamedFieldNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type NamedField<'a> = NamedFieldNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<NamedFieldNode<R1>> for NamedFieldNode<R2> {
+ fn eq(&self, other: &NamedFieldNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for NamedFieldNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for NamedFieldNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for NamedField<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> NamedField<'a> {}
// NamedFieldDef
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct NamedFieldDefNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type NamedFieldDef<'a> = NamedFieldDefNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<NamedFieldDefNode<R1>> for NamedFieldDefNode<R2> {
+ fn eq(&self, other: &NamedFieldDefNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for NamedFieldDefNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for NamedFieldDefNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for NamedFieldDef<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> NamedFieldDef<'a> {}
// NamedFieldList
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct NamedFieldListNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type NamedFieldList<'a> = NamedFieldListNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<NamedFieldListNode<R1>> for NamedFieldListNode<R2> {
+ fn eq(&self, other: &NamedFieldListNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for NamedFieldListNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for NamedFieldListNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for NamedFieldList<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> NamedFieldList<'a> {}
// NeverType
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct NeverTypeNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type NeverType<'a> = NeverTypeNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<NeverTypeNode<R1>> for NeverTypeNode<R2> {
+ fn eq(&self, other: &NeverTypeNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for NeverTypeNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for NeverTypeNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for NeverType<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> NeverType<'a> {}
// NominalDef
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum NominalDef<'a> {
StructDef(StructDef<'a>),
EnumDef(EnumDef<'a>),
impl<'a> NominalDef<'a> {}
// Param
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ParamNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type Param<'a> = ParamNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ParamNode<R1>> for ParamNode<R2> {
+ fn eq(&self, other: &ParamNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ParamNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ParamNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for Param<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// ParamList
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ParamListNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type ParamList<'a> = ParamListNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ParamListNode<R1>> for ParamListNode<R2> {
+ fn eq(&self, other: &ParamListNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ParamListNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ParamListNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for ParamList<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// ParenExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ParenExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type ParenExpr<'a> = ParenExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ParenExprNode<R1>> for ParenExprNode<R2> {
+ fn eq(&self, other: &ParenExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ParenExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ParenExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for ParenExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> ParenExpr<'a> {}
// ParenType
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ParenTypeNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type ParenType<'a> = ParenTypeNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ParenTypeNode<R1>> for ParenTypeNode<R2> {
+ fn eq(&self, other: &ParenTypeNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ParenTypeNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ParenTypeNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for ParenType<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> ParenType<'a> {}
// Pat
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Pat<'a> {
RefPat(RefPat<'a>),
BindPat(BindPat<'a>),
impl<'a> Pat<'a> {}
// Path
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct PathNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type Path<'a> = PathNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<PathNode<R1>> for PathNode<R2> {
+ fn eq(&self, other: &PathNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for PathNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for PathNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for Path<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// PathExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct PathExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type PathExpr<'a> = PathExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<PathExprNode<R1>> for PathExprNode<R2> {
+ fn eq(&self, other: &PathExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for PathExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for PathExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for PathExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// PathPat
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct PathPatNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type PathPat<'a> = PathPatNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<PathPatNode<R1>> for PathPatNode<R2> {
+ fn eq(&self, other: &PathPatNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for PathPatNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for PathPatNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for PathPat<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> PathPat<'a> {}
// PathSegment
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct PathSegmentNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type PathSegment<'a> = PathSegmentNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<PathSegmentNode<R1>> for PathSegmentNode<R2> {
+ fn eq(&self, other: &PathSegmentNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for PathSegmentNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for PathSegmentNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for PathSegment<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// PathType
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct PathTypeNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type PathType<'a> = PathTypeNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<PathTypeNode<R1>> for PathTypeNode<R2> {
+ fn eq(&self, other: &PathTypeNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for PathTypeNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for PathTypeNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for PathType<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> PathType<'a> {}
// PlaceholderPat
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct PlaceholderPatNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type PlaceholderPat<'a> = PlaceholderPatNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<PlaceholderPatNode<R1>> for PlaceholderPatNode<R2> {
+ fn eq(&self, other: &PlaceholderPatNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for PlaceholderPatNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for PlaceholderPatNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for PlaceholderPat<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> PlaceholderPat<'a> {}
// PlaceholderType
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct PlaceholderTypeNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type PlaceholderType<'a> = PlaceholderTypeNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<PlaceholderTypeNode<R1>> for PlaceholderTypeNode<R2> {
+ fn eq(&self, other: &PlaceholderTypeNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for PlaceholderTypeNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for PlaceholderTypeNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for PlaceholderType<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> PlaceholderType<'a> {}
// PointerType
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct PointerTypeNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type PointerType<'a> = PointerTypeNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<PointerTypeNode<R1>> for PointerTypeNode<R2> {
+ fn eq(&self, other: &PointerTypeNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for PointerTypeNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for PointerTypeNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for PointerType<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> PointerType<'a> {}
// PrefixExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct PrefixExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type PrefixExpr<'a> = PrefixExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<PrefixExprNode<R1>> for PrefixExprNode<R2> {
+ fn eq(&self, other: &PrefixExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for PrefixExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for PrefixExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for PrefixExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> PrefixExpr<'a> {}
// RangeExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct RangeExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type RangeExpr<'a> = RangeExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<RangeExprNode<R1>> for RangeExprNode<R2> {
+ fn eq(&self, other: &RangeExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for RangeExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for RangeExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for RangeExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> RangeExpr<'a> {}
// RangePat
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct RangePatNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type RangePat<'a> = RangePatNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<RangePatNode<R1>> for RangePatNode<R2> {
+ fn eq(&self, other: &RangePatNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for RangePatNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for RangePatNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for RangePat<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> RangePat<'a> {}
// RefExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct RefExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type RefExpr<'a> = RefExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<RefExprNode<R1>> for RefExprNode<R2> {
+ fn eq(&self, other: &RefExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for RefExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for RefExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for RefExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> RefExpr<'a> {}
// RefPat
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct RefPatNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type RefPat<'a> = RefPatNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<RefPatNode<R1>> for RefPatNode<R2> {
+ fn eq(&self, other: &RefPatNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for RefPatNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for RefPatNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for RefPat<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> RefPat<'a> {}
// ReferenceType
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ReferenceTypeNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type ReferenceType<'a> = ReferenceTypeNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ReferenceTypeNode<R1>> for ReferenceTypeNode<R2> {
+ fn eq(&self, other: &ReferenceTypeNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ReferenceTypeNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ReferenceTypeNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for ReferenceType<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> ReferenceType<'a> {}
// RetType
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct RetTypeNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type RetType<'a> = RetTypeNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<RetTypeNode<R1>> for RetTypeNode<R2> {
+ fn eq(&self, other: &RetTypeNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for RetTypeNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for RetTypeNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for RetType<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> RetType<'a> {}
// ReturnExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct ReturnExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type ReturnExpr<'a> = ReturnExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<ReturnExprNode<R1>> for ReturnExprNode<R2> {
+ fn eq(&self, other: &ReturnExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for ReturnExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for ReturnExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for ReturnExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> ReturnExpr<'a> {}
-// Root
-#[derive(Debug, Clone, Copy)]
-pub struct RootNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
-}
-pub type Root<'a> = RootNode<RefRoot<'a>>;
-
-impl<'a> AstNode<'a> for Root<'a> {
- fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
- match syntax.kind() {
- ROOT => Some(Root { syntax }),
- _ => None,
- }
- }
- fn syntax(self) -> SyntaxNodeRef<'a> { self.syntax }
-}
-
-impl<R: TreeRoot<RaTypes>> RootNode<R> {
- pub fn borrowed(&self) -> Root {
- RootNode { syntax: self.syntax.borrowed() }
- }
- pub fn owned(&self) -> RootNode {
- RootNode { syntax: self.syntax.owned() }
- }
-}
-
-
-impl<'a> ast::ModuleItemOwner<'a> for Root<'a> {}
-impl<'a> ast::FnDefOwner<'a> for Root<'a> {}
-impl<'a> Root<'a> {
- pub fn modules(self) -> impl Iterator<Item = Module<'a>> + 'a {
- super::children(self)
- }
-}
-
// SelfParam
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct SelfParamNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type SelfParam<'a> = SelfParamNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<SelfParamNode<R1>> for SelfParamNode<R2> {
+ fn eq(&self, other: &SelfParamNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for SelfParamNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for SelfParamNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for SelfParam<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> SelfParam<'a> {}
// SlicePat
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct SlicePatNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type SlicePat<'a> = SlicePatNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<SlicePatNode<R1>> for SlicePatNode<R2> {
+ fn eq(&self, other: &SlicePatNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for SlicePatNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for SlicePatNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for SlicePat<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> SlicePat<'a> {}
// SliceType
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct SliceTypeNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type SliceType<'a> = SliceTypeNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<SliceTypeNode<R1>> for SliceTypeNode<R2> {
+ fn eq(&self, other: &SliceTypeNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for SliceTypeNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for SliceTypeNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for SliceType<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> SliceType<'a> {}
+// SourceFile
+#[derive(Debug, Clone, Copy,)]
+pub struct SourceFileNode<R: TreeRoot<RaTypes> = OwnedRoot> {
+ pub(crate) syntax: SyntaxNode<R>,
+}
+pub type SourceFile<'a> = SourceFileNode<RefRoot<'a>>;
+
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<SourceFileNode<R1>> for SourceFileNode<R2> {
+ fn eq(&self, other: &SourceFileNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for SourceFileNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for SourceFileNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
+impl<'a> AstNode<'a> for SourceFile<'a> {
+ fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
+ match syntax.kind() {
+ SOURCE_FILE => Some(SourceFile { syntax }),
+ _ => None,
+ }
+ }
+ fn syntax(self) -> SyntaxNodeRef<'a> { self.syntax }
+}
+
+impl<R: TreeRoot<RaTypes>> SourceFileNode<R> {
+ pub fn borrowed(&self) -> SourceFile {
+ SourceFileNode { syntax: self.syntax.borrowed() }
+ }
+ pub fn owned(&self) -> SourceFileNode {
+ SourceFileNode { syntax: self.syntax.owned() }
+ }
+}
+
+
+impl<'a> ast::ModuleItemOwner<'a> for SourceFile<'a> {}
+impl<'a> ast::FnDefOwner<'a> for SourceFile<'a> {}
+impl<'a> SourceFile<'a> {
+ pub fn modules(self) -> impl Iterator<Item = Module<'a>> + 'a {
+ super::children(self)
+ }
+}
+
// StaticDef
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct StaticDefNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type StaticDef<'a> = StaticDefNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<StaticDefNode<R1>> for StaticDefNode<R2> {
+ fn eq(&self, other: &StaticDefNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for StaticDefNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for StaticDefNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for StaticDef<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> ast::NameOwner<'a> for StaticDef<'a> {}
impl<'a> ast::TypeParamsOwner<'a> for StaticDef<'a> {}
impl<'a> ast::AttrsOwner<'a> for StaticDef<'a> {}
+impl<'a> ast::DocCommentsOwner<'a> for StaticDef<'a> {}
impl<'a> StaticDef<'a> {}
// Stmt
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Stmt<'a> {
ExprStmt(ExprStmt<'a>),
LetStmt(LetStmt<'a>),
impl<'a> Stmt<'a> {}
+// String
+#[derive(Debug, Clone, Copy,)]
+pub struct StringNode<R: TreeRoot<RaTypes> = OwnedRoot> {
+ pub(crate) syntax: SyntaxNode<R>,
+}
+pub type String<'a> = StringNode<RefRoot<'a>>;
+
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<StringNode<R1>> for StringNode<R2> {
+ fn eq(&self, other: &StringNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for StringNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for StringNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
+impl<'a> AstNode<'a> for String<'a> {
+ fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
+ match syntax.kind() {
+ STRING => Some(String { syntax }),
+ _ => None,
+ }
+ }
+ fn syntax(self) -> SyntaxNodeRef<'a> { self.syntax }
+}
+
+impl<R: TreeRoot<RaTypes>> StringNode<R> {
+ pub fn borrowed(&self) -> String {
+ StringNode { syntax: self.syntax.borrowed() }
+ }
+ pub fn owned(&self) -> StringNode {
+ StringNode { syntax: self.syntax.owned() }
+ }
+}
+
+
+impl<'a> String<'a> {}
+
// StructDef
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct StructDefNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type StructDef<'a> = StructDefNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<StructDefNode<R1>> for StructDefNode<R2> {
+ fn eq(&self, other: &StructDefNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for StructDefNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for StructDefNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for StructDef<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> ast::NameOwner<'a> for StructDef<'a> {}
impl<'a> ast::TypeParamsOwner<'a> for StructDef<'a> {}
impl<'a> ast::AttrsOwner<'a> for StructDef<'a> {}
+impl<'a> ast::DocCommentsOwner<'a> for StructDef<'a> {}
impl<'a> StructDef<'a> {
pub fn fields(self) -> impl Iterator<Item = NamedFieldDef<'a>> + 'a {
super::children(self)
}
// StructLit
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct StructLitNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type StructLit<'a> = StructLitNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<StructLitNode<R1>> for StructLitNode<R2> {
+ fn eq(&self, other: &StructLitNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for StructLitNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for StructLitNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for StructLit<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> StructLit<'a> {}
// StructPat
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct StructPatNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type StructPat<'a> = StructPatNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<StructPatNode<R1>> for StructPatNode<R2> {
+ fn eq(&self, other: &StructPatNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for StructPatNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for StructPatNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for StructPat<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> StructPat<'a> {}
// TokenTree
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct TokenTreeNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type TokenTree<'a> = TokenTreeNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<TokenTreeNode<R1>> for TokenTreeNode<R2> {
+ fn eq(&self, other: &TokenTreeNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for TokenTreeNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for TokenTreeNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for TokenTree<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> TokenTree<'a> {}
// TraitDef
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct TraitDefNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type TraitDef<'a> = TraitDefNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<TraitDefNode<R1>> for TraitDefNode<R2> {
+ fn eq(&self, other: &TraitDefNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for TraitDefNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for TraitDefNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for TraitDef<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> ast::NameOwner<'a> for TraitDef<'a> {}
impl<'a> ast::AttrsOwner<'a> for TraitDef<'a> {}
+impl<'a> ast::DocCommentsOwner<'a> for TraitDef<'a> {}
impl<'a> TraitDef<'a> {}
// TryExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct TryExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type TryExpr<'a> = TryExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<TryExprNode<R1>> for TryExprNode<R2> {
+ fn eq(&self, other: &TryExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for TryExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for TryExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for TryExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> TryExpr<'a> {}
// TupleExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct TupleExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type TupleExpr<'a> = TupleExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<TupleExprNode<R1>> for TupleExprNode<R2> {
+ fn eq(&self, other: &TupleExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for TupleExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for TupleExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for TupleExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> TupleExpr<'a> {}
// TuplePat
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct TuplePatNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type TuplePat<'a> = TuplePatNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<TuplePatNode<R1>> for TuplePatNode<R2> {
+ fn eq(&self, other: &TuplePatNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for TuplePatNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for TuplePatNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for TuplePat<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> TuplePat<'a> {}
// TupleStructPat
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct TupleStructPatNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type TupleStructPat<'a> = TupleStructPatNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<TupleStructPatNode<R1>> for TupleStructPatNode<R2> {
+ fn eq(&self, other: &TupleStructPatNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for TupleStructPatNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for TupleStructPatNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for TupleStructPat<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> TupleStructPat<'a> {}
// TupleType
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct TupleTypeNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type TupleType<'a> = TupleTypeNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<TupleTypeNode<R1>> for TupleTypeNode<R2> {
+ fn eq(&self, other: &TupleTypeNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for TupleTypeNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for TupleTypeNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for TupleType<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> TupleType<'a> {}
// TypeDef
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct TypeDefNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type TypeDef<'a> = TypeDefNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<TypeDefNode<R1>> for TypeDefNode<R2> {
+ fn eq(&self, other: &TypeDefNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for TypeDefNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for TypeDefNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for TypeDef<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> ast::NameOwner<'a> for TypeDef<'a> {}
impl<'a> ast::TypeParamsOwner<'a> for TypeDef<'a> {}
impl<'a> ast::AttrsOwner<'a> for TypeDef<'a> {}
+impl<'a> ast::DocCommentsOwner<'a> for TypeDef<'a> {}
impl<'a> TypeDef<'a> {}
// TypeParam
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct TypeParamNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type TypeParam<'a> = TypeParamNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<TypeParamNode<R1>> for TypeParamNode<R2> {
+ fn eq(&self, other: &TypeParamNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for TypeParamNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for TypeParamNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for TypeParam<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> TypeParam<'a> {}
// TypeParamList
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct TypeParamListNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type TypeParamList<'a> = TypeParamListNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<TypeParamListNode<R1>> for TypeParamListNode<R2> {
+ fn eq(&self, other: &TypeParamListNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for TypeParamListNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for TypeParamListNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for TypeParamList<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// TypeRef
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TypeRef<'a> {
ParenType(ParenType<'a>),
TupleType(TupleType<'a>),
impl<'a> TypeRef<'a> {}
// UseItem
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct UseItemNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type UseItem<'a> = UseItemNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<UseItemNode<R1>> for UseItemNode<R2> {
+ fn eq(&self, other: &UseItemNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for UseItemNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for UseItemNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for UseItem<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// UseTree
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct UseTreeNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type UseTree<'a> = UseTreeNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<UseTreeNode<R1>> for UseTreeNode<R2> {
+ fn eq(&self, other: &UseTreeNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for UseTreeNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for UseTreeNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for UseTree<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// UseTreeList
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct UseTreeListNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type UseTreeList<'a> = UseTreeListNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<UseTreeListNode<R1>> for UseTreeListNode<R2> {
+ fn eq(&self, other: &UseTreeListNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for UseTreeListNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for UseTreeListNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for UseTreeList<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// WhereClause
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct WhereClauseNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type WhereClause<'a> = WhereClauseNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<WhereClauseNode<R1>> for WhereClauseNode<R2> {
+ fn eq(&self, other: &WhereClauseNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for WhereClauseNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for WhereClauseNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for WhereClause<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
impl<'a> WhereClause<'a> {}
// WhileExpr
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct WhileExprNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type WhileExpr<'a> = WhileExprNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<WhileExprNode<R1>> for WhileExprNode<R2> {
+ fn eq(&self, other: &WhileExprNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for WhileExprNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for WhileExprNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for WhileExpr<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
}
// Whitespace
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct WhitespaceNode<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type Whitespace<'a> = WhitespaceNode<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<WhitespaceNode<R1>> for WhitespaceNode<R2> {
+ fn eq(&self, other: &WhitespaceNode<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for WhitespaceNode<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for WhitespaceNode<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for Whitespace<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
#![cfg_attr(rustfmt, rustfmt_skip)]
+use std::hash::{Hash, Hasher};
+
use crate::{
ast,
SyntaxNode, SyntaxNodeRef, AstNode,
// {{ node }}
{%- if methods.enum %}
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum {{ node }}<'a> {
{%- for kind in methods.enum %}
{{ kind }}({{ kind }}<'a>),
}
}
{% else %}
-#[derive(Debug, Clone, Copy)]
+#[derive(Debug, Clone, Copy,)]
pub struct {{ node }}Node<R: TreeRoot<RaTypes> = OwnedRoot> {
- syntax: SyntaxNode<R>,
+ pub(crate) syntax: SyntaxNode<R>,
}
pub type {{ node }}<'a> = {{ node }}Node<RefRoot<'a>>;
+impl<R1: TreeRoot<RaTypes>, R2: TreeRoot<RaTypes>> PartialEq<{{node}}Node<R1>> for {{node}}Node<R2> {
+ fn eq(&self, other: &{{node}}Node<R1>) -> bool { self.syntax == other.syntax }
+}
+impl<R: TreeRoot<RaTypes>> Eq for {{node}}Node<R> {}
+impl<R: TreeRoot<RaTypes>> Hash for {{node}}Node<R> {
+ fn hash<H: Hasher>(&self, state: &mut H) { self.syntax.hash(state) }
+}
+
impl<'a> AstNode<'a> for {{ node }}<'a> {
fn cast(syntax: SyntaxNodeRef<'a>) -> Option<Self> {
match syntax.kind() {
mod generated;
use std::marker::PhantomData;
+use std::string::String as RustString;
use itertools::Itertools;
/// Returns the textual content of a doc comment block as a single string.
/// That is, strips leading `///` and joins lines
- fn doc_comment_text(self) -> String {
+ fn doc_comment_text(self) -> RustString {
self.doc_comments()
.map(|comment| {
let prefix = comment.prefix();
}
}
+impl<'a> Byte<'a> {
+ pub fn text(&self) -> &SmolStr {
+ &self.syntax().leaf_text().unwrap()
+ }
+}
+
+impl<'a> ByteString<'a> {
+ pub fn text(&self) -> &SmolStr {
+ &self.syntax().leaf_text().unwrap()
+ }
+}
+
+impl<'a> String<'a> {
+ pub fn text(&self) -> &SmolStr {
+ &self.syntax().leaf_text().unwrap()
+ }
+}
+
impl<'a> Comment<'a> {
pub fn text(&self) -> &SmolStr {
self.syntax().leaf_text().unwrap()
}
}
+impl<'a> UseTreeList<'a> {
+ pub fn parent_use_tree(self) -> UseTree<'a> {
+ self.syntax()
+ .parent()
+ .and_then(UseTree::cast)
+ .expect("UseTreeLists are always nested in UseTrees")
+ }
+}
+
fn child_opt<'a, P: AstNode<'a>, C: AstNode<'a>>(parent: P) -> Option<C> {
children(parent).next()
}
"SHEBANG",
],
nodes: [
- "ROOT",
+ "SOURCE_FILE",
"STRUCT_DEF",
"ENUM_DEF",
"ARG_LIST",
],
ast: {
- "Root": (
+ "SourceFile": (
traits: [ "ModuleItemOwner", "FnDefOwner" ],
collections: [
["modules", "Module"],
"NameOwner",
"TypeParamsOwner",
"AttrsOwner",
+ "DocCommentsOwner"
],
collections: [
["fields", "NamedFieldDef"]
"NameOwner",
"TypeParamsOwner",
"AttrsOwner",
+ "DocCommentsOwner"
] ),
- "TraitDef": ( traits: ["NameOwner", "AttrsOwner"] ),
+ "TraitDef": ( traits: ["NameOwner", "AttrsOwner", "DocCommentsOwner"] ),
"Module": (
- traits: ["NameOwner", "AttrsOwner" ],
+ traits: ["NameOwner", "AttrsOwner", "DocCommentsOwner" ],
options: [ "ItemList" ]
),
"ItemList": (
"NameOwner",
"TypeParamsOwner",
"AttrsOwner",
+ "DocCommentsOwner"
] ),
"StaticDef": ( traits: [
"NameOwner",
"TypeParamsOwner",
"AttrsOwner",
+ "DocCommentsOwner"
] ),
"TypeDef": ( traits: [
"NameOwner",
"TypeParamsOwner",
"AttrsOwner",
+ "DocCommentsOwner"
] ),
"ImplItem": (),
"PrefixExpr": (),
"RangeExpr": (),
"BinExpr": (),
+ "String": (),
+ "Byte": (),
+ "ByteString": (),
"Char": (),
"Literal": (),
let m = p.start();
p.eat(SHEBANG);
items::mod_contents(p, false);
- m.complete(p, ROOT);
+ m.complete(p, SOURCE_FILE);
}
#[derive(Clone, Copy, PartialEq, Eq)]
/// Gets the nth character from the current.
    /// For example, 0 will return the current character, 1 will return the next, etc.
pub fn nth(&self, n: u32) -> Option<char> {
- let mut chars = self.chars().peekable();
- chars.by_ref().nth(n as usize)
+ self.chars().nth(n as usize)
}
/// Checks whether the current character is `c`.
#![allow(missing_docs)]
//#![warn(unreachable_pub)] // rust-lang/rust#47816
+extern crate arrayvec;
extern crate drop_bomb;
extern crate itertools;
extern crate parking_lot;
use crate::yellow::GreenNode;
-/// File represents a parse tree for a single Rust file.
-#[derive(Clone, Debug, Hash, PartialEq, Eq)]
-pub struct File {
- root: SyntaxNode,
-}
+/// `SourceFileNode` represents a parse tree for a single Rust file.
+pub use crate::ast::SourceFileNode;
-impl File {
- fn new(green: GreenNode, errors: Vec<SyntaxError>) -> File {
+impl SourceFileNode {
+ fn new(green: GreenNode, errors: Vec<SyntaxError>) -> SourceFileNode {
let root = SyntaxNode::new(green, errors);
if cfg!(debug_assertions) {
utils::validate_block_structure(root.borrowed());
}
- File { root }
+ assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
+ ast::SourceFileNode { syntax: root }
}
- pub fn parse(text: &str) -> File {
+ pub fn parse(text: &str) -> SourceFileNode {
let tokens = tokenize(&text);
let (green, errors) =
parser_impl::parse_with(yellow::GreenBuilder::new(), text, &tokens, grammar::root);
- File::new(green, errors)
+ SourceFileNode::new(green, errors)
}
- pub fn reparse(&self, edit: &AtomEdit) -> File {
+ pub fn reparse(&self, edit: &AtomEdit) -> SourceFileNode {
self.incremental_reparse(edit)
.unwrap_or_else(|| self.full_reparse(edit))
}
- pub fn incremental_reparse(&self, edit: &AtomEdit) -> Option<File> {
+ pub fn incremental_reparse(&self, edit: &AtomEdit) -> Option<SourceFileNode> {
reparsing::incremental_reparse(self.syntax(), edit, self.errors())
- .map(|(green_node, errors)| File::new(green_node, errors))
+ .map(|(green_node, errors)| SourceFileNode::new(green_node, errors))
}
- fn full_reparse(&self, edit: &AtomEdit) -> File {
+ fn full_reparse(&self, edit: &AtomEdit) -> SourceFileNode {
let text =
text_utils::replace_range(self.syntax().text().to_string(), edit.delete, &edit.insert);
- File::parse(&text)
+ SourceFileNode::parse(&text)
}
/// Typed AST representation of the parse tree.
- pub fn ast(&self) -> ast::Root {
- ast::Root::cast(self.syntax()).unwrap()
+ pub fn ast(&self) -> ast::SourceFile {
+ self.borrowed()
}
/// Untyped homogeneous representation of the parse tree.
pub fn syntax(&self) -> SyntaxNodeRef {
- self.root.borrowed()
+ self.syntax.borrowed()
}
pub fn errors(&self) -> Vec<SyntaxError> {
- let mut errors = self.root.root_data().clone();
+ let mut errors = self.syntax.root_data().clone();
errors.extend(validation::validate(self));
errors
}
}
fn start(&mut self, kind: SyntaxKind) {
- if kind == ROOT {
+ if kind == SOURCE_FILE {
self.sink.start_internal(kind);
return;
}
#[cfg(test)]
mod tests {
use super::{
- super::{test_utils::extract_range, text_utils::replace_range, utils::dump_tree, File},
+ super::{test_utils::extract_range, text_utils::replace_range, utils::dump_tree, SourceFileNode},
reparse_block, reparse_leaf, AtomEdit, GreenNode, SyntaxError, SyntaxNodeRef,
};
let (range, before) = extract_range(before);
let after = replace_range(before.clone(), range, replace_with);
- let fully_reparsed = File::parse(&after);
+ let fully_reparsed = SourceFileNode::parse(&after);
let incrementally_reparsed = {
- let f = File::parse(&before);
+ let f = SourceFileNode::parse(&before);
let edit = AtomEdit {
delete: range,
insert: replace_with.to_string(),
reparser(f.syntax(), &edit).expect("cannot incrementally reparse");
let green_root = node.replace_with(green);
let errors = super::merge_errors(f.errors(), new_errors, node, &edit);
- File::new(green_root, errors)
+ SourceFileNode::new(green_root, errors)
};
assert_eq_text!(
--- /dev/null
+use super::parser::Parser;
+use super::CharComponent;
+
+pub fn parse_byte_literal(src: &str) -> ByteComponentIterator {
+ ByteComponentIterator {
+ parser: Parser::new(src),
+ has_closing_quote: false,
+ }
+}
+
+pub struct ByteComponentIterator<'a> {
+ parser: Parser<'a>,
+ pub has_closing_quote: bool,
+}
+
+impl<'a> Iterator for ByteComponentIterator<'a> {
+ type Item = CharComponent;
+ fn next(&mut self) -> Option<CharComponent> {
+ if self.parser.pos == 0 {
+ assert!(
+ self.parser.advance() == 'b',
+ "Byte literal should start with a `b`"
+ );
+
+ assert!(
+ self.parser.advance() == '\'',
+ "Byte literal should start with a `b`, followed by a quote"
+ );
+ }
+
+ if let Some(component) = self.parser.parse_char_component() {
+ return Some(component);
+ }
+
+ // We get here when there are no char components left to parse
+ if self.parser.peek() == Some('\'') {
+ self.parser.advance();
+ self.has_closing_quote = true;
+ }
+
+ assert!(
+ self.parser.peek() == None,
+ "byte literal should leave no unparsed input: src = {}, pos = {}, length = {}",
+ self.parser.src,
+ self.parser.pos,
+ self.parser.src.len()
+ );
+
+ None
+ }
+}
--- /dev/null
+use super::parser::Parser;
+use super::StringComponent;
+
+pub fn parse_byte_string_literal(src: &str) -> ByteStringComponentIterator {
+ ByteStringComponentIterator {
+ parser: Parser::new(src),
+ has_closing_quote: false,
+ }
+}
+
+pub struct ByteStringComponentIterator<'a> {
+ parser: Parser<'a>,
+ pub has_closing_quote: bool,
+}
+
+impl<'a> Iterator for ByteStringComponentIterator<'a> {
+ type Item = StringComponent;
+ fn next(&mut self) -> Option<StringComponent> {
+ if self.parser.pos == 0 {
+ assert!(
+ self.parser.advance() == 'b',
+ "byte string literal should start with a `b`"
+ );
+
+ assert!(
+ self.parser.advance() == '"',
+ "byte string literal should start with a `b`, followed by double quotes"
+ );
+ }
+
+ if let Some(component) = self.parser.parse_string_component() {
+ return Some(component);
+ }
+
+        // We get here when there are no string components left to parse
+ if self.parser.peek() == Some('"') {
+ self.parser.advance();
+ self.has_closing_quote = true;
+ }
+
+ assert!(
+ self.parser.peek() == None,
+ "byte string literal should leave no unparsed input: src = {}, pos = {}, length = {}",
+ self.parser.src,
+ self.parser.pos,
+ self.parser.src.len()
+ );
+
+ None
+ }
+}
--- /dev/null
+use super::parser::Parser;
+use super::CharComponent;
+
+pub fn parse_char_literal(src: &str) -> CharComponentIterator {
+ CharComponentIterator {
+ parser: Parser::new(src),
+ has_closing_quote: false,
+ }
+}
+
+pub struct CharComponentIterator<'a> {
+ parser: Parser<'a>,
+ pub has_closing_quote: bool,
+}
+
+impl<'a> Iterator for CharComponentIterator<'a> {
+ type Item = CharComponent;
+ fn next(&mut self) -> Option<CharComponent> {
+ if self.parser.pos == 0 {
+ assert!(
+ self.parser.advance() == '\'',
+ "char literal should start with a quote"
+ );
+ }
+
+ if let Some(component) = self.parser.parse_char_component() {
+ return Some(component);
+ }
+
+ // We get here when there are no char components left to parse
+ if self.parser.peek() == Some('\'') {
+ self.parser.advance();
+ self.has_closing_quote = true;
+ }
+
+ assert!(
+ self.parser.peek() == None,
+ "char literal should leave no unparsed input: src = {}, pos = {}, length = {}",
+ self.parser.src,
+ self.parser.pos,
+ self.parser.src.len()
+ );
+
+ None
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use rowan::TextRange;
+ use crate::string_lexing::{
+ CharComponent,
+ CharComponentKind::*,
+};
+
+ fn parse(src: &str) -> (bool, Vec<CharComponent>) {
+ let component_iterator = &mut super::parse_char_literal(src);
+ let components: Vec<_> = component_iterator.collect();
+ (component_iterator.has_closing_quote, components)
+ }
+
+ fn unclosed_char_component(src: &str) -> CharComponent {
+ let (has_closing_quote, components) = parse(src);
+ assert!(!has_closing_quote, "char should not have closing quote");
+ assert!(components.len() == 1);
+ components[0].clone()
+ }
+
+ fn closed_char_component(src: &str) -> CharComponent {
+ let (has_closing_quote, components) = parse(src);
+ assert!(has_closing_quote, "char should have closing quote");
+ assert!(
+ components.len() == 1,
+ "Literal: {}\nComponents: {:#?}",
+ src,
+ components
+ );
+ components[0].clone()
+ }
+
+ fn closed_char_components(src: &str) -> Vec<CharComponent> {
+ let (has_closing_quote, components) = parse(src);
+ assert!(has_closing_quote, "char should have closing quote");
+ components
+ }
+
+ fn range_closed(src: &str) -> TextRange {
+ TextRange::from_to(1.into(), (src.len() as u32 - 1).into())
+ }
+
+ fn range_unclosed(src: &str) -> TextRange {
+ TextRange::from_to(1.into(), (src.len() as u32).into())
+ }
+
+ #[test]
+ fn test_unicode_escapes() {
+ let unicode_escapes = &[r"{DEAD}", "{BEEF}", "{FF}", "{}", ""];
+ for escape in unicode_escapes {
+ let escape_sequence = format!(r"'\u{}'", escape);
+ let component = closed_char_component(&escape_sequence);
+ let expected_range = range_closed(&escape_sequence);
+ assert_eq!(component.kind, UnicodeEscape);
+ assert_eq!(component.range, expected_range);
+ }
+ }
+
+ #[test]
+ fn test_unicode_escapes_unclosed() {
+ let unicode_escapes = &["{DEAD", "{BEEF", "{FF"];
+ for escape in unicode_escapes {
+ let escape_sequence = format!(r"'\u{}'", escape);
+ let component = unclosed_char_component(&escape_sequence);
+ let expected_range = range_unclosed(&escape_sequence);
+ assert_eq!(component.kind, UnicodeEscape);
+ assert_eq!(component.range, expected_range);
+ }
+ }
+
+ #[test]
+ fn test_empty_char() {
+ let (has_closing_quote, components) = parse("''");
+ assert!(has_closing_quote, "char should have closing quote");
+ assert!(components.len() == 0);
+ }
+
+ #[test]
+ fn test_unclosed_char() {
+ let component = unclosed_char_component("'a");
+ assert!(component.kind == CodePoint);
+ assert!(component.range == TextRange::from_to(1.into(), 2.into()));
+ }
+
+ #[test]
+ fn test_digit_escapes() {
+ let literals = &[r"", r"5", r"55"];
+
+ for literal in literals {
+ let lit_text = format!(r"'\x{}'", literal);
+ let component = closed_char_component(&lit_text);
+ assert!(component.kind == AsciiCodeEscape);
+ assert!(component.range == range_closed(&lit_text));
+ }
+
+ // More than 2 digits starts a new codepoint
+ let components = closed_char_components(r"'\x555'");
+ assert!(components.len() == 2);
+ assert!(components[1].kind == CodePoint);
+ }
+
+ #[test]
+ fn test_ascii_escapes() {
+ let literals = &[
+ r"\'", "\\\"", // equivalent to \"
+ r"\n", r"\r", r"\t", r"\\", r"\0",
+ ];
+
+ for literal in literals {
+ let lit_text = format!("'{}'", literal);
+ let component = closed_char_component(&lit_text);
+ assert!(component.kind == AsciiEscape);
+ assert!(component.range == range_closed(&lit_text));
+ }
+ }
+
+ #[test]
+ fn test_no_escapes() {
+ let literals = &['"', 'n', 'r', 't', '0', 'x', 'u'];
+
+ for &literal in literals {
+ let lit_text = format!("'{}'", literal);
+ let component = closed_char_component(&lit_text);
+ assert!(component.kind == CodePoint);
+ assert!(component.range == range_closed(&lit_text));
+ }
+ }
+}
-use self::CharComponentKind::*;
-use rowan::{TextRange, TextUnit};
-
-pub fn parse_char_literal(src: &str) -> CharComponentIterator {
- CharComponentIterator {
- parser: Parser::new(src),
- has_closing_quote: false,
- }
-}
-
-#[derive(Debug, Eq, PartialEq, Clone)]
-pub struct CharComponent {
- pub range: TextRange,
- pub kind: CharComponentKind,
-}
-
-impl CharComponent {
- fn new(range: TextRange, kind: CharComponentKind) -> CharComponent {
- CharComponent { range, kind }
- }
-}
-
-#[derive(Debug, Eq, PartialEq, Clone)]
-pub enum CharComponentKind {
- CodePoint,
- AsciiEscape,
- AsciiCodeEscape,
- UnicodeEscape,
-}
-
-pub struct CharComponentIterator<'a> {
- parser: Parser<'a>,
- pub has_closing_quote: bool,
-}
-
-impl<'a> Iterator for CharComponentIterator<'a> {
- type Item = CharComponent;
- fn next(&mut self) -> Option<CharComponent> {
- if self.parser.pos == 0 {
- assert!(
- self.parser.advance() == '\'',
- "char literal should start with a quote"
- );
- }
-
- if let Some(component) = self.parser.parse_char_component() {
- return Some(component);
- }
-
- // We get here when there are no char components left to parse
- if self.parser.peek() == Some('\'') {
- self.parser.advance();
- self.has_closing_quote = true;
- }
-
- assert!(
- self.parser.peek() == None,
- "char literal should leave no unparsed input: src = {}, pos = {}, length = {}",
- self.parser.src,
- self.parser.pos,
- self.parser.src.len()
- );
-
- None
- }
-}
-
-pub struct Parser<'a> {
- src: &'a str,
- pos: usize,
-}
-
-impl<'a> Parser<'a> {
- pub fn new(src: &'a str) -> Parser<'a> {
- Parser { src, pos: 0 }
- }
-
- // Utility methods
-
- pub fn peek(&self) -> Option<char> {
- if self.pos == self.src.len() {
- return None;
- }
-
- self.src[self.pos..].chars().next()
- }
-
- pub fn advance(&mut self) -> char {
- let next = self
- .peek()
- .expect("cannot advance if end of input is reached");
- self.pos += next.len_utf8();
- next
- }
-
- pub fn get_pos(&self) -> TextUnit {
- (self.pos as u32).into()
- }
-
- // Char parsing methods
-
- fn parse_unicode_escape(&mut self, start: TextUnit) -> CharComponent {
- match self.peek() {
- Some('{') => {
- self.advance();
-
- // Parse anything until we reach `}`
- while let Some(next) = self.peek() {
- self.advance();
- if next == '}' {
- break;
- }
- }
-
- let end = self.get_pos();
- CharComponent::new(TextRange::from_to(start, end), UnicodeEscape)
- }
- Some(_) | None => {
- let end = self.get_pos();
- CharComponent::new(TextRange::from_to(start, end), UnicodeEscape)
- }
- }
- }
-
- fn parse_ascii_code_escape(&mut self, start: TextUnit) -> CharComponent {
- let code_start = self.get_pos();
- while let Some(next) = self.peek() {
- if next == '\'' || (self.get_pos() - code_start == 2.into()) {
- break;
- }
-
- self.advance();
- }
-
- let end = self.get_pos();
- CharComponent::new(TextRange::from_to(start, end), AsciiCodeEscape)
- }
-
- fn parse_escape(&mut self, start: TextUnit) -> CharComponent {
- if self.peek().is_none() {
- return CharComponent::new(TextRange::from_to(start, start), AsciiEscape);
- }
-
- let next = self.advance();
- let end = self.get_pos();
- let range = TextRange::from_to(start, end);
- match next {
- 'x' => self.parse_ascii_code_escape(start),
- 'u' => self.parse_unicode_escape(start),
- _ => CharComponent::new(range, AsciiEscape),
- }
- }
-
- pub fn parse_char_component(&mut self) -> Option<CharComponent> {
- let next = self.peek()?;
-
- // Ignore character close
- if next == '\'' {
- return None;
- }
-
- let start = self.get_pos();
- self.advance();
-
- if next == '\\' {
- Some(self.parse_escape(start))
- } else {
- let end = self.get_pos();
- Some(CharComponent::new(
- TextRange::from_to(start, end),
- CodePoint,
- ))
- }
- }
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
-
- fn parse(src: &str) -> (bool, Vec<CharComponent>) {
- let component_iterator = &mut super::parse_char_literal(src);
- let components: Vec<_> = component_iterator.collect();
- (component_iterator.has_closing_quote, components)
- }
-
- fn unclosed_char_component(src: &str) -> CharComponent {
- let (has_closing_quote, components) = parse(src);
- assert!(!has_closing_quote, "char should not have closing quote");
- assert!(components.len() == 1);
- components[0].clone()
- }
-
- fn closed_char_component(src: &str) -> CharComponent {
- let (has_closing_quote, components) = parse(src);
- assert!(has_closing_quote, "char should have closing quote");
- assert!(
- components.len() == 1,
- "Literal: {}\nComponents: {:#?}",
- src,
- components
- );
- components[0].clone()
- }
-
- fn closed_char_components(src: &str) -> Vec<CharComponent> {
- let (has_closing_quote, components) = parse(src);
- assert!(has_closing_quote, "char should have closing quote");
- components
- }
-
- fn range_closed(src: &str) -> TextRange {
- TextRange::from_to(1.into(), (src.len() as u32 - 1).into())
- }
-
- fn range_unclosed(src: &str) -> TextRange {
- TextRange::from_to(1.into(), (src.len() as u32).into())
- }
-
- #[test]
- fn test_unicode_escapes() {
- let unicode_escapes = &[r"{DEAD}", "{BEEF}", "{FF}", ""];
- for escape in unicode_escapes {
- let escape_sequence = format!(r"'\u{}'", escape);
- let component = closed_char_component(&escape_sequence);
- let expected_range = range_closed(&escape_sequence);
- assert_eq!(component.kind, CharComponentKind::UnicodeEscape);
- assert_eq!(component.range, expected_range);
- }
- }
-
- #[test]
- fn test_unicode_escapes_unclosed() {
- let unicode_escapes = &["{DEAD", "{BEEF", "{FF"];
- for escape in unicode_escapes {
- let escape_sequence = format!(r"'\u{}'", escape);
- let component = unclosed_char_component(&escape_sequence);
- let expected_range = range_unclosed(&escape_sequence);
- assert_eq!(component.kind, CharComponentKind::UnicodeEscape);
- assert_eq!(component.range, expected_range);
- }
- }
-
- #[test]
- fn test_empty_char() {
- let (has_closing_quote, components) = parse("''");
- assert!(has_closing_quote, "char should have closing quote");
- assert!(components.len() == 0);
- }
-
- #[test]
- fn test_unclosed_char() {
- let component = unclosed_char_component("'a");
- assert!(component.kind == CodePoint);
- assert!(component.range == TextRange::from_to(1.into(), 2.into()));
- }
-
- #[test]
- fn test_digit_escapes() {
- let literals = &[r"", r"5", r"55"];
-
- for literal in literals {
- let lit_text = format!(r"'\x{}'", literal);
- let component = closed_char_component(&lit_text);
- assert!(component.kind == CharComponentKind::AsciiCodeEscape);
- assert!(component.range == range_closed(&lit_text));
- }
-
- // More than 2 digits starts a new codepoint
- let components = closed_char_components(r"'\x555'");
- assert!(components.len() == 2);
- assert!(components[1].kind == CharComponentKind::CodePoint);
- }
-
- #[test]
- fn test_ascii_escapes() {
- let literals = &[
- r"\'", "\\\"", // equivalent to \"
- r"\n", r"\r", r"\t", r"\\", r"\0",
- ];
-
- for literal in literals {
- let lit_text = format!("'{}'", literal);
- let component = closed_char_component(&lit_text);
- assert!(component.kind == CharComponentKind::AsciiEscape);
- assert!(component.range == range_closed(&lit_text));
- }
- }
-
- #[test]
- fn test_no_escapes() {
- let literals = &['"', 'n', 'r', 't', '0', 'x', 'u'];
-
- for &literal in literals {
- let lit_text = format!("'{}'", literal);
- let component = closed_char_component(&lit_text);
- assert!(component.kind == CharComponentKind::CodePoint);
- assert!(component.range == range_closed(&lit_text));
- }
- }
-}
+mod parser;
+mod byte;
+mod byte_string;
+mod char;
+mod string;
+
+pub use self::{
+ byte::parse_byte_literal,
+ byte_string::parse_byte_string_literal,
+ char::parse_char_literal,
+ parser::{CharComponent, CharComponentKind, StringComponent, StringComponentKind},
+ string::parse_string_literal,
+};
--- /dev/null
+use rowan::{TextRange, TextUnit};
+
+use self::CharComponentKind::*;
+
+pub struct Parser<'a> {
+ pub(super) src: &'a str,
+ pub(super) pos: usize,
+}
+
+impl<'a> Parser<'a> {
+ pub fn new(src: &'a str) -> Parser<'a> {
+ Parser { src, pos: 0 }
+ }
+
+ // Utility methods
+
+ pub fn peek(&self) -> Option<char> {
+ if self.pos == self.src.len() {
+ return None;
+ }
+
+ self.src[self.pos..].chars().next()
+ }
+
+ pub fn advance(&mut self) -> char {
+ let next = self
+ .peek()
+ .expect("cannot advance if end of input is reached");
+ self.pos += next.len_utf8();
+ next
+ }
+
+ pub fn skip_whitespace(&mut self) {
+ while self.peek().map(|c| c.is_whitespace()) == Some(true) {
+ self.advance();
+ }
+ }
+
+ pub fn get_pos(&self) -> TextUnit {
+ (self.pos as u32).into()
+ }
+
+ // Char parsing methods
+
+ fn parse_unicode_escape(&mut self, start: TextUnit) -> CharComponent {
+ match self.peek() {
+ Some('{') => {
+ self.advance();
+
+ // Parse anything until we reach `}`
+ while let Some(next) = self.peek() {
+ self.advance();
+ if next == '}' {
+ break;
+ }
+ }
+
+ let end = self.get_pos();
+ CharComponent::new(TextRange::from_to(start, end), UnicodeEscape)
+ }
+ Some(_) | None => {
+ let end = self.get_pos();
+ CharComponent::new(TextRange::from_to(start, end), UnicodeEscape)
+ }
+ }
+ }
+
+ fn parse_ascii_code_escape(&mut self, start: TextUnit) -> CharComponent {
+ let code_start = self.get_pos();
+ while let Some(next) = self.peek() {
+ if next == '\'' || (self.get_pos() - code_start == 2.into()) {
+ break;
+ }
+
+ self.advance();
+ }
+
+ let end = self.get_pos();
+ CharComponent::new(TextRange::from_to(start, end), AsciiCodeEscape)
+ }
+
+ fn parse_escape(&mut self, start: TextUnit) -> CharComponent {
+ if self.peek().is_none() {
+ return CharComponent::new(TextRange::from_to(start, start), AsciiEscape);
+ }
+
+ let next = self.advance();
+ let end = self.get_pos();
+ let range = TextRange::from_to(start, end);
+ match next {
+ 'x' => self.parse_ascii_code_escape(start),
+ 'u' => self.parse_unicode_escape(start),
+ _ => CharComponent::new(range, AsciiEscape),
+ }
+ }
+
+ pub fn parse_char_component(&mut self) -> Option<CharComponent> {
+ let next = self.peek()?;
+
+ // Ignore character close
+ if next == '\'' {
+ return None;
+ }
+
+ let start = self.get_pos();
+ self.advance();
+
+ if next == '\\' {
+ Some(self.parse_escape(start))
+ } else {
+ let end = self.get_pos();
+ Some(CharComponent::new(
+ TextRange::from_to(start, end),
+ CodePoint,
+ ))
+ }
+ }
+
+ pub fn parse_ignore_newline(&mut self, start: TextUnit) -> Option<StringComponent> {
+ // In string literals, when a `\` occurs immediately before the newline, the `\`,
+ // the newline, and all whitespace at the beginning of the next line are ignored
+ match self.peek() {
+ Some('\n') | Some('\r') => {
+ self.skip_whitespace();
+ Some(StringComponent::new(
+ TextRange::from_to(start, self.get_pos()),
+ StringComponentKind::IgnoreNewline,
+ ))
+ }
+ _ => None,
+ }
+ }
+
+ pub fn parse_string_component(&mut self) -> Option<StringComponent> {
+ let next = self.peek()?;
+
+ // Ignore string close
+ if next == '"' {
+ return None;
+ }
+
+ let start = self.get_pos();
+ self.advance();
+
+ if next == '\\' {
+ // Strings can use `\` to ignore newlines, so we first try to parse one of those
+ // before falling back to parsing char escapes
+ self.parse_ignore_newline(start).or_else(|| {
+ let char_component = self.parse_escape(start);
+ Some(StringComponent::new(
+ char_component.range,
+ StringComponentKind::Char(char_component.kind),
+ ))
+ })
+ } else {
+ let end = self.get_pos();
+ Some(StringComponent::new(
+ TextRange::from_to(start, end),
+ StringComponentKind::Char(CodePoint),
+ ))
+ }
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Clone)]
+pub struct StringComponent {
+ pub range: TextRange,
+ pub kind: StringComponentKind,
+}
+
+impl StringComponent {
+ fn new(range: TextRange, kind: StringComponentKind) -> StringComponent {
+ StringComponent { range, kind }
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Clone)]
+pub enum StringComponentKind {
+ IgnoreNewline,
+ Char(CharComponentKind),
+}
+
+#[derive(Debug, Eq, PartialEq, Clone)]
+pub struct CharComponent {
+ pub range: TextRange,
+ pub kind: CharComponentKind,
+}
+
+impl CharComponent {
+ fn new(range: TextRange, kind: CharComponentKind) -> CharComponent {
+ CharComponent { range, kind }
+ }
+}
+
+#[derive(Debug, Eq, PartialEq, Clone)]
+pub enum CharComponentKind {
+ CodePoint,
+ AsciiEscape,
+ AsciiCodeEscape,
+ UnicodeEscape,
+}
--- /dev/null
+use super::parser::Parser;
+use super::StringComponent;
+
+pub fn parse_string_literal(src: &str) -> StringComponentIterator {
+ StringComponentIterator {
+ parser: Parser::new(src),
+ has_closing_quote: false,
+ }
+}
+
+pub struct StringComponentIterator<'a> {
+ parser: Parser<'a>,
+ pub has_closing_quote: bool,
+}
+
+impl<'a> Iterator for StringComponentIterator<'a> {
+ type Item = StringComponent;
+ fn next(&mut self) -> Option<StringComponent> {
+ if self.parser.pos == 0 {
+ assert!(
+ self.parser.advance() == '"',
+ "string literal should start with double quotes"
+ );
+ }
+
+ if let Some(component) = self.parser.parse_string_component() {
+ return Some(component);
+ }
+
+        // We get here when there are no string components left to parse
+ if self.parser.peek() == Some('"') {
+ self.parser.advance();
+ self.has_closing_quote = true;
+ }
+
+ assert!(
+ self.parser.peek() == None,
+ "string literal should leave no unparsed input: src = {}, pos = {}, length = {}",
+ self.parser.src,
+ self.parser.pos,
+ self.parser.src.len()
+ );
+
+ None
+ }
+}
RAW_BYTE_STRING,
COMMENT,
SHEBANG,
- ROOT,
+ SOURCE_FILE,
STRUCT_DEF,
ENUM_DEF,
FN_DEF,
RAW_BYTE_STRING => &SyntaxInfo { name: "RAW_BYTE_STRING" },
COMMENT => &SyntaxInfo { name: "COMMENT" },
SHEBANG => &SyntaxInfo { name: "SHEBANG" },
- ROOT => &SyntaxInfo { name: "ROOT" },
+ SOURCE_FILE => &SyntaxInfo { name: "SOURCE_FILE" },
STRUCT_DEF => &SyntaxInfo { name: "STRUCT_DEF" },
ENUM_DEF => &SyntaxInfo { name: "ENUM_DEF" },
FN_DEF => &SyntaxInfo { name: "FN_DEF" },
-use crate::{File, SyntaxKind, SyntaxNodeRef, WalkEvent};
+use crate::{SourceFileNode, SyntaxKind, SyntaxNodeRef, WalkEvent};
use std::fmt::Write;
+use std::str;
/// Parse a file and create a string representation of the resulting parse tree.
pub fn dump_tree(syntax: SyntaxNodeRef) -> String {
}
pub fn check_fuzz_invariants(text: &str) {
- let file = File::parse(text);
+ let file = SourceFileNode::parse(text);
let root = file.syntax();
validate_block_structure(root);
let _ = file.ast();
+++ /dev/null
-use crate::{
- algo::visit::{visitor_ctx, VisitorCtx},
- ast::{self, AstNode},
- File,
- string_lexing::{self, CharComponentKind},
- yellow::{
- SyntaxError,
- SyntaxErrorKind::*,
- },
-};
-
-pub(crate) fn validate(file: &File) -> Vec<SyntaxError> {
- let mut errors = Vec::new();
- for node in file.root.borrowed().descendants() {
- let _ = visitor_ctx(&mut errors)
- .visit::<ast::Char, _>(validate_char)
- .accept(node);
- }
- errors
-}
-
-fn validate_char(node: ast::Char, errors: &mut Vec<SyntaxError>) {
- let mut components = string_lexing::parse_char_literal(node.text());
- let mut len = 0;
- for component in &mut components {
- len += 1;
-
- // Validate escapes
- let text = &node.text()[component.range];
- let range = component.range + node.syntax().range().start();
- use self::CharComponentKind::*;
- match component.kind {
- AsciiEscape => {
- if text.len() == 1 {
- // Escape sequence consists only of leading `\`
- errors.push(SyntaxError::new(EmptyAsciiEscape, range));
- } else {
- let escape_code = text.chars().skip(1).next().unwrap();
- if !is_ascii_escape(escape_code) {
- errors.push(SyntaxError::new(InvalidAsciiEscape, range));
- }
- }
- }
- AsciiCodeEscape => {
- // TODO:
- // * First digit is octal
- // * Second digit is hex
- }
- UnicodeEscape => {
- // TODO:
- // * Only hex digits or underscores allowed
- // * Max 6 chars
- // * Within allowed range (must be at most 10FFFF)
- }
- // Code points are always valid
- CodePoint => (),
- }
- }
-
- if !components.has_closing_quote {
- errors.push(SyntaxError::new(UnclosedChar, node.syntax().range()));
- }
-
- if len == 0 {
- errors.push(SyntaxError::new(EmptyChar, node.syntax().range()));
- }
-
- if len > 1 {
- errors.push(SyntaxError::new(LongChar, node.syntax().range()));
- }
-}
-
-fn is_ascii_escape(code: char) -> bool {
- match code {
- '\'' | '"' | 'n' | 'r' | 't' | '0' => true,
- _ => false,
- }
-}
--- /dev/null
+//! Validation of byte literals
+
+use crate::{
+ ast::{self, AstNode},
+ string_lexing::{self, CharComponentKind},
+ TextRange,
+ validation::char,
+ yellow::{
+ SyntaxError,
+ SyntaxErrorKind::*,
+ },
+};
+
+pub(super) fn validate_byte_node(node: ast::Byte, errors: &mut Vec<SyntaxError>) {
+ let literal_text = node.text();
+ let literal_range = node.syntax().range();
+ let mut components = string_lexing::parse_byte_literal(literal_text);
+ let mut len = 0;
+ for component in &mut components {
+ len += 1;
+ let text = &literal_text[component.range];
+ let range = component.range + literal_range.start();
+ validate_byte_component(text, component.kind, range, errors);
+ }
+
+ if !components.has_closing_quote {
+ errors.push(SyntaxError::new(UnclosedByte, literal_range));
+ }
+
+ if len == 0 {
+ errors.push(SyntaxError::new(EmptyByte, literal_range));
+ }
+
+ if len > 1 {
+ errors.push(SyntaxError::new(OverlongByte, literal_range));
+ }
+}
+
+pub(super) fn validate_byte_component(
+ text: &str,
+ kind: CharComponentKind,
+ range: TextRange,
+ errors: &mut Vec<SyntaxError>,
+) {
+ use self::CharComponentKind::*;
+ match kind {
+ AsciiEscape => validate_byte_escape(text, range, errors),
+ AsciiCodeEscape => validate_byte_code_escape(text, range, errors),
+ UnicodeEscape => errors.push(SyntaxError::new(UnicodeEscapeForbidden, range)),
+ CodePoint => {
+ let c = text
+ .chars()
+ .next()
+ .expect("Code points should be one character long");
+
+ // These bytes must always be escaped
+ if c == '\t' || c == '\r' || c == '\n' {
+ errors.push(SyntaxError::new(UnescapedByte, range));
+ }
+
+ // Only ASCII bytes are allowed
+ if c > 0x7F as char {
+ errors.push(SyntaxError::new(ByteOutOfRange, range));
+ }
+ }
+ }
+}
+
+fn validate_byte_escape(text: &str, range: TextRange, errors: &mut Vec<SyntaxError>) {
+ if text.len() == 1 {
+ // Escape sequence consists only of leading `\`
+ errors.push(SyntaxError::new(EmptyByteEscape, range));
+ } else {
+ let escape_code = text.chars().skip(1).next().unwrap();
+ if !char::is_ascii_escape(escape_code) {
+ errors.push(SyntaxError::new(InvalidByteEscape, range));
+ }
+ }
+}
+
+fn validate_byte_code_escape(text: &str, range: TextRange, errors: &mut Vec<SyntaxError>) {
+    // A byte code escape (`AsciiCodeEscape`) is exactly 4 chars, e.g. `\xDD`
+ if text.len() < 4 {
+ errors.push(SyntaxError::new(TooShortByteCodeEscape, range));
+ } else {
+ assert!(
+ text.chars().count() == 4,
+ "ByteCodeEscape cannot be longer than 4 chars"
+ );
+
+ if u8::from_str_radix(&text[2..], 16).is_err() {
+ errors.push(SyntaxError::new(MalformedByteCodeEscape, range));
+ }
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use crate::SourceFileNode;
+
+ fn build_file(literal: &str) -> SourceFileNode {
+ let src = format!("const C: u8 = b'{}';", literal);
+ SourceFileNode::parse(&src)
+ }
+
+ fn assert_valid_byte(literal: &str) {
+ let file = build_file(literal);
+ assert!(
+ file.errors().len() == 0,
+ "Errors for literal '{}': {:?}",
+ literal,
+ file.errors()
+ );
+ }
+
+ fn assert_invalid_byte(literal: &str) {
+ let file = build_file(literal);
+ assert!(file.errors().len() > 0);
+ }
+
+ #[test]
+ fn test_ansi_codepoints() {
+ for byte in 0..128 {
+ match byte {
+ b'\n' | b'\r' | b'\t' => assert_invalid_byte(&(byte as char).to_string()),
+ b'\'' | b'\\' => { /* Ignore character close and backslash */ }
+ _ => assert_valid_byte(&(byte as char).to_string()),
+ }
+ }
+
+ for byte in 128..=255u8 {
+ assert_invalid_byte(&(byte as char).to_string());
+ }
+ }
+
+ #[test]
+ fn test_unicode_codepoints() {
+ let invalid = ["Ƒ", "バ", "メ", "﷽"];
+ for c in &invalid {
+ assert_invalid_byte(c);
+ }
+ }
+
+ #[test]
+ fn test_unicode_multiple_codepoints() {
+ let invalid = ["नी", "👨👨"];
+ for c in &invalid {
+ assert_invalid_byte(c);
+ }
+ }
+
+ #[test]
+ fn test_valid_byte_escape() {
+ let valid = [r"\'", "\"", "\\\\", "\\\"", r"\n", r"\r", r"\t", r"\0"];
+ for c in &valid {
+ assert_valid_byte(c);
+ }
+ }
+
+ #[test]
+ fn test_invalid_byte_escape() {
+ let invalid = [r"\a", r"\?", r"\"];
+ for c in &invalid {
+ assert_invalid_byte(c);
+ }
+ }
+
+ #[test]
+ fn test_valid_byte_code_escape() {
+ let valid = [r"\x00", r"\x7F", r"\x55", r"\xF0"];
+ for c in &valid {
+ assert_valid_byte(c);
+ }
+ }
+
+ #[test]
+ fn test_invalid_byte_code_escape() {
+ let invalid = [r"\x", r"\x7"];
+ for c in &invalid {
+ assert_invalid_byte(c);
+ }
+ }
+
+ #[test]
+ fn test_invalid_unicode_escape() {
+ let well_formed = [
+ r"\u{FF}",
+ r"\u{0}",
+ r"\u{F}",
+ r"\u{10FFFF}",
+ r"\u{1_0__FF___FF_____}",
+ ];
+ for c in &well_formed {
+ assert_invalid_byte(c);
+ }
+
+ let invalid = [
+ r"\u",
+ r"\u{}",
+ r"\u{",
+ r"\u{FF",
+ r"\u{FFFFFF}",
+ r"\u{_F}",
+ r"\u{00FFFFF}",
+ r"\u{110000}",
+ ];
+ for c in &invalid {
+ assert_invalid_byte(c);
+ }
+ }
+}
--- /dev/null
+use crate::{
+ ast::{self, AstNode},
+ string_lexing::{self, StringComponentKind},
+ yellow::{
+ SyntaxError,
+ SyntaxErrorKind::*,
+ },
+};
+
+use super::byte;
+
+pub(crate) fn validate_byte_string_node(node: ast::ByteString, errors: &mut Vec<SyntaxError>) {
+ let literal_text = node.text();
+ let literal_range = node.syntax().range();
+ let mut components = string_lexing::parse_byte_string_literal(literal_text);
+ for component in &mut components {
+ let range = component.range + literal_range.start();
+
+ match component.kind {
+ StringComponentKind::Char(kind) => {
+ // Chars must escape \t, \n and \r codepoints, but strings don't
+ let text = &literal_text[component.range];
+ match text {
+ "\t" | "\n" | "\r" => { /* always valid */ }
+ _ => byte::validate_byte_component(text, kind, range, errors),
+ }
+ }
+ StringComponentKind::IgnoreNewline => { /* always valid */ }
+ }
+ }
+
+ if !components.has_closing_quote {
+ errors.push(SyntaxError::new(UnclosedString, literal_range));
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use crate::SourceFileNode;
+
+ fn build_file(literal: &str) -> SourceFileNode {
+ let src = format!(r#"const S: &'static [u8] = b"{}";"#, literal);
+ println!("Source: {}", src);
+ SourceFileNode::parse(&src)
+ }
+
+ fn assert_valid_str(literal: &str) {
+ let file = build_file(literal);
+ assert!(
+ file.errors().len() == 0,
+ "Errors for literal '{}': {:?}",
+ literal,
+ file.errors()
+ );
+ }
+
+ fn assert_invalid_str(literal: &str) {
+ let file = build_file(literal);
+ assert!(file.errors().len() > 0);
+ }
+
+ #[test]
+ fn test_ansi_codepoints() {
+ for byte in 0..128 {
+ match byte {
+ b'\"' | b'\\' => { /* Ignore string close and backslash */ }
+ _ => assert_valid_str(&(byte as char).to_string()),
+ }
+ }
+
+ for byte in 128..=255u8 {
+ assert_invalid_str(&(byte as char).to_string());
+ }
+ }
+
+ #[test]
+ fn test_unicode_codepoints() {
+ let invalid = ["Ƒ", "バ", "メ", "﷽"];
+ for c in &invalid {
+ assert_invalid_str(c);
+ }
+ }
+
+ #[test]
+ fn test_unicode_multiple_codepoints() {
+ let invalid = ["नी", "👨👨"];
+ for c in &invalid {
+ assert_invalid_str(c);
+ }
+ }
+
+ #[test]
+ fn test_valid_ascii_escape() {
+ let valid = [r"\'", r#"\""#, r"\\", r"\n", r"\r", r"\t", r"\0", "a", "b"];
+ for c in &valid {
+ assert_valid_str(c);
+ }
+ }
+
+ #[test]
+ fn test_invalid_ascii_escape() {
+ let invalid = [r"\a", r"\?", r"\"];
+ for c in &invalid {
+ assert_invalid_str(c);
+ }
+ }
+
+ #[test]
+ fn test_valid_ascii_code_escape() {
+ let valid = [r"\x00", r"\x7F", r"\x55", r"\xF0"];
+ for c in &valid {
+ assert_valid_str(c);
+ }
+ }
+
+ #[test]
+ fn test_invalid_ascii_code_escape() {
+ let invalid = [r"\x", r"\x7"];
+ for c in &invalid {
+ assert_invalid_str(c);
+ }
+ }
+
+ #[test]
+ fn test_invalid_unicode_escape() {
+ let well_formed = [
+ r"\u{FF}",
+ r"\u{0}",
+ r"\u{F}",
+ r"\u{10FFFF}",
+ r"\u{1_0__FF___FF_____}",
+ ];
+ for c in &well_formed {
+ assert_invalid_str(c);
+ }
+
+ let invalid = [
+ r"\u",
+ r"\u{}",
+ r"\u{",
+ r"\u{FF",
+ r"\u{FFFFFF}",
+ r"\u{_F}",
+ r"\u{00FFFFF}",
+ r"\u{110000}",
+ ];
+ for c in &invalid {
+ assert_invalid_str(c);
+ }
+ }
+
+ #[test]
+ fn test_mixed_invalid() {
+ assert_invalid_str(
+ r"This is the tale of a string
+with a newline in between, some emoji (👨👨) here and there,
+unicode escapes like this: \u{1FFBB} and weird stuff like
+this ﷽",
+ );
+ }
+
+ #[test]
+ fn test_mixed_valid() {
+ assert_valid_str(
+ r"This is the tale of a string
+with a newline in between, no emoji at all,
+nor unicode escapes or weird stuff",
+ );
+ }
+
+ #[test]
+ fn test_ignore_newline() {
+ assert_valid_str(
+ "Hello \
+ World",
+ );
+ }
+}
--- /dev/null
+//! Validation of char literals
+
+use std::u32;
+
+use arrayvec::ArrayString;
+
+use crate::{
+ ast::{self, AstNode},
+ string_lexing::{self, CharComponentKind},
+ TextRange,
+ yellow::{
+ SyntaxError,
+ SyntaxErrorKind::*,
+ },
+};
+
+/// Validates a char literal node: lexes it into components, validates each
+/// component, then reports unclosed, empty and overlong literals.
+pub(super) fn validate_char_node(node: ast::Char, errors: &mut Vec<SyntaxError>) {
+    let literal_text = node.text();
+    let literal_range = node.syntax().range();
+    let mut components = string_lexing::parse_char_literal(literal_text);
+    // Count components while validating each one; the count drives the
+    // EmptyChar / OverlongChar diagnostics below.
+    let mut len = 0;
+    for component in &mut components {
+        len += 1;
+        let text = &literal_text[component.range];
+        // Shift the component-relative range to a file-absolute range.
+        let range = component.range + literal_range.start();
+        validate_char_component(text, component.kind, range, errors);
+    }
+
+    if !components.has_closing_quote {
+        errors.push(SyntaxError::new(UnclosedChar, literal_range));
+    }
+
+    if len == 0 {
+        errors.push(SyntaxError::new(EmptyChar, literal_range));
+    }
+
+    if len > 1 {
+        errors.push(SyntaxError::new(OverlongChar, literal_range));
+    }
+}
+
+/// Dispatches one lexed char-literal component to the validator matching
+/// its kind. Also used by the string validator, since string escapes share
+/// this grammar.
+pub(super) fn validate_char_component(
+    text: &str,
+    kind: CharComponentKind,
+    range: TextRange,
+    errors: &mut Vec<SyntaxError>,
+) {
+    // Validate escapes
+    use self::CharComponentKind::*;
+    match kind {
+        AsciiEscape => validate_ascii_escape(text, range, errors),
+        AsciiCodeEscape => validate_ascii_code_escape(text, range, errors),
+        UnicodeEscape => validate_unicode_escape(text, range, errors),
+        CodePoint => {
+            // These code points must always be escaped
+            if text == "\t" || text == "\r" || text == "\n" {
+                errors.push(SyntaxError::new(UnescapedCodepoint, range));
+            }
+        }
+    }
+}
+
+/// Validates a simple escape such as `\n`: `text` is the backslash plus at
+/// most one following character.
+fn validate_ascii_escape(text: &str, range: TextRange, errors: &mut Vec<SyntaxError>) {
+    if text.len() == 1 {
+        // Escape sequence consists only of leading `\`
+        errors.push(SyntaxError::new(EmptyAsciiEscape, range));
+    } else {
+        // Second char is the escape code; the lexer guarantees it exists here.
+        let escape_code = text.chars().skip(1).next().unwrap();
+        if !is_ascii_escape(escape_code) {
+            errors.push(SyntaxError::new(InvalidAsciiEscape, range));
+        }
+    }
+}
+
+/// Returns true for the escape codes valid after a `\` in char and string
+/// literals: `\\ \' \" \n \r \t \0`.
+pub(super) fn is_ascii_escape(code: char) -> bool {
+    match code {
+        '\\' | '\'' | '"' | 'n' | 'r' | 't' | '0' => true,
+        _ => false,
+    }
+}
+
+/// Validates a `\xDD` escape: exactly two hex digits, value below 0x80.
+fn validate_ascii_code_escape(text: &str, range: TextRange, errors: &mut Vec<SyntaxError>) {
+    // An AsciiCodeEscape has 4 chars, example: `\xDD`
+    if text.len() < 4 {
+        errors.push(SyntaxError::new(TooShortAsciiCodeEscape, range));
+    } else {
+        // The lexer never produces a longer component, so this is an invariant.
+        assert!(
+            text.chars().count() == 4,
+            "AsciiCodeEscape cannot be longer than 4 chars"
+        );
+
+        // Parse the two digits after `\x` as hex.
+        match u8::from_str_radix(&text[2..], 16) {
+            Ok(code) if code < 128 => { /* Escape code is valid */ }
+            Ok(_) => errors.push(SyntaxError::new(AsciiCodeEscapeOutOfRange, range)),
+            Err(_) => errors.push(SyntaxError::new(MalformedAsciiCodeEscape, range)),
+        }
+    }
+}
+
+/// Validates a `\u{...}` escape (`text` includes the leading `\u`), reporting
+/// malformed, unclosed, empty, overlong and out-of-range (> 0x10FFFF) escapes.
+fn validate_unicode_escape(text: &str, range: TextRange, errors: &mut Vec<SyntaxError>) {
+    assert!(&text[..2] == "\\u", "UnicodeEscape always starts with \\u");
+
+    if text.len() == 2 {
+        // No starting `{`
+        errors.push(SyntaxError::new(MalformedUnicodeEscape, range));
+        return;
+    }
+
+    if text.len() == 3 {
+        // Only starting `{`
+        errors.push(SyntaxError::new(UnclosedUnicodeEscape, range));
+        return;
+    }
+
+    // Collect at most 6 hex digits on the stack; a 7th digit makes the
+    // escape overlong.
+    let mut code = ArrayString::<[_; 6]>::new();
+    let mut closed = false;
+    for c in text[3..].chars() {
+        assert!(!closed, "no characters after escape is closed");
+
+        if c.is_digit(16) {
+            if code.len() == 6 {
+                errors.push(SyntaxError::new(OverlongUnicodeEscape, range));
+                return;
+            }
+
+            code.push(c);
+        } else if c == '_' {
+            // Reject leading _
+            if code.len() == 0 {
+                errors.push(SyntaxError::new(MalformedUnicodeEscape, range));
+                return;
+            }
+        } else if c == '}' {
+            closed = true;
+        } else {
+            // Any other character between the braces is malformed.
+            errors.push(SyntaxError::new(MalformedUnicodeEscape, range));
+            return;
+        }
+    }
+
+    // NOTE(review): unlike every other error path above, this branch does not
+    // return, so an unclosed escape is also checked for emptiness/range below
+    // and may produce a second diagnostic — confirm that is intended.
+    if !closed {
+        errors.push(SyntaxError::new(UnclosedUnicodeEscape, range))
+    }
+
+    if code.len() == 0 {
+        errors.push(SyntaxError::new(EmptyUnicodeEcape, range));
+        return;
+    }
+
+    // Digits were pre-validated as hex, so Err here should be unreachable,
+    // but is reported defensively as malformed.
+    match u32::from_str_radix(&code, 16) {
+        Ok(code_u32) if code_u32 > 0x10FFFF => {
+            errors.push(SyntaxError::new(UnicodeEscapeOutOfRange, range));
+        }
+        Ok(_) => {
+            // Valid escape code
+        }
+        Err(_) => {
+            errors.push(SyntaxError::new(MalformedUnicodeEscape, range));
+        }
+    }
+}
+
+// Tests: each helper parses a full `const` item wrapping the literal and
+// asserts on whether the parsed file carries any syntax errors.
+#[cfg(test)]
+mod test {
+    use crate::SourceFileNode;
+
+    // Wraps `literal` in a char-typed const and parses it.
+    fn build_file(literal: &str) -> SourceFileNode {
+        let src = format!("const C: char = '{}';", literal);
+        SourceFileNode::parse(&src)
+    }
+
+    fn assert_valid_char(literal: &str) {
+        let file = build_file(literal);
+        assert!(
+            file.errors().len() == 0,
+            "Errors for literal '{}': {:?}",
+            literal,
+            file.errors()
+        );
+    }
+
+    fn assert_invalid_char(literal: &str) {
+        let file = build_file(literal);
+        assert!(file.errors().len() > 0);
+    }
+
+    #[test]
+    fn test_ansi_codepoints() {
+        for byte in 0..=255u8 {
+            match byte {
+                b'\n' | b'\r' | b'\t' => assert_invalid_char(&(byte as char).to_string()),
+                b'\'' | b'\\' => { /* Ignore character close and backslash */ }
+                _ => assert_valid_char(&(byte as char).to_string()),
+            }
+        }
+    }
+
+    #[test]
+    fn test_unicode_codepoints() {
+        let valid = ["Ƒ", "バ", "メ", "﷽"];
+        for c in &valid {
+            assert_valid_char(c);
+        }
+    }
+
+    #[test]
+    fn test_unicode_multiple_codepoints() {
+        // Multi-codepoint graphemes are not a single char, hence invalid.
+        let invalid = ["नी", "👨👨"];
+        for c in &invalid {
+            assert_invalid_char(c);
+        }
+    }
+
+    #[test]
+    fn test_valid_ascii_escape() {
+        let valid = [r"\'", "\"", "\\\\", "\\\"", r"\n", r"\r", r"\t", r"\0"];
+        for c in &valid {
+            assert_valid_char(c);
+        }
+    }
+
+    #[test]
+    fn test_invalid_ascii_escape() {
+        let invalid = [r"\a", r"\?", r"\"];
+        for c in &invalid {
+            assert_invalid_char(c);
+        }
+    }
+
+    #[test]
+    fn test_valid_ascii_code_escape() {
+        let valid = [r"\x00", r"\x7F", r"\x55"];
+        for c in &valid {
+            assert_valid_char(c);
+        }
+    }
+
+    #[test]
+    fn test_invalid_ascii_code_escape() {
+        // Note: `\xF0` is invalid in char literals (must be <= \x7F).
+        let invalid = [r"\x", r"\x7", r"\xF0"];
+        for c in &invalid {
+            assert_invalid_char(c);
+        }
+    }
+
+    #[test]
+    fn test_valid_unicode_escape() {
+        let valid = [
+            r"\u{FF}",
+            r"\u{0}",
+            r"\u{F}",
+            r"\u{10FFFF}",
+            r"\u{1_0__FF___FF_____}",
+        ];
+        for c in &valid {
+            assert_valid_char(c);
+        }
+    }
+
+    #[test]
+    fn test_invalid_unicode_escape() {
+        let invalid = [
+            r"\u",
+            r"\u{}",
+            r"\u{",
+            r"\u{FF",
+            r"\u{FFFFFF}",
+            r"\u{_F}",
+            r"\u{00FFFFF}",
+            r"\u{110000}",
+        ];
+        for c in &invalid {
+            assert_invalid_char(c);
+        }
+    }
+}
--- /dev/null
+use crate::{
+ algo::visit::{visitor_ctx, VisitorCtx},
+ ast,
+ SourceFileNode,
+ yellow::SyntaxError,
+};
+
+mod byte;
+mod byte_string;
+mod char;
+mod string;
+
+/// Walks every node of `file` and runs the per-literal validators (byte,
+/// byte string, char, string), collecting all resulting syntax errors.
+pub(crate) fn validate(file: &SourceFileNode) -> Vec<SyntaxError> {
+    let mut errors = Vec::new();
+    for node in file.syntax().descendants() {
+        // The visitor only fires for nodes matching one of the four AST types.
+        let _ = visitor_ctx(&mut errors)
+            .visit::<ast::Byte, _>(self::byte::validate_byte_node)
+            .visit::<ast::ByteString, _>(self::byte_string::validate_byte_string_node)
+            .visit::<ast::Char, _>(self::char::validate_char_node)
+            .visit::<ast::String, _>(self::string::validate_string_node)
+            .accept(node);
+    }
+    errors
+}
--- /dev/null
+use crate::{
+ ast::{self, AstNode},
+ string_lexing::{self, StringComponentKind},
+ yellow::{
+ SyntaxError,
+ SyntaxErrorKind::*,
+ },
+};
+
+use super::char;
+
+/// Validates a string literal node: validates each lexed component via the
+/// shared char validators, then reports a missing closing quote.
+pub(crate) fn validate_string_node(node: ast::String, errors: &mut Vec<SyntaxError>) {
+    let literal_text = node.text();
+    let literal_range = node.syntax().range();
+    let mut components = string_lexing::parse_string_literal(literal_text);
+    for component in &mut components {
+        // Shift the component-relative range to a file-absolute range.
+        let range = component.range + literal_range.start();
+
+        match component.kind {
+            StringComponentKind::Char(kind) => {
+                // Chars must escape \t, \n and \r codepoints, but strings don't
+                let text = &literal_text[component.range];
+                match text {
+                    "\t" | "\n" | "\r" => { /* always valid */ }
+                    _ => char::validate_char_component(text, kind, range, errors),
+                }
+            }
+            StringComponentKind::IgnoreNewline => { /* always valid */ }
+        }
+    }
+
+    if !components.has_closing_quote {
+        errors.push(SyntaxError::new(UnclosedString, literal_range));
+    }
+}
+
+// Tests: each helper parses a full `const` item wrapping the literal and
+// asserts on whether the parsed file carries any syntax errors.
+#[cfg(test)]
+mod test {
+    use crate::SourceFileNode;
+
+    // Wraps `literal` in a &str-typed const and parses it.
+    fn build_file(literal: &str) -> SourceFileNode {
+        let src = format!(r#"const S: &'static str = "{}";"#, literal);
+        println!("Source: {}", src);
+        SourceFileNode::parse(&src)
+    }
+
+    fn assert_valid_str(literal: &str) {
+        let file = build_file(literal);
+        assert!(
+            file.errors().len() == 0,
+            "Errors for literal '{}': {:?}",
+            literal,
+            file.errors()
+        );
+    }
+
+    fn assert_invalid_str(literal: &str) {
+        let file = build_file(literal);
+        assert!(file.errors().len() > 0);
+    }
+
+    #[test]
+    fn test_ansi_codepoints() {
+        // Unlike char literals, strings accept every raw byte value,
+        // including \t, \n and \r.
+        for byte in 0..=255u8 {
+            match byte {
+                b'\"' | b'\\' => { /* Ignore string close and backslash */ }
+                _ => assert_valid_str(&(byte as char).to_string()),
+            }
+        }
+    }
+
+    #[test]
+    fn test_unicode_codepoints() {
+        let valid = ["Ƒ", "バ", "メ", "﷽"];
+        for c in &valid {
+            assert_valid_str(c);
+        }
+    }
+
+    #[test]
+    fn test_unicode_multiple_codepoints() {
+        // Multi-codepoint graphemes are fine in strings (unlike chars).
+        let valid = ["नी", "👨👨"];
+        for c in &valid {
+            assert_valid_str(c);
+        }
+    }
+
+    #[test]
+    fn test_valid_ascii_escape() {
+        let valid = [r"\'", r#"\""#, r"\\", r"\n", r"\r", r"\t", r"\0", "a", "b"];
+        for c in &valid {
+            assert_valid_str(c);
+        }
+    }
+
+    #[test]
+    fn test_invalid_ascii_escape() {
+        let invalid = [r"\a", r"\?", r"\"];
+        for c in &invalid {
+            assert_invalid_str(c);
+        }
+    }
+
+    #[test]
+    fn test_valid_ascii_code_escape() {
+        let valid = [r"\x00", r"\x7F", r"\x55"];
+        for c in &valid {
+            assert_valid_str(c);
+        }
+    }
+
+    #[test]
+    fn test_invalid_ascii_code_escape() {
+        let invalid = [r"\x", r"\x7", r"\xF0"];
+        for c in &invalid {
+            assert_invalid_str(c);
+        }
+    }
+
+    #[test]
+    fn test_valid_unicode_escape() {
+        let valid = [
+            r"\u{FF}",
+            r"\u{0}",
+            r"\u{F}",
+            r"\u{10FFFF}",
+            r"\u{1_0__FF___FF_____}",
+        ];
+        for c in &valid {
+            assert_valid_str(c);
+        }
+    }
+
+    #[test]
+    fn test_invalid_unicode_escape() {
+        let invalid = [
+            r"\u",
+            r"\u{}",
+            r"\u{",
+            r"\u{FF",
+            r"\u{FFFFFF}",
+            r"\u{_F}",
+            r"\u{00FFFFF}",
+            r"\u{110000}",
+        ];
+        for c in &invalid {
+            assert_invalid_str(c);
+        }
+    }
+
+    #[test]
+    fn test_mixed() {
+        assert_valid_str(
+            r"This is the tale of a string
+with a newline in between, some emoji (👨👨) here and there,
+unicode escapes like this: \u{1FFBB} and weird stuff like
+this ﷽",
+        );
+    }
+
+    #[test]
+    fn test_ignore_newline() {
+        // A backslash before a newline suppresses the newline and leading
+        // whitespace; this must not be flagged.
+        assert_valid_str(
+            "Hello \
+             World",
+        );
+    }
+}
}
}
+ pub fn kind(&self) -> SyntaxErrorKind {
+ self.kind.clone()
+ }
+
pub fn location(&self) -> Location {
self.location.clone()
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub enum SyntaxErrorKind {
ParseError(ParseError),
+ UnescapedCodepoint,
EmptyChar,
UnclosedChar,
- LongChar,
+ OverlongChar,
+ EmptyByte,
+ UnclosedByte,
+ OverlongByte,
+ ByteOutOfRange,
+ UnescapedByte,
+ EmptyByteEscape,
+ InvalidByteEscape,
+ TooShortByteCodeEscape,
+ MalformedByteCodeEscape,
+ UnicodeEscapeForbidden,
EmptyAsciiEscape,
InvalidAsciiEscape,
+ TooShortAsciiCodeEscape,
+ AsciiCodeEscapeOutOfRange,
+ MalformedAsciiCodeEscape,
+ UnclosedUnicodeEscape,
+ MalformedUnicodeEscape,
+ EmptyUnicodeEcape,
+ OverlongUnicodeEscape,
+ UnicodeEscapeOutOfRange,
+ UnclosedString,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use self::SyntaxErrorKind::*;
match self {
+ UnescapedCodepoint => write!(f, "This codepoint should always be escaped"),
EmptyAsciiEscape => write!(f, "Empty escape sequence"),
InvalidAsciiEscape => write!(f, "Invalid escape sequence"),
EmptyChar => write!(f, "Empty char literal"),
UnclosedChar => write!(f, "Unclosed char literal"),
- LongChar => write!(f, "Char literal should be one character long"),
+ OverlongChar => write!(f, "Char literal should be one character long"),
+ EmptyByte => write!(f, "Empty byte literal"),
+ UnclosedByte => write!(f, "Unclosed byte literal"),
+ OverlongByte => write!(f, "Byte literal should be one character long"),
+ ByteOutOfRange => write!(f, "Byte should be a valid ASCII character"),
+ UnescapedByte => write!(f, "This byte should always be escaped"),
+ EmptyByteEscape => write!(f, "Empty escape sequence"),
+ InvalidByteEscape => write!(f, "Invalid escape sequence"),
+ TooShortByteCodeEscape => write!(f, "Escape sequence should have two digits"),
+ MalformedByteCodeEscape => write!(f, "Escape sequence should be a hexadecimal number"),
+ UnicodeEscapeForbidden => write!(
+ f,
+ "Unicode escapes are not allowed in byte literals or byte strings"
+ ),
+ TooShortAsciiCodeEscape => write!(f, "Escape sequence should have two digits"),
+ AsciiCodeEscapeOutOfRange => {
+ write!(f, "Escape sequence should be between \\x00 and \\x7F")
+ }
+ MalformedAsciiCodeEscape => write!(f, "Escape sequence should be a hexadecimal number"),
+ UnclosedUnicodeEscape => write!(f, "Missing `}}`"),
+ MalformedUnicodeEscape => write!(f, "Malformed unicode escape sequence"),
+ EmptyUnicodeEcape => write!(f, "Empty unicode escape sequence"),
+ OverlongUnicodeEscape => {
+ write!(f, "Unicode escape sequence should have at most 6 digits")
+ }
+ UnicodeEscapeOutOfRange => write!(f, "Unicode escape code should be at most 0x10FFFF"),
+ UnclosedString => write!(f, "Unclosed string literal"),
ParseError(msg) => write!(f, "{}", msg.0),
}
}
-ROOT@[0; 34)
+SOURCE_FILE@[0; 34)
STRUCT_DEF@[0; 34)
STRUCT_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 21)
+SOURCE_FILE@[0; 21)
ERROR@[0; 2)
IF_KW@[0; 2)
err: `expected an item`
-ROOT@[0; 42)
+SOURCE_FILE@[0; 42)
SHEBANG@[0; 20)
WHITESPACE@[20; 21)
err: `expected an item`
-ROOT@[0; 40)
+SOURCE_FILE@[0; 40)
STRUCT_DEF@[0; 40)
STRUCT_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 12)
+SOURCE_FILE@[0; 12)
USE_ITEM@[0; 12)
USE_KW@[0; 3)
WHITESPACE@[3; 4)
-ROOT@[0; 54)
+SOURCE_FILE@[0; 54)
FN_DEF@[0; 31)
ATTR@[0; 18)
POUND@[0; 1)
-ROOT@[0; 74)
+SOURCE_FILE@[0; 74)
STRUCT_DEF@[0; 73)
STRUCT_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 31)
+SOURCE_FILE@[0; 31)
ERROR@[0; 1)
R_CURLY@[0; 1)
err: `unmatched `}``
-ROOT@[0; 95)
+SOURCE_FILE@[0; 95)
FN_DEF@[0; 12)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 43)
+SOURCE_FILE@[0; 43)
STRUCT_DEF@[0; 11)
STRUCT_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 42)
+SOURCE_FILE@[0; 42)
FN_DEF@[0; 41)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 19)
+SOURCE_FILE@[0; 19)
ERROR@[0; 6)
ABI@[0; 6)
EXTERN_KW@[0; 6)
-ROOT@[0; 389)
+SOURCE_FILE@[0; 389)
FN_DEF@[0; 389)
VISIBILITY@[0; 10)
PUB_KW@[0; 3)
-ROOT@[0; 86)
+SOURCE_FILE@[0; 86)
STRUCT_DEF@[0; 72)
VISIBILITY@[0; 3)
PUB_KW@[0; 3)
-ROOT@[0; 23)
+SOURCE_FILE@[0; 23)
FN_DEF@[0; 22)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 14)
+SOURCE_FILE@[0; 14)
FN_DEF@[0; 7)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 56)
+SOURCE_FILE@[0; 56)
FN_DEF@[0; 55)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 47)
+SOURCE_FILE@[0; 47)
FN_DEF@[0; 46)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 183)
+SOURCE_FILE@[0; 183)
IMPL_ITEM@[0; 182)
IMPL_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 139)
+SOURCE_FILE@[0; 139)
FN_DEF@[0; 138)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 16)
+SOURCE_FILE@[0; 16)
FN_DEF@[0; 2)
FN_KW@[0; 2)
err: `expected a name`
-ROOT@[0; 22)
+SOURCE_FILE@[0; 22)
FN_DEF@[0; 21)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 112)
+SOURCE_FILE@[0; 112)
FN_DEF@[0; 33)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 94)
+SOURCE_FILE@[0; 94)
FN_DEF@[0; 55)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 240)
+SOURCE_FILE@[0; 240)
FN_DEF@[0; 53)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 575)
+SOURCE_FILE@[0; 575)
FN_DEF@[0; 574)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 38)
+SOURCE_FILE@[0; 38)
IMPL_ITEM@[0; 14)
IMPL_KW@[0; 4)
TYPE_PARAM_LIST@[4; 14)
-ROOT@[0; 30)
+SOURCE_FILE@[0; 30)
FN_DEF@[0; 29)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 25)
+SOURCE_FILE@[0; 25)
FN_DEF@[0; 24)
CONST_KW@[0; 5)
WHITESPACE@[5; 6)
-ROOT@[0; 18)
+SOURCE_FILE@[0; 18)
FN_DEF@[0; 17)
CONST_KW@[0; 5)
WHITESPACE@[5; 6)
-ROOT@[0; 10)
+SOURCE_FILE@[0; 10)
EXTERN_BLOCK@[0; 9)
ABI@[0; 6)
EXTERN_KW@[0; 6)
-ROOT@[0; 19)
+SOURCE_FILE@[0; 19)
FN_DEF@[0; 18)
ABI@[0; 6)
EXTERN_KW@[0; 6)
-ROOT@[0; 18)
+SOURCE_FILE@[0; 18)
EXTERN_CRATE_ITEM@[0; 17)
EXTERN_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 18)
+SOURCE_FILE@[0; 18)
TRAIT_DEF@[0; 17)
UNSAFE_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 19)
+SOURCE_FILE@[0; 19)
IMPL_ITEM@[0; 18)
UNSAFE_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 23)
+SOURCE_FILE@[0; 23)
TRAIT_DEF@[0; 22)
UNSAFE_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 27)
+SOURCE_FILE@[0; 27)
IMPL_ITEM@[0; 26)
UNSAFE_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 19)
+SOURCE_FILE@[0; 19)
FN_DEF@[0; 18)
UNSAFE_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 30)
+SOURCE_FILE@[0; 30)
FN_DEF@[0; 29)
UNSAFE_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 33)
+SOURCE_FILE@[0; 33)
FN_DEF@[0; 10)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 21)
+SOURCE_FILE@[0; 21)
TYPE_DEF@[0; 20)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 16)
+SOURCE_FILE@[0; 16)
TYPE_DEF@[0; 15)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 31)
+SOURCE_FILE@[0; 31)
TYPE_DEF@[0; 30)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 16)
+SOURCE_FILE@[0; 16)
TYPE_DEF@[0; 15)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 13)
+SOURCE_FILE@[0; 13)
TYPE_DEF@[0; 12)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 17)
+SOURCE_FILE@[0; 17)
TYPE_DEF@[0; 16)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 16)
+SOURCE_FILE@[0; 16)
TYPE_DEF@[0; 15)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 14)
+SOURCE_FILE@[0; 14)
TYPE_DEF@[0; 13)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 36)
+SOURCE_FILE@[0; 36)
TYPE_DEF@[0; 17)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 18)
+SOURCE_FILE@[0; 18)
TYPE_DEF@[0; 12)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 19)
+SOURCE_FILE@[0; 19)
TYPE_DEF@[0; 18)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 15)
+SOURCE_FILE@[0; 15)
TYPE_DEF@[0; 14)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 54)
+SOURCE_FILE@[0; 54)
TYPE_DEF@[0; 13)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 22)
+SOURCE_FILE@[0; 22)
TYPE_DEF@[0; 21)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 70)
+SOURCE_FILE@[0; 70)
TYPE_DEF@[0; 14)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 20)
+SOURCE_FILE@[0; 20)
TYPE_DEF@[0; 15)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 21)
+SOURCE_FILE@[0; 21)
TYPE_DEF@[0; 20)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 29)
+SOURCE_FILE@[0; 29)
TYPE_DEF@[0; 28)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 71)
+SOURCE_FILE@[0; 71)
TYPE_DEF@[0; 13)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 146)
+SOURCE_FILE@[0; 146)
FN_DEF@[0; 145)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 52)
+SOURCE_FILE@[0; 52)
FN_DEF@[0; 51)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 26)
+SOURCE_FILE@[0; 26)
FN_DEF@[0; 25)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 81)
+SOURCE_FILE@[0; 81)
STRUCT_DEF@[0; 20)
VISIBILITY@[0; 10)
PUB_KW@[0; 3)
-ROOT@[0; 30)
+SOURCE_FILE@[0; 30)
FN_DEF@[0; 11)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 91)
+SOURCE_FILE@[0; 91)
FN_DEF@[0; 90)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 189)
+SOURCE_FILE@[0; 189)
FN_DEF@[0; 188)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 35)
+SOURCE_FILE@[0; 35)
STRUCT_DEF@[0; 34)
STRUCT_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 19)
+SOURCE_FILE@[0; 19)
STRUCT_DEF@[0; 18)
STRUCT_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 57)
+SOURCE_FILE@[0; 57)
FN_DEF@[0; 56)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 52)
+SOURCE_FILE@[0; 52)
FN_DEF@[0; 51)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 65)
+SOURCE_FILE@[0; 65)
FN_DEF@[0; 9)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 20)
+SOURCE_FILE@[0; 20)
IMPL_ITEM@[0; 19)
DEFAULT_KW@[0; 7)
WHITESPACE@[7; 8)
-ROOT@[0; 12)
+SOURCE_FILE@[0; 12)
IMPL_ITEM@[0; 11)
IMPL_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 20)
+SOURCE_FILE@[0; 20)
IMPL_ITEM@[0; 19)
IMPL_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 83)
+SOURCE_FILE@[0; 83)
IMPL_ITEM@[0; 82)
IMPL_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 77)
+SOURCE_FILE@[0; 77)
FN_DEF@[0; 76)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 49)
+SOURCE_FILE@[0; 49)
FN_DEF@[0; 48)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 37)
+SOURCE_FILE@[0; 37)
FN_DEF@[0; 36)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 21)
+SOURCE_FILE@[0; 21)
FN_DEF@[0; 20)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 119)
+SOURCE_FILE@[0; 119)
FN_DEF@[0; 118)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 128)
+SOURCE_FILE@[0; 128)
IMPL_ITEM@[0; 127)
IMPL_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 42)
+SOURCE_FILE@[0; 42)
TRAIT_DEF@[0; 41)
TRAIT_KW@[0; 5)
WHITESPACE@[5; 6)
-ROOT@[0; 16)
+SOURCE_FILE@[0; 16)
TRAIT_DEF@[0; 15)
AUTO_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 35)
+SOURCE_FILE@[0; 35)
TYPE_DEF@[0; 34)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 29)
+SOURCE_FILE@[0; 29)
FN_DEF@[0; 28)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 28)
+SOURCE_FILE@[0; 28)
FN_DEF@[0; 27)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 86)
+SOURCE_FILE@[0; 86)
FN_DEF@[0; 85)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 43)
+SOURCE_FILE@[0; 43)
TYPE_DEF@[0; 42)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 74)
+SOURCE_FILE@[0; 74)
FN_DEF@[0; 74)
FN_KW@[0; 2)
NAME@[2; 6)
-ROOT@[0; 67)
+SOURCE_FILE@[0; 67)
FN_DEF@[0; 9)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 107)
+SOURCE_FILE@[0; 107)
FN_DEF@[0; 106)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 79)
+SOURCE_FILE@[0; 79)
FN_DEF@[0; 78)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 52)
+SOURCE_FILE@[0; 52)
FN_DEF@[0; 51)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 29)
+SOURCE_FILE@[0; 29)
FN_DEF@[0; 20)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 40)
+SOURCE_FILE@[0; 40)
FN_DEF@[0; 39)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 125)
+SOURCE_FILE@[0; 125)
FN_DEF@[0; 124)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 47)
+SOURCE_FILE@[0; 47)
FN_DEF@[0; 46)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 97)
+SOURCE_FILE@[0; 97)
FN_DEF@[0; 96)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 103)
+SOURCE_FILE@[0; 103)
FN_DEF@[0; 102)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 89)
+SOURCE_FILE@[0; 89)
IMPL_ITEM@[0; 88)
IMPL_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 44)
+SOURCE_FILE@[0; 44)
FN_DEF@[0; 43)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 21)
+SOURCE_FILE@[0; 21)
FN_DEF@[0; 20)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 38)
+SOURCE_FILE@[0; 38)
FN_DEF@[0; 37)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 70)
+SOURCE_FILE@[0; 70)
FN_DEF@[0; 69)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 70)
+SOURCE_FILE@[0; 70)
FN_DEF@[0; 11)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 28)
+SOURCE_FILE@[0; 28)
FN_DEF@[0; 27)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 40)
+SOURCE_FILE@[0; 40)
FN_DEF@[0; 39)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 26)
+SOURCE_FILE@[0; 26)
FN_DEF@[0; 25)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 39)
+SOURCE_FILE@[0; 39)
FN_DEF@[0; 38)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 26)
+SOURCE_FILE@[0; 26)
FN_DEF@[0; 25)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 26)
+SOURCE_FILE@[0; 26)
FN_DEF@[0; 25)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 33)
+SOURCE_FILE@[0; 33)
FN_DEF@[0; 32)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 83)
+SOURCE_FILE@[0; 83)
FN_DEF@[0; 82)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 55)
+SOURCE_FILE@[0; 55)
FN_DEF@[0; 54)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 107)
+SOURCE_FILE@[0; 107)
FN_DEF@[0; 106)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 84)
+SOURCE_FILE@[0; 84)
FN_DEF@[0; 83)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 46)
+SOURCE_FILE@[0; 46)
FN_DEF@[0; 45)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 39)
+SOURCE_FILE@[0; 39)
FN_DEF@[0; 38)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 22)
+SOURCE_FILE@[0; 22)
TRAIT_DEF@[0; 21)
TRAIT_KW@[0; 5)
WHITESPACE@[5; 6)
-ROOT@[0; 95)
+SOURCE_FILE@[0; 95)
FN_DEF@[0; 94)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 32)
+SOURCE_FILE@[0; 32)
TYPE_DEF@[0; 31)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 47)
+SOURCE_FILE@[0; 47)
FN_DEF@[0; 46)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 27)
+SOURCE_FILE@[0; 27)
FN_DEF@[0; 26)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 54)
+SOURCE_FILE@[0; 54)
TYPE_DEF@[0; 53)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 35)
+SOURCE_FILE@[0; 35)
FN_DEF@[0; 34)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 87)
+SOURCE_FILE@[0; 87)
FN_DEF@[0; 86)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 20)
+SOURCE_FILE@[0; 20)
FN_DEF@[0; 19)
VISIBILITY@[0; 5)
CRATE_KW@[0; 5)
-ROOT@[0; 42)
+SOURCE_FILE@[0; 42)
TYPE_DEF@[0; 41)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 71)
+SOURCE_FILE@[0; 71)
TYPE_DEF@[0; 26)
TYPE_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 21)
+SOURCE_FILE@[0; 21)
FN_DEF@[0; 20)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 64)
+SOURCE_FILE@[0; 64)
STRUCT_DEF@[0; 63)
STRUCT_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 69)
+SOURCE_FILE@[0; 69)
IMPL_ITEM@[0; 68)
IMPL_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 69)
+SOURCE_FILE@[0; 69)
FN_DEF@[0; 68)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 102)
+SOURCE_FILE@[0; 102)
FN_DEF@[0; 101)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 74)
+SOURCE_FILE@[0; 74)
FN_DEF@[0; 73)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 30)
+SOURCE_FILE@[0; 30)
FN_DEF@[0; 29)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 106)
+SOURCE_FILE@[0; 106)
STRUCT_DEF@[0; 11)
STRUCT_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 51)
+SOURCE_FILE@[0; 51)
STRUCT_DEF@[0; 12)
UNION_KW@[0; 5)
WHITESPACE@[5; 6)
-ROOT@[0; 87)
+SOURCE_FILE@[0; 87)
IMPL_ITEM@[0; 12)
IMPL_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 16)
+SOURCE_FILE@[0; 16)
USE_ITEM@[0; 15)
USE_KW@[0; 3)
WHITESPACE@[3; 4)
-ROOT@[0; 49)
+SOURCE_FILE@[0; 49)
FN_DEF@[0; 48)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 0)
+SOURCE_FILE@[0; 0)
-ROOT@[0; 32)
+SOURCE_FILE@[0; 32)
STRUCT_DEF@[0; 31)
STRUCT_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 25)
+SOURCE_FILE@[0; 25)
STRUCT_DEF@[0; 25)
STRUCT_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 20)
+SOURCE_FILE@[0; 20)
SHEBANG@[0; 20)
-ROOT@[0; 13)
+SOURCE_FILE@[0; 13)
FN_DEF@[0; 12)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 236)
+SOURCE_FILE@[0; 236)
ATTR@[0; 8)
POUND@[0; 1)
EXCL@[1; 2)
-ROOT@[0; 43)
+SOURCE_FILE@[0; 43)
EXTERN_CRATE_ITEM@[0; 17)
EXTERN_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 118)
+SOURCE_FILE@[0; 118)
MODULE@[0; 6)
MOD_KW@[0; 3)
WHITESPACE@[3; 4)
-ROOT@[0; 19)
+SOURCE_FILE@[0; 19)
USE_ITEM@[0; 8)
USE_KW@[0; 3)
WHITESPACE@[3; 4)
-ROOT@[0; 40)
+SOURCE_FILE@[0; 40)
USE_ITEM@[0; 20)
USE_KW@[0; 3)
WHITESPACE@[3; 4)
-ROOT@[0; 35)
+SOURCE_FILE@[0; 35)
FN_DEF@[0; 34)
ATTR@[0; 12)
POUND@[0; 1)
-ROOT@[0; 98)
+SOURCE_FILE@[0; 98)
FN_DEF@[0; 9)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 65)
+SOURCE_FILE@[0; 65)
USE_ITEM@[0; 14)
USE_KW@[0; 3)
WHITESPACE@[3; 4)
-ROOT@[0; 81)
+SOURCE_FILE@[0; 81)
USE_ITEM@[0; 6)
USE_KW@[0; 3)
WHITESPACE@[3; 4)
-ROOT@[0; 55)
+SOURCE_FILE@[0; 55)
USE_ITEM@[0; 15)
USE_KW@[0; 3)
WHITESPACE@[3; 4)
-ROOT@[0; 97)
+SOURCE_FILE@[0; 97)
STRUCT_DEF@[0; 9)
STRUCT_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 23)
+SOURCE_FILE@[0; 23)
FN_DEF@[0; 22)
ATTR@[0; 10)
POUND@[0; 1)
-ROOT@[0; 290)
+SOURCE_FILE@[0; 290)
STRUCT_DEF@[0; 13)
STRUCT_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 182)
+SOURCE_FILE@[0; 182)
ENUM_DEF@[0; 11)
ENUM_KW@[0; 4)
WHITESPACE@[4; 5)
-ROOT@[0; 200)
+SOURCE_FILE@[0; 200)
STRUCT_DEF@[0; 12)
STRUCT_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 71)
+SOURCE_FILE@[0; 71)
FN_DEF@[0; 19)
ABI@[0; 6)
EXTERN_KW@[0; 6)
-ROOT@[0; 27)
+SOURCE_FILE@[0; 27)
EXTERN_BLOCK@[0; 10)
ABI@[0; 6)
EXTERN_KW@[0; 6)
-ROOT@[0; 47)
+SOURCE_FILE@[0; 47)
STATIC_DEF@[0; 20)
STATIC_KW@[0; 6)
WHITESPACE@[6; 7)
-ROOT@[0; 46)
+SOURCE_FILE@[0; 46)
CONST_DEF@[0; 20)
CONST_KW@[0; 5)
WHITESPACE@[5; 6)
-ROOT@[0; 35)
+SOURCE_FILE@[0; 35)
FN_DEF@[0; 34)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 34)
+SOURCE_FILE@[0; 34)
FN_DEF@[0; 33)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 53)
+SOURCE_FILE@[0; 53)
FN_DEF@[0; 52)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 248)
+SOURCE_FILE@[0; 248)
FN_DEF@[0; 247)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 79)
+SOURCE_FILE@[0; 79)
FN_DEF@[0; 78)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 164)
+SOURCE_FILE@[0; 164)
TRAIT_DEF@[0; 66)
VISIBILITY@[0; 3)
PUB_KW@[0; 3)
-ROOT@[0; 1598)
+SOURCE_FILE@[0; 1598)
EXTERN_BLOCK@[0; 1597)
ABI@[0; 6)
EXTERN_KW@[0; 6)
-ROOT@[0; 116)
+SOURCE_FILE@[0; 116)
FN_DEF@[0; 115)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
-ROOT@[0; 506)
+SOURCE_FILE@[0; 506)
FN_DEF@[0; 505)
COMMENT@[0; 33)
WHITESPACE@[33; 34)
-ROOT@[0; 350)
+SOURCE_FILE@[0; 350)
MACRO_CALL@[0; 41)
PATH@[0; 5)
PATH_SEGMENT@[0; 5)
-ROOT@[0; 350)
+SOURCE_FILE@[0; 350)
MACRO_CALL@[0; 41)
PATH@[0; 5)
PATH_SEGMENT@[0; 5)
-ROOT@[0; 62)
+SOURCE_FILE@[0; 62)
FN_DEF@[0; 61)
FN_KW@[0; 2)
WHITESPACE@[2; 3)
use ra_syntax::{
utils::{check_fuzz_invariants, dump_tree},
- File,
+ SourceFileNode,
};
#[test]
#[test]
fn parser_tests() {
dir_tests(&["parser/inline", "parser/ok", "parser/err"], |text| {
- let file = File::parse(text);
+ let file = SourceFileNode::parse(text);
dump_tree(file.syntax())
})
}
debug: run
};
const clientOptions: lc.LanguageClientOptions = {
- documentSelector: [{ scheme: 'file', language: 'rust' }]
+ documentSelector: [{ scheme: 'file', language: 'rust' }],
+ initializationOptions: {
+ publishDecorations: true,
+ }
};
Server.client = new lc.LanguageClient(
"Set selections cache for current buffer state and START END."
(setq ra--selections-cache `(,(buffer-modified-tick) 0 ,(ra--selections start end))))
+
+(require 'eglot)
+(require 'ivy)
+(require 'counsel)
+
+
+;; Interactive workspace-symbol search: queries the eglot xref backend with
+;; the current ivy input (dynamic collection), renders each hit as
+;; "file:line:column: summary" with compilation faces, and jumps to the
+;; selected xref location.
+(defun workspace-symbols ()
+  (interactive)
+  (let ((buf (current-buffer)))
+    (ivy-read "Symbol name: "
+              (lambda (str)
+                ;; Re-query from the originating buffer so eglot picks the
+                ;; right LSP server for each keystroke.
+                (with-current-buffer buf
+                  (let ((backend (eglot-xref-backend)))
+                    (mapcar
+                     (lambda (xref)
+                       (let ((loc (xref-item-location xref)))
+                         ;; Stash the xref object on the candidate string so
+                         ;; :action can recover it without a second lookup.
+                         (propertize
+                          (concat
+                           (when (xref-file-location-p loc)
+                             (with-slots (file line column) loc
+                               (format "%s:%s:%s:"
+                                       (propertize (file-relative-name file)
+                                                   'face 'compilation-info)
+                                       (propertize (format "%s" line)
+                                                   'face 'compilation-line
+                                                   )
+                                       column)))
+                           (xref-item-summary xref))
+                          'xref xref)))
+                     (xref-backend-apropos backend str))
+                    )))
+              :dynamic-collection t
+              :action (lambda (item)
+                        (xref--pop-to-location (get-text-property 0 'xref item))))))
+
+(add-to-list 'eglot-server-programs '(rust-mode . ("ra_lsp_server")))
+
+; (require 'rust-mode)
+; (define-key rust-mode-map (kbd "C-n") 'workspace-symbols)
+
+(define-key)
(provide 'ra)
;;; ra.el ends here