--- /dev/null
- **rust-analyzer version**: (eg. output of "Rust Analyzer: Show RA Version" command)
+---
+name: Bug report
+about: Create a bug report for rust-analyzer.
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+<!--
+Troubleshooting guide: https://rust-analyzer.github.io/manual.html#troubleshooting
+Forum for questions: https://users.rust-lang.org/c/ide/14
+
+Before submitting, please make sure that you're not running into one of these known issues:
+
+ 1. extension doesn't load in VSCodium: #11080
+ 2. on-the-fly diagnostics are mostly unimplemented (`cargo check` diagnostics will be shown when saving a file): #3107
+
+Otherwise please try to provide information which will help us to fix the issue faster. Minimal reproducible examples with few dependencies are especially lovely <3.
+-->
+
++**rust-analyzer version**: (eg. output of "rust-analyzer: Show RA Version" command, accessible in VSCode via <kbd>Ctrl/⌘</kbd>+<kbd>Shift</kbd>+<kbd>P</kbd>)
+
+**rustc version**: (eg. output of `rustc -V`)
+
+**relevant settings**: (eg. client settings, or environment variables like `CARGO`, `RUSTUP_HOME` or `CARGO_HOME`)
--- /dev/null
- if: matrix.target == 'aarch64-unknown-linux-gnu'
+name: release
+on:
+ schedule:
+ - cron: "0 0 * * *" # midnight UTC
+
+ workflow_dispatch:
+
+ push:
+ branches:
+ - release
+ - trigger-nightly
+
+env:
+ CARGO_INCREMENTAL: 0
+ CARGO_NET_RETRY: 10
+ RUSTFLAGS: "-D warnings -W unreachable-pub"
+ RUSTUP_MAX_RETRIES: 10
+ FETCH_DEPTH: 0 # pull in the tags for the version string
+ MACOSX_DEPLOYMENT_TARGET: 10.15
+ CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER: aarch64-linux-gnu-gcc
++ CARGO_TARGET_ARM_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc
+
+jobs:
+ dist:
+ strategy:
+ matrix:
+ include:
+ - os: windows-latest
+ target: x86_64-pc-windows-msvc
+ code-target: win32-x64
+ - os: windows-latest
+ target: aarch64-pc-windows-msvc
+ code-target: win32-arm64
+ - os: ubuntu-18.04
+ target: x86_64-unknown-linux-gnu
+ code-target: linux-x64
+ - os: ubuntu-18.04
+ target: aarch64-unknown-linux-gnu
+ code-target: linux-arm64
++ - os: ubuntu-18.04
++ target: arm-unknown-linux-gnueabihf
++ code-target: linux-armhf
+ - os: macos-11
+ target: x86_64-apple-darwin
+ code-target: darwin-x64
+ - os: macos-11
+ target: aarch64-apple-darwin
+ code-target: darwin-arm64
+
+ name: dist (${{ matrix.target }})
+ runs-on: ${{ matrix.os }}
+
+ env:
+ RA_TARGET: ${{ matrix.target }}
+
+ steps:
+ - name: Checkout repository
+ uses: actions/checkout@v3
+ with:
+ fetch-depth: ${{ env.FETCH_DEPTH }}
+
+ - name: Install Rust toolchain
+ run: |
+ rustup update --no-self-update stable
+ rustup target add ${{ matrix.target }}
+ rustup component add rust-src
+
+ - name: Install Node.js
+ uses: actions/setup-node@v1
+ with:
+ node-version: 14.x
+
+ - name: Update apt repositories
- - name: Install target toolchain
++ if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'arm-unknown-linux-gnueabihf'
+ run: sudo apt-get update
+
++ - name: Install AArch64 target toolchain
+ if: matrix.target == 'aarch64-unknown-linux-gnu'
+ run: sudo apt-get install gcc-aarch64-linux-gnu
+
++ - name: Install ARM target toolchain
++ if: matrix.target == 'arm-unknown-linux-gnueabihf'
++ run: sudo apt-get install gcc-arm-linux-gnueabihf
++
+ - name: Dist
+ run: cargo xtask dist --client-patch-version ${{ github.run_number }}
+
+ - run: npm ci
+ working-directory: editors/code
+
+ - name: Package Extension (release)
+ if: github.ref == 'refs/heads/release'
+ run: npx vsce package -o "../../dist/rust-analyzer-${{ matrix.code-target }}.vsix" --target ${{ matrix.code-target }}
+ working-directory: editors/code
+
+ - name: Package Extension (nightly)
+ if: github.ref != 'refs/heads/release'
+ run: npx vsce package -o "../../dist/rust-analyzer-${{ matrix.code-target }}.vsix" --target ${{ matrix.code-target }} --pre-release
+ working-directory: editors/code
+
+ - if: matrix.target == 'x86_64-unknown-linux-gnu'
+ run: rm -rf editors/code/server
+
+ - if: matrix.target == 'x86_64-unknown-linux-gnu' && github.ref == 'refs/heads/release'
+ run: npx vsce package -o ../../dist/rust-analyzer-no-server.vsix
+ working-directory: editors/code
+
+ - if: matrix.target == 'x86_64-unknown-linux-gnu' && github.ref != 'refs/heads/release'
+ run: npx vsce package -o ../../dist/rust-analyzer-no-server.vsix --pre-release
+ working-directory: editors/code
+
+ - name: Run analysis-stats on rust-analyzer
+ if: matrix.target == 'x86_64-unknown-linux-gnu'
+ run: target/${{ matrix.target }}/release/rust-analyzer analysis-stats .
+
+ - name: Run analysis-stats on rust std library
+ if: matrix.target == 'x86_64-unknown-linux-gnu'
+ run: target/${{ matrix.target }}/release/rust-analyzer analysis-stats --with-deps $(rustc --print sysroot)/lib/rustlib/src/rust/library/std
+
+ - name: Upload artifacts
+ uses: actions/upload-artifact@v1
+ with:
+ name: dist-${{ matrix.target }}
+ path: ./dist
+
+ dist-x86_64-unknown-linux-musl:
+ name: dist (x86_64-unknown-linux-musl)
+ runs-on: ubuntu-latest
+ env:
+ RA_TARGET: x86_64-unknown-linux-musl
+ # For some reason `-crt-static` is not working for clang without lld
+ RUSTFLAGS: "-C link-arg=-fuse-ld=lld -C target-feature=-crt-static"
+ container:
+ image: rust:alpine
+ volumes:
+ - /usr/local/cargo/registry
+
+ steps:
+ - name: Install dependencies
+ run: apk add --no-cache git clang lld musl-dev nodejs npm
+
+ - name: Checkout repository
+ uses: actions/checkout@v3
+ with:
+ fetch-depth: ${{ env.FETCH_DEPTH }}
+
+ - name: Dist
+ run: cargo xtask dist --client-patch-version ${{ github.run_number }}
+
+ - run: npm ci
+ working-directory: editors/code
+
+ - name: Package Extension (release)
+ if: github.ref == 'refs/heads/release'
+ run: npx vsce package -o "../../dist/rust-analyzer-alpine-x64.vsix" --target alpine-x64
+ working-directory: editors/code
+
+ - name: Package Extension (nightly)
+ if: github.ref != 'refs/heads/release'
+ run: npx vsce package -o "../../dist/rust-analyzer-alpine-x64.vsix" --target alpine-x64 --pre-release
+ working-directory: editors/code
+
+ - run: rm -rf editors/code/server
+
+ - name: Upload artifacts
+ uses: actions/upload-artifact@v1
+ with:
+ name: dist-x86_64-unknown-linux-musl
+ path: ./dist
+
+ publish:
+ name: publish
+ runs-on: ubuntu-latest
+ needs: ["dist", "dist-x86_64-unknown-linux-musl"]
+ steps:
+ - name: Install Nodejs
+ uses: actions/setup-node@v1
+ with:
+ node-version: 14.x
+
+ - run: echo "TAG=$(date --iso -u)" >> $GITHUB_ENV
+ if: github.ref == 'refs/heads/release'
+ - run: echo "TAG=nightly" >> $GITHUB_ENV
+ if: github.ref != 'refs/heads/release'
+ - run: 'echo "TAG: $TAG"'
+
+ - name: Checkout repository
+ uses: actions/checkout@v3
+ with:
+ fetch-depth: ${{ env.FETCH_DEPTH }}
+
+ - run: echo "HEAD_SHA=$(git rev-parse HEAD)" >> $GITHUB_ENV
+ - run: 'echo "HEAD_SHA: $HEAD_SHA"'
+
+ - uses: actions/download-artifact@v1
+ with:
+ name: dist-aarch64-apple-darwin
+ path: dist
+ - uses: actions/download-artifact@v1
+ with:
+ name: dist-x86_64-apple-darwin
+ path: dist
+ - uses: actions/download-artifact@v1
+ with:
+ name: dist-x86_64-unknown-linux-gnu
+ path: dist
+ - uses: actions/download-artifact@v1
+ with:
+ name: dist-x86_64-unknown-linux-musl
+ path: dist
+ - uses: actions/download-artifact@v1
+ with:
+ name: dist-aarch64-unknown-linux-gnu
+ path: dist
++ - uses: actions/download-artifact@v1
++ with:
++ name: dist-arm-unknown-linux-gnueabihf
++ path: dist
+ - uses: actions/download-artifact@v1
+ with:
+ name: dist-x86_64-pc-windows-msvc
+ path: dist
+ - uses: actions/download-artifact@v1
+ with:
+ name: dist-aarch64-pc-windows-msvc
+ path: dist
+ - run: ls -al ./dist
+
+ - name: Publish Release
+ uses: ./.github/actions/github-release
+ with:
+ files: "dist/*"
+ name: ${{ env.TAG }}
+ token: ${{ secrets.GITHUB_TOKEN }}
+
+ - run: rm dist/rust-analyzer-no-server.vsix
+
+ - run: npm ci
+ working-directory: ./editors/code
+
+ - name: Publish Extension (Code Marketplace, release)
+ if: github.ref == 'refs/heads/release' && (github.repository == 'rust-analyzer/rust-analyzer' || github.repository == 'rust-lang/rust-analyzer')
+ working-directory: ./editors/code
+ # token from https://dev.azure.com/rust-analyzer/
+ run: npx vsce publish --pat ${{ secrets.MARKETPLACE_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix
+
+ - name: Publish Extension (OpenVSX, release)
+ if: github.ref == 'refs/heads/release' && (github.repository == 'rust-analyzer/rust-analyzer' || github.repository == 'rust-lang/rust-analyzer')
+ working-directory: ./editors/code
+ # token from https://dev.azure.com/rust-analyzer/
+ run: npx ovsx publish --pat ${{ secrets.OPENVSX_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix
+
+ - name: Publish Extension (Code Marketplace, nightly)
+ if: github.ref != 'refs/heads/release' && (github.repository == 'rust-analyzer/rust-analyzer' || github.repository == 'rust-lang/rust-analyzer')
+ working-directory: ./editors/code
+ run: npx vsce publish --pat ${{ secrets.MARKETPLACE_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix --pre-release
+
+ - name: Publish Extension (OpenVSX, nightly)
+ if: github.ref != 'refs/heads/release' && (github.repository == 'rust-analyzer/rust-analyzer' || github.repository == 'rust-lang/rust-analyzer')
+ working-directory: ./editors/code
+ run: npx ovsx publish --pat ${{ secrets.OPENVSX_TOKEN }} --packagePath ../../dist/rust-analyzer-*.vsix --pre-release
--- /dev/null
- Rust analyzer is primarily distributed under the terms of both the MIT
+<p align="center">
+ <img
+ src="https://raw.githubusercontent.com/rust-analyzer/rust-analyzer/master/assets/logo-wide.svg"
+ alt="rust-analyzer logo">
+</p>
+
+rust-analyzer is a modular compiler frontend for the Rust language.
+It is a part of a larger rls-2.0 effort to create excellent IDE support for Rust.
+
+## Quick Start
+
+https://rust-analyzer.github.io/manual.html#installation
+
+## Documentation
+
+If you want to **contribute** to rust-analyzer or are just curious about how
+things work under the hood, check the [./docs/dev](./docs/dev) folder.
+
+If you want to **use** rust-analyzer's language server with your editor of
+choice, check [the manual](https://rust-analyzer.github.io/manual.html).
+It also contains some tips & tricks to help you be more productive when using rust-analyzer.
+
+## Security and Privacy
+
+See the corresponding sections of [the manual](https://rust-analyzer.github.io/manual.html#security).
+
+## Communication
+
+For usage and troubleshooting requests, please use "IDEs and Editors" category of the Rust forum:
+
+https://users.rust-lang.org/c/ide/14
+
+For questions about development and implementation, join rust-analyzer working group on Zulip:
+
+https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer
+
+## Quick Links
+
+* Website: https://rust-analyzer.github.io/
+* Metrics: https://rust-analyzer.github.io/metrics/
+* API docs: https://rust-lang.github.io/rust-analyzer/ide/
+* Changelog: https://rust-analyzer.github.io/thisweek
+
+## License
+
++rust-analyzer is primarily distributed under the terms of both the MIT
+license and the Apache License (Version 2.0).
+
+See LICENSE-APACHE and LICENSE-MIT for details.
--- /dev/null
- FlycheckHandle { sender, _thread: thread }
+//! Flycheck provides the functionality needed to run `cargo check` or
+//! another compatible command (e.g. clippy) in a background thread and provide
+//! LSP diagnostics based on the output of the command.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use std::{
+ fmt, io,
+ process::{ChildStderr, ChildStdout, Command, Stdio},
+ time::Duration,
+};
+
+use crossbeam_channel::{never, select, unbounded, Receiver, Sender};
+use paths::AbsPathBuf;
+use serde::Deserialize;
+use stdx::{process::streaming_output, JodChild};
+
+pub use cargo_metadata::diagnostic::{
+ Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan,
+ DiagnosticSpanMacroExpansion,
+};
+
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum FlycheckConfig {
+ CargoCommand {
+ command: String,
+ target_triple: Option<String>,
+ all_targets: bool,
+ no_default_features: bool,
+ all_features: bool,
+ features: Vec<String>,
+ extra_args: Vec<String>,
+ },
+ CustomCommand {
+ command: String,
+ args: Vec<String>,
+ },
+}
+
+impl fmt::Display for FlycheckConfig {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ FlycheckConfig::CargoCommand { command, .. } => write!(f, "cargo {}", command),
+ FlycheckConfig::CustomCommand { command, args } => {
+ write!(f, "{} {}", command, args.join(" "))
+ }
+ }
+ }
+}
+
+/// Flycheck wraps the shared state and communication machinery used for
+/// running `cargo check` (or other compatible command) and providing
+/// diagnostics based on the output.
+/// The spawned thread is shut down when this struct is dropped.
+#[derive(Debug)]
+pub struct FlycheckHandle {
+ // XXX: drop order is significant
+ sender: Sender<Restart>,
+ _thread: jod_thread::JoinHandle,
++ id: usize,
+}
+
+impl FlycheckHandle {
+ pub fn spawn(
+ id: usize,
+ sender: Box<dyn Fn(Message) + Send>,
+ config: FlycheckConfig,
+ workspace_root: AbsPathBuf,
+ ) -> FlycheckHandle {
+ let actor = FlycheckActor::new(id, sender, config, workspace_root);
+ let (sender, receiver) = unbounded::<Restart>();
+ let thread = jod_thread::Builder::new()
+ .name("Flycheck".to_owned())
+ .spawn(move || actor.run(receiver))
+ .expect("failed to spawn thread");
- AddDiagnostic { workspace_root: AbsPathBuf, diagnostic: Diagnostic },
++ FlycheckHandle { id, sender, _thread: thread }
+ }
+
+ /// Schedule a re-start of the cargo check worker.
+ pub fn update(&self) {
+ self.sender.send(Restart).unwrap();
+ }
++
++ pub fn id(&self) -> usize {
++ self.id
++ }
+}
+
+pub enum Message {
+ /// Request adding a diagnostic with fixes included to a file
- Message::AddDiagnostic { workspace_root, diagnostic } => f
++ AddDiagnostic { id: usize, workspace_root: AbsPathBuf, diagnostic: Diagnostic },
+
+ /// Request check progress notification to client
+ Progress {
+ /// Flycheck instance ID
+ id: usize,
+ progress: Progress,
+ },
+}
+
+impl fmt::Debug for Message {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
- tracing::debug!("flycheck finished");
++ Message::AddDiagnostic { id, workspace_root, diagnostic } => f
+ .debug_struct("AddDiagnostic")
++ .field("id", id)
+ .field("workspace_root", workspace_root)
+ .field("diagnostic_code", &diagnostic.code.as_ref().map(|it| &it.code))
+ .finish(),
+ Message::Progress { id, progress } => {
+ f.debug_struct("Progress").field("id", id).field("progress", progress).finish()
+ }
+ }
+ }
+}
+
+#[derive(Debug)]
+pub enum Progress {
+ DidStart,
+ DidCheckCrate(String),
+ DidFinish(io::Result<()>),
+ DidCancel,
+}
+
+struct Restart;
+
+struct FlycheckActor {
+ id: usize,
+ sender: Box<dyn Fn(Message) + Send>,
+ config: FlycheckConfig,
+ workspace_root: AbsPathBuf,
+ /// CargoHandle exists to wrap around the communication needed to be able to
+ /// run `cargo check` without blocking. Currently the Rust standard library
+ /// doesn't provide a way to read sub-process output without blocking, so we
+ /// have to wrap sub-processes output handling in a thread and pass messages
+ /// back over a channel.
+ cargo_handle: Option<CargoHandle>,
+}
+
+enum Event {
+ Restart(Restart),
+ CheckEvent(Option<CargoMessage>),
+}
+
+impl FlycheckActor {
+ fn new(
+ id: usize,
+ sender: Box<dyn Fn(Message) + Send>,
+ config: FlycheckConfig,
+ workspace_root: AbsPathBuf,
+ ) -> FlycheckActor {
+ FlycheckActor { id, sender, config, workspace_root, cargo_handle: None }
+ }
+ fn progress(&self, progress: Progress) {
+ self.send(Message::Progress { id: self.id, progress });
+ }
+ fn next_event(&self, inbox: &Receiver<Restart>) -> Option<Event> {
+ let check_chan = self.cargo_handle.as_ref().map(|cargo| &cargo.receiver);
+ select! {
+ recv(inbox) -> msg => msg.ok().map(Event::Restart),
+ recv(check_chan.unwrap_or(&never())) -> msg => Some(Event::CheckEvent(msg.ok())),
+ }
+ }
+ fn run(mut self, inbox: Receiver<Restart>) {
+ while let Some(event) = self.next_event(&inbox) {
+ match event {
+ Event::Restart(Restart) => {
+ // Cancel the previously spawned process
+ self.cancel_check_process();
+ while let Ok(Restart) = inbox.recv_timeout(Duration::from_millis(50)) {}
+
+ let command = self.check_command();
+ tracing::debug!(?command, "will restart flycheck");
+ match CargoHandle::spawn(command) {
+ Ok(cargo_handle) => {
+ tracing::debug!(
+ command = ?self.check_command(),
+ "did restart flycheck"
+ );
+ self.cargo_handle = Some(cargo_handle);
+ self.progress(Progress::DidStart);
+ }
+ Err(error) => {
+ tracing::error!(
+ command = ?self.check_command(),
+ %error, "failed to restart flycheck"
+ );
+ }
+ }
+ }
+ Event::CheckEvent(None) => {
++ tracing::debug!(flycheck_id = self.id, "flycheck finished");
+
+ // Watcher finished
+ let cargo_handle = self.cargo_handle.take().unwrap();
+ let res = cargo_handle.join();
+ if res.is_err() {
+ tracing::error!(
+ "Flycheck failed to run the following command: {:?}",
+ self.check_command()
+ );
+ }
+ self.progress(Progress::DidFinish(res));
+ }
+ Event::CheckEvent(Some(message)) => match message {
+ CargoMessage::CompilerArtifact(msg) => {
+ self.progress(Progress::DidCheckCrate(msg.target.name));
+ }
+
+ CargoMessage::Diagnostic(msg) => {
+ self.send(Message::AddDiagnostic {
++ id: self.id,
+ workspace_root: self.workspace_root.clone(),
+ diagnostic: msg,
+ });
+ }
+ },
+ }
+ }
+ // If we rerun the thread, we need to discard the previous check results first
+ self.cancel_check_process();
+ }
+
+ fn cancel_check_process(&mut self) {
+ if let Some(cargo_handle) = self.cargo_handle.take() {
+ cargo_handle.cancel();
+ self.progress(Progress::DidCancel);
+ }
+ }
+
+ fn check_command(&self) -> Command {
+ let mut cmd = match &self.config {
+ FlycheckConfig::CargoCommand {
+ command,
+ target_triple,
+ no_default_features,
+ all_targets,
+ all_features,
+ extra_args,
+ features,
+ } => {
+ let mut cmd = Command::new(toolchain::cargo());
+ cmd.arg(command);
+ cmd.current_dir(&self.workspace_root);
+ cmd.args(&["--workspace", "--message-format=json", "--manifest-path"])
+ .arg(self.workspace_root.join("Cargo.toml").as_os_str());
+
+ if let Some(target) = target_triple {
+ cmd.args(&["--target", target.as_str()]);
+ }
+ if *all_targets {
+ cmd.arg("--all-targets");
+ }
+ if *all_features {
+ cmd.arg("--all-features");
+ } else {
+ if *no_default_features {
+ cmd.arg("--no-default-features");
+ }
+ if !features.is_empty() {
+ cmd.arg("--features");
+ cmd.arg(features.join(" "));
+ }
+ }
+ cmd.args(extra_args);
+ cmd
+ }
+ FlycheckConfig::CustomCommand { command, args } => {
+ let mut cmd = Command::new(command);
+ cmd.args(args);
+ cmd
+ }
+ };
+ cmd.current_dir(&self.workspace_root);
+ cmd
+ }
+
+ fn send(&self, check_task: Message) {
+ (self.sender)(check_task);
+ }
+}
+
+/// A handle to a cargo process used for fly-checking.
+struct CargoHandle {
+    /// The handle to the actual cargo process. As we cannot cancel directly from
+    /// within a read syscall, dropping and therefore terminating the process is
+    /// our best option.
+ child: JodChild,
+ thread: jod_thread::JoinHandle<io::Result<(bool, String)>>,
+ receiver: Receiver<CargoMessage>,
+}
+
+impl CargoHandle {
+ fn spawn(mut command: Command) -> std::io::Result<CargoHandle> {
+ command.stdout(Stdio::piped()).stderr(Stdio::piped()).stdin(Stdio::null());
+ let mut child = JodChild::spawn(command)?;
+
+ let stdout = child.stdout.take().unwrap();
+ let stderr = child.stderr.take().unwrap();
+
+ let (sender, receiver) = unbounded();
+ let actor = CargoActor::new(sender, stdout, stderr);
+ let thread = jod_thread::Builder::new()
+ .name("CargoHandle".to_owned())
+ .spawn(move || actor.run())
+ .expect("failed to spawn thread");
+ Ok(CargoHandle { child, thread, receiver })
+ }
+
+ fn cancel(mut self) {
+ let _ = self.child.kill();
+ let _ = self.child.wait();
+ }
+
+ fn join(mut self) -> io::Result<()> {
+ let _ = self.child.kill();
+ let exit_status = self.child.wait()?;
+ let (read_at_least_one_message, error) = self.thread.join()?;
+ if read_at_least_one_message || exit_status.success() {
+ Ok(())
+ } else {
+ Err(io::Error::new(io::ErrorKind::Other, format!(
+ "Cargo watcher failed, the command produced no valid metadata (exit code: {:?}):\n{}",
+ exit_status, error
+ )))
+ }
+ }
+}
+
+struct CargoActor {
+ sender: Sender<CargoMessage>,
+ stdout: ChildStdout,
+ stderr: ChildStderr,
+}
+
+impl CargoActor {
+ fn new(sender: Sender<CargoMessage>, stdout: ChildStdout, stderr: ChildStderr) -> CargoActor {
+ CargoActor { sender, stdout, stderr }
+ }
+
+ fn run(self) -> io::Result<(bool, String)> {
+ // We manually read a line at a time, instead of using serde's
+ // stream deserializers, because the deserializer cannot recover
+ // from an error, resulting in it getting stuck, because we try to
+ // be resilient against failures.
+ //
+ // Because cargo only outputs one JSON object per line, we can
+ // simply skip a line if it doesn't parse, which just ignores any
+        // erroneous output.
+
+ let mut error = String::new();
+ let mut read_at_least_one_message = false;
+ let output = streaming_output(
+ self.stdout,
+ self.stderr,
+ &mut |line| {
+ read_at_least_one_message = true;
+
+ // Try to deserialize a message from Cargo or Rustc.
+ let mut deserializer = serde_json::Deserializer::from_str(line);
+ deserializer.disable_recursion_limit();
+ if let Ok(message) = JsonMessage::deserialize(&mut deserializer) {
+ match message {
+ // Skip certain kinds of messages to only spend time on what's useful
+ JsonMessage::Cargo(message) => match message {
+ cargo_metadata::Message::CompilerArtifact(artifact)
+ if !artifact.fresh =>
+ {
+ self.sender.send(CargoMessage::CompilerArtifact(artifact)).unwrap();
+ }
+ cargo_metadata::Message::CompilerMessage(msg) => {
+ self.sender.send(CargoMessage::Diagnostic(msg.message)).unwrap();
+ }
+ _ => (),
+ },
+ JsonMessage::Rustc(message) => {
+ self.sender.send(CargoMessage::Diagnostic(message)).unwrap();
+ }
+ }
+ }
+ },
+ &mut |line| {
+ error.push_str(line);
+ error.push('\n');
+ },
+ );
+ match output {
+ Ok(_) => Ok((read_at_least_one_message, error)),
+ Err(e) => Err(io::Error::new(e.kind(), format!("{:?}: {}", e, error))),
+ }
+ }
+}
+
+enum CargoMessage {
+ CompilerArtifact(cargo_metadata::Artifact),
+ Diagnostic(Diagnostic),
+}
+
+#[derive(Deserialize)]
+#[serde(untagged)]
+enum JsonMessage {
+ Cargo(cargo_metadata::Message),
+ Rustc(Diagnostic),
+}
--- /dev/null
- params.insert(idx, Either::Right(trait_ref))
+//! Many kinds of items or constructs can have generic parameters: functions,
+//! structs, impls, traits, etc. This module provides a common HIR for these
+//! generic parameters. See also the `Generics` type and the `generics_of` query
+//! in rustc.
+
+use base_db::FileId;
+use either::Either;
+use hir_expand::{
+ name::{AsName, Name},
+ ExpandResult, HirFileId, InFile,
+};
+use la_arena::{Arena, ArenaMap, Idx};
+use once_cell::unsync::Lazy;
+use std::ops::DerefMut;
+use stdx::impl_from;
+use syntax::ast::{self, HasGenericParams, HasName, HasTypeBounds};
+
+use crate::{
+ body::{Expander, LowerCtx},
+ child_by_source::ChildBySource,
+ db::DefDatabase,
+ dyn_map::DynMap,
+ intern::Interned,
+ keys,
+ src::{HasChildSource, HasSource},
+ type_ref::{LifetimeRef, TypeBound, TypeRef},
+ AdtId, ConstParamId, GenericDefId, HasModule, LifetimeParamId, LocalLifetimeParamId,
+ LocalTypeOrConstParamId, Lookup, TypeOrConstParamId, TypeParamId,
+};
+
+/// Data about a generic type parameter (to a function, struct, impl, ...).
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct TypeParamData {
+ pub name: Option<Name>,
+ pub default: Option<Interned<TypeRef>>,
+ pub provenance: TypeParamProvenance,
+}
+
+/// Data about a generic lifetime parameter (to a function, struct, impl, ...).
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct LifetimeParamData {
+ pub name: Name,
+}
+
+/// Data about a generic const parameter (to a function, struct, impl, ...).
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub struct ConstParamData {
+ pub name: Name,
+ pub ty: Interned<TypeRef>,
+ pub has_default: bool,
+}
+
+#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)]
+pub enum TypeParamProvenance {
+ TypeParamList,
+ TraitSelf,
+ ArgumentImplTrait,
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub enum TypeOrConstParamData {
+ TypeParamData(TypeParamData),
+ ConstParamData(ConstParamData),
+}
+
+impl TypeOrConstParamData {
+ pub fn name(&self) -> Option<&Name> {
+ match self {
+ TypeOrConstParamData::TypeParamData(x) => x.name.as_ref(),
+ TypeOrConstParamData::ConstParamData(x) => Some(&x.name),
+ }
+ }
+
+ pub fn has_default(&self) -> bool {
+ match self {
+ TypeOrConstParamData::TypeParamData(x) => x.default.is_some(),
+ TypeOrConstParamData::ConstParamData(x) => x.has_default,
+ }
+ }
+
+ pub fn type_param(&self) -> Option<&TypeParamData> {
+ match self {
+ TypeOrConstParamData::TypeParamData(x) => Some(x),
+ TypeOrConstParamData::ConstParamData(_) => None,
+ }
+ }
+
+ pub fn const_param(&self) -> Option<&ConstParamData> {
+ match self {
+ TypeOrConstParamData::TypeParamData(_) => None,
+ TypeOrConstParamData::ConstParamData(x) => Some(x),
+ }
+ }
+
+ pub fn is_trait_self(&self) -> bool {
+ match self {
+ TypeOrConstParamData::TypeParamData(x) => {
+ x.provenance == TypeParamProvenance::TraitSelf
+ }
+ TypeOrConstParamData::ConstParamData(_) => false,
+ }
+ }
+}
+
+impl_from!(TypeParamData, ConstParamData for TypeOrConstParamData);
+
+/// Data about the generic parameters of a function, struct, impl, etc.
+#[derive(Clone, PartialEq, Eq, Debug, Default, Hash)]
+pub struct GenericParams {
+ pub type_or_consts: Arena<TypeOrConstParamData>,
+ pub lifetimes: Arena<LifetimeParamData>,
+ pub where_predicates: Vec<WherePredicate>,
+}
+
+/// A single predicate from a where clause, i.e. `where Type: Trait`. Combined
+/// where clauses like `where T: Foo + Bar` are turned into multiple of these.
+/// It might still result in multiple actual predicates though, because of
+/// associated type bindings like `Iterator<Item = u32>`.
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub enum WherePredicate {
+ TypeBound {
+ target: WherePredicateTypeTarget,
+ bound: Interned<TypeBound>,
+ },
+ Lifetime {
+ target: LifetimeRef,
+ bound: LifetimeRef,
+ },
+ ForLifetime {
+ lifetimes: Box<[Name]>,
+ target: WherePredicateTypeTarget,
+ bound: Interned<TypeBound>,
+ },
+}
+
+#[derive(Clone, PartialEq, Eq, Debug, Hash)]
+pub enum WherePredicateTypeTarget {
+ TypeRef(Interned<TypeRef>),
+ /// For desugared where predicates that can directly refer to a type param.
+ TypeOrConstParam(LocalTypeOrConstParamId),
+}
+
+impl GenericParams {
+ /// Iterator of type_or_consts field
+ pub fn iter<'a>(
+ &'a self,
+ ) -> impl DoubleEndedIterator<Item = (Idx<TypeOrConstParamData>, &TypeOrConstParamData)> {
+ self.type_or_consts.iter()
+ }
+
+ pub(crate) fn generic_params_query(
+ db: &dyn DefDatabase,
+ def: GenericDefId,
+ ) -> Interned<GenericParams> {
+ let _p = profile::span("generic_params_query");
+
+ macro_rules! id_to_generics {
+ ($id:ident) => {{
+ let id = $id.lookup(db).id;
+ let tree = id.item_tree(db);
+ let item = &tree[id.value];
+ item.generic_params.clone()
+ }};
+ }
+
+ match def {
+ GenericDefId::FunctionId(id) => {
+ let loc = id.lookup(db);
+ let tree = loc.id.item_tree(db);
+ let item = &tree[loc.id.value];
+
+ let mut generic_params = GenericParams::clone(&item.explicit_generic_params);
+
+ let module = loc.container.module(db);
+ let func_data = db.function_data(id);
+
+ // Don't create an `Expander` nor call `loc.source(db)` if not needed since this
+ // causes a reparse after the `ItemTree` has been created.
+ let mut expander = Lazy::new(|| Expander::new(db, loc.source(db).file_id, module));
+ for (_, param) in &func_data.params {
+ generic_params.fill_implicit_impl_trait_args(db, &mut expander, param);
+ }
+
+ Interned::new(generic_params)
+ }
+ GenericDefId::AdtId(AdtId::StructId(id)) => id_to_generics!(id),
+ GenericDefId::AdtId(AdtId::EnumId(id)) => id_to_generics!(id),
+ GenericDefId::AdtId(AdtId::UnionId(id)) => id_to_generics!(id),
+ GenericDefId::TraitId(id) => id_to_generics!(id),
+ GenericDefId::TypeAliasId(id) => id_to_generics!(id),
+ GenericDefId::ImplId(id) => id_to_generics!(id),
+ GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => {
+ Interned::new(GenericParams::default())
+ }
+ }
+ }
+
+ pub(crate) fn fill(&mut self, lower_ctx: &LowerCtx<'_>, node: &dyn HasGenericParams) {
+ if let Some(params) = node.generic_param_list() {
+ self.fill_params(lower_ctx, params)
+ }
+ if let Some(where_clause) = node.where_clause() {
+ self.fill_where_predicates(lower_ctx, where_clause);
+ }
+ }
+
+ pub(crate) fn fill_bounds(
+ &mut self,
+ lower_ctx: &LowerCtx<'_>,
+ node: &dyn ast::HasTypeBounds,
+ target: Either<TypeRef, LifetimeRef>,
+ ) {
+ for bound in
+ node.type_bound_list().iter().flat_map(|type_bound_list| type_bound_list.bounds())
+ {
+ self.add_where_predicate_from_bound(lower_ctx, bound, None, target.clone());
+ }
+ }
+
+ fn fill_params(&mut self, lower_ctx: &LowerCtx<'_>, params: ast::GenericParamList) {
+ for type_or_const_param in params.type_or_const_params() {
+ match type_or_const_param {
+ ast::TypeOrConstParam::Type(type_param) => {
+ let name = type_param.name().map_or_else(Name::missing, |it| it.as_name());
+ // FIXME: Use `Path::from_src`
+ let default = type_param
+ .default_type()
+ .map(|it| Interned::new(TypeRef::from_ast(lower_ctx, it)));
+ let param = TypeParamData {
+ name: Some(name.clone()),
+ default,
+ provenance: TypeParamProvenance::TypeParamList,
+ };
+ self.type_or_consts.alloc(param.into());
+ let type_ref = TypeRef::Path(name.into());
+ self.fill_bounds(lower_ctx, &type_param, Either::Left(type_ref));
+ }
+ ast::TypeOrConstParam::Const(const_param) => {
+ let name = const_param.name().map_or_else(Name::missing, |it| it.as_name());
+ let ty = const_param
+ .ty()
+ .map_or(TypeRef::Error, |it| TypeRef::from_ast(lower_ctx, it));
+ let param = ConstParamData {
+ name,
+ ty: Interned::new(ty),
+ has_default: const_param.default_val().is_some(),
+ };
+ self.type_or_consts.alloc(param.into());
+ }
+ }
+ }
+ for lifetime_param in params.lifetime_params() {
+ let name =
+                lifetime_param.lifetime().map_or_else(Name::missing, |lt| Name::new_lifetime(&lt));
+ let param = LifetimeParamData { name: name.clone() };
+ self.lifetimes.alloc(param);
+ let lifetime_ref = LifetimeRef::new_name(name);
+ self.fill_bounds(lower_ctx, &lifetime_param, Either::Right(lifetime_ref));
+ }
+ }
+
+ fn fill_where_predicates(&mut self, lower_ctx: &LowerCtx<'_>, where_clause: ast::WhereClause) {
+ for pred in where_clause.predicates() {
+ let target = if let Some(type_ref) = pred.ty() {
+ Either::Left(TypeRef::from_ast(lower_ctx, type_ref))
+ } else if let Some(lifetime) = pred.lifetime() {
+ Either::Right(LifetimeRef::new(&lifetime))
+ } else {
+ continue;
+ };
+
+ let lifetimes: Option<Box<_>> = pred.generic_param_list().map(|param_list| {
+ // Higher-Ranked Trait Bounds
+ param_list
+ .lifetime_params()
+ .map(|lifetime_param| {
+ lifetime_param
+ .lifetime()
+                            .map_or_else(Name::missing, |lt| Name::new_lifetime(&lt))
+ })
+ .collect()
+ });
+ for bound in pred.type_bound_list().iter().flat_map(|l| l.bounds()) {
+ self.add_where_predicate_from_bound(
+ lower_ctx,
+ bound,
+ lifetimes.as_ref(),
+ target.clone(),
+ );
+ }
+ }
+ }
+
+ /// Pushes one `WherePredicate` for `bound`, attached to `target`.
+ ///
+ /// With `hrtb_lifetimes` present the bound is recorded as `ForLifetime`;
+ /// a lifetime target only accepts a lifetime bound, other combinations
+ /// are dropped silently.
+ fn add_where_predicate_from_bound(
+ &mut self,
+ lower_ctx: &LowerCtx<'_>,
+ bound: ast::TypeBound,
+ hrtb_lifetimes: Option<&Box<[Name]>>,
+ target: Either<TypeRef, LifetimeRef>,
+ ) {
+ let bound = TypeBound::from_ast(lower_ctx, bound);
+ let predicate = match (target, bound) {
+ (Either::Left(type_ref), bound) => match hrtb_lifetimes {
+ Some(hrtb_lifetimes) => WherePredicate::ForLifetime {
+ lifetimes: hrtb_lifetimes.clone(),
+ target: WherePredicateTypeTarget::TypeRef(Interned::new(type_ref)),
+ bound: Interned::new(bound),
+ },
+ None => WherePredicate::TypeBound {
+ target: WherePredicateTypeTarget::TypeRef(Interned::new(type_ref)),
+ bound: Interned::new(bound),
+ },
+ },
+ (Either::Right(lifetime), TypeBound::Lifetime(bound)) => {
+ WherePredicate::Lifetime { target: lifetime, bound }
+ }
+ // e.g. a non-lifetime bound on a lifetime target: not representable.
+ _ => return,
+ };
+ self.where_predicates.push(predicate);
+ }
+
+ /// Collects `impl Trait` occurrences in `type_ref` (argument-position
+ /// impl trait) as fresh anonymous type parameters, recording each bound
+ /// as a where-predicate. Type macros are expanded and recursed into.
+ pub(crate) fn fill_implicit_impl_trait_args(
+ &mut self,
+ db: &dyn DefDatabase,
+ expander: &mut impl DerefMut<Target = Expander>,
+ type_ref: &TypeRef,
+ ) {
+ type_ref.walk(&mut |type_ref| {
+ if let TypeRef::ImplTrait(bounds) = type_ref {
+ // Each `impl Trait` becomes an unnamed type param with
+ // `ArgumentImplTrait` provenance.
+ let param = TypeParamData {
+ name: None,
+ default: None,
+ provenance: TypeParamProvenance::ArgumentImplTrait,
+ };
+ let param_id = self.type_or_consts.alloc(param.into());
+ for bound in bounds {
+ self.where_predicates.push(WherePredicate::TypeBound {
+ target: WherePredicateTypeTarget::TypeOrConstParam(param_id),
+ bound: bound.clone(),
+ });
+ }
+ }
+ if let TypeRef::Macro(mc) = type_ref {
+ // A macro in type position may expand to a type containing
+ // further `impl Trait`s; expand and recurse. Expansion errors
+ // are ignored (the `_` arm).
+ let macro_call = mc.to_node(db.upcast());
+ match expander.enter_expand::<ast::Type>(db, macro_call) {
+ Ok(ExpandResult { value: Some((mark, expanded)), .. }) => {
+ let ctx = LowerCtx::new(db, expander.current_file_id());
+ let type_ref = TypeRef::from_ast(&ctx, expanded);
+ self.fill_implicit_impl_trait_args(db, expander, &type_ref);
+ expander.exit(db, mark);
+ }
+ _ => {}
+ }
+ }
+ });
+ }
+
+ /// Shrinks the backing arenas/vec to minimal capacity.
+ pub(crate) fn shrink_to_fit(&mut self) {
+ let Self { lifetimes, type_or_consts: types, where_predicates } = self;
+ lifetimes.shrink_to_fit();
+ types.shrink_to_fit();
+ where_predicates.shrink_to_fit();
+ }
+
+ /// Finds the *type* parameter named `name`, returning its id under `parent`.
+ pub fn find_type_by_name(&self, name: &Name, parent: GenericDefId) -> Option<TypeParamId> {
+ self.type_or_consts.iter().find_map(|(id, p)| {
+ if p.name().as_ref() == Some(&name) && p.type_param().is_some() {
+ Some(TypeParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent }))
+ } else {
+ None
+ }
+ })
+ }
+
+ /// Finds the *const* parameter named `name`, returning its id under `parent`.
+ pub fn find_const_by_name(&self, name: &Name, parent: GenericDefId) -> Option<ConstParamId> {
+ self.type_or_consts.iter().find_map(|(id, p)| {
+ if p.name().as_ref() == Some(&name) && p.const_param().is_some() {
+ Some(ConstParamId::from_unchecked(TypeOrConstParamId { local_id: id, parent }))
+ } else {
+ None
+ }
+ })
+ }
+
+ /// Returns the implicit trait `Self` parameter, if these generics have one.
+ pub fn find_trait_self_param(&self) -> Option<LocalTypeOrConstParamId> {
+ self.type_or_consts.iter().find_map(|(id, p)| {
+ matches!(
+ p,
+ TypeOrConstParamData::TypeParamData(TypeParamData {
+ provenance: TypeParamProvenance::TraitSelf,
+ ..
+ })
+ )
+ .then(|| id)
+ })
+ }
+}
+
+/// Returns the `HirFileId` a generic definition lives in together with its
+/// `ast::GenericParamList` (if any), by looking the definition's source back up.
+fn file_id_and_params_of(
+ def: GenericDefId,
+ db: &dyn DefDatabase,
+) -> (HirFileId, Option<ast::GenericParamList>) {
+ match def {
+ GenericDefId::FunctionId(it) => {
+ let src = it.lookup(db).source(db);
+ (src.file_id, src.value.generic_param_list())
+ }
+ GenericDefId::AdtId(AdtId::StructId(it)) => {
+ let src = it.lookup(db).source(db);
+ (src.file_id, src.value.generic_param_list())
+ }
+ GenericDefId::AdtId(AdtId::UnionId(it)) => {
+ let src = it.lookup(db).source(db);
+ (src.file_id, src.value.generic_param_list())
+ }
+ GenericDefId::AdtId(AdtId::EnumId(it)) => {
+ let src = it.lookup(db).source(db);
+ (src.file_id, src.value.generic_param_list())
+ }
+ GenericDefId::TraitId(it) => {
+ let src = it.lookup(db).source(db);
+ (src.file_id, src.value.generic_param_list())
+ }
+ GenericDefId::TypeAliasId(it) => {
+ let src = it.lookup(db).source(db);
+ (src.file_id, src.value.generic_param_list())
+ }
+ GenericDefId::ImplId(it) => {
+ let src = it.lookup(db).source(db);
+ (src.file_id, src.value.generic_param_list())
+ }
+ // We won't be using this ID anyway
+ GenericDefId::EnumVariantId(_) | GenericDefId::ConstId(_) => (FileId(!0).into(), None),
+ }
+}
+
+impl HasChildSource<LocalTypeOrConstParamId> for GenericDefId {
+ type Value = Either<ast::TypeOrConstParam, ast::Trait>;
+ // Maps every local type/const parameter back to its AST node; for a
+ // trait's implicit `Self` the trait node itself is used (`Either::Right`).
+ fn child_source(
+ &self,
+ db: &dyn DefDatabase,
+ ) -> InFile<ArenaMap<LocalTypeOrConstParamId, Self::Value>> {
+ let generic_params = db.generic_params(*self);
+ let mut idx_iter = generic_params.type_or_consts.iter().map(|(idx, _)| idx);
+
+ let (file_id, generic_params_list) = file_id_and_params_of(*self, db);
+
+ let mut params = ArenaMap::default();
+
+ // For traits the first type index is `Self`, we need to add it before the other params.
+ if let GenericDefId::TraitId(id) = *self {
+ let trait_ref = id.lookup(db).source(db).value;
+ let idx = idx_iter.next().unwrap();
++ params.insert(idx, Either::Right(trait_ref));
+ }
+
+ if let Some(generic_params_list) = generic_params_list {
+ for (idx, ast_param) in idx_iter.zip(generic_params_list.type_or_const_params()) {
+ params.insert(idx, Either::Left(ast_param));
+ }
+ }
+
+ InFile::new(file_id, params)
+ }
+}
+
+impl HasChildSource<LocalLifetimeParamId> for GenericDefId {
+ type Value = ast::LifetimeParam;
+ // Same as the type/const variant above, but for lifetime parameters
+ // (no implicit `Self` special case here).
+ fn child_source(
+ &self,
+ db: &dyn DefDatabase,
+ ) -> InFile<ArenaMap<LocalLifetimeParamId, Self::Value>> {
+ let generic_params = db.generic_params(*self);
+ let idx_iter = generic_params.lifetimes.iter().map(|(idx, _)| idx);
+
+ let (file_id, generic_params_list) = file_id_and_params_of(*self, db);
+
+ let mut params = ArenaMap::default();
+
+ if let Some(generic_params_list) = generic_params_list {
+ for (idx, ast_param) in idx_iter.zip(generic_params_list.lifetime_params()) {
+ params.insert(idx, ast_param);
+ }
+ }
+
+ InFile::new(file_id, params)
+ }
+}
+
+impl ChildBySource for GenericDefId {
+ // Registers AST param nodes -> param ids in `res`, but only when this
+ // definition actually comes from `file_id`.
+ fn child_by_source_to(&self, db: &dyn DefDatabase, res: &mut DynMap, file_id: HirFileId) {
+ let (gfile_id, generic_params_list) = file_id_and_params_of(*self, db);
+ if gfile_id != file_id {
+ return;
+ }
+
+ let generic_params = db.generic_params(*self);
+ let mut toc_idx_iter = generic_params.type_or_consts.iter().map(|(idx, _)| idx);
+ let lts_idx_iter = generic_params.lifetimes.iter().map(|(idx, _)| idx);
+
+ // For traits the first type index is `Self`, skip it.
+ if let GenericDefId::TraitId(_) = *self {
+ toc_idx_iter.next().unwrap(); // advance_by(1);
+ }
+
+ if let Some(generic_params_list) = generic_params_list {
+ for (local_id, ast_param) in
+ toc_idx_iter.zip(generic_params_list.type_or_const_params())
+ {
+ let id = TypeOrConstParamId { parent: *self, local_id };
+ match ast_param {
+ ast::TypeOrConstParam::Type(a) => res[keys::TYPE_PARAM].insert(a, id),
+ ast::TypeOrConstParam::Const(a) => res[keys::CONST_PARAM].insert(a, id),
+ }
+ }
+ for (local_id, ast_param) in lts_idx_iter.zip(generic_params_list.lifetime_params()) {
+ let id = LifetimeParamId { parent: *self, local_id };
+ res[keys::LIFETIME_PARAM].insert(ast_param, id);
+ }
+ }
+ }
+}
--- /dev/null
- //! "Rust Analyzer: Debug ItemTree".
+//! A simplified AST that only contains items.
+//!
+//! This is the primary IR used throughout `hir_def`. It is the input to the name resolution
+//! algorithm, as well as to the queries defined in `adt.rs`, `data.rs`, and most things in
+//! `attr.rs`.
+//!
+//! `ItemTree`s are built per `HirFileId`, from the syntax tree of the parsed file. This means that
+//! they are crate-independent: they don't know which `#[cfg]`s are active or which module they
+//! belong to, since those concepts don't exist at this level (a single `ItemTree` might be part of
+//! multiple crates, or might be included into the same crate twice via `#[path]`).
+//!
+//! One important purpose of this layer is to provide an "invalidation barrier" for incremental
+//! computations: when typing inside an item body, the `ItemTree` of the modified file is typically
+//! unaffected, so we don't have to recompute name resolution results or item data (see `data.rs`).
+//!
+//! The `ItemTree` for the currently open file can be displayed by using the VS Code command
++//! "rust-analyzer: Debug ItemTree".
+//!
+//! Compared to rustc's architecture, `ItemTree` has properties from both rustc's AST and HIR: many
+//! syntax-level Rust features are already desugared to simpler forms in the `ItemTree`, but name
+//! resolution has not yet been performed. `ItemTree`s are per-file, while rustc's AST and HIR are
+//! per-crate, because we are interested in incrementally computing it.
+//!
+//! The representation of items in the `ItemTree` should generally mirror the surface syntax: it is
+//! usually a bad idea to desugar a syntax-level construct to something that is structurally
+//! different here. Name resolution needs to be able to process attributes and expand macros
+//! (including attribute macros), and having a 1-to-1 mapping between syntax and the `ItemTree`
+//! avoids introducing subtle bugs.
+//!
+//! In general, any item in the `ItemTree` stores its `AstId`, which allows mapping it back to its
+//! surface syntax.
+
+mod lower;
+mod pretty;
+#[cfg(test)]
+mod tests;
+
+use std::{
+ fmt::{self, Debug},
+ hash::{Hash, Hasher},
+ marker::PhantomData,
+ ops::Index,
+ sync::Arc,
+};
+
+use ast::{AstNode, HasName, StructKind};
+use base_db::CrateId;
+use either::Either;
+use hir_expand::{
+ ast_id_map::FileAstId,
+ hygiene::Hygiene,
+ name::{name, AsName, Name},
+ ExpandTo, HirFileId, InFile,
+};
+use la_arena::{Arena, Idx, IdxRange, RawIdx};
+use profile::Count;
+use rustc_hash::FxHashMap;
+use smallvec::SmallVec;
+use stdx::never;
+use syntax::{ast, match_ast, SyntaxKind};
+
+use crate::{
+ attr::{Attrs, RawAttrs},
+ db::DefDatabase,
+ generics::GenericParams,
+ intern::Interned,
+ path::{path, AssociatedTypeBinding, GenericArgs, ImportAlias, ModPath, Path, PathKind},
+ type_ref::{Mutability, TraitRef, TypeBound, TypeRef},
+ visibility::RawVisibility,
+ BlockId,
+};
+
+/// Index of a visibility in [`ItemVisibilities`]. The three most common
+/// visibilities are encoded as sentinel values near `u32::MAX` instead of
+/// occupying arena slots.
+#[derive(Copy, Clone, Eq, PartialEq)]
+pub struct RawVisibilityId(u32);
+
+impl RawVisibilityId {
+ pub const PUB: Self = RawVisibilityId(u32::max_value());
+ pub const PRIV: Self = RawVisibilityId(u32::max_value() - 1);
+ pub const PUB_CRATE: Self = RawVisibilityId(u32::max_value() - 2);
+}
+
+impl fmt::Debug for RawVisibilityId {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut f = f.debug_tuple("RawVisibilityId");
+ match *self {
+ Self::PUB => f.field(&"pub"),
+ Self::PRIV => f.field(&"pub(self)"),
+ Self::PUB_CRATE => f.field(&"pub(crate)"),
+ // Non-sentinel ids print their raw arena index.
+ _ => f.field(&self.0),
+ };
+ f.finish()
+ }
+}
+
+/// The item tree of a source file.
+#[derive(Debug, Default, Eq, PartialEq)]
+pub struct ItemTree {
+ _c: Count<Self>,
+
+ // Items appearing directly at the top level of the file.
+ top_level: SmallVec<[ModItem; 1]>,
+ attrs: FxHashMap<AttrOwner, RawAttrs>,
+
+ // Boxed and optional so that an empty `ItemTree` stays small.
+ data: Option<Box<ItemTreeData>>,
+}
+
+impl ItemTree {
+ // Query backing `db.file_item_tree`: lowers a whole file (or macro
+ // expansion) to an `ItemTree`.
+ pub(crate) fn file_item_tree_query(db: &dyn DefDatabase, file_id: HirFileId) -> Arc<ItemTree> {
+ let _p = profile::span("file_item_tree_query").detail(|| format!("{:?}", file_id));
+ let syntax = match db.parse_or_expand(file_id) {
+ Some(node) => node,
+ None => return Default::default(),
+ };
+ if never!(syntax.kind() == SyntaxKind::ERROR) {
+ // FIXME: not 100% sure why these crop up, but return an empty tree to avoid a panic
+ return Default::default();
+ }
+
+ let ctx = lower::Ctx::new(db, file_id);
+ let mut top_attrs = None;
+ let mut item_tree = match_ast! {
+ match syntax {
+ ast::SourceFile(file) => {
+ top_attrs = Some(RawAttrs::new(db, &file, ctx.hygiene()));
+ ctx.lower_module_items(&file)
+ },
+ ast::MacroItems(items) => {
+ ctx.lower_module_items(&items)
+ },
+ ast::MacroStmts(stmts) => {
+ // The produced statements can include items, which should be added as top-level
+ // items.
+ ctx.lower_macro_stmts(stmts)
+ },
+ _ => {
+ panic!("cannot create item tree from {:?} {}", syntax, syntax);
+ },
+ }
+ };
+
+ if let Some(attrs) = top_attrs {
+ item_tree.attrs.insert(AttrOwner::TopLevel, attrs);
+ }
+ item_tree.shrink_to_fit();
+ Arc::new(item_tree)
+ }
+
+ /// Returns an iterator over all items located at the top level of the `HirFileId` this
+ /// `ItemTree` was created from.
+ pub fn top_level_items(&self) -> &[ModItem] {
+ &self.top_level
+ }
+
+ /// Returns the inner attributes of the source file.
+ pub fn top_level_attrs(&self, db: &dyn DefDatabase, krate: CrateId) -> Attrs {
+ self.attrs.get(&AttrOwner::TopLevel).unwrap_or(&RawAttrs::EMPTY).clone().filter(db, krate)
+ }
+
+ pub(crate) fn raw_attrs(&self, of: AttrOwner) -> &RawAttrs {
+ self.attrs.get(&of).unwrap_or(&RawAttrs::EMPTY)
+ }
+
+ /// Like `raw_attrs`, but with `cfg_attr`/`cfg` resolved for `krate`.
+ pub(crate) fn attrs(&self, db: &dyn DefDatabase, krate: CrateId, of: AttrOwner) -> Attrs {
+ self.raw_attrs(of).clone().filter(db, krate)
+ }
+
+ /// Renders the tree as text (used by the "Debug ItemTree" command and tests).
+ pub fn pretty_print(&self) -> String {
+ pretty::print_item_tree(self)
+ }
+
+ // Panics when the tree is empty; only call after items were lowered.
+ fn data(&self) -> &ItemTreeData {
+ self.data.as_ref().expect("attempted to access data of empty ItemTree")
+ }
+
+ fn data_mut(&mut self) -> &mut ItemTreeData {
+ self.data.get_or_insert_with(Box::default)
+ }
+
+ // Lowers the items of a block expression (e.g. items inside `fn` bodies).
+ fn block_item_tree(db: &dyn DefDatabase, block: BlockId) -> Arc<ItemTree> {
+ let loc = db.lookup_intern_block(block);
+ let block = loc.ast_id.to_node(db.upcast());
+ let ctx = lower::Ctx::new(db, loc.ast_id.file_id);
+ Arc::new(ctx.lower_block(&block))
+ }
+
+ // Exhaustively destructures `ItemTreeData` so new arenas can't be forgotten.
+ fn shrink_to_fit(&mut self) {
+ if let Some(data) = &mut self.data {
+ let ItemTreeData {
+ imports,
+ extern_crates,
+ extern_blocks,
+ functions,
+ params,
+ structs,
+ fields,
+ unions,
+ enums,
+ variants,
+ consts,
+ statics,
+ traits,
+ impls,
+ type_aliases,
+ mods,
+ macro_calls,
+ macro_rules,
+ macro_defs,
+ vis,
+ } = &mut **data;
+
+ imports.shrink_to_fit();
+ extern_crates.shrink_to_fit();
+ extern_blocks.shrink_to_fit();
+ functions.shrink_to_fit();
+ params.shrink_to_fit();
+ structs.shrink_to_fit();
+ fields.shrink_to_fit();
+ unions.shrink_to_fit();
+ enums.shrink_to_fit();
+ variants.shrink_to_fit();
+ consts.shrink_to_fit();
+ statics.shrink_to_fit();
+ traits.shrink_to_fit();
+ impls.shrink_to_fit();
+ type_aliases.shrink_to_fit();
+ mods.shrink_to_fit();
+ macro_calls.shrink_to_fit();
+ macro_rules.shrink_to_fit();
+ macro_defs.shrink_to_fit();
+
+ vis.arena.shrink_to_fit();
+ }
+ }
+}
+
+/// Deduplicating store for visibilities; the three common cases are encoded
+/// as `RawVisibilityId` sentinels and never allocated.
+#[derive(Default, Debug, Eq, PartialEq)]
+struct ItemVisibilities {
+ arena: Arena<RawVisibility>,
+}
+
+impl ItemVisibilities {
+ fn alloc(&mut self, vis: RawVisibility) -> RawVisibilityId {
+ match &vis {
+ RawVisibility::Public => RawVisibilityId::PUB,
+ RawVisibility::Module(path) if path.segments().is_empty() => match &path.kind {
+ PathKind::Super(0) => RawVisibilityId::PRIV,
+ PathKind::Crate => RawVisibilityId::PUB_CRATE,
+ _ => RawVisibilityId(self.arena.alloc(vis).into_raw().into()),
+ },
+ _ => RawVisibilityId(self.arena.alloc(vis).into_raw().into()),
+ }
+ }
+}
+
+// Canonical values the sentinel `RawVisibilityId`s resolve to in `Index`.
+static VIS_PUB: RawVisibility = RawVisibility::Public;
+static VIS_PRIV: RawVisibility = RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)));
+static VIS_PUB_CRATE: RawVisibility = RawVisibility::Module(ModPath::from_kind(PathKind::Crate));
+
+/// Arena storage for every kind of item an `ItemTree` can contain.
+#[derive(Default, Debug, Eq, PartialEq)]
+struct ItemTreeData {
+ imports: Arena<Import>,
+ extern_crates: Arena<ExternCrate>,
+ extern_blocks: Arena<ExternBlock>,
+ functions: Arena<Function>,
+ params: Arena<Param>,
+ structs: Arena<Struct>,
+ fields: Arena<Field>,
+ unions: Arena<Union>,
+ enums: Arena<Enum>,
+ variants: Arena<Variant>,
+ consts: Arena<Const>,
+ statics: Arena<Static>,
+ traits: Arena<Trait>,
+ impls: Arena<Impl>,
+ type_aliases: Arena<TypeAlias>,
+ mods: Arena<Mod>,
+ macro_calls: Arena<MacroCall>,
+ macro_rules: Arena<MacroRules>,
+ macro_defs: Arena<MacroDef>,
+
+ vis: ItemVisibilities,
+}
+
+/// Anything inside the tree that can carry attributes.
+#[derive(Debug, Eq, PartialEq, Hash)]
+pub enum AttrOwner {
+ /// Attributes on an item.
+ ModItem(ModItem),
+ /// Inner attributes of the source file.
+ TopLevel,
+
+ Variant(Idx<Variant>),
+ Field(Idx<Field>),
+ Param(Idx<Param>),
+}
+
+// Generates `From<T> for AttrOwner` for each listed variant/payload pair.
+macro_rules! from_attrs {
+ ( $( $var:ident($t:ty) ),+ ) => {
+ $(
+ impl From<$t> for AttrOwner {
+ fn from(t: $t) -> AttrOwner {
+ AttrOwner::$var(t)
+ }
+ }
+ )+
+ };
+}
+
+from_attrs!(ModItem(ModItem), Variant(Idx<Variant>), Field(Idx<Field>), Param(Idx<Param>));
+
+/// Trait implemented by all item nodes in the item tree.
+pub trait ItemTreeNode: Clone {
+ type Source: AstNode + Into<ast::Item>;
+
+ fn ast_id(&self) -> FileAstId<Self::Source>;
+
+ /// Looks up an instance of `Self` in an item tree.
+ fn lookup(tree: &ItemTree, index: Idx<Self>) -> &Self;
+
+ /// Downcasts a `ModItem` to a `FileItemTreeId` specific to this type.
+ fn id_from_mod_item(mod_item: ModItem) -> Option<FileItemTreeId<Self>>;
+
+ /// Upcasts a `FileItemTreeId` to a generic `ModItem`.
+ fn id_to_mod_item(id: FileItemTreeId<Self>) -> ModItem;
+}
+
+/// Index of an item within a single `ItemTree`, typed by the item kind.
+pub struct FileItemTreeId<N: ItemTreeNode> {
+ index: Idx<N>,
+ _p: PhantomData<N>,
+}
+
+// Manual trait impls below avoid requiring `N` itself to implement them
+// (derive would add an `N: Trait` bound).
+impl<N: ItemTreeNode> Clone for FileItemTreeId<N> {
+ fn clone(&self) -> Self {
+ Self { index: self.index, _p: PhantomData }
+ }
+}
+impl<N: ItemTreeNode> Copy for FileItemTreeId<N> {}
+
+impl<N: ItemTreeNode> PartialEq for FileItemTreeId<N> {
+ fn eq(&self, other: &FileItemTreeId<N>) -> bool {
+ self.index == other.index
+ }
+}
+impl<N: ItemTreeNode> Eq for FileItemTreeId<N> {}
+
+impl<N: ItemTreeNode> Hash for FileItemTreeId<N> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.index.hash(state)
+ }
+}
+
+impl<N: ItemTreeNode> fmt::Debug for FileItemTreeId<N> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.index.fmt(f)
+ }
+}
+
+/// Identifies a particular [`ItemTree`].
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
+pub struct TreeId {
+ file: HirFileId,
+ // `Some` for trees of block expressions, `None` for whole files.
+ block: Option<BlockId>,
+}
+
+impl TreeId {
+ pub(crate) fn new(file: HirFileId, block: Option<BlockId>) -> Self {
+ Self { file, block }
+ }
+
+ pub(crate) fn item_tree(&self, db: &dyn DefDatabase) -> Arc<ItemTree> {
+ match self.block {
+ Some(block) => ItemTree::block_item_tree(db, block),
+ None => db.file_item_tree(self.file),
+ }
+ }
+
+ pub(crate) fn file_id(self) -> HirFileId {
+ self.file
+ }
+
+ pub(crate) fn is_block(self) -> bool {
+ self.block.is_some()
+ }
+}
+
+/// A `FileItemTreeId` paired with the `TreeId` of the tree it belongs to,
+/// making the reference globally unambiguous.
+#[derive(Debug)]
+pub struct ItemTreeId<N: ItemTreeNode> {
+ tree: TreeId,
+ pub value: FileItemTreeId<N>,
+}
+
+impl<N: ItemTreeNode> ItemTreeId<N> {
+ pub fn new(tree: TreeId, idx: FileItemTreeId<N>) -> Self {
+ Self { tree, value: idx }
+ }
+
+ pub fn file_id(self) -> HirFileId {
+ self.tree.file
+ }
+
+ pub fn tree_id(self) -> TreeId {
+ self.tree
+ }
+
+ pub fn item_tree(self, db: &dyn DefDatabase) -> Arc<ItemTree> {
+ self.tree.item_tree(db)
+ }
+}
+
+// Manual impls: deriving would needlessly bound `N` (see `FileItemTreeId`).
+impl<N: ItemTreeNode> Copy for ItemTreeId<N> {}
+impl<N: ItemTreeNode> Clone for ItemTreeId<N> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+
+impl<N: ItemTreeNode> PartialEq for ItemTreeId<N> {
+ fn eq(&self, other: &Self) -> bool {
+ self.tree == other.tree && self.value == other.value
+ }
+}
+
+impl<N: ItemTreeNode> Eq for ItemTreeId<N> {}
+
+impl<N: ItemTreeNode> Hash for ItemTreeId<N> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.tree.hash(state);
+ self.value.hash(state);
+ }
+}
+
+// For each `Type in field -> ast::Node` triple, generates: the `ModItem` enum
+// variant, `From` conversions, the `ItemTreeNode` impl, and an `Index` impl
+// on `ItemTree` for that item kind.
+macro_rules! mod_items {
+ ( $( $typ:ident in $fld:ident -> $ast:ty ),+ $(,)? ) => {
+ #[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
+ pub enum ModItem {
+ $(
+ $typ(FileItemTreeId<$typ>),
+ )+
+ }
+
+ $(
+ impl From<FileItemTreeId<$typ>> for ModItem {
+ fn from(id: FileItemTreeId<$typ>) -> ModItem {
+ ModItem::$typ(id)
+ }
+ }
+ )+
+
+ $(
+ impl ItemTreeNode for $typ {
+ type Source = $ast;
+
+ fn ast_id(&self) -> FileAstId<Self::Source> {
+ self.ast_id
+ }
+
+ fn lookup(tree: &ItemTree, index: Idx<Self>) -> &Self {
+ &tree.data().$fld[index]
+ }
+
+ fn id_from_mod_item(mod_item: ModItem) -> Option<FileItemTreeId<Self>> {
+ match mod_item {
+ ModItem::$typ(id) => Some(id),
+ _ => None,
+ }
+ }
+
+ fn id_to_mod_item(id: FileItemTreeId<Self>) -> ModItem {
+ ModItem::$typ(id)
+ }
+ }
+
+ impl Index<Idx<$typ>> for ItemTree {
+ type Output = $typ;
+
+ fn index(&self, index: Idx<$typ>) -> &Self::Output {
+ &self.data().$fld[index]
+ }
+ }
+ )+
+ };
+}
+
+mod_items! {
+ Import in imports -> ast::Use,
+ ExternCrate in extern_crates -> ast::ExternCrate,
+ ExternBlock in extern_blocks -> ast::ExternBlock,
+ Function in functions -> ast::Fn,
+ Struct in structs -> ast::Struct,
+ Union in unions -> ast::Union,
+ Enum in enums -> ast::Enum,
+ Const in consts -> ast::Const,
+ Static in statics -> ast::Static,
+ Trait in traits -> ast::Trait,
+ Impl in impls -> ast::Impl,
+ TypeAlias in type_aliases -> ast::TypeAlias,
+ Mod in mods -> ast::Module,
+ MacroCall in macro_calls -> ast::MacroCall,
+ MacroRules in macro_rules -> ast::MacroRules,
+ MacroDef in macro_defs -> ast::MacroDef,
+}
+
+// `Index` impls for arena-stored payloads that are not themselves `ModItem`s
+// (fields, variants, params).
+macro_rules! impl_index {
+ ( $($fld:ident: $t:ty),+ $(,)? ) => {
+ $(
+ impl Index<Idx<$t>> for ItemTree {
+ type Output = $t;
+
+ fn index(&self, index: Idx<$t>) -> &Self::Output {
+ &self.data().$fld[index]
+ }
+ }
+ )+
+ };
+}
+
+impl_index!(fields: Field, variants: Variant, params: Param);
+
+impl Index<RawVisibilityId> for ItemTree {
+ type Output = RawVisibility;
+ fn index(&self, index: RawVisibilityId) -> &Self::Output {
+ // Sentinel ids resolve to the shared statics; everything else hits
+ // the visibility arena.
+ match index {
+ RawVisibilityId::PRIV => &VIS_PRIV,
+ RawVisibilityId::PUB => &VIS_PUB,
+ RawVisibilityId::PUB_CRATE => &VIS_PUB_CRATE,
+ _ => &self.data().vis.arena[Idx::from_raw(index.0.into())],
+ }
+ }
+}
+
+impl<N: ItemTreeNode> Index<FileItemTreeId<N>> for ItemTree {
+ type Output = N;
+ fn index(&self, id: FileItemTreeId<N>) -> &N {
+ N::lookup(self, id.index)
+ }
+}
+
+/// A `use` item, holding its whole (possibly nested) tree.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Import {
+ pub visibility: RawVisibilityId,
+ pub ast_id: FileAstId<ast::Use>,
+ pub use_tree: UseTree,
+}
+
+/// One node of a `use` tree; `index` maps back to the AST (see `use_tree_to_ast`).
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct UseTree {
+ pub index: Idx<ast::UseTree>,
+ kind: UseTreeKind,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum UseTreeKind {
+ /// ```
+ /// use path::to::Item;
+ /// use path::to::Item as Renamed;
+ /// use path::to::Trait as _;
+ /// ```
+ Single { path: Interned<ModPath>, alias: Option<ImportAlias> },
+
+ /// ```
+ /// use *; // (invalid, but can occur in nested tree)
+ /// use path::*;
+ /// ```
+ Glob { path: Option<Interned<ModPath>> },
+
+ /// ```
+ /// use prefix::{self, Item, ...};
+ /// ```
+ Prefixed { prefix: Option<Interned<ModPath>>, list: Box<[UseTree]> },
+}
+
+/// An `extern crate` item.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct ExternCrate {
+ pub name: Name,
+ pub alias: Option<ImportAlias>,
+ pub visibility: RawVisibilityId,
+ pub ast_id: FileAstId<ast::ExternCrate>,
+}
+
+/// An `extern { ... }` block and the items it contains.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct ExternBlock {
+ pub abi: Option<Interned<str>>,
+ pub ast_id: FileAstId<ast::ExternBlock>,
+ pub children: Box<[ModItem]>,
+}
+
+/// A function item; boolean-ish properties are packed into `flags`.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Function {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub explicit_generic_params: Interned<GenericParams>,
+ pub abi: Option<Interned<str>>,
+ pub params: IdxRange<Param>,
+ pub ret_type: Interned<TypeRef>,
+ pub async_ret_type: Option<Interned<TypeRef>>,
+ pub ast_id: FileAstId<ast::Fn>,
+ pub(crate) flags: FnFlags,
+}
+
+/// A single function parameter, or the trailing `...` of a variadic function.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum Param {
+ Normal(Option<Name>, Interned<TypeRef>),
+ Varargs,
+}
+
+bitflags::bitflags! {
+ /// Packed boolean properties of a `Function`.
+ #[derive(Default)]
+ pub(crate) struct FnFlags: u8 {
+ const HAS_SELF_PARAM = 1 << 0;
+ const HAS_BODY = 1 << 1;
+ const HAS_DEFAULT_KW = 1 << 2;
+ const HAS_CONST_KW = 1 << 3;
+ const HAS_ASYNC_KW = 1 << 4;
+ const HAS_UNSAFE_KW = 1 << 5;
+ const IS_VARARGS = 1 << 6;
+ }
+}
+
+/// A `struct` item.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Struct {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub generic_params: Interned<GenericParams>,
+ pub fields: Fields,
+ pub ast_id: FileAstId<ast::Struct>,
+}
+
+/// A `union` item.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Union {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub generic_params: Interned<GenericParams>,
+ pub fields: Fields,
+ pub ast_id: FileAstId<ast::Union>,
+}
+
+/// An `enum` item; variants live in the `variants` arena.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Enum {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub generic_params: Interned<GenericParams>,
+ pub variants: IdxRange<Variant>,
+ pub ast_id: FileAstId<ast::Enum>,
+}
+
+/// A `const` item.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Const {
+ /// `None` for `const _: () = ();`
+ pub name: Option<Name>,
+ pub visibility: RawVisibilityId,
+ pub type_ref: Interned<TypeRef>,
+ pub ast_id: FileAstId<ast::Const>,
+}
+
+/// A `static` item.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Static {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub mutable: bool,
+ pub type_ref: Interned<TypeRef>,
+ pub ast_id: FileAstId<ast::Static>,
+}
+
+/// A `trait` item and its associated items.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Trait {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub generic_params: Interned<GenericParams>,
+ pub is_auto: bool,
+ pub is_unsafe: bool,
+ pub items: Box<[AssocItem]>,
+ pub ast_id: FileAstId<ast::Trait>,
+}
+
+/// An `impl` block (inherent when `target_trait` is `None`).
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Impl {
+ pub generic_params: Interned<GenericParams>,
+ pub target_trait: Option<Interned<TraitRef>>,
+ pub self_ty: Interned<TypeRef>,
+ pub is_negative: bool,
+ pub items: Box<[AssocItem]>,
+ pub ast_id: FileAstId<ast::Impl>,
+}
+
+/// A `type` alias (`type_ref` is `None` for trait-declared associated types).
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct TypeAlias {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ /// Bounds on the type alias itself. Only valid in trait declarations, eg. `type Assoc: Copy;`.
+ pub bounds: Box<[Interned<TypeBound>]>,
+ pub generic_params: Interned<GenericParams>,
+ pub type_ref: Option<Interned<TypeRef>>,
+ pub ast_id: FileAstId<ast::TypeAlias>,
+}
+
+/// A `mod` item, inline or out-of-line.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct Mod {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub kind: ModKind,
+ pub ast_id: FileAstId<ast::Module>,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub enum ModKind {
+ /// `mod m { ... }`
+ Inline { items: Box<[ModItem]> },
+
+ /// `mod m;`
+ Outline,
+}
+
+/// A macro invocation in item position.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct MacroCall {
+ /// Path to the called macro.
+ pub path: Interned<ModPath>,
+ pub ast_id: FileAstId<ast::MacroCall>,
+ pub expand_to: ExpandTo,
+}
+
+/// A `macro_rules!` definition.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct MacroRules {
+ /// The name of the declared macro.
+ pub name: Name,
+ pub ast_id: FileAstId<ast::MacroRules>,
+}
+
+/// "Macros 2.0" macro definition.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct MacroDef {
+ pub name: Name,
+ pub visibility: RawVisibilityId,
+ pub ast_id: FileAstId<ast::MacroDef>,
+}
+
+impl Import {
+ /// Maps a `UseTree` contained in this import back to its AST node.
+ pub fn use_tree_to_ast(
+ &self,
+ db: &dyn DefDatabase,
+ file_id: HirFileId,
+ index: Idx<ast::UseTree>,
+ ) -> ast::UseTree {
+ // Re-lower the AST item and get the source map.
+ // Note: The AST unwraps are fine, since if they fail we should have never obtained `index`.
+ let ast = InFile::new(file_id, self.ast_id).to_node(db.upcast());
+ let ast_use_tree = ast.use_tree().expect("missing `use_tree`");
+ let hygiene = Hygiene::new(db.upcast(), file_id);
+ let (_, source_map) =
+ lower::lower_use_tree(db, &hygiene, ast_use_tree).expect("failed to lower use tree");
+ source_map[index].clone()
+ }
+}
+
+/// How a single expanded path of a `use` tree imports its target.
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub enum ImportKind {
+ /// The `ModPath` is imported normally.
+ Plain,
+ /// This is a glob-import of all names in the `ModPath`.
+ Glob,
+ /// This is a `some::path::self` import, which imports `some::path` only in type namespace.
+ TypeOnly,
+}
+
+impl UseTree {
+ /// Expands the `UseTree` into individually imported `ModPath`s.
+ pub fn expand(
+ &self,
+ mut cb: impl FnMut(Idx<ast::UseTree>, ModPath, ImportKind, Option<ImportAlias>),
+ ) {
+ self.expand_impl(None, &mut cb)
+ }
+
+ // Recursive worker: `prefix` is the path accumulated from enclosing trees.
+ fn expand_impl(
+ &self,
+ prefix: Option<ModPath>,
+ cb: &mut dyn FnMut(Idx<ast::UseTree>, ModPath, ImportKind, Option<ImportAlias>),
+ ) {
+ // Joins `prefix` with `path`; returns `None` when the combination is
+ // not a valid path (e.g. an absolute path nested under a prefix).
+ fn concat_mod_paths(
+ prefix: Option<ModPath>,
+ path: &ModPath,
+ ) -> Option<(ModPath, ImportKind)> {
+ match (prefix, &path.kind) {
+ (None, _) => Some((path.clone(), ImportKind::Plain)),
+ (Some(mut prefix), PathKind::Plain) => {
+ for segment in path.segments() {
+ prefix.push_segment(segment.clone());
+ }
+ Some((prefix, ImportKind::Plain))
+ }
+ (Some(mut prefix), PathKind::Super(n))
+ if *n > 0 && prefix.segments().is_empty() =>
+ {
+ // `super::super` + `super::rest`
+ match &mut prefix.kind {
+ PathKind::Super(m) => {
+ cov_mark::hit!(concat_super_mod_paths);
+ *m += *n;
+ for segment in path.segments() {
+ prefix.push_segment(segment.clone());
+ }
+ Some((prefix, ImportKind::Plain))
+ }
+ _ => None,
+ }
+ }
+ (Some(prefix), PathKind::Super(0)) if path.segments().is_empty() => {
+ // `some::path::self` == `some::path`
+ Some((prefix, ImportKind::TypeOnly))
+ }
+ (Some(_), _) => None,
+ }
+ }
+
+ match &self.kind {
+ UseTreeKind::Single { path, alias } => {
+ if let Some((path, kind)) = concat_mod_paths(prefix, path) {
+ cb(self.index, path, kind, alias.clone());
+ }
+ }
+ UseTreeKind::Glob { path: Some(path) } => {
+ if let Some((path, _)) = concat_mod_paths(prefix, path) {
+ cb(self.index, path, ImportKind::Glob, None);
+ }
+ }
+ UseTreeKind::Glob { path: None } => {
+ if let Some(prefix) = prefix {
+ cb(self.index, prefix, ImportKind::Glob, None);
+ }
+ }
+ UseTreeKind::Prefixed { prefix: additional_prefix, list } => {
+ let prefix = match additional_prefix {
+ Some(path) => match concat_mod_paths(prefix, path) {
+ Some((path, ImportKind::Plain)) => Some(path),
+ _ => return,
+ },
+ None => prefix,
+ };
+ for tree in &**list {
+ tree.expand_impl(prefix.clone(), cb);
+ }
+ }
+ }
+ }
+}
+
+// Generates `From<payload>` impls for each variant of an enum.
+macro_rules! impl_froms {
+ ($e:ident { $($v:ident ($t:ty)),* $(,)? }) => {
+ $(
+ impl From<$t> for $e {
+ fn from(it: $t) -> $e {
+ $e::$v(it)
+ }
+ }
+ )*
+ }
+}
+
+impl ModItem {
+ /// Converts to an `AssocItem` if this item kind may appear in trait/impl
+ /// bodies; `None` otherwise.
+ pub fn as_assoc_item(&self) -> Option<AssocItem> {
+ match self {
+ ModItem::Import(_)
+ | ModItem::ExternCrate(_)
+ | ModItem::ExternBlock(_)
+ | ModItem::Struct(_)
+ | ModItem::Union(_)
+ | ModItem::Enum(_)
+ | ModItem::Static(_)
+ | ModItem::Trait(_)
+ | ModItem::Impl(_)
+ | ModItem::Mod(_)
+ | ModItem::MacroRules(_)
+ | ModItem::MacroDef(_) => None,
+ ModItem::MacroCall(call) => Some(AssocItem::MacroCall(*call)),
+ ModItem::Const(konst) => Some(AssocItem::Const(*konst)),
+ ModItem::TypeAlias(alias) => Some(AssocItem::TypeAlias(*alias)),
+ ModItem::Function(func) => Some(AssocItem::Function(*func)),
+ }
+ }
+
+ pub fn downcast<N: ItemTreeNode>(self) -> Option<FileItemTreeId<N>> {
+ N::id_from_mod_item(self)
+ }
+
+ /// The `AstId` of the item, upcast to the generic `ast::Item` type.
+ pub fn ast_id(&self, tree: &ItemTree) -> FileAstId<ast::Item> {
+ match self {
+ ModItem::Import(it) => tree[it.index].ast_id().upcast(),
+ ModItem::ExternCrate(it) => tree[it.index].ast_id().upcast(),
+ ModItem::ExternBlock(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Function(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Struct(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Union(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Enum(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Const(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Static(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Trait(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Impl(it) => tree[it.index].ast_id().upcast(),
+ ModItem::TypeAlias(it) => tree[it.index].ast_id().upcast(),
+ ModItem::Mod(it) => tree[it.index].ast_id().upcast(),
+ ModItem::MacroCall(it) => tree[it.index].ast_id().upcast(),
+ ModItem::MacroRules(it) => tree[it.index].ast_id().upcast(),
+ ModItem::MacroDef(it) => tree[it.index].ast_id().upcast(),
+ }
+ }
+}
+
+/// The subset of items that can appear inside a trait or impl body.
+#[derive(Debug, Copy, Clone, Eq, PartialEq)]
+pub enum AssocItem {
+ Function(FileItemTreeId<Function>),
+ TypeAlias(FileItemTreeId<TypeAlias>),
+ Const(FileItemTreeId<Const>),
+ MacroCall(FileItemTreeId<MacroCall>),
+}
+
+impl_froms!(AssocItem {
+ Function(FileItemTreeId<Function>),
+ TypeAlias(FileItemTreeId<TypeAlias>),
+ Const(FileItemTreeId<Const>),
+ MacroCall(FileItemTreeId<MacroCall>),
+});
+
+impl From<AssocItem> for ModItem {
+ fn from(item: AssocItem) -> Self {
+ match item {
+ AssocItem::Function(it) => it.into(),
+ AssocItem::TypeAlias(it) => it.into(),
+ AssocItem::Const(it) => it.into(),
+ AssocItem::MacroCall(it) => it.into(),
+ }
+ }
+}
+
+impl AssocItem {
+ /// The `AstId` of the item, upcast to the generic `ast::AssocItem` type.
+ pub fn ast_id(self, tree: &ItemTree) -> FileAstId<ast::AssocItem> {
+ match self {
+ AssocItem::Function(id) => tree[id].ast_id.upcast(),
+ AssocItem::TypeAlias(id) => tree[id].ast_id.upcast(),
+ AssocItem::Const(id) => tree[id].ast_id.upcast(),
+ AssocItem::MacroCall(id) => tree[id].ast_id.upcast(),
+ }
+ }
+}
+
+#[derive(Debug, Eq, PartialEq)]
+pub struct Variant {
+ pub name: Name,
+ pub fields: Fields,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum Fields {
+ Record(IdxRange<Field>),
+ Tuple(IdxRange<Field>),
+ Unit,
+}
+
+/// A single field of an enum variant or struct
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct Field {
+ pub name: Name,
+ pub type_ref: Interned<TypeRef>,
+ pub visibility: RawVisibilityId,
+}
--- /dev/null
- res.insert(field_id, field_data.visibility.resolve(db, &resolver))
+//! Defines hir-level representation of visibility (e.g. `pub` and `pub(crate)`).
+
+use std::{iter, sync::Arc};
+
+use hir_expand::{hygiene::Hygiene, InFile};
+use la_arena::ArenaMap;
+use syntax::ast;
+
+use crate::{
+ db::DefDatabase,
+ nameres::DefMap,
+ path::{ModPath, PathKind},
+ resolver::HasResolver,
+ ConstId, FunctionId, HasModule, LocalFieldId, ModuleId, VariantId,
+};
+
+/// Visibility of an item, not yet resolved.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum RawVisibility {
+ /// `pub(in module)`, `pub(crate)` or `pub(super)`. Also private, which is
+ /// equivalent to `pub(self)`.
+ Module(ModPath),
+ /// `pub`.
+ Public,
+}
+
+impl RawVisibility {
+ pub(crate) const fn private() -> RawVisibility {
+ RawVisibility::Module(ModPath::from_kind(PathKind::Super(0)))
+ }
+
+ pub(crate) fn from_ast(
+ db: &dyn DefDatabase,
+ node: InFile<Option<ast::Visibility>>,
+ ) -> RawVisibility {
+ Self::from_ast_with_hygiene(db, node.value, &Hygiene::new(db.upcast(), node.file_id))
+ }
+
+ pub(crate) fn from_ast_with_hygiene(
+ db: &dyn DefDatabase,
+ node: Option<ast::Visibility>,
+ hygiene: &Hygiene,
+ ) -> RawVisibility {
+ Self::from_ast_with_hygiene_and_default(db, node, RawVisibility::private(), hygiene)
+ }
+
+ pub(crate) fn from_ast_with_hygiene_and_default(
+ db: &dyn DefDatabase,
+ node: Option<ast::Visibility>,
+ default: RawVisibility,
+ hygiene: &Hygiene,
+ ) -> RawVisibility {
+ let node = match node {
+ None => return default,
+ Some(node) => node,
+ };
+ match node.kind() {
+ ast::VisibilityKind::In(path) => {
+ let path = ModPath::from_src(db.upcast(), path, hygiene);
+ let path = match path {
+ None => return RawVisibility::private(),
+ Some(path) => path,
+ };
+ RawVisibility::Module(path)
+ }
+ ast::VisibilityKind::PubCrate => {
+ let path = ModPath::from_kind(PathKind::Crate);
+ RawVisibility::Module(path)
+ }
+ ast::VisibilityKind::PubSuper => {
+ let path = ModPath::from_kind(PathKind::Super(1));
+ RawVisibility::Module(path)
+ }
+ ast::VisibilityKind::PubSelf => {
+ let path = ModPath::from_kind(PathKind::Plain);
+ RawVisibility::Module(path)
+ }
+ ast::VisibilityKind::Pub => RawVisibility::Public,
+ }
+ }
+
+ pub fn resolve(
+ &self,
+ db: &dyn DefDatabase,
+ resolver: &crate::resolver::Resolver,
+ ) -> Visibility {
+ // we fall back to public visibility (i.e. fail open) if the path can't be resolved
+ resolver.resolve_visibility(db, self).unwrap_or(Visibility::Public)
+ }
+}
+
+/// Visibility of an item, with the path resolved.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum Visibility {
+ /// Visibility is restricted to a certain module.
+ Module(ModuleId),
+ /// Visibility is unrestricted.
+ Public,
+}
+
+impl Visibility {
+ pub fn is_visible_from(self, db: &dyn DefDatabase, from_module: ModuleId) -> bool {
+ let to_module = match self {
+ Visibility::Module(m) => m,
+ Visibility::Public => return true,
+ };
+ // if they're not in the same crate, it can't be visible
+ if from_module.krate != to_module.krate {
+ return false;
+ }
+ let def_map = from_module.def_map(db);
+ self.is_visible_from_def_map(db, &def_map, from_module.local_id)
+ }
+
+ pub(crate) fn is_visible_from_other_crate(self) -> bool {
+ matches!(self, Visibility::Public)
+ }
+
+ pub(crate) fn is_visible_from_def_map(
+ self,
+ db: &dyn DefDatabase,
+ def_map: &DefMap,
+ mut from_module: crate::LocalModuleId,
+ ) -> bool {
+ let mut to_module = match self {
+ Visibility::Module(m) => m,
+ Visibility::Public => return true,
+ };
+
+ // `to_module` might be the root module of a block expression. Those have the same
+ // visibility as the containing module (even though no items are directly nameable from
+ // there, getting this right is important for method resolution).
+ // In that case, we adjust the visibility of `to_module` to point to the containing module.
+ // Additional complication: `to_module` might be in `from_module`'s `DefMap`, which we're
+ // currently computing, so we must not call the `def_map` query for it.
+ let arc;
+ let to_module_def_map =
+ if to_module.krate == def_map.krate() && to_module.block == def_map.block_id() {
+ cov_mark::hit!(is_visible_from_same_block_def_map);
+ def_map
+ } else {
+ arc = to_module.def_map(db);
+ &arc
+ };
+ let is_block_root = matches!(to_module.block, Some(_) if to_module_def_map[to_module.local_id].parent.is_none());
+ if is_block_root {
+ to_module = to_module_def_map.containing_module(to_module.local_id).unwrap();
+ }
+
+ // from_module needs to be a descendant of to_module
+ let mut def_map = def_map;
+ let mut parent_arc;
+ loop {
+ if def_map.module_id(from_module) == to_module {
+ return true;
+ }
+ match def_map[from_module].parent {
+ Some(parent) => from_module = parent,
+ None => {
+ match def_map.parent() {
+ Some(module) => {
+ parent_arc = module.def_map(db);
+ def_map = &*parent_arc;
+ from_module = module.local_id;
+ }
+ // Reached the root module, nothing left to check.
+ None => return false,
+ }
+ }
+ }
+ }
+ }
+
+ /// Returns the most permissive visibility of `self` and `other`.
+ ///
+ /// If there is no subset relation between `self` and `other`, returns `None` (ie. they're only
+ /// visible in unrelated modules).
+ pub(crate) fn max(self, other: Visibility, def_map: &DefMap) -> Option<Visibility> {
+ match (self, other) {
+ (Visibility::Module(_) | Visibility::Public, Visibility::Public)
+ | (Visibility::Public, Visibility::Module(_)) => Some(Visibility::Public),
+ (Visibility::Module(mod_a), Visibility::Module(mod_b)) => {
+ if mod_a.krate != mod_b.krate {
+ return None;
+ }
+
+ let mut a_ancestors = iter::successors(Some(mod_a.local_id), |&m| {
+ let parent_id = def_map[m].parent?;
+ Some(parent_id)
+ });
+ let mut b_ancestors = iter::successors(Some(mod_b.local_id), |&m| {
+ let parent_id = def_map[m].parent?;
+ Some(parent_id)
+ });
+
+ if a_ancestors.any(|m| m == mod_b.local_id) {
+ // B is above A
+ return Some(Visibility::Module(mod_b));
+ }
+
+ if b_ancestors.any(|m| m == mod_a.local_id) {
+ // A is above B
+ return Some(Visibility::Module(mod_a));
+ }
+
+ None
+ }
+ }
+ }
+}
+
+/// Resolve visibility of all fields of a struct, union, or enum variant.
+pub(crate) fn field_visibilities_query(
+ db: &dyn DefDatabase,
+ variant_id: VariantId,
+) -> Arc<ArenaMap<LocalFieldId, Visibility>> {
+ let var_data = match variant_id {
+ VariantId::StructId(it) => db.struct_data(it).variant_data.clone(),
+ VariantId::UnionId(it) => db.union_data(it).variant_data.clone(),
+ VariantId::EnumVariantId(it) => {
+ db.enum_data(it.parent).variants[it.local_id].variant_data.clone()
+ }
+ };
+ let resolver = variant_id.module(db).resolver(db);
+ let mut res = ArenaMap::default();
+ for (field_id, field_data) in var_data.fields().iter() {
++ res.insert(field_id, field_data.visibility.resolve(db, &resolver));
+ }
+ Arc::new(res)
+}
+
+/// Resolve visibility of a function.
+pub(crate) fn function_visibility_query(db: &dyn DefDatabase, def: FunctionId) -> Visibility {
+ let resolver = def.resolver(db);
+ db.function_data(def).visibility.resolve(db, &resolver)
+}
+
+/// Resolve visibility of a const.
+pub(crate) fn const_visibility_query(db: &dyn DefDatabase, def: ConstId) -> Visibility {
+ let resolver = def.resolver(db);
+ db.const_data(def).visibility.resolve(db, &resolver)
+}
--- /dev/null
- ast::{self, AstNode},
+//! To make attribute macros work reliably when typing, we need to take care to
+//! fix up syntax errors in the code we're passing to them.
+use std::mem;
+
+use mbe::{SyntheticToken, SyntheticTokenId, TokenMap};
+use rustc_hash::FxHashMap;
+use syntax::{
- // FIXME: for, loop, match etc.
++ ast::{self, AstNode, HasLoopBody},
+ match_ast, SyntaxElement, SyntaxKind, SyntaxNode, TextRange,
+};
+use tt::Subtree;
+
+/// The result of calculating fixes for a syntax node -- a bunch of changes
+/// (appending to and replacing nodes), the information that is needed to
+/// reverse those changes afterwards, and a token map.
+#[derive(Debug)]
+pub(crate) struct SyntaxFixups {
+ pub(crate) append: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ pub(crate) replace: FxHashMap<SyntaxElement, Vec<SyntheticToken>>,
+ pub(crate) undo_info: SyntaxFixupUndoInfo,
+ pub(crate) token_map: TokenMap,
+ pub(crate) next_id: u32,
+}
+
+/// This is the information needed to reverse the fixups.
+#[derive(Debug, PartialEq, Eq)]
+pub struct SyntaxFixupUndoInfo {
+ original: Vec<Subtree>,
+}
+
+const EMPTY_ID: SyntheticTokenId = SyntheticTokenId(!0);
+
+pub(crate) fn fixup_syntax(node: &SyntaxNode) -> SyntaxFixups {
+ let mut append = FxHashMap::<SyntaxElement, _>::default();
+ let mut replace = FxHashMap::<SyntaxElement, _>::default();
+ let mut preorder = node.preorder();
+ let mut original = Vec::new();
+ let mut token_map = TokenMap::default();
+ let mut next_id = 0;
+ while let Some(event) = preorder.next() {
+ let node = match event {
+ syntax::WalkEvent::Enter(node) => node,
+ syntax::WalkEvent::Leave(_) => continue,
+ };
+
+ if can_handle_error(&node) && has_error_to_handle(&node) {
+ // the node contains an error node, we have to completely replace it by something valid
+ let (original_tree, new_tmap, new_next_id) =
+ mbe::syntax_node_to_token_tree_with_modifications(
+ &node,
+ mem::take(&mut token_map),
+ next_id,
+ Default::default(),
+ Default::default(),
+ );
+ token_map = new_tmap;
+ next_id = new_next_id;
+ let idx = original.len() as u32;
+ original.push(original_tree);
+ let replacement = SyntheticToken {
+ kind: SyntaxKind::IDENT,
+ text: "__ra_fixup".into(),
+ range: node.text_range(),
+ id: SyntheticTokenId(idx),
+ };
+ replace.insert(node.clone().into(), vec![replacement]);
+ preorder.skip_subtree();
+ continue;
+ }
+
+ // In some other situations, we can fix things by just appending some tokens.
+ let end_range = TextRange::empty(node.text_range().end());
+ match_ast! {
+ match node {
+ ast::FieldExpr(it) => {
+ if it.name_ref().is_none() {
+ // incomplete field access: some_expr.|
+ append.insert(node.clone().into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::IDENT,
+ text: "__ra_fixup".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ },
+ ast::ExprStmt(it) => {
+ if it.semicolon_token().is_none() {
+ append.insert(node.clone().into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::SEMICOLON,
+ text: ";".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ },
+ ast::LetStmt(it) => {
+ if it.semicolon_token().is_none() {
+ append.insert(node.clone().into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::SEMICOLON,
+ text: ";".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ },
+ ast::IfExpr(it) => {
+ if it.condition().is_none() {
+ // insert placeholder token after the if token
+ let if_token = match it.if_token() {
+ Some(t) => t,
+ None => continue,
+ };
+ append.insert(if_token.into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::IDENT,
+ text: "__ra_fixup".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ if it.then_branch().is_none() {
+ append.insert(node.clone().into(), vec![
+ SyntheticToken {
+ kind: SyntaxKind::L_CURLY,
+ text: "{".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ SyntheticToken {
+ kind: SyntaxKind::R_CURLY,
+ text: "}".into(),
+ range: end_range,
+ id: EMPTY_ID,
+ },
+ ]);
+ }
+ },
++ ast::WhileExpr(it) => {
++ if it.condition().is_none() {
++ // insert placeholder token after the while token
++ let while_token = match it.while_token() {
++ Some(t) => t,
++ None => continue,
++ };
++ append.insert(while_token.into(), vec![
++ SyntheticToken {
++ kind: SyntaxKind::IDENT,
++ text: "__ra_fixup".into(),
++ range: end_range,
++ id: EMPTY_ID,
++ },
++ ]);
++ }
++ if it.loop_body().is_none() {
++ append.insert(node.clone().into(), vec![
++ SyntheticToken {
++ kind: SyntaxKind::L_CURLY,
++ text: "{".into(),
++ range: end_range,
++ id: EMPTY_ID,
++ },
++ SyntheticToken {
++ kind: SyntaxKind::R_CURLY,
++ text: "}".into(),
++ range: end_range,
++ id: EMPTY_ID,
++ },
++ ]);
++ }
++ },
++ ast::LoopExpr(it) => {
++ if it.loop_body().is_none() {
++ append.insert(node.clone().into(), vec![
++ SyntheticToken {
++ kind: SyntaxKind::L_CURLY,
++ text: "{".into(),
++ range: end_range,
++ id: EMPTY_ID,
++ },
++ SyntheticToken {
++ kind: SyntaxKind::R_CURLY,
++ text: "}".into(),
++ range: end_range,
++ id: EMPTY_ID,
++ },
++ ]);
++ }
++ },
+ // FIXME: foo::
++ // FIXME: for, match etc.
+ _ => (),
+ }
+ }
+ }
+ SyntaxFixups {
+ append,
+ replace,
+ token_map,
+ next_id,
+ undo_info: SyntaxFixupUndoInfo { original },
+ }
+}
+
+fn has_error(node: &SyntaxNode) -> bool {
+ node.children().any(|c| c.kind() == SyntaxKind::ERROR)
+}
+
+fn can_handle_error(node: &SyntaxNode) -> bool {
+ ast::Expr::can_cast(node.kind())
+}
+
+fn has_error_to_handle(node: &SyntaxNode) -> bool {
+ has_error(node) || node.children().any(|c| !can_handle_error(&c) && has_error_to_handle(&c))
+}
+
+pub(crate) fn reverse_fixups(
+ tt: &mut Subtree,
+ token_map: &TokenMap,
+ undo_info: &SyntaxFixupUndoInfo,
+) {
+ tt.token_trees.retain(|tt| match tt {
+ tt::TokenTree::Leaf(leaf) => {
+ token_map.synthetic_token_id(leaf.id()).is_none()
+ || token_map.synthetic_token_id(leaf.id()) != Some(EMPTY_ID)
+ }
+ tt::TokenTree::Subtree(st) => st.delimiter.map_or(true, |d| {
+ token_map.synthetic_token_id(d.id).is_none()
+ || token_map.synthetic_token_id(d.id) != Some(EMPTY_ID)
+ }),
+ });
+ tt.token_trees.iter_mut().for_each(|tt| match tt {
+ tt::TokenTree::Subtree(tt) => reverse_fixups(tt, token_map, undo_info),
+ tt::TokenTree::Leaf(leaf) => {
+ if let Some(id) = token_map.synthetic_token_id(leaf.id()) {
+ let original = &undo_info.original[id.0 as usize];
+ *tt = tt::TokenTree::Subtree(original.clone());
+ }
+ }
+ });
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use super::reverse_fixups;
+
+ #[track_caller]
+ fn check(ra_fixture: &str, mut expect: Expect) {
+ let parsed = syntax::SourceFile::parse(ra_fixture);
+ let fixups = super::fixup_syntax(&parsed.syntax_node());
+ let (mut tt, tmap, _) = mbe::syntax_node_to_token_tree_with_modifications(
+ &parsed.syntax_node(),
+ fixups.token_map,
+ fixups.next_id,
+ fixups.replace,
+ fixups.append,
+ );
+
+ let mut actual = tt.to_string();
+ actual.push('\n');
+
+ expect.indent(false);
+ expect.assert_eq(&actual);
+
+ // the fixed-up tree should be syntactically valid
+ let (parse, _) = mbe::token_tree_to_syntax_node(&tt, ::mbe::TopEntryPoint::MacroItems);
+ assert_eq!(
+ parse.errors(),
+ &[],
+ "parse has syntax errors. parse tree:\n{:#?}",
+ parse.syntax_node()
+ );
+
+ reverse_fixups(&mut tt, &tmap, &fixups.undo_info);
+
+ // the fixed-up + reversed version should be equivalent to the original input
+ // (but token IDs don't matter)
+ let (original_as_tt, _) = mbe::syntax_node_to_token_tree(&parsed.syntax_node());
+ assert_eq!(tt.to_string(), original_as_tt.to_string());
+ }
+
+ #[test]
+ fn incomplete_field_expr_1() {
+ check(
+ r#"
+fn foo() {
+ a.
+}
+"#,
+ expect![[r#"
+fn foo () {a . __ra_fixup}
+"#]],
+ )
+ }
+
+ #[test]
+ fn incomplete_field_expr_2() {
+ check(
+ r#"
+fn foo() {
+ a. ;
+}
+"#,
+ expect![[r#"
+fn foo () {a . __ra_fixup ;}
+"#]],
+ )
+ }
+
+ #[test]
+ fn incomplete_field_expr_3() {
+ check(
+ r#"
+fn foo() {
+ a. ;
+ bar();
+}
+"#,
+ expect![[r#"
+fn foo () {a . __ra_fixup ; bar () ;}
+"#]],
+ )
+ }
+
+ #[test]
+ fn incomplete_let() {
+ check(
+ r#"
+fn foo() {
+ let x = a
+}
+"#,
+ expect![[r#"
+fn foo () {let x = a ;}
+"#]],
+ )
+ }
+
+ #[test]
+ fn incomplete_field_expr_in_let() {
+ check(
+ r#"
+fn foo() {
+ let x = a.
+}
+"#,
+ expect![[r#"
+fn foo () {let x = a . __ra_fixup ;}
+"#]],
+ )
+ }
+
+ #[test]
+ fn field_expr_before_call() {
+ // another case that easily happens while typing
+ check(
+ r#"
+fn foo() {
+ a.b
+ bar();
+}
+"#,
+ expect![[r#"
+fn foo () {a . b ; bar () ;}
+"#]],
+ )
+ }
+
+ #[test]
+ fn extraneous_comma() {
+ check(
+ r#"
+fn foo() {
+ bar(,);
+}
+"#,
+ expect![[r#"
+fn foo () {__ra_fixup ;}
+"#]],
+ )
+ }
+
+ #[test]
+ fn fixup_if_1() {
+ check(
+ r#"
+fn foo() {
+ if a
+}
+"#,
+ expect![[r#"
+fn foo () {if a {}}
+"#]],
+ )
+ }
+
+ #[test]
+ fn fixup_if_2() {
+ check(
+ r#"
+fn foo() {
+ if
+}
+"#,
+ expect![[r#"
+fn foo () {if __ra_fixup {}}
+"#]],
+ )
+ }
+
+ #[test]
+ fn fixup_if_3() {
+ check(
+ r#"
+fn foo() {
+ if {}
+}
+"#,
+        // the `{}` is parsed as the condition expression, so the fixup
+        // appends an empty `{}` then-branch after it
+ expect![[r#"
+fn foo () {if {} {}}
++"#]],
++ )
++ }
++
++ #[test]
++ fn fixup_while_1() {
++ check(
++ r#"
++fn foo() {
++ while
++}
++"#,
++ expect![[r#"
++fn foo () {while __ra_fixup {}}
++"#]],
++ )
++ }
++
++ #[test]
++ fn fixup_while_2() {
++ check(
++ r#"
++fn foo() {
++ while foo
++}
++"#,
++ expect![[r#"
++fn foo () {while foo {}}
++"#]],
++ )
++    }
++
++    #[test]
++ fn fixup_while_3() {
++ check(
++ r#"
++fn foo() {
++ while {}
++}
++"#,
++ expect![[r#"
++fn foo () {while __ra_fixup {}}
++"#]],
++ )
++ }
++
++ #[test]
++ fn fixup_loop() {
++ check(
++ r#"
++fn foo() {
++ loop
++}
++"#,
++ expect![[r#"
++fn foo () {loop {}}
+"#]],
+ )
+ }
+}
--- /dev/null
+//! See [`Name`].
+
+use std::fmt;
+
+use syntax::{ast, SmolStr, SyntaxKind};
+
+/// `Name` is a wrapper around string, which is used in hir for both references
+/// and declarations. In theory, names should also carry hygiene info, but we are
+/// not there yet!
+#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct Name(Repr);
+
+/// `EscapedName` will add a prefix "r#" to the wrapped `Name` when it is a raw identifier
+#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub struct EscapedName<'a>(&'a Name);
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
+enum Repr {
+ Text(SmolStr),
+ TupleField(usize),
+}
+
+impl fmt::Display for Name {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match &self.0 {
+ Repr::Text(text) => fmt::Display::fmt(&text, f),
+ Repr::TupleField(idx) => fmt::Display::fmt(&idx, f),
+ }
+ }
+}
+
+fn is_raw_identifier(name: &str) -> bool {
+ let is_keyword = SyntaxKind::from_keyword(name).is_some();
+ is_keyword && !matches!(name, "self" | "crate" | "super" | "Self")
+}
+
+impl<'a> fmt::Display for EscapedName<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match &self.0 .0 {
+ Repr::Text(text) => {
+ if is_raw_identifier(text) {
+ write!(f, "r#{}", &text)
+ } else {
+ fmt::Display::fmt(&text, f)
+ }
+ }
+ Repr::TupleField(idx) => fmt::Display::fmt(&idx, f),
+ }
+ }
+}
+
+impl<'a> EscapedName<'a> {
+ pub fn is_escaped(&self) -> bool {
+ match &self.0 .0 {
+ Repr::Text(it) => is_raw_identifier(&it),
+ Repr::TupleField(_) => false,
+ }
+ }
+
+ /// Returns the textual representation of this name as a [`SmolStr`].
+ /// Prefer using this over [`ToString::to_string`] if possible as this conversion is cheaper in
+ /// the general case.
+ pub fn to_smol_str(&self) -> SmolStr {
+ match &self.0 .0 {
+ Repr::Text(it) => {
+ if is_raw_identifier(&it) {
+ SmolStr::from_iter(["r#", &it])
+ } else {
+ it.clone()
+ }
+ }
+ Repr::TupleField(it) => SmolStr::new(&it.to_string()),
+ }
+ }
+}
+
+impl Name {
+ /// Note: this is private to make creating name from random string hard.
+ /// Hopefully, this should allow us to integrate hygiene cleaner in the
+ /// future, and to switch to interned representation of names.
+ const fn new_text(text: SmolStr) -> Name {
+ Name(Repr::Text(text))
+ }
+
+ pub fn new_tuple_field(idx: usize) -> Name {
+ Name(Repr::TupleField(idx))
+ }
+
+ pub fn new_lifetime(lt: &ast::Lifetime) -> Name {
+ Self::new_text(lt.text().into())
+ }
+
+ /// Shortcut to create inline plain text name
+ const fn new_inline(text: &str) -> Name {
+ Name::new_text(SmolStr::new_inline(text))
+ }
+
+ /// Resolve a name from the text of token.
+ fn resolve(raw_text: &str) -> Name {
+ match raw_text.strip_prefix("r#") {
+ Some(text) => Name::new_text(SmolStr::new(text)),
+ None => Name::new_text(raw_text.into()),
+ }
+ }
+
+ /// A fake name for things missing in the source code.
+ ///
+ /// For example, `impl Foo for {}` should be treated as a trait impl for a
+ /// type with a missing name. Similarly, `struct S { : u32 }` should have a
+ /// single field with a missing name.
+ ///
+ /// Ideally, we want a `gensym` semantics for missing names -- each missing
+ /// name is equal only to itself. It's not clear how to implement this in
+ /// salsa though, so we punt on that bit for a moment.
+ pub const fn missing() -> Name {
+ Name::new_inline("[missing name]")
+ }
+
+ /// Returns the tuple index this name represents if it is a tuple field.
+ pub fn as_tuple_index(&self) -> Option<usize> {
+ match self.0 {
+ Repr::TupleField(idx) => Some(idx),
+ _ => None,
+ }
+ }
+
+ /// Returns the text this name represents if it isn't a tuple field.
+ pub fn as_text(&self) -> Option<SmolStr> {
+ match &self.0 {
+ Repr::Text(it) => Some(it.clone()),
+ _ => None,
+ }
+ }
+
+ /// Returns the textual representation of this name as a [`SmolStr`].
+ /// Prefer using this over [`ToString::to_string`] if possible as this conversion is cheaper in
+ /// the general case.
+ pub fn to_smol_str(&self) -> SmolStr {
+ match &self.0 {
+ Repr::Text(it) => it.clone(),
+ Repr::TupleField(it) => SmolStr::new(&it.to_string()),
+ }
+ }
+
+ pub fn escaped(&self) -> EscapedName<'_> {
+ EscapedName(self)
+ }
+}
+
+pub trait AsName {
+ fn as_name(&self) -> Name;
+}
+
+impl AsName for ast::NameRef {
+ fn as_name(&self) -> Name {
+ match self.as_tuple_field() {
+ Some(idx) => Name::new_tuple_field(idx),
+ None => Name::resolve(&self.text()),
+ }
+ }
+}
+
+impl AsName for ast::Name {
+ fn as_name(&self) -> Name {
+ Name::resolve(&self.text())
+ }
+}
+
+impl AsName for ast::NameOrNameRef {
+ fn as_name(&self) -> Name {
+ match self {
+ ast::NameOrNameRef::Name(it) => it.as_name(),
+ ast::NameOrNameRef::NameRef(it) => it.as_name(),
+ }
+ }
+}
+
+impl AsName for tt::Ident {
+ fn as_name(&self) -> Name {
+ Name::resolve(&self.text)
+ }
+}
+
+impl AsName for ast::FieldKind {
+ fn as_name(&self) -> Name {
+ match self {
+ ast::FieldKind::Name(nr) => nr.as_name(),
+ ast::FieldKind::Index(idx) => {
+ let idx = idx.text().parse::<usize>().unwrap_or(0);
+ Name::new_tuple_field(idx)
+ }
+ }
+ }
+}
+
+impl AsName for base_db::Dependency {
+ fn as_name(&self) -> Name {
+ Name::new_text(SmolStr::new(&*self.name))
+ }
+}
+
+pub mod known {
+ macro_rules! known_names {
+ ($($ident:ident),* $(,)?) => {
+ $(
+ #[allow(bad_style)]
+ pub const $ident: super::Name =
+ super::Name::new_inline(stringify!($ident));
+ )*
+ };
+ }
+
+ known_names!(
+ // Primitives
+ isize,
+ i8,
+ i16,
+ i32,
+ i64,
+ i128,
+ usize,
+ u8,
+ u16,
+ u32,
+ u64,
+ u128,
+ f32,
+ f64,
+ bool,
+ char,
+ str,
+ // Special names
+ macro_rules,
+ doc,
+ cfg,
+ cfg_attr,
+ register_attr,
+ register_tool,
+ // Components of known path (value or mod name)
+ std,
+ core,
+ alloc,
+ iter,
+ ops,
+ future,
+ result,
+ boxed,
+ option,
+ prelude,
+ rust_2015,
+ rust_2018,
+ rust_2021,
+ v1,
+ // Components of known path (type name)
+ Iterator,
+ IntoIterator,
+ Item,
+ Try,
+ Ok,
+ Future,
+ Result,
+ Option,
+ Output,
+ Target,
+ Box,
+ RangeFrom,
+ RangeFull,
+ RangeInclusive,
+ RangeToInclusive,
+ RangeTo,
+ Range,
+ Neg,
+ Not,
+ None,
+ Index,
+ // Components of known path (function name)
+ filter_map,
+ next,
+ iter_mut,
+ len,
+ is_empty,
+ new,
+ // Builtin macros
+ asm,
+ assert,
+ column,
+ compile_error,
+ concat_idents,
+ concat_bytes,
+ concat,
+ const_format_args,
+ core_panic,
+ env,
+ file,
+ format_args_nl,
+ format_args,
+ global_asm,
+ include_bytes,
+ include_str,
+ include,
+ line,
+ llvm_asm,
+ log_syntax,
+ module_path,
+ option_env,
+ std_panic,
+ stringify,
+ trace_macros,
+ unreachable,
+ // Builtin derives
+ Copy,
+ Clone,
+ Default,
+ Debug,
+ Hash,
+ Ord,
+ PartialOrd,
+ Eq,
+ PartialEq,
+ // Builtin attributes
+ bench,
+ cfg_accessible,
+ cfg_eval,
+ crate_type,
+ derive,
+ global_allocator,
+ test,
+ test_case,
+ recursion_limit,
+ // Safe intrinsics
+ abort,
+ add_with_overflow,
+ black_box,
+ bitreverse,
+ bswap,
+ caller_location,
+ ctlz,
+ ctpop,
+ cttz,
+ discriminant_value,
+ forget,
+ likely,
+ maxnumf32,
+ maxnumf64,
+ min_align_of_val,
+ min_align_of,
+ minnumf32,
+ minnumf64,
+ mul_with_overflow,
+ needs_drop,
+ ptr_guaranteed_eq,
+ ptr_guaranteed_ne,
+ rotate_left,
+ rotate_right,
+ rustc_peek,
+ saturating_add,
+ saturating_sub,
+ size_of_val,
+ size_of,
+ sub_with_overflow,
+ type_id,
+ type_name,
+ unlikely,
+ variant_count,
+ wrapping_add,
+ wrapping_mul,
+ wrapping_sub,
+ // known methods of lang items
+ eq,
+ ne,
+ ge,
+ gt,
+ le,
+ lt,
+ // lang items
+ add_assign,
+ add,
+ bitand_assign,
+ bitand,
+ bitor_assign,
+ bitor,
+ bitxor_assign,
+ bitxor,
++ branch,
+ deref_mut,
+ deref,
+ div_assign,
+ div,
+ fn_mut,
+ fn_once,
+ future_trait,
+ index,
+ index_mut,
+ mul_assign,
+ mul,
+ neg,
+ not,
+ owned_box,
+ partial_ord,
++ poll,
+ r#fn,
+ rem_assign,
+ rem,
+ shl_assign,
+ shl,
+ shr_assign,
+ shr,
+ sub_assign,
+ sub,
+ );
+
+ // self/Self cannot be used as an identifier
+ pub const SELF_PARAM: super::Name = super::Name::new_inline("self");
+ pub const SELF_TYPE: super::Name = super::Name::new_inline("Self");
+
+ pub const STATIC_LIFETIME: super::Name = super::Name::new_inline("'static");
+
+ #[macro_export]
+ macro_rules! name {
+ (self) => {
+ $crate::name::known::SELF_PARAM
+ };
+ (Self) => {
+ $crate::name::known::SELF_TYPE
+ };
+ ('static) => {
+ $crate::name::known::STATIC_LIFETIME
+ };
+ ($ident:ident) => {
+ $crate::name::known::$ident
+ };
+ }
+}
+
+pub use crate::name;
--- /dev/null
- expr::{ArithOp, Array, BinaryOp, CmpOp, Expr, ExprId, Literal, Ordering, Statement, UnaryOp},
+//! Type inference for expressions.
+
+use std::{
+ collections::hash_map::Entry,
+ iter::{repeat, repeat_with},
+ mem,
+};
+
+use chalk_ir::{
+ cast::Cast, fold::Shift, DebruijnIndex, GenericArgData, Mutability, TyVariableKind,
+};
+use hir_def::{
- ConstParamId, FieldId, FunctionId, ItemContainerId, Lookup,
++ expr::{ArithOp, Array, BinaryOp, CmpOp, Expr, ExprId, Literal, Statement, UnaryOp},
+ generics::TypeOrConstParamData,
+ path::{GenericArg, GenericArgs},
+ resolver::resolver_for_expr,
- use hir_expand::name::{name, Name};
++ ConstParamId, FieldId, ItemContainerId, Lookup,
+};
- method_resolution::{self, VisibleFromModule},
++use hir_expand::name::Name;
+use stdx::always;
+use syntax::ast::RangeOp;
+
+use crate::{
+ autoderef::{self, Autoderef},
+ consteval,
+ infer::coerce::CoerceMany,
+ lower::{
+ const_or_path_to_chalk, generic_arg_to_chalk, lower_to_chalk_mutability, ParamLoweringMode,
+ },
+ mapping::{from_chalk, ToChalk},
- let func = self.resolve_binop_method(op);
++ method_resolution::{self, lang_names_for_bin_op, VisibleFromModule},
+ primitive::{self, UintTy},
+ static_lifetime, to_chalk_trait_id,
+ utils::{generics, Generics},
+ AdtId, Binders, CallableDefId, FnPointer, FnSig, FnSubst, Interner, Rawness, Scalar,
+ Substitution, TraitRef, Ty, TyBuilder, TyExt, TyKind,
+};
+
+use super::{
+ coerce::auto_deref_adjust_steps, find_breakable, BindingMode, BreakableContext, Diverges,
+ Expectation, InferenceContext, InferenceDiagnostic, TypeMismatch,
+};
+
+impl<'a> InferenceContext<'a> {
+ pub(crate) fn infer_expr(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
+ let ty = self.infer_expr_inner(tgt_expr, expected);
+ if let Some(expected_ty) = expected.only_has_type(&mut self.table) {
+ let could_unify = self.unify(&ty, &expected_ty);
+ if !could_unify {
+ self.result.type_mismatches.insert(
+ tgt_expr.into(),
+ TypeMismatch { expected: expected_ty, actual: ty.clone() },
+ );
+ }
+ }
+ ty
+ }
+
+ /// Infer type of expression with possibly implicit coerce to the expected type.
+ /// Return the type after possible coercion.
+ pub(super) fn infer_expr_coerce(&mut self, expr: ExprId, expected: &Expectation) -> Ty {
+ let ty = self.infer_expr_inner(expr, expected);
+ if let Some(target) = expected.only_has_type(&mut self.table) {
+ match self.coerce(Some(expr), &ty, &target) {
+ Ok(res) => res,
+ Err(_) => {
+ self.result.type_mismatches.insert(
+ expr.into(),
+ TypeMismatch { expected: target.clone(), actual: ty.clone() },
+ );
+ target
+ }
+ }
+ } else {
+ ty
+ }
+ }
+
+ fn infer_expr_inner(&mut self, tgt_expr: ExprId, expected: &Expectation) -> Ty {
+ self.db.unwind_if_cancelled();
+
+ let ty = match &self.body[tgt_expr] {
+ Expr::Missing => self.err_ty(),
+ &Expr::If { condition, then_branch, else_branch } => {
+ self.infer_expr(
+ condition,
+ &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
+ );
+
+ let condition_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
+ let mut both_arms_diverge = Diverges::Always;
+
+ let result_ty = self.table.new_type_var();
+ let then_ty = self.infer_expr_inner(then_branch, expected);
+ both_arms_diverge &= mem::replace(&mut self.diverges, Diverges::Maybe);
+ let mut coerce = CoerceMany::new(result_ty);
+ coerce.coerce(self, Some(then_branch), &then_ty);
+ let else_ty = match else_branch {
+ Some(else_branch) => self.infer_expr_inner(else_branch, expected),
+ None => TyBuilder::unit(),
+ };
+ both_arms_diverge &= self.diverges;
+ // FIXME: create a synthetic `else {}` so we have something to refer to here instead of None?
+ coerce.coerce(self, else_branch, &else_ty);
+
+ self.diverges = condition_diverges | both_arms_diverge;
+
+ coerce.complete()
+ }
+ &Expr::Let { pat, expr } => {
+ let input_ty = self.infer_expr(expr, &Expectation::none());
+ self.infer_pat(pat, &input_ty, BindingMode::default());
+ TyKind::Scalar(Scalar::Bool).intern(Interner)
+ }
+ Expr::Block { statements, tail, label, id: _ } => {
+ let old_resolver = mem::replace(
+ &mut self.resolver,
+ resolver_for_expr(self.db.upcast(), self.owner, tgt_expr),
+ );
+ let ty = match label {
+ Some(_) => {
+ let break_ty = self.table.new_type_var();
+ self.breakables.push(BreakableContext {
+ may_break: false,
+ coerce: CoerceMany::new(break_ty.clone()),
+ label: label.map(|label| self.body[label].name.clone()),
+ });
+ let ty = self.infer_block(
+ tgt_expr,
+ statements,
+ *tail,
+ &Expectation::has_type(break_ty),
+ );
+ let ctxt = self.breakables.pop().expect("breakable stack broken");
+ if ctxt.may_break {
+ ctxt.coerce.complete()
+ } else {
+ ty
+ }
+ }
+ None => self.infer_block(tgt_expr, statements, *tail, expected),
+ };
+ self.resolver = old_resolver;
+ ty
+ }
+ Expr::Unsafe { body } | Expr::Const { body } => self.infer_expr(*body, expected),
+ Expr::TryBlock { body } => {
+ let _inner = self.infer_expr(*body, expected);
+ // FIXME should be std::result::Result<{inner}, _>
+ self.err_ty()
+ }
+ Expr::Async { body } => {
+ let ret_ty = self.table.new_type_var();
+ let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
+ let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
+
+ let inner_ty = self.infer_expr_coerce(*body, &Expectation::has_type(ret_ty));
+
+ self.diverges = prev_diverges;
+ self.return_ty = prev_ret_ty;
+
+ // Use the first type parameter as the output type of future.
+ // existential type AsyncBlockImplTrait<InnerType>: Future<Output = InnerType>
+ let impl_trait_id = crate::ImplTraitId::AsyncBlockTypeImplTrait(self.owner, *body);
+ let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
+ TyKind::OpaqueType(opaque_ty_id, Substitution::from1(Interner, inner_ty))
+ .intern(Interner)
+ }
+ Expr::Loop { body, label } => {
+ self.breakables.push(BreakableContext {
+ may_break: false,
+ coerce: CoerceMany::new(self.table.new_type_var()),
+ label: label.map(|label| self.body[label].name.clone()),
+ });
+ self.infer_expr(*body, &Expectation::has_type(TyBuilder::unit()));
+
+ let ctxt = self.breakables.pop().expect("breakable stack broken");
+
+ if ctxt.may_break {
+ self.diverges = Diverges::Maybe;
+ ctxt.coerce.complete()
+ } else {
+ TyKind::Never.intern(Interner)
+ }
+ }
+ Expr::While { condition, body, label } => {
+ self.breakables.push(BreakableContext {
+ may_break: false,
+ coerce: CoerceMany::new(self.err_ty()),
+ label: label.map(|label| self.body[label].name.clone()),
+ });
+ self.infer_expr(
+ *condition,
+ &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
+ );
+ self.infer_expr(*body, &Expectation::has_type(TyBuilder::unit()));
+ let _ctxt = self.breakables.pop().expect("breakable stack broken");
+ // the body may not run, so it diverging doesn't mean we diverge
+ self.diverges = Diverges::Maybe;
+ TyBuilder::unit()
+ }
+ Expr::For { iterable, body, pat, label } => {
+ let iterable_ty = self.infer_expr(*iterable, &Expectation::none());
+
+ self.breakables.push(BreakableContext {
+ may_break: false,
+ coerce: CoerceMany::new(self.err_ty()),
+ label: label.map(|label| self.body[label].name.clone()),
+ });
+ let pat_ty =
+ self.resolve_associated_type(iterable_ty, self.resolve_into_iter_item());
+
+ self.infer_pat(*pat, &pat_ty, BindingMode::default());
+
+ self.infer_expr(*body, &Expectation::has_type(TyBuilder::unit()));
+ let _ctxt = self.breakables.pop().expect("breakable stack broken");
+ // the body may not run, so it diverging doesn't mean we diverge
+ self.diverges = Diverges::Maybe;
+ TyBuilder::unit()
+ }
+ Expr::Closure { body, args, ret_type, arg_types } => {
+ assert_eq!(args.len(), arg_types.len());
+
+ let mut sig_tys = Vec::new();
+
+ // collect explicitly written argument types
+ for arg_type in arg_types.iter() {
+ let arg_ty = match arg_type {
+ Some(type_ref) => self.make_ty(type_ref),
+ None => self.table.new_type_var(),
+ };
+ sig_tys.push(arg_ty);
+ }
+
+ // add return type
+ let ret_ty = match ret_type {
+ Some(type_ref) => self.make_ty(type_ref),
+ None => self.table.new_type_var(),
+ };
+ sig_tys.push(ret_ty.clone());
+ let sig_ty = TyKind::Function(FnPointer {
+ num_binders: 0,
+ sig: FnSig { abi: (), safety: chalk_ir::Safety::Safe, variadic: false },
+ substitution: FnSubst(
+ Substitution::from_iter(Interner, sig_tys.clone()).shifted_in(Interner),
+ ),
+ })
+ .intern(Interner);
+ let closure_id = self.db.intern_closure((self.owner, tgt_expr)).into();
+ let closure_ty =
+ TyKind::Closure(closure_id, Substitution::from1(Interner, sig_ty.clone()))
+ .intern(Interner);
+
+ // Eagerly try to relate the closure type with the expected
+ // type, otherwise we often won't have enough information to
+ // infer the body.
+ self.deduce_closure_type_from_expectations(
+ tgt_expr,
+ &closure_ty,
+ &sig_ty,
+ expected,
+ );
+
+ // Now go through the argument patterns
+ for (arg_pat, arg_ty) in args.iter().zip(sig_tys) {
+ self.infer_pat(*arg_pat, &arg_ty, BindingMode::default());
+ }
+
+ let prev_diverges = mem::replace(&mut self.diverges, Diverges::Maybe);
+ let prev_ret_ty = mem::replace(&mut self.return_ty, ret_ty.clone());
+
+ self.infer_expr_coerce(*body, &Expectation::has_type(ret_ty));
+
+ self.diverges = prev_diverges;
+ self.return_ty = prev_ret_ty;
+
+ closure_ty
+ }
+ Expr::Call { callee, args, .. } => {
+ let callee_ty = self.infer_expr(*callee, &Expectation::none());
+ let mut derefs = Autoderef::new(&mut self.table, callee_ty.clone());
+ let mut res = None;
+ let mut derefed_callee = callee_ty.clone();
+ // manual loop to be able to access `derefs.table`
+ while let Some((callee_deref_ty, _)) = derefs.next() {
+ res = derefs.table.callable_sig(&callee_deref_ty, args.len());
+ if res.is_some() {
+ derefed_callee = callee_deref_ty;
+ break;
+ }
+ }
+ // if the function is unresolved, we use is_varargs=true to
+ // suppress the arg count diagnostic here
+ let is_varargs =
+ derefed_callee.callable_sig(self.db).map_or(false, |sig| sig.is_varargs)
+ || res.is_none();
+ let (param_tys, ret_ty) = match res {
+ Some(res) => {
+ let adjustments = auto_deref_adjust_steps(&derefs);
+ self.write_expr_adj(*callee, adjustments);
+ res
+ }
+ None => (Vec::new(), self.err_ty()), // FIXME diagnostic
+ };
+ let indices_to_skip = self.check_legacy_const_generics(derefed_callee, args);
+ self.register_obligations_for_call(&callee_ty);
+
+ let expected_inputs = self.expected_inputs_for_expected_output(
+ expected,
+ ret_ty.clone(),
+ param_tys.clone(),
+ );
+
+ self.check_call_arguments(
+ tgt_expr,
+ args,
+ &expected_inputs,
+ ¶m_tys,
+ &indices_to_skip,
+ is_varargs,
+ );
+ self.normalize_associated_types_in(ret_ty)
+ }
+ Expr::MethodCall { receiver, args, method_name, generic_args } => self
+ .infer_method_call(
+ tgt_expr,
+ *receiver,
+ args,
+ method_name,
+ generic_args.as_deref(),
+ expected,
+ ),
+ Expr::Match { expr, arms } => {
+ let input_ty = self.infer_expr(*expr, &Expectation::none());
+
+ let expected = expected.adjust_for_branches(&mut self.table);
+
+ let result_ty = if arms.is_empty() {
+ TyKind::Never.intern(Interner)
+ } else {
+ match &expected {
+ Expectation::HasType(ty) => ty.clone(),
+ _ => self.table.new_type_var(),
+ }
+ };
+ let mut coerce = CoerceMany::new(result_ty);
+
+ let matchee_diverges = self.diverges;
+ let mut all_arms_diverge = Diverges::Always;
+
+ for arm in arms.iter() {
+ self.diverges = Diverges::Maybe;
+ let _pat_ty = self.infer_pat(arm.pat, &input_ty, BindingMode::default());
+ if let Some(guard_expr) = arm.guard {
+ self.infer_expr(
+ guard_expr,
+ &Expectation::has_type(TyKind::Scalar(Scalar::Bool).intern(Interner)),
+ );
+ }
+
+ let arm_ty = self.infer_expr_inner(arm.expr, &expected);
+ all_arms_diverge &= self.diverges;
+ coerce.coerce(self, Some(arm.expr), &arm_ty);
+ }
+
+ self.diverges = matchee_diverges | all_arms_diverge;
+
+ coerce.complete()
+ }
+ Expr::Path(p) => {
+ // FIXME this could be more efficient...
+ let resolver = resolver_for_expr(self.db.upcast(), self.owner, tgt_expr);
+ self.infer_path(&resolver, p, tgt_expr.into()).unwrap_or_else(|| self.err_ty())
+ }
+ Expr::Continue { .. } => TyKind::Never.intern(Interner),
+ Expr::Break { expr, label } => {
+ let mut coerce = match find_breakable(&mut self.breakables, label.as_ref()) {
+ Some(ctxt) => {
+ // avoiding the borrowck
+ mem::replace(
+ &mut ctxt.coerce,
+ CoerceMany::new(self.result.standard_types.unknown.clone()),
+ )
+ }
+ None => CoerceMany::new(self.result.standard_types.unknown.clone()),
+ };
+
+ let val_ty = if let Some(expr) = *expr {
+ self.infer_expr(expr, &Expectation::none())
+ } else {
+ TyBuilder::unit()
+ };
+
+ // FIXME: create a synthetic `()` during lowering so we have something to refer to here?
+ coerce.coerce(self, *expr, &val_ty);
+
+ if let Some(ctxt) = find_breakable(&mut self.breakables, label.as_ref()) {
+ ctxt.coerce = coerce;
+ ctxt.may_break = true;
+ } else {
+ self.push_diagnostic(InferenceDiagnostic::BreakOutsideOfLoop {
+ expr: tgt_expr,
+ });
+ };
+
+ TyKind::Never.intern(Interner)
+ }
+ Expr::Return { expr } => {
+ if let Some(expr) = expr {
+ self.infer_expr_coerce(*expr, &Expectation::has_type(self.return_ty.clone()));
+ } else {
+ let unit = TyBuilder::unit();
+ let _ = self.coerce(Some(tgt_expr), &unit, &self.return_ty.clone());
+ }
+ TyKind::Never.intern(Interner)
+ }
+ Expr::Yield { expr } => {
+ // FIXME: track yield type for coercion
+ if let Some(expr) = expr {
+ self.infer_expr(*expr, &Expectation::none());
+ }
+ TyKind::Never.intern(Interner)
+ }
+ Expr::RecordLit { path, fields, spread, .. } => {
+ let (ty, def_id) = self.resolve_variant(path.as_deref(), false);
+ if let Some(variant) = def_id {
+ self.write_variant_resolution(tgt_expr.into(), variant);
+ }
+
+ if let Some(t) = expected.only_has_type(&mut self.table) {
+ self.unify(&ty, &t);
+ }
+
+ let substs = ty
+ .as_adt()
+ .map(|(_, s)| s.clone())
+ .unwrap_or_else(|| Substitution::empty(Interner));
+ let field_types = def_id.map(|it| self.db.field_types(it)).unwrap_or_default();
+ let variant_data = def_id.map(|it| it.variant_data(self.db.upcast()));
+ for field in fields.iter() {
+ let field_def =
+ variant_data.as_ref().and_then(|it| match it.field(&field.name) {
+ Some(local_id) => Some(FieldId { parent: def_id.unwrap(), local_id }),
+ None => {
+ self.push_diagnostic(InferenceDiagnostic::NoSuchField {
+ expr: field.expr,
+ });
+ None
+ }
+ });
+ let field_ty = field_def.map_or(self.err_ty(), |it| {
+ field_types[it.local_id].clone().substitute(Interner, &substs)
+ });
+ self.infer_expr_coerce(field.expr, &Expectation::has_type(field_ty));
+ }
+ if let Some(expr) = spread {
+ self.infer_expr(*expr, &Expectation::has_type(ty.clone()));
+ }
+ ty
+ }
+ Expr::Field { expr, name } => {
+ let receiver_ty = self.infer_expr_inner(*expr, &Expectation::none());
+
+ let mut autoderef = Autoderef::new(&mut self.table, receiver_ty);
+ let ty = autoderef.by_ref().find_map(|(derefed_ty, _)| {
+ let (field_id, parameters) = match derefed_ty.kind(Interner) {
+ TyKind::Tuple(_, substs) => {
+ return name.as_tuple_index().and_then(|idx| {
+ substs
+ .as_slice(Interner)
+ .get(idx)
+ .map(|a| a.assert_ty_ref(Interner))
+ .cloned()
+ });
+ }
+ TyKind::Adt(AdtId(hir_def::AdtId::StructId(s)), parameters) => {
+ let local_id = self.db.struct_data(*s).variant_data.field(name)?;
+ let field = FieldId { parent: (*s).into(), local_id };
+ (field, parameters.clone())
+ }
+ TyKind::Adt(AdtId(hir_def::AdtId::UnionId(u)), parameters) => {
+ let local_id = self.db.union_data(*u).variant_data.field(name)?;
+ let field = FieldId { parent: (*u).into(), local_id };
+ (field, parameters.clone())
+ }
+ _ => return None,
+ };
+ let is_visible = self.db.field_visibilities(field_id.parent)[field_id.local_id]
+ .is_visible_from(self.db.upcast(), self.resolver.module());
+ if !is_visible {
+ // Write down the first field resolution even if it is not visible
+ // This aids IDE features for private fields like goto def and in
+ // case of autoderef finding an applicable field, this will be
+ // overwritten in a following cycle
+ if let Entry::Vacant(entry) = self.result.field_resolutions.entry(tgt_expr)
+ {
+ entry.insert(field_id);
+ }
+ return None;
+ }
+ // can't have `write_field_resolution` here because `self.table` is borrowed :(
+ self.result.field_resolutions.insert(tgt_expr, field_id);
+ let ty = self.db.field_types(field_id.parent)[field_id.local_id]
+ .clone()
+ .substitute(Interner, ¶meters);
+ Some(ty)
+ });
+ let ty = match ty {
+ Some(ty) => {
+ let adjustments = auto_deref_adjust_steps(&autoderef);
+ self.write_expr_adj(*expr, adjustments);
+ let ty = self.insert_type_vars(ty);
+ let ty = self.normalize_associated_types_in(ty);
+ ty
+ }
+ _ => self.err_ty(),
+ };
+ ty
+ }
+ Expr::Await { expr } => {
+ let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+ self.resolve_associated_type(inner_ty, self.resolve_future_future_output())
+ }
+ Expr::Try { expr } => {
+ let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+ self.resolve_associated_type(inner_ty, self.resolve_ops_try_ok())
+ }
+ Expr::Cast { expr, type_ref } => {
+ // FIXME: propagate the "castable to" expectation (and find a test case that shows this is necessary)
+ let _inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+ let cast_ty = self.make_ty(type_ref);
+ // FIXME check the cast...
+ cast_ty
+ }
+ Expr::Ref { expr, rawness, mutability } => {
+ let mutability = lower_to_chalk_mutability(*mutability);
+ let expectation = if let Some((exp_inner, exp_rawness, exp_mutability)) = expected
+ .only_has_type(&mut self.table)
+ .as_ref()
+ .and_then(|t| t.as_reference_or_ptr())
+ {
+ if exp_mutability == Mutability::Mut && mutability == Mutability::Not {
+ // FIXME: record type error - expected mut reference but found shared ref,
+ // which cannot be coerced
+ }
+ if exp_rawness == Rawness::Ref && *rawness == Rawness::RawPtr {
+ // FIXME: record type error - expected reference but found ptr,
+ // which cannot be coerced
+ }
+ Expectation::rvalue_hint(&mut self.table, Ty::clone(exp_inner))
+ } else {
+ Expectation::none()
+ };
+ let inner_ty = self.infer_expr_inner(*expr, &expectation);
+ match rawness {
+ Rawness::RawPtr => TyKind::Raw(mutability, inner_ty),
+ Rawness::Ref => TyKind::Ref(mutability, static_lifetime(), inner_ty),
+ }
+ .intern(Interner)
+ }
+ &Expr::Box { expr } => self.infer_expr_box(expr, expected),
+ Expr::UnaryOp { expr, op } => {
+ let inner_ty = self.infer_expr_inner(*expr, &Expectation::none());
+ let inner_ty = self.resolve_ty_shallow(&inner_ty);
+ match op {
+ UnaryOp::Deref => {
+ autoderef::deref(&mut self.table, inner_ty).unwrap_or_else(|| self.err_ty())
+ }
+ UnaryOp::Neg => {
+ match inner_ty.kind(Interner) {
+ // Fast path for builtins
+ TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_) | Scalar::Float(_))
+ | TyKind::InferenceVar(
+ _,
+ TyVariableKind::Integer | TyVariableKind::Float,
+ ) => inner_ty,
+ // Otherwise we resolve via the std::ops::Neg trait
+ _ => self
+ .resolve_associated_type(inner_ty, self.resolve_ops_neg_output()),
+ }
+ }
+ UnaryOp::Not => {
+ match inner_ty.kind(Interner) {
+ // Fast path for builtins
+ TyKind::Scalar(Scalar::Bool | Scalar::Int(_) | Scalar::Uint(_))
+ | TyKind::InferenceVar(_, TyVariableKind::Integer) => inner_ty,
+ // Otherwise we resolve via the std::ops::Not trait
+ _ => self
+ .resolve_associated_type(inner_ty, self.resolve_ops_not_output()),
+ }
+ }
+ }
+ }
+ Expr::BinaryOp { lhs, rhs, op } => match op {
+ Some(BinaryOp::Assignment { op: None }) => {
+ let lhs = *lhs;
+ let is_ordinary = match &self.body[lhs] {
+ Expr::Array(_)
+ | Expr::RecordLit { .. }
+ | Expr::Tuple { .. }
+ | Expr::Underscore => false,
+ Expr::Call { callee, .. } => !matches!(&self.body[*callee], Expr::Path(_)),
+ _ => true,
+ };
+
+ // In ordinary (non-destructuring) assignments, the type of
+ // `lhs` must be inferred first so that the ADT fields
+ // instantiations in RHS can be coerced to it. Note that this
+ // cannot happen in destructuring assignments because of how
+ // they are desugared.
+ if is_ordinary {
+ let lhs_ty = self.infer_expr(lhs, &Expectation::none());
+ self.infer_expr_coerce(*rhs, &Expectation::has_type(lhs_ty));
+ } else {
+ let rhs_ty = self.infer_expr(*rhs, &Expectation::none());
+ self.infer_assignee_expr(lhs, &rhs_ty);
+ }
+ self.result.standard_types.unit.clone()
+ }
+ Some(BinaryOp::LogicOp(_)) => {
+ let bool_ty = self.result.standard_types.bool_.clone();
+ self.infer_expr_coerce(*lhs, &Expectation::HasType(bool_ty.clone()));
+ let lhs_diverges = self.diverges;
+ self.infer_expr_coerce(*rhs, &Expectation::HasType(bool_ty.clone()));
+ // Depending on the LHS' value, the RHS can never execute.
+ self.diverges = lhs_diverges;
+ bool_ty
+ }
+ Some(op) => self.infer_overloadable_binop(*lhs, *op, *rhs, tgt_expr),
+ _ => self.err_ty(),
+ },
+ Expr::Range { lhs, rhs, range_type } => {
+ let lhs_ty = lhs.map(|e| self.infer_expr_inner(e, &Expectation::none()));
+ let rhs_expect = lhs_ty
+ .as_ref()
+ .map_or_else(Expectation::none, |ty| Expectation::has_type(ty.clone()));
+ let rhs_ty = rhs.map(|e| self.infer_expr(e, &rhs_expect));
+ match (range_type, lhs_ty, rhs_ty) {
+ (RangeOp::Exclusive, None, None) => match self.resolve_range_full() {
+ Some(adt) => TyBuilder::adt(self.db, adt).build(),
+ None => self.err_ty(),
+ },
+ (RangeOp::Exclusive, None, Some(ty)) => match self.resolve_range_to() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ },
+ (RangeOp::Inclusive, None, Some(ty)) => {
+ match self.resolve_range_to_inclusive() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ }
+ }
+ (RangeOp::Exclusive, Some(_), Some(ty)) => match self.resolve_range() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ },
+ (RangeOp::Inclusive, Some(_), Some(ty)) => {
+ match self.resolve_range_inclusive() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ }
+ }
+ (RangeOp::Exclusive, Some(ty), None) => match self.resolve_range_from() {
+ Some(adt) => TyBuilder::adt(self.db, adt).push(ty).build(),
+ None => self.err_ty(),
+ },
+ (RangeOp::Inclusive, _, None) => self.err_ty(),
+ }
+ }
+ Expr::Index { base, index } => {
+ let base_ty = self.infer_expr_inner(*base, &Expectation::none());
+ let index_ty = self.infer_expr(*index, &Expectation::none());
+
+ if let Some(index_trait) = self.resolve_ops_index() {
+ let canonicalized = self.canonicalize(base_ty.clone());
+ let receiver_adjustments = method_resolution::resolve_indexing_op(
+ self.db,
+ self.trait_env.clone(),
+ canonicalized.value,
+ index_trait,
+ );
+ let (self_ty, adj) = receiver_adjustments
+ .map_or((self.err_ty(), Vec::new()), |adj| {
+ adj.apply(&mut self.table, base_ty)
+ });
+ self.write_expr_adj(*base, adj);
+ self.resolve_associated_type_with_params(
+ self_ty,
+ self.resolve_ops_index_output(),
+ &[GenericArgData::Ty(index_ty).intern(Interner)],
+ )
+ } else {
+ self.err_ty()
+ }
+ }
+ Expr::Tuple { exprs, .. } => {
+ let mut tys = match expected
+ .only_has_type(&mut self.table)
+ .as_ref()
+ .map(|t| t.kind(Interner))
+ {
+ Some(TyKind::Tuple(_, substs)) => substs
+ .iter(Interner)
+ .map(|a| a.assert_ty_ref(Interner).clone())
+ .chain(repeat_with(|| self.table.new_type_var()))
+ .take(exprs.len())
+ .collect::<Vec<_>>(),
+ _ => (0..exprs.len()).map(|_| self.table.new_type_var()).collect(),
+ };
+
+ for (expr, ty) in exprs.iter().zip(tys.iter_mut()) {
+ self.infer_expr_coerce(*expr, &Expectation::has_type(ty.clone()));
+ }
+
+ TyKind::Tuple(tys.len(), Substitution::from_iter(Interner, tys)).intern(Interner)
+ }
+ Expr::Array(array) => {
+ let elem_ty =
+ match expected.to_option(&mut self.table).as_ref().map(|t| t.kind(Interner)) {
+ Some(TyKind::Array(st, _) | TyKind::Slice(st)) => st.clone(),
+ _ => self.table.new_type_var(),
+ };
+ let mut coerce = CoerceMany::new(elem_ty.clone());
+
+ let expected = Expectation::has_type(elem_ty.clone());
+ let len = match array {
+ Array::ElementList { elements, .. } => {
+ for &expr in elements.iter() {
+ let cur_elem_ty = self.infer_expr_inner(expr, &expected);
+ coerce.coerce(self, Some(expr), &cur_elem_ty);
+ }
+ consteval::usize_const(Some(elements.len() as u128))
+ }
+ &Array::Repeat { initializer, repeat } => {
+ self.infer_expr_coerce(initializer, &Expectation::has_type(elem_ty));
+ self.infer_expr(
+ repeat,
+ &Expectation::has_type(
+ TyKind::Scalar(Scalar::Uint(UintTy::Usize)).intern(Interner),
+ ),
+ );
+
+ if let Some(g_def) = self.owner.as_generic_def_id() {
+ let generics = generics(self.db.upcast(), g_def);
+ consteval::eval_to_const(
+ repeat,
+ ParamLoweringMode::Placeholder,
+ self,
+ || generics,
+ DebruijnIndex::INNERMOST,
+ )
+ } else {
+ consteval::usize_const(None)
+ }
+ }
+ };
+
+ TyKind::Array(coerce.complete(), len).intern(Interner)
+ }
+ Expr::Literal(lit) => match lit {
+ Literal::Bool(..) => TyKind::Scalar(Scalar::Bool).intern(Interner),
+ Literal::String(..) => {
+ TyKind::Ref(Mutability::Not, static_lifetime(), TyKind::Str.intern(Interner))
+ .intern(Interner)
+ }
+ Literal::ByteString(bs) => {
+ let byte_type = TyKind::Scalar(Scalar::Uint(UintTy::U8)).intern(Interner);
+
+ let len = consteval::usize_const(Some(bs.len() as u128));
+
+ let array_type = TyKind::Array(byte_type, len).intern(Interner);
+ TyKind::Ref(Mutability::Not, static_lifetime(), array_type).intern(Interner)
+ }
+ Literal::Char(..) => TyKind::Scalar(Scalar::Char).intern(Interner),
+ Literal::Int(_v, ty) => match ty {
+ Some(int_ty) => {
+ TyKind::Scalar(Scalar::Int(primitive::int_ty_from_builtin(*int_ty)))
+ .intern(Interner)
+ }
+ None => self.table.new_integer_var(),
+ },
+ Literal::Uint(_v, ty) => match ty {
+ Some(int_ty) => {
+ TyKind::Scalar(Scalar::Uint(primitive::uint_ty_from_builtin(*int_ty)))
+ .intern(Interner)
+ }
+ None => self.table.new_integer_var(),
+ },
+ Literal::Float(_v, ty) => match ty {
+ Some(float_ty) => {
+ TyKind::Scalar(Scalar::Float(primitive::float_ty_from_builtin(*float_ty)))
+ .intern(Interner)
+ }
+ None => self.table.new_float_var(),
+ },
+ },
+ Expr::MacroStmts { tail, statements } => {
+ self.infer_block(tgt_expr, statements, *tail, expected)
+ }
+ Expr::Underscore => {
+ // Underscore expressions may only appear in assignee expressions,
+ // which are handled by `infer_assignee_expr()`, so any underscore
+ // expression reaching this branch is an error.
+ self.err_ty()
+ }
+ };
+ // use a new type variable if we got unknown here
+ let ty = self.insert_type_vars_shallow(ty);
+ self.write_expr_ty(tgt_expr, ty.clone());
+ if self.resolve_ty_shallow(&ty).is_never() {
+ // Any expression that produces a value of type `!` must have diverged
+ self.diverges = Diverges::Always;
+ }
+ ty
+ }
+
+ /// Infers the type of a `box <expr>` expression.
+ ///
+ /// When the expected type is already `Box<T>` (the `Box` lang item), the
+ /// inner expression is inferred against an rvalue hint of `T` so inference
+ /// has more information to work with; otherwise it is inferred with no
+ /// expectation. Returns `Box<inner_ty, ...>` with any remaining generic
+ /// parameters filled from defaults / fresh type vars, or the error type
+ /// when the `Box` lang item cannot be resolved.
+ fn infer_expr_box(&mut self, inner_expr: ExprId, expected: &Expectation) -> Ty {
+ if let Some(box_id) = self.resolve_boxed_box() {
+ let table = &mut self.table;
+ // Peel `T` out of an expected `Box<T>` — only when the expected
+ // ADT is exactly the `Box` lang item — and use it as an rvalue hint.
+ let inner_exp = expected
+ .to_option(table)
+ .as_ref()
+ .map(|e| e.as_adt())
+ .flatten()
+ .filter(|(e_adt, _)| e_adt == &box_id)
+ .map(|(_, subts)| {
+ let g = subts.at(Interner, 0);
+ Expectation::rvalue_hint(table, Ty::clone(g.assert_ty_ref(Interner)))
+ })
+ .unwrap_or_else(Expectation::none);
+
+ let inner_ty = self.infer_expr_inner(inner_expr, &inner_exp);
+ TyBuilder::adt(self.db, box_id)
+ .push(inner_ty)
+ .fill_with_defaults(self.db, || self.table.new_type_var())
+ .build()
+ } else {
+ self.err_ty()
+ }
+ }
+
+ /// Infers the type of an assignee expression — the LHS of a destructuring
+ /// assignment such as `(a, b) = rhs`, `[x, ..] = rhs`, `Struct { f } = rhs`,
+ /// or a tuple-struct call pattern — by reusing the pattern-inference helpers
+ /// (`infer_tuple_pat_like`, `infer_tuple_struct_pat_like`, `infer_record_pat_like`).
+ ///
+ /// For plain place expressions (the final `_ =>` arm) the RHS type is
+ /// coerced to the LHS type and the function returns early; for all
+ /// destructuring shapes the inferred type is unified with `rhs_ty` at the
+ /// end and a `TypeMismatch` is recorded on failure.
+ pub(super) fn infer_assignee_expr(&mut self, lhs: ExprId, rhs_ty: &Ty) -> Ty {
+ // A bare `..` (exclusive full range) acts as the rest "pattern" and is
+ // filtered out of the sub-expressions below.
+ let is_rest_expr = |expr| {
+ matches!(
+ &self.body[expr],
+ Expr::Range { lhs: None, rhs: None, range_type: RangeOp::Exclusive },
+ )
+ };
+
+ let rhs_ty = self.resolve_ty_shallow(rhs_ty);
+
+ let ty = match &self.body[lhs] {
+ Expr::Tuple { exprs, .. } => {
+ // We don't consider multiple ellipses. This is analogous to
+ // `hir_def::body::lower::ExprCollector::collect_tuple_pat()`.
+ let ellipsis = exprs.iter().position(|e| is_rest_expr(*e));
+ let exprs: Vec<_> = exprs.iter().filter(|e| !is_rest_expr(**e)).copied().collect();
+
+ self.infer_tuple_pat_like(&rhs_ty, (), ellipsis, &exprs)
+ }
+ Expr::Call { callee, args, .. } => {
+ // Tuple structs
+ let path = match &self.body[*callee] {
+ Expr::Path(path) => Some(path),
+ _ => None,
+ };
+
+ // We don't consider multiple ellipses. This is analogous to
+ // `hir_def::body::lower::ExprCollector::collect_tuple_pat()`.
+ let ellipsis = args.iter().position(|e| is_rest_expr(*e));
+ let args: Vec<_> = args.iter().filter(|e| !is_rest_expr(**e)).copied().collect();
+
+ self.infer_tuple_struct_pat_like(path, &rhs_ty, (), lhs, ellipsis, &args)
+ }
+ Expr::Array(Array::ElementList { elements, .. }) => {
+ let elem_ty = match rhs_ty.kind(Interner) {
+ TyKind::Array(st, _) => st.clone(),
+ _ => self.err_ty(),
+ };
+
+ // There's no need to handle `..` as it cannot be bound.
+ let sub_exprs = elements.iter().filter(|e| !is_rest_expr(**e));
+
+ // Recurse into each element with the array's element type.
+ for e in sub_exprs {
+ self.infer_assignee_expr(*e, &elem_ty);
+ }
+
+ match rhs_ty.kind(Interner) {
+ TyKind::Array(_, _) => rhs_ty.clone(),
+ // Even when `rhs_ty` is not an array type, this assignee
+ // expression is inferred to be an array (of unknown element
+ // type and length). This should not be just an error type,
+ // because we are to compute the unifiability of this type and
+ // `rhs_ty` in the end of this function to issue type mismatches.
+ _ => TyKind::Array(self.err_ty(), crate::consteval::usize_const(None))
+ .intern(Interner),
+ }
+ }
+ Expr::RecordLit { path, fields, .. } => {
+ let subs = fields.iter().map(|f| (f.name.clone(), f.expr));
+
+ self.infer_record_pat_like(path.as_deref(), &rhs_ty, (), lhs.into(), subs)
+ }
+ Expr::Underscore => rhs_ty.clone(),
+ _ => {
+ // `lhs` is a place expression, a unit struct, or an enum variant.
+ let lhs_ty = self.infer_expr(lhs, &Expectation::none());
+
+ // This is the only branch where this function may coerce any type.
+ // We are returning early to avoid the unifiability check below.
+ let lhs_ty = self.insert_type_vars_shallow(lhs_ty);
+ let ty = match self.coerce(None, &rhs_ty, &lhs_ty) {
+ Ok(ty) => ty,
+ Err(_) => {
+ self.result.type_mismatches.insert(
+ lhs.into(),
+ TypeMismatch { expected: rhs_ty.clone(), actual: lhs_ty.clone() },
+ );
+ // `rhs_ty` is returned so no further type mismatches are
+ // reported because of this mismatch.
+ rhs_ty
+ }
+ };
+ self.write_expr_ty(lhs, ty.clone());
+ return ty;
+ }
+ };
+
+ // Destructuring arms fall through to a plain unifiability check
+ // (no coercion), recording a mismatch diagnostic on failure.
+ let ty = self.insert_type_vars_shallow(ty);
+ if !self.unify(&ty, &rhs_ty) {
+ self.result
+ .type_mismatches
+ .insert(lhs.into(), TypeMismatch { expected: rhs_ty.clone(), actual: ty.clone() });
+ }
+ self.write_expr_ty(lhs, ty.clone());
+ ty
+ }
+
+ /// Infers the type of a binary operation that may be overloaded via a
+ /// `std::ops` lang-item trait (e.g. `Add`/`AddAssign`/`PartialOrd`).
+ ///
+ /// The LHS is inferred first with no expectation and the RHS gets a fresh
+ /// type variable. If no trait method resolves for the operator, typing
+ /// falls back to the built-in operator rules; if one does, the method is
+ /// recorded as the resolution for `tgt_expr` and its signature drives the
+ /// RHS expectation and return type. Built-in operator knowledge is also
+ /// unified in at the end to help inference along.
+ fn infer_overloadable_binop(
+ &mut self,
+ lhs: ExprId,
+ op: BinaryOp,
+ rhs: ExprId,
+ tgt_expr: ExprId,
+ ) -> Ty {
+ let lhs_expectation = Expectation::none();
+ let lhs_ty = self.infer_expr(lhs, &lhs_expectation);
+ let rhs_ty = self.table.new_type_var();
+
-
- fn resolve_binop_method(&self, op: BinaryOp) -> Option<FunctionId> {
- let (name, lang_item) = match op {
- BinaryOp::LogicOp(_) => return None,
- BinaryOp::ArithOp(aop) => match aop {
- ArithOp::Add => (name!(add), name!(add)),
- ArithOp::Mul => (name!(mul), name!(mul)),
- ArithOp::Sub => (name!(sub), name!(sub)),
- ArithOp::Div => (name!(div), name!(div)),
- ArithOp::Rem => (name!(rem), name!(rem)),
- ArithOp::Shl => (name!(shl), name!(shl)),
- ArithOp::Shr => (name!(shr), name!(shr)),
- ArithOp::BitXor => (name!(bitxor), name!(bitxor)),
- ArithOp::BitOr => (name!(bitor), name!(bitor)),
- ArithOp::BitAnd => (name!(bitand), name!(bitand)),
- },
- BinaryOp::Assignment { op: Some(aop) } => match aop {
- ArithOp::Add => (name!(add_assign), name!(add_assign)),
- ArithOp::Mul => (name!(mul_assign), name!(mul_assign)),
- ArithOp::Sub => (name!(sub_assign), name!(sub_assign)),
- ArithOp::Div => (name!(div_assign), name!(div_assign)),
- ArithOp::Rem => (name!(rem_assign), name!(rem_assign)),
- ArithOp::Shl => (name!(shl_assign), name!(shl_assign)),
- ArithOp::Shr => (name!(shr_assign), name!(shr_assign)),
- ArithOp::BitXor => (name!(bitxor_assign), name!(bitxor_assign)),
- ArithOp::BitOr => (name!(bitor_assign), name!(bitor_assign)),
- ArithOp::BitAnd => (name!(bitand_assign), name!(bitand_assign)),
- },
- BinaryOp::CmpOp(cop) => match cop {
- CmpOp::Eq { negated: false } => (name!(eq), name!(eq)),
- CmpOp::Eq { negated: true } => (name!(ne), name!(eq)),
- CmpOp::Ord { ordering: Ordering::Less, strict: false } => {
- (name!(le), name!(partial_ord))
- }
- CmpOp::Ord { ordering: Ordering::Less, strict: true } => {
- (name!(lt), name!(partial_ord))
- }
- CmpOp::Ord { ordering: Ordering::Greater, strict: false } => {
- (name!(ge), name!(partial_ord))
- }
- CmpOp::Ord { ordering: Ordering::Greater, strict: true } => {
- (name!(gt), name!(partial_ord))
- }
- },
- BinaryOp::Assignment { op: None } => return None,
- };
-
- let trait_ = self.resolve_lang_item(lang_item)?.as_trait()?;
-
- self.db.trait_data(trait_).method_by_name(&name)
- }
+ // Map the operator to its (method name, lang item) pair and look the
+ // trait method up; replaces the old inline `resolve_binop_method`.
++ let func = lang_names_for_bin_op(op).and_then(|(name, lang_item)| {
++ self.db.trait_data(self.resolve_lang_item(lang_item)?.as_trait()?).method_by_name(&name)
++ });
+ let func = match func {
+ Some(func) => func,
+ None => {
+ // No overloaded method: fall back to built-in operator typing,
+ // inferring the RHS against the built-in expectation (if any).
+ let rhs_ty = self.builtin_binary_op_rhs_expectation(op, lhs_ty.clone());
+ let rhs_ty = self.infer_expr_coerce(rhs, &Expectation::from_option(rhs_ty));
+ return self
+ .builtin_binary_op_return_ty(op, lhs_ty, rhs_ty)
+ .unwrap_or_else(|| self.err_ty());
+ }
+ };
+
+ // Substitute Self = lhs_ty, Rhs = rhs_ty into the trait method.
+ let subst = TyBuilder::subst_for_def(self.db, func)
+ .push(lhs_ty.clone())
+ .push(rhs_ty.clone())
+ .build();
+ self.write_method_resolution(tgt_expr, func, subst.clone());
+
+ let method_ty = self.db.value_ty(func.into()).substitute(Interner, &subst);
+ self.register_obligations_for_call(&method_ty);
+
+ self.infer_expr_coerce(rhs, &Expectation::has_type(rhs_ty.clone()));
+
+ let ret_ty = match method_ty.callable_sig(self.db) {
+ Some(sig) => sig.ret().clone(),
+ None => self.err_ty(),
+ };
+
+ let ret_ty = self.normalize_associated_types_in(ret_ty);
+
+ // FIXME: record autoref adjustments
+
+ // use knowledge of built-in binary ops, which can sometimes help inference
+ if let Some(builtin_rhs) = self.builtin_binary_op_rhs_expectation(op, lhs_ty.clone()) {
+ self.unify(&builtin_rhs, &rhs_ty);
+ }
+ if let Some(builtin_ret) = self.builtin_binary_op_return_ty(op, lhs_ty, rhs_ty) {
+ self.unify(&builtin_ret, &ret_ty);
+ }
+
+ ret_ty
+ }
+
+ /// Infers the type of a block body: each statement in order, then the
+ /// tail expression (if any) coerced to `expected`.
+ ///
+ /// `let` statements use the declared type when given, falling back to the
+ /// initializer's type when the declaration is unknown; a `let .. else`
+ /// branch is checked against `!`. A block with no tail expression is `()`
+ /// coerced to the expectation — unless the block already diverges, in
+ /// which case a fresh "maybe never" variable is returned without
+ /// attempting any coercion.
+ fn infer_block(
+ &mut self,
+ expr: ExprId,
+ statements: &[Statement],
+ tail: Option<ExprId>,
+ expected: &Expectation,
+ ) -> Ty {
+ for stmt in statements {
+ match stmt {
+ Statement::Let { pat, type_ref, initializer, else_branch } => {
+ let decl_ty = type_ref
+ .as_ref()
+ .map(|tr| self.make_ty(tr))
+ .unwrap_or_else(|| self.err_ty());
+
+ // Always use the declared type when specified
+ let mut ty = decl_ty.clone();
+
+ if let Some(expr) = initializer {
+ let actual_ty =
+ self.infer_expr_coerce(*expr, &Expectation::has_type(decl_ty.clone()));
+ // Only fall back to the initializer's type when no usable
+ // declared type was written.
+ if decl_ty.is_unknown() {
+ ty = actual_ty;
+ }
+ }
+
+ if let Some(expr) = else_branch {
+ // The `else` block of `let .. else` must diverge.
+ self.infer_expr_coerce(
+ *expr,
+ &Expectation::has_type(Ty::new(Interner, TyKind::Never)),
+ );
+ }
+
+ self.infer_pat(*pat, &ty, BindingMode::default());
+ }
+ Statement::Expr { expr, .. } => {
+ self.infer_expr(*expr, &Expectation::none());
+ }
+ }
+ }
+
+ if let Some(expr) = tail {
+ self.infer_expr_coerce(expr, expected)
+ } else {
+ // Citing rustc: if there is no explicit tail expression,
+ // that is typically equivalent to a tail expression
+ // of `()` -- except if the block diverges. In that
+ // case, there is no value supplied from the tail
+ // expression (assuming there are no other breaks,
+ // this implies that the type of the block will be
+ // `!`).
+ if self.diverges.is_always() {
+ // we don't even make an attempt at coercion
+ self.table.new_maybe_never_var()
+ } else {
+ if let Some(t) = expected.only_has_type(&mut self.table) {
+ // Record a mismatch (against the whole block) when `()`
+ // cannot be coerced to the expected type.
+ if self.coerce(Some(expr), &TyBuilder::unit(), &t).is_err() {
+ self.result.type_mismatches.insert(
+ expr.into(),
+ TypeMismatch { expected: t.clone(), actual: TyBuilder::unit() },
+ );
+ }
+ t
+ } else {
+ TyBuilder::unit()
+ }
+ }
+ }
+ }
+
+ /// Infers a method call `receiver.method::<generic_args>(args)`.
+ ///
+ /// The receiver is inferred and canonicalized, then the method is looked
+ /// up through `method_resolution::lookup_method` against the traits in
+ /// scope. On success the receiver adjustments and method resolution are
+ /// recorded; on failure the error type is used and (in the
+ /// `callable_sig` `None` arm) `is_varargs = true` so the argument-count
+ /// diagnostic is suppressed. The formal `self` type is unified with the
+ /// receiver, and arguments are checked against parameter types refined by
+ /// the expected output.
+ fn infer_method_call(
+ &mut self,
+ tgt_expr: ExprId,
+ receiver: ExprId,
+ args: &[ExprId],
+ method_name: &Name,
+ generic_args: Option<&GenericArgs>,
+ expected: &Expectation,
+ ) -> Ty {
+ let receiver_ty = self.infer_expr(receiver, &Expectation::none())
+ let canonicalized_receiver = self.canonicalize(receiver_ty.clone());
+
+ let traits_in_scope = self.resolver.traits_in_scope(self.db.upcast());
+
+ let resolved = method_resolution::lookup_method(
+ &canonicalized_receiver.value,
+ self.db,
+ self.trait_env.clone(),
+ &traits_in_scope,
+ VisibleFromModule::Filter(self.resolver.module()),
+ method_name,
+ );
+ let (receiver_ty, method_ty, substs) = match resolved {
+ Some((adjust, func)) => {
+ // Apply autoderef/autoref adjustments to the receiver and
+ // record the resolution for IDE features.
+ let (ty, adjustments) = adjust.apply(&mut self.table, receiver_ty);
+ let generics = generics(self.db.upcast(), func.into());
+ let substs = self.substs_for_method_call(generics, generic_args);
+ self.write_expr_adj(receiver, adjustments);
+ self.write_method_resolution(tgt_expr, func, substs.clone());
+ (ty, self.db.value_ty(func.into()), substs)
+ }
+ None => (
+ receiver_ty,
+ Binders::empty(Interner, self.err_ty()),
+ Substitution::empty(Interner),
+ ),
+ };
+ let method_ty = method_ty.substitute(Interner, &substs);
+ self.register_obligations_for_call(&method_ty);
+ // Split the signature into (self, rest-of-params, ret, varargs flag).
+ let (formal_receiver_ty, param_tys, ret_ty, is_varargs) =
+ match method_ty.callable_sig(self.db) {
+ Some(sig) => {
+ if !sig.params().is_empty() {
+ (
+ sig.params()[0].clone(),
+ sig.params()[1..].to_vec(),
+ sig.ret().clone(),
+ sig.is_varargs,
+ )
+ } else {
+ (self.err_ty(), Vec::new(), sig.ret().clone(), sig.is_varargs)
+ }
+ }
+ None => (self.err_ty(), Vec::new(), self.err_ty(), true),
+ };
+ self.unify(&formal_receiver_ty, &receiver_ty);
+
+ let expected_inputs =
+ self.expected_inputs_for_expected_output(expected, ret_ty.clone(), param_tys.clone());
+
+ // NOTE(review): "¶m_tys" below looks like mojibake of "&param_tys"
+ // from an encoding round-trip — verify against the original file.
+ self.check_call_arguments(tgt_expr, args, &expected_inputs, ¶m_tys, &[], is_varargs);
+ self.normalize_associated_types_in(ret_ty)
+ }
+
    /// Computes refined expected argument types for a call by "fudging":
    /// inside `fudge_inference` (which does not commit inference-variable
    /// bindings), tentatively unify the expectation with the callable's
    /// return type `output`; if that succeeds, resolve the parameter types
    /// `inputs` under that tentative unification. Returns an empty `Vec`
    /// when there is no expectation or the tentative unification fails.
    fn expected_inputs_for_expected_output(
        &mut self,
        expected_output: &Expectation,
        output: Ty,
        inputs: Vec<Ty>,
    ) -> Vec<Ty> {
        if let Some(expected_ty) = expected_output.to_option(&mut self.table) {
            self.table.fudge_inference(|table| {
                if table.try_unify(&expected_ty, &output).is_ok() {
                    // Resolve as far as possible; any still-unresolved
                    // variables fall back to fresh variables of the same kind.
                    table.resolve_with_fallback(inputs, &|var, kind, _, _| match kind {
                        chalk_ir::VariableKind::Ty(tk) => var.to_ty(Interner, tk).cast(Interner),
                        chalk_ir::VariableKind::Lifetime => {
                            var.to_lifetime(Interner).cast(Interner)
                        }
                        chalk_ir::VariableKind::Const(ty) => {
                            var.to_const(Interner, ty).cast(Interner)
                        }
                    })
                } else {
                    Vec::new()
                }
            })
        } else {
            Vec::new()
        }
    }
+
+ fn check_call_arguments(
+ &mut self,
+ expr: ExprId,
+ args: &[ExprId],
+ expected_inputs: &[Ty],
+ param_tys: &[Ty],
+ skip_indices: &[u32],
+ is_varargs: bool,
+ ) {
+ if args.len() != param_tys.len() + skip_indices.len() && !is_varargs {
+ self.push_diagnostic(InferenceDiagnostic::MismatchedArgCount {
+ call_expr: expr,
+ expected: param_tys.len() + skip_indices.len(),
+ found: args.len(),
+ });
+ }
+
+ // Quoting https://github.com/rust-lang/rust/blob/6ef275e6c3cb1384ec78128eceeb4963ff788dca/src/librustc_typeck/check/mod.rs#L3325 --
+ // We do this in a pretty awful way: first we type-check any arguments
+ // that are not closures, then we type-check the closures. This is so
+ // that we have more information about the types of arguments when we
+ // type-check the functions. This isn't really the right way to do this.
+ for &check_closures in &[false, true] {
+ let mut skip_indices = skip_indices.into_iter().copied().fuse().peekable();
+ let param_iter = param_tys.iter().cloned().chain(repeat(self.err_ty()));
+ let expected_iter = expected_inputs
+ .iter()
+ .cloned()
+ .chain(param_iter.clone().skip(expected_inputs.len()));
+ for (idx, ((&arg, param_ty), expected_ty)) in
+ args.iter().zip(param_iter).zip(expected_iter).enumerate()
+ {
+ let is_closure = matches!(&self.body[arg], Expr::Closure { .. });
+ if is_closure != check_closures {
+ continue;
+ }
+
+ while skip_indices.peek().map_or(false, |i| *i < idx as u32) {
+ skip_indices.next();
+ }
+ if skip_indices.peek().copied() == Some(idx as u32) {
+ continue;
+ }
+
+ // the difference between param_ty and expected here is that
+ // expected is the parameter when the expected *return* type is
+ // taken into account. So in `let _: &[i32] = identity(&[1, 2])`
+ // the expected type is already `&[i32]`, whereas param_ty is
+ // still an unbound type variable. We don't always want to force
+ // the parameter to coerce to the expected type (for example in
+ // `coerce_unsize_expected_type_4`).
+ let param_ty = self.normalize_associated_types_in(param_ty);
+ let expected = Expectation::rvalue_hint(&mut self.table, expected_ty);
+ // infer with the expected type we have...
+ let ty = self.infer_expr_inner(arg, &expected);
+
+ // then coerce to either the expected type or just the formal parameter type
+ let coercion_target = if let Some(ty) = expected.only_has_type(&mut self.table) {
+ // if we are coercing to the expectation, unify with the
+ // formal parameter type to connect everything
+ self.unify(&ty, ¶m_ty);
+ ty
+ } else {
+ param_ty
+ };
+ if !coercion_target.is_unknown() {
+ if self.coerce(Some(arg), &ty, &coercion_target).is_err() {
+ self.result.type_mismatches.insert(
+ arg.into(),
+ TypeMismatch { expected: coercion_target, actual: ty.clone() },
+ );
+ }
+ }
+ }
+ }
+ }
+
    /// Builds the substitution for a method call: parent (impl/trait)
    /// parameters become fresh inference variables, explicitly provided
    /// generic arguments are lowered in order, and any remaining parameters
    /// are filled with fresh inference variables.
    fn substs_for_method_call(
        &mut self,
        def_generics: Generics,
        generic_args: Option<&GenericArgs>,
    ) -> Substitution {
        let (parent_params, self_params, type_params, const_params, impl_trait_params) =
            def_generics.provenance_split();
        assert_eq!(self_params, 0); // method shouldn't have another Self param
        let total_len = parent_params + type_params + const_params + impl_trait_params;
        let mut substs = Vec::with_capacity(total_len);
        // Parent arguments are unknown
        for (id, param) in def_generics.iter_parent() {
            match param {
                TypeOrConstParamData::TypeParamData(_) => {
                    substs.push(GenericArgData::Ty(self.table.new_type_var()).intern(Interner));
                }
                TypeOrConstParamData::ConstParamData(_) => {
                    let ty = self.db.const_param_ty(ConstParamId::from_unchecked(id));
                    substs
                        .push(GenericArgData::Const(self.table.new_const_var(ty)).intern(Interner));
                }
            }
        }
        // handle provided arguments
        if let Some(generic_args) = generic_args {
            // if args are provided, it should be all of them, but we can't rely on that
            // (lifetime arguments are ignored; only up to type_params +
            // const_params arguments are taken)
            for (arg, kind_id) in generic_args
                .args
                .iter()
                .filter(|arg| !matches!(arg, GenericArg::Lifetime(_)))
                .take(type_params + const_params)
                .zip(def_generics.iter_id().skip(parent_params))
            {
                if let Some(g) = generic_arg_to_chalk(
                    self.db,
                    kind_id,
                    arg,
                    self,
                    |this, type_ref| this.make_ty(type_ref),
                    |this, c, ty| {
                        // const arguments are lowered with generic params as
                        // placeholders, since we're inside a body here
                        const_or_path_to_chalk(
                            this.db,
                            &this.resolver,
                            ty,
                            c,
                            ParamLoweringMode::Placeholder,
                            || generics(this.db.upcast(), (&this.resolver).generic_def().unwrap()),
                            DebruijnIndex::INNERMOST,
                        )
                    },
                ) {
                    substs.push(g);
                }
            }
        };
        // Fill any parameters not covered above with fresh inference vars.
        for (id, data) in def_generics.iter().skip(substs.len()) {
            match data {
                TypeOrConstParamData::TypeParamData(_) => {
                    substs.push(GenericArgData::Ty(self.table.new_type_var()).intern(Interner))
                }
                TypeOrConstParamData::ConstParamData(_) => {
                    substs.push(
                        GenericArgData::Const(self.table.new_const_var(
                            self.db.const_param_ty(ConstParamId::from_unchecked(id)),
                        ))
                        .intern(Interner),
                    )
                }
            }
        }
        assert_eq!(substs.len(), total_len);
        Substitution::from_iter(Interner, substs)
    }
+
    /// Registers the where-clause obligations implied by calling a value of
    /// type `callable_ty` (only for `FnDef` types), plus — for trait
    /// methods — the obligation that the trait itself is implemented for the
    /// substituted self type.
    fn register_obligations_for_call(&mut self, callable_ty: &Ty) {
        let callable_ty = self.resolve_ty_shallow(callable_ty);
        if let TyKind::FnDef(fn_def, parameters) = callable_ty.kind(Interner) {
            let def: CallableDefId = from_chalk(self.db, *fn_def);
            let generic_predicates = self.db.generic_predicates(def.into());
            for predicate in generic_predicates.iter() {
                let (predicate, binders) = predicate
                    .clone()
                    .substitute(Interner, parameters)
                    .into_value_and_skipped_binders();
                always!(binders.len(Interner) == 0); // quantified where clauses not yet handled
                self.push_obligation(predicate.cast(Interner));
            }
            // add obligation for trait implementation, if this is a trait method
            match def {
                CallableDefId::FunctionId(f) => {
                    if let ItemContainerId::TraitId(trait_) = f.lookup(self.db.upcast()).container {
                        // construct a TraitRef
                        // (the trait's own parameters are a prefix of the
                        // function's substitution)
                        let substs = crate::subst_prefix(
                            &*parameters,
                            generics(self.db.upcast(), trait_.into()).len(),
                        );
                        self.push_obligation(
                            TraitRef { trait_id: to_chalk_trait_id(trait_), substitution: substs }
                                .cast(Interner),
                        );
                    }
                }
                CallableDefId::StructId(_) | CallableDefId::EnumVariantId(_) => {}
            }
        }
    }
+
    /// Returns the argument indices to skip.
    ///
    /// Some functions declare (via `legacy_const_generics_indices` in their
    /// function data) that certain value-argument positions are really const
    /// generic arguments. Those positions are inferred here and reported
    /// back (sorted ascending) so `check_call_arguments` skips them.
    fn check_legacy_const_generics(&mut self, callee: Ty, args: &[ExprId]) -> Box<[u32]> {
        // Only plain function definitions can carry legacy const generics.
        let (func, subst) = match callee.kind(Interner) {
            TyKind::FnDef(fn_id, subst) => {
                let callable = CallableDefId::from_chalk(self.db, *fn_id);
                let func = match callable {
                    CallableDefId::FunctionId(f) => f,
                    _ => return Default::default(),
                };
                (func, subst)
            }
            _ => return Default::default(),
        };

        let data = self.db.function_data(func);
        if data.legacy_const_generics_indices.is_empty() {
            return Default::default();
        }

        // only use legacy const generics if the param count matches with them
        if data.params.len() + data.legacy_const_generics_indices.len() != args.len() {
            if args.len() <= data.params.len() {
                return Default::default();
            } else {
                // there are more parameters than there should be without legacy
                // const params; use them
                let mut indices = data.legacy_const_generics_indices.clone();
                indices.sort();
                return indices;
            }
        }

        // check legacy const parameters
        for (subst_idx, arg_idx) in data.legacy_const_generics_indices.iter().copied().enumerate() {
            let arg = match subst.at(Interner, subst_idx).constant(Interner) {
                Some(c) => c,
                None => continue, // not a const parameter?
            };
            if arg_idx >= args.len() as u32 {
                continue;
            }
            let _ty = arg.data(Interner).ty.clone();
            let expected = Expectation::none(); // FIXME use actual const ty, when that is lowered correctly
            self.infer_expr(args[arg_idx as usize], &expected);
            // FIXME: evaluate and unify with the const
        }
        let mut indices = data.legacy_const_generics_indices.clone();
        indices.sort();
        indices
    }
+
    /// Returns the result type of a *built-in* binary operation on the given
    /// operand types (shallowly resolved first), or `None` when the built-in
    /// rules don't apply — in which case the caller falls back to
    /// trait-based operator resolution.
    fn builtin_binary_op_return_ty(&mut self, op: BinaryOp, lhs_ty: Ty, rhs_ty: Ty) -> Option<Ty> {
        let lhs_ty = self.resolve_ty_shallow(&lhs_ty);
        let rhs_ty = self.resolve_ty_shallow(&rhs_ty);
        match op {
            // Comparisons and logic ops always yield `bool`.
            BinaryOp::LogicOp(_) | BinaryOp::CmpOp(_) => {
                Some(TyKind::Scalar(Scalar::Bool).intern(Interner))
            }
            // Assignments yield `()`.
            BinaryOp::Assignment { .. } => Some(TyBuilder::unit()),
            BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) => {
                // all integer combinations are valid here
                // (shifts may mix integer types; the result has the LHS type)
                if matches!(
                    lhs_ty.kind(Interner),
                    TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_))
                        | TyKind::InferenceVar(_, TyVariableKind::Integer)
                ) && matches!(
                    rhs_ty.kind(Interner),
                    TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_))
                        | TyKind::InferenceVar(_, TyVariableKind::Integer)
                ) {
                    Some(lhs_ty)
                } else {
                    None
                }
            }
            BinaryOp::ArithOp(_) => match (lhs_ty.kind(Interner), rhs_ty.kind(Interner)) {
                // (int, int) | (uint, uint) | (float, float)
                (TyKind::Scalar(Scalar::Int(_)), TyKind::Scalar(Scalar::Int(_)))
                | (TyKind::Scalar(Scalar::Uint(_)), TyKind::Scalar(Scalar::Uint(_)))
                | (TyKind::Scalar(Scalar::Float(_)), TyKind::Scalar(Scalar::Float(_))) => {
                    Some(rhs_ty)
                }
                // ({int}, int) | ({int}, uint)
                (
                    TyKind::InferenceVar(_, TyVariableKind::Integer),
                    TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)),
                ) => Some(rhs_ty),
                // (int, {int}) | (uint, {int})
                (
                    TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_)),
                    TyKind::InferenceVar(_, TyVariableKind::Integer),
                ) => Some(lhs_ty),
                // ({float} | float)
                (
                    TyKind::InferenceVar(_, TyVariableKind::Float),
                    TyKind::Scalar(Scalar::Float(_)),
                ) => Some(rhs_ty),
                // (float, {float})
                (
                    TyKind::Scalar(Scalar::Float(_)),
                    TyKind::InferenceVar(_, TyVariableKind::Float),
                ) => Some(lhs_ty),
                // ({int}, {int}) | ({float}, {float})
                (
                    TyKind::InferenceVar(_, TyVariableKind::Integer),
                    TyKind::InferenceVar(_, TyVariableKind::Integer),
                )
                | (
                    TyKind::InferenceVar(_, TyVariableKind::Float),
                    TyKind::InferenceVar(_, TyVariableKind::Float),
                ) => Some(rhs_ty),
                _ => None,
            },
        }
    }
+
    /// Returns the type to *expect* for the right-hand side of a built-in
    /// binary operation given the left-hand side's type, or `None` when no
    /// useful expectation can be given (e.g. for shifts, whose operands may
    /// have different integer types, or operand types that require trait
    /// resolution).
    fn builtin_binary_op_rhs_expectation(&mut self, op: BinaryOp, lhs_ty: Ty) -> Option<Ty> {
        Some(match op {
            BinaryOp::LogicOp(..) => TyKind::Scalar(Scalar::Bool).intern(Interner),
            BinaryOp::Assignment { op: None } => lhs_ty,
            BinaryOp::CmpOp(CmpOp::Eq { .. }) => match self
                .resolve_ty_shallow(&lhs_ty)
                .kind(Interner)
            {
                TyKind::Scalar(_) | TyKind::Str => lhs_ty,
                TyKind::InferenceVar(_, TyVariableKind::Integer | TyVariableKind::Float) => lhs_ty,
                _ => return None,
            },
            // Shift operands may have different types — no expectation.
            BinaryOp::ArithOp(ArithOp::Shl | ArithOp::Shr) => return None,
            BinaryOp::CmpOp(CmpOp::Ord { .. })
            | BinaryOp::Assignment { op: Some(_) }
            | BinaryOp::ArithOp(_) => match self.resolve_ty_shallow(&lhs_ty).kind(Interner) {
                TyKind::Scalar(Scalar::Int(_) | Scalar::Uint(_) | Scalar::Float(_)) => lhs_ty,
                TyKind::InferenceVar(_, TyVariableKind::Integer | TyVariableKind::Float) => lhs_ty,
                _ => return None,
            },
        })
    }
+}
--- /dev/null
- res.insert(field_id, make_binders(db, &generics, ctx.lower_ty(&field_data.type_ref)))
+//! Methods for lowering the HIR to types. There are two main cases here:
+//!
+//! - Lowering a type reference like `&usize` or `Option<foo::bar::Baz>` to a
+//! type: The entry point for this is `Ty::from_hir`.
+//! - Building the type for an item: This happens through the `type_for_def` query.
+//!
+//! This usually involves resolving names, collecting generic arguments etc.
+use std::{
+ cell::{Cell, RefCell},
+ iter,
+ sync::Arc,
+};
+
+use base_db::CrateId;
+use chalk_ir::{
+ cast::Cast, fold::Shift, fold::TypeFoldable, interner::HasInterner, Mutability, Safety,
+};
+
+use hir_def::{
+ adt::StructKind,
+ body::{Expander, LowerCtx},
+ builtin_type::BuiltinType,
+ generics::{
+ TypeOrConstParamData, TypeParamProvenance, WherePredicate, WherePredicateTypeTarget,
+ },
+ intern::Interned,
+ lang_item::lang_attr,
+ path::{GenericArg, ModPath, Path, PathKind, PathSegment, PathSegments},
+ resolver::{HasResolver, Resolver, TypeNs},
+ type_ref::{
+ ConstScalarOrPath, TraitBoundModifier, TraitRef as HirTraitRef, TypeBound, TypeRef,
+ },
+ AdtId, AssocItemId, ConstId, ConstParamId, EnumId, EnumVariantId, FunctionId, GenericDefId,
+ HasModule, ImplId, ItemContainerId, LocalFieldId, Lookup, StaticId, StructId, TraitId,
+ TypeAliasId, TypeOrConstParamId, TypeParamId, UnionId, VariantId,
+};
+use hir_expand::{name::Name, ExpandResult};
+use itertools::Either;
+use la_arena::ArenaMap;
+use rustc_hash::FxHashSet;
+use smallvec::SmallVec;
+use stdx::{impl_from, never};
+use syntax::{ast, SmolStr};
+
+use crate::{
+ all_super_traits,
+ consteval::{intern_const_scalar, path_to_const, unknown_const, unknown_const_as_generic},
+ db::HirDatabase,
+ make_binders,
+ mapping::ToChalk,
+ static_lifetime, to_assoc_type_id, to_chalk_trait_id, to_placeholder_idx,
+ utils::Generics,
+ utils::{all_super_trait_refs, associated_type_by_name_including_super_traits, generics},
+ AliasEq, AliasTy, Binders, BoundVar, CallableSig, Const, DebruijnIndex, DynTy, FnPointer,
+ FnSig, FnSubst, GenericArgData, ImplTraitId, Interner, ParamKind, PolyFnSig, ProjectionTy,
+ QuantifiedWhereClause, QuantifiedWhereClauses, ReturnTypeImplTrait, ReturnTypeImplTraits,
+ Substitution, TraitEnvironment, TraitRef, TraitRefExt, Ty, TyBuilder, TyKind, WhereClause,
+};
+
/// Context for lowering HIR type references (`TypeRef`) to `Ty` values.
#[derive(Debug)]
pub struct TyLoweringContext<'a> {
    pub db: &'a dyn HirDatabase,
    pub resolver: &'a Resolver,
    /// Current binder nesting depth; bound variables created during lowering
    /// are relative to this index.
    in_binders: DebruijnIndex,
    /// Note: Conceptually, it's thinkable that we could be in a location where
    /// some type params should be represented as placeholders, and others
    /// should be converted to variables. I think in practice, this isn't
    /// possible currently, so this should be fine for now.
    pub type_param_mode: ParamLoweringMode,
    pub impl_trait_mode: ImplTraitLoweringMode,
    /// Running count of `impl Trait` types encountered, used to assign each
    /// one its index (see the `ImplTraitLoweringMode` branches in
    /// `lower_ty_ext`).
    impl_trait_counter: Cell<u16>,
    /// When turning `impl Trait` into opaque types, we have to collect the
    /// bounds at the same time to get the IDs correct (without becoming too
    /// complicated). I don't like using interior mutability (as for the
    /// counter), but I've tried and failed to make the lifetimes work for
    /// passing around a `&mut TyLoweringContext`. The core problem is that
    /// we're grouping the mutable data (the counter and this field) together
    /// with the immutable context (the references to the DB and resolver).
    /// Splitting this up would be a possible fix.
    opaque_type_data: RefCell<Vec<ReturnTypeImplTrait>>,
    /// Macro expander for `TypeRef::Macro`; its `Some`-ness also acts as the
    /// recursion guard during macro expansion (see `lower_ty_ext`).
    expander: RefCell<Option<Expander>>,
    /// Tracks types with explicit `?Sized` bounds.
    pub(crate) unsized_types: RefCell<FxHashSet<Ty>>,
}
+
+impl<'a> TyLoweringContext<'a> {
+ pub fn new(db: &'a dyn HirDatabase, resolver: &'a Resolver) -> Self {
+ let impl_trait_counter = Cell::new(0);
+ let impl_trait_mode = ImplTraitLoweringMode::Disallowed;
+ let type_param_mode = ParamLoweringMode::Placeholder;
+ let in_binders = DebruijnIndex::INNERMOST;
+ let opaque_type_data = RefCell::new(Vec::new());
+ Self {
+ db,
+ resolver,
+ in_binders,
+ impl_trait_mode,
+ impl_trait_counter,
+ type_param_mode,
+ opaque_type_data,
+ expander: RefCell::new(None),
+ unsized_types: RefCell::default(),
+ }
+ }
+
+ pub fn with_debruijn<T>(
+ &self,
+ debruijn: DebruijnIndex,
+ f: impl FnOnce(&TyLoweringContext<'_>) -> T,
+ ) -> T {
+ let opaque_ty_data_vec = self.opaque_type_data.take();
+ let expander = self.expander.take();
+ let unsized_types = self.unsized_types.take();
+ let new_ctx = Self {
+ in_binders: debruijn,
+ impl_trait_counter: Cell::new(self.impl_trait_counter.get()),
+ opaque_type_data: RefCell::new(opaque_ty_data_vec),
+ expander: RefCell::new(expander),
+ unsized_types: RefCell::new(unsized_types),
+ ..*self
+ };
+ let result = f(&new_ctx);
+ self.impl_trait_counter.set(new_ctx.impl_trait_counter.get());
+ self.opaque_type_data.replace(new_ctx.opaque_type_data.into_inner());
+ self.expander.replace(new_ctx.expander.into_inner());
+ self.unsized_types.replace(new_ctx.unsized_types.into_inner());
+ result
+ }
+
+ pub fn with_shifted_in<T>(
+ &self,
+ debruijn: DebruijnIndex,
+ f: impl FnOnce(&TyLoweringContext<'_>) -> T,
+ ) -> T {
+ self.with_debruijn(self.in_binders.shifted_in_from(debruijn), f)
+ }
+
+ pub fn with_impl_trait_mode(self, impl_trait_mode: ImplTraitLoweringMode) -> Self {
+ Self { impl_trait_mode, ..self }
+ }
+
+ pub fn with_type_param_mode(self, type_param_mode: ParamLoweringMode) -> Self {
+ Self { type_param_mode, ..self }
+ }
+}
+
/// How an `impl Trait` type should be lowered, depending on where it occurs.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ImplTraitLoweringMode {
    /// `impl Trait` gets lowered into an opaque type that doesn't unify with
    /// anything except itself. This is used in places where values flow 'out',
    /// i.e. for arguments of the function we're currently checking, and return
    /// types of functions we're calling.
    Opaque,
    /// `impl Trait` gets lowered into a type variable. Used for argument
    /// position impl Trait when inside the respective function, since it allows
    /// us to support that without Chalk.
    Param,
    /// `impl Trait` gets lowered into a variable that can unify with some
    /// type. This is used in places where values flow 'in', i.e. for arguments
    /// of functions we're calling, and the return type of the function we're
    /// currently checking.
    Variable,
    /// `impl Trait` is disallowed and will be an error.
    Disallowed,
}
+
/// How generic parameters referenced in a type should be represented in the
/// lowered `Ty`.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum ParamLoweringMode {
    /// Lower parameters to `TyKind::Placeholder` (used when the parameters
    /// are fixed from the outside, e.g. inside a body).
    Placeholder,
    /// Lower parameters to bound variables relative to the current binder
    /// depth (used when building binder-wrapped signatures).
    Variable,
}
+
+impl<'a> TyLoweringContext<'a> {
+ pub fn lower_ty(&self, type_ref: &TypeRef) -> Ty {
+ self.lower_ty_ext(type_ref).0
+ }
+
+ fn generics(&self) -> Generics {
+ generics(
+ self.db.upcast(),
+ self.resolver
+ .generic_def()
+ .expect("there should be generics if there's a generic param"),
+ )
+ }
+
    /// Lowers a `TypeRef` to a `Ty`, additionally returning the
    /// type-namespace resolution when the type was a path (`res` stays `None`
    /// otherwise). Callers use the resolution to handle `Self::Assoc`-style
    /// paths correctly.
    pub fn lower_ty_ext(&self, type_ref: &TypeRef) -> (Ty, Option<TypeNs>) {
        let mut res = None;
        let ty = match type_ref {
            TypeRef::Never => TyKind::Never.intern(Interner),
            TypeRef::Tuple(inner) => {
                let inner_tys = inner.iter().map(|tr| self.lower_ty(tr));
                TyKind::Tuple(inner_tys.len(), Substitution::from_iter(Interner, inner_tys))
                    .intern(Interner)
            }
            TypeRef::Path(path) => {
                let (ty, res_) = self.lower_path(path);
                res = res_;
                ty
            }
            TypeRef::RawPtr(inner, mutability) => {
                let inner_ty = self.lower_ty(inner);
                TyKind::Raw(lower_to_chalk_mutability(*mutability), inner_ty).intern(Interner)
            }
            TypeRef::Array(inner, len) => {
                let inner_ty = self.lower_ty(inner);
                // Lower the array length, which may be a literal or a path to
                // a const generic parameter.
                let const_len = const_or_path_to_chalk(
                    self.db,
                    self.resolver,
                    TyBuilder::usize(),
                    len,
                    self.type_param_mode,
                    || self.generics(),
                    self.in_binders,
                );

                TyKind::Array(inner_ty, const_len).intern(Interner)
            }
            TypeRef::Slice(inner) => {
                let inner_ty = self.lower_ty(inner);
                TyKind::Slice(inner_ty).intern(Interner)
            }
            TypeRef::Reference(inner, _, mutability) => {
                let inner_ty = self.lower_ty(inner);
                // FIXME-adjacent: the reference's lifetime is ignored and
                // replaced with 'static here.
                let lifetime = static_lifetime();
                TyKind::Ref(lower_to_chalk_mutability(*mutability), lifetime, inner_ty)
                    .intern(Interner)
            }
            TypeRef::Placeholder => TyKind::Error.intern(Interner),
            TypeRef::Fn(params, is_varargs) => {
                // Parameters are lowered one binder level in, matching the
                // function pointer's own (currently empty) binder.
                let substs = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
                    Substitution::from_iter(Interner, params.iter().map(|(_, tr)| ctx.lower_ty(tr)))
                });
                TyKind::Function(FnPointer {
                    num_binders: 0, // FIXME lower `for<'a> fn()` correctly
                    sig: FnSig { abi: (), safety: Safety::Safe, variadic: *is_varargs },
                    substitution: FnSubst(substs),
                })
                .intern(Interner)
            }
            TypeRef::DynTrait(bounds) => {
                // The dyn type's implicit Self is bound variable 0 of the
                // dyn binder; bounds are lowered one level in.
                let self_ty =
                    TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner);
                let bounds = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
                    QuantifiedWhereClauses::from_iter(
                        Interner,
                        bounds.iter().flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false)),
                    )
                });
                let bounds = crate::make_single_type_binders(bounds);
                TyKind::Dyn(DynTy { bounds, lifetime: static_lifetime() }).intern(Interner)
            }
            TypeRef::ImplTrait(bounds) => {
                match self.impl_trait_mode {
                    ImplTraitLoweringMode::Opaque => {
                        // Assign this `impl Trait` the next opaque-type index.
                        let idx = self.impl_trait_counter.get();
                        self.impl_trait_counter.set(idx + 1);
                        let func = match self.resolver.generic_def() {
                            Some(GenericDefId::FunctionId(f)) => f,
                            _ => panic!("opaque impl trait lowering in non-function"),
                        };

                        assert!(idx as usize == self.opaque_type_data.borrow().len());
                        // this dance is to make sure the data is in the right
                        // place even if we encounter more opaque types while
                        // lowering the bounds
                        self.opaque_type_data.borrow_mut().push(ReturnTypeImplTrait {
                            bounds: crate::make_single_type_binders(Vec::new()),
                        });
                        // We don't want to lower the bounds inside the binders
                        // we're currently in, because they don't end up inside
                        // those binders. E.g. when we have `impl Trait<impl
                        // OtherTrait<T>>`, the `impl OtherTrait<T>` can't refer
                        // to the self parameter from `impl Trait`, and the
                        // bounds aren't actually stored nested within each
                        // other, but separately. So if the `T` refers to a type
                        // parameter of the outer function, it's just one binder
                        // away instead of two.
                        let actual_opaque_type_data = self
                            .with_debruijn(DebruijnIndex::INNERMOST, |ctx| {
                                ctx.lower_impl_trait(bounds, func)
                            });
                        self.opaque_type_data.borrow_mut()[idx as usize] = actual_opaque_type_data;

                        let impl_trait_id = ImplTraitId::ReturnTypeImplTrait(func, idx);
                        let opaque_ty_id = self.db.intern_impl_trait_id(impl_trait_id).into();
                        let generics = generics(self.db.upcast(), func.into());
                        let parameters = generics.bound_vars_subst(self.db, self.in_binders);
                        TyKind::OpaqueType(opaque_ty_id, parameters).intern(Interner)
                    }
                    ImplTraitLoweringMode::Param => {
                        let idx = self.impl_trait_counter.get();
                        // FIXME we're probably doing something wrong here
                        self.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16);
                        if let Some(def) = self.resolver.generic_def() {
                            let generics = generics(self.db.upcast(), def);
                            // Find the idx-th implicit argument-impl-Trait
                            // parameter and lower to its placeholder.
                            let param = generics
                                .iter()
                                .filter(|(_, data)| {
                                    matches!(
                                        data,
                                        TypeOrConstParamData::TypeParamData(data)
                                        if data.provenance == TypeParamProvenance::ArgumentImplTrait
                                    )
                                })
                                .nth(idx as usize)
                                .map_or(TyKind::Error, |(id, _)| {
                                    TyKind::Placeholder(to_placeholder_idx(self.db, id))
                                });
                            param.intern(Interner)
                        } else {
                            TyKind::Error.intern(Interner)
                        }
                    }
                    ImplTraitLoweringMode::Variable => {
                        let idx = self.impl_trait_counter.get();
                        // FIXME we're probably doing something wrong here
                        self.impl_trait_counter.set(idx + count_impl_traits(type_ref) as u16);
                        // Impl-trait params come after all other parameter
                        // kinds, so offset the bound-var index accordingly.
                        let (
                            parent_params,
                            self_params,
                            list_params,
                            const_params,
                            _impl_trait_params,
                        ) = if let Some(def) = self.resolver.generic_def() {
                            let generics = generics(self.db.upcast(), def);
                            generics.provenance_split()
                        } else {
                            (0, 0, 0, 0, 0)
                        };
                        TyKind::BoundVar(BoundVar::new(
                            self.in_binders,
                            idx as usize + parent_params + self_params + list_params + const_params,
                        ))
                        .intern(Interner)
                    }
                    ImplTraitLoweringMode::Disallowed => {
                        // FIXME: report error
                        TyKind::Error.intern(Interner)
                    }
                }
            }
            TypeRef::Macro(macro_call) => {
                // `recursion_start` records whether *this* call installed the
                // expander; only the outermost expansion clears it at the end.
                let (expander, recursion_start) = {
                    let mut expander = self.expander.borrow_mut();
                    if expander.is_some() {
                        (Some(expander), false)
                    } else {
                        *expander = Some(Expander::new(
                            self.db.upcast(),
                            macro_call.file_id,
                            self.resolver.module(),
                        ));
                        (Some(expander), true)
                    }
                };
                let ty = if let Some(mut expander) = expander {
                    let expander_mut = expander.as_mut().unwrap();
                    let macro_call = macro_call.to_node(self.db.upcast());
                    match expander_mut.enter_expand::<ast::Type>(self.db.upcast(), macro_call) {
                        Ok(ExpandResult { value: Some((mark, expanded)), .. }) => {
                            let ctx =
                                LowerCtx::new(self.db.upcast(), expander_mut.current_file_id());
                            let type_ref = TypeRef::from_ast(&ctx, expanded);

                            // Release the borrow before recursively lowering,
                            // which may expand further macros.
                            drop(expander);
                            let ty = self.lower_ty(&type_ref);

                            self.expander
                                .borrow_mut()
                                .as_mut()
                                .unwrap()
                                .exit(self.db.upcast(), mark);
                            Some(ty)
                        }
                        _ => None,
                    }
                } else {
                    None
                };
                if recursion_start {
                    *self.expander.borrow_mut() = None;
                }
                ty.unwrap_or_else(|| TyKind::Error.intern(Interner))
            }
            TypeRef::Error => TyKind::Error.intern(Interner),
        };
        (ty, res)
    }
+
    /// This is only for `generic_predicates_for_param`, where we can't just
    /// lower the self types of the predicates since that could lead to cycles.
    /// So we just check here if the `type_ref` resolves to a generic param, and which.
    fn lower_ty_only_param(&self, type_ref: &TypeRef) -> Option<TypeOrConstParamId> {
        let path = match type_ref {
            TypeRef::Path(path) => path,
            _ => return None,
        };
        // Anchored (`<T>::...`) or multi-segment paths can't be a bare
        // generic parameter.
        if path.type_anchor().is_some() {
            return None;
        }
        if path.segments().len() > 1 {
            return None;
        }
        let resolution =
            match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
                Some((it, None)) => it,
                _ => return None,
            };
        match resolution {
            TypeNs::GenericParam(param_id) => Some(param_id.into()),
            _ => None,
        }
    }
+
    /// Lowers the segments of a path that remain after its type portion has
    /// already been lowered to `ty` — i.e. resolves `ty::Assoc` when exactly
    /// one segment remains.
    pub(crate) fn lower_ty_relative_path(
        &self,
        ty: Ty,
        // We need the original resolution to lower `Self::AssocTy` correctly
        res: Option<TypeNs>,
        remaining_segments: PathSegments<'_>,
    ) -> (Ty, Option<TypeNs>) {
        match remaining_segments.len() {
            0 => (ty, res),
            1 => {
                // resolve unselected assoc types
                let segment = remaining_segments.first().unwrap();
                (self.select_associated_type(res, segment), None)
            }
            _ => {
                // FIXME report error (ambiguous associated type)
                (TyKind::Error.intern(Interner), None)
            }
        }
    }
+
    /// Lowers a path whose first portion resolved to `resolution`
    /// (`resolved_segment` being the segment that resolved), then lowers any
    /// `remaining_segments` relative to the result.
    pub(crate) fn lower_partly_resolved_path(
        &self,
        resolution: TypeNs,
        resolved_segment: PathSegment<'_>,
        remaining_segments: PathSegments<'_>,
        infer_args: bool,
    ) -> (Ty, Option<TypeNs>) {
        let ty = match resolution {
            TypeNs::TraitId(trait_) => {
                let ty = match remaining_segments.len() {
                    1 => {
                        // `Trait::Assoc` — project the associated type.
                        let trait_ref =
                            self.lower_trait_ref_from_resolved_path(trait_, resolved_segment, None);
                        let segment = remaining_segments.first().unwrap();
                        let found = self
                            .db
                            .trait_data(trait_ref.hir_trait_id())
                            .associated_type_by_name(segment.name);
                        match found {
                            Some(associated_ty) => {
                                // FIXME handle type parameters on the segment
                                TyKind::Alias(AliasTy::Projection(ProjectionTy {
                                    associated_ty_id: to_assoc_type_id(associated_ty),
                                    substitution: trait_ref.substitution,
                                }))
                                .intern(Interner)
                            }
                            None => {
                                // FIXME: report error (associated type not found)
                                TyKind::Error.intern(Interner)
                            }
                        }
                    }
                    0 => {
                        // A bare trait in type position: lower to a `dyn`
                        // type whose Self is bound variable 0.
                        let self_ty = Some(
                            TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0))
                                .intern(Interner),
                        );
                        let trait_ref = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
                            ctx.lower_trait_ref_from_resolved_path(
                                trait_,
                                resolved_segment,
                                self_ty,
                            )
                        });
                        let dyn_ty = DynTy {
                            bounds: crate::make_single_type_binders(
                                QuantifiedWhereClauses::from_iter(
                                    Interner,
                                    Some(crate::wrap_empty_binders(WhereClause::Implemented(
                                        trait_ref,
                                    ))),
                                ),
                            ),
                            lifetime: static_lifetime(),
                        };
                        TyKind::Dyn(dyn_ty).intern(Interner)
                    }
                    _ => {
                        // FIXME report error (ambiguous associated type)
                        TyKind::Error.intern(Interner)
                    }
                };
                return (ty, None);
            }
            TypeNs::GenericParam(param_id) => {
                let generics = generics(
                    self.db.upcast(),
                    self.resolver.generic_def().expect("generics in scope"),
                );
                // Represent the parameter per the current lowering mode.
                match self.type_param_mode {
                    ParamLoweringMode::Placeholder => {
                        TyKind::Placeholder(to_placeholder_idx(self.db, param_id.into()))
                    }
                    ParamLoweringMode::Variable => {
                        let idx = generics.param_idx(param_id.into()).expect("matching generics");
                        TyKind::BoundVar(BoundVar::new(self.in_binders, idx))
                    }
                }
                .intern(Interner)
            }
            TypeNs::SelfType(impl_id) => {
                // `Self` inside an impl: the impl's self type, substituted
                // with the impl's own generics in the current mode.
                let generics = generics(self.db.upcast(), impl_id.into());
                let substs = match self.type_param_mode {
                    ParamLoweringMode::Placeholder => generics.placeholder_subst(self.db),
                    ParamLoweringMode::Variable => {
                        generics.bound_vars_subst(self.db, self.in_binders)
                    }
                };
                self.db.impl_self_ty(impl_id).substitute(Interner, &substs)
            }
            TypeNs::AdtSelfType(adt) => {
                // `Self` inside an ADT definition.
                let generics = generics(self.db.upcast(), adt.into());
                let substs = match self.type_param_mode {
                    ParamLoweringMode::Placeholder => generics.placeholder_subst(self.db),
                    ParamLoweringMode::Variable => {
                        generics.bound_vars_subst(self.db, self.in_binders)
                    }
                };
                self.db.ty(adt.into()).substitute(Interner, &substs)
            }

            TypeNs::AdtId(it) => self.lower_path_inner(resolved_segment, it.into(), infer_args),
            TypeNs::BuiltinType(it) => {
                self.lower_path_inner(resolved_segment, it.into(), infer_args)
            }
            TypeNs::TypeAliasId(it) => {
                self.lower_path_inner(resolved_segment, it.into(), infer_args)
            }
            // FIXME: report error
            TypeNs::EnumVariantId(_) => return (TyKind::Error.intern(Interner), None),
        };
        self.lower_ty_relative_path(ty, Some(resolution), remaining_segments)
    }
+
    /// Lowers a path in type position: resolves it in the type namespace and
    /// delegates the partially/fully resolved parts to
    /// `lower_partly_resolved_path` / `lower_ty_relative_path`.
    pub(crate) fn lower_path(&self, path: &Path) -> (Ty, Option<TypeNs>) {
        // Resolve the path (in type namespace)
        if let Some(type_ref) = path.type_anchor() {
            // `<T>::Segment...` — lower the anchor type, then the segments
            // relative to it.
            let (ty, res) = self.lower_ty_ext(type_ref);
            return self.lower_ty_relative_path(ty, res, path.segments());
        }
        let (resolution, remaining_index) =
            match self.resolver.resolve_path_in_type_ns(self.db.upcast(), path.mod_path()) {
                Some(it) => it,
                None => return (TyKind::Error.intern(Interner), None),
            };
        // Split the path at the point up to which it resolved.
        let (resolved_segment, remaining_segments) = match remaining_index {
            None => (
                path.segments().last().expect("resolved path has at least one element"),
                PathSegments::EMPTY,
            ),
            Some(i) => (path.segments().get(i - 1).unwrap(), path.segments().skip(i)),
        };
        self.lower_partly_resolved_path(resolution, resolved_segment, remaining_segments, false)
    }
+
    /// Resolves an associated-type shorthand such as `T::Assoc` or
    /// `Self::Assoc`, where `res` is the resolution of the qualifier and
    /// `segment` names the associated type. Returns `TyKind::Error` when
    /// nothing matches.
    fn select_associated_type(&self, res: Option<TypeNs>, segment: PathSegment<'_>) -> Ty {
        let (def, res) = match (self.resolver.generic_def(), res) {
            (Some(def), Some(res)) => (def, res),
            _ => return TyKind::Error.intern(Interner),
        };
        let ty = named_associated_type_shorthand_candidates(
            self.db,
            def,
            res,
            Some(segment.name.clone()),
            move |name, t, associated_ty| {
                if name == segment.name {
                    let substs = match self.type_param_mode {
                        ParamLoweringMode::Placeholder => {
                            // if we're lowering to placeholders, we have to put
                            // them in now
                            let generics = generics(
                                self.db.upcast(),
                                self.resolver
                                    .generic_def()
                                    .expect("there should be generics if there's a generic param"),
                            );
                            let s = generics.placeholder_subst(self.db);
                            s.apply(t.substitution.clone(), Interner)
                        }
                        ParamLoweringMode::Variable => t.substitution.clone(),
                    };
                    // We need to shift in the bound vars, since
                    // associated_type_shorthand_candidates does not do that
                    let substs = substs.shifted_in_from(Interner, self.in_binders);
                    // FIXME handle type parameters on the segment
                    Some(
                        TyKind::Alias(AliasTy::Projection(ProjectionTy {
                            associated_ty_id: to_assoc_type_id(associated_ty),
                            substitution: substs,
                        }))
                        .intern(Interner),
                    )
                } else {
                    None
                }
            },
        );

        ty.unwrap_or_else(|| TyKind::Error.intern(Interner))
    }
+
+ fn lower_path_inner(
+ &self,
+ segment: PathSegment<'_>,
+ typeable: TyDefId,
+ infer_args: bool,
+ ) -> Ty {
+ let generic_def = match typeable {
+ TyDefId::BuiltinType(_) => None,
+ TyDefId::AdtId(it) => Some(it.into()),
+ TyDefId::TypeAliasId(it) => Some(it.into()),
+ };
+ let substs = self.substs_from_path_segment(segment, generic_def, infer_args, None);
+ self.db.ty(typeable).substitute(Interner, &substs)
+ }
+
+ /// Collect generic arguments from a path into a `Substs`. See also
+ /// `create_substs_for_ast_path` and `def_to_ty` in rustc.
+ pub(super) fn substs_from_path(
+ &self,
+ path: &Path,
+ // Note that we don't call `db.value_type(resolved)` here,
+ // `ValueTyDefId` is just a convenient way to pass generics and
+ // special-case enum variants
+ resolved: ValueTyDefId,
+ infer_args: bool,
+ ) -> Substitution {
+ let last = path.segments().last().expect("path should have at least one segment");
+ let (segment, generic_def) = match resolved {
+ ValueTyDefId::FunctionId(it) => (last, Some(it.into())),
+ ValueTyDefId::StructId(it) => (last, Some(it.into())),
+ ValueTyDefId::UnionId(it) => (last, Some(it.into())),
+ ValueTyDefId::ConstId(it) => (last, Some(it.into())),
+ ValueTyDefId::StaticId(_) => (last, None),
+ ValueTyDefId::EnumVariantId(var) => {
+ // the generic args for an enum variant may be either specified
+ // on the segment referring to the enum, or on the segment
+ // referring to the variant. So `Option::<T>::None` and
+ // `Option::None::<T>` are both allowed (though the former is
+ // preferred). See also `def_ids_for_path_segments` in rustc.
+ let len = path.segments().len();
+ let penultimate = len.checked_sub(2).and_then(|idx| path.segments().get(idx));
+ let segment = match penultimate {
+ Some(segment) if segment.args_and_bindings.is_some() => segment,
+ _ => last,
+ };
+ (segment, Some(var.parent.into()))
+ }
+ };
+ self.substs_from_path_segment(segment, generic_def, infer_args, None)
+ }
+
+ fn substs_from_path_segment(
+ &self,
+ segment: PathSegment<'_>,
+ def_generic: Option<GenericDefId>,
+ infer_args: bool,
+ explicit_self_ty: Option<Ty>,
+ ) -> Substitution {
+ let mut substs = Vec::new();
+ let def_generics = if let Some(def) = def_generic {
+ generics(self.db.upcast(), def)
+ } else {
+ return Substitution::empty(Interner);
+ };
+ let (parent_params, self_params, type_params, const_params, impl_trait_params) =
+ def_generics.provenance_split();
+ let total_len =
+ parent_params + self_params + type_params + const_params + impl_trait_params;
+
+ let ty_error = GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner);
+
+ let mut def_generic_iter = def_generics.iter_id();
+
+ for _ in 0..parent_params {
+ if let Some(eid) = def_generic_iter.next() {
+ match eid {
+ Either::Left(_) => substs.push(ty_error.clone()),
+ Either::Right(x) => {
+ substs.push(unknown_const_as_generic(self.db.const_param_ty(x)))
+ }
+ }
+ }
+ }
+
+ let fill_self_params = || {
+ for x in explicit_self_ty
+ .into_iter()
+ .map(|x| GenericArgData::Ty(x).intern(Interner))
+ .chain(iter::repeat(ty_error.clone()))
+ .take(self_params)
+ {
+ if let Some(id) = def_generic_iter.next() {
+ assert!(id.is_left());
+ substs.push(x);
+ }
+ }
+ };
+ let mut had_explicit_args = false;
+
+ if let Some(generic_args) = &segment.args_and_bindings {
+ if !generic_args.has_self_type {
+ fill_self_params();
+ }
+ let expected_num = if generic_args.has_self_type {
+ self_params + type_params + const_params
+ } else {
+ type_params + const_params
+ };
+ let skip = if generic_args.has_self_type && self_params == 0 { 1 } else { 0 };
+ // if args are provided, it should be all of them, but we can't rely on that
+ for arg in generic_args
+ .args
+ .iter()
+ .filter(|arg| !matches!(arg, GenericArg::Lifetime(_)))
+ .skip(skip)
+ .take(expected_num)
+ {
+ if let Some(id) = def_generic_iter.next() {
+ if let Some(x) = generic_arg_to_chalk(
+ self.db,
+ id,
+ arg,
+ &mut (),
+ |_, type_ref| self.lower_ty(type_ref),
+ |_, c, ty| {
+ const_or_path_to_chalk(
+ self.db,
+ &self.resolver,
+ ty,
+ c,
+ self.type_param_mode,
+ || self.generics(),
+ self.in_binders,
+ )
+ },
+ ) {
+ had_explicit_args = true;
+ substs.push(x);
+ } else {
+ // we just filtered them out
+ never!("Unexpected lifetime argument");
+ }
+ }
+ }
+ } else {
+ fill_self_params();
+ }
+
+ // handle defaults. In expression or pattern path segments without
+ // explicitly specified type arguments, missing type arguments are inferred
+ // (i.e. defaults aren't used).
+ if !infer_args || had_explicit_args {
+ if let Some(def_generic) = def_generic {
+ let defaults = self.db.generic_defaults(def_generic);
+ assert_eq!(total_len, defaults.len());
+
+ for default_ty in defaults.iter().skip(substs.len()) {
+ // each default can depend on the previous parameters
+ let substs_so_far = Substitution::from_iter(Interner, substs.clone());
+ if let Some(_id) = def_generic_iter.next() {
+ substs.push(default_ty.clone().substitute(Interner, &substs_so_far));
+ }
+ }
+ }
+ }
+
+ // add placeholders for args that were not provided
+ // FIXME: emit diagnostics in contexts where this is not allowed
+ for eid in def_generic_iter {
+ match eid {
+ Either::Left(_) => substs.push(ty_error.clone()),
+ Either::Right(x) => {
+ substs.push(unknown_const_as_generic(self.db.const_param_ty(x)))
+ }
+ }
+ }
+ // If this assert fails, it means you pushed into subst but didn't call .next() of def_generic_iter
+ assert_eq!(substs.len(), total_len);
+
+ Substitution::from_iter(Interner, substs)
+ }
+
+ fn lower_trait_ref_from_path(
+ &self,
+ path: &Path,
+ explicit_self_ty: Option<Ty>,
+ ) -> Option<TraitRef> {
+ let resolved =
+ match self.resolver.resolve_path_in_type_ns_fully(self.db.upcast(), path.mod_path())? {
+ TypeNs::TraitId(tr) => tr,
+ _ => return None,
+ };
+ let segment = path.segments().last().expect("path should have at least one segment");
+ Some(self.lower_trait_ref_from_resolved_path(resolved, segment, explicit_self_ty))
+ }
+
+ pub(crate) fn lower_trait_ref_from_resolved_path(
+ &self,
+ resolved: TraitId,
+ segment: PathSegment<'_>,
+ explicit_self_ty: Option<Ty>,
+ ) -> TraitRef {
+ let substs = self.trait_ref_substs_from_path(segment, resolved, explicit_self_ty);
+ TraitRef { trait_id: to_chalk_trait_id(resolved), substitution: substs }
+ }
+
+ fn lower_trait_ref(
+ &self,
+ trait_ref: &HirTraitRef,
+ explicit_self_ty: Option<Ty>,
+ ) -> Option<TraitRef> {
+ self.lower_trait_ref_from_path(&trait_ref.path, explicit_self_ty)
+ }
+
+ fn trait_ref_substs_from_path(
+ &self,
+ segment: PathSegment<'_>,
+ resolved: TraitId,
+ explicit_self_ty: Option<Ty>,
+ ) -> Substitution {
+ self.substs_from_path_segment(segment, Some(resolved.into()), false, explicit_self_ty)
+ }
+
+ pub(crate) fn lower_where_predicate(
+ &'a self,
+ where_predicate: &'a WherePredicate,
+ ignore_bindings: bool,
+ ) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
+ match where_predicate {
+ WherePredicate::ForLifetime { target, bound, .. }
+ | WherePredicate::TypeBound { target, bound } => {
+ let self_ty = match target {
+ WherePredicateTypeTarget::TypeRef(type_ref) => self.lower_ty(type_ref),
+ WherePredicateTypeTarget::TypeOrConstParam(param_id) => {
+ let generic_def = self.resolver.generic_def().expect("generics in scope");
+ let generics = generics(self.db.upcast(), generic_def);
+ let param_id = hir_def::TypeOrConstParamId {
+ parent: generic_def,
+ local_id: *param_id,
+ };
+ let placeholder = to_placeholder_idx(self.db, param_id);
+ match self.type_param_mode {
+ ParamLoweringMode::Placeholder => TyKind::Placeholder(placeholder),
+ ParamLoweringMode::Variable => {
+ let idx = generics.param_idx(param_id).expect("matching generics");
+ TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, idx))
+ }
+ }
+ .intern(Interner)
+ }
+ };
+ self.lower_type_bound(bound, self_ty, ignore_bindings)
+ .collect::<Vec<_>>()
+ .into_iter()
+ }
+ WherePredicate::Lifetime { .. } => vec![].into_iter(),
+ }
+ }
+
+ pub(crate) fn lower_type_bound(
+ &'a self,
+ bound: &'a TypeBound,
+ self_ty: Ty,
+ ignore_bindings: bool,
+ ) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
+ let mut bindings = None;
+ let trait_ref = match bound {
+ TypeBound::Path(path, TraitBoundModifier::None) => {
+ bindings = self.lower_trait_ref_from_path(path, Some(self_ty));
+ bindings
+ .clone()
+ .filter(|tr| {
+ // ignore `T: Drop` or `T: Destruct` bounds.
+ // - `T: ~const Drop` has a special meaning in Rust 1.61 that we don't implement.
+ // (So ideally, we'd only ignore `~const Drop` here)
+ // - `Destruct` impls are built-in in 1.62 (current nightlies as of 08-04-2022), so until
+ // the builtin impls are supported by Chalk, we ignore them here.
+ if let Some(lang) = lang_attr(self.db.upcast(), tr.hir_trait_id()) {
+ if lang == "drop" || lang == "destruct" {
+ return false;
+ }
+ }
+ true
+ })
+ .map(WhereClause::Implemented)
+ .map(crate::wrap_empty_binders)
+ }
+ TypeBound::Path(path, TraitBoundModifier::Maybe) => {
+ let sized_trait = self
+ .db
+ .lang_item(self.resolver.krate(), SmolStr::new_inline("sized"))
+ .and_then(|lang_item| lang_item.as_trait());
+ // Don't lower associated type bindings as the only possible relaxed trait bound
+ // `?Sized` has no of them.
+ // If we got another trait here ignore the bound completely.
+ let trait_id = self
+ .lower_trait_ref_from_path(path, Some(self_ty.clone()))
+ .map(|trait_ref| trait_ref.hir_trait_id());
+ if trait_id == sized_trait {
+ self.unsized_types.borrow_mut().insert(self_ty);
+ }
+ None
+ }
+ TypeBound::ForLifetime(_, path) => {
+ // FIXME Don't silently drop the hrtb lifetimes here
+ bindings = self.lower_trait_ref_from_path(path, Some(self_ty));
+ bindings.clone().map(WhereClause::Implemented).map(crate::wrap_empty_binders)
+ }
+ TypeBound::Lifetime(_) => None,
+ TypeBound::Error => None,
+ };
+ trait_ref.into_iter().chain(
+ bindings
+ .into_iter()
+ .filter(move |_| !ignore_bindings)
+ .flat_map(move |tr| self.assoc_type_bindings_from_type_bound(bound, tr)),
+ )
+ }
+
+ fn assoc_type_bindings_from_type_bound(
+ &'a self,
+ bound: &'a TypeBound,
+ trait_ref: TraitRef,
+ ) -> impl Iterator<Item = QuantifiedWhereClause> + 'a {
+ let last_segment = match bound {
+ TypeBound::Path(path, TraitBoundModifier::None) | TypeBound::ForLifetime(_, path) => {
+ path.segments().last()
+ }
+ TypeBound::Path(_, TraitBoundModifier::Maybe)
+ | TypeBound::Error
+ | TypeBound::Lifetime(_) => None,
+ };
+ last_segment
+ .into_iter()
+ .filter_map(|segment| segment.args_and_bindings)
+ .flat_map(|args_and_bindings| &args_and_bindings.bindings)
+ .flat_map(move |binding| {
+ let found = associated_type_by_name_including_super_traits(
+ self.db,
+ trait_ref.clone(),
+ &binding.name,
+ );
+ let (super_trait_ref, associated_ty) = match found {
+ None => return SmallVec::new(),
+ Some(t) => t,
+ };
+ let projection_ty = ProjectionTy {
+ associated_ty_id: to_assoc_type_id(associated_ty),
+ substitution: super_trait_ref.substitution,
+ };
+ let mut preds: SmallVec<[_; 1]> = SmallVec::with_capacity(
+ binding.type_ref.as_ref().map_or(0, |_| 1) + binding.bounds.len(),
+ );
+ if let Some(type_ref) = &binding.type_ref {
+ let ty = self.lower_ty(type_ref);
+ let alias_eq =
+ AliasEq { alias: AliasTy::Projection(projection_ty.clone()), ty };
+ preds.push(crate::wrap_empty_binders(WhereClause::AliasEq(alias_eq)));
+ }
+ for bound in &binding.bounds {
+ preds.extend(self.lower_type_bound(
+ bound,
+ TyKind::Alias(AliasTy::Projection(projection_ty.clone())).intern(Interner),
+ false,
+ ));
+ }
+ preds
+ })
+ }
+
+ fn lower_impl_trait(
+ &self,
+ bounds: &[Interned<TypeBound>],
+ func: FunctionId,
+ ) -> ReturnTypeImplTrait {
+ cov_mark::hit!(lower_rpit);
+ let self_ty = TyKind::BoundVar(BoundVar::new(DebruijnIndex::INNERMOST, 0)).intern(Interner);
+ let predicates = self.with_shifted_in(DebruijnIndex::ONE, |ctx| {
+ let mut predicates: Vec<_> = bounds
+ .iter()
+ .flat_map(|b| ctx.lower_type_bound(b, self_ty.clone(), false))
+ .collect();
+
+ if !ctx.unsized_types.borrow().contains(&self_ty) {
+ let krate = func.lookup(ctx.db.upcast()).module(ctx.db.upcast()).krate();
+ let sized_trait = ctx
+ .db
+ .lang_item(krate, SmolStr::new_inline("sized"))
+ .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
+ let sized_clause = sized_trait.map(|trait_id| {
+ let clause = WhereClause::Implemented(TraitRef {
+ trait_id,
+ substitution: Substitution::from1(Interner, self_ty.clone()),
+ });
+ crate::wrap_empty_binders(clause)
+ });
+ predicates.extend(sized_clause.into_iter());
+ predicates.shrink_to_fit();
+ }
+ predicates
+ });
+ ReturnTypeImplTrait { bounds: crate::make_single_type_binders(predicates) }
+ }
+}
+
+fn count_impl_traits(type_ref: &TypeRef) -> usize {
+ let mut count = 0;
+ type_ref.walk(&mut |type_ref| {
+ if matches!(type_ref, TypeRef::ImplTrait(_)) {
+ count += 1;
+ }
+ });
+ count
+}
+
+/// Build the signature of a callable item (function, struct or enum variant).
+pub(crate) fn callable_item_sig(db: &dyn HirDatabase, def: CallableDefId) -> PolyFnSig {
+ match def {
+ CallableDefId::FunctionId(f) => fn_sig_for_fn(db, f),
+ CallableDefId::StructId(s) => fn_sig_for_struct_constructor(db, s),
+ CallableDefId::EnumVariantId(e) => fn_sig_for_enum_variant_constructor(db, e),
+ }
+}
+
+pub fn associated_type_shorthand_candidates<R>(
+ db: &dyn HirDatabase,
+ def: GenericDefId,
+ res: TypeNs,
+ cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option<R>,
+) -> Option<R> {
+ named_associated_type_shorthand_candidates(db, def, res, None, cb)
+}
+
+fn named_associated_type_shorthand_candidates<R>(
+ db: &dyn HirDatabase,
+ // If the type parameter is defined in an impl and we're in a method, there
+ // might be additional where clauses to consider
+ def: GenericDefId,
+ res: TypeNs,
+ assoc_name: Option<Name>,
+ mut cb: impl FnMut(&Name, &TraitRef, TypeAliasId) -> Option<R>,
+) -> Option<R> {
+ let mut search = |t| {
+ for t in all_super_trait_refs(db, t) {
+ let data = db.trait_data(t.hir_trait_id());
+
+ for (name, assoc_id) in &data.items {
+ if let AssocItemId::TypeAliasId(alias) = assoc_id {
+ if let Some(result) = cb(name, &t, *alias) {
+ return Some(result);
+ }
+ }
+ }
+ }
+ None
+ };
+
+ match res {
+ TypeNs::SelfType(impl_id) => search(
+ // we're _in_ the impl -- the binders get added back later. Correct,
+ // but it would be nice to make this more explicit
+ db.impl_trait(impl_id)?.into_value_and_skipped_binders().0,
+ ),
+ TypeNs::GenericParam(param_id) => {
+ let predicates = db.generic_predicates_for_param(def, param_id.into(), assoc_name);
+ let res = predicates.iter().find_map(|pred| match pred.skip_binders().skip_binders() {
+ // FIXME: how to correctly handle higher-ranked bounds here?
+ WhereClause::Implemented(tr) => search(
+ tr.clone()
+ .shifted_out_to(Interner, DebruijnIndex::ONE)
+ .expect("FIXME unexpected higher-ranked trait bound"),
+ ),
+ _ => None,
+ });
+ if let Some(_) = res {
+ return res;
+ }
+ // Handle `Self::Type` referring to own associated type in trait definitions
+ if let GenericDefId::TraitId(trait_id) = param_id.parent() {
+ let generics = generics(db.upcast(), trait_id.into());
+ if generics.params.type_or_consts[param_id.local_id()].is_trait_self() {
+ let trait_ref = TyBuilder::trait_ref(db, trait_id)
+ .fill_with_bound_vars(DebruijnIndex::INNERMOST, 0)
+ .build();
+ return search(trait_ref);
+ }
+ }
+ None
+ }
+ _ => None,
+ }
+}
+
+/// Build the type of all specific fields of a struct or enum variant.
+pub(crate) fn field_types_query(
+ db: &dyn HirDatabase,
+ variant_id: VariantId,
+) -> Arc<ArenaMap<LocalFieldId, Binders<Ty>>> {
+ let var_data = variant_id.variant_data(db.upcast());
+ let (resolver, def): (_, GenericDefId) = match variant_id {
+ VariantId::StructId(it) => (it.resolver(db.upcast()), it.into()),
+ VariantId::UnionId(it) => (it.resolver(db.upcast()), it.into()),
+ VariantId::EnumVariantId(it) => (it.parent.resolver(db.upcast()), it.parent.into()),
+ };
+ let generics = generics(db.upcast(), def);
+ let mut res = ArenaMap::default();
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ for (field_id, field_data) in var_data.fields().iter() {
+ res.insert(field_id, make_binders(db, &generics, ctx.lower_ty(&field_data.type_ref)));
+ }
+ Arc::new(res)
+}
+
+/// This query exists only to be used when resolving short-hand associated types
+/// like `T::Item`.
+///
+/// See the analogous query in rustc and its comment:
+/// <https://github.com/rust-lang/rust/blob/9150f844e2624eb013ec78ca08c1d416e6644026/src/librustc_typeck/astconv.rs#L46>
+/// This is a query mostly to handle cycles somewhat gracefully; e.g. the
+/// following bounds are disallowed: `T: Foo<U::Item>, U: Foo<T::Item>`, but
+/// these are fine: `T: Foo<U::Item>, U: Foo<()>`.
+pub(crate) fn generic_predicates_for_param_query(
+ db: &dyn HirDatabase,
+ def: GenericDefId,
+ param_id: TypeOrConstParamId,
+ assoc_name: Option<Name>,
+) -> Arc<[Binders<QuantifiedWhereClause>]> {
+ let resolver = def.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let generics = generics(db.upcast(), def);
+ let mut predicates: Vec<_> = resolver
+ .where_predicates_in_scope()
+ // we have to filter out all other predicates *first*, before attempting to lower them
+ .filter(|pred| match pred {
+ WherePredicate::ForLifetime { target, bound, .. }
+ | WherePredicate::TypeBound { target, bound, .. } => {
+ match target {
+ WherePredicateTypeTarget::TypeRef(type_ref) => {
+ if ctx.lower_ty_only_param(type_ref) != Some(param_id) {
+ return false;
+ }
+ }
+ &WherePredicateTypeTarget::TypeOrConstParam(local_id) => {
+ let target_id = TypeOrConstParamId { parent: def, local_id };
+ if target_id != param_id {
+ return false;
+ }
+ }
+ };
+
+ match &**bound {
+ TypeBound::ForLifetime(_, path) | TypeBound::Path(path, _) => {
+ // Only lower the bound if the trait could possibly define the associated
+ // type we're looking for.
+
+ let assoc_name = match &assoc_name {
+ Some(it) => it,
+ None => return true,
+ };
+ let tr = match resolver
+ .resolve_path_in_type_ns_fully(db.upcast(), path.mod_path())
+ {
+ Some(TypeNs::TraitId(tr)) => tr,
+ _ => return false,
+ };
+
+ all_super_traits(db.upcast(), tr).iter().any(|tr| {
+ db.trait_data(*tr).items.iter().any(|(name, item)| {
+ matches!(item, AssocItemId::TypeAliasId(_)) && name == assoc_name
+ })
+ })
+ }
+ TypeBound::Lifetime(_) | TypeBound::Error => false,
+ }
+ }
+ WherePredicate::Lifetime { .. } => false,
+ })
+ .flat_map(|pred| {
+ ctx.lower_where_predicate(pred, true).map(|p| make_binders(db, &generics, p))
+ })
+ .collect();
+
+ let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ let explicitly_unsized_tys = ctx.unsized_types.into_inner();
+ let implicitly_sized_predicates =
+ implicitly_sized_clauses(db, param_id.parent, &explicitly_unsized_tys, &subst, &resolver)
+ .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p)));
+ predicates.extend(implicitly_sized_predicates);
+ predicates.into()
+}
+
+pub(crate) fn generic_predicates_for_param_recover(
+ _db: &dyn HirDatabase,
+ _cycle: &[String],
+ _def: &GenericDefId,
+ _param_id: &TypeOrConstParamId,
+ _assoc_name: &Option<Name>,
+) -> Arc<[Binders<QuantifiedWhereClause>]> {
+ Arc::new([])
+}
+
+pub(crate) fn trait_environment_query(
+ db: &dyn HirDatabase,
+ def: GenericDefId,
+) -> Arc<TraitEnvironment> {
+ let resolver = def.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Placeholder);
+ let mut traits_in_scope = Vec::new();
+ let mut clauses = Vec::new();
+ for pred in resolver.where_predicates_in_scope() {
+ for pred in ctx.lower_where_predicate(pred, false) {
+ if let WhereClause::Implemented(tr) = &pred.skip_binders() {
+ traits_in_scope.push((tr.self_type_parameter(Interner).clone(), tr.hir_trait_id()));
+ }
+ let program_clause: chalk_ir::ProgramClause<Interner> = pred.cast(Interner);
+ clauses.push(program_clause.into_from_env_clause(Interner));
+ }
+ }
+
+ let container: Option<ItemContainerId> = match def {
+ // FIXME: is there a function for this?
+ GenericDefId::FunctionId(f) => Some(f.lookup(db.upcast()).container),
+ GenericDefId::AdtId(_) => None,
+ GenericDefId::TraitId(_) => None,
+ GenericDefId::TypeAliasId(t) => Some(t.lookup(db.upcast()).container),
+ GenericDefId::ImplId(_) => None,
+ GenericDefId::EnumVariantId(_) => None,
+ GenericDefId::ConstId(c) => Some(c.lookup(db.upcast()).container),
+ };
+ if let Some(ItemContainerId::TraitId(trait_id)) = container {
+ // add `Self: Trait<T1, T2, ...>` to the environment in trait
+ // function default implementations (and speculative code
+ // inside consts or type aliases)
+ cov_mark::hit!(trait_self_implements_self);
+ let substs = TyBuilder::placeholder_subst(db, trait_id);
+ let trait_ref = TraitRef { trait_id: to_chalk_trait_id(trait_id), substitution: substs };
+ let pred = WhereClause::Implemented(trait_ref);
+ let program_clause: chalk_ir::ProgramClause<Interner> = pred.cast(Interner);
+ clauses.push(program_clause.into_from_env_clause(Interner));
+ }
+
+ let subst = generics(db.upcast(), def).placeholder_subst(db);
+ let explicitly_unsized_tys = ctx.unsized_types.into_inner();
+ let implicitly_sized_clauses =
+ implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver).map(|pred| {
+ let program_clause: chalk_ir::ProgramClause<Interner> = pred.cast(Interner);
+ program_clause.into_from_env_clause(Interner)
+ });
+ clauses.extend(implicitly_sized_clauses);
+
+ let krate = def.module(db.upcast()).krate();
+
+ let env = chalk_ir::Environment::new(Interner).add_clauses(Interner, clauses);
+
+ Arc::new(TraitEnvironment { krate, traits_from_clauses: traits_in_scope, env })
+}
+
+/// Resolve the where clause(s) of an item with generics.
+pub(crate) fn generic_predicates_query(
+ db: &dyn HirDatabase,
+ def: GenericDefId,
+) -> Arc<[Binders<QuantifiedWhereClause>]> {
+ let resolver = def.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let generics = generics(db.upcast(), def);
+
+ let mut predicates = resolver
+ .where_predicates_in_scope()
+ .flat_map(|pred| {
+ ctx.lower_where_predicate(pred, false).map(|p| make_binders(db, &generics, p))
+ })
+ .collect::<Vec<_>>();
+
+ let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ let explicitly_unsized_tys = ctx.unsized_types.into_inner();
+ let implicitly_sized_predicates =
+ implicitly_sized_clauses(db, def, &explicitly_unsized_tys, &subst, &resolver)
+ .map(|p| make_binders(db, &generics, crate::wrap_empty_binders(p)));
+ predicates.extend(implicitly_sized_predicates);
+ predicates.into()
+}
+
+/// Generate implicit `: Sized` predicates for all generics that has no `?Sized` bound.
+/// Exception is Self of a trait def.
+fn implicitly_sized_clauses<'a>(
+ db: &dyn HirDatabase,
+ def: GenericDefId,
+ explicitly_unsized_tys: &'a FxHashSet<Ty>,
+ substitution: &'a Substitution,
+ resolver: &Resolver,
+) -> impl Iterator<Item = WhereClause> + 'a {
+ let is_trait_def = matches!(def, GenericDefId::TraitId(..));
+ let generic_args = &substitution.as_slice(Interner)[is_trait_def as usize..];
+ let sized_trait = db
+ .lang_item(resolver.krate(), SmolStr::new_inline("sized"))
+ .and_then(|lang_item| lang_item.as_trait().map(to_chalk_trait_id));
+
+ sized_trait.into_iter().flat_map(move |sized_trait| {
+ let implicitly_sized_tys = generic_args
+ .iter()
+ .filter_map(|generic_arg| generic_arg.ty(Interner))
+ .filter(move |&self_ty| !explicitly_unsized_tys.contains(self_ty));
+ implicitly_sized_tys.map(move |self_ty| {
+ WhereClause::Implemented(TraitRef {
+ trait_id: sized_trait,
+ substitution: Substitution::from1(Interner, self_ty.clone()),
+ })
+ })
+ })
+}
+
+/// Resolve the default type params from generics
+pub(crate) fn generic_defaults_query(
+ db: &dyn HirDatabase,
+ def: GenericDefId,
+) -> Arc<[Binders<chalk_ir::GenericArg<Interner>>]> {
+ let resolver = def.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let generic_params = generics(db.upcast(), def);
+
+ let defaults = generic_params
+ .iter()
+ .enumerate()
+ .map(|(idx, (id, p))| {
+ let p = match p {
+ TypeOrConstParamData::TypeParamData(p) => p,
+ TypeOrConstParamData::ConstParamData(_) => {
+ // FIXME: implement const generic defaults
+ let val = unknown_const_as_generic(
+ db.const_param_ty(ConstParamId::from_unchecked(id)),
+ );
+ return crate::make_binders_with_count(db, idx, &generic_params, val);
+ }
+ };
+ let mut ty =
+ p.default.as_ref().map_or(TyKind::Error.intern(Interner), |t| ctx.lower_ty(t));
+
+ // Each default can only refer to previous parameters.
+ // type variable default referring to parameter coming
+ // after it. This is forbidden (FIXME: report
+ // diagnostic)
+ ty = fallback_bound_vars(ty, idx);
+ let val = GenericArgData::Ty(ty).intern(Interner);
+ crate::make_binders_with_count(db, idx, &generic_params, val)
+ })
+ .collect();
+
+ defaults
+}
+
+pub(crate) fn generic_defaults_recover(
+ db: &dyn HirDatabase,
+ _cycle: &[String],
+ def: &GenericDefId,
+) -> Arc<[Binders<crate::GenericArg>]> {
+ let generic_params = generics(db.upcast(), *def);
+ // FIXME: this code is not covered in tests.
+ // we still need one default per parameter
+ let defaults = generic_params
+ .iter_id()
+ .enumerate()
+ .map(|(count, id)| {
+ let val = match id {
+ itertools::Either::Left(_) => {
+ GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
+ }
+ itertools::Either::Right(id) => unknown_const_as_generic(db.const_param_ty(id)),
+ };
+ crate::make_binders_with_count(db, count, &generic_params, val)
+ })
+ .collect();
+
+ defaults
+}
+
+fn fn_sig_for_fn(db: &dyn HirDatabase, def: FunctionId) -> PolyFnSig {
+ let data = db.function_data(def);
+ let resolver = def.resolver(db.upcast());
+ let ctx_params = TyLoweringContext::new(db, &resolver)
+ .with_impl_trait_mode(ImplTraitLoweringMode::Variable)
+ .with_type_param_mode(ParamLoweringMode::Variable);
+ let params = data.params.iter().map(|(_, tr)| ctx_params.lower_ty(tr)).collect::<Vec<_>>();
+ let ctx_ret = TyLoweringContext::new(db, &resolver)
+ .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
+ .with_type_param_mode(ParamLoweringMode::Variable);
+ let ret = ctx_ret.lower_ty(&data.ret_type);
+ let generics = generics(db.upcast(), def.into());
+ let sig = CallableSig::from_params_and_return(params, ret, data.is_varargs());
+ make_binders(db, &generics, sig)
+}
+
+/// Build the declared type of a function. This should not need to look at the
+/// function body.
+fn type_for_fn(db: &dyn HirDatabase, def: FunctionId) -> Binders<Ty> {
+ let generics = generics(db.upcast(), def.into());
+ let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ make_binders(
+ db,
+ &generics,
+ TyKind::FnDef(CallableDefId::FunctionId(def).to_chalk(db), substs).intern(Interner),
+ )
+}
+
+/// Build the declared type of a const.
+fn type_for_const(db: &dyn HirDatabase, def: ConstId) -> Binders<Ty> {
+ let data = db.const_data(def);
+ let generics = generics(db.upcast(), def.into());
+ let resolver = def.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+
+ make_binders(db, &generics, ctx.lower_ty(&data.type_ref))
+}
+
+/// Build the declared type of a static.
+fn type_for_static(db: &dyn HirDatabase, def: StaticId) -> Binders<Ty> {
+ let data = db.static_data(def);
+ let resolver = def.resolver(db.upcast());
+ let ctx = TyLoweringContext::new(db, &resolver);
+
+ Binders::empty(Interner, ctx.lower_ty(&data.type_ref))
+}
+
+fn fn_sig_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> PolyFnSig {
+ let struct_data = db.struct_data(def);
+ let fields = struct_data.variant_data.fields();
+ let resolver = def.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)).collect::<Vec<_>>();
+ let (ret, binders) = type_for_adt(db, def.into()).into_value_and_skipped_binders();
+ Binders::new(binders, CallableSig::from_params_and_return(params, ret, false))
+}
+
+/// Build the type of a tuple struct constructor.
+fn type_for_struct_constructor(db: &dyn HirDatabase, def: StructId) -> Binders<Ty> {
+ let struct_data = db.struct_data(def);
+ if let StructKind::Unit = struct_data.variant_data.kind() {
+ return type_for_adt(db, def.into());
+ }
+ let generics = generics(db.upcast(), def.into());
+ let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ make_binders(
+ db,
+ &generics,
+ TyKind::FnDef(CallableDefId::StructId(def).to_chalk(db), substs).intern(Interner),
+ )
+}
+
+fn fn_sig_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> PolyFnSig {
+ let enum_data = db.enum_data(def.parent);
+ let var_data = &enum_data.variants[def.local_id];
+ let fields = var_data.variant_data.fields();
+ let resolver = def.parent.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ let params = fields.iter().map(|(_, field)| ctx.lower_ty(&field.type_ref)).collect::<Vec<_>>();
+ let (ret, binders) = type_for_adt(db, def.parent.into()).into_value_and_skipped_binders();
+ Binders::new(binders, CallableSig::from_params_and_return(params, ret, false))
+}
+
+/// Build the type of a tuple enum variant constructor.
+fn type_for_enum_variant_constructor(db: &dyn HirDatabase, def: EnumVariantId) -> Binders<Ty> {
+ let enum_data = db.enum_data(def.parent);
+ let var_data = &enum_data.variants[def.local_id].variant_data;
+ if let StructKind::Unit = var_data.kind() {
+ return type_for_adt(db, def.parent.into());
+ }
+ let generics = generics(db.upcast(), def.parent.into());
+ let substs = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ make_binders(
+ db,
+ &generics,
+ TyKind::FnDef(CallableDefId::EnumVariantId(def).to_chalk(db), substs).intern(Interner),
+ )
+}
+
+fn type_for_adt(db: &dyn HirDatabase, adt: AdtId) -> Binders<Ty> {
+ let generics = generics(db.upcast(), adt.into());
+ let subst = generics.bound_vars_subst(db, DebruijnIndex::INNERMOST);
+ let ty = TyKind::Adt(crate::AdtId(adt), subst).intern(Interner);
+ make_binders(db, &generics, ty)
+}
+
+fn type_for_type_alias(db: &dyn HirDatabase, t: TypeAliasId) -> Binders<Ty> {
+ let generics = generics(db.upcast(), t.into());
+ let resolver = t.resolver(db.upcast());
+ let ctx =
+ TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+ if db.type_alias_data(t).is_extern {
+ Binders::empty(Interner, TyKind::Foreign(crate::to_foreign_def_id(t)).intern(Interner))
+ } else {
+ let type_ref = &db.type_alias_data(t).type_ref;
+ let inner = ctx.lower_ty(type_ref.as_deref().unwrap_or(&TypeRef::Error));
+ make_binders(db, &generics, inner)
+ }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
+pub enum CallableDefId {
+ FunctionId(FunctionId),
+ StructId(StructId),
+ EnumVariantId(EnumVariantId),
+}
+impl_from!(FunctionId, StructId, EnumVariantId for CallableDefId);
+
+impl CallableDefId {
+ pub fn krate(self, db: &dyn HirDatabase) -> CrateId {
+ let db = db.upcast();
+ match self {
+ CallableDefId::FunctionId(f) => f.lookup(db).module(db),
+ CallableDefId::StructId(s) => s.lookup(db).container,
+ CallableDefId::EnumVariantId(e) => e.parent.lookup(db).container,
+ }
+ .krate()
+ }
+}
+
+// Every callable def is also a generic def; forward to the per-variant `From` impls.
+impl From<CallableDefId> for GenericDefId {
+    fn from(def: CallableDefId) -> GenericDefId {
+        match def {
+            CallableDefId::FunctionId(f) => f.into(),
+            CallableDefId::StructId(s) => s.into(),
+            CallableDefId::EnumVariantId(e) => e.into(),
+        }
+    }
+}
+
+/// A definition that has a type in the *type* namespace (see `ty_query`).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum TyDefId {
+    BuiltinType(BuiltinType),
+    AdtId(AdtId),
+    TypeAliasId(TypeAliasId),
+}
+impl_from!(BuiltinType, AdtId(StructId, EnumId, UnionId), TypeAliasId for TyDefId);
+
+/// A definition that has a type in the *value* namespace (see `value_ty_query`).
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub enum ValueTyDefId {
+    FunctionId(FunctionId),
+    StructId(StructId),
+    UnionId(UnionId),
+    EnumVariantId(EnumVariantId),
+    ConstId(ConstId),
+    StaticId(StaticId),
+}
+impl_from!(FunctionId, StructId, UnionId, EnumVariantId, ConstId, StaticId for ValueTyDefId);
+
+/// Build the declared type of an item. This depends on the namespace; e.g. for
+/// `struct Foo(usize)`, we have two types: The type of the struct itself, and
+/// the constructor function `(usize) -> Foo` which lives in the values
+/// namespace.
+pub(crate) fn ty_query(db: &dyn HirDatabase, def: TyDefId) -> Binders<Ty> {
+    match def {
+        // Builtins have no generic parameters, hence the empty binders.
+        TyDefId::BuiltinType(it) => Binders::empty(Interner, TyBuilder::builtin(it)),
+        TyDefId::AdtId(it) => type_for_adt(db, it),
+        TyDefId::TypeAliasId(it) => type_for_type_alias(db, it),
+    }
+}
+
+/// Cycle-recovery for `ty_query`: returns an error type with the right number
+/// of binders so downstream code doesn't panic on a binder-count mismatch.
+pub(crate) fn ty_recover(db: &dyn HirDatabase, _cycle: &[String], def: &TyDefId) -> Binders<Ty> {
+    let generics = match *def {
+        TyDefId::BuiltinType(_) => return Binders::empty(Interner, TyKind::Error.intern(Interner)),
+        TyDefId::AdtId(it) => generics(db.upcast(), it.into()),
+        TyDefId::TypeAliasId(it) => generics(db.upcast(), it.into()),
+    };
+    make_binders(db, &generics, TyKind::Error.intern(Interner))
+}
+
+/// Builds the declared type of an item in the value namespace (functions and
+/// constructors get fn types; unions, consts and statics get their data type).
+pub(crate) fn value_ty_query(db: &dyn HirDatabase, def: ValueTyDefId) -> Binders<Ty> {
+    match def {
+        ValueTyDefId::FunctionId(it) => type_for_fn(db, it),
+        ValueTyDefId::StructId(it) => type_for_struct_constructor(db, it),
+        ValueTyDefId::UnionId(it) => type_for_adt(db, it.into()),
+        ValueTyDefId::EnumVariantId(it) => type_for_enum_variant_constructor(db, it),
+        ValueTyDefId::ConstId(it) => type_for_const(db, it),
+        ValueTyDefId::StaticId(it) => type_for_static(db, it),
+    }
+}
+
+/// Lowers the self type of an impl block (`impl Foo for Self { .. }`), with
+/// the impl's generic parameters bound as variables.
+pub(crate) fn impl_self_ty_query(db: &dyn HirDatabase, impl_id: ImplId) -> Binders<Ty> {
+    let impl_loc = impl_id.lookup(db.upcast());
+    let impl_data = db.impl_data(impl_id);
+    let resolver = impl_id.resolver(db.upcast());
+    // Attach context so panics during lowering report which impl was involved.
+    let _cx = stdx::panic_context::enter(format!(
+        "impl_self_ty_query({:?} -> {:?} -> {:?})",
+        impl_id, impl_loc, impl_data
+    ));
+    let generics = generics(db.upcast(), impl_id.into());
+    let ctx =
+        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+    make_binders(db, &generics, ctx.lower_ty(&impl_data.self_ty))
+}
+
+/// Returns the declared type of a const generic parameter. If the id actually
+/// refers to a type parameter (which should be impossible — see the `never!`),
+/// an error type is returned instead.
+pub(crate) fn const_param_ty_query(db: &dyn HirDatabase, def: ConstParamId) -> Ty {
+    let parent_data = db.generic_params(def.parent());
+    let data = &parent_data.type_or_consts[def.local_id()];
+    let resolver = def.parent().resolver(db.upcast());
+    let ctx = TyLoweringContext::new(db, &resolver);
+    match data {
+        TypeOrConstParamData::TypeParamData(_) => {
+            never!();
+            Ty::new(Interner, TyKind::Error)
+        }
+        TypeOrConstParamData::ConstParamData(d) => ctx.lower_ty(&d.ty),
+    }
+}
+
+/// Cycle-recovery for `impl_self_ty_query`: an error type wrapped in the
+/// impl's binders.
+pub(crate) fn impl_self_ty_recover(
+    db: &dyn HirDatabase,
+    _cycle: &[String],
+    impl_id: &ImplId,
+) -> Binders<Ty> {
+    let generics = generics(db.upcast(), (*impl_id).into());
+    make_binders(db, &generics, TyKind::Error.intern(Interner))
+}
+
+/// Lowers the trait reference of a trait impl (`impl Trait for S`), or `None`
+/// for inherent impls or if the trait fails to lower.
+pub(crate) fn impl_trait_query(db: &dyn HirDatabase, impl_id: ImplId) -> Option<Binders<TraitRef>> {
+    let impl_loc = impl_id.lookup(db.upcast());
+    let impl_data = db.impl_data(impl_id);
+    let resolver = impl_id.resolver(db.upcast());
+    // Attach context so panics during lowering report which impl was involved.
+    let _cx = stdx::panic_context::enter(format!(
+        "impl_trait_query({:?} -> {:?} -> {:?})",
+        impl_id, impl_loc, impl_data
+    ));
+    let ctx =
+        TyLoweringContext::new(db, &resolver).with_type_param_mode(ParamLoweringMode::Variable);
+    // Reuse the binders of the (already computed) self type for the trait ref.
+    let (self_ty, binders) = db.impl_self_ty(impl_id).into_value_and_skipped_binders();
+    let target_trait = impl_data.target_trait.as_ref()?;
+    Some(Binders::new(binders, ctx.lower_trait_ref(target_trait, Some(self_ty))?))
+}
+
+/// Collects the `impl Trait` types appearing in a function's return type
+/// (as opaque types), or `None` if the return type contains none.
+pub(crate) fn return_type_impl_traits(
+    db: &dyn HirDatabase,
+    def: hir_def::FunctionId,
+) -> Option<Arc<Binders<ReturnTypeImplTraits>>> {
+    // FIXME unify with fn_sig_for_fn instead of doing lowering twice, maybe
+    let data = db.function_data(def);
+    let resolver = def.resolver(db.upcast());
+    let ctx_ret = TyLoweringContext::new(db, &resolver)
+        .with_impl_trait_mode(ImplTraitLoweringMode::Opaque)
+        .with_type_param_mode(ParamLoweringMode::Variable);
+    // Lowering is done only for its side effect of filling `opaque_type_data`.
+    let _ret = (&ctx_ret).lower_ty(&data.ret_type);
+    let generics = generics(db.upcast(), def.into());
+    let return_type_impl_traits =
+        ReturnTypeImplTraits { impl_traits: ctx_ret.opaque_type_data.into_inner() };
+    if return_type_impl_traits.impl_traits.is_empty() {
+        None
+    } else {
+        Some(Arc::new(make_binders(db, &generics, return_type_impl_traits)))
+    }
+}
+
+/// Converts HIR mutability (`Shared`/`Mut`) into the chalk representation
+/// (`Not`/`Mut`).
+pub(crate) fn lower_to_chalk_mutability(m: hir_def::type_ref::Mutability) -> Mutability {
+    match m {
+        hir_def::type_ref::Mutability::Shared => Mutability::Not,
+        hir_def::type_ref::Mutability::Mut => Mutability::Mut,
+    }
+}
+
+/// Checks if the provided generic arg matches its expected kind, then lower them via
+/// provided closures. Use unknown if there was kind mismatch.
+///
+/// Returns `Some` of the lowered generic arg. `None` if the provided arg is a lifetime.
+pub(crate) fn generic_arg_to_chalk<'a, T>(
+    db: &dyn HirDatabase,
+    kind_id: Either<TypeParamId, ConstParamId>,
+    arg: &'a GenericArg,
+    this: &mut T,
+    for_type: impl FnOnce(&mut T, &TypeRef) -> Ty + 'a,
+    for_const: impl FnOnce(&mut T, &ConstScalarOrPath, Ty) -> Const + 'a,
+) -> Option<crate::GenericArg> {
+    // Determine the expected kind (and, for consts, the expected type) from
+    // the parameter the argument is matched against.
+    let kind = match kind_id {
+        Either::Left(_) => ParamKind::Type,
+        Either::Right(id) => {
+            let ty = db.const_param_ty(id);
+            ParamKind::Const(ty)
+        }
+    };
+    Some(match (arg, kind) {
+        (GenericArg::Type(type_ref), ParamKind::Type) => {
+            let ty = for_type(this, type_ref);
+            GenericArgData::Ty(ty).intern(Interner)
+        }
+        (GenericArg::Const(c), ParamKind::Const(c_ty)) => {
+            GenericArgData::Const(for_const(this, c, c_ty)).intern(Interner)
+        }
+        // Kind mismatch: a const where a type is expected lowers to an error type.
+        (GenericArg::Const(_), ParamKind::Type) => {
+            GenericArgData::Ty(TyKind::Error.intern(Interner)).intern(Interner)
+        }
+        (GenericArg::Type(t), ParamKind::Const(c_ty)) => {
+            // We want to recover simple idents, which parser detects them
+            // as types. Maybe here is not the best place to do it, but
+            // it works.
+            if let TypeRef::Path(p) = t {
+                let p = p.mod_path();
+                if p.kind == PathKind::Plain {
+                    if let [n] = p.segments() {
+                        let c = ConstScalarOrPath::Path(n.clone());
+                        return Some(
+                            GenericArgData::Const(for_const(this, &c, c_ty)).intern(Interner),
+                        );
+                    }
+                }
+            }
+            unknown_const_as_generic(c_ty)
+        }
+        (GenericArg::Lifetime(_), _) => return None,
+    })
+}
+
+/// Lowers a const generic argument to a chalk `Const`: scalars are interned
+/// directly; paths are resolved, falling back to an unknown const on failure.
+pub(crate) fn const_or_path_to_chalk(
+    db: &dyn HirDatabase,
+    resolver: &Resolver,
+    expected_ty: Ty,
+    value: &ConstScalarOrPath,
+    mode: ParamLoweringMode,
+    args: impl FnOnce() -> Generics,
+    debruijn: DebruijnIndex,
+) -> Const {
+    match value {
+        ConstScalarOrPath::Scalar(s) => intern_const_scalar(s.clone(), expected_ty),
+        ConstScalarOrPath::Path(n) => {
+            let path = ModPath::from_segments(PathKind::Plain, Some(n.clone()));
+            path_to_const(db, resolver, &path, mode, args, debruijn)
+                .unwrap_or_else(|| unknown_const(expected_ty))
+        }
+    }
+}
+
+/// This replaces any 'free' Bound vars in `s` (i.e. those with indices past
+/// num_vars_to_keep) by `TyKind::Error` (for types) or an unknown const (for
+/// consts).
+fn fallback_bound_vars<T: TypeFoldable<Interner> + HasInterner<Interner = Interner>>(
+    s: T,
+    num_vars_to_keep: usize,
+) -> T {
+    crate::fold_free_vars(
+        s,
+        // Type vars: only innermost-debruijn vars past the cutoff are replaced.
+        |bound, binders| {
+            if bound.index >= num_vars_to_keep && bound.debruijn == DebruijnIndex::INNERMOST {
+                TyKind::Error.intern(Interner)
+            } else {
+                bound.shifted_in_from(binders).to_ty(Interner)
+            }
+        },
+        // Const vars: same rule, replaced with an unknown const of the same type.
+        |ty, bound, binders| {
+            if bound.index >= num_vars_to_keep && bound.debruijn == DebruijnIndex::INNERMOST {
+                unknown_const(ty.clone())
+            } else {
+                bound.shifted_in_from(binders).to_const(Interner, ty)
+            }
+        },
+    )
+}
--- /dev/null
- pub fn inherent_impl_crates_query(
+//! This module is concerned with finding methods that a given type provides.
+//! For details about how this works in rustc, see the method lookup page in the
+//! [rustc guide](https://rust-lang.github.io/rustc-guide/method-lookup.html)
+//! and the corresponding code mostly in librustc_typeck/check/method/probe.rs.
+use std::{iter, ops::ControlFlow, sync::Arc};
+
+use arrayvec::ArrayVec;
+use base_db::{CrateId, Edition};
+use chalk_ir::{cast::Cast, Mutability, UniverseIndex};
+use hir_def::{
+ data::ImplData, item_scope::ItemScope, nameres::DefMap, AssocItemId, BlockId, ConstId,
+ FunctionId, GenericDefId, HasModule, ImplId, ItemContainerId, Lookup, ModuleDefId, ModuleId,
+ TraitId,
+};
+use hir_expand::name::Name;
+use rustc_hash::{FxHashMap, FxHashSet};
+use stdx::never;
+
+use crate::{
+ autoderef::{self, AutoderefKind},
+ db::HirDatabase,
+ from_foreign_def_id,
+ infer::{unify::InferenceTable, Adjust, Adjustment, AutoBorrow, OverloadedDeref, PointerCast},
+ primitive::{FloatTy, IntTy, UintTy},
+ static_lifetime,
+ utils::all_super_traits,
+ AdtId, Canonical, CanonicalVarKinds, DebruijnIndex, ForeignDefId, InEnvironment, Interner,
+ Scalar, TraitEnvironment, TraitRefExt, Ty, TyBuilder, TyExt, TyKind,
+};
+
+/// This is used as a key for indexing impls.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
+pub enum TyFingerprint {
+    // These are lang item impls:
+    Str,
+    Slice,
+    Array,
+    Never,
+    RawPtr(Mutability),
+    Scalar(Scalar),
+    // These can have user-defined impls:
+    Adt(hir_def::AdtId),
+    Dyn(TraitId),
+    ForeignType(ForeignDefId),
+    // These only exist for trait impls
+    Unit,
+    Unnameable,
+    // Fn pointers, keyed by arity (substitution length).
+    Function(u32),
+}
+
+impl TyFingerprint {
+    /// Creates a TyFingerprint for looking up an inherent impl. Only certain
+    /// types can have inherent impls: if we have some `struct S`, we can have
+    /// an `impl S`, but not `impl &S`. Hence, this will return `None` for
+    /// reference types and such.
+    pub fn for_inherent_impl(ty: &Ty) -> Option<TyFingerprint> {
+        let fp = match ty.kind(Interner) {
+            TyKind::Str => TyFingerprint::Str,
+            TyKind::Never => TyFingerprint::Never,
+            TyKind::Slice(..) => TyFingerprint::Slice,
+            TyKind::Array(..) => TyFingerprint::Array,
+            TyKind::Scalar(scalar) => TyFingerprint::Scalar(*scalar),
+            TyKind::Adt(AdtId(adt), _) => TyFingerprint::Adt(*adt),
+            TyKind::Raw(mutability, ..) => TyFingerprint::RawPtr(*mutability),
+            TyKind::Foreign(alias_id, ..) => TyFingerprint::ForeignType(*alias_id),
+            TyKind::Dyn(_) => ty.dyn_trait().map(TyFingerprint::Dyn)?,
+            _ => return None,
+        };
+        Some(fp)
+    }
+
+    /// Creates a TyFingerprint for looking up a trait impl. Unlike
+    /// `for_inherent_impl`, this handles references, tuples, fn pointers and
+    /// other unnameable types, since trait impls can exist for those.
+    pub fn for_trait_impl(ty: &Ty) -> Option<TyFingerprint> {
+        let fp = match ty.kind(Interner) {
+            TyKind::Str => TyFingerprint::Str,
+            TyKind::Never => TyFingerprint::Never,
+            TyKind::Slice(..) => TyFingerprint::Slice,
+            TyKind::Array(..) => TyFingerprint::Array,
+            TyKind::Scalar(scalar) => TyFingerprint::Scalar(*scalar),
+            TyKind::Adt(AdtId(adt), _) => TyFingerprint::Adt(*adt),
+            TyKind::Raw(mutability, ..) => TyFingerprint::RawPtr(*mutability),
+            TyKind::Foreign(alias_id, ..) => TyFingerprint::ForeignType(*alias_id),
+            TyKind::Dyn(_) => ty.dyn_trait().map(TyFingerprint::Dyn)?,
+            // References fingerprint as their pointee.
+            TyKind::Ref(_, _, ty) => return TyFingerprint::for_trait_impl(ty),
+            // Tuples fingerprint as their first element; `()` gets `Unit`.
+            TyKind::Tuple(_, subst) => {
+                let first_ty = subst.interned().get(0).map(|arg| arg.assert_ty_ref(Interner));
+                match first_ty {
+                    Some(ty) => return TyFingerprint::for_trait_impl(ty),
+                    None => TyFingerprint::Unit,
+                }
+            }
+            TyKind::AssociatedType(_, _)
+            | TyKind::OpaqueType(_, _)
+            | TyKind::FnDef(_, _)
+            | TyKind::Closure(_, _)
+            | TyKind::Generator(..)
+            | TyKind::GeneratorWitness(..) => TyFingerprint::Unnameable,
+            TyKind::Function(fn_ptr) => {
+                TyFingerprint::Function(fn_ptr.substitution.0.len(Interner) as u32)
+            }
+            TyKind::Alias(_)
+            | TyKind::Placeholder(_)
+            | TyKind::BoundVar(_)
+            | TyKind::InferenceVar(_, _)
+            | TyKind::Error => return None,
+        };
+        Some(fp)
+    }
+}
+
+/// Fingerprints of all integer scalar types, used when a method receiver is an
+/// unresolved integer inference variable.
+pub(crate) const ALL_INT_FPS: [TyFingerprint; 12] = [
+    TyFingerprint::Scalar(Scalar::Int(IntTy::I8)),
+    TyFingerprint::Scalar(Scalar::Int(IntTy::I16)),
+    TyFingerprint::Scalar(Scalar::Int(IntTy::I32)),
+    TyFingerprint::Scalar(Scalar::Int(IntTy::I64)),
+    TyFingerprint::Scalar(Scalar::Int(IntTy::I128)),
+    TyFingerprint::Scalar(Scalar::Int(IntTy::Isize)),
+    TyFingerprint::Scalar(Scalar::Uint(UintTy::U8)),
+    TyFingerprint::Scalar(Scalar::Uint(UintTy::U16)),
+    TyFingerprint::Scalar(Scalar::Uint(UintTy::U32)),
+    TyFingerprint::Scalar(Scalar::Uint(UintTy::U64)),
+    TyFingerprint::Scalar(Scalar::Uint(UintTy::U128)),
+    TyFingerprint::Scalar(Scalar::Uint(UintTy::Usize)),
+];
+
+/// Fingerprints of all float scalar types, used when a method receiver is an
+/// unresolved float inference variable.
+pub(crate) const ALL_FLOAT_FPS: [TyFingerprint; 2] = [
+    TyFingerprint::Scalar(Scalar::Float(FloatTy::F32)),
+    TyFingerprint::Scalar(Scalar::Float(FloatTy::F64)),
+];
+
+/// Trait impls defined or available in some crate.
+#[derive(Debug, Eq, PartialEq)]
+pub struct TraitImpls {
+    // Indexed by trait, then by the self-type fingerprint.
+    // If the `Option<TyFingerprint>` is `None`, the impl may apply to any self type.
+    map: FxHashMap<TraitId, FxHashMap<Option<TyFingerprint>, Vec<ImplId>>>,
+}
+
+impl TraitImpls {
+    /// Collects all trait impls defined directly in `krate`.
+    pub(crate) fn trait_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
+        let _p = profile::span("trait_impls_in_crate_query").detail(|| format!("{krate:?}"));
+        let mut impls = Self { map: FxHashMap::default() };
+
+        let crate_def_map = db.crate_def_map(krate);
+        impls.collect_def_map(db, &crate_def_map);
+        impls.shrink_to_fit();
+
+        Arc::new(impls)
+    }
+
+    /// Collects trait impls defined in the given block expression, if it has
+    /// its own `DefMap`.
+    pub(crate) fn trait_impls_in_block_query(
+        db: &dyn HirDatabase,
+        block: BlockId,
+    ) -> Option<Arc<Self>> {
+        let _p = profile::span("trait_impls_in_block_query");
+        let mut impls = Self { map: FxHashMap::default() };
+
+        let block_def_map = db.block_def_map(block)?;
+        impls.collect_def_map(db, &block_def_map);
+        impls.shrink_to_fit();
+
+        Some(Arc::new(impls))
+    }
+
+    /// Merges the trait impls of `krate` and all its transitive dependencies
+    /// into one index.
+    pub(crate) fn trait_impls_in_deps_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
+        let _p = profile::span("trait_impls_in_deps_query").detail(|| format!("{krate:?}"));
+        let crate_graph = db.crate_graph();
+        let mut res = Self { map: FxHashMap::default() };
+
+        for krate in crate_graph.transitive_deps(krate) {
+            res.merge(&db.trait_impls_in_crate(krate));
+        }
+        res.shrink_to_fit();
+
+        Arc::new(res)
+    }
+
+    // Releases excess capacity at every level of the nested maps; the index is
+    // built once and then only read, so this saves memory.
+    fn shrink_to_fit(&mut self) {
+        self.map.shrink_to_fit();
+        self.map.values_mut().for_each(|map| {
+            map.shrink_to_fit();
+            map.values_mut().for_each(Vec::shrink_to_fit);
+        });
+    }
+
+    // Indexes every trait impl in `def_map` (recursing into block def maps of
+    // unnamed consts). Inherent impls are skipped here.
+    fn collect_def_map(&mut self, db: &dyn HirDatabase, def_map: &DefMap) {
+        for (_module_id, module_data) in def_map.modules() {
+            for impl_id in module_data.scope.impls() {
+                let target_trait = match db.impl_trait(impl_id) {
+                    Some(tr) => tr.skip_binders().hir_trait_id(),
+                    None => continue,
+                };
+                let self_ty = db.impl_self_ty(impl_id);
+                let self_ty_fp = TyFingerprint::for_trait_impl(self_ty.skip_binders());
+                self.map
+                    .entry(target_trait)
+                    .or_default()
+                    .entry(self_ty_fp)
+                    .or_default()
+                    .push(impl_id);
+            }
+
+            // To better support custom derives, collect impls in all unnamed const items.
+            // const _: () = { ... };
+            for konst in collect_unnamed_consts(db, &module_data.scope) {
+                let body = db.body(konst.into());
+                for (_, block_def_map) in body.blocks(db.upcast()) {
+                    self.collect_def_map(db, &block_def_map);
+                }
+            }
+        }
+    }
+
+    // Merges another index into this one (used to combine per-crate results).
+    fn merge(&mut self, other: &Self) {
+        for (trait_, other_map) in &other.map {
+            let map = self.map.entry(*trait_).or_default();
+            for (fp, impls) in other_map {
+                map.entry(*fp).or_default().extend(impls);
+            }
+        }
+    }
+
+    /// Queries all trait impls for the given type.
+    pub fn for_self_ty_without_blanket_impls(
+        &self,
+        fp: TyFingerprint,
+    ) -> impl Iterator<Item = ImplId> + '_ {
+        self.map
+            .values()
+            .flat_map(move |impls| impls.get(&Some(fp)).into_iter())
+            .flat_map(|it| it.iter().copied())
+    }
+
+    /// Queries all impls of the given trait.
+    pub fn for_trait(&self, trait_: TraitId) -> impl Iterator<Item = ImplId> + '_ {
+        self.map
+            .get(&trait_)
+            .into_iter()
+            .flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
+    }
+
+    /// Queries all impls of `trait_` that may apply to `self_ty`.
+    pub fn for_trait_and_self_ty(
+        &self,
+        trait_: TraitId,
+        self_ty: TyFingerprint,
+    ) -> impl Iterator<Item = ImplId> + '_ {
+        // `None`-fingerprint entries are blanket-ish impls that may apply to any self type.
+        self.map
+            .get(&trait_)
+            .into_iter()
+            .flat_map(move |map| map.get(&Some(self_ty)).into_iter().chain(map.get(&None)))
+            .flat_map(|v| v.iter().copied())
+    }
+
+    /// Iterates over every impl in the index.
+    pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
+        self.map.values().flat_map(|map| map.values().flat_map(|v| v.iter().copied()))
+    }
+}
+
+/// Inherent impls defined in some crate.
+///
+/// Inherent impls can only be defined in the crate that also defines the self type of the impl
+/// (note that some primitives are considered to be defined by both libcore and liballoc).
+///
+/// This makes inherent impl lookup easier than trait impl lookup since we only have to consider a
+/// single crate.
+#[derive(Debug, Eq, PartialEq)]
+pub struct InherentImpls {
+    // Indexed by the fingerprint of the impl's self type.
+    map: FxHashMap<TyFingerprint, Vec<ImplId>>,
+}
+
+impl InherentImpls {
+    /// Collects all inherent impls defined directly in `krate`.
+    pub(crate) fn inherent_impls_in_crate_query(db: &dyn HirDatabase, krate: CrateId) -> Arc<Self> {
+        let mut impls = Self { map: FxHashMap::default() };
+
+        let crate_def_map = db.crate_def_map(krate);
+        impls.collect_def_map(db, &crate_def_map);
+        impls.shrink_to_fit();
+
+        Arc::new(impls)
+    }
+
+    /// Collects inherent impls defined in the given block expression, if it
+    /// has its own `DefMap`.
+    pub(crate) fn inherent_impls_in_block_query(
+        db: &dyn HirDatabase,
+        block: BlockId,
+    ) -> Option<Arc<Self>> {
+        let mut impls = Self { map: FxHashMap::default() };
+        if let Some(block_def_map) = db.block_def_map(block) {
+            impls.collect_def_map(db, &block_def_map);
+            impls.shrink_to_fit();
+            return Some(Arc::new(impls));
+        }
+        None
+    }
+
+    // Releases excess capacity; the index is built once and then only read.
+    fn shrink_to_fit(&mut self) {
+        self.map.values_mut().for_each(Vec::shrink_to_fit);
+        self.map.shrink_to_fit();
+    }
+
+    // Indexes every inherent (non-trait) impl in `def_map`, recursing into
+    // block def maps of unnamed consts.
+    fn collect_def_map(&mut self, db: &dyn HirDatabase, def_map: &DefMap) {
+        for (_module_id, module_data) in def_map.modules() {
+            for impl_id in module_data.scope.impls() {
+                let data = db.impl_data(impl_id);
+                if data.target_trait.is_some() {
+                    continue;
+                }
+
+                let self_ty = db.impl_self_ty(impl_id);
+                let fp = TyFingerprint::for_inherent_impl(self_ty.skip_binders());
+                if let Some(fp) = fp {
+                    self.map.entry(fp).or_default().push(impl_id);
+                }
+                // `fp` should only be `None` in error cases (either erroneous code or incomplete name resolution)
+            }
+
+            // To better support custom derives, collect impls in all unnamed const items.
+            // const _: () = { ... };
+            for konst in collect_unnamed_consts(db, &module_data.scope) {
+                let body = db.body(konst.into());
+                for (_, block_def_map) in body.blocks(db.upcast()) {
+                    self.collect_def_map(db, &block_def_map);
+                }
+            }
+        }
+    }
+
+    /// Returns the inherent impls whose self type matches `self_ty`'s
+    /// fingerprint (empty for types that cannot have inherent impls).
+    pub fn for_self_ty(&self, self_ty: &Ty) -> &[ImplId] {
+        match TyFingerprint::for_inherent_impl(self_ty) {
+            Some(fp) => self.map.get(&fp).map(|vec| vec.as_ref()).unwrap_or(&[]),
+            None => &[],
+        }
+    }
+
+    /// Iterates over every impl in the index.
+    pub fn all_impls(&self) -> impl Iterator<Item = ImplId> + '_ {
+        self.map.values().flat_map(|v| v.iter().copied())
+    }
+}
+
++// Returns the (at most two) crates in `krate`'s dependency closure that
++// contain inherent impls for the fingerprinted type (e.g. core and alloc
++// for primitives).
++pub(crate) fn inherent_impl_crates_query(
+    db: &dyn HirDatabase,
+    krate: CrateId,
+    fp: TyFingerprint,
+) -> ArrayVec<CrateId, 2> {
+    let _p = profile::span("inherent_impl_crates_query");
+    let mut res = ArrayVec::new();
+    let crate_graph = db.crate_graph();
+
+    for krate in crate_graph.transitive_deps(krate) {
+        if res.is_full() {
+            // we don't currently look for or store more than two crates here,
+            // so don't needlessly look at more crates than necessary.
+            break;
+        }
+        let impls = db.inherent_impls_in_crate(krate);
+        if impls.map.get(&fp).map_or(false, |v| !v.is_empty()) {
+            res.push(krate);
+        }
+    }
+
+    res
+}
+
+/// Yields all unnamed const items (`const _: () = { .. }`) in `scope`, plus —
+/// as a temporary hack — consts named `_DERIVE_*` generated by synstructure.
+fn collect_unnamed_consts<'a>(
+    db: &'a dyn HirDatabase,
+    scope: &'a ItemScope,
+) -> impl Iterator<Item = ConstId> + 'a {
+    let unnamed_consts = scope.unnamed_consts();
+
+    // FIXME: Also treat consts named `_DERIVE_*` as unnamed, since synstructure generates those.
+    // Should be removed once synstructure stops doing that.
+    let synstructure_hack_consts = scope.values().filter_map(|(item, _)| match item {
+        ModuleDefId::ConstId(id) => {
+            let loc = id.lookup(db.upcast());
+            let item_tree = loc.id.item_tree(db.upcast());
+            if item_tree[loc.id.value]
+                .name
+                .as_ref()
+                .map_or(false, |n| n.to_smol_str().starts_with("_DERIVE_"))
+            {
+                Some(id)
+            } else {
+                None
+            }
+        }
+        _ => None,
+    });
+
+    unnamed_consts.chain(synstructure_hack_consts)
+}
+
+/// Returns the crates in which inherent impls for `ty` may live: the defining
+/// crate for nameable types, or the crates found via `inherent_impl_crates`
+/// for primitives. `None` for types that cannot have inherent impls.
+pub fn def_crates(
+    db: &dyn HirDatabase,
+    ty: &Ty,
+    cur_crate: CrateId,
+) -> Option<ArrayVec<CrateId, 2>> {
+    let mod_to_crate_ids = |module: ModuleId| Some(iter::once(module.krate()).collect());
+
+    let fp = TyFingerprint::for_inherent_impl(ty);
+
+    match ty.kind(Interner) {
+        TyKind::Adt(AdtId(def_id), _) => mod_to_crate_ids(def_id.module(db.upcast())),
+        TyKind::Foreign(id) => {
+            mod_to_crate_ids(from_foreign_def_id(*id).lookup(db.upcast()).module(db.upcast()))
+        }
+        TyKind::Dyn(_) => ty
+            .dyn_trait()
+            .and_then(|trait_| mod_to_crate_ids(GenericDefId::TraitId(trait_).module(db.upcast()))),
+        // for primitives, there may be impls in various places (core and alloc
+        // mostly). We just check the whole crate graph for crates with impls
+        // (cached behind a query).
+        TyKind::Scalar(_)
+        | TyKind::Str
+        | TyKind::Slice(_)
+        | TyKind::Array(..)
+        | TyKind::Raw(..) => {
+            Some(db.inherent_impl_crates(cur_crate, fp.expect("fingerprint for primitive")))
+        }
+        _ => return None,
+    }
++}
++
++/// Maps a binary operator to `(method name, trait lang-item name)` of the
++/// `std::ops`/`std::cmp` trait that overloads it (e.g. `+` -> `(add, add)`,
++/// `<` -> `(lt, partial_ord)`). Returns `None` for `&&`/`||` and plain `=`,
++/// which are not overloadable.
++pub fn lang_names_for_bin_op(op: syntax::ast::BinaryOp) -> Option<(Name, Name)> {
++    use hir_expand::name;
++    use syntax::ast::{ArithOp, BinaryOp, CmpOp, Ordering};
++    Some(match op {
++        BinaryOp::LogicOp(_) => return None,
++        BinaryOp::ArithOp(aop) => match aop {
++            ArithOp::Add => (name!(add), name!(add)),
++            ArithOp::Mul => (name!(mul), name!(mul)),
++            ArithOp::Sub => (name!(sub), name!(sub)),
++            ArithOp::Div => (name!(div), name!(div)),
++            ArithOp::Rem => (name!(rem), name!(rem)),
++            ArithOp::Shl => (name!(shl), name!(shl)),
++            ArithOp::Shr => (name!(shr), name!(shr)),
++            ArithOp::BitXor => (name!(bitxor), name!(bitxor)),
++            ArithOp::BitOr => (name!(bitor), name!(bitor)),
++            ArithOp::BitAnd => (name!(bitand), name!(bitand)),
++        },
++        BinaryOp::Assignment { op: Some(aop) } => match aop {
++            ArithOp::Add => (name!(add_assign), name!(add_assign)),
++            ArithOp::Mul => (name!(mul_assign), name!(mul_assign)),
++            ArithOp::Sub => (name!(sub_assign), name!(sub_assign)),
++            ArithOp::Div => (name!(div_assign), name!(div_assign)),
++            ArithOp::Rem => (name!(rem_assign), name!(rem_assign)),
++            ArithOp::Shl => (name!(shl_assign), name!(shl_assign)),
++            ArithOp::Shr => (name!(shr_assign), name!(shr_assign)),
++            ArithOp::BitXor => (name!(bitxor_assign), name!(bitxor_assign)),
++            ArithOp::BitOr => (name!(bitor_assign), name!(bitor_assign)),
++            ArithOp::BitAnd => (name!(bitand_assign), name!(bitand_assign)),
++        },
++        BinaryOp::CmpOp(cop) => match cop {
++            CmpOp::Eq { negated: false } => (name!(eq), name!(eq)),
++            CmpOp::Eq { negated: true } => (name!(ne), name!(eq)),
++            CmpOp::Ord { ordering: Ordering::Less, strict: false } => {
++                (name!(le), name!(partial_ord))
++            }
++            CmpOp::Ord { ordering: Ordering::Less, strict: true } => {
++                (name!(lt), name!(partial_ord))
++            }
++            CmpOp::Ord { ordering: Ordering::Greater, strict: false } => {
++                (name!(ge), name!(partial_ord))
++            }
++            CmpOp::Ord { ordering: Ordering::Greater, strict: true } => {
++                (name!(gt), name!(partial_ord))
++            }
++        },
++        BinaryOp::Assignment { op: None } => return None,
++    })
+}
+
+/// Look up the method with the given name.
+///
+/// Convenience wrapper around `iterate_method_candidates` that returns the
+/// first matching function together with the receiver adjustments needed to
+/// call it.
+pub(crate) fn lookup_method(
+    ty: &Canonical<Ty>,
+    db: &dyn HirDatabase,
+    env: Arc<TraitEnvironment>,
+    traits_in_scope: &FxHashSet<TraitId>,
+    visible_from_module: VisibleFromModule,
+    name: &Name,
+) -> Option<(ReceiverAdjustments, FunctionId)> {
+    iterate_method_candidates(
+        ty,
+        db,
+        env,
+        traits_in_scope,
+        visible_from_module,
+        Some(name),
+        LookupMode::MethodCall,
+        // Only function items count; other assoc items (consts, types) are skipped.
+        |adjustments, f| match f {
+            AssocItemId::FunctionId(f) => Some((adjustments, f)),
+            _ => None,
+        },
+    )
+}
+
+/// Whether we're looking up a dotted method call (like `v.len()`) or a path
+/// (like `Vec::new`).
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub enum LookupMode {
+    /// Looking up a method call like `v.len()`: We only consider candidates
+    /// that have a `self` parameter, and do autoderef.
+    MethodCall,
+    /// Looking up a path like `Vec::new` or `Vec::default`: We consider all
+    /// candidates including associated constants, but don't do autoderef.
+    Path,
+}
+
+/// Controls visibility filtering during method/path candidate iteration.
+#[derive(Clone, Copy)]
+pub enum VisibleFromModule {
+    /// Filter for results that are visible from the given module
+    Filter(ModuleId),
+    /// Include impls from the given block.
+    IncludeBlock(BlockId),
+    /// Do nothing special in regards visibility
+    None,
+}
+
+// `Some(module)` -> filter by that module; `None` -> no visibility filtering.
+impl From<Option<ModuleId>> for VisibleFromModule {
+    fn from(module: Option<ModuleId>) -> Self {
+        match module {
+            Some(module) => Self::Filter(module),
+            None => Self::None,
+        }
+    }
+}
+
+// `Some(block)` -> include that block's impls; `None` -> no special handling.
+impl From<Option<BlockId>> for VisibleFromModule {
+    fn from(block: Option<BlockId>) -> Self {
+        match block {
+            Some(block) => Self::IncludeBlock(block),
+            None => Self::None,
+        }
+    }
+}
+
+/// The adjustments (derefs, optional autoref, optional array unsizing) applied
+/// to a method-call receiver to reach the matching self type.
+#[derive(Debug, Clone, Default)]
+pub struct ReceiverAdjustments {
+    // `Some(m)` if a final `&`/`&mut` autoref is applied.
+    autoref: Option<Mutability>,
+    // Number of deref steps applied before any autoref.
+    autoderefs: usize,
+    // Whether a final `[T; N]` -> `[T]` unsize step is applied.
+    unsize_array: bool,
+}
+
+impl ReceiverAdjustments {
+    /// Applies the recorded adjustments to `ty`, returning the adjusted type
+    /// along with the `Adjustment` list (for recording in inference results).
+    pub(crate) fn apply(&self, table: &mut InferenceTable<'_>, ty: Ty) -> (Ty, Vec<Adjustment>) {
+        let mut ty = ty;
+        let mut adjust = Vec::new();
+        for _ in 0..self.autoderefs {
+            match autoderef::autoderef_step(table, ty.clone()) {
+                None => {
+                    never!("autoderef not possible for {:?}", ty);
+                    ty = TyKind::Error.intern(Interner);
+                    break;
+                }
+                Some((kind, new_ty)) => {
+                    ty = new_ty.clone();
+                    adjust.push(Adjustment {
+                        kind: Adjust::Deref(match kind {
+                            // FIXME should we know the mutability here?
+                            AutoderefKind::Overloaded => Some(OverloadedDeref(Mutability::Not)),
+                            AutoderefKind::Builtin => None,
+                        }),
+                        target: new_ty,
+                    });
+                }
+            }
+        }
+        if self.unsize_array {
+            ty = match ty.kind(Interner) {
+                TyKind::Array(inner, _) => TyKind::Slice(inner.clone()).intern(Interner),
+                _ => {
+                    never!("unsize_array with non-array {:?}", ty);
+                    ty
+                }
+            };
+            // FIXME this is kind of wrong since the unsize needs to happen to a pointer/reference
+            adjust.push(Adjustment {
+                kind: Adjust::Pointer(PointerCast::Unsize),
+                target: ty.clone(),
+            });
+        }
+        if let Some(m) = self.autoref {
+            ty = TyKind::Ref(m, static_lifetime(), ty).intern(Interner);
+            adjust
+                .push(Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(m)), target: ty.clone() });
+        }
+        (ty, adjust)
+    }
+
+    // Returns a copy of these adjustments with an autoref of mutability `m` added.
+    fn with_autoref(&self, m: Mutability) -> ReceiverAdjustments {
+        Self { autoref: Some(m), ..*self }
+    }
+}
+
+// This would be nicer if it just returned an iterator, but that runs into
+// lifetime problems, because we need to borrow temp `CrateImplDefs`.
+// FIXME add a context type here?
+/// Iterates method candidates, stopping at (and returning) the first one for
+/// which `callback` returns `Some`.
+pub(crate) fn iterate_method_candidates<T>(
+    ty: &Canonical<Ty>,
+    db: &dyn HirDatabase,
+    env: Arc<TraitEnvironment>,
+    traits_in_scope: &FxHashSet<TraitId>,
+    visible_from_module: VisibleFromModule,
+    name: Option<&Name>,
+    mode: LookupMode,
+    mut callback: impl FnMut(ReceiverAdjustments, AssocItemId) -> Option<T>,
+) -> Option<T> {
+    let mut slot = None;
+    iterate_method_candidates_dyn(
+        ty,
+        db,
+        env,
+        traits_in_scope,
+        visible_from_module,
+        name,
+        mode,
+        &mut |adj, item| {
+            // We break out of the iteration as soon as `slot` is filled, so it
+            // must still be empty here.
+            assert!(slot.is_none());
+            if let Some(it) = callback(adj, item) {
+                slot = Some(it);
+                return ControlFlow::Break(());
+            }
+            ControlFlow::Continue(())
+        },
+    );
+    slot
+}
+
+/// Looks up the concrete method named `name` in the impl of `trait_` that
+/// matches `self_ty` (i.e. resolves a trait method to its impl item).
+pub fn lookup_impl_method(
+    self_ty: &Ty,
+    db: &dyn HirDatabase,
+    env: Arc<TraitEnvironment>,
+    trait_: TraitId,
+    name: &Name,
+) -> Option<FunctionId> {
+    let self_ty_fp = TyFingerprint::for_trait_impl(self_ty)?;
+    let trait_impls = db.trait_impls_in_deps(env.krate);
+    let impls = trait_impls.for_trait_and_self_ty(trait_, self_ty_fp);
+    let mut table = InferenceTable::new(db, env.clone());
+    find_matching_impl(impls, &mut table, &self_ty).and_then(|data| {
+        data.items.iter().find_map(|it| match it {
+            AssocItemId::FunctionId(f) => (db.function_data(*f).name == *name).then(|| *f),
+            _ => None,
+        })
+    })
+}
+
+/// Returns the first impl in `impls` whose (freshly instantiated) self type
+/// unifies with `self_ty` and whose where-clauses are satisfiable; `None` if
+/// the iterator is exhausted first.
+fn find_matching_impl(
+    mut impls: impl Iterator<Item = ImplId>,
+    table: &mut InferenceTable<'_>,
+    self_ty: &Ty,
+) -> Option<Arc<ImplData>> {
+    let db = table.db;
+    loop {
+        let impl_ = impls.next()?;
+        // Each candidate is checked in a snapshot so failed unifications don't
+        // leave inference-variable bindings behind.
+        let r = table.run_in_snapshot(|table| {
+            let impl_data = db.impl_data(impl_);
+            let substs =
+                TyBuilder::subst_for_def(db, impl_).fill_with_inference_vars(table).build();
+            let impl_ty = db.impl_self_ty(impl_).substitute(Interner, &substs);
+
+            table
+                .unify(self_ty, &impl_ty)
+                .then(|| {
+                    let wh_goals =
+                        crate::chalk_db::convert_where_clauses(db, impl_.into(), &substs)
+                            .into_iter()
+                            .map(|b| b.cast(Interner));
+
+                    let goal = crate::Goal::all(Interner, wh_goals);
+
+                    table.try_obligation(goal).map(|_| impl_data)
+                })
+                .flatten()
+        });
+        if r.is_some() {
+            break r;
+        }
+    }
+}
+
+/// Iterates candidates for a path lookup (`Vec::new`-style); a thin wrapper
+/// over `iterate_method_candidates_dyn` with `LookupMode::Path`.
+pub fn iterate_path_candidates(
+    ty: &Canonical<Ty>,
+    db: &dyn HirDatabase,
+    env: Arc<TraitEnvironment>,
+    traits_in_scope: &FxHashSet<TraitId>,
+    visible_from_module: VisibleFromModule,
+    name: Option<&Name>,
+    callback: &mut dyn FnMut(AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+    iterate_method_candidates_dyn(
+        ty,
+        db,
+        env,
+        traits_in_scope,
+        visible_from_module,
+        name,
+        LookupMode::Path,
+        // the adjustments are not relevant for path lookup
+        &mut |_, id| callback(id),
+    )
+}
+
+/// Core entry point of candidate iteration: dispatches on `mode` to either the
+/// autoderef-based method-call search or the plain path search.
+pub fn iterate_method_candidates_dyn(
+    ty: &Canonical<Ty>,
+    db: &dyn HirDatabase,
+    env: Arc<TraitEnvironment>,
+    traits_in_scope: &FxHashSet<TraitId>,
+    visible_from_module: VisibleFromModule,
+    name: Option<&Name>,
+    mode: LookupMode,
+    callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+    match mode {
+        LookupMode::MethodCall => {
+            // For method calls, rust first does any number of autoderef, and
+            // then one autoref (i.e. when the method takes &self or &mut self).
+            // Note that when we've got a receiver like &S, even if the method
+            // we find in the end takes &self, we still do the autoderef step
+            // (just as rustc does an autoderef and then autoref again).
+
+            // We have to be careful about the order we're looking at candidates
+            // in here. Consider the case where we're resolving `x.clone()`
+            // where `x: &Vec<_>`. This resolves to the clone method with self
+            // type `Vec<_>`, *not* `&_`. I.e. we need to consider methods where
+            // the receiver type exactly matches before cases where we have to
+            // do autoref. But in the autoderef steps, the `&_` self type comes
+            // up *before* the `Vec<_>` self type.
+            //
+            // On the other hand, we don't want to just pick any by-value method
+            // before any by-autoref method; it's just that we need to consider
+            // the methods by autoderef order of *receiver types*, not *self
+            // types*.
+
+            let mut table = InferenceTable::new(db, env.clone());
+            let ty = table.instantiate_canonical(ty.clone());
+            let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty);
+
+            // Try each receiver type in autoderef order, threading through the
+            // adjustments needed to reach it.
+            let result = deref_chain.into_iter().zip(adj).try_for_each(|(receiver_ty, adj)| {
+                iterate_method_candidates_with_autoref(
+                    &receiver_ty,
+                    adj,
+                    db,
+                    env.clone(),
+                    traits_in_scope,
+                    visible_from_module,
+                    name,
+                    callback,
+                )
+            });
+            result
+        }
+        LookupMode::Path => {
+            // No autoderef for path lookups
+            iterate_method_candidates_for_self_ty(
+                ty,
+                db,
+                env,
+                traits_in_scope,
+                visible_from_module,
+                name,
+                callback,
+            )
+        }
+    }
+}
+
+/// For a single receiver type in the deref chain, tries candidates for the
+/// type itself, then with an `&` autoref, then with `&mut` — in that order.
+fn iterate_method_candidates_with_autoref(
+    receiver_ty: &Canonical<Ty>,
+    first_adjustment: ReceiverAdjustments,
+    db: &dyn HirDatabase,
+    env: Arc<TraitEnvironment>,
+    traits_in_scope: &FxHashSet<TraitId>,
+    visible_from_module: VisibleFromModule,
+    name: Option<&Name>,
+    mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+    if receiver_ty.value.is_general_var(Interner, &receiver_ty.binders) {
+        // don't try to resolve methods on unknown types
+        return ControlFlow::Continue(());
+    }
+
+    // 1. The receiver type as-is.
+    iterate_method_candidates_by_receiver(
+        receiver_ty,
+        first_adjustment.clone(),
+        db,
+        env.clone(),
+        traits_in_scope,
+        visible_from_module,
+        name,
+        &mut callback,
+    )?;
+
+    // 2. `&receiver_ty`.
+    let refed = Canonical {
+        value: TyKind::Ref(Mutability::Not, static_lifetime(), receiver_ty.value.clone())
+            .intern(Interner),
+        binders: receiver_ty.binders.clone(),
+    };
+
+    iterate_method_candidates_by_receiver(
+        &refed,
+        first_adjustment.with_autoref(Mutability::Not),
+        db,
+        env.clone(),
+        traits_in_scope,
+        visible_from_module,
+        name,
+        &mut callback,
+    )?;
+
+    // 3. `&mut receiver_ty`.
+    let ref_muted = Canonical {
+        value: TyKind::Ref(Mutability::Mut, static_lifetime(), receiver_ty.value.clone())
+            .intern(Interner),
+        binders: receiver_ty.binders.clone(),
+    };
+
+    iterate_method_candidates_by_receiver(
+        &ref_muted,
+        first_adjustment.with_autoref(Mutability::Mut),
+        db,
+        env,
+        traits_in_scope,
+        visible_from_module,
+        name,
+        &mut callback,
+    )
+}
+
+/// Looks up methods taking exactly `receiver_ty` as receiver ("self") type.
+/// Inherent candidates are reported for the whole deref chain of
+/// `receiver_ty` before any trait candidates, so inherent methods win.
+fn iterate_method_candidates_by_receiver(
+    receiver_ty: &Canonical<Ty>,
+    receiver_adjustments: ReceiverAdjustments,
+    db: &dyn HirDatabase,
+    env: Arc<TraitEnvironment>,
+    traits_in_scope: &FxHashSet<TraitId>,
+    visible_from_module: VisibleFromModule,
+    name: Option<&Name>,
+    mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+    let mut table = InferenceTable::new(db, env);
+    let receiver_ty = table.instantiate_canonical(receiver_ty.clone());
+    let snapshot = table.snapshot();
+    // We're looking for methods with *receiver* type receiver_ty. These could
+    // be found in any of the derefs of receiver_ty, so we have to go through
+    // that.
+    let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone());
+    while let Some((self_ty, _)) = autoderef.next() {
+        iterate_inherent_methods(
+            &self_ty,
+            &mut autoderef.table,
+            name,
+            Some(&receiver_ty),
+            Some(receiver_adjustments.clone()),
+            visible_from_module,
+            &mut callback,
+        )?
+    }
+
+    // Undo any unification the inherent pass did before scanning traits.
+    table.rollback_to(snapshot);
+
+    let mut autoderef = autoderef::Autoderef::new(&mut table, receiver_ty.clone());
+    while let Some((self_ty, _)) = autoderef.next() {
+        iterate_trait_method_candidates(
+            &self_ty,
+            &mut autoderef.table,
+            traits_in_scope,
+            name,
+            Some(&receiver_ty),
+            Some(receiver_adjustments.clone()),
+            &mut callback,
+        )?
+    }
+
+    ControlFlow::Continue(())
+}
+
+/// Candidate lookup for path-style access (`Ty::item`): no autoderef and no
+/// receiver type check — inherent items first, then trait items on `self_ty`.
+fn iterate_method_candidates_for_self_ty(
+    self_ty: &Canonical<Ty>,
+    db: &dyn HirDatabase,
+    env: Arc<TraitEnvironment>,
+    traits_in_scope: &FxHashSet<TraitId>,
+    visible_from_module: VisibleFromModule,
+    name: Option<&Name>,
+    mut callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+    let mut table = InferenceTable::new(db, env);
+    let self_ty = table.instantiate_canonical(self_ty.clone());
+    iterate_inherent_methods(
+        &self_ty,
+        &mut table,
+        name,
+        None,
+        None,
+        visible_from_module,
+        &mut callback,
+    )?;
+    iterate_trait_method_candidates(
+        &self_ty,
+        &mut table,
+        traits_in_scope,
+        name,
+        None,
+        None,
+        callback,
+    )
+}
+
+/// Reports associated-item candidates from traits: those in
+/// `traits_in_scope`, plus — for `dyn Trait` receivers — the trait object's
+/// own (super)traits, plus — for placeholders — traits from `T: Trait`
+/// bounds in the environment; the latter two don't need to be in scope.
+fn iterate_trait_method_candidates(
+    self_ty: &Ty,
+    table: &mut InferenceTable<'_>,
+    traits_in_scope: &FxHashSet<TraitId>,
+    name: Option<&Name>,
+    receiver_ty: Option<&Ty>,
+    receiver_adjustments: Option<ReceiverAdjustments>,
+    callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+    let db = table.db;
+    let env = table.trait_env.clone();
+    let self_is_array = matches!(self_ty.kind(Interner), chalk_ir::TyKind::Array(..));
+    // if ty is `dyn Trait`, the trait doesn't need to be in scope
+    let inherent_trait =
+        self_ty.dyn_trait().into_iter().flat_map(|t| all_super_traits(db.upcast(), t));
+    let env_traits = matches!(self_ty.kind(Interner), TyKind::Placeholder(_))
+        // if we have `T: Trait` in the param env, the trait doesn't need to be in scope
+        .then(|| {
+            env.traits_in_scope_from_clauses(self_ty.clone())
+                .flat_map(|t| all_super_traits(db.upcast(), t))
+        })
+        .into_iter()
+        .flatten();
+    let traits = inherent_trait.chain(env_traits).chain(traits_in_scope.iter().copied());
+
+    // Canonicalized once, reused for every `implements` goal below.
+    let canonical_self_ty = table.canonicalize(self_ty.clone()).value;
+
+    'traits: for t in traits {
+        let data = db.trait_data(t);
+
+        // Traits annotated with `#[rustc_skip_array_during_method_dispatch]` are skipped during
+        // method resolution, if the receiver is an array, and we're compiling for editions before
+        // 2021.
+        // This is to make `[a].into_iter()` not break code with the new `IntoIterator` impl for
+        // arrays.
+        if data.skip_array_during_method_dispatch && self_is_array {
+            // FIXME: this should really be using the edition of the method name's span, in case it
+            // comes from a macro
+            if db.crate_graph()[env.krate].edition < Edition::Edition2021 {
+                continue;
+            }
+        }
+
+        // we'll be lazy about checking whether the type implements the
+        // trait, but if we find out it doesn't, we'll skip the rest of the
+        // iteration
+        let mut known_implemented = false;
+        for &(_, item) in data.items.iter() {
+            // Don't pass a `visible_from_module` down to `is_valid_candidate`,
+            // since only inherent methods should be included into visibility checking.
+            if !is_valid_candidate(table, name, receiver_ty, item, self_ty, None) {
+                continue;
+            }
+            if !known_implemented {
+                let goal = generic_implements_goal(db, env.clone(), t, &canonical_self_ty);
+                if db.trait_solve(env.krate, goal.cast(Interner)).is_none() {
+                    continue 'traits;
+                }
+            }
+            known_implemented = true;
+            callback(receiver_adjustments.clone().unwrap_or_default(), item)?;
+        }
+    }
+    ControlFlow::Continue(())
+}
+
+/// Reports inherent (impl-block) candidates for `self_ty`: impls in every
+/// crate that may define inherent impls for the type, plus — when the
+/// lookup originates inside a block scope — impls local to that block.
+fn iterate_inherent_methods(
+    self_ty: &Ty,
+    table: &mut InferenceTable<'_>,
+    name: Option<&Name>,
+    receiver_ty: Option<&Ty>,
+    receiver_adjustments: Option<ReceiverAdjustments>,
+    visible_from_module: VisibleFromModule,
+    callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+) -> ControlFlow<()> {
+    let db = table.db;
+    let env = table.trait_env.clone();
+    // No defining crates means the type can't have inherent impls at all.
+    let def_crates = match def_crates(db, self_ty, env.krate) {
+        Some(k) => k,
+        None => return ControlFlow::Continue(()),
+    };
+
+    let (module, block) = match visible_from_module {
+        VisibleFromModule::Filter(module) => (Some(module), module.containing_block()),
+        VisibleFromModule::IncludeBlock(block) => (None, Some(block)),
+        VisibleFromModule::None => (None, None),
+    };
+
+    // Block-local impls (e.g. inside a function body) are scanned first.
+    if let Some(block_id) = block {
+        if let Some(impls) = db.inherent_impls_in_block(block_id) {
+            impls_for_self_ty(
+                &impls,
+                self_ty,
+                table,
+                name,
+                receiver_ty,
+                receiver_adjustments.clone(),
+                module,
+                callback,
+            )?;
+        }
+    }
+
+    for krate in def_crates {
+        let impls = db.inherent_impls_in_crate(krate);
+        impls_for_self_ty(
+            &impls,
+            self_ty,
+            table,
+            name,
+            receiver_ty,
+            receiver_adjustments.clone(),
+            module,
+            callback,
+        )?;
+    }
+    return ControlFlow::Continue(());
+
+    /// Runs `callback` on every valid candidate in `impls` whose `Self`
+    /// type matches `self_ty`.
+    fn impls_for_self_ty(
+        impls: &InherentImpls,
+        self_ty: &Ty,
+        table: &mut InferenceTable<'_>,
+        name: Option<&Name>,
+        receiver_ty: Option<&Ty>,
+        receiver_adjustments: Option<ReceiverAdjustments>,
+        visible_from_module: Option<ModuleId>,
+        callback: &mut dyn FnMut(ReceiverAdjustments, AssocItemId) -> ControlFlow<()>,
+    ) -> ControlFlow<()> {
+        let db = table.db;
+        let impls_for_self_ty = impls.for_self_ty(self_ty);
+        for &impl_def in impls_for_self_ty {
+            for &item in &db.impl_data(impl_def).items {
+                if !is_valid_candidate(table, name, receiver_ty, item, self_ty, visible_from_module)
+                {
+                    continue;
+                }
+                callback(receiver_adjustments.clone().unwrap_or_default(), item)?;
+            }
+        }
+        ControlFlow::Continue(())
+    }
+}
+
+/// Returns the receiver type for the index trait call.
+///
+/// Walks the autoderef chain of `ty` and returns the adjustments of the
+/// first step whose type implements `index_trait`, or `None` if no step
+/// does.
+pub fn resolve_indexing_op(
+    db: &dyn HirDatabase,
+    env: Arc<TraitEnvironment>,
+    ty: Canonical<Ty>,
+    index_trait: TraitId,
+) -> Option<ReceiverAdjustments> {
+    let mut table = InferenceTable::new(db, env.clone());
+    let ty = table.instantiate_canonical(ty);
+    let (deref_chain, adj) = autoderef_method_receiver(&mut table, ty);
+    for (ty, adj) in deref_chain.into_iter().zip(adj) {
+        let goal = generic_implements_goal(db, env.clone(), index_trait, &ty);
+        if db.trait_solve(env.krate, goal.cast(Interner)).is_some() {
+            return Some(adj);
+        }
+    }
+    None
+}
+
+/// Checks whether `item` is a valid lookup candidate: the name matches, it
+/// is visible from `visible_from_module` (when given), and its `Self` type
+/// — plus, for method calls, its receiver type — unifies with the types
+/// being looked up. Only functions and consts can be candidates.
+fn is_valid_candidate(
+    table: &mut InferenceTable<'_>,
+    name: Option<&Name>,
+    receiver_ty: Option<&Ty>,
+    item: AssocItemId,
+    self_ty: &Ty,
+    visible_from_module: Option<ModuleId>,
+) -> bool {
+    // Early-out helper: bail with `false` as soon as any check fails.
+    macro_rules! check_that {
+        ($cond:expr) => {
+            if !$cond {
+                return false;
+            }
+        };
+    }
+
+    let db = table.db;
+    match item {
+        AssocItemId::FunctionId(m) => {
+            let data = db.function_data(m);
+
+            check_that!(name.map_or(true, |n| n == &data.name));
+            check_that!(visible_from_module.map_or(true, |from_module| {
+                let v = db.function_visibility(m).is_visible_from(db.upcast(), from_module);
+                if !v {
+                    cov_mark::hit!(autoderef_candidate_not_visible);
+                }
+                v
+            }));
+
+            // Unify inside a snapshot so a failed candidate leaves no trace
+            // in the inference table.
+            table.run_in_snapshot(|table| {
+                let subst = TyBuilder::subst_for_def(db, m).fill_with_inference_vars(table).build();
+                let expect_self_ty = match m.lookup(db.upcast()).container {
+                    ItemContainerId::TraitId(_) => {
+                        subst.at(Interner, 0).assert_ty_ref(Interner).clone()
+                    }
+                    ItemContainerId::ImplId(impl_id) => {
+                        subst.apply(db.impl_self_ty(impl_id).skip_binders().clone(), Interner)
+                    }
+                    // We should only get called for associated items (impl/trait)
+                    ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => {
+                        unreachable!()
+                    }
+                };
+                check_that!(table.unify(&expect_self_ty, self_ty));
+                if let Some(receiver_ty) = receiver_ty {
+                    // Method-call lookup: the function must take `self` and
+                    // its first parameter must unify with the receiver.
+                    check_that!(data.has_self_param());
+
+                    let sig = db.callable_item_signature(m.into());
+                    let expected_receiver =
+                        sig.map(|s| s.params()[0].clone()).substitute(Interner, &subst);
+
+                    check_that!(table.unify(&receiver_ty, &expected_receiver));
+                }
+                true
+            })
+        }
+        AssocItemId::ConstId(c) => {
+            let data = db.const_data(c);
+            // Consts can never be method-call receivers.
+            check_that!(receiver_ty.is_none());
+
+            check_that!(name.map_or(true, |n| data.name.as_ref() == Some(n)));
+            check_that!(visible_from_module.map_or(true, |from_module| {
+                let v = db.const_visibility(c).is_visible_from(db.upcast(), from_module);
+                if !v {
+                    cov_mark::hit!(const_candidate_not_visible);
+                }
+                v
+            }));
+            if let ItemContainerId::ImplId(impl_id) = c.lookup(db.upcast()).container {
+                let self_ty_matches = table.run_in_snapshot(|table| {
+                    let subst =
+                        TyBuilder::subst_for_def(db, c).fill_with_inference_vars(table).build();
+                    let expected_self_ty =
+                        subst.apply(db.impl_self_ty(impl_id).skip_binders().clone(), Interner);
+                    table.unify(&expected_self_ty, &self_ty)
+                });
+                if !self_ty_matches {
+                    cov_mark::hit!(const_candidate_self_type_mismatch);
+                    return false;
+                }
+            }
+            true
+        }
+        // Type aliases (and anything else) are never lookup candidates.
+        _ => false,
+    }
+}
+
+/// Checks whether `ty` implements `trait_` (with fresh variables for the
+/// trait's remaining parameters), accepting any solution the trait solver
+/// finds, ambiguous or not.
+pub fn implements_trait(
+    ty: &Canonical<Ty>,
+    db: &dyn HirDatabase,
+    env: Arc<TraitEnvironment>,
+    trait_: TraitId,
+) -> bool {
+    let krate = env.krate;
+    let goal = generic_implements_goal(db, env, trait_, ty);
+    db.trait_solve(krate, goal.cast(Interner)).is_some()
+}
+
+/// Like `implements_trait`, but only accepts a unique (unambiguous)
+/// solution from the trait solver.
+pub fn implements_trait_unique(
+    ty: &Canonical<Ty>,
+    db: &dyn HirDatabase,
+    env: Arc<TraitEnvironment>,
+    trait_: TraitId,
+) -> bool {
+    let krate = env.krate;
+    let goal = generic_implements_goal(db, env, trait_, ty);
+    matches!(db.trait_solve(krate, goal.cast(Interner)), Some(crate::Solution::Unique(_)))
+}
+
+/// This creates Substs for a trait with the given Self type and type variables
+/// for all other parameters, to query Chalk with it.
+fn generic_implements_goal(
+    db: &dyn HirDatabase,
+    env: Arc<TraitEnvironment>,
+    trait_: TraitId,
+    self_ty: &Canonical<Ty>,
+) -> Canonical<InEnvironment<super::DomainGoal>> {
+    // Start from the canonical binders of `self_ty` …
+    let mut kinds = self_ty.binders.interned().to_vec();
+    let trait_ref = TyBuilder::trait_ref(db, trait_)
+        .push(self_ty.value.clone())
+        .fill_with_bound_vars(DebruijnIndex::INNERMOST, kinds.len())
+        .build();
+    // … then add one variable kind per remaining trait parameter
+    // (`skip(1)` skips `Self`, which is not a variable here).
+    kinds.extend(trait_ref.substitution.iter(Interner).skip(1).map(|x| {
+        let vk = match x.data(Interner) {
+            chalk_ir::GenericArgData::Ty(_) => {
+                chalk_ir::VariableKind::Ty(chalk_ir::TyVariableKind::General)
+            }
+            chalk_ir::GenericArgData::Lifetime(_) => chalk_ir::VariableKind::Lifetime,
+            chalk_ir::GenericArgData::Const(c) => {
+                chalk_ir::VariableKind::Const(c.data(Interner).ty.clone())
+            }
+        };
+        chalk_ir::WithKind::new(vk, UniverseIndex::ROOT)
+    }));
+    let obligation = trait_ref.cast(Interner);
+    Canonical {
+        binders: CanonicalVarKinds::from_iter(Interner, kinds),
+        value: InEnvironment::new(&env.env, obligation),
+    }
+}
+
+/// Computes the autoderef chain of `ty` as canonicalized types, paired with
+/// the `ReceiverAdjustments` needed to reach each step. A final step may
+/// additionally unsize `[T; N]` to `[T]`.
+fn autoderef_method_receiver(
+    table: &mut InferenceTable<'_>,
+    ty: Ty,
+) -> (Vec<Canonical<Ty>>, Vec<ReceiverAdjustments>) {
+    let (mut deref_chain, mut adjustments): (Vec<_>, Vec<_>) = (Vec::new(), Vec::new());
+    let mut autoderef = autoderef::Autoderef::new(table, ty);
+    while let Some((ty, derefs)) = autoderef.next() {
+        deref_chain.push(autoderef.table.canonicalize(ty).value);
+        adjustments.push(ReceiverAdjustments {
+            autoref: None,
+            autoderefs: derefs,
+            unsize_array: false,
+        });
+    }
+    // As a last step, we can do array unsizing (that's the only unsizing that rustc does for method receivers!)
+    if let (Some((TyKind::Array(parameters, _), binders)), Some(adj)) = (
+        deref_chain.last().map(|ty| (ty.value.kind(Interner), ty.binders.clone())),
+        adjustments.last().cloned(),
+    ) {
+        let unsized_ty = TyKind::Slice(parameters.clone()).intern(Interner);
+        deref_chain.push(Canonical { value: unsized_ty, binders });
+        adjustments.push(ReceiverAdjustments { unsize_array: true, ..adj });
+    }
+    (deref_chain, adjustments)
+}
--- /dev/null
+//! See `Semantics`.
+
+mod source_to_def;
+
+use std::{cell::RefCell, fmt, iter, ops};
+
+use base_db::{FileId, FileRange};
+use hir_def::{
+ body, macro_id_to_def_id,
+ resolver::{self, HasResolver, Resolver, TypeNs},
+ type_ref::Mutability,
+ AsMacroCall, FunctionId, MacroId, TraitId, VariantId,
+};
+use hir_expand::{
+ db::AstDatabase,
+ name::{known, AsName},
+ ExpansionInfo, MacroCallId,
+};
+use itertools::Itertools;
+use rustc_hash::{FxHashMap, FxHashSet};
+use smallvec::{smallvec, SmallVec};
+use syntax::{
+ algo::skip_trivia_token,
+ ast::{self, HasAttrs as _, HasGenericParams, HasLoopBody},
+ match_ast, AstNode, Direction, SyntaxKind, SyntaxNode, SyntaxNodePtr, SyntaxToken, TextSize,
+};
+
+use crate::{
+ db::HirDatabase,
+ semantics::source_to_def::{ChildContainer, SourceToDefCache, SourceToDefCtx},
+ source_analyzer::{resolve_hir_path, SourceAnalyzer},
+ Access, BindingMode, BuiltinAttr, Callable, ConstParam, Crate, DeriveHelper, Field, Function,
+ HasSource, HirFileId, Impl, InFile, Label, LifetimeParam, Local, Macro, Module, ModuleDef,
+ Name, Path, ScopeDef, ToolModule, Trait, Type, TypeAlias, TypeParam, VariantDef,
+};
+
+/// The resolution of an `ast::Path` (see `Semantics::resolve_path`).
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum PathResolution {
+    /// An item
+    Def(ModuleDef),
+    /// A local binding (only value namespace)
+    Local(Local),
+    /// A type parameter
+    TypeParam(TypeParam),
+    /// A const parameter
+    ConstParam(ConstParam),
+    /// The `Self` type of an impl block
+    SelfType(Impl),
+    /// A built-in attribute
+    BuiltinAttr(BuiltinAttr),
+    /// A tool module (the `tool` part of a `#[tool::attr]` path)
+    ToolModule(ToolModule),
+    /// A derive-macro helper attribute
+    DeriveHelper(DeriveHelper),
+}
+
+impl PathResolution {
+    /// Returns the type-namespace resolution this corresponds to, if any;
+    /// value-namespace and attribute-like resolutions map to `None`.
+    pub(crate) fn in_type_ns(&self) -> Option<TypeNs> {
+        match self {
+            PathResolution::Def(ModuleDef::Adt(adt)) => Some(TypeNs::AdtId((*adt).into())),
+            PathResolution::Def(ModuleDef::BuiltinType(builtin)) => {
+                Some(TypeNs::BuiltinType((*builtin).into()))
+            }
+            PathResolution::Def(
+                ModuleDef::Const(_)
+                | ModuleDef::Variant(_)
+                | ModuleDef::Macro(_)
+                | ModuleDef::Function(_)
+                | ModuleDef::Module(_)
+                | ModuleDef::Static(_)
+                | ModuleDef::Trait(_),
+            ) => None,
+            PathResolution::Def(ModuleDef::TypeAlias(alias)) => {
+                Some(TypeNs::TypeAliasId((*alias).into()))
+            }
+            PathResolution::BuiltinAttr(_)
+            | PathResolution::ToolModule(_)
+            | PathResolution::Local(_)
+            | PathResolution::DeriveHelper(_)
+            | PathResolution::ConstParam(_) => None,
+            PathResolution::TypeParam(param) => Some(TypeNs::GenericParam((*param).into())),
+            PathResolution::SelfType(impl_def) => Some(TypeNs::SelfType((*impl_def).into())),
+        }
+    }
+}
+
+/// The original and (optionally) adjusted type of an expression or pattern.
+#[derive(Debug)]
+pub struct TypeInfo {
+    /// The original type of the expression or pattern.
+    pub original: Type,
+    /// The adjusted type, if an adjustment happened.
+    pub adjusted: Option<Type>,
+}
+
+impl TypeInfo {
+    /// The type before any adjustments.
+    pub fn original(self) -> Type {
+        self.original
+    }
+
+    /// Whether an adjusted type was recorded for this expression/pattern.
+    pub fn has_adjustment(&self) -> bool {
+        self.adjusted.is_some()
+    }
+
+    /// The adjusted type, or the original in case no adjustments occurred.
+    pub fn adjusted(self) -> Type {
+        self.adjusted.unwrap_or(self.original)
+    }
+}
+
+/// Primary API to get semantic information, like types, from syntax trees.
+pub struct Semantics<'db, DB> {
+    /// The database all queries are answered from.
+    pub db: &'db DB,
+    // Type-erased implementation the public methods delegate to.
+    imp: SemanticsImpl<'db>,
+}
+
+/// Type-erased (`dyn HirDatabase`) core of `Semantics`; owns the caches.
+pub struct SemanticsImpl<'db> {
+    pub db: &'db dyn HirDatabase,
+    // Source-to-definition mapping cache
+    s2d_cache: RefCell<SourceToDefCache>,
+    // HirFileId to its ExpansionInfo cache
+    expansion_info_cache: RefCell<FxHashMap<HirFileId, Option<ExpansionInfo>>>,
+    // Root node to HirFileId cache
+    cache: RefCell<FxHashMap<SyntaxNode, HirFileId>>,
+    // MacroCall to its expansion's HirFileId cache
+    macro_call_cache: RefCell<FxHashMap<InFile<ast::MacroCall>, HirFileId>>,
+}
+
+/// Opaque Debug impl: the caches and database are not worth printing.
+impl<DB> fmt::Debug for Semantics<'_, DB> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        f.write_str("Semantics { ... }")
+    }
+}
+
+// Thin forwarding layer: every method below delegates to `SemanticsImpl`.
+impl<'db, DB: HirDatabase> Semantics<'db, DB> {
+    pub fn new(db: &DB) -> Semantics<'_, DB> {
+        let impl_ = SemanticsImpl::new(db);
+        Semantics { db, imp: impl_ }
+    }
+
+    /// Parses `file_id` and caches the resulting syntax tree.
+    pub fn parse(&self, file_id: FileId) -> ast::SourceFile {
+        self.imp.parse(file_id)
+    }
+
+    pub fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+        self.imp.parse_or_expand(file_id)
+    }
+
+    /// Expands the given macro call, returning the expansion's syntax tree.
+    pub fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+        self.imp.expand(macro_call)
+    }
+
+    /// If `item` has an attribute macro attached to it, expands it.
+    pub fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
+        self.imp.expand_attr_macro(item)
+    }
+
+    pub fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
+        self.imp.expand_derive_as_pseudo_attr_macro(attr)
+    }
+
+    /// Resolves each derive in the given attribute to its macro,
+    /// `None` for entries that fail to resolve.
+    pub fn resolve_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<Option<Macro>>> {
+        self.imp.resolve_derive_macro(derive)
+    }
+
+    pub fn expand_derive_macro(&self, derive: &ast::Attr) -> Option<Vec<SyntaxNode>> {
+        self.imp.expand_derive_macro(derive)
+    }
+
+    pub fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
+        self.imp.is_attr_macro_call(item)
+    }
+
+    pub fn is_derive_annotated(&self, item: &ast::Adt) -> bool {
+        self.imp.is_derive_annotated(item)
+    }
+
+    /// Expands `actual_macro_call` with `speculative_args` substituted for
+    /// its real token tree, mapping `token_to_map` into the expansion.
+    pub fn speculative_expand(
+        &self,
+        actual_macro_call: &ast::MacroCall,
+        speculative_args: &ast::TokenTree,
+        token_to_map: SyntaxToken,
+    ) -> Option<(SyntaxNode, SyntaxToken)> {
+        self.imp.speculative_expand(actual_macro_call, speculative_args, token_to_map)
+    }
+
+    pub fn speculative_expand_attr_macro(
+        &self,
+        actual_macro_call: &ast::Item,
+        speculative_args: &ast::Item,
+        token_to_map: SyntaxToken,
+    ) -> Option<(SyntaxNode, SyntaxToken)> {
+        self.imp.speculative_expand_attr(actual_macro_call, speculative_args, token_to_map)
+    }
+
+    pub fn speculative_expand_derive_as_pseudo_attr_macro(
+        &self,
+        actual_macro_call: &ast::Attr,
+        speculative_args: &ast::Attr,
+        token_to_map: SyntaxToken,
+    ) -> Option<(SyntaxNode, SyntaxToken)> {
+        self.imp.speculative_expand_derive_as_pseudo_attr_macro(
+            actual_macro_call,
+            speculative_args,
+            token_to_map,
+        )
+    }
+
+    /// Descend the token into macrocalls to its first mapped counterpart.
+    pub fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+        self.imp.descend_into_macros_single(token)
+    }
+
+    /// Descend the token into macrocalls to all its mapped counterparts.
+    pub fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+        self.imp.descend_into_macros(token)
+    }
+
+    /// Descend the token into macrocalls to all its mapped counterparts that have the same text as the input token.
+    ///
+    /// Returns the original non descended token if none of the mapped counterparts have the same text.
+    pub fn descend_into_macros_with_same_text(
+        &self,
+        token: SyntaxToken,
+    ) -> SmallVec<[SyntaxToken; 1]> {
+        self.imp.descend_into_macros_with_same_text(token)
+    }
+
+    pub fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
+        self.imp.descend_into_macros_with_kind_preference(token)
+    }
+
+    /// Maps a node down by mapping its first and last token down.
+    pub fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
+        self.imp.descend_node_into_attributes(node)
+    }
+
+    /// Search for a definition's source and cache its syntax tree
+    pub fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
+    where
+        Def::Ast: AstNode,
+    {
+        self.imp.source(def)
+    }
+
+    /// The `HirFileId` of the file or macro expansion containing `syntax_node`.
+    pub fn hir_file_for(&self, syntax_node: &SyntaxNode) -> HirFileId {
+        self.imp.find_file(syntax_node).file_id
+    }
+
+    /// Attempts to map the node out of macro expanded files returning the original file range.
+    /// If upmapping is not possible, this will fall back to the range of the macro call of the
+    /// macro file the node resides in.
+    pub fn original_range(&self, node: &SyntaxNode) -> FileRange {
+        self.imp.original_range(node)
+    }
+
+    /// Attempts to map the node out of macro expanded files returning the original file range.
+    pub fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
+        self.imp.original_range_opt(node)
+    }
+
+    /// Attempts to map the node out of macro expanded files.
+    /// This only work for attribute expansions, as other ones do not have nodes as input.
+    pub fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
+        self.imp.original_ast_node(node)
+    }
+
+    pub fn diagnostics_display_range(&self, diagnostics: InFile<SyntaxNodePtr>) -> FileRange {
+        self.imp.diagnostics_display_range(diagnostics)
+    }
+
+    /// Ancestors (climbing macro expansions) of the token's parent node.
+    pub fn token_ancestors_with_macros(
+        &self,
+        token: SyntaxToken,
+    ) -> impl Iterator<Item = SyntaxNode> + '_ {
+        token.parent().into_iter().flat_map(move |it| self.ancestors_with_macros(it))
+    }
+
+    /// Iterates the ancestors of the given node, climbing up macro expansions while doing so.
+    pub fn ancestors_with_macros(&self, node: SyntaxNode) -> impl Iterator<Item = SyntaxNode> + '_ {
+        self.imp.ancestors_with_macros(node)
+    }
+
+    pub fn ancestors_at_offset_with_macros(
+        &self,
+        node: &SyntaxNode,
+        offset: TextSize,
+    ) -> impl Iterator<Item = SyntaxNode> + '_ {
+        self.imp.ancestors_at_offset_with_macros(node, offset)
+    }
+
+    /// Find an AstNode by offset inside SyntaxNode, if it is inside *Macrofile*,
+    /// search up until it is of the target AstNode type
+    pub fn find_node_at_offset_with_macros<N: AstNode>(
+        &self,
+        node: &SyntaxNode,
+        offset: TextSize,
+    ) -> Option<N> {
+        self.imp.ancestors_at_offset_with_macros(node, offset).find_map(N::cast)
+    }
+
+    /// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
+    /// descend it and find again
+    pub fn find_node_at_offset_with_descend<N: AstNode>(
+        &self,
+        node: &SyntaxNode,
+        offset: TextSize,
+    ) -> Option<N> {
+        self.imp.descend_node_at_offset(node, offset).flatten().find_map(N::cast)
+    }
+
+    /// Find an AstNode by offset inside SyntaxNode, if it is inside *MacroCall*,
+    /// descend it and find again
+    pub fn find_nodes_at_offset_with_descend<'slf, N: AstNode + 'slf>(
+        &'slf self,
+        node: &SyntaxNode,
+        offset: TextSize,
+    ) -> impl Iterator<Item = N> + 'slf {
+        self.imp.descend_node_at_offset(node, offset).filter_map(|mut it| it.find_map(N::cast))
+    }
+
+    pub fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
+        self.imp.resolve_lifetime_param(lifetime)
+    }
+
+    pub fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
+        self.imp.resolve_label(lifetime)
+    }
+
+    pub fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
+        self.imp.resolve_type(ty)
+    }
+
+    pub fn resolve_trait(&self, trait_: &ast::Path) -> Option<Trait> {
+        self.imp.resolve_trait(trait_)
+    }
+
+    // FIXME: Figure out a nice interface to inspect adjustments
+    pub fn is_implicit_reborrow(&self, expr: &ast::Expr) -> Option<Mutability> {
+        self.imp.is_implicit_reborrow(expr)
+    }
+
+    pub fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
+        self.imp.type_of_expr(expr)
+    }
+
+    pub fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
+        self.imp.type_of_pat(pat)
+    }
+
+    pub fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
+        self.imp.type_of_self(param)
+    }
+
+    pub fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
+        self.imp.pattern_adjustments(pat)
+    }
+
+    pub fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
+        self.imp.binding_mode_of_pat(pat)
+    }
+
+    pub fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<Function> {
+        self.imp.resolve_method_call(call).map(Function::from)
+    }
+
++    pub fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<Function> {
++        self.imp.resolve_await_to_poll(await_expr).map(Function::from)
++    }
++
++    pub fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<Function> {
++        self.imp.resolve_prefix_expr(prefix_expr).map(Function::from)
++    }
++
++    pub fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<Function> {
++        self.imp.resolve_index_expr(index_expr).map(Function::from)
++    }
++
++    pub fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<Function> {
++        self.imp.resolve_bin_expr(bin_expr).map(Function::from)
++    }
++
++    pub fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<Function> {
++        self.imp.resolve_try_expr(try_expr).map(Function::from)
++    }
++
+    pub fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
+        self.imp.resolve_method_call_as_callable(call)
+    }
+
+    pub fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
+        self.imp.resolve_field(field)
+    }
+
+    pub fn resolve_record_field(
+        &self,
+        field: &ast::RecordExprField,
+    ) -> Option<(Field, Option<Local>, Type)> {
+        self.imp.resolve_record_field(field)
+    }
+
+    pub fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
+        self.imp.resolve_record_pat_field(field)
+    }
+
+    pub fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
+        self.imp.resolve_macro_call(macro_call)
+    }
+
+    pub fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
+        self.imp.is_unsafe_macro_call(macro_call)
+    }
+
+    pub fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
+        self.imp.resolve_attr_macro_call(item)
+    }
+
+    pub fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
+        self.imp.resolve_path(path)
+    }
+
+    pub fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
+        self.imp.resolve_extern_crate(extern_crate)
+    }
+
+    pub fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantDef> {
+        self.imp.resolve_variant(record_lit).map(VariantDef::from)
+    }
+
+    pub fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
+        self.imp.resolve_bind_pat_to_const(pat)
+    }
+
+    pub fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
+        self.imp.record_literal_missing_fields(literal)
+    }
+
+    pub fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
+        self.imp.record_pattern_missing_fields(pattern)
+    }
+
+    /// Maps a syntax construct to its HIR definition.
+    pub fn to_def<T: ToDef>(&self, src: &T) -> Option<T::Def> {
+        let src = self.imp.find_file(src.syntax()).with_value(src).cloned();
+        T::to_def(&self.imp, src)
+    }
+
+    /// The first module defined by `file`, if any.
+    pub fn to_module_def(&self, file: FileId) -> Option<Module> {
+        self.imp.to_module_def(file).next()
+    }
+
+    pub fn to_module_defs(&self, file: FileId) -> impl Iterator<Item = Module> {
+        self.imp.to_module_def(file)
+    }
+
+    pub fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
+        self.imp.scope(node)
+    }
+
+    pub fn scope_at_offset(
+        &self,
+        node: &SyntaxNode,
+        offset: TextSize,
+    ) -> Option<SemanticsScope<'db>> {
+        self.imp.scope_at_offset(node, offset)
+    }
+
+    pub fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
+        self.imp.scope_for_def(def)
+    }
+
+    pub fn assert_contains_node(&self, node: &SyntaxNode) {
+        self.imp.assert_contains_node(node)
+    }
+
+    pub fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
+        self.imp.is_unsafe_method_call(method_call_expr)
+    }
+
+    pub fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
+        self.imp.is_unsafe_ref_expr(ref_expr)
+    }
+
+    pub fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
+        self.imp.is_unsafe_ident_pat(ident_pat)
+    }
+}
+
+impl<'db> SemanticsImpl<'db> {
+ fn new(db: &'db dyn HirDatabase) -> Self {
+ SemanticsImpl {
+ db,
+ s2d_cache: Default::default(),
+ cache: Default::default(),
+ expansion_info_cache: Default::default(),
+ macro_call_cache: Default::default(),
+ }
+ }
+
+ fn parse(&self, file_id: FileId) -> ast::SourceFile {
+ let tree = self.db.parse(file_id).tree();
+ self.cache(tree.syntax().clone(), file_id.into());
+ tree
+ }
+
+ fn parse_or_expand(&self, file_id: HirFileId) -> Option<SyntaxNode> {
+ let node = self.db.parse_or_expand(file_id)?;
+ self.cache(node.clone(), file_id);
+ Some(node)
+ }
+
+ fn expand(&self, macro_call: &ast::MacroCall) -> Option<SyntaxNode> {
+ let sa = self.analyze_no_infer(macro_call.syntax())?;
+ let file_id = sa.expand(self.db, InFile::new(sa.file_id, macro_call))?;
+ let node = self.parse_or_expand(file_id)?;
+ Some(node)
+ }
+
+ fn expand_attr_macro(&self, item: &ast::Item) -> Option<SyntaxNode> {
+ let src = self.wrap_node_infile(item.clone());
+ let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(src))?;
+ self.parse_or_expand(macro_call_id.as_file())
+ }
+
+ fn expand_derive_as_pseudo_attr_macro(&self, attr: &ast::Attr) -> Option<SyntaxNode> {
+ let src = self.wrap_node_infile(attr.clone());
+ let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
+ let call_id = self.with_ctx(|ctx| {
+ ctx.attr_to_derive_macro_call(src.with_value(&adt), src).map(|(_, it, _)| it)
+ })?;
+ self.parse_or_expand(call_id.as_file())
+ }
+
+ fn resolve_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<Option<Macro>>> {
+ let calls = self.derive_macro_calls(attr)?;
+ self.with_ctx(|ctx| {
+ Some(
+ calls
+ .into_iter()
+ .map(|call| {
+ macro_call_to_macro_id(ctx, self.db.upcast(), call?).map(|id| Macro { id })
+ })
+ .collect(),
+ )
+ })
+ }
+
+ fn expand_derive_macro(&self, attr: &ast::Attr) -> Option<Vec<SyntaxNode>> {
+ let res: Vec<_> = self
+ .derive_macro_calls(attr)?
+ .into_iter()
+ .flat_map(|call| {
+ let file_id = call?.as_file();
+ let node = self.db.parse_or_expand(file_id)?;
+ self.cache(node.clone(), file_id);
+ Some(node)
+ })
+ .collect();
+ Some(res)
+ }
+
+ fn derive_macro_calls(&self, attr: &ast::Attr) -> Option<Vec<Option<MacroCallId>>> {
+ let adt = attr.syntax().parent().and_then(ast::Adt::cast)?;
+ let file_id = self.find_file(adt.syntax()).file_id;
+ let adt = InFile::new(file_id, &adt);
+ let src = InFile::new(file_id, attr.clone());
+ self.with_ctx(|ctx| {
+ let (.., res) = ctx.attr_to_derive_macro_call(adt, src)?;
+ Some(res.to_vec())
+ })
+ }
+
+ fn is_derive_annotated(&self, adt: &ast::Adt) -> bool {
+ let file_id = self.find_file(adt.syntax()).file_id;
+ let adt = InFile::new(file_id, adt);
+ self.with_ctx(|ctx| ctx.has_derives(adt))
+ }
+
+ fn is_attr_macro_call(&self, item: &ast::Item) -> bool {
+ let file_id = self.find_file(item.syntax()).file_id;
+ let src = InFile::new(file_id, item.clone());
+ self.with_ctx(|ctx| ctx.item_to_macro_call(src).is_some())
+ }
+
+ /// Expands `actual_macro_call` with `speculative_args` substituted for its real
+ /// token tree, without committing anything to the database. Returns the
+ /// speculative expansion plus the token corresponding to `token_to_map`.
+ fn speculative_expand(
+ &self,
+ actual_macro_call: &ast::MacroCall,
+ speculative_args: &ast::TokenTree,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ let SourceAnalyzer { file_id, resolver, .. } =
+ self.analyze_no_infer(actual_macro_call.syntax())?;
+ let macro_call = InFile::new(file_id, actual_macro_call);
+ let krate = resolver.krate();
+ // Resolve the macro path through the resolver of the call site.
+ let macro_call_id = macro_call.as_call_id(self.db.upcast(), krate, |path| {
+ resolver
+ .resolve_path_as_macro(self.db.upcast(), &path)
+ .map(|it| macro_id_to_def_id(self.db.upcast(), it))
+ })?;
+ hir_expand::db::expand_speculative(
+ self.db.upcast(),
+ macro_call_id,
+ speculative_args.syntax(),
+ token_to_map,
+ )
+ }
+
+ /// Like `speculative_expand`, but for an attribute macro: expands the attribute
+ /// on `actual_macro_call` with `speculative_args` as the annotated item.
+ fn speculative_expand_attr(
+ &self,
+ actual_macro_call: &ast::Item,
+ speculative_args: &ast::Item,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ let macro_call = self.wrap_node_infile(actual_macro_call.clone());
+ let macro_call_id = self.with_ctx(|ctx| ctx.item_to_macro_call(macro_call))?;
+ hir_expand::db::expand_speculative(
+ self.db.upcast(),
+ macro_call_id,
+ speculative_args.syntax(),
+ token_to_map,
+ )
+ }
+
+ /// Like `speculative_expand`, but for the pseudo attribute-macro expansion of a
+ /// `derive` attribute on an ADT (see `expand_derive_as_pseudo_attr_macro`).
+ fn speculative_expand_derive_as_pseudo_attr_macro(
+ &self,
+ actual_macro_call: &ast::Attr,
+ speculative_args: &ast::Attr,
+ token_to_map: SyntaxToken,
+ ) -> Option<(SyntaxNode, SyntaxToken)> {
+ let attr = self.wrap_node_infile(actual_macro_call.clone());
+ // The derive attribute must be attached directly to an ADT.
+ let adt = actual_macro_call.syntax().parent().and_then(ast::Adt::cast)?;
+ let macro_call_id = self.with_ctx(|ctx| {
+ ctx.attr_to_derive_macro_call(attr.with_value(&adt), attr).map(|(_, it, _)| it)
+ })?;
+ hir_expand::db::expand_speculative(
+ self.db.upcast(),
+ macro_call_id,
+ speculative_args.syntax(),
+ token_to_map,
+ )
+ }
+
+ // This might not be the correct way to do this, but it works for now
+ /// Descends `node` into macro expansions (attribute/derive) and collects every
+ /// node of type `N` that `node` maps to. When the node spans more than one
+ /// token, its first and last tokens are descended independently and re-paired
+ /// by expansion file to recover the covering node.
+ fn descend_node_into_attributes<N: AstNode>(&self, node: N) -> SmallVec<[N; 1]> {
+ let mut res = smallvec![];
+ // Grab the first and last non-trivia tokens of `node`.
+ let tokens = (|| {
+ let first = skip_trivia_token(node.syntax().first_token()?, Direction::Next)?;
+ let last = skip_trivia_token(node.syntax().last_token()?, Direction::Prev)?;
+ Some((first, last))
+ })();
+ let (first, last) = match tokens {
+ Some(it) => it,
+ None => return res,
+ };
+
+ if first == last {
+ // Single-token node: any ancestor of the descended token that casts to
+ // `N` is a match.
+ self.descend_into_macros_impl(first, &mut |InFile { value, .. }| {
+ if let Some(node) = value.parent_ancestors().find_map(N::cast) {
+ res.push(node)
+ }
+ false
+ });
+ } else {
+ // Descend first and last token, then zip them to look for the node they belong to
+ let mut scratch: SmallVec<[_; 1]> = smallvec![];
+ self.descend_into_macros_impl(first, &mut |token| {
+ scratch.push(token);
+ false
+ });
+
+ let mut scratch = scratch.into_iter();
+ self.descend_into_macros_impl(
+ last,
+ &mut |InFile { value: last, file_id: last_fid }| {
+ if let Some(InFile { value: first, file_id: first_fid }) = scratch.next() {
+ // Only pair tokens that landed in the same expansion.
+ if first_fid == last_fid {
+ if let Some(p) = first.parent() {
+ let range = first.text_range().cover(last.text_range());
+ // Smallest node covering exactly the paired range.
+ let node = find_root(&p)
+ .covering_element(range)
+ .ancestors()
+ .take_while(|it| it.text_range() == range)
+ .find_map(N::cast);
+ if let Some(node) = node {
+ res.push(node);
+ }
+ }
+ }
+ }
+ false
+ },
+ );
+ }
+ res
+ }
+
+ /// Maps `token` down into all macro expansions it participates in, returning
+ /// every descended token (or the input itself when it maps nowhere).
+ fn descend_into_macros(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+ let mut res = smallvec![];
+ self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+ res.push(value);
+ false
+ });
+ res
+ }
+
+ /// Like `descend_into_macros`, but keeps only descended tokens whose text
+ /// equals the input's text; falls back to the input token if none match.
+ fn descend_into_macros_with_same_text(&self, token: SyntaxToken) -> SmallVec<[SyntaxToken; 1]> {
+ let text = token.text();
+ let mut res = smallvec![];
+ self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ if value.text() == text {
+ res.push(value);
+ }
+ false
+ });
+ if res.is_empty() {
+ res.push(token);
+ }
+ res
+ }
+
+ /// Descends `token` into macro expansions and returns the single descended
+ /// token whose parent kind matches the input's, preferring an exact kind match
+ /// but falling back to the first descendant (or the input itself) otherwise.
+ fn descend_into_macros_with_kind_preference(&self, token: SyntaxToken) -> SyntaxToken {
+ // For NAME/NAME_REF tokens the interesting kind is the grandparent's
+ // (e.g. the path or pattern the name occurs in).
+ let fetch_kind = |token: &SyntaxToken| match token.parent() {
+ Some(node) => match node.kind() {
+ kind @ (SyntaxKind::NAME | SyntaxKind::NAME_REF) => {
+ node.parent().map_or(kind, |it| it.kind())
+ }
+ _ => token.kind(),
+ },
+ None => token.kind(),
+ };
+ let preferred_kind = fetch_kind(&token);
+ let mut res = None;
+ self.descend_into_macros_impl(token.clone(), &mut |InFile { value, .. }| {
+ if fetch_kind(&value) == preferred_kind {
+ // Exact kind match: take it and stop descending.
+ res = Some(value);
+ true
+ } else {
+ // Remember the first non-matching descendant as a fallback.
+ res.get_or_insert(value);
+ false
+ }
+ });
+ res.unwrap_or(token)
+ }
+
+ /// Descends `token` into macro expansions and returns only the first mapped
+ /// token (the callback returns `true` to stop after one hit); returns the
+ /// input token unchanged when it maps nowhere.
+ fn descend_into_macros_single(&self, token: SyntaxToken) -> SyntaxToken {
+ let mut res = token.clone();
+ self.descend_into_macros_impl(token, &mut |InFile { value, .. }| {
+ res = value;
+ true
+ });
+ res
+ }
+
+ /// Core worklist loop behind the `descend_into_macros*` family: repeatedly maps
+ /// `token` down into attribute, function-like, derive, and derive-helper
+ /// expansions, invoking `f` on every token that could not be mapped further.
+ /// `f` returning `true` aborts the traversal.
+ fn descend_into_macros_impl(
+ &self,
+ token: SyntaxToken,
+ f: &mut dyn FnMut(InFile<SyntaxToken>) -> bool,
+ ) {
+ let _p = profile::span("descend_into_macros");
+ let parent = match token.parent() {
+ Some(it) => it,
+ None => return,
+ };
+ let sa = match self.analyze_no_infer(&parent) {
+ Some(it) => it,
+ None => return,
+ };
+ let def_map = sa.resolver.def_map();
+
+ // Worklist of tokens still to be mapped down; seeded with the input.
+ let mut stack: SmallVec<[_; 4]> = smallvec![InFile::new(sa.file_id, token)];
+ let mut cache = self.expansion_info_cache.borrow_mut();
+ let mut mcache = self.macro_call_cache.borrow_mut();
+
+ // Maps `token` down into the expansion of `macro_file`, pushing any mapped
+ // tokens back onto the worklist. Returns `Some(())` iff a mapping was found.
+ let mut process_expansion_for_token =
+ |stack: &mut SmallVec<_>, macro_file, item, token: InFile<&_>| {
+ let expansion_info = cache
+ .entry(macro_file)
+ .or_insert_with(|| macro_file.expansion_info(self.db.upcast()))
+ .as_ref()?;
+
+ {
+ let InFile { file_id, value } = expansion_info.expanded();
+ self.cache(value, file_id);
+ }
+
+ let mapped_tokens = expansion_info.map_token_down(self.db.upcast(), item, token)?;
+ let len = stack.len();
+
+ // requeue the tokens we got from mapping our current token down
+ stack.extend(mapped_tokens);
+ // if the length changed we have found a mapping for the token
+ (stack.len() != len).then(|| ())
+ };
+
+ // Remap the next token in the queue into a macro call it's in, if it is not being remapped
+ // either due to not being in a macro-call or because it's unused push it into the result vec,
+ // otherwise push the remapped tokens back into the queue as they can potentially be remapped again.
+ while let Some(token) = stack.pop() {
+ self.db.unwind_if_cancelled();
+ let was_not_remapped = (|| {
+ // First expand into attribute invocations
+ let containing_attribute_macro_call = self.with_ctx(|ctx| {
+ token.value.parent_ancestors().filter_map(ast::Item::cast).find_map(|item| {
+ if item.attrs().next().is_none() {
+ // Don't force populate the dyn cache for items that don't have an attribute anyways
+ return None;
+ }
+ Some((ctx.item_to_macro_call(token.with_value(item.clone()))?, item))
+ })
+ });
+ if let Some((call_id, item)) = containing_attribute_macro_call {
+ let file_id = call_id.as_file();
+ return process_expansion_for_token(
+ &mut stack,
+ file_id,
+ Some(item),
+ token.as_ref(),
+ );
+ }
+
+ // Then check for token trees, that means we are either in a function-like macro or
+ // secondary attribute inputs
+ let tt = token.value.parent_ancestors().map_while(ast::TokenTree::cast).last()?;
+ let parent = tt.syntax().parent()?;
+
+ // Delimiter tokens themselves never map into an expansion.
+ if tt.left_delimiter_token().map_or(false, |it| it == token.value) {
+ return None;
+ }
+ if tt.right_delimiter_token().map_or(false, |it| it == token.value) {
+ return None;
+ }
+
+ if let Some(macro_call) = ast::MacroCall::cast(parent.clone()) {
+ let mcall = token.with_value(macro_call);
+ // Memoize expansion lookups per macro call.
+ let file_id = match mcache.get(&mcall) {
+ Some(&it) => it,
+ None => {
+ let it = sa.expand(self.db, mcall.as_ref())?;
+ mcache.insert(mcall, it);
+ it
+ }
+ };
+ process_expansion_for_token(&mut stack, file_id, None, token.as_ref())
+ } else if let Some(meta) = ast::Meta::cast(parent.clone()) {
+ // attribute we failed expansion for earlier, this might be a derive invocation
+ // or derive helper attribute
+ let attr = meta.parent_attr()?;
+
+ let adt = if let Some(adt) = attr.syntax().parent().and_then(ast::Adt::cast) {
+ // this might be a derive, or a derive helper on an ADT
+ let derive_call = self.with_ctx(|ctx| {
+ // so try downmapping the token into the pseudo derive expansion
+ // see [hir_expand::builtin_attr_macro] for how the pseudo derive expansion works
+ ctx.attr_to_derive_macro_call(
+ token.with_value(&adt),
+ token.with_value(attr.clone()),
+ )
+ .map(|(_, call_id, _)| call_id)
+ });
+
+ match derive_call {
+ Some(call_id) => {
+ // resolved to a derive
+ let file_id = call_id.as_file();
+ return process_expansion_for_token(
+ &mut stack,
+ file_id,
+ Some(adt.into()),
+ token.as_ref(),
+ );
+ }
+ None => Some(adt),
+ }
+ } else {
+ // Otherwise this could be a derive helper on a variant or field
+ if let Some(field) = attr.syntax().parent().and_then(ast::RecordField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(field) =
+ attr.syntax().parent().and_then(ast::TupleField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(variant) =
+ attr.syntax().parent().and_then(ast::Variant::cast)
+ {
+ variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
+ } else {
+ None
+ }
+ }?;
+ if !self.with_ctx(|ctx| ctx.has_derives(InFile::new(token.file_id, &adt))) {
+ return None;
+ }
+ // Not an attribute, nor a derive, so it's either a builtin or a derive helper
+ // Try to resolve to a derive helper and downmap
+ let attr_name = attr.path().and_then(|it| it.as_single_name_ref())?.as_name();
+ let id = self.db.ast_id_map(token.file_id).ast_id(&adt);
+ let helpers =
+ def_map.derive_helpers_in_scope(InFile::new(token.file_id, id))?;
+ let item = Some(adt.into());
+ let mut res = None;
+ // Try every derive that declares this helper; any successful downmap wins.
+ for (.., derive) in helpers.iter().filter(|(helper, ..)| *helper == attr_name) {
+ res = res.or(process_expansion_for_token(
+ &mut stack,
+ derive.as_file(),
+ item.clone(),
+ token.as_ref(),
+ ));
+ }
+ res
+ } else {
+ None
+ }
+ })()
+ .is_none();
+
+ if was_not_remapped && f(token) {
+ break;
+ }
+ }
+ }
+
+ // Note this return type is deliberate as [`find_nodes_at_offset_with_descend`] wants to stop
+ // traversing the inner iterator when it finds a node.
+ // The outer iterator is over the tokens descendants
+ // The inner iterator is the ancestors of a descendant
+ /// For each token at `offset`, descends it into macros and yields the ancestor
+ /// chains of every descendant, merged smallest-range-first.
+ fn descend_node_at_offset(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> impl Iterator<Item = impl Iterator<Item = SyntaxNode> + '_> + '_ {
+ node.token_at_offset(offset)
+ .map(move |token| self.descend_into_macros(token))
+ .map(|descendants| {
+ descendants.into_iter().map(move |it| self.token_ancestors_with_macros(it))
+ })
+ // re-order the tokens from token_at_offset by returning the ancestors with the smaller first nodes first
+ // See algo::ancestors_at_offset, which uses the same approach
+ .kmerge_by(|left, right| {
+ left.clone()
+ .map(|node| node.text_range().len())
+ .lt(right.clone().map(|node| node.text_range().len()))
+ })
+ }
+
+ /// Maps `node` back to a range in the original (non-macro) file; panics via
+ /// `find_file` if the node was not produced by this `Semantics` instance.
+ fn original_range(&self, node: &SyntaxNode) -> FileRange {
+ let node = self.find_file(node);
+ node.original_file_range(self.db.upcast())
+ }
+
+ /// Non-panicking variant of `original_range`: `None` when the node cannot be
+ /// mapped back to the original file.
+ fn original_range_opt(&self, node: &SyntaxNode) -> Option<FileRange> {
+ let node = self.find_file(node);
+ node.original_file_range_opt(self.db.upcast())
+ }
+
+ /// Maps an AST node inside a macro expansion back to its original source node,
+ /// caching the original file's root on success.
+ fn original_ast_node<N: AstNode>(&self, node: N) -> Option<N> {
+ self.wrap_node_infile(node).original_ast_node(self.db.upcast()).map(
+ |InFile { file_id, value }| {
+ self.cache(find_root(value.syntax()), file_id);
+ value
+ },
+ )
+ }
+
+ /// Computes the file range a diagnostic pointing at `src` should be shown at,
+ /// mapping through macro expansions to the original file.
+ fn diagnostics_display_range(&self, src: InFile<SyntaxNodePtr>) -> FileRange {
+ let root = self.parse_or_expand(src.file_id).unwrap();
+ let node = src.map(|it| it.to_node(&root));
+ node.as_ref().original_file_range(self.db.upcast())
+ }
+
+ /// Ancestor nodes of `token`, continuing through macro call sites
+ /// (empty iterator when the token has no parent).
+ fn token_ancestors_with_macros(
+ &self,
+ token: SyntaxToken,
+ ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
+ token.parent().into_iter().flat_map(move |parent| self.ancestors_with_macros(parent))
+ }
+
+ /// Ancestors of `node`; when the top of a macro expansion is reached, the
+ /// walk continues from the macro call node in the calling file.
+ fn ancestors_with_macros(
+ &self,
+ node: SyntaxNode,
+ ) -> impl Iterator<Item = SyntaxNode> + Clone + '_ {
+ let node = self.find_file(&node);
+ let db = self.db.upcast();
+ iter::successors(Some(node.cloned()), move |&InFile { file_id, ref value }| {
+ match value.parent() {
+ Some(parent) => Some(InFile::new(file_id, parent)),
+ None => {
+ // Hit the expansion root: cache it and hop to the call site.
+ self.cache(value.clone(), file_id);
+ file_id.call_node(db)
+ }
+ }
+ })
+ .map(|it| it.value)
+ }
+
+ /// Ancestors of all tokens at `offset`, merged smallest-range-first and
+ /// continuing through macro call sites.
+ fn ancestors_at_offset_with_macros(
+ &self,
+ node: &SyntaxNode,
+ offset: TextSize,
+ ) -> impl Iterator<Item = SyntaxNode> + '_ {
+ node.token_at_offset(offset)
+ .map(|token| self.token_ancestors_with_macros(token))
+ .kmerge_by(|node1, node2| node1.text_range().len() < node2.text_range().len())
+ }
+
+ /// Resolves a lifetime reference to the `LifetimeParam` declaring it, by
+ /// scanning enclosing generic parameter lists for a matching lifetime name.
+ fn resolve_lifetime_param(&self, lifetime: &ast::Lifetime) -> Option<LifetimeParam> {
+ let text = lifetime.text();
+ let lifetime_param = lifetime.syntax().ancestors().find_map(|syn| {
+ let gpl = ast::AnyHasGenericParams::cast(syn)?.generic_param_list()?;
+ gpl.lifetime_params()
+ .find(|tp| tp.lifetime().as_ref().map(|lt| lt.text()).as_ref() == Some(&text))
+ })?;
+ let src = self.wrap_node_infile(lifetime_param);
+ ToDef::to_def(self, src)
+ }
+
+ /// Resolves a `'label` reference to the `Label` it names, by walking up to the
+ /// nearest enclosing loop/block expression that declares a matching label.
+ fn resolve_label(&self, lifetime: &ast::Lifetime) -> Option<Label> {
+ let text = lifetime.text();
+ let label = lifetime.syntax().ancestors().find_map(|syn| {
+ let label = match_ast! {
+ match syn {
+ ast::ForExpr(it) => it.label(),
+ ast::WhileExpr(it) => it.label(),
+ ast::LoopExpr(it) => it.label(),
+ ast::BlockExpr(it) => it.label(),
+ _ => None,
+ }
+ };
+ label.filter(|l| {
+ l.lifetime()
+ .and_then(|lt| lt.lifetime_ident_token())
+ .map_or(false, |lt| lt.text() == text)
+ })
+ })?;
+ let src = self.wrap_node_infile(label);
+ ToDef::to_def(self, src)
+ }
+
+ /// Lowers the syntactic type `ty` to a semantic [`Type`] in its scope.
+ fn resolve_type(&self, ty: &ast::Type) -> Option<Type> {
+ let analyze = self.analyze(ty.syntax())?;
+ let ctx = body::LowerCtx::new(self.db.upcast(), analyze.file_id);
+ let ty = hir_ty::TyLoweringContext::new(self.db, &analyze.resolver)
+ .lower_ty(&crate::TypeRef::from_ast(&ctx, ty.clone()));
+ Some(Type::new_with_resolver(self.db, &analyze.resolver, ty))
+ }
+
+ /// Resolves `path` in the type namespace and returns it if it names a trait.
+ fn resolve_trait(&self, path: &ast::Path) -> Option<Trait> {
+ let analyze = self.analyze(path.syntax())?;
+ let hygiene = hir_expand::hygiene::Hygiene::new(self.db.upcast(), analyze.file_id);
+ let ctx = body::LowerCtx::with_hygiene(self.db.upcast(), &hygiene);
+ let hir_path = Path::from_src(path.clone(), &ctx)?;
+ match analyze
+ .resolver
+ .resolve_path_in_type_ns_fully(self.db.upcast(), hir_path.mod_path())?
+ {
+ TypeNs::TraitId(id) => Some(Trait { id }),
+ _ => None,
+ }
+ }
+
+ /// Returns the mutability of the implicit reborrow applied to `expr`, if any.
+ fn is_implicit_reborrow(&self, expr: &ast::Expr) -> Option<Mutability> {
+ self.analyze(expr.syntax())?.is_implicit_reborrow(self.db, expr)
+ }
+
+ /// Type of `expr`, both as inferred and after coercion adjustments.
+ fn type_of_expr(&self, expr: &ast::Expr) -> Option<TypeInfo> {
+ self.analyze(expr.syntax())?
+ .type_of_expr(self.db, expr)
+ .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
+ }
+
+ /// Type of `pat`, both as inferred and after coercion adjustments.
+ fn type_of_pat(&self, pat: &ast::Pat) -> Option<TypeInfo> {
+ self.analyze(pat.syntax())?
+ .type_of_pat(self.db, pat)
+ .map(|(ty, coerced)| TypeInfo { original: ty, adjusted: coerced })
+ }
+
+ /// Type of the `self` parameter `param`.
+ fn type_of_self(&self, param: &ast::SelfParam) -> Option<Type> {
+ self.analyze(param.syntax())?.type_of_self(self.db, param)
+ }
+
+ /// Auto-deref adjustments applied to `pat` during match ergonomics;
+ /// empty when analysis fails or there are none.
+ fn pattern_adjustments(&self, pat: &ast::Pat) -> SmallVec<[Type; 1]> {
+ self.analyze(pat.syntax())
+ .and_then(|it| it.pattern_adjustments(self.db, pat))
+ .unwrap_or_default()
+ }
+
+ /// Inferred binding mode (move / by-ref) of the identifier pattern `pat`.
+ fn binding_mode_of_pat(&self, pat: &ast::IdentPat) -> Option<BindingMode> {
+ self.analyze(pat.syntax())?.binding_mode_of_pat(self.db, pat)
+ }
+
+ /// Resolves a method call to the `FunctionId` it dispatches to.
+ fn resolve_method_call(&self, call: &ast::MethodCallExpr) -> Option<FunctionId> {
+ self.analyze(call.syntax())?.resolve_method_call(self.db, call)
+ }
+
++ /// Resolves `.await` to the `Future::poll` implementation it desugars to.
++ fn resolve_await_to_poll(&self, await_expr: &ast::AwaitExpr) -> Option<FunctionId> {
++ self.analyze(await_expr.syntax())?.resolve_await_to_poll(self.db, await_expr)
++ }
++
++ /// Resolves a prefix operator expression to its operator-trait method.
++ fn resolve_prefix_expr(&self, prefix_expr: &ast::PrefixExpr) -> Option<FunctionId> {
++ self.analyze(prefix_expr.syntax())?.resolve_prefix_expr(self.db, prefix_expr)
++ }
++
++ /// Resolves an index expression to its `Index`/`IndexMut` method.
++ fn resolve_index_expr(&self, index_expr: &ast::IndexExpr) -> Option<FunctionId> {
++ self.analyze(index_expr.syntax())?.resolve_index_expr(self.db, index_expr)
++ }
++
++ /// Resolves a binary operator expression to its operator-trait method.
++ fn resolve_bin_expr(&self, bin_expr: &ast::BinExpr) -> Option<FunctionId> {
++ self.analyze(bin_expr.syntax())?.resolve_bin_expr(self.db, bin_expr)
++ }
++
++ /// Resolves a `?` expression to the method its desugaring calls.
++ fn resolve_try_expr(&self, try_expr: &ast::TryExpr) -> Option<FunctionId> {
++ self.analyze(try_expr.syntax())?.resolve_try_expr(self.db, try_expr)
++ }
++
+ /// Resolves a method call to a [`Callable`] carrying its signature info.
+ fn resolve_method_call_as_callable(&self, call: &ast::MethodCallExpr) -> Option<Callable> {
+ self.analyze(call.syntax())?.resolve_method_call_as_callable(self.db, call)
+ }
+
+ /// Resolves a field access expression to the [`Field`] it refers to.
+ fn resolve_field(&self, field: &ast::FieldExpr) -> Option<Field> {
+ self.analyze(field.syntax())?.resolve_field(self.db, field)
+ }
+
+ /// Resolves a record-literal field to its declaration, the local used by
+ /// field-shorthand (if any), and the field's type.
+ fn resolve_record_field(
+ &self,
+ field: &ast::RecordExprField,
+ ) -> Option<(Field, Option<Local>, Type)> {
+ self.analyze(field.syntax())?.resolve_record_field(self.db, field)
+ }
+
+ /// Resolves a record-pattern field to the [`Field`] it matches.
+ fn resolve_record_pat_field(&self, field: &ast::RecordPatField) -> Option<Field> {
+ self.analyze(field.syntax())?.resolve_record_pat_field(self.db, field)
+ }
+
+ /// Resolves a macro invocation path to the [`Macro`] it invokes.
+ fn resolve_macro_call(&self, macro_call: &ast::MacroCall) -> Option<Macro> {
+ let sa = self.analyze(macro_call.syntax())?;
+ let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
+ sa.resolve_macro_call(self.db, macro_call)
+ }
+
+ /// Whether this macro call expands to code requiring `unsafe`;
+ /// `false` when the call cannot be analyzed.
+ fn is_unsafe_macro_call(&self, macro_call: &ast::MacroCall) -> bool {
+ let sa = match self.analyze(macro_call.syntax()) {
+ Some(it) => it,
+ None => return false,
+ };
+ let macro_call = self.find_file(macro_call.syntax()).with_value(macro_call);
+ sa.is_unsafe_macro_call(self.db, macro_call)
+ }
+
+ /// Resolves the attribute macro invoked on `item` to the [`Macro`] definition.
+ fn resolve_attr_macro_call(&self, item: &ast::Item) -> Option<Macro> {
+ let item_in_file = self.wrap_node_infile(item.clone());
+ let id = self.with_ctx(|ctx| {
+ let macro_call_id = ctx.item_to_macro_call(item_in_file)?;
+ macro_call_to_macro_id(ctx, self.db.upcast(), macro_call_id)
+ })?;
+ Some(Macro { id })
+ }
+
+ /// Resolves `path` to whatever definition it names at its use site.
+ fn resolve_path(&self, path: &ast::Path) -> Option<PathResolution> {
+ self.analyze(path.syntax())?.resolve_path(self.db, path)
+ }
+
+ /// Resolves an `extern crate` item to the crate it imports; `extern crate
+ /// self` (name matching `SELF_PARAM`) resolves to the current crate itself.
+ fn resolve_extern_crate(&self, extern_crate: &ast::ExternCrate) -> Option<Crate> {
+ let krate = self.scope(extern_crate.syntax())?.krate();
+ let name = extern_crate.name_ref()?.as_name();
+ if name == known::SELF_PARAM {
+ return Some(krate);
+ }
+ krate
+ .dependencies(self.db)
+ .into_iter()
+ .find_map(|dep| (dep.name == name).then(|| dep.krate))
+ }
+
+ /// Resolves a record literal to the struct/variant it constructs.
+ fn resolve_variant(&self, record_lit: ast::RecordExpr) -> Option<VariantId> {
+ self.analyze(record_lit.syntax())?.resolve_variant(self.db, record_lit)
+ }
+
+ /// If `pat` actually names a constant (not a binding), resolves that constant.
+ fn resolve_bind_pat_to_const(&self, pat: &ast::IdentPat) -> Option<ModuleDef> {
+ self.analyze(pat.syntax())?.resolve_bind_pat_to_const(self.db, pat)
+ }
+
+ /// Fields the record literal fails to initialize, with their types;
+ /// empty when analysis fails or nothing is missing.
+ fn record_literal_missing_fields(&self, literal: &ast::RecordExpr) -> Vec<(Field, Type)> {
+ self.analyze(literal.syntax())
+ .and_then(|it| it.record_literal_missing_fields(self.db, literal))
+ .unwrap_or_default()
+ }
+
+ /// Fields the record pattern fails to match, with their types;
+ /// empty when analysis fails or nothing is missing.
+ fn record_pattern_missing_fields(&self, pattern: &ast::RecordPat) -> Vec<(Field, Type)> {
+ self.analyze(pattern.syntax())
+ .and_then(|it| it.record_pattern_missing_fields(self.db, pattern))
+ .unwrap_or_default()
+ }
+
+ /// Runs `f` with a [`SourceToDefCtx`] backed by this instance's shared
+ /// source-to-def cache (borrowed mutably for the duration of the call).
+ fn with_ctx<F: FnOnce(&mut SourceToDefCtx<'_, '_>) -> T, T>(&self, f: F) -> T {
+ let mut cache = self.s2d_cache.borrow_mut();
+ let mut ctx = SourceToDefCtx { db: self.db, cache: &mut *cache };
+ f(&mut ctx)
+ }
+
+ /// All modules a file maps to (several, e.g. with `#[path]` or `cfg`).
+ fn to_module_def(&self, file: FileId) -> impl Iterator<Item = Module> {
+ self.with_ctx(|ctx| ctx.file_to_def(file)).into_iter().map(Module::from)
+ }
+
+ /// Name-resolution scope at `node` (no body inference is performed).
+ fn scope(&self, node: &SyntaxNode) -> Option<SemanticsScope<'db>> {
+ self.analyze_no_infer(node).map(|SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
+ db: self.db,
+ file_id,
+ resolver,
+ })
+ }
+
+ /// Name-resolution scope at a specific `offset` inside `node`, for positions
+ /// that don't correspond to an existing syntax node (e.g. completion points).
+ fn scope_at_offset(&self, node: &SyntaxNode, offset: TextSize) -> Option<SemanticsScope<'db>> {
+ self.analyze_with_offset_no_infer(node, offset).map(
+ |SourceAnalyzer { file_id, resolver, .. }| SemanticsScope {
+ db: self.db,
+ file_id,
+ resolver,
+ },
+ )
+ }
+
+ /// Scope inside the given trait definition (used when no syntax position is
+ /// available, only the definition).
+ fn scope_for_def(&self, def: Trait) -> SemanticsScope<'db> {
+ let file_id = self.db.lookup_intern_trait(def.id).id.file_id();
+ let resolver = def.id.resolver(self.db.upcast());
+ SemanticsScope { db: self.db, file_id, resolver }
+ }
+
+ /// Source node of `def`, caching its root so the node can be used with the
+ /// other `Semantics` APIs afterwards.
+ fn source<Def: HasSource>(&self, def: Def) -> Option<InFile<Def::Ast>>
+ where
+ Def::Ast: AstNode,
+ {
+ let res = def.source(self.db)?;
+ self.cache(find_root(res.value.syntax()), res.file_id);
+ Some(res)
+ }
+
+ /// Returns `None` if the file of the node is not part of a crate.
+ /// Runs full type inference for the enclosing body.
+ fn analyze(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
+ self.analyze_impl(node, None, true)
+ }
+
+ /// Returns `None` if the file of the node is not part of a crate.
+ /// Skips body type inference (cheaper; use when only name resolution is needed).
+ fn analyze_no_infer(&self, node: &SyntaxNode) -> Option<SourceAnalyzer> {
+ self.analyze_impl(node, None, false)
+ }
+
+ /// Like `analyze_no_infer`, but resolves the scope at `offset` rather than at
+ /// the node itself.
+ fn analyze_with_offset_no_infer(
+ &self,
+ node: &SyntaxNode,
+ offset: Option<TextSize>,
+ ) -> Option<SourceAnalyzer> {
+ self.analyze_impl(node, Some(offset), false)
+ }
+
+ /// Shared implementation of the `analyze*` family: locates the definition
+ /// containing `node` and builds a [`SourceAnalyzer`] for it, optionally
+ /// running body inference (`infer_body`) and scoping at `offset`.
+ fn analyze_impl(
+ &self,
+ node: &SyntaxNode,
+ offset: Option<TextSize>,
+ infer_body: bool,
+ ) -> Option<SourceAnalyzer> {
+ let _p = profile::span("Semantics::analyze_impl");
+ let node = self.find_file(node);
+
+ let container = match self.with_ctx(|ctx| ctx.find_container(node)) {
+ Some(it) => it,
+ None => return None,
+ };
+
+ let resolver = match container {
+ // Bodies get a dedicated analyzer that can carry inference results.
+ ChildContainer::DefWithBodyId(def) => {
+ return Some(if infer_body {
+ SourceAnalyzer::new_for_body(self.db, def, node, offset)
+ } else {
+ SourceAnalyzer::new_for_body_no_infer(self.db, def, node, offset)
+ })
+ }
+ // All other containers only need their resolver.
+ ChildContainer::TraitId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::ImplId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::ModuleId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::EnumId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::VariantId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::TypeAliasId(it) => it.resolver(self.db.upcast()),
+ ChildContainer::GenericDefId(it) => it.resolver(self.db.upcast()),
+ };
+ Some(SourceAnalyzer::new_for_resolver(resolver, node))
+ }
+
+ /// Records that the syntax tree rooted at `root_node` belongs to `file_id`,
+ /// so `find_file`/`lookup` can map nodes back to their file later.
+ /// Panics if `root_node` is not a root, or if it was previously cached under
+ /// a different file id.
+ fn cache(&self, root_node: SyntaxNode, file_id: HirFileId) {
+ assert!(root_node.parent().is_none());
+ let mut cache = self.cache.borrow_mut();
+ let prev = cache.insert(root_node, file_id);
+ // Re-caching the same root is fine as long as the file id agrees.
+ assert!(prev.is_none() || prev == Some(file_id))
+ }
+
+ /// Asserts (via `find_file`'s panic) that `node` was produced by this instance.
+ fn assert_contains_node(&self, node: &SyntaxNode) {
+ self.find_file(node);
+ }
+
+ /// Looks up the cached file id for a root node, if this instance produced it.
+ fn lookup(&self, root_node: &SyntaxNode) -> Option<HirFileId> {
+ let cache = self.cache.borrow();
+ cache.get(root_node).copied()
+ }
+
+ /// Wraps an owned AST node in [`InFile`] with its (cached) file id.
+ fn wrap_node_infile<N: AstNode>(&self, node: N) -> InFile<N> {
+ let InFile { file_id, .. } = self.find_file(node.syntax());
+ InFile::new(file_id, node)
+ }
+
+ /// Wraps the node in a [`InFile`] with the file id it belongs to.
+ ///
+ /// Panics (with a diagnostic listing all known roots) when the node's root was
+ /// never cached by this instance — i.e. the node did not come from this
+ /// `Semantics`' queries.
+ fn find_file<'node>(&self, node: &'node SyntaxNode) -> InFile<&'node SyntaxNode> {
+ let root_node = find_root(node);
+ let file_id = self.lookup(&root_node).unwrap_or_else(|| {
+ panic!(
+ "\n\nFailed to lookup {:?} in this Semantics.\n\
+ Make sure to use only query nodes, derived from this instance of Semantics.\n\
+ root node: {:?}\n\
+ known nodes: {}\n\n",
+ node,
+ root_node,
+ self.cache
+ .borrow()
+ .keys()
+ .map(|it| format!("{:?}", it))
+ .collect::<Vec<_>>()
+ .join(", ")
+ )
+ });
+ InFile::new(file_id, node)
+ }
+
+ /// Whether this method call is unsafe because its receiver is a field of a
+ /// `#[repr(packed)]` type taken by reference (`&self`/`&mut self`).
+ fn is_unsafe_method_call(&self, method_call_expr: &ast::MethodCallExpr) -> bool {
+ method_call_expr
+ .receiver()
+ .and_then(|expr| {
+ // Only field accesses can reach packed-struct fields directly.
+ let field_expr = match expr {
+ ast::Expr::FieldExpr(field_expr) => field_expr,
+ _ => return None,
+ };
+ let ty = self.type_of_expr(&field_expr.expr()?)?.original;
+ if !ty.is_packed(self.db) {
+ return None;
+ }
+
+ // By-ref receivers borrow the (possibly unaligned) field — unsafe;
+ // by-value receivers move it out, which is fine.
+ let func = self.resolve_method_call(method_call_expr).map(Function::from)?;
+ let res = match func.self_param(self.db)?.access(self.db) {
+ Access::Shared | Access::Exclusive => true,
+ Access::Owned => false,
+ };
+ Some(res)
+ })
+ .unwrap_or(false)
+ }
+
+ /// Whether this `&`-expression is possibly unsafe: taking a reference to a
+ /// field of a `#[repr(packed)]` type.
+ fn is_unsafe_ref_expr(&self, ref_expr: &ast::RefExpr) -> bool {
+ ref_expr
+ .expr()
+ .and_then(|expr| {
+ let field_expr = match expr {
+ ast::Expr::FieldExpr(field_expr) => field_expr,
+ _ => return None,
+ };
+ let expr = field_expr.expr()?;
+ self.type_of_expr(&expr)
+ })
+ // Binding a reference to a packed type is possibly unsafe.
+ .map(|ty| ty.original.is_packed(self.db))
+ .unwrap_or(false)
+
+ // FIXME This needs layout computation to be correct. It will highlight
+ // more than it should with the current implementation.
+ }
+
+ /// Whether a `ref` identifier pattern is possibly unsafe: binding by
+ /// reference to a field of a `#[repr(packed)]` type, either through a record
+ /// pattern or through a `let` initialized from a field access.
+ fn is_unsafe_ident_pat(&self, ident_pat: &ast::IdentPat) -> bool {
+ // Only `ref` bindings borrow; plain bindings move/copy and are safe here.
+ if ident_pat.ref_token().is_none() {
+ return false;
+ }
+
+ ident_pat
+ .syntax()
+ .parent()
+ .and_then(|parent| {
+ // `IdentPat` can live under `RecordPat` directly under `RecordPatField` or
+ // `RecordPatFieldList`. `RecordPatField` also lives under `RecordPatFieldList`,
+ // so this tries to lookup the `IdentPat` anywhere along that structure to the
+ // `RecordPat` so we can get the containing type.
+ let record_pat = ast::RecordPatField::cast(parent.clone())
+ .and_then(|record_pat| record_pat.syntax().parent())
+ .or_else(|| Some(parent.clone()))
+ .and_then(|parent| {
+ ast::RecordPatFieldList::cast(parent)?
+ .syntax()
+ .parent()
+ .and_then(ast::RecordPat::cast)
+ });
+
+ // If this doesn't match a `RecordPat`, fallback to a `LetStmt` to see if
+ // this is initialized from a `FieldExpr`.
+ if let Some(record_pat) = record_pat {
+ self.type_of_pat(&ast::Pat::RecordPat(record_pat))
+ } else if let Some(let_stmt) = ast::LetStmt::cast(parent) {
+ let field_expr = match let_stmt.initializer()? {
+ ast::Expr::FieldExpr(field_expr) => field_expr,
+ _ => return None,
+ };
+
+ self.type_of_expr(&field_expr.expr()?)
+ } else {
+ None
+ }
+ })
+ // Binding a reference to a packed type is possibly unsafe.
+ .map(|ty| ty.original.is_packed(self.db))
+ .unwrap_or(false)
+ }
+}
+
+/// Maps a concrete macro *call* back to the id of the macro *definition* it
+/// invokes, by looking at the call's interned definition kind.
+fn macro_call_to_macro_id(
+ ctx: &mut SourceToDefCtx<'_, '_>,
+ db: &dyn AstDatabase,
+ macro_call_id: MacroCallId,
+) -> Option<MacroId> {
+ let loc = db.lookup_intern_macro_call(macro_call_id);
+ match loc.def.kind {
+ // All of these kinds carry an AST id of a `macro_rules!`/`macro` item.
+ hir_expand::MacroDefKind::Declarative(it)
+ | hir_expand::MacroDefKind::BuiltIn(_, it)
+ | hir_expand::MacroDefKind::BuiltInAttr(_, it)
+ | hir_expand::MacroDefKind::BuiltInDerive(_, it)
+ | hir_expand::MacroDefKind::BuiltInEager(_, it) => {
+ ctx.macro_to_def(InFile::new(it.file_id, it.to_node(db)))
+ }
+ // Proc macros point at a function item instead.
+ hir_expand::MacroDefKind::ProcMacro(_, _, it) => {
+ ctx.proc_macro_to_def(InFile::new(it.file_id, it.to_node(db)))
+ }
+ }
+}
+
+/// Maps a syntax node (with its file) to the HIR definition it declares.
+/// Implemented for each AST node type via the `to_def_impls!` macro below.
+pub trait ToDef: AstNode + Clone {
+ type Def;
+
+ fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def>;
+}
+
+/// Generates a `ToDef` impl per `(Def type, AST type, SourceToDefCtx method)`
+/// triple, delegating to the named context method.
+macro_rules! to_def_impls {
+ ($(($def:path, $ast:path, $meth:ident)),* ,) => {$(
+ impl ToDef for $ast {
+ type Def = $def;
+ fn to_def(sema: &SemanticsImpl<'_>, src: InFile<Self>) -> Option<Self::Def> {
+ sema.with_ctx(|ctx| ctx.$meth(src)).map(<$def>::from)
+ }
+ }
+ )*}
+}
+
+// The full table of syntax-node-to-definition mappings.
+to_def_impls![
+ (crate::Module, ast::Module, module_to_def),
+ (crate::Module, ast::SourceFile, source_file_to_def),
+ (crate::Struct, ast::Struct, struct_to_def),
+ (crate::Enum, ast::Enum, enum_to_def),
+ (crate::Union, ast::Union, union_to_def),
+ (crate::Trait, ast::Trait, trait_to_def),
+ (crate::Impl, ast::Impl, impl_to_def),
+ (crate::TypeAlias, ast::TypeAlias, type_alias_to_def),
+ (crate::Const, ast::Const, const_to_def),
+ (crate::Static, ast::Static, static_to_def),
+ (crate::Function, ast::Fn, fn_to_def),
+ (crate::Field, ast::RecordField, record_field_to_def),
+ (crate::Field, ast::TupleField, tuple_field_to_def),
+ (crate::Variant, ast::Variant, enum_variant_to_def),
+ (crate::TypeParam, ast::TypeParam, type_param_to_def),
+ (crate::LifetimeParam, ast::LifetimeParam, lifetime_param_to_def),
+ (crate::ConstParam, ast::ConstParam, const_param_to_def),
+ (crate::GenericParam, ast::GenericParam, generic_param_to_def),
+ (crate::Macro, ast::Macro, macro_to_def),
+ (crate::Local, ast::IdentPat, bind_pat_to_def),
+ (crate::Local, ast::SelfParam, self_param_to_def),
+ (crate::Label, ast::Label, label_to_def),
+ (crate::Adt, ast::Adt, adt_to_def),
+];
+
+/// Topmost ancestor (the syntax tree root) of `node`.
+fn find_root(node: &SyntaxNode) -> SyntaxNode {
+ node.ancestors().last().unwrap()
+}
+
+/// `SemanticScope` encapsulates the notion of a scope (the set of visible
+/// names) at a particular program point.
+///
+/// It is a bit tricky, as scopes do not really exist inside the compiler.
+/// Rather, the compiler directly computes for each reference the definition it
+/// refers to. It might transiently compute the explicit scope map while doing
+/// so, but, generally, this is not something left after the analysis.
+///
+/// However, we do very much need explicit scopes for IDE purposes --
+/// completion, at its core, lists the contents of the current scope. The notion
+/// of scope is also useful to answer questions like "what would be the meaning
+/// of this piece of code if we inserted it into this position?".
+///
+/// So `SemanticsScope` is constructed from a specific program point (a syntax
+/// node or just a raw offset) and provides access to the set of visible names
+/// on a somewhat best-effort basis.
+///
+/// Note that if you are wondering "what does this specific existing name mean?",
+/// you'd better use the `resolve_` family of methods.
+#[derive(Debug)]
+pub struct SemanticsScope<'a> {
+ pub db: &'a dyn HirDatabase,
+ // File (or macro expansion) this scope was constructed in.
+ file_id: HirFileId,
+ // The name resolver positioned at the scope's program point.
+ resolver: Resolver,
+}
+
+impl<'a> SemanticsScope<'a> {
+ /// The module this scope belongs to.
+ pub fn module(&self) -> Module {
+ Module { id: self.resolver.module() }
+ }
+
+ /// The crate this scope belongs to.
+ pub fn krate(&self) -> Crate {
+ Crate { id: self.resolver.krate() }
+ }
+
+ /// The underlying name resolver (crate-internal use only).
+ pub(crate) fn resolver(&self) -> &Resolver {
+ &self.resolver
+ }
+
+ /// Note: `VisibleTraits` should be treated as an opaque type, passed into `Type`
+ pub fn visible_traits(&self) -> VisibleTraits {
+ let resolver = &self.resolver;
+ VisibleTraits(resolver.traits_in_scope(self.db.upcast()))
+ }
+
+ /// Calls `f` once for every name visible in this scope, converting the
+ /// resolver's entries to public `ScopeDef` values. Locals and labels without
+ /// a body owner are skipped.
+ pub fn process_all_names(&self, f: &mut dyn FnMut(Name, ScopeDef)) {
+ let scope = self.resolver.names_in_scope(self.db.upcast());
+ for (name, entries) in scope {
+ for entry in entries {
+ let def = match entry {
+ resolver::ScopeDef::ModuleDef(it) => ScopeDef::ModuleDef(it.into()),
+ resolver::ScopeDef::Unknown => ScopeDef::Unknown,
+ resolver::ScopeDef::ImplSelfType(it) => ScopeDef::ImplSelfType(it.into()),
+ resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
+ resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
+ resolver::ScopeDef::Local(pat_id) => match self.resolver.body_owner() {
+ Some(parent) => ScopeDef::Local(Local { parent, pat_id }),
+ None => continue,
+ },
+ resolver::ScopeDef::Label(label_id) => match self.resolver.body_owner() {
+ Some(parent) => ScopeDef::Label(Label { parent, label_id }),
+ None => continue,
+ },
+ };
+ f(name.clone(), def)
+ }
+ }
+ }
+
+ /// Resolve a path as-if it was written at the given scope. This is
+ /// necessarily a heuristic, as it doesn't take hygiene into account.
+ pub fn speculative_resolve(&self, path: &ast::Path) -> Option<PathResolution> {
+ let ctx = body::LowerCtx::new(self.db.upcast(), self.file_id);
+ let path = Path::from_src(path.clone(), &ctx)?;
+ resolve_hir_path(self.db, &self.resolver, &path)
+ }
+
+ /// Iterates over associated types that may be specified after the given path (using
+ /// `Ty::Assoc` syntax).
+ pub fn assoc_type_shorthand_candidates<R>(
+ &self,
+ resolution: &PathResolution,
+ mut cb: impl FnMut(&Name, TypeAlias) -> Option<R>,
+ ) -> Option<R> {
+ let def = self.resolver.generic_def()?;
+ hir_ty::associated_type_shorthand_candidates(
+ self.db,
+ def,
+ resolution.in_type_ns()?,
+ |name, _, id| cb(name, id.into()),
+ )
+ }
+}
+
+pub struct VisibleTraits(pub FxHashSet<TraitId>);
+
+impl ops::Deref for VisibleTraits {
+ type Target = FxHashSet<TraitId>;
+
+ // Lets a `VisibleTraits` be used anywhere a `&FxHashSet<TraitId>` is expected.
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
--- /dev/null
- builtin_fn_macro::BuiltinFnLikeExpander, hygiene::Hygiene, name::AsName, HirFileId, InFile,
+//! Lookup hir elements using positions in the source code. This is a lossy
+//! transformation: in general, a single source might correspond to several
+//! modules, functions, etc, due to macros, cfgs and `#[path=]` attributes on
+//! modules.
+//!
+//! So, this module should not be used during hir construction; it exists
+//! purely for "IDE needs".
+use std::{
+ iter::{self, once},
+ sync::Arc,
+};
+
+use hir_def::{
+ body::{
+ self,
+ scope::{ExprScopes, ScopeId},
+ Body, BodySourceMap,
+ },
+ expr::{ExprId, Pat, PatId},
+ macro_id_to_def_id,
+ path::{ModPath, Path, PathKind},
+ resolver::{resolver_for_scope, Resolver, TypeNs, ValueNs},
+ type_ref::Mutability,
+ AsMacroCall, AssocItemId, DefWithBodyId, FieldId, FunctionId, ItemContainerId, LocalFieldId,
+ Lookup, ModuleDefId, VariantId,
+};
+use hir_expand::{
- method_resolution, Adjust, Adjustment, AutoBorrow, InferenceResult, Interner, Substitution,
- TyExt, TyKind, TyLoweringContext,
++ builtin_fn_macro::BuiltinFnLikeExpander,
++ hygiene::Hygiene,
++ name,
++ name::{AsName, Name},
++ HirFileId, InFile,
+};
+use hir_ty::{
+ diagnostics::{
+ record_literal_missing_fields, record_pattern_missing_fields, unsafe_expressions,
+ UnsafeExpr,
+ },
- let f_in_impl = self.resolve_impl_method(db, f_in_trait, &substs);
- f_in_impl.or(Some(f_in_trait))
++ method_resolution::{self, lang_names_for_bin_op},
++ Adjust, Adjustment, AutoBorrow, InferenceResult, Interner, Substitution, Ty, TyExt, TyKind,
++ TyLoweringContext,
+};
+use itertools::Itertools;
+use smallvec::SmallVec;
+use syntax::{
+ ast::{self, AstNode},
+ SyntaxKind, SyntaxNode, TextRange, TextSize,
+};
+
+use crate::{
+ db::HirDatabase, semantics::PathResolution, Adt, AssocItem, BindingMode, BuiltinAttr,
+ BuiltinType, Callable, Const, DeriveHelper, Field, Function, Local, Macro, ModuleDef, Static,
+ Struct, ToolModule, Trait, Type, TypeAlias, Variant,
+};
+
+/// `SourceAnalyzer` is a convenience wrapper which exposes HIR API in terms of
+/// original source files. It should not be used inside the HIR itself.
+#[derive(Debug)]
+pub(crate) struct SourceAnalyzer {
+ pub(crate) file_id: HirFileId,
+ pub(crate) resolver: Resolver,
+ // Body and source map of the enclosing `DefWithBody`, when one exists.
+ def: Option<(DefWithBodyId, Arc<Body>, Arc<BodySourceMap>)>,
+ // Inference results; `None` when constructed without inference.
+ infer: Option<Arc<InferenceResult>>,
+}
+
+impl SourceAnalyzer {
+ /// Creates an analyzer for the body of `def`, resolving the innermost
+ /// scope around `offset` (or around `node` when no offset is given) and
+ /// eagerly running type inference for the body.
+ pub(crate) fn new_for_body(
+ db: &dyn HirDatabase,
+ def: DefWithBodyId,
+ node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
+ offset: Option<TextSize>,
+ ) -> SourceAnalyzer {
+ let (body, source_map) = db.body_with_source_map(def);
+ let scopes = db.expr_scopes(def);
+ let scope = match offset {
+ None => scope_for(&scopes, &source_map, node),
+ Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset),
+ };
+ let resolver = resolver_for_scope(db.upcast(), def, scope);
+ SourceAnalyzer {
+ resolver,
+ def: Some((def, body, source_map)),
+ infer: Some(db.infer(def)),
+ file_id,
+ }
+ }
+
+ /// Like `new_for_body`, but skips type inference; cheaper when only name
+ /// resolution is needed.
+ pub(crate) fn new_for_body_no_infer(
+ db: &dyn HirDatabase,
+ def: DefWithBodyId,
+ node @ InFile { file_id, .. }: InFile<&SyntaxNode>,
+ offset: Option<TextSize>,
+ ) -> SourceAnalyzer {
+ let (body, source_map) = db.body_with_source_map(def);
+ let scopes = db.expr_scopes(def);
+ let scope = match offset {
+ None => scope_for(&scopes, &source_map, node),
+ Some(offset) => scope_for_offset(db, &scopes, &source_map, node.file_id, offset),
+ };
+ let resolver = resolver_for_scope(db.upcast(), def, scope);
+ SourceAnalyzer { resolver, def: Some((def, body, source_map)), infer: None, file_id }
+ }
+
+ /// Creates an analyzer that only performs name resolution via `resolver`;
+ /// no body, source map, or inference results are available.
+ pub(crate) fn new_for_resolver(
+ resolver: Resolver,
+ node: InFile<&SyntaxNode>,
+ ) -> SourceAnalyzer {
+ SourceAnalyzer { resolver, def: None, infer: None, file_id: node.file_id }
+ }
+
+ fn body_source_map(&self) -> Option<&BodySourceMap> {
+ self.def.as_ref().map(|(.., source_map)| &**source_map)
+ }
+ fn body(&self) -> Option<&Body> {
+ self.def.as_ref().map(|(_, body, _)| &**body)
+ }
+
+ /// Maps a syntactic expression to its HIR id, expanding macro calls first.
+ fn expr_id(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<ExprId> {
+ let src = match expr {
+ ast::Expr::MacroExpr(expr) => {
+ self.expand_expr(db, InFile::new(self.file_id, expr.macro_call()?.clone()))?
+ }
+ _ => InFile::new(self.file_id, expr.clone()),
+ };
+ let sm = self.body_source_map()?;
+ sm.node_expr(src.as_ref())
+ }
+
+ fn pat_id(&self, pat: &ast::Pat) -> Option<PatId> {
+ // FIXME: macros, see `expr_id`
+ let src = InFile { file_id: self.file_id, value: pat };
+ self.body_source_map()?.node_pat(src)
+ }
+
+ /// Expands `expr` through (possibly nested) macro calls until a plain
+ /// expression is reached.
+ fn expand_expr(
+ &self,
+ db: &dyn HirDatabase,
+ expr: InFile<ast::MacroCall>,
+ ) -> Option<InFile<ast::Expr>> {
+ let macro_file = self.body_source_map()?.node_macro_file(expr.as_ref())?;
+ let expanded = db.parse_or_expand(macro_file)?;
+
+ let res = match ast::MacroCall::cast(expanded.clone()) {
+ Some(call) => self.expand_expr(db, InFile::new(macro_file, call))?,
+ _ => InFile::new(macro_file, ast::Expr::cast(expanded)?),
+ };
+ Some(res)
+ }
+
+ pub(crate) fn is_implicit_reborrow(
+ &self,
+ db: &dyn HirDatabase,
+ expr: &ast::Expr,
+ ) -> Option<Mutability> {
+ let expr_id = self.expr_id(db, expr)?;
+ let infer = self.infer.as_ref()?;
+ let adjustments = infer.expr_adjustments.get(&expr_id)?;
+ // A reborrow shows up as a deref adjustment immediately followed by a
+ // ref-borrow adjustment.
+ adjustments.windows(2).find_map(|slice| match slice {
+ &[Adjustment {kind: Adjust::Deref(None), ..}, Adjustment {kind: Adjust::Borrow(AutoBorrow::Ref(m)), ..}] => Some(match m {
+ hir_ty::Mutability::Mut => Mutability::Mut,
+ hir_ty::Mutability::Not => Mutability::Shared,
+ }),
+ _ => None,
+ })
+ }
+
+ /// Returns the type of `expr` and, when coercion adjustments apply, the
+ /// coerced type as well.
+ pub(crate) fn type_of_expr(
+ &self,
+ db: &dyn HirDatabase,
+ expr: &ast::Expr,
+ ) -> Option<(Type, Option<Type>)> {
+ let expr_id = self.expr_id(db, expr)?;
+ let infer = self.infer.as_ref()?;
+ let coerced = infer
+ .expr_adjustments
+ .get(&expr_id)
+ .and_then(|adjusts| adjusts.last().map(|adjust| adjust.target.clone()));
+ let ty = infer[expr_id].clone();
+ let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
+ Some((mk_ty(ty), coerced.map(mk_ty)))
+ }
+
+ /// Returns the type of `pat` and, when pattern adjustments apply, the
+ /// adjusted type as well.
+ pub(crate) fn type_of_pat(
+ &self,
+ db: &dyn HirDatabase,
+ pat: &ast::Pat,
+ ) -> Option<(Type, Option<Type>)> {
+ let pat_id = self.pat_id(pat)?;
+ let infer = self.infer.as_ref()?;
+ let coerced = infer
+ .pat_adjustments
+ .get(&pat_id)
+ .and_then(|adjusts| adjusts.last().cloned());
+ let ty = infer[pat_id].clone();
+ let mk_ty = |ty| Type::new_with_resolver(db, &self.resolver, ty);
+ Some((mk_ty(ty), coerced.map(mk_ty)))
+ }
+
+ pub(crate) fn type_of_self(
+ &self,
+ db: &dyn HirDatabase,
+ param: &ast::SelfParam,
+ ) -> Option<Type> {
+ let src = InFile { file_id: self.file_id, value: param };
+ let pat_id = self.body_source_map()?.node_self_param(src)?;
+ let ty = self.infer.as_ref()?[pat_id].clone();
+ Some(Type::new_with_resolver(db, &self.resolver, ty))
+ }
+
+ pub(crate) fn binding_mode_of_pat(
+ &self,
+ _db: &dyn HirDatabase,
+ pat: &ast::IdentPat,
+ ) -> Option<BindingMode> {
+ let pat_id = self.pat_id(&pat.clone().into())?;
+ let infer = self.infer.as_ref()?;
+ infer.pat_binding_modes.get(&pat_id).map(|bm| match bm {
+ hir_ty::BindingMode::Move => BindingMode::Move,
+ hir_ty::BindingMode::Ref(hir_ty::Mutability::Mut) => BindingMode::Ref(Mutability::Mut),
+ hir_ty::BindingMode::Ref(hir_ty::Mutability::Not) => {
+ BindingMode::Ref(Mutability::Shared)
+ }
+ })
+ }
+ pub(crate) fn pattern_adjustments(
+ &self,
+ db: &dyn HirDatabase,
+ pat: &ast::Pat,
+ ) -> Option<SmallVec<[Type; 1]>> {
+ let pat_id = self.pat_id(pat)?;
+ let infer = self.infer.as_ref()?;
+ Some(
+ infer
+ .pat_adjustments
+ .get(&pat_id)?
+ .iter()
+ .map(|ty| Type::new_with_resolver(db, &self.resolver, ty.clone()))
+ .collect(),
+ )
+ }
+
+ pub(crate) fn resolve_method_call_as_callable(
+ &self,
+ db: &dyn HirDatabase,
+ call: &ast::MethodCallExpr,
+ ) -> Option<Callable> {
+ let expr_id = self.expr_id(db, &call.clone().into())?;
+ let (func, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
+ let ty = db.value_ty(func.into()).substitute(Interner, &substs);
+ let ty = Type::new_with_resolver(db, &self.resolver, ty);
+ let mut res = ty.as_callable(db)?;
+ res.is_bound_method = true;
+ Some(res)
+ }
+
+ pub(crate) fn resolve_method_call(
+ &self,
+ db: &dyn HirDatabase,
+ call: &ast::MethodCallExpr,
+ ) -> Option<FunctionId> {
+ let expr_id = self.expr_id(db, &call.clone().into())?;
+ let (f_in_trait, substs) = self.infer.as_ref()?.method_resolution(expr_id)?;
++
++ Some(self.resolve_impl_method_or_trait_def(db, f_in_trait, &substs))
++ }
++
++ pub(crate) fn resolve_await_to_poll(
++ &self,
++ db: &dyn HirDatabase,
++ await_expr: &ast::AwaitExpr,
++ ) -> Option<FunctionId> {
++ let ty = self.ty_of_expr(db, &await_expr.expr()?.into())?;
++
++ let op_fn = db
++ .lang_item(self.resolver.krate(), hir_expand::name![poll].to_smol_str())?
++ .as_function()?;
++ let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn).push(ty.clone()).build();
++
++ Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
++ }
++
++ pub(crate) fn resolve_prefix_expr(
++ &self,
++ db: &dyn HirDatabase,
++ prefix_expr: &ast::PrefixExpr,
++ ) -> Option<FunctionId> {
++ let lang_item_name = match prefix_expr.op_kind()? {
++ ast::UnaryOp::Deref => name![deref],
++ ast::UnaryOp::Not => name![not],
++ ast::UnaryOp::Neg => name![neg],
++ };
++ let ty = self.ty_of_expr(db, &prefix_expr.expr()?.into())?;
++
++ let op_fn = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?;
++ let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn).push(ty.clone()).build();
++
++ Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
++ }
++
++ pub(crate) fn resolve_index_expr(
++ &self,
++ db: &dyn HirDatabase,
++ index_expr: &ast::IndexExpr,
++ ) -> Option<FunctionId> {
++ let base_ty = self.ty_of_expr(db, &index_expr.base()?.into())?;
++ let index_ty = self.ty_of_expr(db, &index_expr.index()?.into())?;
++
++ let lang_item_name = name![index];
++
++ let op_fn = self.lang_trait_fn(db, &lang_item_name, &lang_item_name)?;
++ let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn)
++ .push(base_ty.clone())
++ .push(index_ty.clone())
++ .build();
++ Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
++ }
++
++ pub(crate) fn resolve_bin_expr(
++ &self,
++ db: &dyn HirDatabase,
++ binop_expr: &ast::BinExpr,
++ ) -> Option<FunctionId> {
++ let op = binop_expr.op_kind()?;
++ let lhs = self.ty_of_expr(db, &binop_expr.lhs()?.into())?;
++ let rhs = self.ty_of_expr(db, &binop_expr.rhs()?.into())?;
++
++ let op_fn = lang_names_for_bin_op(op)
++ .and_then(|(name, lang_item)| self.lang_trait_fn(db, &lang_item, &name))?;
++ let substs =
++ hir_ty::TyBuilder::subst_for_def(db, op_fn).push(lhs.clone()).push(rhs.clone()).build();
++
++ Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
++ }
++
++ pub(crate) fn resolve_try_expr(
++ &self,
++ db: &dyn HirDatabase,
++ try_expr: &ast::TryExpr,
++ ) -> Option<FunctionId> {
++ let ty = self.ty_of_expr(db, &try_expr.expr()?.into())?;
++
++ let op_fn =
++ db.lang_item(self.resolver.krate(), name![branch].to_smol_str())?.as_function()?;
++ let substs = hir_ty::TyBuilder::subst_for_def(db, op_fn).push(ty.clone()).build();
++
++ Some(self.resolve_impl_method_or_trait_def(db, op_fn, &substs))
+ }
+
+ pub(crate) fn resolve_field(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::FieldExpr,
+ ) -> Option<Field> {
+ let expr_id = self.expr_id(db, &field.clone().into())?;
+ self.infer.as_ref()?.field_resolution(expr_id).map(|it| it.into())
+ }
+
+ pub(crate) fn resolve_record_field(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::RecordExprField,
+ ) -> Option<(Field, Option<Local>, Type)> {
+ let record_expr = ast::RecordExpr::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
+ let expr = ast::Expr::from(record_expr);
+ let expr_id = self.body_source_map()?.node_expr(InFile::new(self.file_id, &expr))?;
+
+ let local_name = field.field_name()?.as_name();
+ // Shorthand fields (`Foo { x }`) may additionally resolve to a local.
+ let local = if field.name_ref().is_some() {
+ None
+ } else {
+ let path = ModPath::from_segments(PathKind::Plain, once(local_name.clone()));
+ match self.resolver.resolve_path_in_value_ns_fully(db.upcast(), &path) {
+ Some(ValueNs::LocalBinding(pat_id)) => {
+ Some(Local { pat_id, parent: self.resolver.body_owner()? })
+ }
+ _ => None,
+ }
+ };
+ let (_, subst) = self.infer.as_ref()?.type_of_expr.get(expr_id)?.as_adt()?;
+ let variant = self.infer.as_ref()?.variant_resolution_for_expr(expr_id)?;
+ let variant_data = variant.variant_data(db.upcast());
+ let field = FieldId { parent: variant, local_id: variant_data.field(&local_name)? };
+ let field_ty =
+ db.field_types(variant).get(field.local_id)?.clone().substitute(Interner, subst);
+ Some((field.into(), local, Type::new_with_resolver(db, &self.resolver, field_ty)))
+ }
+
+ pub(crate) fn resolve_record_pat_field(
+ &self,
+ db: &dyn HirDatabase,
+ field: &ast::RecordPatField,
+ ) -> Option<Field> {
+ let field_name = field.field_name()?.as_name();
+ let record_pat = ast::RecordPat::cast(field.syntax().parent().and_then(|p| p.parent())?)?;
+ let pat_id = self.pat_id(&record_pat.into())?;
+ let variant = self.infer.as_ref()?.variant_resolution_for_pat(pat_id)?;
+ let variant_data = variant.variant_data(db.upcast());
+ let field = FieldId { parent: variant, local_id: variant_data.field(&field_name)? };
+ Some(field.into())
+ }
+
+ pub(crate) fn resolve_macro_call(
+ &self,
+ db: &dyn HirDatabase,
+ macro_call: InFile<&ast::MacroCall>,
+ ) -> Option<Macro> {
+ let ctx = body::LowerCtx::new(db.upcast(), macro_call.file_id);
+ let path = macro_call.value.path().and_then(|ast| Path::from_src(ast, &ctx))?;
+ self.resolver.resolve_path_as_macro(db.upcast(), path.mod_path()).map(|it| it.into())
+ }
+
+ pub(crate) fn resolve_bind_pat_to_const(
+ &self,
+ db: &dyn HirDatabase,
+ pat: &ast::IdentPat,
+ ) -> Option<ModuleDef> {
+ let pat_id = self.pat_id(&pat.clone().into())?;
+ let body = self.body()?;
+ let path = match &body[pat_id] {
+ Pat::Path(path) => path,
+ _ => return None,
+ };
+ let res = resolve_hir_path(db, &self.resolver, path)?;
+ match res {
+ PathResolution::Def(def) => Some(def),
+ _ => None,
+ }
+ }
+
+ pub(crate) fn resolve_path(
+ &self,
+ db: &dyn HirDatabase,
+ path: &ast::Path,
+ ) -> Option<PathResolution> {
+ let parent = path.syntax().parent();
+ let parent = || parent.clone();
+
+ let mut prefer_value_ns = false;
+ // First, try inference-based resolution by inspecting the syntactic
+ // context the path appears in.
+ let resolved = (|| {
+ if let Some(path_expr) = parent().and_then(ast::PathExpr::cast) {
+ let expr_id = self.expr_id(db, &path_expr.into())?;
+ let infer = self.infer.as_ref()?;
+ if let Some(assoc) = infer.assoc_resolutions_for_expr(expr_id) {
+ let assoc = match assoc {
+ AssocItemId::FunctionId(f_in_trait) => {
+ match infer.type_of_expr.get(expr_id) {
+ None => assoc,
+ Some(func_ty) => {
+ if let TyKind::FnDef(_fn_def, subs) = func_ty.kind(Interner) {
+ self.resolve_impl_method(db, f_in_trait, subs)
+ .map(AssocItemId::FunctionId)
+ .unwrap_or(assoc)
+ } else {
+ assoc
+ }
+ }
+ }
+ }
+
+ _ => assoc,
+ };
+
+ return Some(PathResolution::Def(AssocItem::from(assoc).into()));
+ }
+ if let Some(VariantId::EnumVariantId(variant)) =
+ infer.variant_resolution_for_expr(expr_id)
+ {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
+ prefer_value_ns = true;
+ } else if let Some(path_pat) = parent().and_then(ast::PathPat::cast) {
+ let pat_id = self.pat_id(&path_pat.into())?;
+ if let Some(assoc) = self.infer.as_ref()?.assoc_resolutions_for_pat(pat_id) {
+ return Some(PathResolution::Def(AssocItem::from(assoc).into()));
+ }
+ if let Some(VariantId::EnumVariantId(variant)) =
+ self.infer.as_ref()?.variant_resolution_for_pat(pat_id)
+ {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
+ } else if let Some(rec_lit) = parent().and_then(ast::RecordExpr::cast) {
+ let expr_id = self.expr_id(db, &rec_lit.into())?;
+ if let Some(VariantId::EnumVariantId(variant)) =
+ self.infer.as_ref()?.variant_resolution_for_expr(expr_id)
+ {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
+ } else {
+ let record_pat = parent().and_then(ast::RecordPat::cast).map(ast::Pat::from);
+ let tuple_struct_pat =
+ || parent().and_then(ast::TupleStructPat::cast).map(ast::Pat::from);
+ if let Some(pat) = record_pat.or_else(tuple_struct_pat) {
+ let pat_id = self.pat_id(&pat)?;
+ let variant_res_for_pat =
+ self.infer.as_ref()?.variant_resolution_for_pat(pat_id);
+ if let Some(VariantId::EnumVariantId(variant)) = variant_res_for_pat {
+ return Some(PathResolution::Def(ModuleDef::Variant(variant.into())));
+ }
+ }
+ }
+ None
+ })();
+ if resolved.is_some() {
+ return resolved;
+ }
+
+ // This must be a normal source file rather than macro file.
+ let hygiene = Hygiene::new(db.upcast(), self.file_id);
+ let ctx = body::LowerCtx::with_hygiene(db.upcast(), &hygiene);
+ let hir_path = Path::from_src(path.clone(), &ctx)?;
+
+ // Case where path is a qualifier of a use tree, e.g. foo::bar::{Baz, Qux} where we are
+ // trying to resolve foo::bar.
+ if let Some(use_tree) = parent().and_then(ast::UseTree::cast) {
+ if use_tree.coloncolon_token().is_some() {
+ return resolve_hir_path_qualifier(db, &self.resolver, &hir_path);
+ }
+ }
+
+ let meta_path = path
+ .syntax()
+ .ancestors()
+ .take_while(|it| {
+ let kind = it.kind();
+ ast::Path::can_cast(kind) || ast::Meta::can_cast(kind)
+ })
+ .last()
+ .and_then(ast::Meta::cast);
+
+ // Case where path is a qualifier of another path, e.g. foo::bar::Baz where we are
+ // trying to resolve foo::bar.
+ if path.parent_path().is_some() {
+ return match resolve_hir_path_qualifier(db, &self.resolver, &hir_path) {
+ None if meta_path.is_some() => {
+ path.first_segment().and_then(|it| it.name_ref()).and_then(|name_ref| {
+ ToolModule::by_name(db, self.resolver.krate().into(), &name_ref.text())
+ .map(PathResolution::ToolModule)
+ })
+ }
+ res => res,
+ };
+ } else if let Some(meta_path) = meta_path {
+ // Case where we are resolving the final path segment of a path in an attribute
+ // in this case we have to check for inert/builtin attributes and tools and prioritize
+ // resolution of attributes over other namespaces
+ if let Some(name_ref) = path.as_single_name_ref() {
+ let builtin =
+ BuiltinAttr::by_name(db, self.resolver.krate().into(), &name_ref.text());
+ if builtin.is_some() {
+ return builtin.map(PathResolution::BuiltinAttr);
+ }
+
+ if let Some(attr) = meta_path.parent_attr() {
+ let adt = if let Some(field) =
+ attr.syntax().parent().and_then(ast::RecordField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(field) =
+ attr.syntax().parent().and_then(ast::TupleField::cast)
+ {
+ field.syntax().ancestors().take(4).find_map(ast::Adt::cast)
+ } else if let Some(variant) =
+ attr.syntax().parent().and_then(ast::Variant::cast)
+ {
+ variant.syntax().ancestors().nth(2).and_then(ast::Adt::cast)
+ } else {
+ None
+ };
+ if let Some(adt) = adt {
+ let ast_id = db.ast_id_map(self.file_id).ast_id(&adt);
+ if let Some(helpers) = self
+ .resolver
+ .def_map()
+ .derive_helpers_in_scope(InFile::new(self.file_id, ast_id))
+ {
+ // FIXME: Multiple derives can have the same helper
+ let name_ref = name_ref.as_name();
+ for (macro_id, mut helpers) in
+ helpers.iter().group_by(|(_, macro_id, ..)| macro_id).into_iter()
+ {
+ if let Some(idx) = helpers.position(|(name, ..)| *name == name_ref)
+ {
+ return Some(PathResolution::DeriveHelper(DeriveHelper {
+ derive: *macro_id,
+ idx,
+ }));
+ }
+ }
+ }
+ }
+ }
+ }
+ return match resolve_hir_path_as_macro(db, &self.resolver, &hir_path) {
+ Some(m) => Some(PathResolution::Def(ModuleDef::Macro(m))),
+ // this labels any path that starts with a tool module as the tool itself, this is technically wrong
+ // but there is no benefit in differentiating these two cases for the time being
+ None => path.first_segment().and_then(|it| it.name_ref()).and_then(|name_ref| {
+ ToolModule::by_name(db, self.resolver.krate().into(), &name_ref.text())
+ .map(PathResolution::ToolModule)
+ }),
+ };
+ }
+ if parent().map_or(false, |it| ast::Visibility::can_cast(it.kind())) {
+ resolve_hir_path_qualifier(db, &self.resolver, &hir_path)
+ } else {
+ resolve_hir_path_(db, &self.resolver, &hir_path, prefer_value_ns)
+ }
+ }
+
+ pub(crate) fn record_literal_missing_fields(
+ &self,
+ db: &dyn HirDatabase,
+ literal: &ast::RecordExpr,
+ ) -> Option<Vec<(Field, Type)>> {
+ let body = self.body()?;
+ let infer = self.infer.as_ref()?;
+
+ let expr_id = self.expr_id(db, &literal.clone().into())?;
+ let substs = infer.type_of_expr[expr_id].as_adt()?.1;
+
+ let (variant, missing_fields, _exhaustive) =
+ record_literal_missing_fields(db, infer, expr_id, &body[expr_id])?;
+ let res = self.missing_fields(db, substs, variant, missing_fields);
+ Some(res)
+ }
+
+ pub(crate) fn record_pattern_missing_fields(
+ &self,
+ db: &dyn HirDatabase,
+ pattern: &ast::RecordPat,
+ ) -> Option<Vec<(Field, Type)>> {
+ let body = self.body()?;
+ let infer = self.infer.as_ref()?;
+
+ let pat_id = self.pat_id(&pattern.clone().into())?;
+ let substs = infer.type_of_pat[pat_id].as_adt()?.1;
+
+ let (variant, missing_fields, _exhaustive) =
+ record_pattern_missing_fields(db, infer, pat_id, &body[pat_id])?;
+ let res = self.missing_fields(db, substs, variant, missing_fields);
+ Some(res)
+ }
+
+ /// Pairs each missing field id with its substituted type.
+ fn missing_fields(
+ &self,
+ db: &dyn HirDatabase,
+ substs: &Substitution,
+ variant: VariantId,
+ missing_fields: Vec<LocalFieldId>,
+ ) -> Vec<(Field, Type)> {
+ let field_types = db.field_types(variant);
+
+ missing_fields
+ .into_iter()
+ .map(|local_id| {
+ let field = FieldId { parent: variant, local_id };
+ let ty = field_types[local_id].clone().substitute(Interner, substs);
+ (field.into(), Type::new_with_resolver_inner(db, &self.resolver, ty))
+ })
+ .collect()
+ }
+
+ pub(crate) fn expand(
+ &self,
+ db: &dyn HirDatabase,
+ macro_call: InFile<&ast::MacroCall>,
+ ) -> Option<HirFileId> {
+ let krate = self.resolver.krate();
+ let macro_call_id = macro_call.as_call_id(db.upcast(), krate, |path| {
+ self.resolver
+ .resolve_path_as_macro(db.upcast(), &path)
+ .map(|it| macro_id_to_def_id(db.upcast(), it))
+ })?;
+ // Cap the expansion depth to guard against runaway recursive macros.
+ Some(macro_call_id.as_file()).filter(|it| it.expansion_level(db.upcast()) < 64)
+ }
+
+ pub(crate) fn resolve_variant(
+ &self,
+ db: &dyn HirDatabase,
+ record_lit: ast::RecordExpr,
+ ) -> Option<VariantId> {
+ let infer = self.infer.as_ref()?;
+ let expr_id = self.expr_id(db, &record_lit.into())?;
+ infer.variant_resolution_for_expr(expr_id)
+ }
+
+ /// Returns `true` if the macro expands to asm/global_asm, or if its
+ /// expanded expression contains unsafe operations outside an `unsafe` block.
+ pub(crate) fn is_unsafe_macro_call(
+ &self,
+ db: &dyn HirDatabase,
+ macro_call: InFile<&ast::MacroCall>,
+ ) -> bool {
+ // check for asm/global_asm
+ if let Some(mac) = self.resolve_macro_call(db, macro_call) {
+ let ex = match mac.id {
+ hir_def::MacroId::Macro2Id(it) => it.lookup(db.upcast()).expander,
+ hir_def::MacroId::MacroRulesId(it) => it.lookup(db.upcast()).expander,
+ _ => hir_def::MacroExpander::Declarative,
+ };
+ match ex {
+ hir_def::MacroExpander::BuiltIn(e)
+ if e == BuiltinFnLikeExpander::Asm || e == BuiltinFnLikeExpander::GlobalAsm =>
+ {
+ return true
+ }
+ _ => (),
+ }
+ }
+ let macro_expr = match macro_call
+ .map(|it| it.syntax().parent().and_then(ast::MacroExpr::cast))
+ .transpose()
+ {
+ Some(it) => it,
+ None => return false,
+ };
+
+ if let (Some((def, body, sm)), Some(infer)) = (&self.def, &self.infer) {
+ if let Some(expanded_expr) = sm.macro_expansion_expr(macro_expr.as_ref()) {
+ let mut is_unsafe = false;
+ unsafe_expressions(
+ db,
+ infer,
+ *def,
+ body,
+ expanded_expr,
+ &mut |UnsafeExpr { inside_unsafe_block, .. }| is_unsafe |= !inside_unsafe_block,
+ );
+ return is_unsafe;
+ }
+ }
+ false
+ }
+
+ /// If `func` is a trait method, tries to resolve it to the corresponding
+ /// method of the `impl` for the self type found in `substs`.
+ fn resolve_impl_method(
+ &self,
+ db: &dyn HirDatabase,
+ func: FunctionId,
+ substs: &Substitution,
+ ) -> Option<FunctionId> {
+ let impled_trait = match func.lookup(db.upcast()).container {
+ ItemContainerId::TraitId(trait_id) => trait_id,
+ _ => return None,
+ };
+ if substs.is_empty(Interner) {
+ return None;
+ }
+ let self_ty = substs.at(Interner, 0).ty(Interner)?;
+ let krate = self.resolver.krate();
+ let trait_env = self.resolver.body_owner()?.as_generic_def_id().map_or_else(
+ || Arc::new(hir_ty::TraitEnvironment::empty(krate)),
+ |d| db.trait_environment(d),
+ );
+
+ let fun_data = db.function_data(func);
+ method_resolution::lookup_impl_method(self_ty, db, trait_env, impled_trait, &fun_data.name)
+ }
++
++ /// Resolves to the `impl` method when possible, falling back to the trait
++ /// definition itself.
++ fn resolve_impl_method_or_trait_def(
++ &self,
++ db: &dyn HirDatabase,
++ func: FunctionId,
++ substs: &Substitution,
++ ) -> FunctionId {
++ self.resolve_impl_method(db, func, substs).unwrap_or(func)
++ }
++
++ /// Looks up `method_name` on the lang-item trait named `lang_trait`.
++ fn lang_trait_fn(
++ &self,
++ db: &dyn HirDatabase,
++ lang_trait: &Name,
++ method_name: &Name,
++ ) -> Option<FunctionId> {
++ db.trait_data(db.lang_item(self.resolver.krate(), lang_trait.to_smol_str())?.as_trait()?)
++ .method_by_name(method_name)
++ }
++
++ fn ty_of_expr(&self, db: &dyn HirDatabase, expr: &ast::Expr) -> Option<&Ty> {
++ self.infer.as_ref()?.type_of_expr.get(self.expr_id(db, expr)?)
++ }
+}
+
+fn scope_for(
+ scopes: &ExprScopes,
+ source_map: &BodySourceMap,
+ node: InFile<&SyntaxNode>,
+) -> Option<ScopeId> {
+ // Walk outwards from `node` and take the first ancestor expression that
+ // both maps to a HIR expression and has a scope recorded for it.
+ let file_id = node.file_id;
+ node.value.ancestors().filter_map(ast::Expr::cast).find_map(|expr| {
+ let expr_id = source_map.node_expr(InFile::new(file_id, &expr))?;
+ scopes.scope_for(expr_id)
+ })
+}
+
+fn scope_for_offset(
+ db: &dyn HirDatabase,
+ scopes: &ExprScopes,
+ source_map: &BodySourceMap,
+ from_file: HirFileId,
+ offset: TextSize,
+) -> Option<ScopeId> {
+ // Find the innermost (smallest) expression range in `from_file` that
+ // contains `offset`, mapping macro-expanded expressions back to the
+ // macro call they originate from.
+ scopes
+ .scope_by_expr()
+ .iter()
+ .filter_map(|(id, scope)| {
+ let InFile { file_id, value } = source_map.expr_syntax(*id).ok()?;
+ if from_file == file_id {
+ return Some((value.text_range(), scope));
+ }
+
+ // FIXME handle attribute expansion
+ let source = iter::successors(file_id.call_node(db.upcast()), |it| {
+ it.file_id.call_node(db.upcast())
+ })
+ .find(|it| it.file_id == from_file)
+ .filter(|it| it.value.kind() == SyntaxKind::MACRO_CALL)?;
+ Some((source.value.text_range(), scope))
+ })
+ .filter(|(expr_range, _scope)| expr_range.start() <= offset && offset <= expr_range.end())
+ // find containing scope
+ .min_by_key(|(expr_range, _scope)| expr_range.len())
+ .map(|(expr_range, scope)| {
+ adjust(db, scopes, source_map, expr_range, from_file, offset).unwrap_or(*scope)
+ })
+}
+
+// XXX: during completion, cursor might be outside of any particular
+// expression. Try to figure out the correct scope...
+fn adjust(
+ db: &dyn HirDatabase,
+ scopes: &ExprScopes,
+ source_map: &BodySourceMap,
+ expr_range: TextRange,
+ from_file: HirFileId,
+ offset: TextSize,
+) -> Option<ScopeId> {
+ // Candidate scopes: those whose syntax lies strictly inside `expr_range`
+ // and starts before `offset`.
+ let child_scopes = scopes
+ .scope_by_expr()
+ .iter()
+ .filter_map(|(id, scope)| {
+ let source = source_map.expr_syntax(*id).ok()?;
+ // FIXME: correctly handle macro expansion
+ if source.file_id != from_file {
+ return None;
+ }
+ let root = source.file_syntax(db.upcast());
+ let node = source.value.to_node(&root);
+ Some((node.syntax().text_range(), scope))
+ })
+ .filter(|&(range, _)| {
+ range.start() <= offset && expr_range.contains_range(range) && range != expr_range
+ });
+
+ // A range containing another compares greater, so a containing scope wins
+ // over a contained one; disjoint ranges are ordered by start position.
+ child_scopes
+ .max_by(|&(r1, _), &(r2, _)| {
+ if r1.contains_range(r2) {
+ std::cmp::Ordering::Greater
+ } else if r2.contains_range(r1) {
+ std::cmp::Ordering::Less
+ } else {
+ r1.start().cmp(&r2.start())
+ }
+ })
+ .map(|(_ptr, scope)| *scope)
+}
+
+#[inline]
+pub(crate) fn resolve_hir_path(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+) -> Option<PathResolution> {
+ // Types-first resolution; see `resolve_hir_path_` for the full strategy.
+ resolve_hir_path_(db, resolver, path, false)
+}
+
+#[inline]
+pub(crate) fn resolve_hir_path_as_macro(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+) -> Option<Macro> {
+ // Only the macro namespace is consulted here.
+ let macro_id = resolver.resolve_path_as_macro(db.upcast(), path.mod_path())?;
+ Some(macro_id.into())
+}
+
+fn resolve_hir_path_(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+ prefer_value_ns: bool,
+) -> Option<PathResolution> {
+ // Resolution in the type namespace, including associated-type shorthand.
+ let types = || {
+ let (ty, unresolved) = match path.type_anchor() {
+ Some(type_ref) => {
+ let (_, res) = TyLoweringContext::new(db, resolver).lower_ty_ext(type_ref);
+ res.map(|ty_ns| (ty_ns, path.segments().first()))
+ }
+ None => {
+ let (ty, remaining) =
+ resolver.resolve_path_in_type_ns(db.upcast(), path.mod_path())?;
+ match remaining {
+ Some(remaining) if remaining > 1 => {
+ if remaining + 1 == path.segments().len() {
+ Some((ty, path.segments().last()))
+ } else {
+ None
+ }
+ }
+ _ => Some((ty, path.segments().get(1))),
+ }
+ }
+ }?;
+
+ // If we are in a TypeNs for a Trait, and we have an unresolved name, try to resolve it as a type
+ // within the trait's associated types.
+ if let (Some(unresolved), &TypeNs::TraitId(trait_id)) = (&unresolved, &ty) {
+ if let Some(type_alias_id) =
+ db.trait_data(trait_id).associated_type_by_name(unresolved.name)
+ {
+ return Some(PathResolution::Def(ModuleDefId::from(type_alias_id).into()));
+ }
+ }
+
+ let res = match ty {
+ TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
+ TypeNs::GenericParam(id) => PathResolution::TypeParam(id.into()),
+ TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
+ PathResolution::Def(Adt::from(it).into())
+ }
+ TypeNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
+ TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
+ TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
+ TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
+ };
+ match unresolved {
+ Some(unresolved) => resolver
+ .generic_def()
+ .and_then(|def| {
+ hir_ty::associated_type_shorthand_candidates(
+ db,
+ def,
+ res.in_type_ns()?,
+ |name, _, id| (name == unresolved.name).then(|| id),
+ )
+ })
+ .map(TypeAlias::from)
+ .map(Into::into)
+ .map(PathResolution::Def),
+ None => Some(res),
+ }
+ };
+
+ let body_owner = resolver.body_owner();
+ // Resolution in the value namespace (locals, functions, consts, ...).
+ let values = || {
+ resolver.resolve_path_in_value_ns_fully(db.upcast(), path.mod_path()).and_then(|val| {
+ let res = match val {
+ ValueNs::LocalBinding(pat_id) => {
+ let var = Local { parent: body_owner?, pat_id };
+ PathResolution::Local(var)
+ }
+ ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),
+ ValueNs::ConstId(it) => PathResolution::Def(Const::from(it).into()),
+ ValueNs::StaticId(it) => PathResolution::Def(Static::from(it).into()),
+ ValueNs::StructId(it) => PathResolution::Def(Struct::from(it).into()),
+ ValueNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
+ ValueNs::ImplSelf(impl_id) => PathResolution::SelfType(impl_id.into()),
+ ValueNs::GenericParam(id) => PathResolution::ConstParam(id.into()),
+ };
+ Some(res)
+ })
+ };
+
+ let items = || {
+ resolver
+ .resolve_module_path_in_items(db.upcast(), path.mod_path())
+ .take_types()
+ .map(|it| PathResolution::Def(it.into()))
+ };
+
+ let macros = || {
+ resolver
+ .resolve_path_as_macro(db.upcast(), path.mod_path())
+ .map(|def| PathResolution::Def(ModuleDef::Macro(def.into())))
+ };
+
+ // The caller decides type-vs-value priority; items and macros are always
+ // the last resort.
+ if prefer_value_ns { values().or_else(types) } else { types().or_else(values) }
+ .or_else(items)
+ .or_else(macros)
+}
+
+/// Resolves a path where we know it is a qualifier of another path.
+///
+/// For example, if we have:
+/// ```
+/// mod my {
+/// pub mod foo {
+/// struct Bar;
+/// }
+///
+/// pub fn foo() {}
+/// }
+/// ```
+/// then we know that `foo` in `my::foo::Bar` refers to the module, not the function.
+fn resolve_hir_path_qualifier(
+ db: &dyn HirDatabase,
+ resolver: &Resolver,
+ path: &Path,
+) -> Option<PathResolution> {
+ // Types take precedence; fall back to module-level items.
+ resolver
+ .resolve_path_in_type_ns_fully(db.upcast(), path.mod_path())
+ .map(|ty| match ty {
+ TypeNs::SelfType(it) => PathResolution::SelfType(it.into()),
+ TypeNs::GenericParam(id) => PathResolution::TypeParam(id.into()),
+ TypeNs::AdtSelfType(it) | TypeNs::AdtId(it) => {
+ PathResolution::Def(Adt::from(it).into())
+ }
+ TypeNs::EnumVariantId(it) => PathResolution::Def(Variant::from(it).into()),
+ TypeNs::TypeAliasId(it) => PathResolution::Def(TypeAlias::from(it).into()),
+ TypeNs::BuiltinType(it) => PathResolution::Def(BuiltinType::from(it).into()),
+ TypeNs::TraitId(it) => PathResolution::Def(Trait::from(it).into()),
+ })
+ .or_else(|| {
+ resolver
+ .resolve_module_path_in_items(db.upcast(), path.mod_path())
+ .take_types()
+ .map(|it| PathResolution::Def(it.into()))
+ })
+}
--- /dev/null
- use hir::{HasSource, InFile};
++use hir::{HasSource, HirDisplay, InFile};
+use ide_db::assists::{AssistId, AssistKind};
+use syntax::{
- ast::{self, edit::IndentLevel},
- AstNode, TextSize,
++ ast::{self, make, HasArgList},
++ match_ast, AstNode, SyntaxNode,
+};
+
+use crate::assist_context::{AssistContext, Assists};
+
+// Assist: generate_enum_variant
+//
+// Adds a variant to an enum.
+//
+// ```
+// enum Countries {
+// Ghana,
+// }
+//
+// fn main() {
+// let country = Countries::Lesotho$0;
+// }
+// ```
+// ->
+// ```
+// enum Countries {
+// Ghana,
+// Lesotho,
+// }
+//
+// fn main() {
+// let country = Countries::Lesotho;
+// }
+// ```
+pub(crate) fn generate_enum_variant(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> {
- let path_expr: ast::PathExpr = ctx.find_node_at_offset()?;
- let path = path_expr.path()?;
++ let path: ast::Path = ctx.find_node_at_offset()?;
++ let parent = path_parent(&path)?;
+
+ if ctx.sema.resolve_path(&path).is_some() {
+ // No need to generate anything if the path resolves
+ return None;
+ }
+
+ let name_ref = path.segment()?.name_ref()?;
+ if name_ref.text().starts_with(char::is_lowercase) {
+ // Don't suggest generating variant if the name starts with a lowercase letter
+ return None;
+ }
+
+ if let Some(hir::PathResolution::Def(hir::ModuleDef::Adt(hir::Adt::Enum(e)))) =
+ ctx.sema.resolve_path(&path.qualifier()?)
+ {
+ let target = path.syntax().text_range();
- return add_variant_to_accumulator(acc, ctx, target, e, &name_ref);
++ return add_variant_to_accumulator(acc, ctx, target, e, &name_ref, parent);
+ }
+
+ None
+}
+
++#[derive(Debug)]
++enum PathParent {
++ PathExpr(ast::PathExpr),
++ RecordExpr(ast::RecordExpr),
++ PathPat(ast::PathPat),
++ UseTree(ast::UseTree),
++}
++
++impl PathParent {
++ fn syntax(&self) -> &SyntaxNode {
++ match self {
++ PathParent::PathExpr(it) => it.syntax(),
++ PathParent::RecordExpr(it) => it.syntax(),
++ PathParent::PathPat(it) => it.syntax(),
++ PathParent::UseTree(it) => it.syntax(),
++ }
++ }
++
++ fn make_field_list(&self, ctx: &AssistContext<'_>) -> Option<ast::FieldList> {
++ let scope = ctx.sema.scope(self.syntax())?;
++
++ match self {
++ PathParent::PathExpr(it) => {
++ if let Some(call_expr) = it.syntax().parent().and_then(ast::CallExpr::cast) {
++ make_tuple_field_list(call_expr, ctx, &scope)
++ } else {
++ None
++ }
++ }
++ PathParent::RecordExpr(it) => make_record_field_list(it, ctx, &scope),
++ PathParent::UseTree(_) | PathParent::PathPat(_) => None,
++ }
++ }
++}
++
++fn path_parent(path: &ast::Path) -> Option<PathParent> {
++ let parent = path.syntax().parent()?;
++
++ match_ast! {
++ match parent {
++ ast::PathExpr(it) => Some(PathParent::PathExpr(it)),
++ ast::RecordExpr(it) => Some(PathParent::RecordExpr(it)),
++ ast::PathPat(it) => Some(PathParent::PathPat(it)),
++ ast::UseTree(it) => Some(PathParent::UseTree(it)),
++ _ => None
++ }
++ }
++}
++
+fn add_variant_to_accumulator(
+ acc: &mut Assists,
+ ctx: &AssistContext<'_>,
+ target: syntax::TextRange,
+ adt: hir::Enum,
+ name_ref: &ast::NameRef,
++ parent: PathParent,
+) -> Option<()> {
+ let db = ctx.db();
+ let InFile { file_id, value: enum_node } = adt.source(db)?.original_ast_node(db)?;
- let enum_indent = IndentLevel::from_node(&enum_node.syntax());
-
- let variant_list = enum_node.variant_list()?;
- let offset = variant_list.syntax().text_range().end() - TextSize::of('}');
- let empty_enum = variant_list.variants().next().is_none();
+
+ acc.add(
+ AssistId("generate_enum_variant", AssistKind::Generate),
+ "Generate variant",
+ target,
+ |builder| {
+ builder.edit_file(file_id.original_file(db));
- let text = format!(
- "{maybe_newline}{indent_1}{name},\n{enum_indent}",
- maybe_newline = if empty_enum { "\n" } else { "" },
- indent_1 = IndentLevel(1),
- name = name_ref,
- enum_indent = enum_indent
- );
- builder.insert(offset, text)
++ let node = builder.make_mut(enum_node);
++ let variant = make_variant(ctx, name_ref, parent);
++ node.variant_list().map(|it| it.add_variant(variant.clone_for_update()));
+ },
+ )
+}
+
++fn make_variant(
++ ctx: &AssistContext<'_>,
++ name_ref: &ast::NameRef,
++ parent: PathParent,
++) -> ast::Variant {
++ let field_list = parent.make_field_list(ctx);
++ make::variant(make::name(&name_ref.text()), field_list)
++}
++
++fn make_record_field_list(
++ record: &ast::RecordExpr,
++ ctx: &AssistContext<'_>,
++ scope: &hir::SemanticsScope<'_>,
++) -> Option<ast::FieldList> {
++ let fields = record.record_expr_field_list()?.fields();
++ let record_fields = fields.map(|field| {
++ let name = name_from_field(&field);
++
++ let ty = field
++ .expr()
++ .and_then(|it| expr_ty(ctx, it, scope))
++ .unwrap_or_else(make::ty_placeholder);
++
++ make::record_field(None, name, ty)
++ });
++ Some(make::record_field_list(record_fields).into())
++}
++
++fn name_from_field(field: &ast::RecordExprField) -> ast::Name {
++ let text = match field.name_ref() {
++ Some(it) => it.to_string(),
++ None => name_from_field_shorthand(field).unwrap_or("unknown".to_string()),
++ };
++ make::name(&text)
++}
++
++fn name_from_field_shorthand(field: &ast::RecordExprField) -> Option<String> {
++ let path = match field.expr()? {
++ ast::Expr::PathExpr(path_expr) => path_expr.path(),
++ _ => None,
++ }?;
++ Some(path.as_single_name_ref()?.to_string())
++}
++
++fn make_tuple_field_list(
++ call_expr: ast::CallExpr,
++ ctx: &AssistContext<'_>,
++ scope: &hir::SemanticsScope<'_>,
++) -> Option<ast::FieldList> {
++ let args = call_expr.arg_list()?.args();
++ let tuple_fields = args.map(|arg| {
++ let ty = expr_ty(ctx, arg, &scope).unwrap_or_else(make::ty_placeholder);
++ make::tuple_field(None, ty)
++ });
++ Some(make::tuple_field_list(tuple_fields).into())
++}
++
++fn expr_ty(
++ ctx: &AssistContext<'_>,
++ arg: ast::Expr,
++ scope: &hir::SemanticsScope<'_>,
++) -> Option<ast::Type> {
++ let ty = ctx.sema.type_of_expr(&arg).map(|it| it.adjusted())?;
++ let text = ty.display_source_code(ctx.db(), scope.module().into()).ok()?;
++ Some(make::ty(&text))
++}
++
+#[cfg(test)]
+mod tests {
+ use crate::tests::{check_assist, check_assist_not_applicable};
+
+ use super::*;
+
+ #[test]
+ fn generate_basic_enum_variant_in_empty_enum() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {}
+fn main() {
+ Foo::Bar$0
+}
+",
+ r"
+enum Foo {
+ Bar,
+}
+fn main() {
+ Foo::Bar
+}
+",
+ )
+ }
+
+ #[test]
+ fn generate_basic_enum_variant_in_non_empty_enum() {
+ check_assist(
+ generate_enum_variant,
+ r"
+enum Foo {
+ Bar,
+}
+fn main() {
+ Foo::Baz$0
+}
+",
+ r"
+enum Foo {
+ Bar,
+ Baz,
+}
+fn main() {
+ Foo::Baz
+}
+",
+ )
+ }
+
+ #[test]
+ fn generate_basic_enum_variant_in_different_file() {
+ check_assist(
+ generate_enum_variant,
+ r"
+//- /main.rs
+mod foo;
+use foo::Foo;
+
+fn main() {
+ Foo::Baz$0
+}
+
+//- /foo.rs
+enum Foo {
+ Bar,
+}
+",
+ r"
+enum Foo {
+ Bar,
+ Baz,
+}
+",
+ )
+ }
+
+ #[test]
+ fn not_applicable_for_existing_variant() {
+ check_assist_not_applicable(
+ generate_enum_variant,
+ r"
+enum Foo {
+ Bar,
+}
+fn main() {
+ Foo::Bar$0
+}
+",
+ )
+ }
+
+ #[test]
+ fn not_applicable_for_lowercase() {
+ check_assist_not_applicable(
+ generate_enum_variant,
+ r"
+enum Foo {
+ Bar,
+}
+fn main() {
+ Foo::new$0
+}
+",
+ )
+ }
+
+ #[test]
+ fn indentation_level_is_correct() {
+ check_assist(
+ generate_enum_variant,
+ r"
+mod m {
+ enum Foo {
+ Bar,
+ }
+}
+fn main() {
+ m::Foo::Baz$0
+}
+",
+ r"
+mod m {
+ enum Foo {
+ Bar,
+ Baz,
+ }
+}
+fn main() {
+ m::Foo::Baz
+}
++",
++ )
++ }
++
++ #[test]
++ fn associated_single_element_tuple() {
++ check_assist(
++ generate_enum_variant,
++ r"
++enum Foo {}
++fn main() {
++ Foo::Bar$0(true)
++}
++",
++ r"
++enum Foo {
++ Bar(bool),
++}
++fn main() {
++ Foo::Bar(true)
++}
++",
++ )
++ }
++
++ #[test]
++ fn associated_single_element_tuple_unknown_type() {
++ check_assist(
++ generate_enum_variant,
++ r"
++enum Foo {}
++fn main() {
++ Foo::Bar$0(x)
++}
++",
++ r"
++enum Foo {
++ Bar(_),
++}
++fn main() {
++ Foo::Bar(x)
++}
++",
++ )
++ }
++
++ #[test]
++ fn associated_multi_element_tuple() {
++ check_assist(
++ generate_enum_variant,
++ r"
++struct Struct {}
++enum Foo {}
++fn main() {
++ Foo::Bar$0(true, x, Struct {})
++}
++",
++ r"
++struct Struct {}
++enum Foo {
++ Bar(bool, _, Struct),
++}
++fn main() {
++ Foo::Bar(true, x, Struct {})
++}
++",
++ )
++ }
++
++ #[test]
++ fn associated_record() {
++ check_assist(
++ generate_enum_variant,
++ r"
++enum Foo {}
++fn main() {
++ Foo::$0Bar { x: true }
++}
++",
++ r"
++enum Foo {
++ Bar { x: bool },
++}
++fn main() {
++ Foo::Bar { x: true }
++}
++",
++ )
++ }
++
++ #[test]
++ fn associated_record_unknown_type() {
++ check_assist(
++ generate_enum_variant,
++ r"
++enum Foo {}
++fn main() {
++ Foo::$0Bar { x: y }
++}
++",
++ r"
++enum Foo {
++ Bar { x: _ },
++}
++fn main() {
++ Foo::Bar { x: y }
++}
++",
++ )
++ }
++
++ #[test]
++ fn associated_record_field_shorthand() {
++ check_assist(
++ generate_enum_variant,
++ r"
++enum Foo {}
++fn main() {
++ let x = true;
++ Foo::$0Bar { x }
++}
++",
++ r"
++enum Foo {
++ Bar { x: bool },
++}
++fn main() {
++ let x = true;
++ Foo::Bar { x }
++}
++",
++ )
++ }
++
++ #[test]
++ fn associated_record_field_shorthand_unknown_type() {
++ check_assist(
++ generate_enum_variant,
++ r"
++enum Foo {}
++fn main() {
++ Foo::$0Bar { x }
++}
++",
++ r"
++enum Foo {
++ Bar { x: _ },
++}
++fn main() {
++ Foo::Bar { x }
++}
++",
++ )
++ }
++
++ #[test]
++ fn associated_record_field_multiple_fields() {
++ check_assist(
++ generate_enum_variant,
++ r"
++struct Struct {}
++enum Foo {}
++fn main() {
++ Foo::$0Bar { x, y: x, s: Struct {} }
++}
++",
++ r"
++struct Struct {}
++enum Foo {
++ Bar { x: _, y: _, s: Struct },
++}
++fn main() {
++ Foo::Bar { x, y: x, s: Struct {} }
++}
++",
++ )
++ }
++
++ #[test]
++ fn use_tree() {
++ check_assist(
++ generate_enum_variant,
++ r"
++//- /main.rs
++mod foo;
++use foo::Foo::Bar$0;
++
++//- /foo.rs
++enum Foo {}
++",
++ r"
++enum Foo {
++ Bar,
++}
++",
++ )
++ }
++
++ #[test]
++ fn not_applicable_for_path_type() {
++ check_assist_not_applicable(
++ generate_enum_variant,
++ r"
++enum Foo {}
++impl Foo::Bar$0 {}
++",
++ )
++ }
++
++ #[test]
++ fn path_pat() {
++ check_assist(
++ generate_enum_variant,
++ r"
++enum Foo {}
++fn foo(x: Foo) {
++ match x {
++ Foo::Bar$0 =>
++ }
++}
++",
++ r"
++enum Foo {
++ Bar,
++}
++fn foo(x: Foo) {
++ match x {
++ Foo::Bar =>
++ }
++}
+",
+ )
+ }
+}
--- /dev/null
- // | VS Code | **Rust Analyzer: Memory Usage (Clears Database)**
+//! Applies changes to the IDE state transactionally.
+
+use std::sync::Arc;
+
+use base_db::{
+ salsa::{Database, Durability},
+ Change, SourceRootId,
+};
+use profile::{memory_usage, Bytes};
+use rustc_hash::FxHashSet;
+
+use crate::{symbol_index::SymbolsDatabase, RootDatabase};
+
+impl RootDatabase {
+ pub fn request_cancellation(&mut self) {
+ let _p = profile::span("RootDatabase::request_cancellation");
+ self.salsa_runtime_mut().synthetic_write(Durability::LOW);
+ }
+
+ pub fn apply_change(&mut self, change: Change) {
+ let _p = profile::span("RootDatabase::apply_change");
+ self.request_cancellation();
+ tracing::info!("apply_change {:?}", change);
+ if let Some(roots) = &change.roots {
+ let mut local_roots = FxHashSet::default();
+ let mut library_roots = FxHashSet::default();
+ for (idx, root) in roots.iter().enumerate() {
+ let root_id = SourceRootId(idx as u32);
+ if root.is_library {
+ library_roots.insert(root_id);
+ } else {
+ local_roots.insert(root_id);
+ }
+ }
+ self.set_local_roots_with_durability(Arc::new(local_roots), Durability::HIGH);
+ self.set_library_roots_with_durability(Arc::new(library_roots), Durability::HIGH);
+ }
+ change.apply(self);
+ }
+
+ // Feature: Memory Usage
+ //
+ // Clears rust-analyzer's internal database and prints memory usage statistics.
+ //
+ // |===
+ // | Editor | Action Name
+ //
++ // | VS Code | **rust-analyzer: Memory Usage (Clears Database)**
+ // |===
+ // image::https://user-images.githubusercontent.com/48062697/113065592-08559f00-91b1-11eb-8c96-64b88068ec02.gif[]
+ pub fn per_query_memory_usage(&mut self) -> Vec<(String, Bytes)> {
+ let mut acc: Vec<(String, Bytes)> = vec![];
+ macro_rules! purge_each_query {
+ ($($q:path)*) => {$(
+ let before = memory_usage().allocated;
+ $q.in_db(self).purge();
+ let after = memory_usage().allocated;
+ let q: $q = Default::default();
+ let name = format!("{:?}", q);
+ acc.push((name, before - after));
+ )*}
+ }
+ purge_each_query![
+ // SourceDatabase
+ base_db::ParseQuery
+ base_db::CrateGraphQuery
+
+ // SourceDatabaseExt
+ base_db::FileTextQuery
+ base_db::FileSourceRootQuery
+ base_db::SourceRootQuery
+ base_db::SourceRootCratesQuery
+
+ // AstDatabase
+ hir::db::AstIdMapQuery
+ hir::db::MacroArgTextQuery
+ hir::db::MacroDefQuery
+ hir::db::ParseMacroExpansionQuery
+ hir::db::MacroExpandQuery
+ hir::db::HygieneFrameQuery
+ hir::db::InternMacroCallQuery
+
+ // DefDatabase
+ hir::db::FileItemTreeQuery
+ hir::db::BlockDefMapQuery
+ hir::db::CrateDefMapQueryQuery
+ hir::db::FieldsAttrsQuery
+ hir::db::VariantsAttrsQuery
+ hir::db::FieldsAttrsSourceMapQuery
+ hir::db::VariantsAttrsSourceMapQuery
+ hir::db::StructDataQuery
+ hir::db::UnionDataQuery
+ hir::db::EnumDataQuery
+ hir::db::ImplDataQuery
+ hir::db::TraitDataQuery
+ hir::db::TypeAliasDataQuery
+ hir::db::FunctionDataQuery
+ hir::db::ConstDataQuery
+ hir::db::StaticDataQuery
+ hir::db::BodyWithSourceMapQuery
+ hir::db::BodyQuery
+ hir::db::ExprScopesQuery
+ hir::db::GenericParamsQuery
+ hir::db::AttrsQuery
+ hir::db::CrateLangItemsQuery
+ hir::db::LangItemQuery
+ hir::db::ImportMapQuery
+
+ // HirDatabase
+ hir::db::InferQueryQuery
+ hir::db::TyQuery
+ hir::db::ValueTyQuery
+ hir::db::ImplSelfTyQuery
+ hir::db::ImplTraitQuery
+ hir::db::FieldTypesQuery
+ hir::db::CallableItemSignatureQuery
+ hir::db::GenericPredicatesForParamQuery
+ hir::db::GenericPredicatesQuery
+ hir::db::GenericDefaultsQuery
+ hir::db::InherentImplsInCrateQuery
+ hir::db::TraitEnvironmentQuery
+ hir::db::TraitImplsInCrateQuery
+ hir::db::TraitImplsInDepsQuery
+ hir::db::AssociatedTyDataQuery
+ hir::db::AssociatedTyDataQuery
+ hir::db::TraitDatumQuery
+ hir::db::StructDatumQuery
+ hir::db::ImplDatumQuery
+ hir::db::FnDefDatumQuery
+ hir::db::ReturnTypeImplTraitsQuery
+ hir::db::InternCallableDefQuery
+ hir::db::InternTypeOrConstParamIdQuery
+ hir::db::InternImplTraitIdQuery
+ hir::db::InternClosureQuery
+ hir::db::AssociatedTyValueQuery
+ hir::db::TraitSolveQueryQuery
+ hir::db::InternTypeOrConstParamIdQuery
+
+ // SymbolsDatabase
+ crate::symbol_index::ModuleSymbolsQuery
+ crate::symbol_index::LibrarySymbolsQuery
+ crate::symbol_index::LocalRootsQuery
+ crate::symbol_index::LibraryRootsQuery
+
+ // LineIndexDatabase
+ crate::LineIndexQuery
+
+ // InternDatabase
+ hir::db::InternFunctionQuery
+ hir::db::InternStructQuery
+ hir::db::InternUnionQuery
+ hir::db::InternEnumQuery
+ hir::db::InternConstQuery
+ hir::db::InternStaticQuery
+ hir::db::InternTraitQuery
+ hir::db::InternTypeAliasQuery
+ hir::db::InternImplQuery
+ ];
+
+ acc.sort_by_key(|it| std::cmp::Reverse(it.1));
+ acc
+ }
+}
--- /dev/null
+//! `NameDefinition` keeps information about the element we want to search references for.
+//! The element is represented by `NameKind`. It's located inside some `container` and
+//! has a `visibility`, which defines a search scope.
+//! Note that reference search is not possible for all of the classified items.
+
+// FIXME: this badly needs rename/rewrite (matklad, 2020-02-06).
+
+use arrayvec::ArrayVec;
+use hir::{
+ Adt, AsAssocItem, AssocItem, BuiltinAttr, BuiltinType, Const, Crate, DeriveHelper, Field,
+ Function, GenericParam, HasVisibility, Impl, ItemInNs, Label, Local, Macro, Module, ModuleDef,
+ Name, PathResolution, Semantics, Static, ToolModule, Trait, TypeAlias, Variant, Visibility,
+};
+use stdx::impl_from;
+use syntax::{
+ ast::{self, AstNode},
+ match_ast, SyntaxKind, SyntaxNode, SyntaxToken,
+};
+
+use crate::RootDatabase;
+
+// FIXME: a more precise name would probably be `Symbol`?
+#[derive(Debug, PartialEq, Eq, Copy, Clone, Hash)]
+pub enum Definition {
+ Macro(Macro),
+ Field(Field),
+ Module(Module),
+ Function(Function),
+ Adt(Adt),
+ Variant(Variant),
+ Const(Const),
+ Static(Static),
+ Trait(Trait),
+ TypeAlias(TypeAlias),
+ BuiltinType(BuiltinType),
+ SelfType(Impl),
+ Local(Local),
+ GenericParam(GenericParam),
+ Label(Label),
+ DeriveHelper(DeriveHelper),
+ BuiltinAttr(BuiltinAttr),
+ ToolModule(ToolModule),
+}
+
+impl Definition {
+ pub fn canonical_module_path(&self, db: &RootDatabase) -> Option<impl Iterator<Item = Module>> {
+ self.module(db).map(|it| it.path_to_root(db).into_iter().rev())
+ }
+
+ pub fn krate(&self, db: &RootDatabase) -> Option<Crate> {
+ Some(match self {
+ Definition::Module(m) => m.krate(),
+ _ => self.module(db)?.krate(),
+ })
+ }
+
+ pub fn module(&self, db: &RootDatabase) -> Option<Module> {
+ let module = match self {
+ Definition::Macro(it) => it.module(db),
+ Definition::Module(it) => it.parent(db)?,
+ Definition::Field(it) => it.parent_def(db).module(db),
+ Definition::Function(it) => it.module(db),
+ Definition::Adt(it) => it.module(db),
+ Definition::Const(it) => it.module(db),
+ Definition::Static(it) => it.module(db),
+ Definition::Trait(it) => it.module(db),
+ Definition::TypeAlias(it) => it.module(db),
+ Definition::Variant(it) => it.module(db),
+ Definition::SelfType(it) => it.module(db),
+ Definition::Local(it) => it.module(db),
+ Definition::GenericParam(it) => it.module(db),
+ Definition::Label(it) => it.module(db),
+ Definition::DeriveHelper(it) => it.derive().module(db),
+ Definition::BuiltinAttr(_) | Definition::BuiltinType(_) | Definition::ToolModule(_) => {
+ return None
+ }
+ };
+ Some(module)
+ }
+
+ pub fn visibility(&self, db: &RootDatabase) -> Option<Visibility> {
+ let vis = match self {
+ Definition::Field(sf) => sf.visibility(db),
+ Definition::Module(it) => it.visibility(db),
+ Definition::Function(it) => it.visibility(db),
+ Definition::Adt(it) => it.visibility(db),
+ Definition::Const(it) => it.visibility(db),
+ Definition::Static(it) => it.visibility(db),
+ Definition::Trait(it) => it.visibility(db),
+ Definition::TypeAlias(it) => it.visibility(db),
+ Definition::Variant(it) => it.visibility(db),
+ Definition::BuiltinType(_) => Visibility::Public,
+ Definition::Macro(_) => return None,
+ Definition::BuiltinAttr(_)
+ | Definition::ToolModule(_)
+ | Definition::SelfType(_)
+ | Definition::Local(_)
+ | Definition::GenericParam(_)
+ | Definition::Label(_)
+ | Definition::DeriveHelper(_) => return None,
+ };
+ Some(vis)
+ }
+
+ pub fn name(&self, db: &RootDatabase) -> Option<Name> {
+ let name = match self {
+ Definition::Macro(it) => it.name(db),
+ Definition::Field(it) => it.name(db),
+ Definition::Module(it) => it.name(db)?,
+ Definition::Function(it) => it.name(db),
+ Definition::Adt(it) => it.name(db),
+ Definition::Variant(it) => it.name(db),
+ Definition::Const(it) => it.name(db)?,
+ Definition::Static(it) => it.name(db),
+ Definition::Trait(it) => it.name(db),
+ Definition::TypeAlias(it) => it.name(db),
+ Definition::BuiltinType(it) => it.name(),
+ Definition::SelfType(_) => return None,
+ Definition::Local(it) => it.name(db),
+ Definition::GenericParam(it) => it.name(db),
+ Definition::Label(it) => it.name(db),
+ Definition::BuiltinAttr(_) => return None, // FIXME
+ Definition::ToolModule(_) => return None, // FIXME
+ Definition::DeriveHelper(it) => it.name(db),
+ };
+ Some(name)
+ }
+}
+
++// FIXME: IdentClass as a name no longer fits
+#[derive(Debug)]
+pub enum IdentClass {
+ NameClass(NameClass),
+ NameRefClass(NameRefClass),
++ Operator(OperatorClass),
+}
+
+impl IdentClass {
+ pub fn classify_node(
+ sema: &Semantics<'_, RootDatabase>,
+ node: &SyntaxNode,
+ ) -> Option<IdentClass> {
+ match_ast! {
+ match node {
+ ast::Name(name) => NameClass::classify(sema, &name).map(IdentClass::NameClass),
+ ast::NameRef(name_ref) => NameRefClass::classify(sema, &name_ref).map(IdentClass::NameRefClass),
+ ast::Lifetime(lifetime) => {
+ NameClass::classify_lifetime(sema, &lifetime)
+ .map(IdentClass::NameClass)
+ .or_else(|| NameRefClass::classify_lifetime(sema, &lifetime).map(IdentClass::NameRefClass))
+ },
++ ast::AwaitExpr(await_expr) => OperatorClass::classify_await(sema, &await_expr).map(IdentClass::Operator),
++ ast::BinExpr(bin_expr) => OperatorClass::classify_bin(sema, &bin_expr).map(IdentClass::Operator),
++ ast::IndexExpr(index_expr) => OperatorClass::classify_index(sema, &index_expr).map(IdentClass::Operator),
++ ast::PrefixExpr(prefix_expr) => OperatorClass::classify_prefix(sema,&prefix_expr).map(IdentClass::Operator),
++ ast::TryExpr(try_expr) => OperatorClass::classify_try(sema,&try_expr).map(IdentClass::Operator),
+ _ => None,
+ }
+ }
+ }
+
+ pub fn classify_token(
+ sema: &Semantics<'_, RootDatabase>,
+ token: &SyntaxToken,
+ ) -> Option<IdentClass> {
+ let parent = token.parent()?;
+ Self::classify_node(sema, &parent)
+ }
+
+ pub fn classify_lifetime(
+ sema: &Semantics<'_, RootDatabase>,
+ lifetime: &ast::Lifetime,
+ ) -> Option<IdentClass> {
+ NameRefClass::classify_lifetime(sema, lifetime)
+ .map(IdentClass::NameRefClass)
+ .or_else(|| NameClass::classify_lifetime(sema, lifetime).map(IdentClass::NameClass))
+ }
+
+ pub fn definitions(self) -> ArrayVec<Definition, 2> {
+ let mut res = ArrayVec::new();
+ match self {
+ IdentClass::NameClass(NameClass::Definition(it) | NameClass::ConstReference(it)) => {
+ res.push(it)
+ }
+ IdentClass::NameClass(NameClass::PatFieldShorthand { local_def, field_ref }) => {
+ res.push(Definition::Local(local_def));
+ res.push(Definition::Field(field_ref));
+ }
+ IdentClass::NameRefClass(NameRefClass::Definition(it)) => res.push(it),
+ IdentClass::NameRefClass(NameRefClass::FieldShorthand { local_ref, field_ref }) => {
+ res.push(Definition::Local(local_ref));
+ res.push(Definition::Field(field_ref));
+ }
++ IdentClass::Operator(
++ OperatorClass::Await(func)
++ | OperatorClass::Prefix(func)
++ | OperatorClass::Bin(func)
++ | OperatorClass::Index(func)
++ | OperatorClass::Try(func),
++ ) => res.push(Definition::Function(func)),
++ }
++ res
++ }
++
++ pub fn definitions_no_ops(self) -> ArrayVec<Definition, 2> {
++ let mut res = ArrayVec::new();
++ match self {
++ IdentClass::NameClass(NameClass::Definition(it) | NameClass::ConstReference(it)) => {
++ res.push(it)
++ }
++ IdentClass::NameClass(NameClass::PatFieldShorthand { local_def, field_ref }) => {
++ res.push(Definition::Local(local_def));
++ res.push(Definition::Field(field_ref));
++ }
++ IdentClass::NameRefClass(NameRefClass::Definition(it)) => res.push(it),
++ IdentClass::NameRefClass(NameRefClass::FieldShorthand { local_ref, field_ref }) => {
++ res.push(Definition::Local(local_ref));
++ res.push(Definition::Field(field_ref));
++ }
++ IdentClass::Operator(_) => (),
+ }
+ res
+ }
+}
+
+/// At first blush, a single `ast::Name` defines a single definition at some
+/// scope. That is, by just looking at the syntactical category, we can
+/// unambiguously determine the semantic category.
+///
+/// Sadly, that's not 100% true, there are special cases. To make sure that
+/// callers handle all the special cases correctly via exhaustive matching, we
+/// add a [`NameClass`] enum which lists all of them!
+///
+/// A model special case is `None` constant in pattern.
+#[derive(Debug)]
+pub enum NameClass {
+ Definition(Definition),
+ /// `None` in `if let None = Some(82) {}`.
+ /// Syntactically, it is a name, but semantically it is a reference.
+ ConstReference(Definition),
+ /// `field` in `if let Foo { field } = foo`. Here, `ast::Name` both introduces
+ /// a definition into a local scope, and refers to an existing definition.
+ PatFieldShorthand {
+ local_def: Local,
+ field_ref: Field,
+ },
+}
+
+impl NameClass {
+ /// `Definition` defined by this name.
+ pub fn defined(self) -> Option<Definition> {
+ let res = match self {
+ NameClass::Definition(it) => it,
+ NameClass::ConstReference(_) => return None,
+ NameClass::PatFieldShorthand { local_def, field_ref: _ } => {
+ Definition::Local(local_def)
+ }
+ };
+ Some(res)
+ }
+
+ pub fn classify(sema: &Semantics<'_, RootDatabase>, name: &ast::Name) -> Option<NameClass> {
+ let _p = profile::span("classify_name");
+
+ let parent = name.syntax().parent()?;
+
+ let definition = match_ast! {
+ match parent {
+ ast::Item(it) => classify_item(sema, it)?,
+ ast::IdentPat(it) => return classify_ident_pat(sema, it),
+ ast::Rename(it) => classify_rename(sema, it)?,
+ ast::SelfParam(it) => Definition::Local(sema.to_def(&it)?),
+ ast::RecordField(it) => Definition::Field(sema.to_def(&it)?),
+ ast::Variant(it) => Definition::Variant(sema.to_def(&it)?),
+ ast::TypeParam(it) => Definition::GenericParam(sema.to_def(&it)?.into()),
+ ast::ConstParam(it) => Definition::GenericParam(sema.to_def(&it)?.into()),
+ _ => return None,
+ }
+ };
+ return Some(NameClass::Definition(definition));
+
+ fn classify_item(
+ sema: &Semantics<'_, RootDatabase>,
+ item: ast::Item,
+ ) -> Option<Definition> {
+ let definition = match item {
+ ast::Item::MacroRules(it) => {
+ Definition::Macro(sema.to_def(&ast::Macro::MacroRules(it))?)
+ }
+ ast::Item::MacroDef(it) => {
+ Definition::Macro(sema.to_def(&ast::Macro::MacroDef(it))?)
+ }
+ ast::Item::Const(it) => Definition::Const(sema.to_def(&it)?),
+ ast::Item::Fn(it) => {
+ let def = sema.to_def(&it)?;
+ def.as_proc_macro(sema.db)
+ .map(Definition::Macro)
+ .unwrap_or(Definition::Function(def))
+ }
+ ast::Item::Module(it) => Definition::Module(sema.to_def(&it)?),
+ ast::Item::Static(it) => Definition::Static(sema.to_def(&it)?),
+ ast::Item::Trait(it) => Definition::Trait(sema.to_def(&it)?),
+ ast::Item::TypeAlias(it) => Definition::TypeAlias(sema.to_def(&it)?),
+ ast::Item::Enum(it) => Definition::Adt(hir::Adt::Enum(sema.to_def(&it)?)),
+ ast::Item::Struct(it) => Definition::Adt(hir::Adt::Struct(sema.to_def(&it)?)),
+ ast::Item::Union(it) => Definition::Adt(hir::Adt::Union(sema.to_def(&it)?)),
+ _ => return None,
+ };
+ Some(definition)
+ }
+
+ fn classify_ident_pat(
+ sema: &Semantics<'_, RootDatabase>,
+ ident_pat: ast::IdentPat,
+ ) -> Option<NameClass> {
+ if let Some(def) = sema.resolve_bind_pat_to_const(&ident_pat) {
+ return Some(NameClass::ConstReference(Definition::from(def)));
+ }
+
+ let local = sema.to_def(&ident_pat)?;
+ let pat_parent = ident_pat.syntax().parent();
+ if let Some(record_pat_field) = pat_parent.and_then(ast::RecordPatField::cast) {
+ if record_pat_field.name_ref().is_none() {
+ if let Some(field) = sema.resolve_record_pat_field(&record_pat_field) {
+ return Some(NameClass::PatFieldShorthand {
+ local_def: local,
+ field_ref: field,
+ });
+ }
+ }
+ }
+ Some(NameClass::Definition(Definition::Local(local)))
+ }
+
+ fn classify_rename(
+ sema: &Semantics<'_, RootDatabase>,
+ rename: ast::Rename,
+ ) -> Option<Definition> {
+ if let Some(use_tree) = rename.syntax().parent().and_then(ast::UseTree::cast) {
+ let path = use_tree.path()?;
+ sema.resolve_path(&path).map(Definition::from)
+ } else {
+ let extern_crate = rename.syntax().parent().and_then(ast::ExternCrate::cast)?;
+ let krate = sema.resolve_extern_crate(&extern_crate)?;
+ let root_module = krate.root_module(sema.db);
+ Some(Definition::Module(root_module))
+ }
+ }
+ }
+
+ pub fn classify_lifetime(
+ sema: &Semantics<'_, RootDatabase>,
+ lifetime: &ast::Lifetime,
+ ) -> Option<NameClass> {
+ let _p = profile::span("classify_lifetime").detail(|| lifetime.to_string());
+ let parent = lifetime.syntax().parent()?;
+
+ if let Some(it) = ast::LifetimeParam::cast(parent.clone()) {
+ sema.to_def(&it).map(Into::into).map(Definition::GenericParam)
+ } else if let Some(it) = ast::Label::cast(parent) {
+ sema.to_def(&it).map(Definition::Label)
+ } else {
+ None
+ }
+ .map(NameClass::Definition)
+ }
+}
+
++#[derive(Debug)]
++pub enum OperatorClass {
++ Await(Function),
++ Prefix(Function),
++ Index(Function),
++ Try(Function),
++ Bin(Function),
++}
++
++impl OperatorClass {
++ pub fn classify_await(
++ sema: &Semantics<'_, RootDatabase>,
++ await_expr: &ast::AwaitExpr,
++ ) -> Option<OperatorClass> {
++ sema.resolve_await_to_poll(await_expr).map(OperatorClass::Await)
++ }
++
++ pub fn classify_prefix(
++ sema: &Semantics<'_, RootDatabase>,
++ prefix_expr: &ast::PrefixExpr,
++ ) -> Option<OperatorClass> {
++ sema.resolve_prefix_expr(prefix_expr).map(OperatorClass::Prefix)
++ }
++
++ pub fn classify_try(
++ sema: &Semantics<'_, RootDatabase>,
++ try_expr: &ast::TryExpr,
++ ) -> Option<OperatorClass> {
++ sema.resolve_try_expr(try_expr).map(OperatorClass::Try)
++ }
++
++ pub fn classify_index(
++ sema: &Semantics<'_, RootDatabase>,
++ index_expr: &ast::IndexExpr,
++ ) -> Option<OperatorClass> {
++ sema.resolve_index_expr(index_expr).map(OperatorClass::Index)
++ }
++
++ pub fn classify_bin(
++ sema: &Semantics<'_, RootDatabase>,
++ bin_expr: &ast::BinExpr,
++ ) -> Option<OperatorClass> {
++ sema.resolve_bin_expr(bin_expr).map(OperatorClass::Bin)
++ }
++}
++
+/// This is similar to [`NameClass`], but works for [`ast::NameRef`] rather than
+/// for [`ast::Name`]. Similarly, what looks like a reference in syntax is a
+/// reference most of the time, but there are a couple of annoying exceptions.
+///
+/// A model special case is field shorthand syntax, which uses a single
+/// reference to point to two different defs.
+#[derive(Debug)]
+pub enum NameRefClass {
+ Definition(Definition),
+ FieldShorthand { local_ref: Local, field_ref: Field },
+}
+
+impl NameRefClass {
+ // Note: we don't have unit-tests for this rather important function.
+ // It is primarily exercised via goto definition tests in `ide`.
+ pub fn classify(
+ sema: &Semantics<'_, RootDatabase>,
+ name_ref: &ast::NameRef,
+ ) -> Option<NameRefClass> {
+ let _p = profile::span("classify_name_ref").detail(|| name_ref.to_string());
+
+ let parent = name_ref.syntax().parent()?;
+
+ if let Some(record_field) = ast::RecordExprField::for_field_name(name_ref) {
+ if let Some((field, local, _)) = sema.resolve_record_field(&record_field) {
+ let res = match local {
+ None => NameRefClass::Definition(Definition::Field(field)),
+ Some(local) => {
+ NameRefClass::FieldShorthand { field_ref: field, local_ref: local }
+ }
+ };
+ return Some(res);
+ }
+ }
+
+ if let Some(path) = ast::PathSegment::cast(parent.clone()).map(|it| it.parent_path()) {
+ if path.parent_path().is_none() {
+ if let Some(macro_call) = path.syntax().parent().and_then(ast::MacroCall::cast) {
+ // Only use this to resolve to macro calls for last segments as qualifiers resolve
+ // to modules below.
+ if let Some(macro_def) = sema.resolve_macro_call(¯o_call) {
+ return Some(NameRefClass::Definition(Definition::Macro(macro_def)));
+ }
+ }
+ }
+ return sema.resolve_path(&path).map(Into::into).map(NameRefClass::Definition);
+ }
+
+ match_ast! {
+ match parent {
+ ast::MethodCallExpr(method_call) => {
+ sema.resolve_method_call(&method_call)
+ .map(Definition::Function)
+ .map(NameRefClass::Definition)
+ },
+ ast::FieldExpr(field_expr) => {
+ sema.resolve_field(&field_expr)
+ .map(Definition::Field)
+ .map(NameRefClass::Definition)
+ },
+ ast::RecordPatField(record_pat_field) => {
+ sema.resolve_record_pat_field(&record_pat_field)
+ .map(Definition::Field)
+ .map(NameRefClass::Definition)
+ },
+ ast::AssocTypeArg(_) => {
+ // `Trait<Assoc = Ty>`
+ // ^^^^^
+ let containing_path = name_ref.syntax().ancestors().find_map(ast::Path::cast)?;
+ let resolved = sema.resolve_path(&containing_path)?;
+ if let PathResolution::Def(ModuleDef::Trait(tr)) = resolved {
+ if let Some(ty) = tr
+ .items_with_supertraits(sema.db)
+ .iter()
+ .filter_map(|&assoc| match assoc {
+ hir::AssocItem::TypeAlias(it) => Some(it),
+ _ => None,
+ })
+ .find(|alias| alias.name(sema.db).to_smol_str() == name_ref.text().as_str())
+ {
+ return Some(NameRefClass::Definition(Definition::TypeAlias(ty)));
+ }
+ }
+ None
+ },
+ ast::ExternCrate(extern_crate) => {
+ let krate = sema.resolve_extern_crate(&extern_crate)?;
+ let root_module = krate.root_module(sema.db);
+ Some(NameRefClass::Definition(Definition::Module(root_module)))
+ },
+ _ => None
+ }
+ }
+ }
+
+ pub fn classify_lifetime(
+ sema: &Semantics<'_, RootDatabase>,
+ lifetime: &ast::Lifetime,
+ ) -> Option<NameRefClass> {
+ let _p = profile::span("classify_lifetime_ref").detail(|| lifetime.to_string());
+ let parent = lifetime.syntax().parent()?;
+ match parent.kind() {
+ SyntaxKind::BREAK_EXPR | SyntaxKind::CONTINUE_EXPR => {
+ sema.resolve_label(lifetime).map(Definition::Label).map(NameRefClass::Definition)
+ }
+ SyntaxKind::LIFETIME_ARG
+ | SyntaxKind::SELF_PARAM
+ | SyntaxKind::TYPE_BOUND
+ | SyntaxKind::WHERE_PRED
+ | SyntaxKind::REF_TYPE => sema
+ .resolve_lifetime_param(lifetime)
+ .map(GenericParam::LifetimeParam)
+ .map(Definition::GenericParam)
+ .map(NameRefClass::Definition),
+ // lifetime bounds, as in the 'b in 'a: 'b aren't wrapped in TypeBound nodes so we gotta check
+ // if our lifetime is in a LifetimeParam without being the constrained lifetime
+ _ if ast::LifetimeParam::cast(parent).and_then(|param| param.lifetime()).as_ref()
+ != Some(lifetime) =>
+ {
+ sema.resolve_lifetime_param(lifetime)
+ .map(GenericParam::LifetimeParam)
+ .map(Definition::GenericParam)
+ .map(NameRefClass::Definition)
+ }
+ _ => None,
+ }
+ }
+}
+
+impl_from!(
+ Field, Module, Function, Adt, Variant, Const, Static, Trait, TypeAlias, BuiltinType, Local,
+ GenericParam, Label, Macro
+ for Definition
+);
+
+impl From<Impl> for Definition {
+ fn from(impl_: Impl) -> Self {
+ Definition::SelfType(impl_)
+ }
+}
+
+impl AsAssocItem for Definition {
+ fn as_assoc_item(self, db: &dyn hir::db::HirDatabase) -> Option<AssocItem> {
+ match self {
+ Definition::Function(it) => it.as_assoc_item(db),
+ Definition::Const(it) => it.as_assoc_item(db),
+ Definition::TypeAlias(it) => it.as_assoc_item(db),
+ _ => None,
+ }
+ }
+}
+
+impl From<AssocItem> for Definition {
+ fn from(assoc_item: AssocItem) -> Self {
+ match assoc_item {
+ AssocItem::Function(it) => Definition::Function(it),
+ AssocItem::Const(it) => Definition::Const(it),
+ AssocItem::TypeAlias(it) => Definition::TypeAlias(it),
+ }
+ }
+}
+
+impl From<PathResolution> for Definition {
+ fn from(path_resolution: PathResolution) -> Self {
+ match path_resolution {
+ PathResolution::Def(def) => def.into(),
+ PathResolution::Local(local) => Definition::Local(local),
+ PathResolution::TypeParam(par) => Definition::GenericParam(par.into()),
+ PathResolution::ConstParam(par) => Definition::GenericParam(par.into()),
+ PathResolution::SelfType(impl_def) => Definition::SelfType(impl_def),
+ PathResolution::BuiltinAttr(attr) => Definition::BuiltinAttr(attr),
+ PathResolution::ToolModule(tool) => Definition::ToolModule(tool),
+ PathResolution::DeriveHelper(helper) => Definition::DeriveHelper(helper),
+ }
+ }
+}
+
+impl From<ModuleDef> for Definition {
+ fn from(def: ModuleDef) -> Self {
+ match def {
+ ModuleDef::Module(it) => Definition::Module(it),
+ ModuleDef::Function(it) => Definition::Function(it),
+ ModuleDef::Adt(it) => Definition::Adt(it),
+ ModuleDef::Variant(it) => Definition::Variant(it),
+ ModuleDef::Const(it) => Definition::Const(it),
+ ModuleDef::Static(it) => Definition::Static(it),
+ ModuleDef::Trait(it) => Definition::Trait(it),
+ ModuleDef::TypeAlias(it) => Definition::TypeAlias(it),
+ ModuleDef::Macro(it) => Definition::Macro(it),
+ ModuleDef::BuiltinType(it) => Definition::BuiltinType(it),
+ }
+ }
+}
+
+impl From<Definition> for Option<ItemInNs> {
+ fn from(def: Definition) -> Self {
+ let item = match def {
+ Definition::Module(it) => ModuleDef::Module(it),
+ Definition::Function(it) => ModuleDef::Function(it),
+ Definition::Adt(it) => ModuleDef::Adt(it),
+ Definition::Variant(it) => ModuleDef::Variant(it),
+ Definition::Const(it) => ModuleDef::Const(it),
+ Definition::Static(it) => ModuleDef::Static(it),
+ Definition::Trait(it) => ModuleDef::Trait(it),
+ Definition::TypeAlias(it) => ModuleDef::TypeAlias(it),
+ Definition::BuiltinType(it) => ModuleDef::BuiltinType(it),
+ _ => return None,
+ };
+ Some(ItemInNs::from(item))
+ }
+}
--- /dev/null
- // | VS Code | **Rust Analyzer: Structural Search Replace**
+//! Structural Search Replace
+//!
+//! Allows searching the AST for code that matches one or more patterns and then replacing that code
+//! based on a template.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+// Feature: Structural Search and Replace
+//
+// Search and replace with named wildcards that will match any expression, type, path, pattern or item.
+// The syntax for a structural search replace command is `<search_pattern> ==>> <replace_pattern>`.
+// A `$<name>` placeholder in the search pattern will match any AST node and `$<name>` will reference it in the replacement.
+// Within a macro call, a placeholder will match up until whatever token follows the placeholder.
+//
+// All paths in both the search pattern and the replacement template must resolve in the context
+// in which this command is invoked. Paths in the search pattern will then match the code if they
+// resolve to the same item, even if they're written differently. For example if we invoke the
+// command in the module `foo` with a pattern of `Bar`, then code in the parent module that refers
+// to `foo::Bar` will match.
+//
+// Paths in the replacement template will be rendered appropriately for the context in which the
+// replacement occurs. For example if our replacement template is `foo::Bar` and we match some
+// code in the `foo` module, we'll insert just `Bar`.
+//
+// Inherent method calls should generally be written in UFCS form. e.g. `foo::Bar::baz($s, $a)` will
+// match `$s.baz($a)`, provided the method call `baz` resolves to the method `foo::Bar::baz`. When a
+// placeholder is the receiver of a method call in the search pattern (e.g. `$s.foo()`), but not in
+// the replacement template (e.g. `bar($s)`), then *, & and &mut will be added as needed to mirror
+// whatever autoderef and autoref was happening implicitly in the matched code.
+//
+// The scope of the search / replace will be restricted to the current selection if any, otherwise
+// it will apply to the whole workspace.
+//
+// Placeholders may be given constraints by writing them as `${<name>:<constraint1>:<constraint2>...}`.
+//
+// Supported constraints:
+//
+// |===
+// | Constraint | Restricts placeholder
+//
+// | kind(literal) | Is a literal (e.g. `42` or `"forty two"`)
+// | not(a) | Negates the constraint `a`
+// |===
+//
+// Available via the command `rust-analyzer.ssr`.
+//
+// ```rust
+// // Using structural search replace command [foo($a, $b) ==>> ($a).foo($b)]
+//
+// // BEFORE
+// String::from(foo(y + 5, z))
+//
+// // AFTER
+// String::from((y + 5).foo(z))
+// ```
+//
+// |===
+// | Editor | Action Name
+//
++// | VS Code | **rust-analyzer: Structural Search Replace**
+// |===
+//
+// Also available as an assist, by writing a comment containing the structural
+// search and replace rule. You will only see the assist if the comment can
+// be parsed as a valid structural search and replace rule.
+//
+// ```rust
+// // Place the cursor on the line below to see the assist 💡.
+// // foo($a, $b) ==>> ($a).foo($b)
+// ```
+
+mod from_comment;
+mod matching;
+mod nester;
+mod parsing;
+mod fragments;
+mod replacing;
+mod resolving;
+mod search;
+#[macro_use]
+mod errors;
+#[cfg(test)]
+mod tests;
+
+pub use crate::{errors::SsrError, from_comment::ssr_from_comment, matching::Match};
+
+use crate::{errors::bail, matching::MatchFailureReason};
+use hir::Semantics;
+use ide_db::{
+ base_db::{FileId, FilePosition, FileRange},
+ FxHashMap,
+};
+use resolving::ResolvedRule;
+use syntax::{ast, AstNode, SyntaxNode, TextRange};
+use text_edit::TextEdit;
+
+// A structured search replace rule. Created by calling `parse` on a str.
+#[derive(Debug)]
+pub struct SsrRule {
+ /// A structured pattern that we're searching for.
+ pattern: parsing::RawPattern,
+ /// What we'll replace it with.
+ template: parsing::RawPattern,
+ parsed_rules: Vec<parsing::ParsedRule>,
+}
+
+#[derive(Debug)]
+pub struct SsrPattern {
+ parsed_rules: Vec<parsing::ParsedRule>,
+}
+
+#[derive(Debug, Default)]
+pub struct SsrMatches {
+ pub matches: Vec<Match>,
+}
+
+/// Searches a crate for pattern matches and possibly replaces them with something else.
+pub struct MatchFinder<'db> {
+ /// Our source of information about the user's code.
+ sema: Semantics<'db, ide_db::RootDatabase>,
+ rules: Vec<ResolvedRule>,
+ resolution_scope: resolving::ResolutionScope<'db>,
+ restrict_ranges: Vec<FileRange>,
+}
+
+impl<'db> MatchFinder<'db> {
+ /// Constructs a new instance where names will be looked up as if they appeared at
+ /// `lookup_context`.
+ pub fn in_context(
+ db: &'db ide_db::RootDatabase,
+ lookup_context: FilePosition,
+ mut restrict_ranges: Vec<FileRange>,
+ ) -> Result<MatchFinder<'db>, SsrError> {
+ restrict_ranges.retain(|range| !range.range.is_empty());
+ let sema = Semantics::new(db);
+ let resolution_scope = resolving::ResolutionScope::new(&sema, lookup_context)
+ .ok_or_else(|| SsrError("no resolution scope for file".into()))?;
+ Ok(MatchFinder { sema, rules: Vec::new(), resolution_scope, restrict_ranges })
+ }
+
+ /// Constructs an instance using the start of the first file in `db` as the lookup context.
+ pub fn at_first_file(db: &'db ide_db::RootDatabase) -> Result<MatchFinder<'db>, SsrError> {
+ use ide_db::base_db::SourceDatabaseExt;
+ use ide_db::symbol_index::SymbolsDatabase;
+ if let Some(first_file_id) =
+ db.local_roots().iter().next().and_then(|root| db.source_root(*root).iter().next())
+ {
+ MatchFinder::in_context(
+ db,
+ FilePosition { file_id: first_file_id, offset: 0.into() },
+ vec![],
+ )
+ } else {
+ bail!("No files to search");
+ }
+ }
+
+ /// Adds a rule to be applied. The order in which rules are added matters. Earlier rules take
+ /// precedence. If a node is matched by an earlier rule, then later rules won't be permitted to
+ /// match to it.
+ pub fn add_rule(&mut self, rule: SsrRule) -> Result<(), SsrError> {
+ for parsed_rule in rule.parsed_rules {
+ self.rules.push(ResolvedRule::new(
+ parsed_rule,
+ &self.resolution_scope,
+ self.rules.len(),
+ )?);
+ }
+ Ok(())
+ }
+
+ /// Finds matches for all added rules and returns edits for all found matches.
+ pub fn edits(&self) -> FxHashMap<FileId, TextEdit> {
+ use ide_db::base_db::SourceDatabaseExt;
+ let mut matches_by_file = FxHashMap::default();
+ for m in self.matches().matches {
+ matches_by_file
+ .entry(m.range.file_id)
+ .or_insert_with(SsrMatches::default)
+ .matches
+ .push(m);
+ }
+ matches_by_file
+ .into_iter()
+ .map(|(file_id, matches)| {
+ (
+ file_id,
+ replacing::matches_to_edit(
+ &matches,
+ &self.sema.db.file_text(file_id),
+ &self.rules,
+ ),
+ )
+ })
+ .collect()
+ }
+
+ /// Adds a search pattern. For use if you intend to only call `find_matches_in_file`. If you
+ /// intend to do replacement, use `add_rule` instead.
+ pub fn add_search_pattern(&mut self, pattern: SsrPattern) -> Result<(), SsrError> {
+ for parsed_rule in pattern.parsed_rules {
+ self.rules.push(ResolvedRule::new(
+ parsed_rule,
+ &self.resolution_scope,
+ self.rules.len(),
+ )?);
+ }
+ Ok(())
+ }
+
+ /// Returns matches for all added rules.
+ pub fn matches(&self) -> SsrMatches {
+ let mut matches = Vec::new();
+ let mut usage_cache = search::UsageCache::default();
+ for rule in &self.rules {
+ self.find_matches_for_rule(rule, &mut usage_cache, &mut matches);
+ }
+ nester::nest_and_remove_collisions(matches, &self.sema)
+ }
+
+ /// Finds all nodes in `file_id` whose text is exactly equal to `snippet` and attempts to match
+ /// them, while recording reasons why they don't match. This API is useful for command
+ /// line-based debugging where providing a range is difficult.
+ pub fn debug_where_text_equal(&self, file_id: FileId, snippet: &str) -> Vec<MatchDebugInfo> {
+ use ide_db::base_db::SourceDatabaseExt;
+ let file = self.sema.parse(file_id);
+ let mut res = Vec::new();
+ let file_text = self.sema.db.file_text(file_id);
+ let mut remaining_text = file_text.as_str();
+ let mut base = 0;
+ let len = snippet.len() as u32;
+ while let Some(offset) = remaining_text.find(snippet) {
+ let start = base + offset as u32;
+ let end = start + len;
+ self.output_debug_for_nodes_at_range(
+ file.syntax(),
+ FileRange { file_id, range: TextRange::new(start.into(), end.into()) },
+ &None,
+ &mut res,
+ );
+ remaining_text = &remaining_text[offset + snippet.len()..];
+ base = end;
+ }
+ res
+ }
+
+ fn output_debug_for_nodes_at_range(
+ &self,
+ node: &SyntaxNode,
+ range: FileRange,
+ restrict_range: &Option<FileRange>,
+ out: &mut Vec<MatchDebugInfo>,
+ ) {
+ for node in node.children() {
+ let node_range = self.sema.original_range(&node);
+ if node_range.file_id != range.file_id || !node_range.range.contains_range(range.range)
+ {
+ continue;
+ }
+ if node_range.range == range.range {
+ for rule in &self.rules {
+ // For now we ignore rules that have a different kind than our node, otherwise
+ // we get lots of noise. If at some point we add support for restricting rules
+ // to a particular kind of thing (e.g. only match type references), then we can
+ // relax this. We special-case expressions, since function calls can match
+ // method calls.
+ if rule.pattern.node.kind() != node.kind()
+ && !(ast::Expr::can_cast(rule.pattern.node.kind())
+ && ast::Expr::can_cast(node.kind()))
+ {
+ continue;
+ }
+ out.push(MatchDebugInfo {
+ matched: matching::get_match(true, rule, &node, restrict_range, &self.sema)
+ .map_err(|e| MatchFailureReason {
+ reason: e.reason.unwrap_or_else(|| {
+ "Match failed, but no reason was given".to_owned()
+ }),
+ }),
+ pattern: rule.pattern.node.clone(),
+ node: node.clone(),
+ });
+ }
+ } else if let Some(macro_call) = ast::MacroCall::cast(node.clone()) {
+ if let Some(expanded) = self.sema.expand(&macro_call) {
+ if let Some(tt) = macro_call.token_tree() {
+ self.output_debug_for_nodes_at_range(
+ &expanded,
+ range,
+ &Some(self.sema.original_range(tt.syntax())),
+ out,
+ );
+ }
+ }
+ }
+ self.output_debug_for_nodes_at_range(&node, range, restrict_range, out);
+ }
+ }
+}
+
+pub struct MatchDebugInfo {
+ node: SyntaxNode,
+ /// Our search pattern parsed as an expression or item, etc
+ pattern: SyntaxNode,
+ matched: Result<Match, MatchFailureReason>,
+}
+
+impl std::fmt::Debug for MatchDebugInfo {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match &self.matched {
+ Ok(_) => writeln!(f, "Node matched")?,
+ Err(reason) => writeln!(f, "Node failed to match because: {}", reason.reason)?,
+ }
+ writeln!(
+ f,
+ "============ AST ===========\n\
+ {:#?}",
+ self.node
+ )?;
+ writeln!(f, "========= PATTERN ==========")?;
+ writeln!(f, "{:#?}", self.pattern)?;
+ writeln!(f, "============================")?;
+ Ok(())
+ }
+}
+
+impl SsrMatches {
+ /// Returns `self` with any nested matches removed and made into top-level matches.
+ pub fn flattened(self) -> SsrMatches {
+ let mut out = SsrMatches::default();
+ self.flatten_into(&mut out);
+ out
+ }
+
+ fn flatten_into(self, out: &mut SsrMatches) {
+ for mut m in self.matches {
+ for p in m.placeholder_values.values_mut() {
+ std::mem::take(&mut p.inner_matches).flatten_into(out);
+ }
+ out.matches.push(m);
+ }
+ }
+}
+
+impl Match {
+ pub fn matched_text(&self) -> String {
+ self.matched_node.text().to_string()
+ }
+}
+
+impl std::error::Error for SsrError {}
+
+#[cfg(test)]
+impl MatchDebugInfo {
+ pub(crate) fn match_failure_reason(&self) -> Option<&str> {
+ self.matched.as_ref().err().map(|r| r.reason.as_str())
+ }
+}
--- /dev/null
- use syntax::{ast, AstNode, SyntaxKind::NAME, TextRange};
+//! Entry point for call-hierarchy
+
+use hir::Semantics;
+use ide_db::{
+ defs::{Definition, NameClass, NameRefClass},
+ helpers::pick_best_token,
+ search::FileReference,
+ FxIndexMap, RootDatabase,
+};
- NAME => 1,
++use syntax::{ast, AstNode, SyntaxKind::IDENT, TextRange};
+
+use crate::{goto_definition, FilePosition, NavigationTarget, RangeInfo, TryToNav};
+
+#[derive(Debug, Clone)]
+pub struct CallItem {
+ pub target: NavigationTarget,
+ pub ranges: Vec<TextRange>,
+}
+
+impl CallItem {
+ #[cfg(test)]
+ pub(crate) fn debug_render(&self) -> String {
+ format!("{} : {:?}", self.target.debug_render(), self.ranges)
+ }
+}
+
+pub(crate) fn call_hierarchy(
+ db: &RootDatabase,
+ position: FilePosition,
+) -> Option<RangeInfo<Vec<NavigationTarget>>> {
+ goto_definition::goto_definition(db, position)
+}
+
+pub(crate) fn incoming_calls(
+ db: &RootDatabase,
+ FilePosition { file_id, offset }: FilePosition,
+) -> Option<Vec<CallItem>> {
+ let sema = &Semantics::new(db);
+
+ let file = sema.parse(file_id);
+ let file = file.syntax();
+ let mut calls = CallLocations::default();
+
+ let references = sema
+ .find_nodes_at_offset_with_descend(file, offset)
+ .filter_map(move |node| match node {
+ ast::NameLike::NameRef(name_ref) => match NameRefClass::classify(sema, &name_ref)? {
+ NameRefClass::Definition(def @ Definition::Function(_)) => Some(def),
+ _ => None,
+ },
+ ast::NameLike::Name(name) => match NameClass::classify(sema, &name)? {
+ NameClass::Definition(def @ Definition::Function(_)) => Some(def),
+ _ => None,
+ },
+ ast::NameLike::Lifetime(_) => None,
+ })
+ .flat_map(|func| func.usages(sema).all());
+
+ for (_, references) in references {
+ let references = references.into_iter().map(|FileReference { name, .. }| name);
+ for name in references {
+ // This target is the containing function
+ let nav = sema.ancestors_with_macros(name.syntax().clone()).find_map(|node| {
+ let def = ast::Fn::cast(node).and_then(|fn_| sema.to_def(&fn_))?;
+ def.try_to_nav(sema.db)
+ });
+ if let Some(nav) = nav {
+ calls.add(nav, sema.original_range(name.syntax()).range);
+ }
+ }
+ }
+
+ Some(calls.into_items())
+}
+
+pub(crate) fn outgoing_calls(db: &RootDatabase, position: FilePosition) -> Option<Vec<CallItem>> {
+ let sema = Semantics::new(db);
+ let file_id = position.file_id;
+ let file = sema.parse(file_id);
+ let file = file.syntax();
+ let token = pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
++ IDENT => 1,
+ _ => 0,
+ })?;
+ let mut calls = CallLocations::default();
+
+ sema.descend_into_macros(token)
+ .into_iter()
+ .filter_map(|it| it.parent_ancestors().nth(1).and_then(ast::Item::cast))
+ .filter_map(|item| match item {
+ ast::Item::Const(c) => c.body().map(|it| it.syntax().descendants()),
+ ast::Item::Fn(f) => f.body().map(|it| it.syntax().descendants()),
+ ast::Item::Static(s) => s.body().map(|it| it.syntax().descendants()),
+ _ => None,
+ })
+ .flatten()
+ .filter_map(ast::CallableExpr::cast)
+ .filter_map(|call_node| {
+ let (nav_target, range) = match call_node {
+ ast::CallableExpr::Call(call) => {
+ let expr = call.expr()?;
+ let callable = sema.type_of_expr(&expr)?.original.as_callable(db)?;
+ match callable.kind() {
+ hir::CallableKind::Function(it) => {
+ let range = expr.syntax().text_range();
+ it.try_to_nav(db).zip(Some(range))
+ }
+ _ => None,
+ }
+ }
+ ast::CallableExpr::MethodCall(expr) => {
+ let range = expr.name_ref()?.syntax().text_range();
+ let function = sema.resolve_method_call(&expr)?;
+ function.try_to_nav(db).zip(Some(range))
+ }
+ }?;
+ Some((nav_target, range))
+ })
+ .for_each(|(nav, range)| calls.add(nav, range));
+
+ Some(calls.into_items())
+}
+
+#[derive(Default)]
+struct CallLocations {
+ funcs: FxIndexMap<NavigationTarget, Vec<TextRange>>,
+}
+
+impl CallLocations {
+ fn add(&mut self, target: NavigationTarget, range: TextRange) {
+ self.funcs.entry(target).or_default().push(range);
+ }
+
+ fn into_items(self) -> Vec<CallItem> {
+ self.funcs.into_iter().map(|(target, ranges)| CallItem { target, ranges }).collect()
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+ use ide_db::base_db::FilePosition;
+ use itertools::Itertools;
+
+ use crate::fixture;
+
+ fn check_hierarchy(
+ ra_fixture: &str,
+ expected: Expect,
+ expected_incoming: Expect,
+ expected_outgoing: Expect,
+ ) {
+ let (analysis, pos) = fixture::position(ra_fixture);
+
+ let mut navs = analysis.call_hierarchy(pos).unwrap().unwrap().info;
+ assert_eq!(navs.len(), 1);
+ let nav = navs.pop().unwrap();
+ expected.assert_eq(&nav.debug_render());
+
+ let item_pos =
+ FilePosition { file_id: nav.file_id, offset: nav.focus_or_full_range().start() };
+ let incoming_calls = analysis.incoming_calls(item_pos).unwrap().unwrap();
+ expected_incoming
+ .assert_eq(&incoming_calls.into_iter().map(|call| call.debug_render()).join("\n"));
+
+ let outgoing_calls = analysis.outgoing_calls(item_pos).unwrap().unwrap();
+ expected_outgoing
+ .assert_eq(&outgoing_calls.into_iter().map(|call| call.debug_render()).join("\n"));
+ }
+
+ #[test]
+ fn test_call_hierarchy_on_ref() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+fn callee() {}
+fn caller() {
+ call$0ee();
+}
+"#,
+ expect![["callee Function FileId(0) 0..14 3..9"]],
+ expect![["caller Function FileId(0) 15..44 18..24 : [33..39]"]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_on_def() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+fn call$0ee() {}
+fn caller() {
+ callee();
+}
+"#,
+ expect![["callee Function FileId(0) 0..14 3..9"]],
+ expect![["caller Function FileId(0) 15..44 18..24 : [33..39]"]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_in_same_fn() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+fn callee() {}
+fn caller() {
+ call$0ee();
+ callee();
+}
+"#,
+ expect![["callee Function FileId(0) 0..14 3..9"]],
+ expect![["caller Function FileId(0) 15..58 18..24 : [33..39, 47..53]"]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_in_different_fn() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+fn callee() {}
+fn caller1() {
+ call$0ee();
+}
+
+fn caller2() {
+ callee();
+}
+"#,
+ expect![["callee Function FileId(0) 0..14 3..9"]],
+ expect![["
+ caller1 Function FileId(0) 15..45 18..25 : [34..40]
+ caller2 Function FileId(0) 47..77 50..57 : [66..72]"]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_in_tests_mod() {
+ check_hierarchy(
+ r#"
+//- /lib.rs cfg:test
+fn callee() {}
+fn caller1() {
+ call$0ee();
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_caller() {
+ callee();
+ }
+}
+"#,
+ expect![["callee Function FileId(0) 0..14 3..9"]],
+ expect![[r#"
+ caller1 Function FileId(0) 15..45 18..25 : [34..40]
+ test_caller Function FileId(0) 95..149 110..121 : [134..140]"#]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_in_different_files() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+mod foo;
+use foo::callee;
+
+fn caller() {
+ call$0ee();
+}
+
+//- /foo/mod.rs
+pub fn callee() {}
+"#,
+ expect![["callee Function FileId(1) 0..18 7..13"]],
+ expect![["caller Function FileId(0) 27..56 30..36 : [45..51]"]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_outgoing() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+fn callee() {}
+fn call$0er() {
+ callee();
+ callee();
+}
+"#,
+ expect![["caller Function FileId(0) 15..58 18..24"]],
+ expect![[]],
+ expect![["callee Function FileId(0) 0..14 3..9 : [33..39, 47..53]"]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_outgoing_in_different_files() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+mod foo;
+use foo::callee;
+
+fn call$0er() {
+ callee();
+}
+
+//- /foo/mod.rs
+pub fn callee() {}
+"#,
+ expect![["caller Function FileId(0) 27..56 30..36"]],
+ expect![[]],
+ expect![["callee Function FileId(1) 0..18 7..13 : [45..51]"]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_incoming_outgoing() {
+ check_hierarchy(
+ r#"
+//- /lib.rs
+fn caller1() {
+ call$0er2();
+}
+
+fn caller2() {
+ caller3();
+}
+
+fn caller3() {
+
+}
+"#,
+ expect![["caller2 Function FileId(0) 33..64 36..43"]],
+ expect![["caller1 Function FileId(0) 0..31 3..10 : [19..26]"]],
+ expect![["caller3 Function FileId(0) 66..83 69..76 : [52..59]"]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_issue_5103() {
+ check_hierarchy(
+ r#"
+fn a() {
+ b()
+}
+
+fn b() {}
+
+fn main() {
+ a$0()
+}
+"#,
+ expect![["a Function FileId(0) 0..18 3..4"]],
+ expect![["main Function FileId(0) 31..52 34..38 : [47..48]"]],
+ expect![["b Function FileId(0) 20..29 23..24 : [13..14]"]],
+ );
+
+ check_hierarchy(
+ r#"
+fn a() {
+ b$0()
+}
+
+fn b() {}
+
+fn main() {
+ a()
+}
+"#,
+ expect![["b Function FileId(0) 20..29 23..24"]],
+ expect![["a Function FileId(0) 0..18 3..4 : [13..14]"]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_in_macros_incoming() {
+ check_hierarchy(
+ r#"
+macro_rules! define {
+ ($ident:ident) => {
+ fn $ident {}
+ }
+}
+macro_rules! call {
+ ($ident:ident) => {
+ $ident()
+ }
+}
+define!(callee)
+fn caller() {
+ call!(call$0ee);
+}
+"#,
+ expect![[r#"callee Function FileId(0) 144..159 152..158"#]],
+ expect![[r#"caller Function FileId(0) 160..194 163..169 : [184..190]"#]],
+ expect![[]],
+ );
+ check_hierarchy(
+ r#"
+macro_rules! define {
+ ($ident:ident) => {
+ fn $ident {}
+ }
+}
+macro_rules! call {
+ ($ident:ident) => {
+ $ident()
+ }
+}
+define!(cal$0lee)
+fn caller() {
+ call!(callee);
+}
+"#,
+ expect![[r#"callee Function FileId(0) 144..159 152..158"#]],
+ expect![[r#"caller Function FileId(0) 160..194 163..169 : [184..190]"#]],
+ expect![[]],
+ );
+ }
+
+ #[test]
+ fn test_call_hierarchy_in_macros_outgoing() {
+ check_hierarchy(
+ r#"
+macro_rules! define {
+ ($ident:ident) => {
+ fn $ident {}
+ }
+}
+macro_rules! call {
+ ($ident:ident) => {
+ $ident()
+ }
+}
+define!(callee)
+fn caller$0() {
+ call!(callee);
+}
+"#,
+ expect![[r#"caller Function FileId(0) 160..194 163..169"#]],
+ expect![[]],
+ // FIXME
+ expect![[]],
+ );
+ }
+}
--- /dev/null
- // | VS Code | **Rust Analyzer: Expand macro recursively**
+use hir::Semantics;
+use ide_db::{
+ base_db::FileId, helpers::pick_best_token,
+ syntax_helpers::insert_whitespace_into_node::insert_ws_into, RootDatabase,
+};
+use syntax::{ast, ted, AstNode, NodeOrToken, SyntaxKind, SyntaxNode, T};
+
+use crate::FilePosition;
+
+pub struct ExpandedMacro {
+ pub name: String,
+ pub expansion: String,
+}
+
+// Feature: Expand Macro Recursively
+//
+// Shows the full macro expansion of the macro at current cursor.
+//
+// |===
+// | Editor | Action Name
+//
- // due to how Rust Analyzer works internally, we need to special case derive attributes,
++// | VS Code | **rust-analyzer: Expand macro recursively**
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113020648-b3973180-917a-11eb-84a9-ecb921293dc5.gif[]
+pub(crate) fn expand_macro(db: &RootDatabase, position: FilePosition) -> Option<ExpandedMacro> {
+ let sema = Semantics::new(db);
+ let file = sema.parse(position.file_id);
+
+ let tok = pick_best_token(file.syntax().token_at_offset(position.offset), |kind| match kind {
+ SyntaxKind::IDENT => 1,
+ _ => 0,
+ })?;
+
++ // due to how rust-analyzer works internally, we need to special case derive attributes,
+ // otherwise they might not get found, e.g. here with the cursor at $0 `#[attr]` would expand:
+ // ```
+ // #[attr]
+ // #[derive($0Foo)]
+ // struct Bar;
+ // ```
+
+ let derive = sema.descend_into_macros(tok.clone()).into_iter().find_map(|descended| {
+ let hir_file = sema.hir_file_for(&descended.parent()?);
+ if !hir_file.is_derive_attr_pseudo_expansion(db) {
+ return None;
+ }
+
+ let name = descended.parent_ancestors().filter_map(ast::Path::cast).last()?.to_string();
+ // up map out of the #[derive] expansion
+ let token = hir::InFile::new(hir_file, descended).upmap(db)?.value;
+ let attr = token.parent_ancestors().find_map(ast::Attr::cast)?;
+ let expansions = sema.expand_derive_macro(&attr)?;
+ let idx = attr
+ .token_tree()?
+ .token_trees_and_tokens()
+ .filter_map(NodeOrToken::into_token)
+ .take_while(|it| it != &token)
+ .filter(|it| it.kind() == T![,])
+ .count();
+ let expansion =
+ format(db, SyntaxKind::MACRO_ITEMS, position.file_id, expansions.get(idx).cloned()?);
+ Some(ExpandedMacro { name, expansion })
+ });
+
+ if derive.is_some() {
+ return derive;
+ }
+
+ // FIXME: Intermix attribute and bang! expansions
+ // currently we only recursively expand one of the two types
+ let mut anc = tok.parent_ancestors();
+ let (name, expanded, kind) = loop {
+ let node = anc.next()?;
+
+ if let Some(item) = ast::Item::cast(node.clone()) {
+ if let Some(def) = sema.resolve_attr_macro_call(&item) {
+ break (
+ def.name(db).to_string(),
+ expand_attr_macro_recur(&sema, &item)?,
+ SyntaxKind::MACRO_ITEMS,
+ );
+ }
+ }
+ if let Some(mac) = ast::MacroCall::cast(node) {
+ break (
+ mac.path()?.segment()?.name_ref()?.to_string(),
+ expand_macro_recur(&sema, &mac)?,
+ mac.syntax().parent().map(|it| it.kind()).unwrap_or(SyntaxKind::MACRO_ITEMS),
+ );
+ }
+ };
+
+ // FIXME:
+ // macro expansion may lose all white space information
+ // But we hope someday we can use ra_fmt for that
+ let expansion = format(db, kind, position.file_id, expanded);
+
+ Some(ExpandedMacro { name, expansion })
+}
+
+fn expand_macro_recur(
+ sema: &Semantics<'_, RootDatabase>,
+ macro_call: &ast::MacroCall,
+) -> Option<SyntaxNode> {
+ let expanded = sema.expand(macro_call)?.clone_for_update();
+ expand(sema, expanded, ast::MacroCall::cast, expand_macro_recur)
+}
+
+fn expand_attr_macro_recur(
+ sema: &Semantics<'_, RootDatabase>,
+ item: &ast::Item,
+) -> Option<SyntaxNode> {
+ let expanded = sema.expand_attr_macro(item)?.clone_for_update();
+ expand(sema, expanded, ast::Item::cast, expand_attr_macro_recur)
+}
+
+fn expand<T: AstNode>(
+ sema: &Semantics<'_, RootDatabase>,
+ expanded: SyntaxNode,
+ f: impl FnMut(SyntaxNode) -> Option<T>,
+ exp: impl Fn(&Semantics<'_, RootDatabase>, &T) -> Option<SyntaxNode>,
+) -> Option<SyntaxNode> {
+ let children = expanded.descendants().filter_map(f);
+ let mut replacements = Vec::new();
+
+ for child in children {
+ if let Some(new_node) = exp(sema, &child) {
+ // check if the whole original syntax is replaced
+ if expanded == *child.syntax() {
+ return Some(new_node);
+ }
+ replacements.push((child, new_node));
+ }
+ }
+
+ replacements.into_iter().rev().for_each(|(old, new)| ted::replace(old.syntax(), new));
+ Some(expanded)
+}
+
+fn format(db: &RootDatabase, kind: SyntaxKind, file_id: FileId, expanded: SyntaxNode) -> String {
+ let expansion = insert_ws_into(expanded).to_string();
+
+ _format(db, kind, file_id, &expansion).unwrap_or(expansion)
+}
+
+#[cfg(any(test, target_arch = "wasm32", target_os = "emscripten"))]
+fn _format(
+ _db: &RootDatabase,
+ _kind: SyntaxKind,
+ _file_id: FileId,
+ _expansion: &str,
+) -> Option<String> {
+ None
+}
+
+// Shell out to rustfmt to format a macro expansion. Returns None on any
+// failure (no rustfmt, spawn/IO error, non-zero exit, empty output) so the
+// caller can fall back to the unformatted text.
+#[cfg(not(any(test, target_arch = "wasm32", target_os = "emscripten")))]
+fn _format(
+ db: &RootDatabase,
+ kind: SyntaxKind,
+ file_id: FileId,
+ expansion: &str,
+) -> Option<String> {
+ use ide_db::base_db::{FileLoader, SourceDatabase};
+ // hack until we get hygiene working (same character amount to preserve formatting as much as possible)
+ const DOLLAR_CRATE_REPLACE: &str = &"__r_a_";
+ let expansion = expansion.replace("$crate", DOLLAR_CRATE_REPLACE);
+ // rustfmt only formats well-formed items, so wrap pattern/expression/type
+ // expansions in a synthetic context; the wrapper is stripped off below.
+ let (prefix, suffix) = match kind {
+ SyntaxKind::MACRO_PAT => ("fn __(", ": u32);"),
+ SyntaxKind::MACRO_EXPR | SyntaxKind::MACRO_STMTS => ("fn __() {", "}"),
+ SyntaxKind::MACRO_TYPE => ("type __ =", ";"),
+ _ => ("", ""),
+ };
+ let expansion = format!("{prefix}{expansion}{suffix}");
+
+ // Format with the edition of the first crate this file belongs to.
+ let &crate_id = db.relevant_crates(file_id).iter().next()?;
+ let edition = db.crate_graph()[crate_id].edition;
+
+ let mut cmd = std::process::Command::new(toolchain::rustfmt());
+ cmd.arg("--edition");
+ cmd.arg(edition.to_string());
+
+ let mut rustfmt = cmd
+ .stdin(std::process::Stdio::piped())
+ .stdout(std::process::Stdio::piped())
+ .stderr(std::process::Stdio::piped())
+ .spawn()
+ .ok()?;
+
+ // Feed the source via stdin; `wait_with_output` below closes the child's
+ // stdin before waiting, so rustfmt sees EOF and terminates.
+ std::io::Write::write_all(&mut rustfmt.stdin.as_mut()?, expansion.as_bytes()).ok()?;
+
+ let output = rustfmt.wait_with_output().ok()?;
+ let captured_stdout = String::from_utf8(output.stdout).ok()?;
+
+ if output.status.success() && !captured_stdout.trim().is_empty() {
+ // Undo the $crate placeholder and strip the synthetic wrapper again.
+ let output = captured_stdout.replace(DOLLAR_CRATE_REPLACE, "$crate");
+ let output = output.trim().strip_prefix(prefix)?;
+ let output = match kind {
+ SyntaxKind::MACRO_PAT => {
+ // rustfmt may have reformatted the pattern wrapper with a
+ // trailing comma and newline — accept that variant too.
+ output.strip_suffix(suffix).or_else(|| output.strip_suffix(": u32,\n);"))?
+ }
+ _ => output.strip_suffix(suffix)?,
+ };
+ let trim_indent = stdx::trim_indent(output);
+ tracing::debug!("expand_macro: formatting succeeded");
+ Some(trim_indent)
+ } else {
+ None
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::fixture;
+
+ // Expands the macro at the fixture's $0 cursor and compares
+ // "<macro name>\n<expansion>" against the expect-test snapshot.
+ #[track_caller]
+ fn check(ra_fixture: &str, expect: Expect) {
+ let (analysis, pos) = fixture::position(ra_fixture);
+ let expansion = analysis.expand_macro(pos).unwrap().unwrap();
+ let actual = format!("{}\n{}", expansion.name, expansion.expansion);
+ expect.assert_eq(&actual);
+ }
+
+ #[test]
+ fn macro_expand_as_keyword() {
+ check(
+ r#"
+macro_rules! bar {
+ ($i:tt) => { $i as _ }
+}
+fn main() {
+ let x: u64 = ba$0r!(5i64);
+}
+"#,
+ expect![[r#"
+ bar
+ 5i64 as _"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_underscore() {
+ check(
+ r#"
+macro_rules! bar {
+ ($i:tt) => { for _ in 0..$i {} }
+}
+fn main() {
+ ba$0r!(42);
+}
+"#,
+ expect![[r#"
+ bar
+ for _ in 0..42{}"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_recursive_expansion() {
+ check(
+ r#"
+macro_rules! bar {
+ () => { fn b() {} }
+}
+macro_rules! foo {
+ () => { bar!(); }
+}
+macro_rules! baz {
+ () => { foo!(); }
+}
+f$0oo!();
+"#,
+ expect![[r#"
+ foo
+ fn b(){}
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_multiple_lines() {
+ check(
+ r#"
+macro_rules! foo {
+ () => {
+ fn some_thing() -> u32 {
+ let a = 0;
+ a + 10
+ }
+ }
+}
+f$0oo!();
+ "#,
+ expect![[r#"
+ foo
+ fn some_thing() -> u32 {
+ let a = 0;
+ a+10
+ }"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_match_ast() {
+ check(
+ r#"
+macro_rules! match_ast {
+ (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
+ (match ($node:expr) {
+ $( ast::$ast:ident($it:ident) => $res:block, )*
+ _ => $catch_all:expr $(,)?
+ }) => {{
+ $( if let Some($it) = ast::$ast::cast($node.clone()) $res else )*
+ { $catch_all }
+ }};
+}
+
+fn main() {
+ mat$0ch_ast! {
+ match container {
+ ast::TraitDef(it) => {},
+ ast::ImplDef(it) => {},
+ _ => { continue },
+ }
+ }
+}
+"#,
+ expect![[r#"
+ match_ast
+ {
+ if let Some(it) = ast::TraitDef::cast(container.clone()){}
+ else if let Some(it) = ast::ImplDef::cast(container.clone()){}
+ else {
+ {
+ continue
+ }
+ }
+ }"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_match_ast_inside_let_statement() {
+ check(
+ r#"
+macro_rules! match_ast {
+ (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
+ (match ($node:expr) {}) => {{}};
+}
+
+fn main() {
+ let p = f(|it| {
+ let res = mat$0ch_ast! { match c {}};
+ Some(res)
+ })?;
+}
+"#,
+ expect![[r#"
+ match_ast
+ {}"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_inner_macro_rules() {
+ check(
+ r#"
+macro_rules! foo {
+ ($t:tt) => {{
+ macro_rules! bar {
+ () => {
+ $t
+ }
+ }
+ bar!()
+ }};
+}
+
+fn main() {
+ foo$0!(42);
+}
+ "#,
+ expect![[r#"
+ foo
+ {
+ macro_rules! bar {
+ () => {
+ 42
+ }
+ }
+ 42
+ }"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_inner_macro_fail_to_expand() {
+ check(
+ r#"
+macro_rules! bar {
+ (BAD) => {};
+}
+macro_rules! foo {
+ () => {bar!()};
+}
+
+fn main() {
+ let res = fo$0o!();
+}
+"#,
+ expect![[r#"
+ foo
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_with_dollar_crate() {
+ check(
+ r#"
+#[macro_export]
+macro_rules! bar {
+ () => {0};
+}
+macro_rules! foo {
+ () => {$crate::bar!()};
+}
+
+fn main() {
+ let res = fo$0o!();
+}
+"#,
+ expect![[r#"
+ foo
+ 0"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_with_dyn_absolute_path() {
+ check(
+ r#"
+macro_rules! foo {
+ () => {fn f<T>(_: &dyn ::std::marker::Copy) {}};
+}
+
+fn main() {
+ let res = fo$0o!();
+}
+"#,
+ expect![[r#"
+ foo
+ fn f<T>(_: &dyn ::std::marker::Copy){}"#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_derive() {
+ check(
+ r#"
+//- proc_macros: identity
+//- minicore: clone, derive
+
+#[proc_macros::identity]
+#[derive(C$0lone)]
+struct Foo {}
+"#,
+ expect![[r#"
+ Clone
+ impl < >core::clone::Clone for Foo< >{}
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_derive2() {
+ check(
+ r#"
+//- minicore: copy, clone, derive
+
+#[derive(Cop$0y)]
+#[derive(Clone)]
+struct Foo {}
+"#,
+ expect![[r#"
+ Copy
+ impl < >core::marker::Copy for Foo< >{}
+ "#]],
+ );
+ }
+
+ #[test]
+ fn macro_expand_derive_multi() {
+ check(
+ r#"
+//- minicore: copy, clone, derive
+
+#[derive(Cop$0y, Clone)]
+struct Foo {}
+"#,
+ expect![[r#"
+ Copy
+ impl < >core::marker::Copy for Foo< >{}
+ "#]],
+ );
+ check(
+ r#"
+//- minicore: copy, clone, derive
+
+#[derive(Copy, Cl$0one)]
+struct Foo {}
+"#,
+ expect![[r#"
+ Clone
+ impl < >core::clone::Clone for Foo< >{}
+ "#]],
+ );
+ }
+}
--- /dev/null
- | COMMENT => 2,
+use std::{convert::TryInto, mem::discriminant};
+
+use crate::{doc_links::token_as_doc_comment, FilePosition, NavigationTarget, RangeInfo, TryToNav};
+use hir::{AsAssocItem, AssocItem, Semantics};
+use ide_db::{
+ base_db::{AnchoredPath, FileId, FileLoader},
+ defs::{Definition, IdentClass},
+ helpers::pick_best_token,
+ RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{ast, AstNode, AstToken, SyntaxKind::*, SyntaxToken, TextRange, T};
+
+// Feature: Go to Definition
+//
+// Navigates to the definition of an identifier.
+//
+// For outline modules, this will navigate to the source file of the module.
+//
+// |===
+// | Editor | Shortcut
+//
+// | VS Code | kbd:[F12]
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113065563-025fbe00-91b1-11eb-83e4-a5a703610b23.gif[]
+pub(crate) fn goto_definition(
+ db: &RootDatabase,
+ position: FilePosition,
+) -> Option<RangeInfo<Vec<NavigationTarget>>> {
+ let sema = &Semantics::new(db);
+ let file = sema.parse(position.file_id).syntax().clone();
+ // Rank the tokens touching the cursor; identifier-like tokens (and
+ // comments, for intra-doc links) win over operators, which win over
+ // keywords/parens, which win over everything but trivia.
+ let original_token =
+ pick_best_token(file.token_at_offset(position.offset), |kind| match kind {
+ IDENT
+ | INT_NUMBER
+ | LIFETIME_IDENT
+ | T![self]
+ | T![super]
+ | T![crate]
+ | T![Self]
++ | COMMENT => 4,
++ // index and prefix ops
++ T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] => 3,
++ kind if kind.is_keyword() => 2,
++ T!['('] | T![')'] => 2,
+ kind if kind.is_trivia() => 0,
+ _ => 1,
+ })?;
+ // Cursor inside a doc comment: resolve the intra-doc link instead of
+ // doing ordinary name resolution.
+ if let Some(doc_comment) = token_as_doc_comment(&original_token) {
+ return doc_comment.get_definition_with_descend_at(sema, position.offset, |def, _, _| {
+ let nav = def.try_to_nav(db)?;
+ Some(RangeInfo::new(original_token.text_range(), vec![nav]))
+ });
+ }
+ // Descend through macro expansions: every expanded copy of the token may
+ // classify to (multiple) definitions; duplicates are removed at the end.
+ let navs = sema
+ .descend_into_macros(original_token.clone())
+ .into_iter()
+ .filter_map(|token| {
+ let parent = token.parent()?;
+ // Special case: string literal inside include!/include_str!/
+ // include_bytes! navigates to the included file itself.
+ if let Some(tt) = ast::TokenTree::cast(parent) {
+ if let Some(x) = try_lookup_include_path(sema, tt, token.clone(), position.file_id)
+ {
+ return Some(vec![x]);
+ }
+ }
+ Some(
+ IdentClass::classify_token(sema, &token)?
+ .definitions()
+ .into_iter()
+ .flat_map(|def| {
+ // For trait-impl consts/type aliases, prefer the trait's
+ // declaration; otherwise navigate to the def itself.
+ try_filter_trait_item_definition(sema, &def)
+ .unwrap_or_else(|| def_to_nav(sema.db, def))
+ })
+ .collect(),
+ )
+ })
+ .flatten()
+ .unique()
+ .collect::<Vec<NavigationTarget>>();
+
+ Some(RangeInfo::new(original_token.text_range(), navs))
+}
+
+// If `token` is the string-literal argument of an include!/include_str!/
+// include_bytes! call, resolve it (relative to `file_id`) to the included
+// file and return a navigation target spanning that whole file.
+fn try_lookup_include_path(
+ sema: &Semantics<'_, RootDatabase>,
+ tt: ast::TokenTree,
+ token: SyntaxToken,
+ file_id: FileId,
+) -> Option<NavigationTarget> {
+ let token = ast::String::cast(token)?;
+ let path = token.value()?.into_owned();
+ // Only fire for the three include-like built-in macros; note this checks
+ // the path text only, not that the name actually resolves to the builtin.
+ let macro_call = tt.syntax().parent().and_then(ast::MacroCall::cast)?;
+ let name = macro_call.path()?.segment()?.name_ref()?;
+ if !matches!(&*name.text(), "include" | "include_str" | "include_bytes") {
+ return None;
+ }
+ let file_id = sema.db.resolve_path(AnchoredPath { anchor: file_id, path: &path })?;
+ let size = sema.db.file_text(file_id).len().try_into().ok()?;
+ Some(NavigationTarget {
+ file_id,
+ // Full file range, no focus: the target is the file as a whole.
+ full_range: TextRange::new(0.into(), size),
+ name: path.into(),
+ focus_range: None,
+ kind: None,
+ container_name: None,
+ description: None,
+ docs: None,
+ })
+}
+/// Finds the trait declaration of an implemented associated item (consts and
+/// type aliases only; functions are deliberately excluded).
+/// e.g.
+/// ```rust
+/// trait A { type a; }
+/// struct S;
+/// impl A for S { type a = i32; } // <-- on this associated type, will get the location of a in the trait
+/// ```
+fn try_filter_trait_item_definition(
+ sema: &Semantics<'_, RootDatabase>,
+ def: &Definition,
+) -> Option<Vec<NavigationTarget>> {
+ let db = sema.db;
+ let assoc = def.as_assoc_item(db)?;
+ match assoc {
+ // Functions keep their default goto behavior (the impl's fn itself).
+ AssocItem::Function(..) => None,
+ AssocItem::Const(..) | AssocItem::TypeAlias(..) => {
+ // Only items inside a trait impl have a trait declaration to jump to.
+ let imp = match assoc.container(db) {
+ hir::AssocItemContainer::Impl(imp) => imp,
+ _ => return None,
+ };
+ let trait_ = imp.trait_(db)?;
+ let name = def.name(db)?;
+ // Match by enum discriminant so a const and a type alias sharing a
+ // name cannot be confused with each other.
+ let discri_value = discriminant(&assoc);
+ trait_
+ .items(db)
+ .iter()
+ .filter(|itm| discriminant(*itm) == discri_value)
+ .find_map(|itm| (itm.name(db)? == name).then(|| itm.try_to_nav(db)).flatten())
+ .map(|it| vec![it])
+ }
+ }
+}
+
+// Wrap a definition's navigation target in a Vec; empty if it has none.
+fn def_to_nav(db: &RootDatabase, def: Definition) -> Vec<NavigationTarget> {
+ def.try_to_nav(db).map(|it| vec![it]).unwrap_or_default()
+}
+
+#[cfg(test)]
+mod tests {
+ use ide_db::base_db::FileRange;
+ use itertools::Itertools;
+
+ use crate::fixture;
+
+ // Runs goto-definition at the fixture's $0 cursor and asserts the
+ // resolved targets equal the fixture's `//^` annotations (both sides
+ // sorted by (file, start) so ordering is irrelevant).
+ #[track_caller]
+ fn check(ra_fixture: &str) {
+ let (analysis, position, expected) = fixture::annotations(ra_fixture);
+ let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info;
+ if navs.is_empty() {
+ panic!("unresolved reference")
+ }
+
+ let cmp = |&FileRange { file_id, range }: &_| (file_id, range.start());
+ let navs = navs
+ .into_iter()
+ .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
+ .sorted_by_key(cmp)
+ .collect::<Vec<_>>();
+ let expected = expected
+ .into_iter()
+ .map(|(FileRange { file_id, range }, _)| FileRange { file_id, range })
+ .sorted_by_key(cmp)
+ .collect::<Vec<_>>();
+ assert_eq!(expected, navs);
+ }
+
+ // Asserts that goto-definition at the fixture's $0 cursor resolves to
+ // nothing at all.
+ fn check_unresolved(ra_fixture: &str) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let navs = analysis.goto_definition(position).unwrap().expect("no definition found").info;
+
+ assert!(navs.is_empty(), "didn't expect this to resolve anywhere: {:?}", navs)
+ }
+
+ #[test]
+ fn goto_def_if_items_same_name() {
+ check(
+ r#"
+trait Trait {
+ type A;
+ const A: i32;
+ //^
+}
+
+struct T;
+impl Trait for T {
+ type A = i32;
+ const A$0: i32 = -9;
+}"#,
+ );
+ }
+ #[test]
+ fn goto_def_in_mac_call_in_attr_invoc() {
+ check(
+ r#"
+//- proc_macros: identity
+pub struct Struct {
+ // ^^^^^^
+ field: i32,
+}
+
+macro_rules! identity {
+ ($($tt:tt)*) => {$($tt)*};
+}
+
+#[proc_macros::identity]
+fn function() {
+ identity!(Struct$0 { field: 0 });
+}
+
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_extern_crate() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate std$0;
+//- /std/lib.rs crate:std
+// empty
+//^file
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_renamed_extern_crate() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate std as abc$0;
+//- /std/lib.rs crate:std
+// empty
+//^file
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_in_items() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+enum E { X(Foo$0) }
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_at_start_of_item() {
+ check(
+ r#"
+struct Foo;
+ //^^^
+enum E { X($0Foo) }
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_resolves_correct_name() {
+ check(
+ r#"
+//- /lib.rs
+use a::Foo;
+mod a;
+mod b;
+enum E { X(Foo$0) }
+
+//- /a.rs
+struct Foo;
+ //^^^
+//- /b.rs
+struct Foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_module_declaration() {
+ check(
+ r#"
+//- /lib.rs
+mod $0foo;
+
+//- /foo.rs
+// empty
+//^file
+"#,
+ );
+
+ check(
+ r#"
+//- /lib.rs
+mod $0foo;
+
+//- /foo/mod.rs
+// empty
+//^file
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macros() {
+ check(
+ r#"
+macro_rules! foo { () => { () } }
+ //^^^
+fn bar() {
+ $0foo!();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macros_from_other_crates() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::foo;
+fn bar() {
+ $0foo!();
+}
+
+//- /foo/lib.rs crate:foo
+#[macro_export]
+macro_rules! foo { () => { () } }
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macros_in_use_tree() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::foo$0;
+
+//- /foo/lib.rs crate:foo
+#[macro_export]
+macro_rules! foo { () => { () } }
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macro_defined_fn_with_arg() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! define_fn {
+ ($name:ident) => (fn $name() {})
+}
+
+define_fn!(foo);
+ //^^^
+
+fn bar() {
+ $0foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_macro_defined_fn_no_arg() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! define_fn {
+ () => (fn foo() {})
+}
+
+ define_fn!();
+//^^^^^^^^^^^^^
+
+fn bar() {
+ $0foo();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_works_for_macro_inside_pattern() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! foo {() => {0}}
+ //^^^
+
+fn bar() {
+ match (0,1) {
+ ($0foo!(), _) => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_works_for_macro_inside_match_arm_lhs() {
+ check(
+ r#"
+//- /lib.rs
+macro_rules! foo {() => {0}}
+ //^^^
+fn bar() {
+ match 0 {
+ $0foo!() => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_use_alias() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo as bar$0;
+
+//- /foo/lib.rs crate:foo
+// empty
+//^file
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_use_alias_foo_macro() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::foo as bar$0;
+
+//- /foo/lib.rs crate:foo
+#[macro_export]
+macro_rules! foo { () => { () } }
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_methods() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ fn frobnicate(&self) { }
+ //^^^^^^^^^^
+}
+
+fn bar(foo: &Foo) {
+ foo.frobnicate$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_fields() {
+ check(
+ r#"
+struct Foo {
+ spam: u32,
+} //^^^^
+
+fn bar(foo: &Foo) {
+ foo.spam$0;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_record_fields() {
+ check(
+ r#"
+//- /lib.rs
+struct Foo {
+ spam: u32,
+} //^^^^
+
+fn bar() -> Foo {
+ Foo {
+ spam$0: 0,
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_record_pat_fields() {
+ check(
+ r#"
+//- /lib.rs
+struct Foo {
+ spam: u32,
+} //^^^^
+
+fn bar(foo: Foo) -> Foo {
+ let Foo { spam$0: _, } = foo
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_record_fields_macros() {
+ check(
+ r"
+macro_rules! m { () => { 92 };}
+struct Foo { spam: u32 }
+ //^^^^
+
+fn bar() -> Foo {
+ Foo { spam$0: m!() }
+}
+",
+ );
+ }
+
+ #[test]
+ fn goto_for_tuple_fields() {
+ check(
+ r#"
+struct Foo(u32);
+ //^^^
+
+fn bar() {
+ let foo = Foo(0);
+ foo.$00;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_ufcs_inherent_methods() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ fn frobnicate() { }
+} //^^^^^^^^^^
+
+fn bar(foo: &Foo) {
+ Foo::frobnicate$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_ufcs_trait_methods_through_traits() {
+ check(
+ r#"
+trait Foo {
+ fn frobnicate();
+} //^^^^^^^^^^
+
+fn bar() {
+ Foo::frobnicate$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_ufcs_trait_methods_through_self() {
+ check(
+ r#"
+struct Foo;
+trait Trait {
+ fn frobnicate();
+} //^^^^^^^^^^
+impl Trait for Foo {}
+
+fn bar() {
+ Foo::frobnicate$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_on_self() {
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ //^^^
+ pub fn new() -> Self {
+ Self$0 {}
+ }
+}
+"#,
+ );
+ check(
+ r#"
+struct Foo;
+impl Foo {
+ //^^^
+ pub fn new() -> Self$0 {
+ Self {}
+ }
+}
+"#,
+ );
+
+ check(
+ r#"
+enum Foo { A }
+impl Foo {
+ //^^^
+ pub fn new() -> Self$0 {
+ Foo::A
+ }
+}
+"#,
+ );
+
+ check(
+ r#"
+enum Foo { A }
+impl Foo {
+ //^^^
+ pub fn thing(a: &Self$0) {
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_definition_on_self_in_trait_impl() {
+ check(
+ r#"
+struct Foo;
+trait Make {
+ fn new() -> Self;
+}
+impl Make for Foo {
+ //^^^
+ fn new() -> Self {
+ Self$0 {}
+ }
+}
+"#,
+ );
+
+ check(
+ r#"
+struct Foo;
+trait Make {
+ fn new() -> Self;
+}
+impl Make for Foo {
+ //^^^
+ fn new() -> Self$0 {
+ Self {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_when_used_on_definition_name_itself() {
+ check(
+ r#"
+struct Foo$0 { value: u32 }
+ //^^^
+ "#,
+ );
+
+ check(
+ r#"
+struct Foo {
+ field$0: string,
+} //^^^^^
+"#,
+ );
+
+ check(
+ r#"
+fn foo_test$0() { }
+ //^^^^^^^^
+"#,
+ );
+
+ check(
+ r#"
+enum Foo$0 { Variant }
+ //^^^
+"#,
+ );
+
+ check(
+ r#"
+enum Foo {
+ Variant1,
+ Variant2$0,
+ //^^^^^^^^
+ Variant3,
+}
+"#,
+ );
+
+ check(
+ r#"
+static INNER$0: &str = "";
+ //^^^^^
+"#,
+ );
+
+ check(
+ r#"
+const INNER$0: &str = "";
+ //^^^^^
+"#,
+ );
+
+ check(
+ r#"
+type Thing$0 = Option<()>;
+ //^^^^^
+"#,
+ );
+
+ check(
+ r#"
+trait Foo$0 { }
+ //^^^
+"#,
+ );
+
+ check(
+ r#"
+mod bar$0 { }
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_from_macro() {
+ check(
+ r#"
+macro_rules! id {
+ ($($tt:tt)*) => { $($tt)* }
+}
+fn foo() {}
+ //^^^
+id! {
+ fn bar() {
+ fo$0o();
+ }
+}
+mod confuse_index { fn foo(); }
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_through_format() {
+ check(
+ r#"
+#[macro_export]
+macro_rules! format {
+ ($($arg:tt)*) => ($crate::fmt::format($crate::__export::format_args!($($arg)*)))
+}
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args {
+ ($fmt:expr) => ({ /* compiler built-in */ });
+ ($fmt:expr, $($args:tt)*) => ({ /* compiler built-in */ })
+}
+pub mod __export {
+ pub use crate::format_args;
+ fn foo() {} // for index confusion
+}
+fn foo() -> i8 {}
+ //^^^
+fn test() {
+ format!("{}", fo$0o())
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_through_included_file() {
+ check(
+ r#"
+//- /main.rs
+#[rustc_builtin_macro]
+macro_rules! include {}
+
+ include!("foo.rs");
+//^^^^^^^^^^^^^^^^^^^
+
+fn f() {
+ foo$0();
+}
+
+mod confuse_index {
+ pub fn foo() {}
+}
+
+//- /foo.rs
+fn foo() {}
+ "#,
+ );
+ }
+
+ #[test]
+ fn goto_for_type_param() {
+ check(
+ r#"
+struct Foo<T: Clone> { t: $0T }
+ //^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_within_macro() {
+ check(
+ r#"
+macro_rules! id {
+ ($($tt:tt)*) => ($($tt)*)
+}
+
+fn foo() {
+ let x = 1;
+ //^
+ id!({
+ let y = $0x;
+ let z = y;
+ });
+}
+"#,
+ );
+
+ check(
+ r#"
+macro_rules! id {
+ ($($tt:tt)*) => ($($tt)*)
+}
+
+fn foo() {
+ let x = 1;
+ id!({
+ let y = x;
+ //^
+ let z = $0y;
+ });
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_in_local_fn() {
+ check(
+ r#"
+fn main() {
+ fn foo() {
+ let x = 92;
+ //^
+ $0x;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_in_local_macro() {
+ check(
+ r#"
+fn bar() {
+ macro_rules! foo { () => { () } }
+ //^^^
+ $0foo!();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_field_init_shorthand() {
+ check(
+ r#"
+struct Foo { x: i32 }
+ //^
+fn main() {
+ let x = 92;
+ //^
+ Foo { x$0 };
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_field() {
+ check(
+ r#"
+enum Foo {
+ Bar { x: i32 }
+ //^
+}
+fn baz(foo: Foo) {
+ match foo {
+ Foo::Bar { x$0 } => x
+ //^
+ };
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_self_pattern_const() {
+ check(
+ r#"
+enum Foo { Bar }
+ //^^^
+impl Foo {
+ fn baz(self) {
+ match self { Self::Bar$0 => {} }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_self_pattern_record() {
+ check(
+ r#"
+enum Foo { Bar { val: i32 } }
+ //^^^
+impl Foo {
+ fn baz(self) -> i32 {
+ match self { Self::Bar$0 { val } => {} }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_self_expr_const() {
+ check(
+ r#"
+enum Foo { Bar }
+ //^^^
+impl Foo {
+ fn baz(self) { Self::Bar$0; }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_enum_variant_self_expr_record() {
+ check(
+ r#"
+enum Foo { Bar { val: i32 } }
+ //^^^
+impl Foo {
+ fn baz(self) { Self::Bar$0 {val: 4}; }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_type_alias_generic_parameter() {
+ check(
+ r#"
+type Alias<T> = T$0;
+ //^
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_macro_container() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:foo
+foo::module$0::mac!();
+
+//- /foo/lib.rs crate:foo
+pub mod module {
+ //^^^^^^
+ #[macro_export]
+ macro_rules! _mac { () => { () } }
+ pub use crate::_mac as mac;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_assoc_ty_in_path() {
+ check(
+ r#"
+trait Iterator {
+ type Item;
+ //^^^^
+}
+
+fn f() -> impl Iterator<Item$0 = u8> {}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_super_assoc_ty_in_path() {
+ check(
+ r#"
+trait Super {
+ type Item;
+ //^^^^
+}
+
+trait Sub: Super {}
+
+fn f() -> impl Sub<Item$0 = u8> {}
+"#,
+ );
+ }
+
+ #[test]
+ fn unknown_assoc_ty() {
+ check_unresolved(
+ r#"
+trait Iterator { type Item; }
+fn f() -> impl Iterator<Invalid$0 = u8> {}
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_assoc_ty_in_path_multiple() {
+ check(
+ r#"
+trait Iterator {
+ type A;
+ //^
+ type B;
+}
+
+fn f() -> impl Iterator<A$0 = u8, B = ()> {}
+"#,
+ );
+ check(
+ r#"
+trait Iterator {
+ type A;
+ type B;
+ //^
+}
+
+fn f() -> impl Iterator<A = u8, B$0 = ()> {}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_assoc_ty_ufcs() {
+ check(
+ r#"
+trait Iterator {
+ type Item;
+ //^^^^
+}
+
+fn g() -> <() as Iterator<Item$0 = ()>>::Item {}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_for_assoc_ty_ufcs_multiple() {
+ check(
+ r#"
+trait Iterator {
+ type A;
+ //^
+ type B;
+}
+
+fn g() -> <() as Iterator<A$0 = (), B = u8>>::B {}
+"#,
+ );
+ check(
+ r#"
+trait Iterator {
+ type A;
+ type B;
+ //^
+}
+
+fn g() -> <() as Iterator<A = (), B$0 = u8>>::A {}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_self_param_ty_specified() {
+ check(
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn bar(self: &Foo) {
+ //^^^^
+ let foo = sel$0f;
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_self_param_on_decl() {
+ check(
+ r#"
+struct Foo {}
+
+impl Foo {
+ fn bar(&self$0) {
+ //^^^^
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_lifetime_param_on_decl() {
+ check(
+ r#"
+fn foo<'foobar$0>(_: &'foobar ()) {
+ //^^^^^^^
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_lifetime_param_decl() {
+ check(
+ r#"
+fn foo<'foobar>(_: &'foobar$0 ()) {
+ //^^^^^^^
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_lifetime_param_decl_nested() {
+ check(
+ r#"
+fn foo<'foobar>(_: &'foobar ()) {
+ fn foo<'foobar>(_: &'foobar$0 ()) {}
+ //^^^^^^^
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_lifetime_hrtb() {
+ // FIXME: requires the HIR to somehow track these hrtb lifetimes
+ check_unresolved(
+ r#"
+trait Foo<T> {}
+fn foo<T>() where for<'a> T: Foo<&'a$0 (u8, u16)>, {}
+ //^^
+"#,
+ );
+ check_unresolved(
+ r#"
+trait Foo<T> {}
+fn foo<T>() where for<'a$0> T: Foo<&'a (u8, u16)>, {}
+ //^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_lifetime_hrtb_for_type() {
+ // FIXME: requires ForTypes to be implemented
+ check_unresolved(
+ r#"trait Foo<T> {}
+fn foo<T>() where T: for<'a> Foo<&'a$0 (u8, u16)>, {}
+ //^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_label() {
+ check(
+ r#"
+fn foo<'foo>(_: &'foo ()) {
+ 'foo: {
+ //^^^^
+ 'bar: loop {
+ break 'foo$0;
+ }
+ }
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_intra_doc_link_same_file() {
+ check(
+ r#"
+/// Blah, [`bar`](bar) .. [`foo`](foo$0) has [`bar`](bar)
+pub fn bar() { }
+
+/// You might want to see [`std::fs::read()`] too.
+pub fn foo() { }
+ //^^^
+
+}"#,
+ )
+ }
+
+ #[test]
+ fn goto_def_for_intra_doc_link_inner() {
+ check(
+ r#"
+//- /main.rs
+mod m;
+struct S;
+ //^
+
+//- /m.rs
+//! [`super::S$0`]
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_incomplete_field() {
+ check(
+ r#"
+struct A { a: u32 }
+ //^
+fn foo() { A { a$0: }; }
+"#,
+ )
+ }
+
+ #[test]
+ fn goto_proc_macro() {
+ check(
+ r#"
+//- /main.rs crate:main deps:mac
+use mac::fn_macro;
+
+fn_macro$0!();
+
+//- /mac.rs crate:mac
+#![crate_type="proc-macro"]
+#[proc_macro]
+fn fn_macro() {}
+ //^^^^^^^^
+ "#,
+ )
+ }
+
+ #[test]
+ fn goto_intra_doc_links() {
+ check(
+ r#"
+
+pub mod theitem {
+ /// This is the item. Cool!
+ pub struct TheItem;
+ //^^^^^^^
+}
+
+/// Gives you a [`TheItem$0`].
+///
+/// [`TheItem`]: theitem::TheItem
+pub fn gimme() -> theitem::TheItem {
+ theitem::TheItem
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_ident_from_pat_macro() {
+ check(
+ r#"
+macro_rules! pat {
+ ($name:ident) => { Enum::Variant1($name) }
+}
+
+enum Enum {
+ Variant1(u8),
+ Variant2,
+}
+
+fn f(e: Enum) {
+ match e {
+ pat!(bind) => {
+ //^^^^
+ bind$0
+ }
+ Enum::Variant2 => {}
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_include() {
+ check(
+ r#"
+//- /main.rs
+fn main() {
+ let str = include_str!("foo.txt$0");
+}
+//- /foo.txt
+// empty
+//^file
+"#,
+ );
+ }
+ #[cfg(test)]
+ mod goto_impl_of_trait_fn {
+ use super::check;
+ #[test]
+ fn cursor_on_impl() {
+ check(
+ r#"
+trait Twait {
+ fn a();
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ fn a$0();
+ //^
+}
+ "#,
+ );
+ }
+ #[test]
+ fn method_call() {
+ check(
+ r#"
+trait Twait {
+ fn a(&self);
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ fn a(&self){};
+ //^
+}
+fn f() {
+ let s = Stwuct;
+ s.a$0();
+}
+ "#,
+ );
+ }
+ #[test]
+ fn path_call() {
+ check(
+ r#"
+trait Twait {
+ fn a(&self);
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ fn a(&self){};
+ //^
+}
+fn f() {
+ let s = Stwuct;
+ Stwuct::a$0(&s);
+}
+ "#,
+ );
+ }
+ #[test]
+ fn where_clause_can_work() {
+ check(
+ r#"
+trait G {
+ fn g(&self);
+}
+trait Bound{}
+trait EA{}
+struct Gen<T>(T);
+impl <T:EA> G for Gen<T> {
+ fn g(&self) {
+ }
+}
+impl <T> G for Gen<T>
+where T : Bound
+{
+ fn g(&self){
+ //^
+ }
+}
+struct A;
+impl Bound for A{}
+fn f() {
+ let gen = Gen::<A>(A);
+ gen.g$0();
+}
+ "#,
+ );
+ }
+ #[test]
+ fn wc_case_is_ok() {
+ check(
+ r#"
+trait G {
+ fn g(&self);
+}
+trait BParent{}
+trait Bound: BParent{}
+struct Gen<T>(T);
+impl <T> G for Gen<T>
+where T : Bound
+{
+ fn g(&self){
+ //^
+ }
+}
+struct A;
+impl Bound for A{}
+fn f() {
+ let gen = Gen::<A>(A);
+ gen.g$0();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn method_call_defaulted() {
+ check(
+ r#"
+trait Twait {
+ fn a(&self) {}
+ //^
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+}
+fn f() {
+ let s = Stwuct;
+ s.a$0();
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn method_call_on_generic() {
+ check(
+ r#"
+trait Twait {
+ fn a(&self) {}
+ //^
+}
+
+fn f<T: Twait>(s: T) {
+ s.a$0();
+}
+ "#,
+ );
+ }
+ }
+
+ #[test]
+ fn goto_def_of_trait_impl_const() {
+ check(
+ r#"
+trait Twait {
+ const NOMS: bool;
+ // ^^^^
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ const NOMS$0: bool = true;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_of_trait_impl_type_alias() {
+ check(
+ r#"
+trait Twait {
+ type IsBad;
+ // ^^^^^
+}
+
+struct Stwuct;
+
+impl Twait for Stwuct {
+ type IsBad$0 = !;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_def_derive_input() {
+ check(
+ r#"
+ //- minicore:derive
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+ // ^^^^
+ #[derive(Copy$0)]
+ struct Foo;
+ "#,
+ );
+ check(
+ r#"
+//- minicore:derive
+#[rustc_builtin_macro]
+pub macro Copy {}
+ // ^^^^
+#[cfg_attr(feature = "false", derive)]
+#[derive(Copy$0)]
+struct Foo;
+ "#,
+ );
+ check(
+ r#"
+//- minicore:derive
+mod foo {
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+ // ^^^^
+}
+#[derive(foo::Copy$0)]
+struct Foo;
+ "#,
+ );
+ check(
+ r#"
+//- minicore:derive
+mod foo {
+ // ^^^
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+}
+#[derive(foo$0::Copy)]
+struct Foo;
+ "#,
+ );
+ }
+
+ #[test]
+ fn goto_def_in_macro_multi() {
+ check(
+ r#"
+struct Foo {
+ foo: ()
+ //^^^
+}
+macro_rules! foo {
+ ($ident:ident) => {
+ fn $ident(Foo { $ident }: Foo) {}
+ }
+}
+foo!(foo$0);
+ //^^^
+ //^^^
+"#,
+ );
+ check(
+ r#"
+fn bar() {}
+ //^^^
+struct bar;
+ //^^^
+macro_rules! foo {
+ ($ident:ident) => {
+ fn foo() {
+ let _: $ident = $ident;
+ }
+ }
+}
+
+foo!(bar$0);
++"#,
++ );
++ }
++
++ #[test]
++ fn goto_await_poll() {
++ check(
++ r#"
++//- minicore: future
++
++struct MyFut;
++
++impl core::future::Future for MyFut {
++ type Output = ();
++
++ fn poll(
++ //^^^^
++ self: std::pin::Pin<&mut Self>,
++ cx: &mut std::task::Context<'_>
++ ) -> std::task::Poll<Self::Output>
++ {
++ ()
++ }
++}
++
++fn f() {
++ MyFut.await$0;
++}
++"#,
++ );
++ }
++
++ #[test]
++ fn goto_try_op() {
++ check(
++ r#"
++//- minicore: try
++
++struct Struct;
++
++impl core::ops::Try for Struct {
++ fn branch(
++ //^^^^^^
++ self
++ ) {}
++}
++
++fn f() {
++ Struct?$0;
++}
++"#,
++ );
++ }
++
++ #[test]
++ fn goto_index_op() {
++ check(
++ r#"
++//- minicore: index
++
++struct Struct;
++
++impl core::ops::Index<usize> for Struct {
++ fn index(
++ //^^^^^
++ self
++ ) {}
++}
++
++fn f() {
++ Struct[0]$0;
++}
++"#,
++ );
++ }
++
++ #[test]
++ fn goto_prefix_op() {
++ check(
++ r#"
++//- minicore: deref
++
++struct Struct;
++
++impl core::ops::Deref for Struct {
++ fn deref(
++ //^^^^^
++ self
++ ) {}
++}
++
++fn f() {
++ $0*Struct;
++}
++"#,
++ );
++ }
++
++ #[test]
++ fn goto_bin_op() {
++ check(
++ r#"
++//- minicore: add
++
++struct Struct;
++
++impl core::ops::Add for Struct {
++ fn add(
++ //^^^
++ self
++ ) {}
++}
++
++fn f() {
++ Struct +$0 Struct;
++}
+"#,
+ );
+ }
+}
--- /dev/null
- IDENT | T![self] => 1,
+use hir::{AsAssocItem, Impl, Semantics};
+use ide_db::{
+ defs::{Definition, NameClass, NameRefClass},
+ helpers::pick_best_token,
+ RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{ast, AstNode, SyntaxKind::*, T};
+
+use crate::{FilePosition, NavigationTarget, RangeInfo, TryToNav};
+
+// Feature: Go to Implementation
+//
+// Navigates to the impl blocks of types.
+//
+// |===
+// | Editor | Shortcut
+//
+// | VS Code | kbd:[Ctrl+F12]
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113065566-02f85480-91b1-11eb-9288-aaad8abd8841.gif[]
+pub(crate) fn goto_implementation(
+ db: &RootDatabase,
+ position: FilePosition,
+) -> Option<RangeInfo<Vec<NavigationTarget>>> {
+ let sema = Semantics::new(db);
+ let source_file = sema.parse(position.file_id);
+ let syntax = source_file.syntax().clone();
+
+ // Only identifier-like tokens can name something that has impls.
+ let original_token =
+ pick_best_token(syntax.token_at_offset(position.offset), |kind| match kind {
++ IDENT | T![self] | INT_NUMBER => 1,
+ _ => 0,
+ })?;
+ let range = original_token.text_range();
+ // Descend through macros, classify each name/name-ref to a definition,
+ // then collect the impl blocks relevant to that definition.
+ let navs = sema
+ .descend_into_macros(original_token)
+ .into_iter()
+ .filter_map(|token| token.parent().and_then(ast::NameLike::cast))
+ .filter_map(|node| match &node {
+ ast::NameLike::Name(name) => {
+ NameClass::classify(&sema, name).map(|class| match class {
+ NameClass::Definition(it) | NameClass::ConstReference(it) => it,
+ NameClass::PatFieldShorthand { local_def, field_ref: _ } => {
+ Definition::Local(local_def)
+ }
+ })
+ }
+ ast::NameLike::NameRef(name_ref) => {
+ NameRefClass::classify(&sema, name_ref).map(|class| match class {
+ NameRefClass::Definition(def) => def,
+ NameRefClass::FieldShorthand { local_ref, field_ref: _ } => {
+ Definition::Local(local_ref)
+ }
+ })
+ }
+ // Lifetimes have no impls to navigate to.
+ ast::NameLike::Lifetime(_) => None,
+ })
+ .unique()
+ .filter_map(|def| {
+ let navs = match def {
+ // Traits and types: all impl blocks for them.
+ Definition::Trait(trait_) => impls_for_trait(&sema, trait_),
+ Definition::Adt(adt) => impls_for_ty(&sema, adt.ty(sema.db)),
+ Definition::TypeAlias(alias) => impls_for_ty(&sema, alias.ty(sema.db)),
+ Definition::BuiltinType(builtin) => impls_for_ty(&sema, builtin.ty(sema.db)),
+ // Trait-associated fns/consts: the corresponding item in each
+ // impl of the containing trait.
+ Definition::Function(f) => {
+ let assoc = f.as_assoc_item(sema.db)?;
+ let name = assoc.name(sema.db)?;
+ let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?;
+ impls_for_trait_item(&sema, trait_, name)
+ }
+ Definition::Const(c) => {
+ let assoc = c.as_assoc_item(sema.db)?;
+ let name = assoc.name(sema.db)?;
+ let trait_ = assoc.containing_trait_or_trait_impl(sema.db)?;
+ impls_for_trait_item(&sema, trait_, name)
+ }
+ _ => return None,
+ };
+ Some(navs)
+ })
+ .flatten()
+ .collect();
+
+ Some(RangeInfo { range, info: navs })
+}
+
+// Navigation targets for every impl block of the given type.
+fn impls_for_ty(sema: &Semantics<'_, RootDatabase>, ty: hir::Type) -> Vec<NavigationTarget> {
+ Impl::all_for_type(sema.db, ty).into_iter().filter_map(|imp| imp.try_to_nav(sema.db)).collect()
+}
+
+// Navigation targets for every impl block of the given trait.
+fn impls_for_trait(
+ sema: &Semantics<'_, RootDatabase>,
+ trait_: hir::Trait,
+) -> Vec<NavigationTarget> {
+ Impl::all_for_trait(sema.db, trait_)
+ .into_iter()
+ .filter_map(|imp| imp.try_to_nav(sema.db))
+ .collect()
+}
+
+/// Returns navigation targets for the associated item named `fun_name` in
+/// every `impl` of `trait_`, skipping impls that lack such an item.
+///
+/// NOTE: despite the parameter name, callers also pass associated-const
+/// names (see the `Definition::Const` arm above) — the lookup is purely
+/// name-based and works for any associated item.
+fn impls_for_trait_item(
+ sema: &Semantics<'_, RootDatabase>,
+ trait_: hir::Trait,
+ fun_name: hir::Name,
+) -> Vec<NavigationTarget> {
+ Impl::all_for_trait(sema.db, trait_)
+ .into_iter()
+ .filter_map(|imp| {
+ // Find the impl's item with a matching name; skip the impl if absent.
+ let item = imp.items(sema.db).iter().find_map(|itm| {
+ let itm_name = itm.name(sema.db)?;
+ (itm_name == fun_name).then(|| *itm)
+ })?;
+ item.try_to_nav(sema.db)
+ })
+ .collect()
+}
+
+#[cfg(test)]
+mod tests {
+ use ide_db::base_db::FileRange;
+ use itertools::Itertools;
+
+ use crate::fixture;
+
+ /// Runs `goto_implementation` at the fixture's `$0` cursor position and
+ /// asserts that the resulting targets match the fixture's `^^^` annotations,
+ /// comparing (file, start-offset) pairs in sorted order.
+ fn check(ra_fixture: &str) {
+ let (analysis, position, expected) = fixture::annotations(ra_fixture);
+
+ let navs = analysis.goto_implementation(position).unwrap().unwrap().info;
+
+ // Sort key: file first, then range start, so ordering is deterministic.
+ let cmp = |frange: &FileRange| (frange.file_id, frange.range.start());
+
+ let actual = navs
+ .into_iter()
+ .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
+ .sorted_by_key(cmp)
+ .collect::<Vec<_>>();
+ let expected =
+ expected.into_iter().map(|(range, _)| range).sorted_by_key(cmp).collect::<Vec<_>>();
+ assert_eq!(expected, actual);
+ }
+
+ #[test]
+ fn goto_implementation_works() {
+ check(
+ r#"
+struct Foo$0;
+impl Foo {}
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_works_multiple_blocks() {
+ check(
+ r#"
+struct Foo$0;
+impl Foo {}
+ //^^^
+impl Foo {}
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_works_multiple_mods() {
+ check(
+ r#"
+struct Foo$0;
+mod a {
+ impl super::Foo {}
+ //^^^^^^^^^^
+}
+mod b {
+ impl super::Foo {}
+ //^^^^^^^^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_works_multiple_files() {
+ check(
+ r#"
+//- /lib.rs
+struct Foo$0;
+mod a;
+mod b;
+//- /a.rs
+impl crate::Foo {}
+ //^^^^^^^^^^
+//- /b.rs
+impl crate::Foo {}
+ //^^^^^^^^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_for_trait() {
+ check(
+ r#"
+trait T$0 {}
+struct Foo;
+impl T for Foo {}
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_for_trait_multiple_files() {
+ check(
+ r#"
+//- /lib.rs
+trait T$0 {};
+struct Foo;
+mod a;
+mod b;
+//- /a.rs
+impl crate::T for crate::Foo {}
+ //^^^^^^^^^^
+//- /b.rs
+impl crate::T for crate::Foo {}
+ //^^^^^^^^^^
+ "#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_all_impls() {
+ check(
+ r#"
+//- /lib.rs
+trait T {}
+struct Foo$0;
+impl Foo {}
+ //^^^
+impl T for Foo {}
+ //^^^
+impl T for &Foo {}
+ //^^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_to_builtin_derive() {
+ check(
+ r#"
+//- minicore: copy, derive
+ #[derive(Copy)]
+//^^^^^^^^^^^^^^^
+struct Foo$0;
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_type_alias() {
+ check(
+ r#"
+struct Foo;
+
+type Bar$0 = Foo;
+
+impl Foo {}
+ //^^^
+impl Bar {}
+ //^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_adt_generic() {
+ check(
+ r#"
+struct Foo$0<T>;
+
+impl<T> Foo<T> {}
+ //^^^^^^
+impl Foo<str> {}
+ //^^^^^^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_builtin() {
+ check(
+ r#"
+//- /lib.rs crate:main deps:core
+fn foo(_: bool$0) {{}}
+//- /libcore.rs crate:core
+#[lang = "bool"]
+impl bool {}
+ //^^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_trait_functions() {
+ check(
+ r#"
+trait Tr {
+ fn f$0();
+}
+
+struct S;
+
+impl Tr for S {
+ fn f() {
+ //^
+ println!("Hello, world!");
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn goto_implementation_trait_assoc_const() {
+ check(
+ r#"
+trait Tr {
+ const C$0: usize;
+}
+
+struct S;
+
+impl Tr for S {
+ const C: usize = 4;
+ //^
+}
+"#,
+ );
+ }
+}
--- /dev/null
- .filter_map(|token| IdentClass::classify_token(sema, &token).map(IdentClass::definitions))
+use hir::Semantics;
+use ide_db::{
+ base_db::{FileId, FilePosition},
+ defs::{Definition, IdentClass},
+ helpers::pick_best_token,
+ search::{FileReference, ReferenceCategory, SearchScope},
+ syntax_helpers::node_ext::{for_each_break_and_continue_expr, for_each_tail_expr, walk_expr},
+ FxHashSet, RootDatabase,
+};
+use syntax::{
+ ast::{self, HasLoopBody},
+ match_ast, AstNode,
+ SyntaxKind::{self, IDENT, INT_NUMBER},
+ SyntaxNode, SyntaxToken, TextRange, T,
+};
+
+use crate::{references, NavigationTarget, TryToNav};
+
+/// A single text range to highlight, optionally tagged with how the
+/// highlighted element is used (read/write) at that location.
+#[derive(PartialEq, Eq, Hash)]
+pub struct HighlightedRange {
+ pub range: TextRange,
+ // FIXME: This needs to be more precise. Reference category makes sense only
+ // for references, but we also have defs. And things like exit points are
+ // neither.
+ pub category: Option<ReferenceCategory>,
+}
+
+/// Feature toggles for `highlight_related`; each flag enables one of the
+/// highlight kinds dispatched on in `highlight_related` below.
+#[derive(Default, Clone)]
+pub struct HighlightRelatedConfig {
+ pub references: bool,
+ pub exit_points: bool,
+ pub break_points: bool,
+ pub yield_points: bool,
+}
+
+// Feature: Highlight Related
+//
+// Highlights constructs related to the thing under the cursor:
+//
+// . if on an identifier, highlights all references to that identifier in the current file
+// . if on an `async` or `await` token, highlights all yield points for that async context
+// . if on a `return` or `fn` keyword, `?` character or `->` return type arrow, highlights all exit points for that context
+// . if on a `break`, `loop`, `while` or `for` token, highlights all break points for that loop or block context
+//
+// Note: `?` and `->` do not currently trigger this behavior in the VSCode editor.
+pub(crate) fn highlight_related(
+ sema: &Semantics<'_, RootDatabase>,
+ config: HighlightRelatedConfig,
+ FilePosition { offset, file_id }: FilePosition,
+) -> Option<Vec<HighlightedRange>> {
+ let _p = profile::span("highlight_related");
+ let syntax = sema.parse(file_id).syntax().clone();
+
+ // Pick the most interesting token at the cursor; higher score wins.
+ let token = pick_best_token(syntax.token_at_offset(offset), |kind| match kind {
+ T![?] => 4, // prefer `?` when the cursor is sandwiched like in `await$0?`
+ T![->] => 3,
+ kind if kind.is_keyword() => 2,
+ IDENT | INT_NUMBER => 1,
+ _ => 0,
+ })?;
+ // Dispatch on the token kind, gated by the corresponding config flag.
+ // `?` and `for` are only special when they actually belong to a
+ // try-expression / for-loop respectively.
+ match token.kind() {
+ T![?] if config.exit_points && token.parent().and_then(ast::TryExpr::cast).is_some() => {
+ highlight_exit_points(sema, token)
+ }
+ T![fn] | T![return] | T![->] if config.exit_points => highlight_exit_points(sema, token),
+ T![await] | T![async] if config.yield_points => highlight_yield_points(token),
+ T![for] if config.break_points && token.parent().and_then(ast::ForExpr::cast).is_some() => {
+ highlight_break_points(token)
+ }
+ T![break] | T![loop] | T![while] | T![continue] if config.break_points => {
+ highlight_break_points(token)
+ }
+ _ if config.references => highlight_references(sema, &syntax, token, file_id),
+ _ => None,
+ }
+}
+
+/// Highlights the definition(s) under `token` plus all their usages within
+/// the current file. Returns `None` when nothing was found to highlight.
+fn highlight_references(
+ sema: &Semantics<'_, RootDatabase>,
+ node: &SyntaxNode,
+ token: SyntaxToken,
+ file_id: FileId,
+) -> Option<Vec<HighlightedRange>> {
+ let defs = find_defs(sema, token);
+ // All same-file usages of each definition (self-references included),
+ // tagged with their read/write category.
+ let usages = defs
+ .iter()
+ .filter_map(|&d| {
+ d.usages(sema)
+ .set_scope(Some(SearchScope::single_file(file_id)))
+ .include_self_refs()
+ .all()
+ .references
+ .remove(&file_id)
+ })
+ .flatten()
+ .map(|FileReference { category: access, range, .. }| HighlightedRange {
+ range,
+ category: access,
+ });
+ let mut res = FxHashSet::default();
+
+ // Also highlight the declaration site itself (when it lives in this file
+ // and has a focus range), marking it as a write when the decl is mutable.
+ let mut def_to_hl_range = |def| {
+ let hl_range = match def {
+ Definition::Module(module) => {
+ Some(NavigationTarget::from_module_to_decl(sema.db, module))
+ }
+ def => def.try_to_nav(sema.db),
+ }
+ .filter(|decl| decl.file_id == file_id)
+ .and_then(|decl| decl.focus_range)
+ .map(|range| {
+ let category =
+ references::decl_mutability(&def, node, range).then(|| ReferenceCategory::Write);
+ HighlightedRange { range, category }
+ });
+ if let Some(hl_range) = hl_range {
+ res.insert(hl_range);
+ }
+ };
+ for &def in &defs {
+ match def {
+ // For locals, highlight every binding introduced by the same
+ // pattern (e.g. each `foo` in an or-pattern), not just the one
+ // under the cursor.
+ Definition::Local(local) => local
+ .associated_locals(sema.db)
+ .iter()
+ .for_each(|&local| def_to_hl_range(Definition::Local(local))),
+ def => def_to_hl_range(def),
+ }
+ }
+
+ res.extend(usages);
+ if res.is_empty() {
+ None
+ } else {
+ Some(res.into_iter().collect())
+ }
+}
+
+/// Highlights all exit points of the function-like construct enclosing
+/// `token`: `return`s, `?` operators, never-typed (`!`) call expressions,
+/// and the tail expression(s) of the body.
+fn highlight_exit_points(
+ sema: &Semantics<'_, RootDatabase>,
+ token: SyntaxToken,
+) -> Option<Vec<HighlightedRange>> {
+ // Collects exit-point highlights within `body`.
+ fn hl(
+ sema: &Semantics<'_, RootDatabase>,
+ body: Option<ast::Expr>,
+ ) -> Option<Vec<HighlightedRange>> {
+ let mut highlights = Vec::new();
+ let body = body?;
+ walk_expr(&body, &mut |expr| match expr {
+ ast::Expr::ReturnExpr(expr) => {
+ if let Some(token) = expr.return_token() {
+ highlights.push(HighlightedRange { category: None, range: token.text_range() });
+ }
+ }
+ ast::Expr::TryExpr(try_) => {
+ if let Some(token) = try_.question_mark_token() {
+ highlights.push(HighlightedRange { category: None, range: token.text_range() });
+ }
+ }
+ // Calls (incl. macro calls) whose type is `!` never return, so
+ // they count as exit points too.
+ ast::Expr::MethodCallExpr(_) | ast::Expr::CallExpr(_) | ast::Expr::MacroExpr(_) => {
+ if sema.type_of_expr(&expr).map_or(false, |ty| ty.original.is_never()) {
+ highlights.push(HighlightedRange {
+ category: None,
+ range: expr.syntax().text_range(),
+ });
+ }
+ }
+ _ => (),
+ });
+ // The tail expression of the body (or the whole body when it is a
+ // bare expression, e.g. a closure body) is an implicit exit point.
+ let tail = match body {
+ ast::Expr::BlockExpr(b) => b.tail_expr(),
+ e => Some(e),
+ };
+
+ if let Some(tail) = tail {
+ for_each_tail_expr(&tail, &mut |tail| {
+ let range = match tail {
+ // For a tail `break`, highlight just the `break` keyword
+ // when present, the whole expression otherwise.
+ ast::Expr::BreakExpr(b) => b
+ .break_token()
+ .map_or_else(|| tail.syntax().text_range(), |tok| tok.text_range()),
+ _ => tail.syntax().text_range(),
+ };
+ highlights.push(HighlightedRange { category: None, range })
+ });
+ }
+ Some(highlights)
+ }
+ // Walk outwards to the nearest fn / closure / async-try-const block that
+ // owns the exit points; `continue` keeps searching upwards.
+ for anc in token.parent_ancestors() {
+ return match_ast! {
+ match anc {
+ ast::Fn(fn_) => hl(sema, fn_.body().map(ast::Expr::BlockExpr)),
+ ast::ClosureExpr(closure) => hl(sema, closure.body()),
+ ast::BlockExpr(block_expr) => if matches!(block_expr.modifier(), Some(ast::BlockModifier::Async(_) | ast::BlockModifier::Try(_)| ast::BlockModifier::Const(_))) {
+ hl(sema, Some(block_expr.into()))
+ } else {
+ continue;
+ },
+ _ => continue,
+ }
+ };
+ }
+ None
+}
+
+/// Highlights the loop/block construct that `token` (a `break`, `continue`,
+/// loop keyword, or their label) refers to, together with the matching
+/// `break`/`continue` expressions inside it.
+fn highlight_break_points(token: SyntaxToken) -> Option<Vec<HighlightedRange>> {
+ // Builds the highlight list for one loop/block: the loop keyword plus
+ // label, and each break/continue that is relevant for the cursor token.
+ fn hl(
+ cursor_token_kind: SyntaxKind,
+ token: Option<SyntaxToken>,
+ label: Option<ast::Label>,
+ body: Option<ast::StmtList>,
+ ) -> Option<Vec<HighlightedRange>> {
+ let mut highlights = Vec::new();
+ let range = cover_range(
+ token.map(|tok| tok.text_range()),
+ label.as_ref().map(|it| it.syntax().text_range()),
+ );
+ highlights.extend(range.map(|range| HighlightedRange { category: None, range }));
+ for_each_break_and_continue_expr(label, body, &mut |expr| {
+ // A cursor on `break` highlights only breaks, on `continue` only
+ // continues; a cursor on the loop keyword highlights both.
+ let range: Option<TextRange> = match (cursor_token_kind, expr) {
+ (T![for] | T![while] | T![loop] | T![break], ast::Expr::BreakExpr(break_)) => {
+ cover_range(
+ break_.break_token().map(|it| it.text_range()),
+ break_.lifetime().map(|it| it.syntax().text_range()),
+ )
+ }
+ (
+ T![for] | T![while] | T![loop] | T![continue],
+ ast::Expr::ContinueExpr(continue_),
+ ) => cover_range(
+ continue_.continue_token().map(|it| it.text_range()),
+ continue_.lifetime().map(|it| it.syntax().text_range()),
+ ),
+ _ => None,
+ };
+ highlights.extend(range.map(|range| HighlightedRange { category: None, range }));
+ });
+ Some(highlights)
+ }
+ // The label (if any) the cursor token refers to, taken from its parent.
+ let parent = token.parent()?;
+ let lbl = match_ast! {
+ match parent {
+ ast::BreakExpr(b) => b.lifetime(),
+ ast::ContinueExpr(c) => c.lifetime(),
+ ast::LoopExpr(l) => l.label().and_then(|it| it.lifetime()),
+ ast::ForExpr(f) => f.label().and_then(|it| it.lifetime()),
+ ast::WhileExpr(w) => w.label().and_then(|it| it.lifetime()),
+ ast::BlockExpr(b) => Some(b.label().and_then(|it| it.lifetime())?),
+ _ => return None,
+ }
+ };
+ let lbl = lbl.as_ref();
+ // An unlabeled cursor token matches any loop; a labeled one must match
+ // the candidate loop's label text exactly.
+ let label_matches = |def_lbl: Option<ast::Label>| match lbl {
+ Some(lbl) => {
+ Some(lbl.text()) == def_lbl.and_then(|it| it.lifetime()).as_ref().map(|it| it.text())
+ }
+ None => true,
+ };
+ let token_kind = token.kind();
+ // Walk outwards to the nearest matching loop/labeled block; `continue`
+ // keeps searching upwards.
+ for anc in token.parent_ancestors().flat_map(ast::Expr::cast) {
+ return match anc {
+ ast::Expr::LoopExpr(l) if label_matches(l.label()) => hl(
+ token_kind,
+ l.loop_token(),
+ l.label(),
+ l.loop_body().and_then(|it| it.stmt_list()),
+ ),
+ ast::Expr::ForExpr(f) if label_matches(f.label()) => hl(
+ token_kind,
+ f.for_token(),
+ f.label(),
+ f.loop_body().and_then(|it| it.stmt_list()),
+ ),
+ ast::Expr::WhileExpr(w) if label_matches(w.label()) => hl(
+ token_kind,
+ w.while_token(),
+ w.label(),
+ w.loop_body().and_then(|it| it.stmt_list()),
+ ),
+ // Plain blocks only count when they are labeled.
+ ast::Expr::BlockExpr(e) if e.label().is_some() && label_matches(e.label()) => {
+ hl(token_kind, None, e.label(), e.stmt_list())
+ }
+ _ => continue,
+ };
+ }
+ None
+}
+
+/// Highlights the `async` token of the context enclosing `token` plus all
+/// `.await` points inside that same context.
+fn highlight_yield_points(token: SyntaxToken) -> Option<Vec<HighlightedRange>> {
+ // Highlights `async_token` and every `await` inside `body`; bails with
+ // `None` when there is no `async` token (i.e. not an async context).
+ fn hl(
+ async_token: Option<SyntaxToken>,
+ body: Option<ast::Expr>,
+ ) -> Option<Vec<HighlightedRange>> {
+ let mut highlights =
+ vec![HighlightedRange { category: None, range: async_token?.text_range() }];
+ if let Some(body) = body {
+ walk_expr(&body, &mut |expr| {
+ if let ast::Expr::AwaitExpr(expr) = expr {
+ if let Some(token) = expr.await_token() {
+ highlights
+ .push(HighlightedRange { category: None, range: token.text_range() });
+ }
+ }
+ });
+ }
+ Some(highlights)
+ }
+ // Walk outwards to the nearest async fn / async block / async closure;
+ // `continue` keeps searching upwards past non-async blocks.
+ for anc in token.parent_ancestors() {
+ return match_ast! {
+ match anc {
+ ast::Fn(fn_) => hl(fn_.async_token(), fn_.body().map(ast::Expr::BlockExpr)),
+ ast::BlockExpr(block_expr) => {
+ if block_expr.async_token().is_none() {
+ continue;
+ }
+ hl(block_expr.async_token(), Some(block_expr.into()))
+ },
+ ast::ClosureExpr(closure) => hl(closure.async_token(), closure.body()),
+ _ => continue,
+ }
+ };
+ }
+ None
+}
+
+/// Merges two optional ranges: when both are present the result covers
+/// both; otherwise the single present range (if any) is returned unchanged.
+fn cover_range(r0: Option<TextRange>, r1: Option<TextRange>) -> Option<TextRange> {
+ match (r0, r1) {
+ (Some(a), Some(b)) => Some(a.cover(b)),
+ (a, b) => a.or(b),
+ }
+}
+
+/// Resolves `token` (descending into macro expansions) to the set of
+/// definitions it refers to.
+fn find_defs(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> FxHashSet<Definition> {
+ sema.descend_into_macros(token)
+ .into_iter()
++ .filter_map(|token| IdentClass::classify_token(sema, &token))
++ .map(IdentClass::definitions_no_ops)
+ .flatten()
+ .collect()
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::fixture;
+
+ use super::*;
+
+ #[track_caller]
+ fn check(ra_fixture: &str) {
+ let config = HighlightRelatedConfig {
+ break_points: true,
+ exit_points: true,
+ references: true,
+ yield_points: true,
+ };
+
+ check_with_config(ra_fixture, config);
+ }
+
+ #[track_caller]
+ fn check_with_config(ra_fixture: &str, config: HighlightRelatedConfig) {
+ let (analysis, pos, annotations) = fixture::annotations(ra_fixture);
+
+ let hls = analysis.highlight_related(config, pos).unwrap().unwrap_or_default();
+
+ let mut expected = annotations
+ .into_iter()
+ .map(|(r, access)| (r.range, (!access.is_empty()).then(|| access)))
+ .collect::<Vec<_>>();
+
+ let mut actual = hls
+ .into_iter()
+ .map(|hl| {
+ (
+ hl.range,
+ hl.category.map(|it| {
+ match it {
+ ReferenceCategory::Read => "read",
+ ReferenceCategory::Write => "write",
+ }
+ .to_string()
+ }),
+ )
+ })
+ .collect::<Vec<_>>();
+ actual.sort_by_key(|(range, _)| range.start());
+ expected.sort_by_key(|(range, _)| range.start());
+
+ assert_eq!(expected, actual);
+ }
+
+ #[test]
+ fn test_hl_tuple_fields() {
+ check(
+ r#"
+struct Tuple(u32, u32);
+
+fn foo(t: Tuple) {
+ t.0$0;
+ // ^ read
+ t.0;
+ // ^ read
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_module() {
+ check(
+ r#"
+//- /lib.rs
+mod foo$0;
+ // ^^^
+//- /foo.rs
+struct Foo;
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_self_in_crate_root() {
+ check(
+ r#"
+use crate$0;
+ //^^^^^
+use self;
+ //^^^^
+mod __ {
+ use super;
+ //^^^^^
+}
+"#,
+ );
+ check(
+ r#"
+//- /main.rs crate:main deps:lib
+use lib$0;
+ //^^^
+//- /lib.rs crate:lib
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_self_in_module() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+//- /foo.rs
+use self$0;
+ // ^^^^
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_local() {
+ check(
+ r#"
+fn foo() {
+ let mut bar = 3;
+ // ^^^ write
+ bar$0;
+ // ^^^ read
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_local_in_attr() {
+ check(
+ r#"
+//- proc_macros: identity
+#[proc_macros::identity]
+fn foo() {
+ let mut bar = 3;
+ // ^^^ write
+ bar$0;
+ // ^^^ read
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_multi_macro_usage() {
+ check(
+ r#"
+macro_rules! foo {
+ ($ident:ident) => {
+ fn $ident() -> $ident { loop {} }
+ struct $ident;
+ }
+}
+
+foo!(bar$0);
+ // ^^^
+fn foo() {
+ let bar: bar = bar();
+ // ^^^
+ // ^^^
+}
+"#,
+ );
+ check(
+ r#"
+macro_rules! foo {
+ ($ident:ident) => {
+ fn $ident() -> $ident { loop {} }
+ struct $ident;
+ }
+}
+
+foo!(bar);
+ // ^^^
+fn foo() {
+ let bar: bar$0 = bar();
+ // ^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_yield_points() {
+ check(
+ r#"
+pub async fn foo() {
+ // ^^^^^
+ let x = foo()
+ .await$0
+ // ^^^^^
+ .await;
+ // ^^^^^
+ || { 0.await };
+ (async { 0.await }).await
+ // ^^^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_yield_points2() {
+ check(
+ r#"
+pub async$0 fn foo() {
+ // ^^^^^
+ let x = foo()
+ .await
+ // ^^^^^
+ .await;
+ // ^^^^^
+ || { 0.await };
+ (async { 0.await }).await
+ // ^^^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_yield_nested_fn() {
+ check(
+ r#"
+async fn foo() {
+ async fn foo2() {
+ // ^^^^^
+ async fn foo3() {
+ 0.await
+ }
+ 0.await$0
+ // ^^^^^
+ }
+ 0.await
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_yield_nested_async_blocks() {
+ check(
+ r#"
+async fn foo() {
+ (async {
+ // ^^^^^
+ (async {
+ 0.await
+ }).await$0 }
+ // ^^^^^
+ ).await;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_exit_points() {
+ check(
+ r#"
+fn foo() -> u32 {
+ if true {
+ return$0 0;
+ // ^^^^^^
+ }
+
+ 0?;
+ // ^
+ 0xDEAD_BEEF
+ // ^^^^^^^^^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_exit_points2() {
+ check(
+ r#"
+fn foo() ->$0 u32 {
+ if true {
+ return 0;
+ // ^^^^^^
+ }
+
+ 0?;
+ // ^
+ 0xDEAD_BEEF
+ // ^^^^^^^^^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_exit_points3() {
+ check(
+ r#"
+fn$0 foo() -> u32 {
+ if true {
+ return 0;
+ // ^^^^^^
+ }
+
+ 0?;
+ // ^
+ 0xDEAD_BEEF
+ // ^^^^^^^^^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_prefer_ref_over_tail_exit() {
+ check(
+ r#"
+fn foo() -> u32 {
+// ^^^
+ if true {
+ return 0;
+ }
+
+ 0?;
+
+ foo$0()
+ // ^^^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_never_call_is_exit_point() {
+ check(
+ r#"
+struct Never;
+impl Never {
+ fn never(self) -> ! { loop {} }
+}
+macro_rules! never {
+ () => { never() }
+}
+fn never() -> ! { loop {} }
+fn foo() ->$0 u32 {
+ never();
+ // ^^^^^^^
+ never!();
+ // ^^^^^^^^
+
+ Never.never();
+ // ^^^^^^^^^^^^^
+
+ 0
+ // ^
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_inner_tail_exit_points() {
+ check(
+ r#"
+fn foo() ->$0 u32 {
+ if true {
+ unsafe {
+ return 5;
+ // ^^^^^^
+ 5
+ // ^
+ }
+ } else if false {
+ 0
+ // ^
+ } else {
+ match 5 {
+ 6 => 100,
+ // ^^^
+ 7 => loop {
+ break 5;
+ // ^^^^^
+ }
+ 8 => 'a: loop {
+ 'b: loop {
+ break 'a 5;
+ // ^^^^^
+ break 'b 5;
+ break 5;
+ };
+ }
+ //
+ _ => 500,
+ // ^^^
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_inner_tail_exit_points_labeled_block() {
+ check(
+ r#"
+fn foo() ->$0 u32 {
+ 'foo: {
+ break 'foo 0;
+ // ^^^^^
+ loop {
+ break;
+ break 'foo 0;
+ // ^^^^^
+ }
+ 0
+ // ^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_loop() {
+ check(
+ r#"
+fn foo() {
+ 'outer: loop {
+ // ^^^^^^^^^^^^
+ break;
+ // ^^^^^
+ 'inner: loop {
+ break;
+ 'innermost: loop {
+ break 'outer;
+ // ^^^^^^^^^^^^
+ break 'inner;
+ }
+ break$0 'outer;
+ // ^^^^^^^^^^^^
+ break;
+ }
+ break;
+ // ^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_loop2() {
+ check(
+ r#"
+fn foo() {
+ 'outer: loop {
+ break;
+ 'inner: loop {
+ // ^^^^^^^^^^^^
+ break;
+ // ^^^^^
+ 'innermost: loop {
+ break 'outer;
+ break 'inner;
+ // ^^^^^^^^^^^^
+ }
+ break 'outer;
+ break$0;
+ // ^^^^^
+ }
+ break;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_for() {
+ check(
+ r#"
+fn foo() {
+ 'outer: for _ in () {
+ // ^^^^^^^^^^^
+ break;
+ // ^^^^^
+ 'inner: for _ in () {
+ break;
+ 'innermost: for _ in () {
+ break 'outer;
+ // ^^^^^^^^^^^^
+ break 'inner;
+ }
+ break$0 'outer;
+ // ^^^^^^^^^^^^
+ break;
+ }
+ break;
+ // ^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_for_but_not_continue() {
+ check(
+ r#"
+fn foo() {
+ 'outer: for _ in () {
+ // ^^^^^^^^^^^
+ break;
+ // ^^^^^
+ continue;
+ 'inner: for _ in () {
+ break;
+ continue;
+ 'innermost: for _ in () {
+ continue 'outer;
+ break 'outer;
+ // ^^^^^^^^^^^^
+ continue 'inner;
+ break 'inner;
+ }
+ break$0 'outer;
+ // ^^^^^^^^^^^^
+ continue 'outer;
+ break;
+ continue;
+ }
+ break;
+ // ^^^^^
+ continue;
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_continue_for_but_not_break() {
+ check(
+ r#"
+fn foo() {
+ 'outer: for _ in () {
+ // ^^^^^^^^^^^
+ break;
+ continue;
+ // ^^^^^^^^
+ 'inner: for _ in () {
+ break;
+ continue;
+ 'innermost: for _ in () {
+ continue 'outer;
+ // ^^^^^^^^^^^^^^^
+ break 'outer;
+ continue 'inner;
+ break 'inner;
+ }
+ break 'outer;
+ continue$0 'outer;
+ // ^^^^^^^^^^^^^^^
+ break;
+ continue;
+ }
+ break;
+ continue;
+ // ^^^^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_and_continue() {
+ check(
+ r#"
+fn foo() {
+ 'outer: fo$0r _ in () {
+ // ^^^^^^^^^^^
+ break;
+ // ^^^^^
+ continue;
+ // ^^^^^^^^
+ 'inner: for _ in () {
+ break;
+ continue;
+ 'innermost: for _ in () {
+ continue 'outer;
+ // ^^^^^^^^^^^^^^^
+ break 'outer;
+ // ^^^^^^^^^^^^
+ continue 'inner;
+ break 'inner;
+ }
+ break 'outer;
+ // ^^^^^^^^^^^^
+ continue 'outer;
+ // ^^^^^^^^^^^^^^^
+ break;
+ continue;
+ }
+ break;
+ // ^^^^^
+ continue;
+ // ^^^^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_while() {
+ check(
+ r#"
+fn foo() {
+ 'outer: while true {
+ // ^^^^^^^^^^^^^
+ break;
+ // ^^^^^
+ 'inner: while true {
+ break;
+ 'innermost: while true {
+ break 'outer;
+ // ^^^^^^^^^^^^
+ break 'inner;
+ }
+ break$0 'outer;
+ // ^^^^^^^^^^^^
+ break;
+ }
+ break;
+ // ^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_labeled_block() {
+ check(
+ r#"
+fn foo() {
+ 'outer: {
+ // ^^^^^^^
+ break;
+ // ^^^^^
+ 'inner: {
+ break;
+ 'innermost: {
+ break 'outer;
+ // ^^^^^^^^^^^^
+ break 'inner;
+ }
+ break$0 'outer;
+ // ^^^^^^^^^^^^
+ break;
+ }
+ break;
+ // ^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_unlabeled_loop() {
+ check(
+ r#"
+fn foo() {
+ loop {
+ // ^^^^
+ break$0;
+ // ^^^^^
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_break_unlabeled_block_in_loop() {
+ check(
+ r#"
+fn foo() {
+ loop {
+ // ^^^^
+ {
+ break$0;
+ // ^^^^^
+ }
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_field_shorthand() {
+ check(
+ r#"
+struct Struct { field: u32 }
+ //^^^^^
+fn function(field: u32) {
+ //^^^^^
+ Struct { field$0 }
+ //^^^^^ read
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_ref_local() {
+ let config = HighlightRelatedConfig {
+ references: false,
+ break_points: true,
+ exit_points: true,
+ yield_points: true,
+ };
+
+ check_with_config(
+ r#"
+fn foo() {
+ let x$0 = 5;
+ let y = x * 2;
+}
+"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_ref_local_preserved_break() {
+ let config = HighlightRelatedConfig {
+ references: false,
+ break_points: true,
+ exit_points: true,
+ yield_points: true,
+ };
+
+ check_with_config(
+ r#"
+fn foo() {
+ let x$0 = 5;
+ let y = x * 2;
+
+ loop {
+ break;
+ }
+}
+"#,
+ config.clone(),
+ );
+
+ check_with_config(
+ r#"
+fn foo() {
+ let x = 5;
+ let y = x * 2;
+
+ loop$0 {
+// ^^^^
+ break;
+// ^^^^^
+ }
+}
+"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_ref_local_preserved_yield() {
+ let config = HighlightRelatedConfig {
+ references: false,
+ break_points: true,
+ exit_points: true,
+ yield_points: true,
+ };
+
+ check_with_config(
+ r#"
+async fn foo() {
+ let x$0 = 5;
+ let y = x * 2;
+
+ 0.await;
+}
+"#,
+ config.clone(),
+ );
+
+ check_with_config(
+ r#"
+ async fn foo() {
+// ^^^^^
+ let x = 5;
+ let y = x * 2;
+
+ 0.await$0;
+// ^^^^^
+}
+"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_ref_local_preserved_exit() {
+ let config = HighlightRelatedConfig {
+ references: false,
+ break_points: true,
+ exit_points: true,
+ yield_points: true,
+ };
+
+ check_with_config(
+ r#"
+fn foo() -> i32 {
+ let x$0 = 5;
+ let y = x * 2;
+
+ if true {
+ return y;
+ }
+
+ 0?
+}
+"#,
+ config.clone(),
+ );
+
+ check_with_config(
+ r#"
+fn foo() ->$0 i32 {
+ let x = 5;
+ let y = x * 2;
+
+ if true {
+ return y;
+// ^^^^^^
+ }
+
+ 0?
+// ^
+"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_break() {
+ let config = HighlightRelatedConfig {
+ references: true,
+ break_points: false,
+ exit_points: true,
+ yield_points: true,
+ };
+
+ check_with_config(
+ r#"
+fn foo() {
+ loop {
+ break$0;
+ }
+}
+"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_yield() {
+ let config = HighlightRelatedConfig {
+ references: true,
+ break_points: true,
+ exit_points: true,
+ yield_points: false,
+ };
+
+ check_with_config(
+ r#"
+async$0 fn foo() {
+ 0.await;
+}
+"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_disabled_exit() {
+ let config = HighlightRelatedConfig {
+ references: true,
+ break_points: true,
+ exit_points: false,
+ yield_points: true,
+ };
+
+ check_with_config(
+ r#"
+fn foo() ->$0 i32 {
+ if true {
+ return -1;
+ }
+
+ 42
+}"#,
+ config,
+ );
+ }
+
+ #[test]
+ fn test_hl_multi_local() {
+ check(
+ r#"
+fn foo((
+ foo$0
+ //^^^
+ | foo
+ //^^^
+ | foo
+ //^^^
+): ()) {
+ foo;
+ //^^^read
+ let foo;
+}
+"#,
+ );
+ check(
+ r#"
+fn foo((
+ foo
+ //^^^
+ | foo$0
+ //^^^
+ | foo
+ //^^^
+): ()) {
+ foo;
+ //^^^read
+ let foo;
+}
+"#,
+ );
+ check(
+ r#"
+fn foo((
+ foo
+ //^^^
+ | foo
+ //^^^
+ | foo
+ //^^^
+): ()) {
+ foo$0;
+ //^^^read
+ let foo;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn test_hl_trait_impl_methods() {
+ check(
+ r#"
+trait Trait {
+ fn func$0(self) {}
+ //^^^^
+}
+
+impl Trait for () {
+ fn func(self) {}
+ //^^^^
+}
+
+fn main() {
+ <()>::func(());
+ //^^^^
+ ().func();
+ //^^^^
+}
+"#,
+ );
+ check(
+ r#"
+trait Trait {
+ fn func(self) {}
+ //^^^^
+}
+
+impl Trait for () {
+ fn func$0(self) {}
+ //^^^^
+}
+
+fn main() {
+ <()>::func(());
+ //^^^^
+ ().func();
+ //^^^^
+}
+"#,
+ );
+ check(
+ r#"
+trait Trait {
+ fn func(self) {}
+ //^^^^
+}
+
+impl Trait for () {
+ fn func(self) {}
+ //^^^^
+}
+
+fn main() {
+ <()>::func(());
+ //^^^^
+ ().func$0();
+ //^^^^
+}
+"#,
+ );
+ }
+}
--- /dev/null
- defs::{Definition, IdentClass},
+mod render;
+
+#[cfg(test)]
+mod tests;
+
+use std::iter;
+
+use either::Either;
+use hir::{HasSource, Semantics};
+use ide_db::{
+ base_db::FileRange,
- IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self] => 3,
++ defs::{Definition, IdentClass, OperatorClass},
+ famous_defs::FamousDefs,
+ helpers::pick_best_token,
+ FxIndexSet, RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{ast, match_ast, AstNode, SyntaxKind::*, SyntaxNode, SyntaxToken, T};
+
+use crate::{
+ doc_links::token_as_doc_comment,
+ markup::Markup,
+ runnables::{runnable_fn, runnable_mod},
+ FileId, FilePosition, NavigationTarget, RangeInfo, Runnable, TryToNav,
+};
+/// Client-facing configuration for hover: whether to emit links and in
+/// which format (if any) to render documentation.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct HoverConfig {
+ pub links_in_hover: bool,
+ pub documentation: Option<HoverDocFormat>,
+}
+
+impl HoverConfig {
+ /// `true` when documentation should be rendered as Markdown.
+ fn markdown(&self) -> bool {
+ matches!(self.documentation, Some(HoverDocFormat::Markdown))
+ }
+}
+
+/// Output format for documentation shown in hovers.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum HoverDocFormat {
+ Markdown,
+ PlainText,
+}
+
+/// An action the client can offer alongside the hover contents
+/// (e.g. "Go to implementation", "Find references").
+#[derive(Debug, Clone)]
+pub enum HoverAction {
+ Runnable(Runnable),
+ Implementation(FilePosition),
+ Reference(FilePosition),
+ GoToType(Vec<HoverGotoTypeData>),
+}
+
+impl HoverAction {
+ /// Builds a `GoToType` action from `targets`, silently dropping targets
+ /// without a module or without a nav target.
+ fn goto_type_from_targets(db: &RootDatabase, targets: Vec<hir::ModuleDef>) -> Self {
+ let targets = targets
+ .into_iter()
+ .filter_map(|it| {
+ Some(HoverGotoTypeData {
+ mod_path: render::path(
+ db,
+ it.module(db)?,
+ it.name(db).map(|name| name.to_string()),
+ ),
+ nav: it.try_to_nav(db)?,
+ })
+ })
+ .collect();
+ HoverAction::GoToType(targets)
+ }
+}
+
+/// One go-to-type target: its displayable module path plus nav target.
+#[derive(Debug, Clone, Eq, PartialEq, Hash)]
+pub struct HoverGotoTypeData {
+ pub mod_path: String,
+ pub nav: NavigationTarget,
+}
+
+/// Contains the results when hovering over an item
+#[derive(Debug, Default)]
+pub struct HoverResult {
+ pub markup: Markup,
+ pub actions: Vec<HoverAction>,
+}
+
+// Feature: Hover
+//
+// Shows additional information, like the type of an expression or the documentation for a definition when "focusing" code.
+// Focusing is usually hovering with a mouse, but can also be triggered with a shortcut.
+//
+// image::https://user-images.githubusercontent.com/48062697/113020658-b5f98b80-917a-11eb-9f88-3dbc27320c95.gif[]
+pub(crate) fn hover(
+ db: &RootDatabase,
+ FileRange { file_id, range }: FileRange,
+ config: &HoverConfig,
+) -> Option<RangeInfo<HoverResult>> {
+ let sema = &hir::Semantics::new(db);
+ let file = sema.parse(file_id).syntax().clone();
+
+ // A non-empty range means "hover over a selection" — handled separately.
+ if !range.is_empty() {
+ return hover_ranged(&file, range, sema, config);
+ }
+ let offset = range.start();
+
+ // Pick the most interesting token at the cursor; higher score wins.
+ let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
- let node = token
- .parent_ancestors()
- .take_while(|it| !ast::Item::can_cast(it.kind()))
- .find(|n| ast::Expr::can_cast(n.kind()) || ast::Pat::can_cast(n.kind()))?;
++ IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self] => 4,
++ // index and prefix ops
++ T!['['] | T![']'] | T![?] | T![*] | T![-] | T![!] => 3,
++ kind if kind.is_keyword() => 2,
+ T!['('] | T![')'] => 2,
+ kind if kind.is_trivia() => 0,
+ _ => 1,
+ })?;
+
+ // Hovering a doc comment: resolve the definition it documents instead.
+ if let Some(doc_comment) = token_as_doc_comment(&original_token) {
+ cov_mark::hit!(no_highlight_on_comment_hover);
+ return doc_comment.get_definition_with_descend_at(sema, offset, |def, node, range| {
+ let res = hover_for_definition(sema, file_id, def, &node, config)?;
+ Some(RangeInfo::new(range, res))
+ });
+ }
+
+ let in_attr = matches!(original_token.parent().and_then(ast::TokenTree::cast), Some(tt) if tt.syntax().ancestors().any(|it| ast::Meta::can_cast(it.kind())));
+ let descended = if in_attr {
+ [sema.descend_into_macros_with_kind_preference(original_token.clone())].into()
+ } else {
+ sema.descend_into_macros_with_same_text(original_token.clone())
+ };
+
+ // FIXME: Definition should include known lints and the like instead of having this special case here
+ let hovered_lint = descended.iter().find_map(|token| {
+ let attr = token.parent_ancestors().find_map(ast::Attr::cast)?;
+ render::try_for_lint(&attr, token)
+ });
+ if let Some(res) = hovered_lint {
+ return Some(RangeInfo::new(original_token.text_range(), res));
+ }
+
+ // Classify every descended token, resolve to definitions, and merge
+ // their hover results into one markup separated by `---` rules.
+ let result = descended
+ .iter()
+ .filter_map(|token| {
+ let node = token.parent()?;
+ let class = IdentClass::classify_token(sema, token)?;
++ if let IdentClass::Operator(OperatorClass::Await(_)) = class {
++ // It's better for us to fall back to the keyword hover here,
++ // rendering poll is very confusing
++ return None;
++ }
+ Some(class.definitions().into_iter().zip(iter::once(node).cycle()))
+ })
+ .flatten()
+ .unique_by(|&(def, _)| def)
+ .filter_map(|(def, node)| hover_for_definition(sema, file_id, def, &node, config))
+ .reduce(|mut acc: HoverResult, HoverResult { markup, actions }| {
+ acc.actions.extend(actions);
+ acc.markup = Markup::from(format!("{}\n---\n{}", acc.markup, markup));
+ acc
+ });
+
+ if result.is_none() {
+ // fallbacks, show keywords or types
+
+ let res = descended.iter().find_map(|token| render::keyword(sema, config, token));
+ if let Some(res) = res {
+ return Some(RangeInfo::new(original_token.text_range(), res));
+ }
+ let res = descended
+ .iter()
+ .find_map(|token| hover_type_fallback(sema, config, token, &original_token));
+ if let Some(_) = res {
+ return res;
+ }
+ }
+ result.map(|mut res: HoverResult| {
+ res.actions = dedupe_or_merge_hover_actions(res.actions);
+ RangeInfo::new(original_token.text_range(), res)
+ })
+}
+
+/// Renders the hover result for a single resolved `definition`: its markup
+/// plus every applicable hover action (implementations, references,
+/// runnables, go-to-type). Returns `None` when nothing can be rendered.
+pub(crate) fn hover_for_definition(
+ sema: &Semantics<'_, RootDatabase>,
+ file_id: FileId,
+ definition: Definition,
+ node: &SyntaxNode,
+ config: &HoverConfig,
+) -> Option<HoverResult> {
+ // Builtin types need the famous-defs lookup to resolve their docs.
+ let famous_defs = match &definition {
+ Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(node)?.krate())),
+ _ => None,
+ };
+ render::definition(sema.db, definition, famous_defs.as_ref(), config).map(|markup| {
+ HoverResult {
+ markup: render::process_markup(sema.db, definition, &markup, config),
+ actions: show_implementations_action(sema.db, definition)
+ .into_iter()
+ .chain(show_fn_references_action(sema.db, definition))
+ .chain(runnable_action(sema, definition, file_id))
+ .chain(goto_type_action_for_def(sema.db, definition))
+ .collect(),
+ }
+ })
+}
+
/// Hover over an explicit selection (a range) instead of a single offset.
///
/// `?` (try) expressions and `*` (deref) prefix expressions get dedicated
/// rendering; anything else falls back to plain type information of the
/// innermost expression or pattern covering the range.
fn hover_ranged(
    file: &SyntaxNode,
    range: syntax::TextRange,
    sema: &Semantics<'_, RootDatabase>,
    config: &HoverConfig,
) -> Option<RangeInfo<HoverResult>> {
    // FIXME: make this work in attributes
    // Find the innermost expression or pattern whose node covers the range.
    let expr_or_pat = file.covering_element(range).ancestors().find_map(|it| {
        match_ast! {
            match it {
                ast::Expr(expr) => Some(Either::Left(expr)),
                ast::Pat(pat) => Some(Either::Right(pat)),
                _ => None,
            }
        }
    })?;
    let res = match &expr_or_pat {
        // `expr?` — dedicated rendering.
        Either::Left(ast::Expr::TryExpr(try_expr)) => render::try_expr(sema, config, try_expr),
        // `*expr` — dedicated rendering (other prefix ops fall through).
        Either::Left(ast::Expr::PrefixExpr(prefix_expr))
            if prefix_expr.op_kind() == Some(ast::UnaryOp::Deref) =>
        {
            render::deref_expr(sema, config, prefix_expr)
        }
        _ => None,
    };
    // Fallback: show the type of the covering expression/pattern.
    let res = res.or_else(|| render::type_info(sema, config, &expr_or_pat));
    res.map(|it| {
        // Report the full node's range, not just the user's selection.
        let range = match expr_or_pat {
            Either::Left(it) => it.syntax().text_range(),
            Either::Right(it) => it.syntax().text_range(),
        };
        RangeInfo::new(range, it)
    })
}
+
/// Fallback hover: when nothing else produced a result, show the type of the
/// nearest enclosing expression or pattern of `token`.
///
/// The upward search stops at item boundaries so we never report the type of
/// an unrelated outer item. NOTE(review): the search also accepts `ast::Type`
/// nodes, but the match below only handles Expr/Pat, so a found Type node
/// yields `None` — presumably intentional to stop the walk there; confirm.
fn hover_type_fallback(
    sema: &Semantics<'_, RootDatabase>,
    config: &HoverConfig,
    token: &SyntaxToken,
    original_token: &SyntaxToken,
) -> Option<RangeInfo<HoverResult>> {
    let node =
        token.parent_ancestors().take_while(|it| !ast::Item::can_cast(it.kind())).find(|n| {
            ast::Expr::can_cast(n.kind())
                || ast::Pat::can_cast(n.kind())
                || ast::Type::can_cast(n.kind())
        })?;

    let expr_or_pat = match_ast! {
        match node {
            ast::Expr(it) => Either::Left(it),
            ast::Pat(it) => Either::Right(it),
            // If this node is a MACRO_CALL, it means that `descend_into_macros_many` failed to resolve.
            // (e.g expanding a builtin macro). So we give up here.
            ast::MacroCall(_it) => return None,
            _ => return None,
        }
    };

    let res = render::type_info(sema, config, &expr_or_pat)?;
    // Map the node back to its range in the original file; if that fails
    // (macro-generated node), fall back to the originally hovered token.
    let range = sema
        .original_range_opt(&node)
        .map(|frange| frange.range)
        .unwrap_or_else(|| original_token.text_range());
    Some(RangeInfo::new(range, res))
}
+
+fn show_implementations_action(db: &RootDatabase, def: Definition) -> Option<HoverAction> {
+ fn to_action(nav_target: NavigationTarget) -> HoverAction {
+ HoverAction::Implementation(FilePosition {
+ file_id: nav_target.file_id,
+ offset: nav_target.focus_or_full_range().start(),
+ })
+ }
+
+ let adt = match def {
+ Definition::Trait(it) => return it.try_to_nav(db).map(to_action),
+ Definition::Adt(it) => Some(it),
+ Definition::SelfType(it) => it.self_ty(db).as_adt(),
+ _ => None,
+ }?;
+ adt.try_to_nav(db).map(to_action)
+}
+
+fn show_fn_references_action(db: &RootDatabase, def: Definition) -> Option<HoverAction> {
+ match def {
+ Definition::Function(it) => it.try_to_nav(db).map(|nav_target| {
+ HoverAction::Reference(FilePosition {
+ file_id: nav_target.file_id,
+ offset: nav_target.focus_or_full_range().start(),
+ })
+ }),
+ _ => None,
+ }
+}
+
/// Builds the "run" hover action for modules and functions.
///
/// Returns `None` for other definition kinds, or when the function's source
/// lives in a different file than the hovered one (the cov_mark names below
/// suggest this is the macro-generated-item case).
fn runnable_action(
    sema: &hir::Semantics<'_, RootDatabase>,
    def: Definition,
    file_id: FileId,
) -> Option<HoverAction> {
    match def {
        Definition::Module(it) => runnable_mod(sema, it).map(HoverAction::Runnable),
        Definition::Function(func) => {
            let src = func.source(sema.db)?;
            if src.file_id != file_id.into() {
                // Coverage markers checked by the macro-generated-fn hover tests.
                cov_mark::hit!(hover_macro_generated_struct_fn_doc_comment);
                cov_mark::hit!(hover_macro_generated_struct_fn_doc_attr);
                return None;
            }

            runnable_fn(sema, func).map(HoverAction::Runnable)
        }
        _ => None,
    }
}
+
+fn goto_type_action_for_def(db: &RootDatabase, def: Definition) -> Option<HoverAction> {
+ let mut targets: Vec<hir::ModuleDef> = Vec::new();
+ let mut push_new_def = |item: hir::ModuleDef| {
+ if !targets.contains(&item) {
+ targets.push(item);
+ }
+ };
+
+ if let Definition::GenericParam(hir::GenericParam::TypeParam(it)) = def {
+ it.trait_bounds(db).into_iter().for_each(|it| push_new_def(it.into()));
+ } else {
+ let ty = match def {
+ Definition::Local(it) => it.ty(db),
+ Definition::GenericParam(hir::GenericParam::ConstParam(it)) => it.ty(db),
+ Definition::Field(field) => field.ty(db),
+ Definition::Function(function) => function.ret_type(db),
+ _ => return None,
+ };
+
+ walk_and_push_ty(db, &ty, &mut push_new_def);
+ }
+
+ Some(HoverAction::goto_type_from_targets(db, targets))
+}
+
/// Walks `ty` and feeds every named definition encountered into
/// `push_new_def`: ADTs, `dyn Trait` traits, the traits behind `impl Trait`,
/// and the parent traits of associated types.
///
/// The if-else chain establishes a precedence per visited type — only the
/// first matching classification is pushed.
fn walk_and_push_ty(
    db: &RootDatabase,
    ty: &hir::Type,
    push_new_def: &mut dyn FnMut(hir::ModuleDef),
) {
    ty.walk(db, |t| {
        if let Some(adt) = t.as_adt() {
            push_new_def(adt.into());
        } else if let Some(trait_) = t.as_dyn_trait() {
            push_new_def(trait_.into());
        } else if let Some(traits) = t.as_impl_traits(db) {
            // `impl A + B` contributes every bound trait.
            traits.for_each(|it| push_new_def(it.into()));
        } else if let Some(trait_) = t.as_associated_type_parent_trait(db) {
            push_new_def(trait_.into());
        }
    });
}
+
+fn dedupe_or_merge_hover_actions(actions: Vec<HoverAction>) -> Vec<HoverAction> {
+ let mut deduped_actions = Vec::with_capacity(actions.len());
+ let mut go_to_type_targets = FxIndexSet::default();
+
+ let mut seen_implementation = false;
+ let mut seen_reference = false;
+ let mut seen_runnable = false;
+ for action in actions {
+ match action {
+ HoverAction::GoToType(targets) => {
+ go_to_type_targets.extend(targets);
+ }
+ HoverAction::Implementation(..) => {
+ if !seen_implementation {
+ seen_implementation = true;
+ deduped_actions.push(action);
+ }
+ }
+ HoverAction::Reference(..) => {
+ if !seen_reference {
+ seen_reference = true;
+ deduped_actions.push(action);
+ }
+ }
+ HoverAction::Runnable(..) => {
+ if !seen_runnable {
+ seen_runnable = true;
+ deduped_actions.push(action);
+ }
+ }
+ };
+ }
+
+ if !go_to_type_targets.is_empty() {
+ deduped_actions.push(HoverAction::GoToType(go_to_type_targets.into_iter().collect()));
+ }
+
+ deduped_actions
+}
--- /dev/null
+use expect_test::{expect, Expect};
+use ide_db::base_db::{FileLoader, FileRange};
+use syntax::TextRange;
+
+use crate::{fixture, hover::HoverDocFormat, HoverConfig};
+
+fn check_hover_no_result(ra_fixture: &str) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) },
+ FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
+ )
+ .unwrap();
+ assert!(hover.is_none(), "hover not expected but found: {:?}", hover.unwrap());
+}
+
+#[track_caller]
+fn check(ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) },
+ FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
+ )
+ .unwrap()
+ .unwrap();
+
+ let content = analysis.db.file_text(position.file_id);
+ let hovered_element = &content[hover.range];
+
+ let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup);
+ expect.assert_eq(&actual)
+}
+
+fn check_hover_no_links(ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: false, documentation: Some(HoverDocFormat::Markdown) },
+ FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
+ )
+ .unwrap()
+ .unwrap();
+
+ let content = analysis.db.file_text(position.file_id);
+ let hovered_element = &content[hover.range];
+
+ let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup);
+ expect.assert_eq(&actual)
+}
+
+fn check_hover_no_markdown(ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::PlainText) },
+ FileRange { file_id: position.file_id, range: TextRange::empty(position.offset) },
+ )
+ .unwrap()
+ .unwrap();
+
+ let content = analysis.db.file_text(position.file_id);
+ let hovered_element = &content[hover.range];
+
+ let actual = format!("*{}*\n{}\n", hovered_element, hover.info.markup);
+ expect.assert_eq(&actual)
+}
+
+fn check_actions(ra_fixture: &str, expect: Expect) {
+ let (analysis, file_id, position) = fixture::range_or_position(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) },
+ FileRange { file_id, range: position.range_or_empty() },
+ )
+ .unwrap()
+ .unwrap();
+ expect.assert_debug_eq(&hover.info.actions)
+}
+
+fn check_hover_range(ra_fixture: &str, expect: Expect) {
+ let (analysis, range) = fixture::range(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: false, documentation: Some(HoverDocFormat::Markdown) },
+ range,
+ )
+ .unwrap()
+ .unwrap();
+ expect.assert_eq(hover.info.markup.as_str())
+}
+
+fn check_hover_range_no_results(ra_fixture: &str) {
+ let (analysis, range) = fixture::range(ra_fixture);
+ let hover = analysis
+ .hover(
+ &HoverConfig { links_in_hover: false, documentation: Some(HoverDocFormat::Markdown) },
+ range,
+ )
+ .unwrap();
+ assert!(hover.is_none());
+}
+
#[test]
fn hover_descend_macros_avoids_duplicates() {
    // A token expanded to multiple identical definitions by a macro must be
    // reported once, not once per expansion site.
    check(
        r#"
macro_rules! dupe_use {
    ($local:ident) => {
        {
            $local;
            $local;
        }
    }
}
fn foo() {
    let local = 0;
    dupe_use!(local$0);
}
"#,
        expect![[r#"
            *local*

            ```rust
            let local: i32
            ```
        "#]],
    );
}
+
+#[test]
+fn hover_shows_all_macro_descends() {
+ check(
+ r#"
+macro_rules! m {
+ ($name:ident) => {
+ /// Outer
+ fn $name() {}
+
+ mod module {
+ /// Inner
+ fn $name() {}
+ }
+ };
+}
+
+m!(ab$0c);
+ "#,
+ expect![[r#"
+ *abc*
+
+ ```rust
+ test::module
+ ```
+
+ ```rust
+ fn abc()
+ ```
+
+ ---
+
+ Inner
+ ---
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn abc()
+ ```
+
+ ---
+
+ Outer
+ "#]],
+ );
+}
+
+#[test]
+fn hover_shows_type_of_an_expression() {
+ check(
+ r#"
+pub fn foo() -> u32 { 1 }
+
+fn main() {
+ let foo_test = foo()$0;
+}
+"#,
+ expect![[r#"
+ *foo()*
+ ```rust
+ u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_remove_markdown_if_configured() {
+ check_hover_no_markdown(
+ r#"
+pub fn foo() -> u32 { 1 }
+
+fn main() {
+ let foo_test = foo()$0;
+}
+"#,
+ expect![[r#"
+ *foo()*
+ u32
+ "#]],
+ );
+}
+
+#[test]
+fn hover_shows_long_type_of_an_expression() {
+ check(
+ r#"
+struct Scan<A, B, C> { a: A, b: B, c: C }
+struct Iter<I> { inner: I }
+enum Option<T> { Some(T), None }
+
+struct OtherStruct<T> { i: T }
+
+fn scan<A, B, C>(a: A, b: B, c: C) -> Iter<Scan<OtherStruct<A>, B, C>> {
+ Iter { inner: Scan { a, b, c } }
+}
+
+fn main() {
+ let num: i32 = 55;
+ let closure = |memo: &mut u32, value: &u32, _another: &mut u32| -> Option<u32> {
+ Option::Some(*memo + value)
+ };
+ let number = 5u32;
+ let mut iter$0 = scan(OtherStruct { i: num }, closure, number);
+}
+"#,
+ expect![[r#"
+ *iter*
+
+ ```rust
+ let mut iter: Iter<Scan<OtherStruct<OtherStruct<i32>>, |&mut u32, &u32, &mut u32| -> Option<u32>, u32>>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_shows_fn_signature() {
+ // Single file with result
+ check(
+ r#"
+pub fn foo() -> u32 { 1 }
+
+fn main() { let foo_test = fo$0o(); }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo() -> u32
+ ```
+ "#]],
+ );
+
+ // Multiple candidates but results are ambiguous.
+ check(
+ r#"
+//- /a.rs
+pub fn foo() -> u32 { 1 }
+
+//- /b.rs
+pub fn foo() -> &str { "" }
+
+//- /c.rs
+pub fn foo(a: u32, b: u32) {}
+
+//- /main.rs
+mod a;
+mod b;
+mod c;
+
+fn main() { let foo_test = fo$0o(); }
+ "#,
+ expect![[r#"
+ *foo*
+ ```rust
+ {unknown}
+ ```
+ "#]],
+ );
+
+ // Use literal `crate` in path
+ check(
+ r#"
+pub struct X;
+
+fn foo() -> crate::X { X }
+
+fn main() { f$0oo(); }
+ "#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn foo() -> crate::X
+ ```
+ "#]],
+ );
+
+ // Check `super` in path
+ check(
+ r#"
+pub struct X;
+
+mod m { pub fn foo() -> super::X { super::X } }
+
+fn main() { m::f$0oo(); }
+ "#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test::m
+ ```
+
+ ```rust
+ pub fn foo() -> super::X
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_omits_unnamed_where_preds() {
+ check(
+ r#"
+pub fn foo(bar: impl T) { }
+
+fn main() { fo$0o(); }
+ "#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo(bar: impl T)
+ ```
+ "#]],
+ );
+ check(
+ r#"
+pub fn foo<V: AsRef<str>>(bar: impl T, baz: V) { }
+
+fn main() { fo$0o(); }
+ "#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo<V>(bar: impl T, baz: V)
+ where
+ V: AsRef<str>,
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_shows_fn_signature_with_type_params() {
+ check(
+ r#"
+pub fn foo<'a, T: AsRef<str>>(b: &'a T) -> &'a str { }
+
+fn main() { let foo_test = fo$0o(); }
+ "#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo<'a, T>(b: &'a T) -> &'a str
+ where
+ T: AsRef<str>,
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_shows_fn_signature_on_fn_name() {
+ check(
+ r#"
+pub fn foo$0(a: u32, b: u32) -> u32 {}
+
+fn main() { }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo(a: u32, b: u32) -> u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_shows_fn_doc() {
+ check(
+ r#"
+/// # Example
+/// ```
+/// # use std::path::Path;
+/// #
+/// foo(Path::new("hello, world!"))
+/// ```
+pub fn foo$0(_: &Path) {}
+
+fn main() { }
+"#,
+ expect![[r##"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo(_: &Path)
+ ```
+
+ ---
+
+ # Example
+
+ ```
+ # use std::path::Path;
+ #
+ foo(Path::new("hello, world!"))
+ ```
+ "##]],
+ );
+}
+
+#[test]
+fn hover_shows_fn_doc_attr_raw_string() {
+ check(
+ r##"
+#[doc = r#"Raw string doc attr"#]
+pub fn foo$0(_: &Path) {}
+
+fn main() { }
+"##,
+ expect![[r##"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo(_: &Path)
+ ```
+
+ ---
+
+ Raw string doc attr
+ "##]],
+ );
+}
+
+#[test]
+fn hover_shows_struct_field_info() {
+ // Hovering over the field when instantiating
+ check(
+ r#"
+struct Foo { field_a: u32 }
+
+fn main() {
+ let foo = Foo { field_a$0: 0, };
+}
+"#,
+ expect![[r#"
+ *field_a*
+
+ ```rust
+ test::Foo
+ ```
+
+ ```rust
+ field_a: u32
+ ```
+ "#]],
+ );
+
+ // Hovering over the field in the definition
+ check(
+ r#"
+struct Foo { field_a$0: u32 }
+
+fn main() {
+ let foo = Foo { field_a: 0 };
+}
+"#,
+ expect![[r#"
+ *field_a*
+
+ ```rust
+ test::Foo
+ ```
+
+ ```rust
+ field_a: u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_const_static() {
+ check(
+ r#"const foo$0: u32 = 123;"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const foo: u32 = 123 (0x7B)
+ ```
+ "#]],
+ );
+ check(
+ r#"
+const foo$0: u32 = {
+ let x = foo();
+ x + 100
+};"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const foo: u32 = {
+ let x = foo();
+ x + 100
+ }
+ ```
+ "#]],
+ );
+
+ check(
+ r#"static foo$0: u32 = 456;"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ static foo: u32 = 456
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_default_generic_types() {
+ check(
+ r#"
+struct Test<K, T = u8> { k: K, t: T }
+
+fn main() {
+ let zz$0 = Test { t: 23u8, k: 33 };
+}"#,
+ expect![[r#"
+ *zz*
+
+ ```rust
+ let zz: Test<i32>
+ ```
+ "#]],
+ );
+ check_hover_range(
+ r#"
+struct Test<K, T = u8> { k: K, t: T }
+
+fn main() {
+ let $0zz$0 = Test { t: 23u8, k: 33 };
+}"#,
+ expect![[r#"
+ ```rust
+ Test<i32, u8>
+ ```"#]],
+ );
+}
+
+#[test]
+fn hover_some() {
+ check(
+ r#"
+enum Option<T> { Some(T) }
+use Option::Some;
+
+fn main() { So$0me(12); }
+"#,
+ expect![[r#"
+ *Some*
+
+ ```rust
+ test::Option
+ ```
+
+ ```rust
+ Some(T)
+ ```
+ "#]],
+ );
+
+ check(
+ r#"
+enum Option<T> { Some(T) }
+use Option::Some;
+
+fn main() { let b$0ar = Some(12); }
+"#,
+ expect![[r#"
+ *bar*
+
+ ```rust
+ let bar: Option<i32>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_enum_variant() {
+ check(
+ r#"
+enum Option<T> {
+ /// The None variant
+ Non$0e
+}
+"#,
+ expect![[r#"
+ *None*
+
+ ```rust
+ test::Option
+ ```
+
+ ```rust
+ None
+ ```
+
+ ---
+
+ The None variant
+ "#]],
+ );
+
+ check(
+ r#"
+enum Option<T> {
+ /// The Some variant
+ Some(T)
+}
+fn main() {
+ let s = Option::Som$0e(12);
+}
+"#,
+ expect![[r#"
+ *Some*
+
+ ```rust
+ test::Option
+ ```
+
+ ```rust
+ Some(T)
+ ```
+
+ ---
+
+ The Some variant
+ "#]],
+ );
+}
+
#[test]
fn hover_for_local_variable() {
    // Hovering a use of a local shows its name and inferred type.
    check(
        r#"fn func(foo: i32) { fo$0o; }"#,
        expect![[r#"
            *foo*

            ```rust
            foo: i32
            ```
        "#]],
    )
}
+
+#[test]
+fn hover_for_local_variable_pat() {
+ check(
+ r#"fn func(fo$0o: i32) {}"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ foo: i32
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_local_var_edge() {
+ check(
+ r#"fn func(foo: i32) { if true { $0foo; }; }"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ foo: i32
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_for_param_edge() {
+ check(
+ r#"fn func($0foo: i32) {}"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ foo: i32
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_for_param_with_multiple_traits() {
+ check(
+ r#"
+ //- minicore: sized
+ trait Deref {
+ type Target: ?Sized;
+ }
+ trait DerefMut {
+ type Target: ?Sized;
+ }
+ fn f(_x$0: impl Deref<Target=u8> + DerefMut<Target=u8>) {}"#,
+ expect![[r#"
+ *_x*
+
+ ```rust
+ _x: impl Deref<Target = u8> + DerefMut<Target = u8>
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_infer_associated_method_result() {
+ check(
+ r#"
+struct Thing { x: u32 }
+
+impl Thing {
+ fn new() -> Thing { Thing { x: 0 } }
+}
+
+fn main() { let foo_$0test = Thing::new(); }
+"#,
+ expect![[r#"
+ *foo_test*
+
+ ```rust
+ let foo_test: Thing
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_infer_associated_method_exact() {
+ check(
+ r#"
+mod wrapper {
+ pub struct Thing { x: u32 }
+
+ impl Thing {
+ pub fn new() -> Thing { Thing { x: 0 } }
+ }
+}
+
+fn main() { let foo_test = wrapper::Thing::new$0(); }
+"#,
+ expect![[r#"
+ *new*
+
+ ```rust
+ test::wrapper::Thing
+ ```
+
+ ```rust
+ pub fn new() -> Thing
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_infer_associated_const_in_pattern() {
+ check(
+ r#"
+struct X;
+impl X {
+ const C: u32 = 1;
+}
+
+fn main() {
+ match 1 {
+ X::C$0 => {},
+ 2 => {},
+ _ => {}
+ };
+}
+"#,
+ expect![[r#"
+ *C*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const C: u32 = 1
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_self() {
+ check(
+ r#"
+struct Thing { x: u32 }
+impl Thing {
+ fn new() -> Self { Self$0 { x: 0 } }
+}
+"#,
+ expect![[r#"
+ *Self*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ struct Thing
+ ```
+ "#]],
+ );
+ check(
+ r#"
+struct Thing { x: u32 }
+impl Thing {
+ fn new() -> Self$0 { Self { x: 0 } }
+}
+"#,
+ expect![[r#"
+ *Self*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ struct Thing
+ ```
+ "#]],
+ );
+ check(
+ r#"
+enum Thing { A }
+impl Thing {
+ pub fn new() -> Self$0 { Thing::A }
+}
+"#,
+ expect![[r#"
+ *Self*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ enum Thing
+ ```
+ "#]],
+ );
+ check(
+ r#"
+ enum Thing { A }
+ impl Thing {
+ pub fn thing(a: Self$0) {}
+ }
+ "#,
+ expect![[r#"
+ *Self*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ enum Thing
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_shadowing_pat() {
+ check(
+ r#"
+fn x() {}
+
+fn y() {
+ let x = 0i32;
+ x$0;
+}
+"#,
+ expect![[r#"
+ *x*
+
+ ```rust
+ let x: i32
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_macro_invocation() {
+ check(
+ r#"
+macro_rules! foo { () => {} }
+
+fn f() { fo$0o!(); }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ macro_rules! foo
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_macro2_invocation() {
+ check(
+ r#"
+/// foo bar
+///
+/// foo bar baz
+macro foo() {}
+
+fn f() { fo$0o!(); }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ macro foo
+ ```
+
+ ---
+
+ foo bar
+
+ foo bar baz
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_tuple_field() {
+ check(
+ r#"struct TS(String, i32$0);"#,
+ expect![[r#"
+ *i32*
+
+ ```rust
+ i32
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_through_macro() {
+ check(
+ r#"
+macro_rules! id { ($($tt:tt)*) => { $($tt)* } }
+fn foo() {}
+id! {
+ fn bar() { fo$0o(); }
+}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn foo()
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_through_attr() {
+ check(
+ r#"
+//- proc_macros: identity
+#[proc_macros::identity]
+fn foo$0() {}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn foo()
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_through_expr_in_macro() {
+ check(
+ r#"
+macro_rules! id { ($($tt:tt)*) => { $($tt)* } }
+fn foo(bar:u32) { let a = id!(ba$0r); }
+"#,
+ expect![[r#"
+ *bar*
+
+ ```rust
+ bar: u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_through_expr_in_macro_recursive() {
+ check(
+ r#"
+macro_rules! id_deep { ($($tt:tt)*) => { $($tt)* } }
+macro_rules! id { ($($tt:tt)*) => { id_deep!($($tt)*) } }
+fn foo(bar:u32) { let a = id!(ba$0r); }
+"#,
+ expect![[r#"
+ *bar*
+
+ ```rust
+ bar: u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_through_func_in_macro_recursive() {
+ check(
+ r#"
+macro_rules! id_deep { ($($tt:tt)*) => { $($tt)* } }
+macro_rules! id { ($($tt:tt)*) => { id_deep!($($tt)*) } }
+fn bar() -> u32 { 0 }
+fn foo() { let a = id!([0u32, bar($0)] ); }
+"#,
+ expect![[r#"
+ *bar()*
+ ```rust
+ u32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_through_literal_string_in_macro() {
+ check(
+ r#"
+macro_rules! arr { ($($tt:tt)*) => { [$($tt)*] } }
+fn foo() {
+ let mastered_for_itunes = "";
+ let _ = arr!("Tr$0acks", &mastered_for_itunes);
+}
+"#,
+ expect![[r#"
+ *"Tracks"*
+ ```rust
+ &str
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_through_assert_macro() {
+ check(
+ r#"
+#[rustc_builtin_macro]
+macro_rules! assert {}
+
+fn bar() -> bool { true }
+fn foo() {
+ assert!(ba$0r());
+}
+"#,
+ expect![[r#"
+ *bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn bar() -> bool
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_multiple_actions() {
+ check_actions(
+ r#"
+struct Bar;
+struct Foo { bar: Bar }
+
+fn foo(Foo { b$0ar }: &Foo) {}
+ "#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Bar",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..11,
+ focus_range: 7..10,
+ name: "Bar",
+ kind: Struct,
+ description: "struct Bar",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ )
+}
+
+#[test]
+fn test_hover_through_literal_string_in_builtin_macro() {
+ check_hover_no_result(
+ r#"
+ #[rustc_builtin_macro]
+ macro_rules! format {}
+
+ fn foo() {
+ format!("hel$0lo {}", 0);
+ }
+"#,
+ );
+}
+
+#[test]
+fn test_hover_non_ascii_space_doc() {
+ check(
+ "
+/// <- `\u{3000}` here
+fn foo() { }
+
+fn bar() { fo$0o(); }
+",
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn foo()
+ ```
+
+ ---
+
+ \<- ` ` here
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_function_show_qualifiers() {
+ check(
+ r#"async fn foo$0() {}"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ async fn foo()
+ ```
+ "#]],
+ );
+ check(
+ r#"pub const unsafe fn foo$0() {}"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub const unsafe fn foo()
+ ```
+ "#]],
+ );
+ // Top level `pub(crate)` will be displayed as no visibility.
+ check(
+ r#"mod m { pub(crate) async unsafe extern "C" fn foo$0() {} }"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test::m
+ ```
+
+ ```rust
+ pub(crate) async unsafe extern "C" fn foo()
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_function_show_types() {
+ check(
+ r#"fn foo$0(a: i32, b:i32) -> i32 { 0 }"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn foo(a: i32, b: i32) -> i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_function_pointer_show_identifiers() {
+ check(
+ r#"type foo$0 = fn(a: i32, b: i32) -> i32;"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type foo = fn(a: i32, b: i32) -> i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_function_pointer_no_identifier() {
+ check(
+ r#"type foo$0 = fn(i32, _: i32) -> i32;"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type foo = fn(i32, i32) -> i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_trait_show_qualifiers() {
+ check_actions(
+ r"unsafe trait foo$0() {}",
+ expect![[r#"
+ [
+ Implementation(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 13,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_extern_crate() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate st$0d;
+//- /std/lib.rs crate:std
+//! Standard library for this test
+//!
+//! Printed?
+//! abc123
+"#,
+ expect![[r#"
+ *std*
+
+ ```rust
+ extern crate std
+ ```
+
+ ---
+
+ Standard library for this test
+
+ Printed?
+ abc123
+ "#]],
+ );
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+extern crate std as ab$0c;
+//- /std/lib.rs crate:std
+//! Standard library for this test
+//!
+//! Printed?
+//! abc123
+"#,
+ expect![[r#"
+ *abc*
+
+ ```rust
+ extern crate std
+ ```
+
+ ---
+
+ Standard library for this test
+
+ Printed?
+ abc123
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_mod_with_same_name_as_function() {
+ check(
+ r#"
+use self::m$0y::Bar;
+mod my { pub struct Bar; }
+
+fn my() {}
+"#,
+ expect![[r#"
+ *my*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ mod my
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_struct_doc_comment() {
+ check(
+ r#"
+/// This is an example
+/// multiline doc
+///
+/// # Example
+///
+/// ```
+/// let five = 5;
+///
+/// assert_eq!(6, my_crate::add_one(5));
+/// ```
+struct Bar;
+
+fn foo() { let bar = Ba$0r; }
+"#,
+ expect![[r##"
+ *Bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ struct Bar
+ ```
+
+ ---
+
+ This is an example
+ multiline doc
+
+ # Example
+
+ ```
+ let five = 5;
+
+ assert_eq!(6, my_crate::add_one(5));
+ ```
+ "##]],
+ );
+}
+
+#[test]
+fn test_hover_struct_doc_attr() {
+ check(
+ r#"
+#[doc = "bar docs"]
+struct Bar;
+
+fn foo() { let bar = Ba$0r; }
+"#,
+ expect![[r#"
+ *Bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ struct Bar
+ ```
+
+ ---
+
+ bar docs
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_struct_doc_attr_multiple_and_mixed() {
+ check(
+ r#"
+/// bar docs 0
+#[doc = "bar docs 1"]
+#[doc = "bar docs 2"]
+struct Bar;
+
+fn foo() { let bar = Ba$0r; }
+"#,
+ expect![[r#"
+ *Bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ struct Bar
+ ```
+
+ ---
+
+ bar docs 0
+ bar docs 1
+ bar docs 2
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_external_url() {
+ check(
+ r#"
+pub struct Foo;
+/// [external](https://www.google.com)
+pub struct B$0ar
+"#,
+ expect![[r#"
+ *Bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub struct Bar
+ ```
+
+ ---
+
+ [external](https://www.google.com)
+ "#]],
+ );
+}
+
+// Check that we don't rewrite links which we can't identify
+#[test]
+fn test_hover_unknown_target() {
+ check(
+ r#"
+pub struct Foo;
+/// [baz](Baz)
+pub struct B$0ar
+"#,
+ expect![[r#"
+ *Bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub struct Bar
+ ```
+
+ ---
+
+ [baz](Baz)
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_no_links() {
+ check_hover_no_links(
+ r#"
+/// Test cases:
+/// case 1. bare URL: https://www.example.com/
+/// case 2. inline URL with title: [example](https://www.example.com/)
+/// case 3. code reference: [`Result`]
+/// case 4. code reference but miss footnote: [`String`]
+/// case 5. autolink: <http://www.example.com/>
+/// case 6. email address: <test@example.com>
+/// case 7. reference: [example][example]
+/// case 8. collapsed link: [example][]
+/// case 9. shortcut link: [example]
+/// case 10. inline without URL: [example]()
+/// case 11. reference: [foo][foo]
+/// case 12. reference: [foo][bar]
+/// case 13. collapsed link: [foo][]
+/// case 14. shortcut link: [foo]
+/// case 15. inline without URL: [foo]()
+/// case 16. just escaped text: \[foo]
+/// case 17. inline link: [Foo](foo::Foo)
+///
+/// [`Result`]: ../../std/result/enum.Result.html
+/// [^example]: https://www.example.com/
+pub fn fo$0o() {}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub fn foo()
+ ```
+
+ ---
+
+ Test cases:
+ case 1. bare URL: https://www.example.com/
+ case 2. inline URL with title: [example](https://www.example.com/)
+ case 3. code reference: `Result`
+ case 4. code reference but miss footnote: `String`
+ case 5. autolink: http://www.example.com/
+ case 6. email address: test@example.com
+ case 7. reference: example
+ case 8. collapsed link: example
+ case 9. shortcut link: example
+ case 10. inline without URL: example
+ case 11. reference: foo
+ case 12. reference: foo
+ case 13. collapsed link: foo
+ case 14. shortcut link: foo
+ case 15. inline without URL: foo
+ case 16. just escaped text: \[foo\]
+ case 17. inline link: Foo
+
+ [^example]: https://www.example.com/
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_macro_generated_struct_fn_doc_comment() {
+ cov_mark::check!(hover_macro_generated_struct_fn_doc_comment);
+
+ check(
+ r#"
+macro_rules! bar {
+ () => {
+ struct Bar;
+ impl Bar {
+ /// Do the foo
+ fn foo(&self) {}
+ }
+ }
+}
+
+bar!();
+
+fn foo() { let bar = Bar; bar.fo$0o(); }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test::Bar
+ ```
+
+ ```rust
+ fn foo(&self)
+ ```
+
+ ---
+
+ Do the foo
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_macro_generated_struct_fn_doc_attr() {
+ cov_mark::check!(hover_macro_generated_struct_fn_doc_attr);
+
+ check(
+ r#"
+macro_rules! bar {
+ () => {
+ struct Bar;
+ impl Bar {
+ #[doc = "Do the foo"]
+ fn foo(&self) {}
+ }
+ }
+}
+
+bar!();
+
+fn foo() { let bar = Bar; bar.fo$0o(); }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test::Bar
+ ```
+
+ ```rust
+ fn foo(&self)
+ ```
+
+ ---
+
+ Do the foo
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_variadic_function() {
+ check(
+ r#"
+extern "C" {
+ pub fn foo(bar: i32, ...) -> i32;
+}
+
+fn main() { let foo_test = unsafe { fo$0o(1, 2, 3); } }
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ pub unsafe fn foo(bar: i32, ...) -> i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_trait_has_impl_action() {
+ check_actions(
+ r#"trait foo$0() {}"#,
+ expect![[r#"
+ [
+ Implementation(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 6,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_struct_has_impl_action() {
+ check_actions(
+ r"struct foo$0() {}",
+ expect![[r#"
+ [
+ Implementation(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 7,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_union_has_impl_action() {
+ check_actions(
+ r#"union foo$0() {}"#,
+ expect![[r#"
+ [
+ Implementation(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 6,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_enum_has_impl_action() {
+ check_actions(
+ r"enum foo$0() { A, B }",
+ expect![[r#"
+ [
+ Implementation(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 5,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_self_has_impl_action() {
+ check_actions(
+ r#"struct foo where Self$0:;"#,
+ expect![[r#"
+ [
+ Implementation(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 7,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_test_has_action() {
+ check_actions(
+ r#"
+#[test]
+fn foo_$0test() {}
+"#,
+ expect![[r#"
+ [
+ Reference(
+ FilePosition {
+ file_id: FileId(
+ 0,
+ ),
+ offset: 11,
+ },
+ ),
+ Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..24,
+ focus_range: 11..19,
+ name: "foo_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "foo_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_test_mod_has_action() {
+ check_actions(
+ r#"
+mod tests$0 {
+ #[test]
+ fn foo_test() {}
+}
+"#,
+ expect![[r#"
+ [
+ Runnable(
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..46,
+ focus_range: 4..9,
+ name: "tests",
+ kind: Module,
+ description: "mod tests",
+ },
+ kind: TestMod {
+ path: "tests",
+ },
+ cfg: None,
+ },
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_struct_has_goto_type_action() {
+ check_actions(
+ r#"
+struct S{ f1: u32 }
+
+fn main() { let s$0t = S{ f1:0 }; }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..19,
+ focus_range: 7..8,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_generic_struct_has_goto_type_actions() {
+ check_actions(
+ r#"
+struct Arg(u32);
+struct S<T>{ f1: T }
+
+fn main() { let s$0t = S{ f1:Arg(0) }; }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 17..37,
+ focus_range: 24..25,
+ name: "S",
+ kind: Struct,
+ description: "struct S<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::Arg",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..16,
+ focus_range: 7..10,
+ name: "Arg",
+ kind: Struct,
+ description: "struct Arg",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_generic_struct_has_flattened_goto_type_actions() {
+ check_actions(
+ r#"
+struct Arg(u32);
+struct S<T>{ f1: T }
+
+fn main() { let s$0t = S{ f1: S{ f1: Arg(0) } }; }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 17..37,
+ focus_range: 24..25,
+ name: "S",
+ kind: Struct,
+ description: "struct S<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::Arg",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..16,
+ focus_range: 7..10,
+ name: "Arg",
+ kind: Struct,
+ description: "struct Arg",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_tuple_has_goto_type_actions() {
+ check_actions(
+ r#"
+struct A(u32);
+struct B(u32);
+mod M {
+ pub struct C(u32);
+}
+
+fn main() { let s$0t = (A(1), B(2), M::C(3) ); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::A",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..14,
+ focus_range: 7..8,
+ name: "A",
+ kind: Struct,
+ description: "struct A",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::B",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 15..29,
+ focus_range: 22..23,
+ name: "B",
+ kind: Struct,
+ description: "struct B",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::M::C",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 42..60,
+ focus_range: 53..54,
+ name: "C",
+ kind: Struct,
+ description: "pub struct C",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_return_impl_trait_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+fn foo() -> impl Foo {}
+
+fn main() { let s$0t = foo(); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_generic_return_impl_trait_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo<T> {}
+struct S;
+fn foo() -> impl Foo<S> {}
+
+fn main() { let s$0t = foo(); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..15,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 16..25,
+ focus_range: 23..24,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_return_impl_traits_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+trait Bar {}
+fn foo() -> impl Foo + Bar {}
+
+fn main() { let s$0t = foo(); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::Bar",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 13..25,
+ focus_range: 19..22,
+ name: "Bar",
+ kind: Trait,
+ description: "trait Bar",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_generic_return_impl_traits_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo<T> {}
+trait Bar<T> {}
+struct S1 {}
+struct S2 {}
+
+fn foo() -> impl Foo<S1> + Bar<S2> {}
+
+fn main() { let s$0t = foo(); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..15,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::Bar",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 16..31,
+ focus_range: 22..25,
+ name: "Bar",
+ kind: Trait,
+ description: "trait Bar<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S1",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 32..44,
+ focus_range: 39..41,
+ name: "S1",
+ kind: Struct,
+ description: "struct S1",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S2",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 45..57,
+ focus_range: 52..54,
+ name: "S2",
+ kind: Struct,
+ description: "struct S2",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_arg_impl_trait_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+fn foo(ar$0g: &impl Foo) {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_arg_impl_traits_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+trait Bar<T> {}
+struct S{}
+
+fn foo(ar$0g: &impl Foo + Bar<S>) {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::Bar",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 13..28,
+ focus_range: 19..22,
+ name: "Bar",
+ kind: Trait,
+ description: "trait Bar<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 29..39,
+ focus_range: 36..37,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_async_block_impl_trait_has_goto_type_action() {
+ check_actions(
+ r#"
+//- /main.rs crate:main deps:core
+// we don't use minicore here so that this test doesn't randomly fail
+// when someone edits minicore
+struct S;
+fn foo() {
+ let fo$0o = async { S };
+}
+//- /core.rs crate:core
+pub mod future {
+ #[lang = "future_trait"]
+ pub trait Future {}
+}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "core::future::Future",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 1,
+ ),
+ full_range: 21..69,
+ focus_range: 60..66,
+ name: "Future",
+ kind: Trait,
+ description: "pub trait Future",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "main::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..110,
+ focus_range: 108..109,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_arg_generic_impl_trait_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo<T> {}
+struct S {}
+fn foo(ar$0g: &impl Foo<S>) {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..15,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 16..27,
+ focus_range: 23..24,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_dyn_return_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+struct S;
+impl Foo for S {}
+
+struct B<T>{}
+fn foo() -> B<dyn Foo> {}
+
+fn main() { let s$0t = foo(); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::B",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 42..55,
+ focus_range: 49..50,
+ name: "B",
+ kind: Struct,
+ description: "struct B<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_dyn_arg_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+fn foo(ar$0g: &dyn Foo) {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_generic_dyn_arg_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo<T> {}
+struct S {}
+fn foo(ar$0g: &dyn Foo<S>) {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..15,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 16..27,
+ focus_range: 23..24,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_goto_type_action_links_order() {
+ check_actions(
+ r#"
+trait ImplTrait<T> {}
+trait DynTrait<T> {}
+struct B<T> {}
+struct S {}
+
+fn foo(a$0rg: &impl ImplTrait<B<dyn DynTrait<B<S>>>>) {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::ImplTrait",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..21,
+ focus_range: 6..15,
+ name: "ImplTrait",
+ kind: Trait,
+ description: "trait ImplTrait<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::B",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 43..57,
+ focus_range: 50..51,
+ name: "B",
+ kind: Struct,
+ description: "struct B<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::DynTrait",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 22..42,
+ focus_range: 28..36,
+ name: "DynTrait",
+ kind: Trait,
+ description: "trait DynTrait<T>",
+ },
+ },
+ HoverGotoTypeData {
+ mod_path: "test::S",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 58..69,
+ focus_range: 65..66,
+ name: "S",
+ kind: Struct,
+ description: "struct S",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_associated_type_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {
+ type Item;
+ fn get(self) -> Self::Item {}
+}
+
+struct Bar{}
+struct S{}
+
+impl Foo for S { type Item = Bar; }
+
+fn test() -> impl Foo { S {} }
+
+fn main() { let s$0t = test().get(); }
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..62,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_const_param_has_goto_type_action() {
+ check_actions(
+ r#"
+struct Bar;
+struct Foo<const BAR: Bar>;
+
+impl<const BAR: Bar> Foo<BAR$0> {}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Bar",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..11,
+ focus_range: 7..10,
+ name: "Bar",
+ kind: Struct,
+ description: "struct Bar",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_type_param_has_goto_type_action() {
+ check_actions(
+ r#"
+trait Foo {}
+
+fn foo<T: Foo>(t: T$0){}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..12,
+ focus_range: 6..9,
+ name: "Foo",
+ kind: Trait,
+ description: "trait Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_self_has_go_to_type() {
+ check_actions(
+ r#"
+struct Foo;
+impl Foo {
+ fn foo(&self$0) {}
+}
+"#,
+ expect![[r#"
+ [
+ GoToType(
+ [
+ HoverGotoTypeData {
+ mod_path: "test::Foo",
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..11,
+ focus_range: 7..10,
+ name: "Foo",
+ kind: Struct,
+ description: "struct Foo",
+ },
+ },
+ ],
+ ),
+ ]
+ "#]],
+ );
+}
+
+#[test]
+fn hover_displays_normalized_crate_names() {
+ check(
+ r#"
+//- /lib.rs crate:name-with-dashes
+pub mod wrapper {
+ pub struct Thing { x: u32 }
+
+ impl Thing {
+ pub fn new() -> Thing { Thing { x: 0 } }
+ }
+}
+
+//- /main.rs crate:main deps:name-with-dashes
+fn main() { let foo_test = name_with_dashes::wrapper::Thing::new$0(); }
+"#,
+ expect![[r#"
+ *new*
+
+ ```rust
+ name_with_dashes::wrapper::Thing
+ ```
+
+ ```rust
+ pub fn new() -> Thing
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_field_pat_shorthand_ref_match_ergonomics() {
+ check(
+ r#"
+struct S {
+ f: i32,
+}
+
+fn main() {
+ let s = S { f: 0 };
+ let S { f$0 } = &s;
+}
+"#,
+ expect![[r#"
+ *f*
+
+ ```rust
+ f: &i32
+ ```
+ ---
+
+ ```rust
+ test::S
+ ```
+
+ ```rust
+ f: i32
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn const_generic_order() {
+ check(
+ r#"
+struct Foo;
+struct S$0T<const C: usize = 1, T = Foo>(T);
+"#,
+ expect![[r#"
+ *ST*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ struct ST<const C: usize, T = Foo>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn const_generic_positive_i8_literal() {
+ check(
+ r#"
+struct Const<const N: i8>;
+
+fn main() {
+ let v$0alue = Const::<1>;
+}
+"#,
+ expect![[r#"
+ *value*
+
+ ```rust
+ let value: Const<1>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn const_generic_zero_i8_literal() {
+ check(
+ r#"
+struct Const<const N: i8>;
+
+fn main() {
+ let v$0alue = Const::<0>;
+}
+"#,
+ expect![[r#"
+ *value*
+
+ ```rust
+ let value: Const<0>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn const_generic_negative_i8_literal() {
+ check(
+ r#"
+struct Const<const N: i8>;
+
+fn main() {
+ let v$0alue = Const::<-1>;
+}
+"#,
+ expect![[r#"
+ *value*
+
+ ```rust
+ let value: Const<-1>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn const_generic_bool_literal() {
+ check(
+ r#"
+struct Const<const F: bool>;
+
+fn main() {
+ let v$0alue = Const::<true>;
+}
+"#,
+ expect![[r#"
+ *value*
+
+ ```rust
+ let value: Const<true>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn const_generic_char_literal() {
+ check(
+ r#"
+struct Const<const C: char>;
+
+fn main() {
+ let v$0alue = Const::<'🦀'>;
+}
+"#,
+ expect![[r#"
+ *value*
+
+ ```rust
+ let value: Const<'🦀'>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_self_param_shows_type() {
+ check(
+ r#"
+struct Foo {}
+impl Foo {
+ fn bar(&sel$0f) {}
+}
+"#,
+ expect![[r#"
+ *self*
+
+ ```rust
+ self: &Foo
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_self_param_shows_type_for_arbitrary_self_type() {
+ check(
+ r#"
+struct Arc<T>(T);
+struct Foo {}
+impl Foo {
+ fn bar(sel$0f: Arc<Foo>) {}
+}
+"#,
+ expect![[r#"
+ *self*
+
+ ```rust
+ self: Arc<Foo>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_doc_outer_inner() {
+ check(
+ r#"
+/// Be quick;
+mod Foo$0 {
+ //! time is mana
+
+ /// This comment belongs to the function
+ fn foo() {}
+}
+"#,
+ expect![[r#"
+ *Foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ mod Foo
+ ```
+
+ ---
+
+ Be quick;
+ time is mana
+ "#]],
+ );
+}
+
+// Outer `#[doc = ...]` on the module and inner `#![doc = ...]` inside it are
+// both collected into the hover docs, outer text first.
+// Renamed from `hover_doc_outer_inner_attribue`: fixed the "attribue" typo;
+// `#[test]` functions are only referenced by the test harness, so the rename
+// is safe.
+#[test]
+fn hover_doc_outer_inner_attribute() {
+    check(
+        r#"
+#[doc = "Be quick;"]
+mod Foo$0 {
+    #![doc = "time is mana"]
+
+    #[doc = "This comment belongs to the function"]
+    fn foo() {}
+}
+"#,
+        expect![[r#"
+            *Foo*
+
+            ```rust
+            test
+            ```
+
+            ```rust
+            mod Foo
+            ```
+
+            ---
+
+            Be quick;
+            time is mana
+        "#]],
+    );
+}
+
+// Block-style doc comments (`/** ... */`) get their common indentation
+// stripped, so the fenced code block inside survives into the hover markdown.
+// Renamed from `hover_doc_block_style_indentend`: fixed the "indentend" typo;
+// test names are harness-only, so the rename is safe.
+#[test]
+fn hover_doc_block_style_indented() {
+    check(
+        r#"
+/**
+    foo
+    ```rust
+    let x = 3;
+    ```
+*/
+fn foo$0() {}
+"#,
+        expect![[r#"
+            *foo*
+
+            ```rust
+            test
+            ```
+
+            ```rust
+            fn foo()
+            ```
+
+            ---
+
+            foo
+
+            ```rust
+            let x = 3;
+            ```
+        "#]],
+    );
+}
+
+#[test]
+fn hover_comments_dont_highlight_parent() {
+    // Hovering inside a plain `//` comment must produce no hover result at
+    // all; the cov_mark additionally asserts that the code path which would
+    // fall back to the enclosing item is the one that bails out.
+    cov_mark::check!(no_highlight_on_comment_hover);
+    check_hover_no_result(
+        r#"
+fn no_hover() {
+    // no$0hover
+}
+"#,
+    );
+}
+
+#[test]
+fn hover_label() {
+    // Hovering a loop label shows just the label itself — labels carry no
+    // type information or docs, so the snapshot is a single code block.
+    check(
+        r#"
+fn foo() {
+    'label$0: loop {}
+}
+"#,
+        expect![[r#"
+            *'label*
+
+            ```rust
+            'label
+            ```
+        "#]],
+    );
+}
+
+#[test]
+fn hover_lifetime() {
+ check(
+ r#"fn foo<'lifetime>(_: &'lifetime$0 ()) {}"#,
+ expect![[r#"
+ *'lifetime*
+
+ ```rust
+ 'lifetime
+ ```
+ "#]],
+ );
+}
+
+// Hover on a type parameter shows its name plus any trait bounds collected
+// from the parameter list and `where` clauses.
+#[test]
+fn hover_type_param() {
+    check(
+        r#"
+//- minicore: sized
+struct Foo<T>(T);
+trait TraitA {}
+trait TraitB {}
+impl<T: TraitA + TraitB> Foo<T$0> where T: Sized {}
+"#,
+        expect![[r#"
+            *T*
+
+            ```rust
+            T: TraitA + TraitB
+            ```
+        "#]],
+    );
+    check(
+        r#"
+//- minicore: sized
+struct Foo<T>(T);
+impl<T> Foo<T$0> {}
+"#,
+        expect![[r#"
+            *T*
+
+            ```rust
+            T
+            ```
+        "#]],
+    );
+    // Lifetime bounds aren't being tracked yet, so `T: 'static` still
+    // renders as a plain `T` with no bound list.
+    check(
+        r#"
+//- minicore: sized
+struct Foo<T>(T);
+impl<T: 'static> Foo<T$0> {}
+"#,
+        expect![[r#"
+            *T*
+
+            ```rust
+            T
+            ```
+        "#]],
+    );
+}
+
+#[test]
+fn hover_type_param_sized_bounds() {
+ // implicit `: Sized` bound
+ check(
+ r#"
+//- minicore: sized
+trait Trait {}
+struct Foo<T>(T);
+impl<T: Trait> Foo<T$0> {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: Trait
+ ```
+ "#]],
+ );
+ check(
+ r#"
+//- minicore: sized
+trait Trait {}
+struct Foo<T>(T);
+impl<T: Trait + ?Sized> Foo<T$0> {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: Trait + ?Sized
+ ```
+ "#]],
+ );
+}
+
+mod type_param_sized_bounds {
+ use super::*;
+
+ #[test]
+ fn single_implicit() {
+ check(
+ r#"
+//- minicore: sized
+fn foo<T$0>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T
+ ```
+ "#]],
+ );
+ }
+
+ #[test]
+ fn single_explicit() {
+ check(
+ r#"
+//- minicore: sized
+fn foo<T$0: Sized>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T
+ ```
+ "#]],
+ );
+ }
+
+ #[test]
+ fn single_relaxed() {
+ check(
+ r#"
+//- minicore: sized
+fn foo<T$0: ?Sized>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: ?Sized
+ ```
+ "#]],
+ );
+ }
+
+ #[test]
+ fn multiple_implicit() {
+ check(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn foo<T$0: Trait>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: Trait
+ ```
+ "#]],
+ );
+ }
+
+ #[test]
+ fn multiple_explicit() {
+ check(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn foo<T$0: Trait + Sized>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: Trait
+ ```
+ "#]],
+ );
+ }
+
+ #[test]
+ fn multiple_relaxed() {
+ check(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn foo<T$0: Trait + ?Sized>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: Trait + ?Sized
+ ```
+ "#]],
+ );
+ }
+
+ #[test]
+ fn mixed() {
+ check(
+ r#"
+//- minicore: sized
+fn foo<T$0: ?Sized + Sized + Sized>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T
+ ```
+ "#]],
+ );
+ check(
+ r#"
+//- minicore: sized
+trait Trait {}
+fn foo<T$0: Sized + ?Sized + Sized + Trait>() {}
+"#,
+ expect![[r#"
+ *T*
+
+ ```rust
+ T: Trait
+ ```
+ "#]],
+ );
+ }
+}
+
+#[test]
+fn hover_const_generic_type_alias() {
+ check(
+ r#"
+struct Foo<const LEN: usize>;
+type Fo$0o2 = Foo<2>;
+"#,
+ expect![[r#"
+ *Foo2*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type Foo2 = Foo<2>
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_const_param() {
+ check(
+ r#"
+struct Foo<const LEN: usize>;
+impl<const LEN: usize> Foo<LEN$0> {}
+"#,
+ expect![[r#"
+ *LEN*
+
+ ```rust
+ const LEN: usize
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_const_eval() {
+ // show hex for <10
+ check(
+ r#"
+/// This is a doc
+const FOO$0: usize = 1 << 3;
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: usize = 8
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show hex for >10
+ check(
+ r#"
+/// This is a doc
+const FOO$0: usize = (1 << 3) + (1 << 2);
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: usize = 12 (0xC)
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show original body when const eval fails
+ check(
+ r#"
+/// This is a doc
+const FOO$0: usize = 2 - 3;
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: usize = 2 - 3
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // don't show hex for negatives
+ check(
+ r#"
+/// This is a doc
+const FOO$0: i32 = 2 - 3;
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: i32 = -1
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ check(
+ r#"
+/// This is a doc
+const FOO$0: &str = "bar";
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: &str = "bar"
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show char literal
+ check(
+ r#"
+/// This is a doc
+const FOO$0: char = 'a';
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: char = 'a'
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show escaped char literal
+ check(
+ r#"
+/// This is a doc
+const FOO$0: char = '\x61';
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: char = 'a'
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show byte literal
+ check(
+ r#"
+/// This is a doc
+const FOO$0: u8 = b'a';
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: u8 = 97 (0x61)
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show escaped byte literal
+ check(
+ r#"
+/// This is a doc
+const FOO$0: u8 = b'\x61';
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: u8 = 97 (0x61)
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ // show float literal
+ check(
+ r#"
+ /// This is a doc
+ const FOO$0: f64 = 1.0234;
+ "#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: f64 = 1.0234
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ //show float typecasted from int
+ check(
+ r#"
+/// This is a doc
+const FOO$0: f32 = 1f32;
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: f32 = 1.0
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+ //show f64 typecasted from float
+ check(
+ r#"
+/// This is a doc
+const FOO$0: f64 = 1.0f64;
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: f64 = 1.0
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+}
+
+#[test]
+fn hover_const_pat() {
+ check(
+ r#"
+/// This is a doc
+const FOO: usize = 3;
+fn foo() {
+ match 5 {
+ FOO$0 => (),
+ _ => ()
+ }
+}
+"#,
+ expect![[r#"
+ *FOO*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ const FOO: usize = 3
+ ```
+
+ ---
+
+ This is a doc
+ "#]],
+ );
+}
+
+#[test]
+fn array_repeat_exp() {
+ check(
+ r#"
+fn main() {
+ let til$0e4 = [0_u32; (4 * 8 * 8) / 32];
+}
+ "#,
+ expect![[r#"
+ *tile4*
+
+ ```rust
+ let tile4: [u32; 8]
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_mod_def() {
+ check(
+ r#"
+//- /main.rs
+mod foo$0;
+//- /foo.rs
+//! For the horde!
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ mod foo
+ ```
+
+ ---
+
+ For the horde!
+ "#]],
+ );
+}
+
+#[test]
+fn hover_self_in_use() {
+ check(
+ r#"
+//! This should not appear
+mod foo {
+ /// But this should appear
+ pub mod bar {}
+}
+use foo::bar::{self$0};
+"#,
+ expect![[r#"
+ *self*
+
+ ```rust
+ test::foo
+ ```
+
+ ```rust
+ mod bar
+ ```
+
+ ---
+
+ But this should appear
+ "#]],
+ )
+}
+
+#[test]
+fn hover_keyword() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+fn f() { retur$0n; }
+//- /libstd.rs crate:std
+/// Docs for return_keyword
+mod return_keyword {}
+"#,
+ expect![[r#"
+ *return*
+
+ ```rust
+ return
+ ```
+
+ ---
+
+ Docs for return_keyword
+ "#]],
+ );
+}
+
+#[test]
+fn hover_keyword_doc() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+fn foo() {
+ let bar = mov$0e || {};
+}
+//- /libstd.rs crate:std
+#[doc(keyword = "move")]
+/// [closure]
+/// [closures][closure]
+/// [threads]
+/// <https://doc.rust-lang.org/nightly/book/ch13-01-closures.html>
+///
+/// [closure]: ../book/ch13-01-closures.html
+/// [threads]: ../book/ch16-01-threads.html#using-move-closures-with-threads
+mod move_keyword {}
+"#,
+ expect![[r##"
+ *move*
+
+ ```rust
+ move
+ ```
+
+ ---
+
+ [closure](https://doc.rust-lang.org/nightly/book/ch13-01-closures.html)
+ [closures](https://doc.rust-lang.org/nightly/book/ch13-01-closures.html)
+ [threads](https://doc.rust-lang.org/nightly/book/ch16-01-threads.html#using-move-closures-with-threads)
+ <https://doc.rust-lang.org/nightly/book/ch13-01-closures.html>
+ "##]],
+ );
+}
+
+#[test]
+fn hover_keyword_as_primitive() {
+ check(
+ r#"
+//- /main.rs crate:main deps:std
+type F = f$0n(i32) -> i32;
+//- /libstd.rs crate:std
+/// Docs for prim_fn
+mod prim_fn {}
+"#,
+ expect![[r#"
+ *fn*
+
+ ```rust
+ fn
+ ```
+
+ ---
+
+ Docs for prim_fn
+ "#]],
+ );
+}
+
+#[test]
+fn hover_builtin() {
+    // Hovering the builtin primitive `str` resolves its docs from the std
+    // `prim_str` module and rewrites relative doc links to doc.rust-lang.org.
+    // Fixed fixture: `cosnt` (typo for `const`) and a stray trailing `}`.
+    // The snapshot carries no source offsets, so the expected markdown is
+    // unchanged by the fixture correction.
+    check(
+        r#"
+//- /main.rs crate:main deps:std
+const _: &str$0 = "";
+
+//- /libstd.rs crate:std
+/// Docs for prim_str
+/// [`foo`](../std/keyword.foo.html)
+mod prim_str {}
+"#,
+        expect![[r#"
+            *str*
+
+            ```rust
+            str
+            ```
+
+            ---
+
+            Docs for prim_str
+            [`foo`](https://doc.rust-lang.org/nightly/std/keyword.foo.html)
+        "#]],
+    );
+}
+
+#[test]
+fn hover_macro_expanded_function() {
+ check(
+ r#"
+struct S<'a, T>(&'a T);
+trait Clone {}
+macro_rules! foo {
+ () => {
+ fn bar<'t, T: Clone + 't>(s: &mut S<'t, T>, t: u32) -> *mut u32 where
+ 't: 't + 't,
+ for<'a> T: Clone + 'a
+ { 0 as _ }
+ };
+}
+
+foo!();
+
+fn main() {
+ bar$0;
+}
+"#,
+ expect![[r#"
+ *bar*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ fn bar<'t, T>(s: &mut S<'t, T>, t: u32) -> *mut u32
+ where
+ T: Clone + 't,
+ 't: 't + 't,
+ for<'a> T: Clone + 'a,
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_intra_doc_links() {
+ check(
+ r#"
+
+pub mod theitem {
+ /// This is the item. Cool!
+ pub struct TheItem;
+}
+
+/// Gives you a [`TheItem$0`].
+///
+/// [`TheItem`]: theitem::TheItem
+pub fn gimme() -> theitem::TheItem {
+ theitem::TheItem
+}
+"#,
+ expect![[r#"
+ *[`TheItem`]*
+
+ ```rust
+ test::theitem
+ ```
+
+ ```rust
+ pub struct TheItem
+ ```
+
+ ---
+
+ This is the item. Cool!
+ "#]],
+ );
+}
+
+#[test]
+fn test_hover_trait_assoc_typealias() {
+ check(
+ r#"
+ fn main() {}
+
+trait T1 {
+ type Bar;
+ type Baz;
+}
+
+struct Foo;
+
+mod t2 {
+ pub trait T2 {
+ type Bar;
+ }
+}
+
+use t2::T2;
+
+impl T2 for Foo {
+ type Bar = String;
+}
+
+impl T1 for Foo {
+ type Bar = <Foo as t2::T2>::Ba$0r;
+ // ^^^ unresolvedReference
+}
+ "#,
+ expect![[r#"
+*Bar*
+
+```rust
+test::t2
+```
+
+```rust
+pub type Bar
+```
+"#]],
+ );
+}
+#[test]
+fn hover_generic_assoc() {
+ check(
+ r#"
+fn foo<T: A>() where T::Assoc$0: {}
+
+trait A {
+ type Assoc;
+}"#,
+ expect![[r#"
+ *Assoc*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type Assoc
+ ```
+ "#]],
+ );
+ check(
+ r#"
+fn foo<T: A>() {
+ let _: <T>::Assoc$0;
+}
+
+trait A {
+ type Assoc;
+}"#,
+ expect![[r#"
+ *Assoc*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type Assoc
+ ```
+ "#]],
+ );
+ check(
+ r#"
+trait A where
+ Self::Assoc$0: ,
+{
+ type Assoc;
+}"#,
+ expect![[r#"
+ *Assoc*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ type Assoc
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn string_shadowed_with_inner_items() {
+ check(
+ r#"
+//- /main.rs crate:main deps:alloc
+
+/// Custom `String` type.
+struct String;
+
+fn f() {
+ let _: String$0;
+
+ fn inner() {}
+}
+
+//- /alloc.rs crate:alloc
+#[prelude_import]
+pub use string::*;
+
+mod string {
+ /// This is `alloc::String`.
+ pub struct String;
+}
+"#,
+ expect![[r#"
+ *String*
+
+ ```rust
+ main
+ ```
+
+ ```rust
+ struct String
+ ```
+
+ ---
+
+ Custom `String` type.
+ "#]],
+ )
+}
+
+#[test]
+fn function_doesnt_shadow_crate_in_use_tree() {
+ check(
+ r#"
+//- /main.rs crate:main deps:foo
+use foo$0::{foo};
+
+//- /foo.rs crate:foo
+pub fn foo() {}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ extern crate foo
+ ```
+ "#]],
+ )
+}
+
+#[test]
+fn hover_feature() {
+ check(
+ r#"#![feature(box_syntax$0)]"#,
+ expect![[r##"
+ *box_syntax*
+ ```
+ box_syntax
+ ```
+ ___
+
+ # `box_syntax`
+
+ The tracking issue for this feature is: [#49733]
+
+ [#49733]: https://github.com/rust-lang/rust/issues/49733
+
+ See also [`box_patterns`](box-patterns.md)
+
+ ------------------------
+
+ Currently the only stable way to create a `Box` is via the `Box::new` method.
+ Also it is not possible in stable Rust to destructure a `Box` in a match
+ pattern. The unstable `box` keyword can be used to create a `Box`. An example
+ usage would be:
+
+ ```rust
+ #![feature(box_syntax)]
+
+ fn main() {
+ let b = box 5;
+ }
+ ```
+
+ "##]],
+ )
+}
+
+#[test]
+fn hover_lint() {
+ check(
+ r#"#![allow(arithmetic_overflow$0)]"#,
+ expect![[r#"
+ *arithmetic_overflow*
+ ```
+ arithmetic_overflow
+ ```
+ ___
+
+ arithmetic operation overflows
+ "#]],
+ )
+}
+
+#[test]
+fn hover_clippy_lint() {
+ check(
+ r#"#![allow(clippy::almost_swapped$0)]"#,
+ expect![[r#"
+ *almost_swapped*
+ ```
+ clippy::almost_swapped
+ ```
+ ___
+
+ Checks for `foo = bar; bar = foo` sequences.
+ "#]],
+ )
+}
+
+#[test]
+fn hover_attr_path_qualifier() {
+    // The path qualifier `fo$0o` in `#[foo::bar()]` must resolve to the
+    // extern crate even though the attribute macro itself doesn't exist.
+    // Fixed fixture: the crate was declared as `crate:main.rs` — the `.rs`
+    // extension had leaked into the crate name; use `main`. The hover target
+    // is the `foo` crate, so the expected snapshot is unaffected.
+    check(
+        r#"
+//- /foo.rs crate:foo
+
+//- /lib.rs crate:main deps:foo
+#[fo$0o::bar()]
+struct Foo;
+"#,
+        expect![[r#"
+            *foo*
+
+            ```rust
+            extern crate foo
+            ```
+        "#]],
+    )
+}
+
+#[test]
+fn hover_rename() {
+ check(
+ r#"
+use self as foo$0;
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ extern crate test
+ ```
+ "#]],
+ );
+ check(
+ r#"
+mod bar {}
+use bar::{self as foo$0};
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ mod bar
+ ```
+ "#]],
+ );
+ check(
+ r#"
+mod bar {
+ use super as foo$0;
+}
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ extern crate test
+ ```
+ "#]],
+ );
+ check(
+ r#"
+use crate as foo$0;
+"#,
+ expect![[r#"
+ *foo*
+
+ ```rust
+ extern crate test
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_attribute_in_macro() {
+ check(
+ r#"
+//- minicore:derive
+macro_rules! identity {
+ ($struct:item) => {
+ $struct
+ };
+}
+#[rustc_builtin_macro]
+pub macro Copy {}
+identity!{
+ #[derive(Copy$0)]
+ struct Foo;
+}
+"#,
+ expect![[r#"
+ *Copy*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ macro Copy
+ ```
+ "#]],
+ );
+}
+
+#[test]
+fn hover_derive_input() {
+ check(
+ r#"
+//- minicore:derive
+#[rustc_builtin_macro]
+pub macro Copy {}
+#[derive(Copy$0)]
+struct Foo;
+"#,
+ expect![[r#"
+ *Copy*
+
+ ```rust
+ test
+ ```
+
+ ```rust
+ macro Copy
+ ```
+ "#]],
+ );
+ check(
+ r#"
+//- minicore:derive
+mod foo {
+ #[rustc_builtin_macro]
+ pub macro Copy {}
+}
+#[derive(foo::Copy$0)]
+struct Foo;
+"#,
+ expect![[r#"
+ *Copy*
+
+ ```rust
+ test::foo
+ ```
+
+ ```rust
+ macro Copy
+ ```
+ "#]],
+ );
+}
+
+// Ranged hover ($0...$0 selection) over arithmetic sub-expressions reports `i32`.
+#[test]
+fn hover_range_math() {
+    check_hover_range(
+        r#"
+fn f() { let expr = $01 + 2 * 3$0 }
+"#,
+        expect![[r#"
+            ```rust
+            i32
+            ```"#]],
+    );
+
+    check_hover_range(
+        r#"
+fn f() { let expr = 1 $0+ 2 * $03 }
+"#,
+        expect![[r#"
+            ```rust
+            i32
+            ```"#]],
+    );
+
+    check_hover_range(
+        r#"
+fn f() { let expr = 1 + $02 * 3$0 }
+"#,
+        expect![[r#"
+            ```rust
+            i32
+            ```"#]],
+    );
+}
+
+// Ranged hover over an array literal shows the array type; over one element,
+// the element type.
+#[test]
+fn hover_range_arrays() {
+    check_hover_range(
+        r#"
+fn f() { let expr = $0[1, 2, 3, 4]$0 }
+"#,
+        expect![[r#"
+            ```rust
+            [i32; 4]
+            ```"#]],
+    );
+
+    check_hover_range(
+        r#"
+fn f() { let expr = [1, 2, $03, 4]$0 }
+"#,
+        expect![[r#"
+            ```rust
+            [i32; 4]
+            ```"#]],
+    );
+
+    check_hover_range(
+        r#"
+fn f() { let expr = [1, 2, $03$0, 4] }
+"#,
+        expect![[r#"
+            ```rust
+            i32
+            ```"#]],
+    );
+}
+
+// Ranged hover over a callee shows the monomorphized signature; over an
+// argument, its type.
+#[test]
+fn hover_range_functions() {
+    check_hover_range(
+        r#"
+fn f<T>(a: &[T]) { }
+fn b() { $0f$0(&[1, 2, 3, 4, 5]); }
+"#,
+        expect![[r#"
+            ```rust
+            fn f<i32>(&[i32])
+            ```"#]],
+    );
+
+    check_hover_range(
+        r#"
+fn f<T>(a: &[T]) { }
+fn b() { f($0&[1, 2, 3, 4, 5]$0); }
+"#,
+        expect![[r#"
+            ```rust
+            &[i32; 5]
+            ```"#]],
+    );
+}
+
+// Selections that do not cover a complete expression produce no hover at all.
+#[test]
+fn hover_range_shows_nothing_when_invalid() {
+    check_hover_range_no_results(
+        r#"
+fn f<T>(a: &[T]) { }
+fn b()$0 { f(&[1, 2, 3, 4, 5]); }$0
+"#,
+    );
+
+    check_hover_range_no_results(
+        r#"
+fn f<T>$0(a: &[T]) { }
+fn b() { f(&[1, 2, 3,$0 4, 5]); }
+"#,
+    );
+
+    check_hover_range_no_results(
+        r#"
+fn $0f() { let expr = [1, 2, 3, 4]$0 }
+"#,
+    );
+}
+
+// Selecting a whole statement hovers as the unit type `()`.
+#[test]
+fn hover_range_shows_unit_for_statements() {
+    check_hover_range(
+        r#"
+fn f<T>(a: &[T]) { }
+fn b() { $0f(&[1, 2, 3, 4, 5]); }$0
+"#,
+        expect![[r#"
+            ```rust
+            ()
+            ```"#]],
+    );
+
+    check_hover_range(
+        r#"
+fn f() { let expr$0 = $0[1, 2, 3, 4] }
+"#,
+        expect![[r#"
+            ```rust
+            ()
+            ```"#]],
+    );
+}
+
+// Selecting a binding pattern hovers as the binding's inferred type.
+#[test]
+fn hover_range_for_pat() {
+    check_hover_range(
+        r#"
+fn foo() {
+    let $0x$0 = 0;
+}
+"#,
+        expect![[r#"
+            ```rust
+            i32
+            ```"#]],
+    );
+
+    check_hover_range(
+        r#"
+fn foo() {
+    let $0x$0 = "";
+}
+"#,
+        expect![[r#"
+            ```rust
+            &str
+            ```"#]],
+    );
+}
+
+// When a coercion applies, ranged hover shows both the expression's own type
+// and the coerced-to type as a plain-text block.
+#[test]
+fn hover_range_shows_coercions_if_applicable_expr() {
+    check_hover_range(
+        r#"
+fn foo() {
+    let x: &u32 = $0&&&&&0$0;
+}
+"#,
+        expect![[r#"
+            ```text
+            Type: &&&&&u32
+            Coerced to: &u32
+            ```
+        "#]],
+    );
+    check_hover_range(
+        r#"
+fn foo() {
+    let x: *const u32 = $0&0$0;
+}
+"#,
+        expect![[r#"
+            ```text
+            Type: &u32
+            Coerced to: *const u32
+            ```
+        "#]],
+    );
+}
+
+// Ranged hover also offers the go-to-type action for the selection's type.
+#[test]
+fn hover_range_shows_type_actions() {
+    check_actions(
+        r#"
+struct Foo;
+fn foo() {
+    let x: &Foo = $0&&&&&Foo$0;
+}
+"#,
+        expect![[r#"
+            [
+                GoToType(
+                    [
+                        HoverGotoTypeData {
+                            mod_path: "test::Foo",
+                            nav: NavigationTarget {
+                                file_id: FileId(
+                                    0,
+                                ),
+                                full_range: 0..11,
+                                focus_range: 7..10,
+                                name: "Foo",
+                                kind: Struct,
+                                description: "struct Foo",
+                            },
+                        },
+                    ],
+                ),
+            ]
+        "#]],
+    );
+}
+
+// `?` on a Result whose error type matches the enclosing function just shows
+// the success type; mismatched error types show the try-error / propagated pair.
+#[test]
+fn hover_try_expr_res() {
+    check_hover_range(
+        r#"
+//- minicore:result
+struct FooError;
+
+fn foo() -> Result<(), FooError> {
+    Ok($0Result::<(), FooError>::Ok(())?$0)
+}
+"#,
+        expect![[r#"
+            ```rust
+            ()
+            ```"#]],
+    );
+    check_hover_range(
+        r#"
+//- minicore:result
+struct FooError;
+struct BarError;
+
+fn foo() -> Result<(), FooError> {
+    Ok($0Result::<(), BarError>::Ok(())?$0)
+}
+"#,
+        expect![[r#"
+            ```text
+            Try Error Type: BarError
+            Propagated as: FooError
+            ```
+        "#]],
+    );
+}
+
+// `?` on a non-Result type shows the try target type and what it is
+// propagated as.
+#[test]
+fn hover_try_expr() {
+    check_hover_range(
+        r#"
+struct NotResult<T, U>(T, U);
+struct Short;
+struct Looooong;
+
+fn foo() -> NotResult<(), Looooong> {
+    $0NotResult((), Short)?$0;
+}
+"#,
+        expect![[r#"
+            ```text
+            Try Target Type: NotResult<(), Short>
+            Propagated as: NotResult<(), Looooong>
+            ```
+        "#]],
+    );
+    check_hover_range(
+        r#"
+struct NotResult<T, U>(T, U);
+struct Short;
+struct Looooong;
+
+fn foo() -> NotResult<(), Short> {
+    $0NotResult((), Looooong)?$0;
+}
+"#,
+        expect![[r#"
+            ```text
+            Try Target Type: NotResult<(), Looooong>
+            Propagated as: NotResult<(), Short>
+            ```
+        "#]],
+    );
+}
+
+// `?` on an Option renders via the `Try` associated type.
+#[test]
+fn hover_try_expr_option() {
+    cov_mark::check!(hover_try_expr_opt_opt);
+    check_hover_range(
+        r#"
+//- minicore: option, try
+
+fn foo() -> Option<()> {
+    $0Some(0)?$0;
+    None
+}
+"#,
+        expect![[r#"
+            ```rust
+            <Option<i32> as Try>::Output
+            ```"#]],
+    );
+}
+
+// Ranged hover over `*x` shows the source and target types of the
+// user-defined `Deref` impl.
+#[test]
+fn hover_deref_expr() {
+    check_hover_range(
+        r#"
+//- minicore: deref
+use core::ops::Deref;
+
+struct DerefExample<T> {
+    value: T
+}
+
+impl<T> Deref for DerefExample<T> {
+    type Target = T;
+
+    fn deref(&self) -> &Self::Target {
+        &self.value
+    }
+}
+
+fn foo() {
+    let x = DerefExample { value: 0 };
+    let y: i32 = $0*x$0;
+}
+"#,
+        expect![[r#"
+            ```text
+            Dereferenced from: DerefExample<i32>
+            To type: i32
+            ```
+        "#]],
+    );
+}
+
+// As above, but when the deref result is further coerced the hover also
+// shows the coerced-to line.
+#[test]
+fn hover_deref_expr_with_coercion() {
+    check_hover_range(
+        r#"
+//- minicore: deref
+use core::ops::Deref;
+
+struct DerefExample<T> {
+    value: T
+}
+
+impl<T> Deref for DerefExample<T> {
+    type Target = T;
+
+    fn deref(&self) -> &Self::Target {
+        &self.value
+    }
+}
+
+fn foo() {
+    let x = DerefExample { value: &&&&&0 };
+    let y: &i32 = $0*x$0;
+}
+"#,
+        expect![[r#"
+            ```text
+            Dereferenced from: DerefExample<&&&&&i32>
+            To type: &&&&&i32
+            Coerced to: &i32
+            ```
+        "#]],
+    );
+}
+
+// An intra-doc link in a doc comment passed through a macro still resolves
+// and is rewritten to a docs.rs URL in the hover.
+#[test]
+fn hover_intra_in_macro() {
+    check(
+        r#"
+macro_rules! foo_macro {
+    ($(#[$attr:meta])* $name:ident) => {
+        $(#[$attr])*
+        pub struct $name;
+    }
+}
+
+foo_macro!(
+    /// Doc comment for [`Foo$0`]
+    Foo
+);
+"#,
+        expect![[r#"
+            *[`Foo`]*
+
+            ```rust
+            test
+            ```
+
+            ```rust
+            pub struct Foo
+            ```
+
+            ---
+
+            Doc comment for [`Foo`](https://docs.rs/test/*/test/struct.Foo.html)
+        "#]],
+    );
+}
+
+// Same resolution for an intra-doc link inside a `#[doc = "..."]` attribute.
+#[test]
+fn hover_intra_in_attr() {
+    check(
+        r#"
+#[doc = "Doc comment for [`Foo$0`]"]
+pub struct Foo;
+"#,
+        expect![[r#"
+            *[`Foo`]*
+
+            ```rust
+            test
+            ```
+
+            ```rust
+            pub struct Foo
+            ```
+
+            ---
+
+            Doc comment for [`Foo`](https://docs.rs/test/*/test/struct.Foo.html)
+        "#]],
+    );
+}
+
+// Hover on built-in inert attributes (`doc`, `allow`) shows their valid forms.
+#[test]
+fn hover_inert_attr() {
+    check(
+        r#"
+#[doc$0 = ""]
+pub struct Foo;
+"#,
+        expect![[r##"
+            *doc*
+
+            ```rust
+            #[doc]
+            ```
+
+            ---
+
+            Valid forms are:
+
+            * \#\[doc(hidden|inline|...)\]
+            * \#\[doc = string\]
+        "##]],
+    );
+    check(
+        r#"
+#[allow$0()]
+pub struct Foo;
+"#,
+        expect![[r##"
+            *allow*
+
+            ```rust
+            #[allow]
+            ```
+
+            ---
+
+            Valid forms are:
+
+            * \#\[allow(lint1, lint2, ..., /\*opt\*/ reason = "...")\]
+        "##]],
+    );
+}
+
+// `$crate` in a cross-crate macro expansion is rendered with the defining
+// crate's real name in the type position (note: the initializer still shows
+// the raw `$crate::Type` text).
+#[test]
+fn hover_dollar_crate() {
+    // $crate should be resolved to the right crate name.
+
+    check(
+        r#"
+//- /main.rs crate:main deps:dep
+dep::m!(KONST$0);
+//- /dep.rs crate:dep
+#[macro_export]
+macro_rules! m {
+    ( $name:ident ) => { const $name: $crate::Type = $crate::Type; };
+}
+
+pub struct Type;
+"#,
+        expect![[r#"
+            *KONST*
+
+            ```rust
+            main
+            ```
+
+            ```rust
+            const KONST: dep::Type = $crate::Type
+            ```
+        "#]],
+    );
+}
+
+// Hover on a record-style enum variant shows the variant with its fields.
+#[test]
+fn hover_record_variant() {
+    check(
+        r#"
+enum Enum {
+    RecordV$0 { field: u32 }
+}
+"#,
+        expect![[r#"
+            *RecordV*
+
+            ```rust
+            test::Enum
+            ```
+
+            ```rust
+            RecordV { field: u32 }
+            ```
+        "#]],
+    );
+}
+
+// An undocumented impl item inherits the docs of the trait item it implements.
+#[test]
+fn hover_trait_impl_assoc_item_def_doc_forwarding() {
+    check(
+        r#"
+trait T {
+    /// Trait docs
+    fn func() {}
+}
+impl T for () {
+    fn func$0() {}
+}
+"#,
+        expect![[r#"
+            *func*
+
+            ```rust
+            test
+            ```
+
+            ```rust
+            fn func()
+            ```
+
+            ---
+
+            Trait docs
+        "#]],
+    );
+}
+
+// Ranged hover over a whole macro invocation shows the expansion's type.
+#[test]
+fn hover_ranged_macro_call() {
+    check_hover_range(
+        r#"
+macro_rules! __rust_force_expr {
+    ($e:expr) => {
+        $e
+    };
+}
+macro_rules! vec {
+    ($elem:expr) => {
+        __rust_force_expr!($elem)
+    };
+}
+
+struct Struct;
+impl Struct {
+    fn foo(self) {}
+}
+
+fn f() {
+    $0vec![Struct]$0;
+}
+"#,
+        expect![[r#"
+            ```rust
+            Struct
+            ```"#]],
+    );
+}
++
++// Hover on the `*` deref operator itself resolves to the `Deref::deref`
++// implementation being invoked (label is the literal `*` token).
++#[test]
++fn hover_deref() {
++    check(
++        r#"
++//- minicore: deref
++
++struct Struct(usize);
++
++impl core::ops::Deref for Struct {
++    type Target = usize;
++
++    fn deref(&self) -> &Self::Target {
++        &self.0
++    }
++}
++
++fn f() {
++    $0*Struct(0);
++}
++"#,
++        expect![[r#"
++            ***
++
++            ```rust
++            test::Struct
++            ```
++
++            ```rust
++            fn deref(&self) -> &Self::Target
++            ```
++        "#]],
++    );
++}
--- /dev/null
- // | VS Code | **Rust Analyzer: Toggle inlay hints*
+use either::Either;
+use hir::{known, Callable, HasVisibility, HirDisplay, Mutability, Semantics, TypeInfo};
+use ide_db::{
+ base_db::FileRange, famous_defs::FamousDefs, syntax_helpers::node_ext::walk_ty, FxHashMap,
+ RootDatabase,
+};
+use itertools::Itertools;
+use stdx::to_lower_snake_case;
+use syntax::{
+ ast::{self, AstNode, HasArgList, HasGenericParams, HasName, UnaryOp},
+ match_ast, Direction, NodeOrToken, SmolStr, SyntaxKind, SyntaxNode, SyntaxToken, TextRange,
+ TextSize, T,
+};
+
+use crate::FileId;
+
+/// Which inlay hints to compute and how to render them.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub struct InlayHintsConfig {
+    pub render_colons: bool,
+    pub type_hints: bool,
+    pub parameter_hints: bool,
+    pub chaining_hints: bool,
+    pub reborrow_hints: ReborrowHints,
+    pub closure_return_type_hints: ClosureReturnTypeHints,
+    pub binding_mode_hints: bool,
+    pub lifetime_elision_hints: LifetimeElisionHints,
+    // When true, elided lifetimes are named after the parameter they annotate
+    // (e.g. `'self`) instead of generated numeric names.
+    pub param_names_for_lifetime_elision_hints: bool,
+    pub hide_named_constructor_hints: bool,
+    pub hide_closure_initialization_hints: bool,
+    // Truncation limit passed to `display_truncated` when rendering types.
+    pub max_length: Option<usize>,
+    // Minimum number of lines a block must span to get a closing-brace hint;
+    // `None` disables closing-brace hints entirely.
+    pub closing_brace_hints_min_lines: Option<usize>,
+}
+
+/// When to show return-type hints on closures.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum ClosureReturnTypeHints {
+    Always,
+    WithBlock,
+    Never,
+}
+
+/// When to show hints for elided lifetimes.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum LifetimeElisionHints {
+    Always,
+    SkipTrivial,
+    Never,
+}
+
+/// When to show hints for compiler-inserted reborrows.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum ReborrowHints {
+    Always,
+    MutableOnly,
+    Never,
+}
+
+/// Category of an individual inlay hint; used by clients to style/filter hints.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum InlayKind {
+    BindingModeHint,
+    ChainingHint,
+    ClosingBraceHint,
+    ClosureReturnTypeHint,
+    GenericParamListHint,
+    ImplicitReborrowHint,
+    LifetimeHint,
+    ParameterHint,
+    TypeHint,
+}
+
+/// A single rendered hint: where it goes, what kind it is, its text, and an
+/// optional tooltip shown on hover.
+#[derive(Debug)]
+pub struct InlayHint {
+    pub range: TextRange,
+    pub kind: InlayKind,
+    pub label: String,
+    pub tooltip: Option<InlayTooltip>,
+}
+
+/// Tooltip payload for a hint: fixed text, or a request to compute hover
+/// info for a range/offset on demand.
+#[derive(Debug)]
+pub enum InlayTooltip {
+    String(String),
+    HoverRanged(FileId, TextRange),
+    HoverOffset(FileId, TextSize),
+}
+
+// Feature: Inlay Hints
+//
+// rust-analyzer shows additional information inline with the source code.
+// Editors usually render this using read-only virtual text snippets interspersed with code.
+//
+// rust-analyzer by default shows hints for
+//
+// * types of local variables
+// * names of function arguments
+// * types of chained expressions
+//
+// Optionally, one can enable additional hints for
+//
+// * return types of closure expressions
+// * elided lifetimes
+// * compiler inserted reborrows
+//
+// |===
+// | Editor | Action Name
+//
++// | VS Code | **rust-analyzer: Toggle inlay hints**
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113020660-b5f98b80-917a-11eb-8d70-3be3fd558cdd.png[]
+/// Entry point: collects all inlay hints for `file_id`, optionally restricted
+/// to the nodes overlapping `range_limit`.
+pub(crate) fn inlay_hints(
+    db: &RootDatabase,
+    file_id: FileId,
+    range_limit: Option<FileRange>,
+    config: &InlayHintsConfig,
+) -> Vec<InlayHint> {
+    let _p = profile::span("inlay_hints");
+    let sema = Semantics::new(db);
+    let file = sema.parse(file_id);
+    let file = file.syntax();
+
+    let mut acc = Vec::new();
+
+    if let Some(scope) = sema.scope(&file) {
+        let famous_defs = FamousDefs(&sema, scope.krate());
+
+        let hints = |node| hints(&mut acc, &famous_defs, config, file_id, node);
+        match range_limit {
+            Some(FileRange { range, .. }) => match file.covering_element(range) {
+                // A single token cannot contain any hint-producing node.
+                NodeOrToken::Token(_) => return acc,
+                NodeOrToken::Node(n) => n
+                    .descendants()
+                    // Only visit nodes that actually overlap the requested range.
+                    .filter(|descendant| range.intersect(descendant.text_range()).is_some())
+                    .for_each(hints),
+            },
+            None => file.descendants().for_each(hints),
+        };
+    }
+
+    acc
+}
+
+/// Dispatches one syntax node to every hint collector that applies to it.
+fn hints(
+    hints: &mut Vec<InlayHint>,
+    famous_defs @ FamousDefs(sema, _): &FamousDefs<'_, '_>,
+    config: &InlayHintsConfig,
+    file_id: FileId,
+    node: SyntaxNode,
+) {
+    // Closing-brace hints apply to several node kinds, so they are checked
+    // unconditionally before the kind-specific dispatch below.
+    closing_brace_hints(hints, sema, config, file_id, node.clone());
+    match_ast! {
+        match node {
+            ast::Expr(expr) => {
+                chaining_hints(hints, sema, &famous_defs, config, file_id, &expr);
+                match expr {
+                    ast::Expr::CallExpr(it) => param_name_hints(hints, sema, config, ast::Expr::from(it)),
+                    ast::Expr::MethodCallExpr(it) => {
+                        param_name_hints(hints, sema, config, ast::Expr::from(it))
+                    }
+                    ast::Expr::ClosureExpr(it) => closure_ret_hints(hints, sema, &famous_defs, config, file_id, it),
+                    // We could show reborrows for all expressions, but usually that is just noise to the user
+                    // and the main point here is to show why "moving" a mutable reference doesn't necessarily move it
+                    ast::Expr::PathExpr(_) => reborrow_hints(hints, sema, config, &expr),
+                    _ => None,
+                }
+            },
+            ast::Pat(it) => {
+                binding_mode_hints(hints, sema, config, &it);
+                if let ast::Pat::IdentPat(it) = it {
+                    bind_pat_hints(hints, sema, config, file_id, &it);
+                }
+                Some(())
+            },
+            ast::Item(it) => match it {
+                // FIXME: record impl lifetimes so they aren't being reused in assoc item lifetime inlay hints
+                ast::Item::Impl(_) => None,
+                ast::Item::Fn(it) => fn_lifetime_fn_hints(hints, config, it),
+                // static type elisions
+                ast::Item::Static(it) => implicit_static_hints(hints, config, Either::Left(it)),
+                ast::Item::Const(it) => implicit_static_hints(hints, config, Either::Right(it)),
+                _ => None,
+            },
+            // FIXME: fn-ptr type, dyn fn type, and trait object type elisions
+            ast::Type(_) => None,
+            _ => None,
+        }
+    };
+}
+
+/// Adds a hint after the closing `}` (or final token of a macro call) naming
+/// the item it closes — `impl …`, `trait …`, `mod …`, `fn …`, `static …`,
+/// `const …` or `name!` — when the block spans at least
+/// `closing_brace_hints_min_lines` lines and the brace ends its line.
+fn closing_brace_hints(
+    acc: &mut Vec<InlayHint>,
+    sema: &Semantics<'_, RootDatabase>,
+    config: &InlayHintsConfig,
+    file_id: FileId,
+    node: SyntaxNode,
+) -> Option<()> {
+    // `None` disables the feature entirely.
+    let min_lines = config.closing_brace_hints_min_lines?;
+
+    // Tooltip anchor: the start offset of the item's name, when it has one.
+    let name = |it: ast::Name| it.syntax().text_range().start();
+
+    let mut closing_token;
+    let (label, name_offset) = if let Some(item_list) = ast::AssocItemList::cast(node.clone()) {
+        closing_token = item_list.r_curly_token()?;
+
+        let parent = item_list.syntax().parent()?;
+        match_ast! {
+            match parent {
+                ast::Impl(imp) => {
+                    let imp = sema.to_def(&imp)?;
+                    let ty = imp.self_ty(sema.db);
+                    let trait_ = imp.trait_(sema.db);
+
+                    (match trait_ {
+                        Some(tr) => format!("impl {} for {}", tr.name(sema.db), ty.display_truncated(sema.db, config.max_length)),
+                        None => format!("impl {}", ty.display_truncated(sema.db, config.max_length)),
+                    }, None)
+                },
+                ast::Trait(tr) => {
+                    (format!("trait {}", tr.name()?), tr.name().map(name))
+                },
+                _ => return None,
+            }
+        }
+    } else if let Some(list) = ast::ItemList::cast(node.clone()) {
+        closing_token = list.r_curly_token()?;
+
+        let module = ast::Module::cast(list.syntax().parent()?)?;
+        (format!("mod {}", module.name()?), module.name().map(name))
+    } else if let Some(block) = ast::BlockExpr::cast(node.clone()) {
+        closing_token = block.stmt_list()?.r_curly_token()?;
+
+        let parent = block.syntax().parent()?;
+        match_ast! {
+            match parent {
+                ast::Fn(it) => {
+                    // FIXME: this could include parameters, but `HirDisplay` prints too much info
+                    // and doesn't respect the max length either, so the hints end up way too long
+                    (format!("fn {}", it.name()?), it.name().map(name))
+                },
+                ast::Static(it) => (format!("static {}", it.name()?), it.name().map(name)),
+                ast::Const(it) => {
+                    if it.underscore_token().is_some() {
+                        ("const _".into(), None)
+                    } else {
+                        (format!("const {}", it.name()?), it.name().map(name))
+                    }
+                },
+                _ => return None,
+            }
+        }
+    } else if let Some(mac) = ast::MacroCall::cast(node.clone()) {
+        let last_token = mac.syntax().last_token()?;
+        if last_token.kind() != T![;] && last_token.kind() != SyntaxKind::R_CURLY {
+            return None;
+        }
+        closing_token = last_token;
+
+        (
+            format!("{}!", mac.path()?),
+            mac.path().and_then(|it| it.segment()).map(|it| it.syntax().text_range().start()),
+        )
+    } else {
+        return None;
+    };
+
+    if let Some(mut next) = closing_token.next_token() {
+        // Allow a trailing `;` (e.g. `};`) and anchor the hint after it.
+        if next.kind() == T![;] {
+            if let Some(tok) = next.next_token() {
+                closing_token = next;
+                next = tok;
+            }
+        }
+        if !(next.kind() == SyntaxKind::WHITESPACE && next.text().contains('\n')) {
+            // Only display the hint if the `}` is the last token on the line
+            return None;
+        }
+    }
+
+    // Count lines without materializing the node's text.
+    let mut lines = 1;
+    node.text().for_each_chunk(|s| lines += s.matches('\n').count());
+    if lines < min_lines {
+        return None;
+    }
+
+    acc.push(InlayHint {
+        range: closing_token.text_range(),
+        kind: InlayKind::ClosingBraceHint,
+        label,
+        tooltip: name_offset.map(|it| InlayTooltip::HoverOffset(file_id, it)),
+    });
+
+    // Always `None`: the Option return only exists to enable `?` above.
+    None
+}
+
+/// Hints the elided `'static` lifetime on the `&` of a `static`/`const`
+/// item's reference type (only when lifetime hints are set to `Always`).
+fn implicit_static_hints(
+    acc: &mut Vec<InlayHint>,
+    config: &InlayHintsConfig,
+    statik_or_const: Either<ast::Static, ast::Const>,
+) -> Option<()> {
+    if config.lifetime_elision_hints != LifetimeElisionHints::Always {
+        return None;
+    }
+
+    // Skip associated consts (those whose parent is an assoc-item list).
+    if let Either::Right(it) = &statik_or_const {
+        if ast::AssocItemList::can_cast(
+            it.syntax().parent().map_or(SyntaxKind::EOF, |it| it.kind()),
+        ) {
+            return None;
+        }
+    }
+
+    if let Some(ast::Type::RefType(ty)) = statik_or_const.either(|it| it.ty(), |it| it.ty()) {
+        // Only hint when the lifetime is actually elided.
+        if ty.lifetime().is_none() {
+            let t = ty.amp_token()?;
+            acc.push(InlayHint {
+                range: t.text_range(),
+                kind: InlayKind::LifetimeHint,
+                label: "'static".to_owned(),
+                tooltip: Some(InlayTooltip::String("Elided static lifetime".into())),
+            });
+        }
+    }
+
+    Some(())
+}
+
+/// Adds hints for lifetimes elided in a function signature: names each elided
+/// `&` in the parameters, hints the output lifetime when one applies, and
+/// hints the generic-parameter list the allocated lifetimes would occupy.
+fn fn_lifetime_fn_hints(
+    acc: &mut Vec<InlayHint>,
+    config: &InlayHintsConfig,
+    func: ast::Fn,
+) -> Option<()> {
+    if config.lifetime_elision_hints == LifetimeElisionHints::Never {
+        return None;
+    }
+
+    let mk_lt_hint = |t: SyntaxToken, label| InlayHint {
+        range: t.text_range(),
+        kind: InlayKind::LifetimeHint,
+        label,
+        tooltip: Some(InlayTooltip::String("Elided lifetime".into())),
+    };
+
+    let param_list = func.param_list()?;
+    let generic_param_list = func.generic_param_list();
+    let ret_type = func.ret_type();
+    // Only a by-reference `&self` participates in lifetime elision here.
+    let self_param = param_list.self_param().filter(|it| it.amp_token().is_some());
+
+    // A lifetime counts as elided when it is absent or written `'_`.
+    let is_elided = |lt: &Option<ast::Lifetime>| match lt {
+        Some(lt) => matches!(lt.text().as_str(), "'_"),
+        None => true,
+    };
+
+    // All reference sites in the parameter list that could carry a lifetime:
+    // (param name, `&` token, explicit lifetime, elided?).
+    let potential_lt_refs = {
+        let mut acc: Vec<_> = vec![];
+        if let Some(self_param) = &self_param {
+            let lifetime = self_param.lifetime();
+            let is_elided = is_elided(&lifetime);
+            acc.push((None, self_param.amp_token(), lifetime, is_elided));
+        }
+        param_list.params().filter_map(|it| Some((it.pat(), it.ty()?))).for_each(|(pat, ty)| {
+            // FIXME: check path types
+            walk_ty(&ty, &mut |ty| match ty {
+                ast::Type::RefType(r) => {
+                    let lifetime = r.lifetime();
+                    let is_elided = is_elided(&lifetime);
+                    acc.push((
+                        pat.as_ref().and_then(|it| match it {
+                            ast::Pat::IdentPat(p) => p.name(),
+                            _ => None,
+                        }),
+                        r.amp_token(),
+                        lifetime,
+                        is_elided,
+                    ))
+                }
+                _ => (),
+            })
+        });
+        acc
+    };
+
+    // allocate names
+    let mut gen_idx_name = {
+        // Generates `'0`, `'1`, … (`idx + 48` is the ASCII digit for idx < 10).
+        let mut gen = (0u8..).map(|idx| match idx {
+            idx if idx < 10 => SmolStr::from_iter(['\'', (idx + 48) as char]),
+            idx => format!("'{idx}").into(),
+        });
+        move || gen.next().unwrap_or_default()
+    };
+    let mut allocated_lifetimes = vec![];
+
+    // Tracks name collisions with lifetimes already declared on the function.
+    let mut used_names: FxHashMap<SmolStr, usize> =
+        match config.param_names_for_lifetime_elision_hints {
+            true => generic_param_list
+                .iter()
+                .flat_map(|gpl| gpl.lifetime_params())
+                .filter_map(|param| param.lifetime())
+                .filter_map(|lt| Some((SmolStr::from(lt.text().as_str().get(1..)?), 0)))
+                .collect(),
+            false => Default::default(),
+        };
+    {
+        let mut potential_lt_refs = potential_lt_refs.iter().filter(|&&(.., is_elided)| is_elided);
+        if let Some(_) = &self_param {
+            if let Some(_) = potential_lt_refs.next() {
+                allocated_lifetimes.push(if config.param_names_for_lifetime_elision_hints {
+                    // self can't be used as a lifetime, so no need to check for collisions
+                    "'self".into()
+                } else {
+                    gen_idx_name()
+                });
+            }
+        }
+        potential_lt_refs.for_each(|(name, ..)| {
+            let name = match name {
+                Some(it) if config.param_names_for_lifetime_elision_hints => {
+                    // Disambiguate repeated parameter names with a counter suffix.
+                    if let Some(c) = used_names.get_mut(it.text().as_str()) {
+                        *c += 1;
+                        SmolStr::from(format!("'{text}{c}", text = it.text().as_str()))
+                    } else {
+                        used_names.insert(it.text().as_str().into(), 0);
+                        SmolStr::from_iter(["\'", it.text().as_str()])
+                    }
+                }
+                _ => gen_idx_name(),
+            };
+            allocated_lifetimes.push(name);
+        });
+    }
+
+    // fetch output lifetime if elision rule applies
+    let output = match potential_lt_refs.as_slice() {
+        [(_, _, lifetime, _), ..] if self_param.is_some() || potential_lt_refs.len() == 1 => {
+            match lifetime {
+                Some(lt) => match lt.text().as_str() {
+                    "'_" => allocated_lifetimes.get(0).cloned(),
+                    "'static" => None,
+                    name => Some(name.into()),
+                },
+                None => allocated_lifetimes.get(0).cloned(),
+            }
+        }
+        [..] => None,
+    };
+
+    if allocated_lifetimes.is_empty() && output.is_none() {
+        return None;
+    }
+
+    // apply hints
+    // apply output if required
+    let mut is_trivial = true;
+    if let (Some(output_lt), Some(r)) = (&output, ret_type) {
+        if let Some(ty) = r.ty() {
+            walk_ty(&ty, &mut |ty| match ty {
+                ast::Type::RefType(ty) if ty.lifetime().is_none() => {
+                    if let Some(amp) = ty.amp_token() {
+                        is_trivial = false;
+                        acc.push(mk_lt_hint(amp, output_lt.to_string()));
+                    }
+                }
+                _ => (),
+            })
+        }
+    }
+
+    // "Trivial" = no elided lifetime in the return type needed a hint.
+    if config.lifetime_elision_hints == LifetimeElisionHints::SkipTrivial && is_trivial {
+        return None;
+    }
+
+    let mut a = allocated_lifetimes.iter();
+    for (_, amp_token, _, is_elided) in potential_lt_refs {
+        if is_elided {
+            let t = amp_token?;
+            let lt = a.next()?;
+            acc.push(mk_lt_hint(t, lt.to_string()));
+        }
+    }
+
+    // generate generic param list things
+    match (generic_param_list, allocated_lifetimes.as_slice()) {
+        (_, []) => (),
+        (Some(gpl), allocated_lifetimes) => {
+            let angle_tok = gpl.l_angle_token()?;
+            let is_empty = gpl.generic_params().next().is_none();
+            acc.push(InlayHint {
+                range: angle_tok.text_range(),
+                kind: InlayKind::LifetimeHint,
+                label: format!(
+                    "{}{}",
+                    allocated_lifetimes.iter().format(", "),
+                    if is_empty { "" } else { ", " }
+                ),
+                tooltip: Some(InlayTooltip::String("Elided lifetimes".into())),
+            });
+        }
+        (None, allocated_lifetimes) => acc.push(InlayHint {
+            range: func.name()?.syntax().text_range(),
+            kind: InlayKind::GenericParamListHint,
+            label: format!("<{}>", allocated_lifetimes.iter().format(", "),).into(),
+            tooltip: Some(InlayTooltip::String("Elided lifetimes".into())),
+        }),
+    }
+    Some(())
+}
+
+/// Adds a return-type hint after a closure's parameter list when the return
+/// type is elided and non-unit.
+fn closure_ret_hints(
+    acc: &mut Vec<InlayHint>,
+    sema: &Semantics<'_, RootDatabase>,
+    famous_defs: &FamousDefs<'_, '_>,
+    config: &InlayHintsConfig,
+    file_id: FileId,
+    closure: ast::ClosureExpr,
+) -> Option<()> {
+    if config.closure_return_type_hints == ClosureReturnTypeHints::Never {
+        return None;
+    }
+
+    // An explicitly written return type needs no hint.
+    if closure.ret_type().is_some() {
+        return None;
+    }
+
+    if !closure_has_block_body(&closure)
+        && config.closure_return_type_hints == ClosureReturnTypeHints::WithBlock
+    {
+        return None;
+    }
+
+    let param_list = closure.param_list()?;
+
+    let closure = sema.descend_node_into_attributes(closure.clone()).pop()?;
+    let ty = sema.type_of_expr(&ast::Expr::ClosureExpr(closure))?.adjusted();
+    let callable = ty.as_callable(sema.db)?;
+    let ty = callable.return_type();
+    if ty.is_unit() {
+        return None;
+    }
+    acc.push(InlayHint {
+        range: param_list.syntax().text_range(),
+        kind: InlayKind::ClosureReturnTypeHint,
+        // Prefer the `impl Iterator<Item = …>` rendering when applicable.
+        label: hint_iterator(sema, &famous_defs, config, &ty)
+            .unwrap_or_else(|| ty.display_truncated(sema.db, config.max_length).to_string()),
+        tooltip: Some(InlayTooltip::HoverRanged(file_id, param_list.syntax().text_range())),
+    });
+    Some(())
+}
+
+/// Hints a compiler-inserted reborrow (`&*` / `&mut *`) on `expr`; shared
+/// reborrows are suppressed in `MutableOnly` mode.
+fn reborrow_hints(
+    acc: &mut Vec<InlayHint>,
+    sema: &Semantics<'_, RootDatabase>,
+    config: &InlayHintsConfig,
+    expr: &ast::Expr,
+) -> Option<()> {
+    if config.reborrow_hints == ReborrowHints::Never {
+        return None;
+    }
+
+    // Resolve through attribute macros when possible; fall back to `expr`.
+    let descended = sema.descend_node_into_attributes(expr.clone()).pop();
+    let desc_expr = descended.as_ref().unwrap_or(expr);
+    let mutability = sema.is_implicit_reborrow(desc_expr)?;
+    let label = match mutability {
+        hir::Mutability::Shared if config.reborrow_hints != ReborrowHints::MutableOnly => "&*",
+        hir::Mutability::Mut => "&mut *",
+        _ => return None,
+    };
+    acc.push(InlayHint {
+        range: expr.syntax().text_range(),
+        kind: InlayKind::ImplicitReborrowHint,
+        label: label.to_string(),
+        tooltip: Some(InlayTooltip::String("Compiler inserted reborrow".into())),
+    });
+    Some(())
+}
+
+/// Adds a type hint at the end of a line inside a method chain, i.e. when an
+/// expression's next significant siblings are a newline followed by `.`.
+fn chaining_hints(
+    acc: &mut Vec<InlayHint>,
+    sema: &Semantics<'_, RootDatabase>,
+    famous_defs: &FamousDefs<'_, '_>,
+    config: &InlayHintsConfig,
+    file_id: FileId,
+    expr: &ast::Expr,
+) -> Option<()> {
+    if !config.chaining_hints {
+        return None;
+    }
+
+    if matches!(expr, ast::Expr::RecordExpr(_)) {
+        return None;
+    }
+
+    let descended = sema.descend_node_into_attributes(expr.clone()).pop();
+    let desc_expr = descended.as_ref().unwrap_or(expr);
+
+    // Following tokens, skipping single-line whitespace and comments.
+    let mut tokens = expr
+        .syntax()
+        .siblings_with_tokens(Direction::Next)
+        .filter_map(NodeOrToken::into_token)
+        .filter(|t| match t.kind() {
+            SyntaxKind::WHITESPACE if !t.text().contains('\n') => false,
+            SyntaxKind::COMMENT => false,
+            _ => true,
+        });
+
+    // Chaining can be defined as an expression whose next sibling tokens are newline and dot
+    // Ignoring extra whitespace and comments
+    let next = tokens.next()?.kind();
+    if next == SyntaxKind::WHITESPACE {
+        let mut next_next = tokens.next()?.kind();
+        while next_next == SyntaxKind::WHITESPACE {
+            next_next = tokens.next()?.kind();
+        }
+        if next_next == T![.] {
+            let ty = sema.type_of_expr(desc_expr)?.original;
+            if ty.is_unknown() {
+                return None;
+            }
+            // A bare path to a fieldless struct is just its name — no hint value.
+            if matches!(expr, ast::Expr::PathExpr(_)) {
+                if let Some(hir::Adt::Struct(st)) = ty.as_adt() {
+                    if st.fields(sema.db).is_empty() {
+                        return None;
+                    }
+                }
+            }
+            acc.push(InlayHint {
+                range: expr.syntax().text_range(),
+                kind: InlayKind::ChainingHint,
+                label: hint_iterator(sema, &famous_defs, config, &ty).unwrap_or_else(|| {
+                    ty.display_truncated(sema.db, config.max_length).to_string()
+                }),
+                tooltip: Some(InlayTooltip::HoverRanged(file_id, expr.syntax().text_range())),
+            });
+        }
+    }
+    Some(())
+}
+
+/// Adds parameter-name hints before the arguments of a call or method-call
+/// expression, skipping arguments where the name would be redundant.
+fn param_name_hints(
+    acc: &mut Vec<InlayHint>,
+    sema: &Semantics<'_, RootDatabase>,
+    config: &InlayHintsConfig,
+    expr: ast::Expr,
+) -> Option<()> {
+    if !config.parameter_hints {
+        return None;
+    }
+
+    let (callable, arg_list) = get_callable(sema, &expr)?;
+    let hints = callable
+        .params(sema.db)
+        .into_iter()
+        .zip(arg_list.args())
+        .filter_map(|((param, _ty), arg)| {
+            // Only annotate hints for expressions that exist in the original file
+            let range = sema.original_range_opt(arg.syntax())?;
+            let (param_name, name_syntax) = match param.as_ref()? {
+                Either::Left(pat) => ("self".to_string(), pat.name()),
+                Either::Right(pat) => match pat {
+                    ast::Pat::IdentPat(it) => (it.name()?.to_string(), it.name()),
+                    _ => return None,
+                },
+            };
+            Some((name_syntax, param_name, arg, range))
+        })
+        .filter(|(_, param_name, arg, _)| {
+            !should_hide_param_name_hint(sema, &callable, param_name, arg)
+        })
+        .map(|(param, param_name, _, FileRange { range, .. })| {
+            let mut tooltip = None;
+            if let Some(name) = param {
+                if let hir::CallableKind::Function(f) = callable.kind() {
+                    // assert the file is cached so we can map out of macros
+                    if let Some(_) = sema.source(f) {
+                        tooltip = sema.original_range_opt(name.syntax());
+                    }
+                }
+            }
+
+            InlayHint {
+                range,
+                kind: InlayKind::ParameterHint,
+                label: param_name,
+                tooltip: tooltip.map(|it| InlayTooltip::HoverOffset(it.file_id, it.range.start())),
+            }
+        });
+
+    acc.extend(hints);
+    Some(())
+}
+
+/// Adds binding-mode hints on a pattern: `&`/`&mut` for each match-ergonomics
+/// adjustment, plus `ref`/`ref mut` on ident patterns bound by reference.
+fn binding_mode_hints(
+    acc: &mut Vec<InlayHint>,
+    sema: &Semantics<'_, RootDatabase>,
+    config: &InlayHintsConfig,
+    pat: &ast::Pat,
+) -> Option<()> {
+    if !config.binding_mode_hints {
+        return None;
+    }
+
+    let range = pat.syntax().text_range();
+    // One hint per reference adjustment applied to the pattern.
+    sema.pattern_adjustments(&pat).iter().for_each(|ty| {
+        let reference = ty.is_reference();
+        let mut_reference = ty.is_mutable_reference();
+        let r = match (reference, mut_reference) {
+            (true, true) => "&mut",
+            (true, false) => "&",
+            _ => return,
+        };
+        acc.push(InlayHint {
+            range,
+            kind: InlayKind::BindingModeHint,
+            label: r.to_string(),
+            tooltip: Some(InlayTooltip::String("Inferred binding mode".into())),
+        });
+    });
+    match pat {
+        // Only hint when the user did not already write `ref`/`mut`.
+        ast::Pat::IdentPat(pat) if pat.ref_token().is_none() && pat.mut_token().is_none() => {
+            let bm = sema.binding_mode_of_pat(pat)?;
+            let bm = match bm {
+                hir::BindingMode::Move => return None,
+                hir::BindingMode::Ref(Mutability::Mut) => "ref mut",
+                hir::BindingMode::Ref(Mutability::Shared) => "ref",
+            };
+            acc.push(InlayHint {
+                range,
+                kind: InlayKind::BindingModeHint,
+                label: bm.to_string(),
+                tooltip: Some(InlayTooltip::String("Inferred binding mode".into())),
+            });
+        }
+        _ => (),
+    }
+
+    Some(())
+}
+
+/// Adds a type hint after an identifier pattern (e.g. `let x/* : i32 */ = …`),
+/// unless suppression rules apply.
+fn bind_pat_hints(
+    acc: &mut Vec<InlayHint>,
+    sema: &Semantics<'_, RootDatabase>,
+    config: &InlayHintsConfig,
+    file_id: FileId,
+    pat: &ast::IdentPat,
+) -> Option<()> {
+    if !config.type_hints {
+        return None;
+    }
+
+    // Resolve through attribute macros when possible; fall back to `pat`.
+    let descended = sema.descend_node_into_attributes(pat.clone()).pop();
+    let desc_pat = descended.as_ref().unwrap_or(pat);
+    let ty = sema.type_of_pat(&desc_pat.clone().into())?.original;
+
+    if should_not_display_type_hint(sema, config, pat, &ty) {
+        return None;
+    }
+
+    let krate = sema.scope(desc_pat.syntax())?.krate();
+    let famous_defs = FamousDefs(sema, krate);
+    // Prefer the `impl Iterator<Item = …>` rendering when applicable.
+    let label = hint_iterator(sema, &famous_defs, config, &ty);
+
+    let label = match label {
+        Some(label) => label,
+        None => {
+            let ty_name = ty.display_truncated(sema.db, config.max_length).to_string();
+            // Optionally hide `let x = Foo::new()`-style hints where the type
+            // name is already spelled out by the initializer.
+            if config.hide_named_constructor_hints
+                && is_named_constructor(sema, pat, &ty_name).is_some()
+            {
+                return None;
+            }
+            ty_name
+        }
+    };
+
+    acc.push(InlayHint {
+        range: match pat.name() {
+            Some(name) => name.syntax().text_range(),
+            None => pat.syntax().text_range(),
+        },
+        kind: InlayKind::TypeHint,
+        label,
+        tooltip: pat
+            .name()
+            .map(|it| it.syntax().text_range())
+            .map(|it| InlayTooltip::HoverRanged(file_id, it)),
+    });
+
+    Some(())
+}
+
+/// Returns `Some(())` when `pat`'s let-initializer already spells out the
+/// type name `ty_name` — e.g. `let x = Foo::new()` or `let x = Foo` — so the
+/// type hint would be redundant.
+fn is_named_constructor(
+    sema: &Semantics<'_, RootDatabase>,
+    pat: &ast::IdentPat,
+    ty_name: &str,
+) -> Option<()> {
+    let let_node = pat.syntax().parent()?;
+    let expr = match_ast! {
+        match let_node {
+            ast::LetStmt(it) => it.initializer(),
+            ast::LetExpr(it) => it.expr(),
+            _ => None,
+        }
+    }?;
+
+    let expr = sema.descend_node_into_attributes(expr.clone()).pop().unwrap_or(expr);
+    // unwrap postfix expressions
+    let expr = match expr {
+        ast::Expr::TryExpr(it) => it.expr(),
+        ast::Expr::AwaitExpr(it) => it.expr(),
+        expr => Some(expr),
+    }?;
+    let expr = match expr {
+        ast::Expr::CallExpr(call) => match call.expr()? {
+            ast::Expr::PathExpr(path) => path,
+            _ => return None,
+        },
+        ast::Expr::PathExpr(path) => path,
+        _ => return None,
+    };
+    let path = expr.path()?;
+
+    let callable = sema.type_of_expr(&ast::Expr::PathExpr(expr))?.original.as_callable(sema.db);
+    let callable_kind = callable.map(|it| it.kind());
+    // For `Foo::new()` / tuple-variant calls the type name is the qualifier;
+    // otherwise it is the final path segment itself.
+    let qual_seg = match callable_kind {
+        Some(hir::CallableKind::Function(_) | hir::CallableKind::TupleEnumVariant(_)) => {
+            path.qualifier()?.segment()
+        }
+        _ => path.segment(),
+    }?;
+
+    let ctor_name = match qual_seg.kind()? {
+        ast::PathSegmentKind::Name(name_ref) => {
+            // Include generic args so `Vec::<u8>::new()` compares as `Vec<u8>`.
+            match qual_seg.generic_arg_list().map(|it| it.generic_args()) {
+                Some(generics) => format!("{}<{}>", name_ref, generics.format(", ")),
+                None => name_ref.to_string(),
+            }
+        }
+        ast::PathSegmentKind::Type { type_ref: Some(ty), trait_ref: None } => ty.to_string(),
+        _ => return None,
+    };
+    (ctor_name == ty_name).then(|| ())
+}
+
+/// Checks if the type is an Iterator from std::iter and replaces its hint with an `impl Iterator<Item = Ty>`.
+///
+/// Recurses so that nested iterator adapters render their `Item` the same way.
+fn hint_iterator(
+    sema: &Semantics<'_, RootDatabase>,
+    famous_defs: &FamousDefs<'_, '_>,
+    config: &InlayHintsConfig,
+    ty: &hir::Type,
+) -> Option<String> {
+    let db = sema.db;
+    let strukt = ty.strip_references().as_adt()?;
+    let krate = strukt.module(db).krate();
+    if krate != famous_defs.core()? {
+        return None;
+    }
+    let iter_trait = famous_defs.core_iter_Iterator()?;
+    let iter_mod = famous_defs.core_iter()?;
+
+    // Assert that this struct comes from `core::iter`.
+    if !(strukt.visibility(db) == hir::Visibility::Public
+        && strukt.module(db).path_to_root(db).contains(&iter_mod))
+    {
+        return None;
+    }
+
+    if ty.impls_trait(db, iter_trait, &[]) {
+        // Look up `Iterator::Item` and normalize it for this concrete type.
+        let assoc_type_item = iter_trait.items(db).into_iter().find_map(|item| match item {
+            hir::AssocItem::TypeAlias(alias) if alias.name(db) == known::Item => Some(alias),
+            _ => None,
+        })?;
+        if let Some(ty) = ty.normalize_trait_assoc_type(db, &[], assoc_type_item) {
+            const LABEL_START: &str = "impl Iterator<Item = ";
+            const LABEL_END: &str = ">";
+
+            let ty_display = hint_iterator(sema, famous_defs, config, &ty)
+                .map(|assoc_type_impl| assoc_type_impl.to_string())
+                .unwrap_or_else(|| {
+                    // Budget the truncation for the label's fixed wrapper text.
+                    ty.display_truncated(
+                        db,
+                        config
+                            .max_length
+                            .map(|len| len.saturating_sub(LABEL_START.len() + LABEL_END.len())),
+                    )
+                    .to_string()
+                });
+            return Some(format!("{}{}{}", LABEL_START, ty_display, LABEL_END));
+        }
+    }
+
+    None
+}
+
+/// Returns `true` when the identifier pattern's text exactly matches the name
+/// of one of the variants of the enum type `pat_ty`.
+fn pat_is_enum_variant(db: &RootDatabase, bind_pat: &ast::IdentPat, pat_ty: &hir::Type) -> bool {
+ if let Some(hir::Adt::Enum(enum_data)) = pat_ty.as_adt() {
+ let pat_text = bind_pat.to_string();
+ enum_data
+ .variants(db)
+ .into_iter()
+ .map(|variant| variant.name(db).to_smol_str())
+ .any(|enum_name| enum_name == pat_text)
+ } else {
+ false
+ }
+}
+
+/// Heuristics deciding whether the type hint for `bind_pat` should be
+/// suppressed entirely (returns `true` to hide the hint).
+fn should_not_display_type_hint(
+ sema: &Semantics<'_, RootDatabase>,
+ config: &InlayHintsConfig,
+ bind_pat: &ast::IdentPat,
+ pat_ty: &hir::Type,
+) -> bool {
+ let db = sema.db;
+
+ // Nothing useful to show for an unknown type.
+ if pat_ty.is_unknown() {
+ return true;
+ }
+
+ // The binding spells out the name of a fieldless struct, so the pattern
+ // text already names the type.
+ if let Some(hir::Adt::Struct(s)) = pat_ty.as_adt() {
+ if s.fields(db).is_empty() && s.name(db).to_smol_str() == bind_pat.to_string() {
+ return true;
+ }
+ }
+
+ // Optionally hide hints on `let` bindings initialized with a block-bodied closure.
+ if config.hide_closure_initialization_hints {
+ if let Some(parent) = bind_pat.syntax().parent() {
+ if let Some(it) = ast::LetStmt::cast(parent.clone()) {
+ if let Some(ast::Expr::ClosureExpr(closure)) = it.initializer() {
+ if closure_has_block_body(&closure) {
+ return true;
+ }
+ }
+ }
+ }
+ }
+
+ // Otherwise decide based on the closest relevant ancestor of the pattern.
+ for node in bind_pat.syntax().ancestors() {
+ match_ast! {
+ match node {
+ // An explicit type ascription makes the hint redundant.
+ ast::LetStmt(it) => return it.ty().is_some(),
+ // FIXME: We might wanna show type hints in parameters for non-top level patterns as well
+ ast::Param(it) => return it.ty().is_some(),
+ // Hide when the binding text coincides with one of the enum's variant names.
+ ast::MatchArm(_) => return pat_is_enum_variant(db, bind_pat, pat_ty),
+ ast::LetExpr(_) => return pat_is_enum_variant(db, bind_pat, pat_ty),
+ ast::IfExpr(_) => return false,
+ ast::WhileExpr(_) => return false,
+ ast::ForExpr(it) => {
+ // We *should* display hint only if user provided "in {expr}" and we know the type of expr (and it's not unit).
+ // Type of expr should be iterable.
+ return it.in_token().is_none() ||
+ it.iterable()
+ .and_then(|iterable_expr| sema.type_of_expr(&iterable_expr))
+ .map(TypeInfo::original)
+ .map_or(true, |iterable_ty| iterable_ty.is_unknown() || iterable_ty.is_unit())
+ },
+ _ => (),
+ }
+ }
+ }
+ false
+}
+
+/// Returns `true` if the closure's body is a block expression (`|x| { .. }`)
+/// rather than a bare expression (`|x| ..`).
+fn closure_has_block_body(closure: &ast::ClosureExpr) -> bool {
+ matches!(closure.body(), Some(ast::Expr::BlockExpr(_)))
+}
+
+/// Decides whether the parameter-name inlay hint for `argument` should be
+/// suppressed, per the heuristics enumerated in the body.
+fn should_hide_param_name_hint(
+ sema: &Semantics<'_, RootDatabase>,
+ callable: &hir::Callable,
+ param_name: &str,
+ argument: &ast::Expr,
+) -> bool {
+ // These are to be tested in the `parameter_hint_heuristics` test
+ // hide when:
+ // - the parameter name is a suffix of the function's name
+ // - the argument is a qualified constructing or call expression where the qualifier is an ADT
+ // - exact argument<->parameter match(ignoring leading underscore) or parameter is a prefix/suffix
+ // of argument with _ splitting it off
+ // - param starts with `ra_fixture`
+ // - param is a well known name in a unary function
+
+ // Leading underscores are ignored throughout the name comparisons.
+ let param_name = param_name.trim_start_matches('_');
+ if param_name.is_empty() {
+ return true;
+ }
+
+ // A `!`-negated argument never has its hint hidden.
+ if matches!(argument, ast::Expr::PrefixExpr(prefix) if prefix.op_kind() == Some(UnaryOp::Not)) {
+ return false;
+ }
+
+ // Only plain functions contribute a function name to the suffix heuristic.
+ let fn_name = match callable.kind() {
+ hir::CallableKind::Function(it) => Some(it.name(sema.db).to_smol_str()),
+ _ => None,
+ };
+ let fn_name = fn_name.as_deref();
+ is_param_name_suffix_of_fn_name(param_name, callable, fn_name)
+ || is_argument_similar_to_param_name(argument, param_name)
+ || param_name.starts_with("ra_fixture")
+ || (callable.n_params() == 1 && is_obvious_param(param_name))
+ || is_adt_constructor_similar_to_param_name(sema, argument, param_name)
+}
+
+/// Returns `true` when the string representation of `argument` equals
+/// `param_name`, or contains it as a `_`-separated prefix or suffix,
+/// comparing ASCII-case-insensitively and ignoring leading underscores
+/// on either side.
+fn is_argument_similar_to_param_name(argument: &ast::Expr, param_name: &str) -> bool {
+ // check whether param_name and argument are the same or
+ // whether param_name is a prefix/suffix of argument(split at `_`)
+ let argument = match get_string_representation(argument) {
+ Some(argument) => argument,
+ None => return false,
+ };
+
+ // std is honestly too panic happy...
+ // FIX: split the *same* string whose char boundary was just checked. The
+ // previous closure checked `str.is_char_boundary(at)` but split the outer,
+ // untrimmed `argument`, so once the leading `_`s were trimmed off below the
+ // offsets were stale — yielding wrong prefix/suffix slices and a potential
+ // `split_at` panic on multi-byte arguments with leading underscores.
+ fn str_split_at(str: &str, at: usize) -> Option<(&str, &str)> {
+ str.is_char_boundary(at).then(|| str.split_at(at))
+ }
+
+ let param_name = param_name.trim_start_matches('_');
+ let argument = argument.trim_start_matches('_');
+
+ // Prefix match: parameter `param` matches argument `param_end`.
+ match str_split_at(argument, param_name.len()) {
+ Some((prefix, rest)) if prefix.eq_ignore_ascii_case(param_name) => {
+ return rest.is_empty() || rest.starts_with('_');
+ }
+ _ => (),
+ }
+ // Suffix match: parameter `param` matches argument `start_param`.
+ match argument.len().checked_sub(param_name.len()).and_then(|at| str_split_at(argument, at)) {
+ Some((rest, suffix)) if param_name.eq_ignore_ascii_case(suffix) => {
+ return rest.is_empty() || rest.ends_with('_');
+ }
+ _ => (),
+ }
+ false
+}
+
+/// Hide the parameter name of a unary function if it is a `_` - prefixed suffix of the function's name, or equal.
+///
+/// `fn strip_suffix(suffix)` will be hidden.
+/// `fn stripsuffix(suffix)` will not be hidden.
+fn is_param_name_suffix_of_fn_name(
+ param_name: &str,
+ callable: &Callable,
+ fn_name: Option<&str>,
+) -> bool {
+ // Only applies to unary functions with a known name.
+ match (callable.n_params(), fn_name) {
+ (1, Some(function)) => {
+ // Exact match, or the function name ends with `_<param_name>`
+ // (ASCII-case-insensitive). The `is_char_boundary` guard keeps
+ // `split_at` from panicking on multi-byte function names.
+ function == param_name
+ || function
+ .len()
+ .checked_sub(param_name.len())
+ .and_then(|at| function.is_char_boundary(at).then(|| function.split_at(at)))
+ .map_or(false, |(prefix, suffix)| {
+ suffix.eq_ignore_ascii_case(param_name) && prefix.ends_with('_')
+ })
+ }
+ _ => false,
+ }
+}
+
+/// Returns `true` when `argument` is a path, call, or record expression whose
+/// resolved ADT name — or, for functions and enum variants, the name of the
+/// path's ADT qualifier — equals `param_name` after lower-snake-casing.
+fn is_adt_constructor_similar_to_param_name(
+ sema: &Semantics<'_, RootDatabase>,
+ argument: &ast::Expr,
+ param_name: &str,
+) -> bool {
+ // Extract the path being constructed or called, if any.
+ let path = match argument {
+ ast::Expr::CallExpr(c) => c.expr().and_then(|e| match e {
+ ast::Expr::PathExpr(p) => p.path(),
+ _ => None,
+ }),
+ ast::Expr::PathExpr(p) => p.path(),
+ ast::Expr::RecordExpr(r) => r.path(),
+ _ => return false,
+ };
+ let path = match path {
+ Some(it) => it,
+ None => return false,
+ };
+ (|| match sema.resolve_path(&path)? {
+ hir::PathResolution::Def(hir::ModuleDef::Adt(_)) => {
+ Some(to_lower_snake_case(&path.segment()?.name_ref()?.text()) == param_name)
+ }
+ hir::PathResolution::Def(hir::ModuleDef::Function(_) | hir::ModuleDef::Variant(_)) => {
+ if to_lower_snake_case(&path.segment()?.name_ref()?.text()) == param_name {
+ return Some(true);
+ }
+ // `Adt::assoc_fn(..)` / `Enum::Variant(..)`: compare the qualifier's
+ // name when it resolves to an ADT.
+ let qual = path.qualifier()?;
+ match sema.resolve_path(&qual)? {
+ hir::PathResolution::Def(hir::ModuleDef::Adt(_)) => {
+ Some(to_lower_snake_case(&qual.segment()?.name_ref()?.text()) == param_name)
+ }
+ _ => None,
+ }
+ }
+ _ => None,
+ })()
+ .unwrap_or(false)
+}
+
+/// Produces the text used to compare an argument expression against a
+/// parameter name. Looks through `clone`/`as_ref` method calls, references,
+/// prefix operators and casts; returns `None` when the expression has no
+/// obvious name.
+fn get_string_representation(expr: &ast::Expr) -> Option<String> {
+ match expr {
+ ast::Expr::MethodCallExpr(method_call_expr) => {
+ let name_ref = method_call_expr.name_ref()?;
+ match name_ref.text().as_str() {
+ // `x.clone()` / `x.as_ref()` stand in for `x` itself.
+ "clone" | "as_ref" => method_call_expr.receiver().map(|rec| rec.to_string()),
+ name_ref => Some(name_ref.to_owned()),
+ }
+ }
+ ast::Expr::MacroExpr(macro_expr) => {
+ Some(macro_expr.macro_call()?.path()?.segment()?.to_string())
+ }
+ ast::Expr::FieldExpr(field_expr) => Some(field_expr.name_ref()?.to_string()),
+ ast::Expr::PathExpr(path_expr) => Some(path_expr.path()?.segment()?.to_string()),
+ ast::Expr::PrefixExpr(prefix_expr) => get_string_representation(&prefix_expr.expr()?),
+ ast::Expr::RefExpr(ref_expr) => get_string_representation(&ref_expr.expr()?),
+ ast::Expr::CastExpr(cast_expr) => get_string_representation(&cast_expr.expr()?),
+ _ => None,
+ }
+}
+
+/// Returns `true` for parameter names too generic to be worth hinting in a
+/// unary call: single characters and a short list of common std names.
+fn is_obvious_param(param_name: &str) -> bool {
+ // avoid displaying hints for common functions like map, filter, etc.
+ // or other obvious words used in std
+ let is_obvious_param_name =
+ matches!(param_name, "predicate" | "value" | "pat" | "rhs" | "other");
+ param_name.len() == 1 || is_obvious_param_name
+}
+
+/// Resolves a call or method-call expression to its `hir::Callable` paired
+/// with the argument list, first descending into attribute-macro expansions
+/// so calls inside attribute-expanded items resolve (exercised by the
+/// `hints_in_attr_call` test).
+fn get_callable(
+ sema: &Semantics<'_, RootDatabase>,
+ expr: &ast::Expr,
+) -> Option<(hir::Callable, ast::ArgList)> {
+ match expr {
+ ast::Expr::CallExpr(expr) => {
+ let descended = sema.descend_node_into_attributes(expr.clone()).pop();
+ let expr = descended.as_ref().unwrap_or(expr);
+ sema.type_of_expr(&expr.expr()?)?.original.as_callable(sema.db).zip(expr.arg_list())
+ }
+ ast::Expr::MethodCallExpr(expr) => {
+ let descended = sema.descend_node_into_attributes(expr.clone()).pop();
+ let expr = descended.as_ref().unwrap_or(expr);
+ sema.resolve_method_call_as_callable(expr).zip(expr.arg_list())
+ }
+ _ => None,
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+ use ide_db::base_db::FileRange;
+ use itertools::Itertools;
+ use syntax::{TextRange, TextSize};
+ use test_utils::extract_annotations;
+
+ use crate::inlay_hints::ReborrowHints;
+ use crate::{fixture, inlay_hints::InlayHintsConfig, LifetimeElisionHints};
+
+ use super::ClosureReturnTypeHints;
+
+ // Baseline configuration with every hint kind switched off; individual
+ // tests enable just the kinds they exercise via struct-update syntax.
+ const DISABLED_CONFIG: InlayHintsConfig = InlayHintsConfig {
+ render_colons: false,
+ type_hints: false,
+ parameter_hints: false,
+ chaining_hints: false,
+ lifetime_elision_hints: LifetimeElisionHints::Never,
+ closure_return_type_hints: ClosureReturnTypeHints::Never,
+ reborrow_hints: ReborrowHints::Always,
+ binding_mode_hints: false,
+ hide_named_constructor_hints: false,
+ hide_closure_initialization_hints: false,
+ param_names_for_lifetime_elision_hints: false,
+ max_length: None,
+ closing_brace_hints_min_lines: None,
+ };
+ // DISABLED_CONFIG with the main hint kinds switched on; used by `check`.
+ const TEST_CONFIG: InlayHintsConfig = InlayHintsConfig {
+ type_hints: true,
+ parameter_hints: true,
+ chaining_hints: true,
+ reborrow_hints: ReborrowHints::Always,
+ closure_return_type_hints: ClosureReturnTypeHints::WithBlock,
+ binding_mode_hints: true,
+ lifetime_elision_hints: LifetimeElisionHints::Always,
+ ..DISABLED_CONFIG
+ };
+
+ /// Checks fixture annotations against hints produced under `TEST_CONFIG`.
+ #[track_caller]
+ fn check(ra_fixture: &str) {
+ check_with_config(TEST_CONFIG, ra_fixture);
+ }
+
+ /// Checks fixture annotations with only parameter hints enabled.
+ #[track_caller]
+ fn check_params(ra_fixture: &str) {
+ check_with_config(
+ InlayHintsConfig { parameter_hints: true, ..DISABLED_CONFIG },
+ ra_fixture,
+ );
+ }
+
+ /// Checks fixture annotations with only type hints enabled.
+ #[track_caller]
+ fn check_types(ra_fixture: &str) {
+ check_with_config(InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG }, ra_fixture);
+ }
+
+ /// Checks fixture annotations with only chaining hints enabled.
+ #[track_caller]
+ fn check_chains(ra_fixture: &str) {
+ check_with_config(InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG }, ra_fixture);
+ }
+
+ /// Renders inlay hints for `ra_fixture` under `config` and compares them
+ /// against the `//^ label` annotations embedded in the fixture.
+ #[track_caller]
+ fn check_with_config(config: InlayHintsConfig, ra_fixture: &str) {
+ let (analysis, file_id) = fixture::file(ra_fixture);
+ let mut expected = extract_annotations(&*analysis.file_text(file_id).unwrap());
+ let inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap();
+ let actual = inlay_hints
+ .into_iter()
+ .map(|it| (it.range, it.label.to_string()))
+ .sorted_by_key(|(range, _)| range.start())
+ .collect::<Vec<_>>();
+ // Both sides are sorted by position, so emission order cannot affect the comparison.
+ expected.sort_by_key(|(range, _)| range.start());
+
+ assert_eq!(expected, actual, "\nExpected:\n{:#?}\n\nActual:\n{:#?}", expected, actual);
+ }
+
+ /// Snapshot-style check: debug-formats all produced hints and compares
+ /// them with the `expect![..]` literal.
+ #[track_caller]
+ fn check_expect(config: InlayHintsConfig, ra_fixture: &str, expect: Expect) {
+ let (analysis, file_id) = fixture::file(ra_fixture);
+ let inlay_hints = analysis.inlay_hints(&config, file_id, None).unwrap();
+ expect.assert_debug_eq(&inlay_hints)
+ }
+
+ #[test]
+ fn hints_disabled() {
+ check_with_config(
+ InlayHintsConfig { render_colons: true, ..DISABLED_CONFIG },
+ r#"
+fn foo(a: i32, b: i32) -> i32 { a + b }
+fn main() {
+ let _x = foo(4, 4);
+}"#,
+ );
+ }
+
+ // Parameter hint tests
+
+ #[test]
+ fn param_hints_only() {
+ check_params(
+ r#"
+fn foo(a: i32, b: i32) -> i32 { a + b }
+fn main() {
+ let _x = foo(
+ 4,
+ //^ a
+ 4,
+ //^ b
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn param_hints_on_closure() {
+ check_params(
+ r#"
+fn main() {
+ let clo = |a: u8, b: u8| a + b;
+ clo(
+ 1,
+ //^ a
+ 2,
+ //^ b
+ );
+}
+ "#,
+ );
+ }
+
+ #[test]
+ fn param_name_similar_to_fn_name_still_hints() {
+ check_params(
+ r#"
+fn max(x: i32, y: i32) -> i32 { x + y }
+fn main() {
+ let _x = max(
+ 4,
+ //^ x
+ 4,
+ //^ y
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn param_name_similar_to_fn_name() {
+ check_params(
+ r#"
+fn param_with_underscore(with_underscore: i32) -> i32 { with_underscore }
+fn main() {
+ let _x = param_with_underscore(
+ 4,
+ );
+}"#,
+ );
+ check_params(
+ r#"
+fn param_with_underscore(underscore: i32) -> i32 { underscore }
+fn main() {
+ let _x = param_with_underscore(
+ 4,
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn param_name_same_as_fn_name() {
+ check_params(
+ r#"
+fn foo(foo: i32) -> i32 { foo }
+fn main() {
+ let _x = foo(
+ 4,
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn never_hide_param_when_multiple_params() {
+ check_params(
+ r#"
+fn foo(foo: i32, bar: i32) -> i32 { bar + baz }
+fn main() {
+ let _x = foo(
+ 4,
+ //^ foo
+ 8,
+ //^ bar
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn param_hints_look_through_as_ref_and_clone() {
+ check_params(
+ r#"
+fn foo(bar: i32, baz: f32) {}
+
+fn main() {
+ let bar = 3;
+ let baz = &"baz";
+ let fez = 1.0;
+ foo(bar.clone(), bar.clone());
+ //^^^^^^^^^^^ baz
+ foo(bar.as_ref(), bar.as_ref());
+ //^^^^^^^^^^^^ baz
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn self_param_hints() {
+ check_params(
+ r#"
+struct Foo;
+
+impl Foo {
+ fn foo(self: Self) {}
+ fn bar(self: &Self) {}
+}
+
+fn main() {
+ Foo::foo(Foo);
+ //^^^ self
+ Foo::bar(&Foo);
+ //^^^^ self
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn param_name_hints_show_for_literals() {
+ check_params(
+ r#"pub fn test(a: i32, b: i32) -> [i32; 2] { [a, b] }
+fn main() {
+ test(
+ 0xa_b,
+ //^^^^^ a
+ 0xa_b,
+ //^^^^^ b
+ );
+}"#,
+ )
+ }
+
+ #[test]
+ fn function_call_parameter_hint() {
+ check_params(
+ r#"
+//- minicore: option
+struct FileId {}
+struct SmolStr {}
+
+struct TextRange {}
+struct SyntaxKind {}
+struct NavigationTarget {}
+
+struct Test {}
+
+impl Test {
+ fn method(&self, mut param: i32) -> i32 { param * 2 }
+
+ fn from_syntax(
+ file_id: FileId,
+ name: SmolStr,
+ focus_range: Option<TextRange>,
+ full_range: TextRange,
+ kind: SyntaxKind,
+ docs: Option<String>,
+ ) -> NavigationTarget {
+ NavigationTarget {}
+ }
+}
+
+fn test_func(mut foo: i32, bar: i32, msg: &str, _: i32, last: i32) -> i32 {
+ foo + bar
+}
+
+fn main() {
+ let not_literal = 1;
+ let _: i32 = test_func(1, 2, "hello", 3, not_literal);
+ //^ foo ^ bar ^^^^^^^ msg ^^^^^^^^^^^ last
+ let t: Test = Test {};
+ t.method(123);
+ //^^^ param
+ Test::method(&t, 3456);
+ //^^ self ^^^^ param
+ Test::from_syntax(
+ FileId {},
+ "impl".into(),
+ //^^^^^^^^^^^^^ name
+ None,
+ //^^^^ focus_range
+ TextRange {},
+ //^^^^^^^^^^^^ full_range
+ SyntaxKind {},
+ //^^^^^^^^^^^^^ kind
+ None,
+ //^^^^ docs
+ );
+}"#,
+ );
+ }
+
+ #[test]
+ fn parameter_hint_heuristics() {
+ check_params(
+ r#"
+fn check(ra_fixture_thing: &str) {}
+
+fn map(f: i32) {}
+fn filter(predicate: i32) {}
+
+fn strip_suffix(suffix: &str) {}
+fn stripsuffix(suffix: &str) {}
+fn same(same: u32) {}
+fn same2(_same2: u32) {}
+
+fn enum_matches_param_name(completion_kind: CompletionKind) {}
+
+fn foo(param: u32) {}
+fn bar(param_eter: u32) {}
+
+enum CompletionKind {
+ Keyword,
+}
+
+fn non_ident_pat((a, b): (u32, u32)) {}
+
+fn main() {
+ const PARAM: u32 = 0;
+ foo(PARAM);
+ foo(!PARAM);
+ // ^^^^^^ param
+ check("");
+
+ map(0);
+ filter(0);
+
+ strip_suffix("");
+ stripsuffix("");
+ //^^ suffix
+ same(0);
+ same2(0);
+
+ enum_matches_param_name(CompletionKind::Keyword);
+
+ let param = 0;
+ foo(param);
+ foo(param as _);
+ let param_end = 0;
+ foo(param_end);
+ let start_param = 0;
+ foo(start_param);
+ let param2 = 0;
+ foo(param2);
+ //^^^^^^ param
+
+ macro_rules! param {
+ () => {};
+ };
+ foo(param!());
+
+ let param_eter = 0;
+ bar(param_eter);
+ let param_eter_end = 0;
+ bar(param_eter_end);
+ let start_param_eter = 0;
+ bar(start_param_eter);
+ let param_eter2 = 0;
+ bar(param_eter2);
+ //^^^^^^^^^^^ param_eter
+
+ non_ident_pat((0, 0));
+}"#,
+ );
+ }
+
+ // Type-Hint tests
+
+ #[test]
+ fn type_hints_only() {
+ check_types(
+ r#"
+fn foo(a: i32, b: i32) -> i32 { a + b }
+fn main() {
+ let _x = foo(4, 4);
+ //^^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn type_hints_bindings_after_at() {
+ check_types(
+ r#"
+//- minicore: option
+fn main() {
+ let ref foo @ bar @ ref mut baz = 0;
+ //^^^ &i32
+ //^^^ i32
+ //^^^ &mut i32
+ let [x @ ..] = [0];
+ //^ [i32; 1]
+ if let x @ Some(_) = Some(0) {}
+ //^ Option<i32>
+ let foo @ (bar, baz) = (3, 3);
+ //^^^ (i32, i32)
+ //^^^ i32
+ //^^^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn default_generic_types_should_not_be_displayed() {
+ check(
+ r#"
+struct Test<K, T = u8> { k: K, t: T }
+
+fn main() {
+ let zz = Test { t: 23u8, k: 33 };
+ //^^ Test<i32>
+ let zz_ref = &zz;
+ //^^^^^^ &Test<i32>
+ let test = || zz;
+ //^^^^ || -> Test<i32>
+}"#,
+ );
+ }
+
+ #[test]
+ fn shorten_iterators_in_associated_params() {
+ check_types(
+ r#"
+//- minicore: iterators
+use core::iter;
+
+pub struct SomeIter<T> {}
+
+impl<T> SomeIter<T> {
+ pub fn new() -> Self { SomeIter {} }
+ pub fn push(&mut self, t: T) {}
+}
+
+impl<T> Iterator for SomeIter<T> {
+ type Item = T;
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+}
+
+fn main() {
+ let mut some_iter = SomeIter::new();
+ //^^^^^^^^^ SomeIter<Take<Repeat<i32>>>
+ some_iter.push(iter::repeat(2).take(2));
+ let iter_of_iters = some_iter.take(2);
+ //^^^^^^^^^^^^^ impl Iterator<Item = impl Iterator<Item = i32>>
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn infer_call_method_return_associated_types_with_generic() {
+ check_types(
+ r#"
+ pub trait Default {
+ fn default() -> Self;
+ }
+ pub trait Foo {
+ type Bar: Default;
+ }
+
+ pub fn quux<T: Foo>() -> T::Bar {
+ let y = Default::default();
+ //^ <T as Foo>::Bar
+
+ y
+ }
+ "#,
+ );
+ }
+
+ #[test]
+ fn fn_hints() {
+ check_types(
+ r#"
+//- minicore: fn, sized
+fn foo() -> impl Fn() { loop {} }
+fn foo1() -> impl Fn(f64) { loop {} }
+fn foo2() -> impl Fn(f64, f64) { loop {} }
+fn foo3() -> impl Fn(f64, f64) -> u32 { loop {} }
+fn foo4() -> &'static dyn Fn(f64, f64) -> u32 { loop {} }
+fn foo5() -> &'static dyn Fn(&'static dyn Fn(f64, f64) -> u32, f64) -> u32 { loop {} }
+fn foo6() -> impl Fn(f64, f64) -> u32 + Sized { loop {} }
+fn foo7() -> *const (impl Fn(f64, f64) -> u32 + Sized) { loop {} }
+
+fn main() {
+ let foo = foo();
+ // ^^^ impl Fn()
+ let foo = foo1();
+ // ^^^ impl Fn(f64)
+ let foo = foo2();
+ // ^^^ impl Fn(f64, f64)
+ let foo = foo3();
+ // ^^^ impl Fn(f64, f64) -> u32
+ let foo = foo4();
+ // ^^^ &dyn Fn(f64, f64) -> u32
+ let foo = foo5();
+ // ^^^ &dyn Fn(&dyn Fn(f64, f64) -> u32, f64) -> u32
+ let foo = foo6();
+ // ^^^ impl Fn(f64, f64) -> u32
+ let foo = foo7();
+ // ^^^ *const impl Fn(f64, f64) -> u32
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn check_hint_range_limit() {
+ let fixture = r#"
+ //- minicore: fn, sized
+ fn foo() -> impl Fn() { loop {} }
+ fn foo1() -> impl Fn(f64) { loop {} }
+ fn foo2() -> impl Fn(f64, f64) { loop {} }
+ fn foo3() -> impl Fn(f64, f64) -> u32 { loop {} }
+ fn foo4() -> &'static dyn Fn(f64, f64) -> u32 { loop {} }
+ fn foo5() -> &'static dyn Fn(&'static dyn Fn(f64, f64) -> u32, f64) -> u32 { loop {} }
+ fn foo6() -> impl Fn(f64, f64) -> u32 + Sized { loop {} }
+ fn foo7() -> *const (impl Fn(f64, f64) -> u32 + Sized) { loop {} }
+
+ fn main() {
+ let foo = foo();
+ let foo = foo1();
+ let foo = foo2();
+ // ^^^ impl Fn(f64, f64)
+ let foo = foo3();
+ // ^^^ impl Fn(f64, f64) -> u32
+ let foo = foo4();
+ let foo = foo5();
+ let foo = foo6();
+ let foo = foo7();
+ }
+ "#;
+ let (analysis, file_id) = fixture::file(fixture);
+ let expected = extract_annotations(&*analysis.file_text(file_id).unwrap());
+ let inlay_hints = analysis
+ .inlay_hints(
+ &InlayHintsConfig { type_hints: true, ..DISABLED_CONFIG },
+ file_id,
+ Some(FileRange {
+ file_id,
+ range: TextRange::new(TextSize::from(500), TextSize::from(600)),
+ }),
+ )
+ .unwrap();
+ let actual =
+ inlay_hints.into_iter().map(|it| (it.range, it.label.to_string())).collect::<Vec<_>>();
+ assert_eq!(expected, actual, "\nExpected:\n{:#?}\n\nActual:\n{:#?}", expected, actual);
+ }
+
+ #[test]
+ fn fn_hints_ptr_rpit_fn_parentheses() {
+ check_types(
+ r#"
+//- minicore: fn, sized
+trait Trait {}
+
+fn foo1() -> *const impl Fn() { loop {} }
+fn foo2() -> *const (impl Fn() + Sized) { loop {} }
+fn foo3() -> *const (impl Fn() + ?Sized) { loop {} }
+fn foo4() -> *const (impl Sized + Fn()) { loop {} }
+fn foo5() -> *const (impl ?Sized + Fn()) { loop {} }
+fn foo6() -> *const (impl Fn() + Trait) { loop {} }
+fn foo7() -> *const (impl Fn() + Sized + Trait) { loop {} }
+fn foo8() -> *const (impl Fn() + ?Sized + Trait) { loop {} }
+fn foo9() -> *const (impl Fn() -> u8 + ?Sized) { loop {} }
+fn foo10() -> *const (impl Fn() + Sized + ?Sized) { loop {} }
+
+fn main() {
+ let foo = foo1();
+ // ^^^ *const impl Fn()
+ let foo = foo2();
+ // ^^^ *const impl Fn()
+ let foo = foo3();
+ // ^^^ *const (impl Fn() + ?Sized)
+ let foo = foo4();
+ // ^^^ *const impl Fn()
+ let foo = foo5();
+ // ^^^ *const (impl Fn() + ?Sized)
+ let foo = foo6();
+ // ^^^ *const (impl Fn() + Trait)
+ let foo = foo7();
+ // ^^^ *const (impl Fn() + Trait)
+ let foo = foo8();
+ // ^^^ *const (impl Fn() + Trait + ?Sized)
+ let foo = foo9();
+ // ^^^ *const (impl Fn() -> u8 + ?Sized)
+ let foo = foo10();
+ // ^^^ *const impl Fn()
+}
+"#,
+ )
+ }
+
+ #[test]
+ fn unit_structs_have_no_type_hints() {
+ check_types(
+ r#"
+//- minicore: result
+struct SyntheticSyntax;
+
+fn main() {
+ match Ok(()) {
+ Ok(_) => (),
+ Err(SyntheticSyntax) => (),
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn let_statement() {
+ check_types(
+ r#"
+#[derive(PartialEq)]
+enum Option<T> { None, Some(T) }
+
+#[derive(PartialEq)]
+struct Test { a: Option<u32>, b: u8 }
+
+fn main() {
+ struct InnerStruct {}
+
+ let test = 54;
+ //^^^^ i32
+ let test: i32 = 33;
+ let mut test = 33;
+ //^^^^ i32
+ let _ = 22;
+ let test = "test";
+ //^^^^ &str
+ let test = InnerStruct {};
+ //^^^^ InnerStruct
+
+ let test = unresolved();
+
+ let test = (42, 'a');
+ //^^^^ (i32, char)
+ let (a, (b, (c,)) = (2, (3, (9.2,));
+ //^ i32 ^ i32 ^ f64
+ let &x = &92;
+ //^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn if_expr() {
+ check_types(
+ r#"
+//- minicore: option
+struct Test { a: Option<u32>, b: u8 }
+
+fn main() {
+ let test = Some(Test { a: Some(3), b: 1 });
+ //^^^^ Option<Test>
+ if let None = &test {};
+ if let test = &test {};
+ //^^^^ &Option<Test>
+ if let Some(test) = &test {};
+ //^^^^ &Test
+ if let Some(Test { a, b }) = &test {};
+ //^ &Option<u32> ^ &u8
+ if let Some(Test { a: x, b: y }) = &test {};
+ //^ &Option<u32> ^ &u8
+ if let Some(Test { a: Some(x), b: y }) = &test {};
+ //^ &u32 ^ &u8
+ if let Some(Test { a: None, b: y }) = &test {};
+ //^ &u8
+ if let Some(Test { b: y, .. }) = &test {};
+ //^ &u8
+ if test == None {}
+}"#,
+ );
+ }
+
+ #[test]
+ fn while_expr() {
+ check_types(
+ r#"
+//- minicore: option
+struct Test { a: Option<u32>, b: u8 }
+
+fn main() {
+ let test = Some(Test { a: Some(3), b: 1 });
+ //^^^^ Option<Test>
+ while let Some(Test { a: Some(x), b: y }) = &test {};
+ //^ &u32 ^ &u8
+}"#,
+ );
+ }
+
+ #[test]
+ fn match_arm_list() {
+ check_types(
+ r#"
+//- minicore: option
+struct Test { a: Option<u32>, b: u8 }
+
+fn main() {
+ match Some(Test { a: Some(3), b: 1 }) {
+ None => (),
+ test => (),
+ //^^^^ Option<Test>
+ Some(Test { a: Some(x), b: y }) => (),
+ //^ u32 ^ u8
+ _ => {}
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn complete_for_hint() {
+ check_types(
+ r#"
+//- minicore: iterator
+pub struct Vec<T> {}
+
+impl<T> Vec<T> {
+ pub fn new() -> Self { Vec {} }
+ pub fn push(&mut self, t: T) {}
+}
+
+impl<T> IntoIterator for Vec<T> {
+ type Item=T;
+}
+
+fn main() {
+ let mut data = Vec::new();
+ //^^^^ Vec<&str>
+ data.push("foo");
+ for i in data {
+ //^ &str
+ let z = i;
+ //^ &str
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn multi_dyn_trait_bounds() {
+ check_types(
+ r#"
+pub struct Vec<T> {}
+
+impl<T> Vec<T> {
+ pub fn new() -> Self { Vec {} }
+}
+
+pub struct Box<T> {}
+
+trait Display {}
+trait Sync {}
+
+fn main() {
+ // The block expression wrapping disables the constructor hint hiding logic
+ let _v = { Vec::<Box<&(dyn Display + Sync)>>::new() };
+ //^^ Vec<Box<&(dyn Display + Sync)>>
+ let _v = { Vec::<Box<*const (dyn Display + Sync)>>::new() };
+ //^^ Vec<Box<*const (dyn Display + Sync)>>
+ let _v = { Vec::<Box<dyn Display + Sync>>::new() };
+ //^^ Vec<Box<dyn Display + Sync>>
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn shorten_iterator_hints() {
+ check_types(
+ r#"
+//- minicore: iterators
+use core::iter;
+
+struct MyIter;
+
+impl Iterator for MyIter {
+ type Item = ();
+ fn next(&mut self) -> Option<Self::Item> {
+ None
+ }
+}
+
+fn main() {
+ let _x = MyIter;
+ //^^ MyIter
+ let _x = iter::repeat(0);
+ //^^ impl Iterator<Item = i32>
+ fn generic<T: Clone>(t: T) {
+ let _x = iter::repeat(t);
+ //^^ impl Iterator<Item = T>
+ let _chained = iter::repeat(t).take(10);
+ //^^^^^^^^ impl Iterator<Item = T>
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn skip_constructor_and_enum_type_hints() {
+ check_with_config(
+ InlayHintsConfig {
+ type_hints: true,
+ hide_named_constructor_hints: true,
+ ..DISABLED_CONFIG
+ },
+ r#"
+//- minicore: try, option
+use core::ops::ControlFlow;
+
+mod x {
+ pub mod y { pub struct Foo; }
+ pub struct Foo;
+ pub enum AnotherEnum {
+ Variant()
+ };
+}
+struct Struct;
+struct TupleStruct();
+
+impl Struct {
+ fn new() -> Self {
+ Struct
+ }
+ fn try_new() -> ControlFlow<(), Self> {
+ ControlFlow::Continue(Struct)
+ }
+}
+
+struct Generic<T>(T);
+impl Generic<i32> {
+ fn new() -> Self {
+ Generic(0)
+ }
+}
+
+enum Enum {
+ Variant(u32)
+}
+
+fn times2(value: i32) -> i32 {
+ 2 * value
+}
+
+fn main() {
+ let enumb = Enum::Variant(0);
+
+ let strukt = x::Foo;
+ let strukt = x::y::Foo;
+ let strukt = Struct;
+ let strukt = Struct::new();
+
+ let tuple_struct = TupleStruct();
+
+ let generic0 = Generic::new();
+ // ^^^^^^^^ Generic<i32>
+ let generic1 = Generic(0);
+ // ^^^^^^^^ Generic<i32>
+ let generic2 = Generic::<i32>::new();
+ let generic3 = <Generic<i32>>::new();
+ let generic4 = Generic::<i32>(0);
+
+
+ let option = Some(0);
+ // ^^^^^^ Option<i32>
+ let func = times2;
+ // ^^^^ fn times2(i32) -> i32
+ let closure = |x: i32| x * 2;
+ // ^^^^^^^ |i32| -> i32
+}
+
+fn fallible() -> ControlFlow<()> {
+ let strukt = Struct::try_new()?;
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn shows_constructor_type_hints_when_enabled() {
+ check_types(
+ r#"
+//- minicore: try
+use core::ops::ControlFlow;
+
+struct Struct;
+struct TupleStruct();
+
+impl Struct {
+ fn new() -> Self {
+ Struct
+ }
+ fn try_new() -> ControlFlow<(), Self> {
+ ControlFlow::Continue(Struct)
+ }
+}
+
+struct Generic<T>(T);
+impl Generic<i32> {
+ fn new() -> Self {
+ Generic(0)
+ }
+}
+
+fn main() {
+ let strukt = Struct::new();
+ // ^^^^^^ Struct
+ let tuple_struct = TupleStruct();
+ // ^^^^^^^^^^^^ TupleStruct
+ let generic0 = Generic::new();
+ // ^^^^^^^^ Generic<i32>
+ let generic1 = Generic::<i32>::new();
+ // ^^^^^^^^ Generic<i32>
+ let generic2 = <Generic<i32>>::new();
+ // ^^^^^^^^ Generic<i32>
+}
+
+fn fallible() -> ControlFlow<()> {
+ let strukt = Struct::try_new()?;
+ // ^^^^^^ Struct
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn closures() {
+ check(
+ r#"
+fn main() {
+ let mut start = 0;
+ //^^^^^ i32
+ (0..2).for_each(|increment | { start += increment; });
+ //^^^^^^^^^ i32
+
+ let multiply =
+ //^^^^^^^^ |i32, i32| -> i32
+ | a, b| a * b
+ //^ i32 ^ i32
+
+ ;
+
+ let _: i32 = multiply(1, 2);
+ //^ a ^ b
+ let multiply_ref = &multiply;
+ //^^^^^^^^^^^^ &|i32, i32| -> i32
+
+ let return_42 = || 42;
+ //^^^^^^^^^ || -> i32
+ || { 42 };
+ //^^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn return_type_hints_for_closure_without_block() {
+ check_with_config(
+ InlayHintsConfig {
+ closure_return_type_hints: ClosureReturnTypeHints::Always,
+ ..DISABLED_CONFIG
+ },
+ r#"
+fn main() {
+ let a = || { 0 };
+ //^^ i32
+ let b = || 0;
+ //^^ i32
+}"#,
+ );
+ }
+
+ #[test]
+ fn skip_closure_type_hints() {
+ check_with_config(
+ InlayHintsConfig {
+ type_hints: true,
+ hide_closure_initialization_hints: true,
+ ..DISABLED_CONFIG
+ },
+ r#"
+//- minicore: fn
+fn main() {
+ let multiple_2 = |x: i32| { x * 2 };
+
+ let multiple_2 = |x: i32| x * 2;
+ // ^^^^^^^^^^ |i32| -> i32
+
+ let (not) = (|x: bool| { !x });
+ // ^^^ |bool| -> bool
+
+ let (is_zero, _b) = (|x: usize| { x == 0 }, false);
+ // ^^^^^^^ |usize| -> bool
+ // ^^ bool
+
+ let plus_one = |x| { x + 1 };
+ // ^ u8
+ foo(plus_one);
+
+ let add_mul = bar(|x: u8| { x + 1 });
+ // ^^^^^^^ impl FnOnce(u8) -> u8 + ?Sized
+
+ let closure = if let Some(6) = add_mul(2).checked_sub(1) {
+ // ^^^^^^^ fn(i32) -> i32
+ |x: i32| { x * 2 }
+ } else {
+ |x: i32| { x * 3 }
+ };
+}
+
+fn foo(f: impl FnOnce(u8) -> u8) {}
+
+fn bar(f: impl FnOnce(u8) -> u8) -> impl FnOnce(u8) -> u8 {
+ move |x: u8| f(x) * 2
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn hint_truncation() {
+ check_with_config(
+ InlayHintsConfig { max_length: Some(8), ..TEST_CONFIG },
+ r#"
+struct Smol<T>(T);
+
+struct VeryLongOuterName<T>(T);
+
+fn main() {
+ let a = Smol(0u32);
+ //^ Smol<u32>
+ let b = VeryLongOuterName(0usize);
+ //^ VeryLongOuterName<…>
+ let c = Smol(Smol(0u32))
+ //^ Smol<Smol<…>>
+}"#,
+ );
+ }
+
+ // Chaining hint tests
+
+ #[test]
+ fn chaining_hints_ignore_comments() {
+ check_expect(
+ InlayHintsConfig { type_hints: false, chaining_hints: true, ..DISABLED_CONFIG },
+ r#"
+struct A(B);
+impl A { fn into_b(self) -> B { self.0 } }
+struct B(C);
+impl B { fn into_c(self) -> C { self.0 } }
+struct C;
+
+fn main() {
+ let c = A(B(C))
+ .into_b() // This is a comment
+ // This is another comment
+ .into_c();
+}
+"#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 147..172,
+ kind: ChainingHint,
+ label: "B",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 147..172,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 147..154,
+ kind: ChainingHint,
+ label: "A",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 147..154,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn chaining_hints_without_newlines() {
+ check_chains(
+ r#"
+struct A(B);
+impl A { fn into_b(self) -> B { self.0 } }
+struct B(C);
+impl B { fn into_c(self) -> C { self.0 } }
+struct C;
+
+fn main() {
+ let c = A(B(C)).into_b().into_c();
+}"#,
+ );
+ }
+
+ #[test]
+ fn struct_access_chaining_hints() {
+ check_expect(
+ InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
+ r#"
+struct A { pub b: B }
+struct B { pub c: C }
+struct C(pub bool);
+struct D;
+
+impl D {
+ fn foo(&self) -> i32 { 42 }
+}
+
+fn main() {
+ let x = A { b: B { c: C(true) } }
+ .b
+ .c
+ .0;
+ let x = D
+ .foo();
+}"#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 143..190,
+ kind: ChainingHint,
+ label: "C",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 143..190,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 143..179,
+ kind: ChainingHint,
+ label: "B",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 143..179,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn generic_chaining_hints() {
+ check_expect(
+ InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
+ r#"
+struct A<T>(T);
+struct B<T>(T);
+struct C<T>(T);
+struct X<T,R>(T, R);
+
+impl<T> A<T> {
+ fn new(t: T) -> Self { A(t) }
+ fn into_b(self) -> B<T> { B(self.0) }
+}
+impl<T> B<T> {
+ fn into_c(self) -> C<T> { C(self.0) }
+}
+fn main() {
+ let c = A::new(X(42, true))
+ .into_b()
+ .into_c();
+}
+"#,
+ expect![[r#"
+ [
+ InlayHint {
+ range: 246..283,
+ kind: ChainingHint,
+ label: "B<X<i32, bool>>",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 246..283,
+ ),
+ ),
+ },
+ InlayHint {
+ range: 246..265,
+ kind: ChainingHint,
+ label: "A<X<i32, bool>>",
+ tooltip: Some(
+ HoverRanged(
+ FileId(
+ 0,
+ ),
+ 246..265,
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+    // Iterator adaptor chains should be shortened to `impl Iterator<Item = ..>`
+    // at each step instead of spelling out the nested adaptor types; the final
+    // non-iterator receiver (`&mut MyIter`) is shown as-is.
+    #[test]
+    fn shorten_iterator_chaining_hints() {
+        check_expect(
+            InlayHintsConfig { chaining_hints: true, ..DISABLED_CONFIG },
+            r#"
+//- minicore: iterators
+use core::iter;
+
+struct MyIter;
+
+impl Iterator for MyIter {
+    type Item = ();
+    fn next(&mut self) -> Option<Self::Item> {
+        None
+    }
+}
+
+fn main() {
+    let _x = MyIter.by_ref()
+        .take(5)
+        .by_ref()
+        .take(5)
+        .by_ref();
+}
+"#,
+            expect![[r#"
+                [
+                    InlayHint {
+                        range: 174..241,
+                        kind: ChainingHint,
+                        label: "impl Iterator<Item = ()>",
+                        tooltip: Some(
+                            HoverRanged(
+                                FileId(
+                                    0,
+                                ),
+                                174..241,
+                            ),
+                        ),
+                    },
+                    InlayHint {
+                        range: 174..224,
+                        kind: ChainingHint,
+                        label: "impl Iterator<Item = ()>",
+                        tooltip: Some(
+                            HoverRanged(
+                                FileId(
+                                    0,
+                                ),
+                                174..224,
+                            ),
+                        ),
+                    },
+                    InlayHint {
+                        range: 174..206,
+                        kind: ChainingHint,
+                        label: "impl Iterator<Item = ()>",
+                        tooltip: Some(
+                            HoverRanged(
+                                FileId(
+                                    0,
+                                ),
+                                174..206,
+                            ),
+                        ),
+                    },
+                    InlayHint {
+                        range: 174..189,
+                        kind: ChainingHint,
+                        label: "&mut MyIter",
+                        tooltip: Some(
+                            HoverRanged(
+                                FileId(
+                                    0,
+                                ),
+                                174..189,
+                            ),
+                        ),
+                    },
+                ]
+            "#]],
+        );
+    }
+
+    // Type, chaining and parameter hints must still be produced inside a
+    // function that is expanded through an attribute proc-macro
+    // (`#[proc_macros::identity]`).
+    #[test]
+    fn hints_in_attr_call() {
+        check_expect(
+            TEST_CONFIG,
+            r#"
+//- proc_macros: identity, input_replace
+struct Struct;
+impl Struct {
+    fn chain(self) -> Self {
+        self
+    }
+}
+#[proc_macros::identity]
+fn main() {
+    let strukt = Struct;
+    strukt
+        .chain()
+        .chain()
+        .chain();
+    Struct::chain(strukt);
+}
+"#,
+            expect![[r#"
+                [
+                    InlayHint {
+                        range: 124..130,
+                        kind: TypeHint,
+                        label: "Struct",
+                        tooltip: Some(
+                            HoverRanged(
+                                FileId(
+                                    0,
+                                ),
+                                124..130,
+                            ),
+                        ),
+                    },
+                    InlayHint {
+                        range: 145..185,
+                        kind: ChainingHint,
+                        label: "Struct",
+                        tooltip: Some(
+                            HoverRanged(
+                                FileId(
+                                    0,
+                                ),
+                                145..185,
+                            ),
+                        ),
+                    },
+                    InlayHint {
+                        range: 145..168,
+                        kind: ChainingHint,
+                        label: "Struct",
+                        tooltip: Some(
+                            HoverRanged(
+                                FileId(
+                                    0,
+                                ),
+                                145..168,
+                            ),
+                        ),
+                    },
+                    InlayHint {
+                        range: 222..228,
+                        kind: ParameterHint,
+                        label: "self",
+                        tooltip: Some(
+                            HoverOffset(
+                                FileId(
+                                    0,
+                                ),
+                                42,
+                            ),
+                        ),
+                    },
+                ]
+            "#]],
+        );
+    }
+
+ #[test]
+ fn hints_lifetimes() {
+ check(
+ r#"
+fn empty() {}
+
+fn no_gpl(a: &()) {}
+ //^^^^^^<'0>
+ // ^'0
+fn empty_gpl<>(a: &()) {}
+ // ^'0 ^'0
+fn partial<'b>(a: &(), b: &'b ()) {}
+// ^'0, $ ^'0
+fn partial<'a>(a: &'a (), b: &()) {}
+// ^'0, $ ^'0
+
+fn single_ret(a: &()) -> &() {}
+// ^^^^^^^^^^<'0>
+ // ^'0 ^'0
+fn full_mul(a: &(), b: &()) {}
+// ^^^^^^^^<'0, '1>
+ // ^'0 ^'1
+
+fn foo<'c>(a: &'c ()) -> &() {}
+ // ^'c
+
+fn nested_in(a: & &X< &()>) {}
+// ^^^^^^^^^<'0, '1, '2>
+ //^'0 ^'1 ^'2
+fn nested_out(a: &()) -> & &X< &()>{}
+// ^^^^^^^^^^<'0>
+ //^'0 ^'0 ^'0 ^'0
+
+impl () {
+ fn foo(&self) {}
+ // ^^^<'0>
+ // ^'0
+ fn foo(&self) -> &() {}
+ // ^^^<'0>
+ // ^'0 ^'0
+ fn foo(&self, a: &()) -> &() {}
+ // ^^^<'0, '1>
+ // ^'0 ^'1 ^'0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn hints_lifetimes_named() {
+ check_with_config(
+ InlayHintsConfig { param_names_for_lifetime_elision_hints: true, ..TEST_CONFIG },
+ r#"
+fn nested_in<'named>(named: & &X< &()>) {}
+// ^'named1, 'named2, 'named3, $
+ //^'named1 ^'named2 ^'named3
+"#,
+ );
+ }
+
+ #[test]
+ fn hints_lifetimes_trivial_skip() {
+ check_with_config(
+ InlayHintsConfig {
+ lifetime_elision_hints: LifetimeElisionHints::SkipTrivial,
+ ..TEST_CONFIG
+ },
+ r#"
+fn no_gpl(a: &()) {}
+fn empty_gpl<>(a: &()) {}
+fn partial<'b>(a: &(), b: &'b ()) {}
+fn partial<'a>(a: &'a (), b: &()) {}
+
+fn single_ret(a: &()) -> &() {}
+// ^^^^^^^^^^<'0>
+ // ^'0 ^'0
+fn full_mul(a: &(), b: &()) {}
+
+fn foo<'c>(a: &'c ()) -> &() {}
+ // ^'c
+
+fn nested_in(a: & &X< &()>) {}
+fn nested_out(a: &()) -> & &X< &()>{}
+// ^^^^^^^^^^<'0>
+ //^'0 ^'0 ^'0 ^'0
+
+impl () {
+ fn foo(&self) {}
+ fn foo(&self) -> &() {}
+ // ^^^<'0>
+ // ^'0 ^'0
+ fn foo(&self, a: &()) -> &() {}
+ // ^^^<'0, '1>
+ // ^'0 ^'1 ^'0
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn hints_lifetimes_static() {
+ check_with_config(
+ InlayHintsConfig {
+ lifetime_elision_hints: LifetimeElisionHints::Always,
+ ..TEST_CONFIG
+ },
+ r#"
+trait Trait {}
+static S: &str = "";
+// ^'static
+const C: &str = "";
+// ^'static
+const C: &dyn Trait = panic!();
+// ^'static
+
+impl () {
+ const C: &str = "";
+ const C: &dyn Trait = panic!();
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn hints_implicit_reborrow() {
+ check_with_config(
+ InlayHintsConfig {
+ reborrow_hints: ReborrowHints::Always,
+ parameter_hints: true,
+ ..DISABLED_CONFIG
+ },
+ r#"
+fn __() {
+ let unique = &mut ();
+ let r_mov = unique;
+ let foo: &mut _ = unique;
+ //^^^^^^ &mut *
+ ref_mut_id(unique);
+ //^^^^^^ mut_ref
+ //^^^^^^ &mut *
+ let shared = ref_id(unique);
+ //^^^^^^ shared_ref
+ //^^^^^^ &*
+ let mov = shared;
+ let r_mov: &_ = shared;
+ ref_id(shared);
+ //^^^^^^ shared_ref
+
+ identity(unique);
+ identity(shared);
+}
+fn identity<T>(t: T) -> T {
+ t
+}
+fn ref_mut_id(mut_ref: &mut ()) -> &mut () {
+ mut_ref
+ //^^^^^^^ &mut *
+}
+fn ref_id(shared_ref: &()) -> &() {
+ shared_ref
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn hints_binding_modes() {
+ check_with_config(
+ InlayHintsConfig { binding_mode_hints: true, ..DISABLED_CONFIG },
+ r#"
+fn __(
+ (x,): (u32,),
+ (x,): &(u32,),
+ //^^^^&
+ //^ ref
+ (x,): &mut (u32,)
+ //^^^^&mut
+ //^ ref mut
+) {
+ let (x,) = (0,);
+ let (x,) = &(0,);
+ //^^^^ &
+ //^ ref
+ let (x,) = &mut (0,);
+ //^^^^ &mut
+ //^ ref mut
+ let &mut (x,) = &mut (0,);
+ let (ref mut x,) = &mut (0,);
+ //^^^^^^^^^^^^ &mut
+ let &mut (ref mut x,) = &mut (0,);
+ let (mut x,) = &mut (0,);
+ //^^^^^^^^ &mut
+ match (0,) {
+ (x,) => ()
+ }
+ match &(0,) {
+ (x,) => ()
+ //^^^^ &
+ //^ ref
+ }
+ match &mut (0,) {
+ (x,) => ()
+ //^^^^ &mut
+ //^ ref mut
+ }
+}"#,
+ );
+ }
+
+ #[test]
+ fn hints_closing_brace() {
+ check_with_config(
+ InlayHintsConfig { closing_brace_hints_min_lines: Some(2), ..DISABLED_CONFIG },
+ r#"
+fn a() {}
+
+fn f() {
+} // no hint unless `}` is the last token on the line
+
+fn g() {
+ }
+//^ fn g
+
+fn h<T>(with: T, arguments: u8, ...) {
+ }
+//^ fn h
+
+trait Tr {
+ fn f();
+ fn g() {
+ }
+ //^ fn g
+ }
+//^ trait Tr
+impl Tr for () {
+ }
+//^ impl Tr for ()
+impl dyn Tr {
+ }
+//^ impl dyn Tr
+
+static S0: () = 0;
+static S1: () = {};
+static S2: () = {
+ };
+//^ static S2
+const _: () = {
+ };
+//^ const _
+
+mod m {
+ }
+//^ mod m
+
+m! {}
+m!();
+m!(
+ );
+//^ m!
+
+m! {
+ }
+//^ m!
+
+fn f() {
+ let v = vec![
+ ];
+ }
+//^ fn f
+"#,
+ );
+ }
+}
--- /dev/null
- // | VS Code | **Rust Analyzer: Join lines**
+use ide_assists::utils::extract_trivial_expression;
+use ide_db::syntax_helpers::node_ext::expr_as_name_ref;
+use itertools::Itertools;
+use syntax::{
+ ast::{self, AstNode, AstToken, IsString},
+ NodeOrToken, SourceFile, SyntaxElement,
+ SyntaxKind::{self, USE_TREE, WHITESPACE},
+ SyntaxToken, TextRange, TextSize, T,
+};
+
+use text_edit::{TextEdit, TextEditBuilder};
+
+/// Feature toggles for `join_lines`, supplied from the client configuration.
+pub struct JoinLinesConfig {
+    /// Merge `}` followed by `if` into `} else if` when two `if`s are joined.
+    pub join_else_if: bool,
+    /// Drop a trailing `,` that would end up directly before `)`, `]` or `}`.
+    pub remove_trailing_comma: bool,
+    /// Unwrap single-expression blocks (`{ expr }` -> `expr`) and collapse
+    /// single-item use-tree lists (`use foo::{bar};` -> `use foo::bar;`).
+    pub unwrap_trivial_blocks: bool,
+    /// Merge `let x;` immediately followed by `x = value;` into `let x = value;`.
+    pub join_assignments: bool,
+}
+
+// Feature: Join Lines
+//
+// Join selected lines into one, smartly fixing up whitespace, trailing commas, and braces.
+//
+// See
+// https://user-images.githubusercontent.com/1711539/124515923-4504e800-dde9-11eb-8d58-d97945a1a785.gif[this gif]
+// for the cases handled specially by joined lines.
+//
+// |===
+// | Editor | Action Name
+//
++// | VS Code | **rust-analyzer: Join lines**
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113020661-b6922200-917a-11eb-87c4-b75acc028f11.gif[]
+/// Computes the `TextEdit` that joins the lines covered by `range` into one.
+///
+/// An empty (cursor-only) range is first widened to the next `\n` at or after
+/// the cursor; if no newline follows, an empty edit is returned. Every newline
+/// inside the syntax element covering the range is then removed via
+/// `remove_newlines`.
+pub(crate) fn join_lines(
+    config: &JoinLinesConfig,
+    file: &SourceFile,
+    range: TextRange,
+) -> TextEdit {
+    let range = if range.is_empty() {
+        // No selection: target the first newline at or after the cursor.
+        let syntax = file.syntax();
+        let text = syntax.text().slice(range.start()..);
+        let pos = match text.find_char('\n') {
+            None => return TextEdit::builder().finish(),
+            Some(pos) => pos,
+        };
+        TextRange::at(range.start() + pos, TextSize::of('\n'))
+    } else {
+        range
+    };
+
+    let mut edit = TextEdit::builder();
+    match file.syntax().covering_element(range) {
+        NodeOrToken::Node(node) => {
+            for token in node.descendants_with_tokens().filter_map(|it| it.into_token()) {
+                remove_newlines(config, &mut edit, &token, range)
+            }
+        }
+        NodeOrToken::Token(token) => remove_newlines(config, &mut edit, &token, range),
+    };
+    edit.finish()
+}
+
+/// Removes every newline of `token` that falls inside `range`.
+fn remove_newlines(
+    config: &JoinLinesConfig,
+    edit: &mut TextEditBuilder,
+    token: &SyntaxToken,
+    range: TextRange,
+) {
+    let intersection = match range.intersect(token.text_range()) {
+        Some(range) => range,
+        None => return,
+    };
+
+    // Switch to token-local offsets so we can slice the token's text directly.
+    let range = intersection - token.text_range().start();
+    let text = token.text();
+    for (pos, _) in text[range].bytes().enumerate().filter(|&(_, b)| b == b'\n') {
+        let pos: TextSize = (pos as u32).into();
+        let offset = token.text_range().start() + range.start() + pos;
+        // Skip offsets already swallowed by a previously recorded edit.
+        if !edit.invalidates_offset(offset) {
+            remove_newline(config, edit, token, offset);
+        }
+    }
+}
+
+/// Removes the newline at `offset` inside `token`, together with the trailing
+/// indentation, applying the configured smart fix-ups (trailing commas,
+/// `else if` merging, trivial-block unwrapping, comment joining).
+fn remove_newline(
+    config: &JoinLinesConfig,
+    edit: &mut TextEditBuilder,
+    token: &SyntaxToken,
+    offset: TextSize,
+) {
+    if token.kind() != WHITESPACE || token.text().bytes().filter(|&b| b == b'\n').count() != 1 {
+        // The newline lives inside a multi-line token (e.g. a string literal)
+        // or a whitespace token holding several newlines: collapse this newline
+        // plus the following indentation into a single space.
+        let n_spaces_after_line_break = {
+            let suff = &token.text()[TextRange::new(
+                offset - token.text_range().start() + TextSize::of('\n'),
+                TextSize::of(token.text()),
+            )];
+            suff.bytes().take_while(|&b| b == b' ').count()
+        };
+
+        // Inside a string literal, drop the space entirely when the join point
+        // touches the opening or closing quote.
+        let mut no_space = false;
+        if let Some(string) = ast::String::cast(token.clone()) {
+            if let Some(range) = string.open_quote_text_range() {
+                cov_mark::hit!(join_string_literal_open_quote);
+                no_space |= range.end() == offset;
+            }
+            if let Some(range) = string.close_quote_text_range() {
+                cov_mark::hit!(join_string_literal_close_quote);
+                no_space |= range.start()
+                    == offset
+                        + TextSize::of('\n')
+                        + TextSize::try_from(n_spaces_after_line_break).unwrap();
+            }
+        }
+
+        let range = TextRange::at(offset, ((n_spaces_after_line_break + 1) as u32).into());
+        let replace_with = if no_space { "" } else { " " };
+        edit.replace(range, replace_with.to_string());
+        return;
+    }
+
+    // The node is between two other nodes
+    let (prev, next) = match (token.prev_sibling_or_token(), token.next_sibling_or_token()) {
+        (Some(prev), Some(next)) => (prev, next),
+        _ => return,
+    };
+
+    if config.remove_trailing_comma && prev.kind() == T![,] {
+        match next.kind() {
+            T![')'] | T![']'] => {
+                // Removes: trailing comma, newline (incl. surrounding whitespace)
+                edit.delete(TextRange::new(prev.text_range().start(), token.text_range().end()));
+                return;
+            }
+            T!['}'] => {
+                // Removes: comma, newline (incl. surrounding whitespace)
+                let space = match prev.prev_sibling_or_token() {
+                    Some(left) => compute_ws(left.kind(), next.kind()),
+                    None => " ",
+                };
+                edit.replace(
+                    TextRange::new(prev.text_range().start(), token.text_range().end()),
+                    space.to_string(),
+                );
+                return;
+            }
+            _ => (),
+        }
+    }
+
+    if config.join_else_if {
+        // `} \n if` becomes `} else if`, but only when the first `if` has no
+        // `else` of its own yet.
+        if let (Some(prev), Some(_next)) = (as_if_expr(&prev), as_if_expr(&next)) {
+            match prev.else_token() {
+                Some(_) => cov_mark::hit!(join_two_ifs_with_existing_else),
+                None => {
+                    cov_mark::hit!(join_two_ifs);
+                    edit.replace(token.text_range(), " else ".to_string());
+                    return;
+                }
+            }
+        }
+    }
+
+    if config.join_assignments {
+        if join_assignments(edit, &prev, &next).is_some() {
+            return;
+        }
+    }
+
+    if config.unwrap_trivial_blocks {
+        // Special case that turns something like:
+        //
+        // ```
+        // my_function({$0
+        //    <some-expr>
+        // })
+        // ```
+        //
+        // into `my_function(<some-expr>)`
+        if join_single_expr_block(edit, token).is_some() {
+            return;
+        }
+        // ditto for
+        //
+        // ```
+        // use foo::{$0
+        //    bar
+        // };
+        // ```
+        if join_single_use_tree(edit, token).is_some() {
+            return;
+        }
+    }
+
+    if let (Some(_), Some(next)) = (
+        prev.as_token().cloned().and_then(ast::Comment::cast),
+        next.as_token().cloned().and_then(ast::Comment::cast),
+    ) {
+        // Removes: newline (incl. surrounding whitespace), start of the next comment
+        edit.delete(TextRange::new(
+            token.text_range().start(),
+            next.syntax().text_range().start() + TextSize::of(next.prefix()),
+        ));
+        return;
+    }
+
+    // Remove newline but add a computed amount of whitespace characters
+    edit.replace(token.text_range(), compute_ws(prev.kind(), next.kind()).to_string());
+}
+
+/// Unwraps a trivial block (`{ expr }`) into `expr`, appending a comma when
+/// the block was a match-arm body that did not already have one.
+fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
+    let block_expr = ast::BlockExpr::cast(token.parent_ancestors().nth(1)?)?;
+    if !block_expr.is_standalone() {
+        return None;
+    }
+    let expr = extract_trivial_expression(&block_expr)?;
+
+    let block_range = block_expr.syntax().text_range();
+    let mut buf = expr.syntax().text().to_string();
+
+    // Match block needs to have a comma after the block
+    if let Some(match_arm) = block_expr.syntax().parent().and_then(ast::MatchArm::cast) {
+        if match_arm.comma_token().is_none() {
+            buf.push(',');
+        }
+    }
+
+    edit.replace(block_range, buf);
+
+    Some(())
+}
+
+/// Collapses a use-tree list containing exactly one tree (`{bar}`) into `bar`.
+fn join_single_use_tree(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {
+    let use_tree_list = ast::UseTreeList::cast(token.parent()?)?;
+    // `collect_tuple` succeeds only when there is exactly one use tree.
+    let (tree,) = use_tree_list.use_trees().collect_tuple()?;
+    edit.replace(use_tree_list.syntax().text_range(), tree.syntax().text().to_string());
+    Some(())
+}
+
+/// Merges `let pat;` directly followed by `pat = expr;` into `let pat = expr;`.
+///
+/// Only fires when the `let` has no initializer, its pattern is a plain
+/// identifier, and the following statement is a simple `=` assignment whose
+/// left-hand side names exactly that identifier.
+fn join_assignments(
+    edit: &mut TextEditBuilder,
+    prev: &SyntaxElement,
+    next: &SyntaxElement,
+) -> Option<()> {
+    let let_stmt = ast::LetStmt::cast(prev.as_node()?.clone())?;
+    if let_stmt.eq_token().is_some() {
+        cov_mark::hit!(join_assignments_already_initialized);
+        return None;
+    }
+    let let_ident_pat = match let_stmt.pat()? {
+        ast::Pat::IdentPat(it) => it,
+        _ => return None,
+    };
+
+    let expr_stmt = ast::ExprStmt::cast(next.as_node()?.clone())?;
+    let bin_expr = match expr_stmt.expr()? {
+        ast::Expr::BinExpr(it) => it,
+        _ => return None,
+    };
+    // Plain `=` only; compound assignments (`+=` etc.) cannot be merged.
+    if !matches!(bin_expr.op_kind()?, ast::BinaryOp::Assignment { op: None }) {
+        return None;
+    }
+    let lhs = bin_expr.lhs()?;
+    let name_ref = expr_as_name_ref(&lhs)?;
+
+    if name_ref.to_string() != let_ident_pat.syntax().to_string() {
+        cov_mark::hit!(join_assignments_mismatch);
+        return None;
+    }
+
+    // Delete from the `let`'s `;` through the assignment's left-hand side,
+    // leaving `let pat = expr;`.
+    edit.delete(let_stmt.semicolon_token()?.text_range().cover(lhs.syntax().text_range()));
+    Some(())
+}
+
+/// Interprets `element` as an `if` expression, looking through a wrapping
+/// expression statement if necessary.
+fn as_if_expr(element: &SyntaxElement) -> Option<ast::IfExpr> {
+    let mut node = element.as_node()?.clone();
+    if let Some(stmt) = ast::ExprStmt::cast(node.clone()) {
+        node = stmt.expr()?.syntax().clone();
+    }
+    ast::IfExpr::cast(node)
+}
+
+/// Decides which whitespace (`""` or `" "`) must separate `left` and `right`
+/// once the newline between them is gone.
+///
+/// No space is emitted directly after `(`/`[`, directly before `)`/`]`/`.`,
+/// or on either side of the braces of a use-tree list (`use foo::{bar};`).
+fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str {
+    let glue_after_left =
+        matches!(left, T!['('] | T!['[']) || (left == T!['{'] && right == USE_TREE);
+    let glue_before_right =
+        matches!(right, T![')'] | T![']'] | T![.]) || (right == T!['}'] && left == USE_TREE);
+    if glue_after_left || glue_before_right {
+        ""
+    } else {
+        " "
+    }
+}
+
+#[cfg(test)]
+mod tests {
+ use syntax::SourceFile;
+ use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range};
+
+ use super::*;
+
+    /// Joins lines at the `$0` cursor in `ra_fixture_before` with every
+    /// feature enabled and asserts that the result — including the tracked
+    /// cursor position — matches `ra_fixture_after`.
+    fn check_join_lines(ra_fixture_before: &str, ra_fixture_after: &str) {
+        let config = JoinLinesConfig {
+            join_else_if: true,
+            remove_trailing_comma: true,
+            unwrap_trivial_blocks: true,
+            join_assignments: true,
+        };
+
+        let (before_cursor_pos, before) = extract_offset(ra_fixture_before);
+        let file = SourceFile::parse(&before).ok().unwrap();
+
+        // An empty range means "join at the cursor position".
+        let range = TextRange::empty(before_cursor_pos);
+        let result = join_lines(&config, &file, range);
+
+        let actual = {
+            let mut actual = before;
+            result.apply(&mut actual);
+            actual
+        };
+        let actual_cursor_pos = result
+            .apply_to_offset(before_cursor_pos)
+            .expect("cursor position is affected by the edit");
+        let actual = add_cursor(&actual, actual_cursor_pos);
+        assert_eq_text!(ra_fixture_after, &actual);
+    }
+
+    /// Joins lines over the `$0…$0` selection in `ra_fixture_before` with
+    /// every feature enabled; unlike `check_join_lines`, the cursor position
+    /// is not tracked through the edit.
+    fn check_join_lines_sel(ra_fixture_before: &str, ra_fixture_after: &str) {
+        let config = JoinLinesConfig {
+            join_else_if: true,
+            remove_trailing_comma: true,
+            unwrap_trivial_blocks: true,
+            join_assignments: true,
+        };
+
+        let (sel, before) = extract_range(ra_fixture_before);
+        let parse = SourceFile::parse(&before);
+        let result = join_lines(&config, &parse.tree(), sel);
+        let actual = {
+            let mut actual = before;
+            result.apply(&mut actual);
+            actual
+        };
+        assert_eq_text!(ra_fixture_after, &actual);
+    }
+
+    // A trailing comma left directly before `)` after the join is removed.
+    #[test]
+    fn test_join_lines_comma() {
+        check_join_lines(
+            r"
+fn foo() {
+    $0foo(1,
+    )
+}
+",
+            r"
+fn foo() {
+    $0foo(1)
+}
+",
+        );
+    }
+
+ #[test]
+ fn test_join_lines_lambda_block() {
+ check_join_lines(
+ r"
+pub fn reparse(&self, edit: &AtomTextEdit) -> File {
+ $0self.incremental_reparse(edit).unwrap_or_else(|| {
+ self.full_reparse(edit)
+ })
+}
+",
+ r"
+pub fn reparse(&self, edit: &AtomTextEdit) -> File {
+ $0self.incremental_reparse(edit).unwrap_or_else(|| self.full_reparse(edit))
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_block() {
+ check_join_lines(
+ r"
+fn foo() {
+ foo($0{
+ 92
+ })
+}",
+ r"
+fn foo() {
+ foo($092)
+}",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_diverging_block() {
+ check_join_lines(
+ r"
+fn foo() {
+ loop {
+ match x {
+ 92 => $0{
+ continue;
+ }
+ }
+ }
+}
+ ",
+ r"
+fn foo() {
+ loop {
+ match x {
+ 92 => $0continue,
+ }
+ }
+}
+ ",
+ );
+ }
+
+ #[test]
+ fn join_lines_adds_comma_for_block_in_match_arm() {
+ check_join_lines(
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0{
+ u.foo()
+ }
+ Err(v) => v,
+ }
+}",
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0u.foo(),
+ Err(v) => v,
+ }
+}",
+ );
+ }
+
+ #[test]
+ fn join_lines_multiline_in_block() {
+ check_join_lines(
+ r"
+fn foo() {
+ match ty {
+ $0 Some(ty) => {
+ match ty {
+ _ => false,
+ }
+ }
+ _ => true,
+ }
+}
+",
+ r"
+fn foo() {
+ match ty {
+ $0 Some(ty) => match ty {
+ _ => false,
+ },
+ _ => true,
+ }
+}
+",
+ );
+ }
+
+ #[test]
+ fn join_lines_keeps_comma_for_block_in_match_arm() {
+ // We already have a comma
+ check_join_lines(
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0{
+ u.foo()
+ },
+ Err(v) => v,
+ }
+}",
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0u.foo(),
+ Err(v) => v,
+ }
+}",
+ );
+
+ // comma with whitespace between brace and ,
+ check_join_lines(
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0{
+ u.foo()
+ } ,
+ Err(v) => v,
+ }
+}",
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0u.foo() ,
+ Err(v) => v,
+ }
+}",
+ );
+
+ // comma with newline between brace and ,
+ check_join_lines(
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0{
+ u.foo()
+ }
+ ,
+ Err(v) => v,
+ }
+}",
+ r"
+fn foo(e: Result<U, V>) {
+ match e {
+ Ok(u) => $0u.foo()
+ ,
+ Err(v) => v,
+ }
+}",
+ );
+ }
+
+ #[test]
+ fn join_lines_keeps_comma_with_single_arg_tuple() {
+ // A single arg tuple
+ check_join_lines(
+ r"
+fn foo() {
+ let x = ($0{
+ 4
+ },);
+}",
+ r"
+fn foo() {
+ let x = ($04,);
+}",
+ );
+
+ // single arg tuple with whitespace between brace and comma
+ check_join_lines(
+ r"
+fn foo() {
+ let x = ($0{
+ 4
+ } ,);
+}",
+ r"
+fn foo() {
+ let x = ($04 ,);
+}",
+ );
+
+ // single arg tuple with newline between brace and comma
+ check_join_lines(
+ r"
+fn foo() {
+ let x = ($0{
+ 4
+ }
+ ,);
+}",
+ r"
+fn foo() {
+ let x = ($04
+ ,);
+}",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_use_items_left() {
+ // No space after the '{'
+ check_join_lines(
+ r"
+$0use syntax::{
+ TextSize, TextRange,
+};",
+ r"
+$0use syntax::{TextSize, TextRange,
+};",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_use_items_right() {
+ // No space after the '}'
+ check_join_lines(
+ r"
+use syntax::{
+$0 TextSize, TextRange
+};",
+ r"
+use syntax::{
+$0 TextSize, TextRange};",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_use_items_right_comma() {
+ // No space after the '}'
+ check_join_lines(
+ r"
+use syntax::{
+$0 TextSize, TextRange,
+};",
+ r"
+use syntax::{
+$0 TextSize, TextRange};",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_use_tree() {
+ check_join_lines(
+ r"
+use syntax::{
+ algo::$0{
+ find_token_at_offset,
+ },
+ ast,
+};",
+ r"
+use syntax::{
+ algo::$0find_token_at_offset,
+ ast,
+};",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_normal_comments() {
+ check_join_lines(
+ r"
+fn foo() {
+ // Hello$0
+ // world!
+}
+",
+ r"
+fn foo() {
+ // Hello$0 world!
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_doc_comments() {
+ check_join_lines(
+ r"
+fn foo() {
+ /// Hello$0
+ /// world!
+}
+",
+ r"
+fn foo() {
+ /// Hello$0 world!
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_mod_comments() {
+ check_join_lines(
+ r"
+fn foo() {
+ //! Hello$0
+ //! world!
+}
+",
+ r"
+fn foo() {
+ //! Hello$0 world!
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_multiline_comments_1() {
+ check_join_lines(
+ r"
+fn foo() {
+ // Hello$0
+ /* world! */
+}
+",
+ r"
+fn foo() {
+ // Hello$0 world! */
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_multiline_comments_2() {
+ check_join_lines(
+ r"
+fn foo() {
+ // The$0
+ /* quick
+ brown
+ fox! */
+}
+",
+ r"
+fn foo() {
+ // The$0 quick
+ brown
+ fox! */
+}
+",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_selection_fn_args() {
+ check_join_lines_sel(
+ r"
+fn foo() {
+ $0foo(1,
+ 2,
+ 3,
+ $0)
+}
+ ",
+ r"
+fn foo() {
+ foo(1, 2, 3)
+}
+ ",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_selection_struct() {
+ check_join_lines_sel(
+ r"
+struct Foo $0{
+ f: u32,
+}$0
+ ",
+ r"
+struct Foo { f: u32 }
+ ",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_selection_dot_chain() {
+ check_join_lines_sel(
+ r"
+fn foo() {
+ join($0type_params.type_params()
+ .filter_map(|it| it.name())
+ .map(|it| it.text())$0)
+}",
+ r"
+fn foo() {
+ join(type_params.type_params().filter_map(|it| it.name()).map(|it| it.text()))
+}",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_selection_lambda_block_body() {
+ check_join_lines_sel(
+ r"
+pub fn handle_find_matching_brace() {
+ params.offsets
+ .map(|offset| $0{
+ world.analysis().matching_brace(&file, offset).unwrap_or(offset)
+ }$0)
+ .collect();
+}",
+ r"
+pub fn handle_find_matching_brace() {
+ params.offsets
+ .map(|offset| world.analysis().matching_brace(&file, offset).unwrap_or(offset))
+ .collect();
+}",
+ );
+ }
+
+ #[test]
+ fn test_join_lines_commented_block() {
+ check_join_lines(
+ r"
+fn main() {
+ let _ = {
+ // $0foo
+ // bar
+ 92
+ };
+}
+ ",
+ r"
+fn main() {
+ let _ = {
+ // $0foo bar
+ 92
+ };
+}
+ ",
+ )
+ }
+
+ #[test]
+ fn join_lines_mandatory_blocks_block() {
+ check_join_lines(
+ r"
+$0fn foo() {
+ 92
+}
+ ",
+ r"
+$0fn foo() { 92
+}
+ ",
+ );
+
+ check_join_lines(
+ r"
+fn foo() {
+ $0if true {
+ 92
+ }
+}
+ ",
+ r"
+fn foo() {
+ $0if true { 92
+ }
+}
+ ",
+ );
+
+ check_join_lines(
+ r"
+fn foo() {
+ $0loop {
+ 92
+ }
+}
+ ",
+ r"
+fn foo() {
+ $0loop { 92
+ }
+}
+ ",
+ );
+
+ check_join_lines(
+ r"
+fn foo() {
+ $0unsafe {
+ 92
+ }
+}
+ ",
+ r"
+fn foo() {
+ $0unsafe { 92
+ }
+}
+ ",
+ );
+ }
+
+ #[test]
+ fn join_string_literal() {
+ {
+ cov_mark::check!(join_string_literal_open_quote);
+ check_join_lines(
+ r#"
+fn main() {
+ $0"
+hello
+";
+}
+"#,
+ r#"
+fn main() {
+ $0"hello
+";
+}
+"#,
+ );
+ }
+
+ {
+ cov_mark::check!(join_string_literal_close_quote);
+ check_join_lines(
+ r#"
+fn main() {
+ $0"hello
+";
+}
+"#,
+ r#"
+fn main() {
+ $0"hello";
+}
+"#,
+ );
+ check_join_lines(
+ r#"
+fn main() {
+ $0r"hello
+ ";
+}
+"#,
+ r#"
+fn main() {
+ $0r"hello";
+}
+"#,
+ );
+ }
+
+ check_join_lines(
+ r#"
+fn main() {
+ "
+$0hello
+world
+";
+}
+"#,
+ r#"
+fn main() {
+ "
+$0hello world
+";
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn join_last_line_empty() {
+ check_join_lines(
+ r#"
+fn main() {$0}
+"#,
+ r#"
+fn main() {$0}
+"#,
+ );
+ }
+
+    // With `join_else_if` enabled, `}` followed by a fresh `if` on the next
+    // line is merged into `} else if`.
+    #[test]
+    fn join_two_ifs() {
+        cov_mark::check!(join_two_ifs);
+        check_join_lines(
+            r#"
+fn main() {
+    if foo {
+
+    }$0
+    if bar {
+
+    }
+}
+"#,
+            r#"
+fn main() {
+    if foo {
+
+    }$0 else if bar {
+
+    }
+}
+"#,
+        );
+    }
+
+ #[test]
+ fn join_two_ifs_with_existing_else() {
+ cov_mark::check!(join_two_ifs_with_existing_else);
+ check_join_lines(
+ r#"
+fn main() {
+ if foo {
+
+ } else {
+
+ }$0
+ if bar {
+
+ }
+}
+"#,
+ r#"
+fn main() {
+ if foo {
+
+ } else {
+
+ }$0 if bar {
+
+ }
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn join_assignments() {
+ check_join_lines(
+ r#"
+fn foo() {
+ $0let foo;
+ foo = "bar";
+}
+"#,
+ r#"
+fn foo() {
+ $0let foo = "bar";
+}
+"#,
+ );
+
+ cov_mark::check!(join_assignments_mismatch);
+ check_join_lines(
+ r#"
+fn foo() {
+ let foo;
+ let qux;$0
+ foo = "bar";
+}
+"#,
+ r#"
+fn foo() {
+ let foo;
+ let qux;$0 foo = "bar";
+}
+"#,
+ );
+
+ cov_mark::check!(join_assignments_already_initialized);
+ check_join_lines(
+ r#"
+fn foo() {
+ let foo = "bar";$0
+ foo = "bar";
+}
+"#,
+ r#"
+fn foo() {
+ let foo = "bar";$0 foo = "bar";
+}
+"#,
+ );
+ }
+}
--- /dev/null
- // | VS Code | **Rust Analyzer: Find matching brace**
+use syntax::{
+ ast::{self, AstNode},
+ SourceFile, SyntaxKind, TextSize, T,
+};
+
+// Feature: Matching Brace
+//
+// If the cursor is on any brace (`<>(){}[]||`) which is a part of a brace-pair,
+// moves cursor to the matching brace. It uses the actual parser to determine
+// braces, so it won't confuse generics with comparisons.
+//
+// |===
+// | Editor | Action Name
+//
++// | VS Code | **rust-analyzer: Find matching brace**
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113065573-04298180-91b1-11eb-8dec-d4e2a202f304.gif[]
+/// If `offset` sits on one half of a brace pair, returns the offset of its
+/// matching partner within the same syntax node; otherwise `None`.
+pub(crate) fn matching_brace(file: &SourceFile, offset: TextSize) -> Option<TextSize> {
+    // Kept in partner pairs so that `idx ^ 1` flips any brace to its match
+    // (`|` is its own partner, hence it appears twice).
+    const BRACES: &[SyntaxKind] =
+        &[T!['{'], T!['}'], T!['['], T![']'], T!['('], T![')'], T![<], T![>], T![|], T![|]];
+    let (brace_token, brace_idx) = file
+        .syntax()
+        .token_at_offset(offset)
+        .filter_map(|node| {
+            let idx = BRACES.iter().position(|&brace| brace == node.kind())?;
+            Some((node, idx))
+        })
+        // Up to two tokens can touch the offset; `.last()` prefers the later one.
+        .last()?;
+    let parent = brace_token.parent()?;
+    // `|` only forms a brace pair in closure parameter lists; in or-patterns
+    // (`1 | 2`) it is an ordinary operator and must not match.
+    if brace_token.kind() == T![|] && !ast::ParamList::can_cast(parent.kind()) {
+        cov_mark::hit!(pipes_not_braces);
+        return None;
+    }
+    let matching_kind = BRACES[brace_idx ^ 1];
+    let matching_node = parent
+        .children_with_tokens()
+        .filter_map(|it| it.into_token())
+        .find(|node| node.kind() == matching_kind && node != &brace_token)?;
+    Some(matching_node.text_range().start())
+}
+
+#[cfg(test)]
+mod tests {
+ use test_utils::{add_cursor, assert_eq_text, extract_offset};
+
+ use super::*;
+
+    #[test]
+    fn test_matching_brace() {
+        // Moves the cursor from the brace at `$0` to its match; when no match
+        // is found the cursor must stay where it was.
+        fn do_check(before: &str, after: &str) {
+            let (pos, before) = extract_offset(before);
+            let parse = SourceFile::parse(&before);
+            let new_pos = match matching_brace(&parse.tree(), pos) {
+                None => pos,
+                Some(pos) => pos,
+            };
+            let actual = add_cursor(&before, new_pos);
+            assert_eq_text!(after, &actual);
+        }
+
+        do_check("struct Foo { a: i32, }$0", "struct Foo $0{ a: i32, }");
+        do_check("fn main() { |x: i32|$0 x * 2;}", "fn main() { $0|x: i32| x * 2;}");
+        do_check("fn main() { $0|x: i32| x * 2;}", "fn main() { |x: i32$0| x * 2;}");
+        do_check(
+            "fn func(x) { return (2 * (x + 3)$0) + 5;}",
+            "fn func(x) { return $0(2 * (x + 3)) + 5;}",
+        );
+
+        {
+            // `|` inside an or-pattern is not a brace pair: the cursor stays.
+            cov_mark::check!(pipes_not_braces);
+            do_check(
+                "fn main() { match 92 { 1 | 2 |$0 3 => 92 } }",
+                "fn main() { match 92 { 1 | 2 |$0 3 => 92 } }",
+            );
+        }
+    }
+}
--- /dev/null
- IdentClass::classify_token(sema, &token).map(IdentClass::definitions).map(|it| {
+//! This module generates [moniker](https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/#exportsImports)
+//! for LSIF and LSP.
+
+use hir::{db::DefDatabase, AsAssocItem, AssocItemContainer, Crate, Name, Semantics};
+use ide_db::{
+ base_db::{CrateOrigin, FileId, FileLoader, FilePosition, LangCrateOrigin},
+ defs::{Definition, IdentClass},
+ helpers::pick_best_token,
+ RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{AstNode, SyntaxKind::*, T};
+
+use crate::{doc_links::token_as_doc_comment, RangeInfo};
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct MonikerIdentifier {
+    /// Name of the defining crate; used as the moniker's leading segment.
+    crate_name: String,
+    /// Module/item path from the crate root down to the item itself.
+    path: Vec<Name>,
+}
+
+// Implement `Display` rather than `ToString` directly: the blanket
+// `impl<T: Display> ToString for T` keeps `.to_string()` working for all
+// existing callers while also making the identifier usable with `format!`
+// and friends (see clippy's `to_string_trait_impl` lint).
+impl std::fmt::Display for MonikerIdentifier {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        // Rendered as `crate::module::item`.
+        write!(f, "{}::{}", self.crate_name, self.path.iter().map(|x| x.to_string()).join("::"))
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum MonikerKind {
+ Import,
+ Export,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct MonikerResult {
+ pub identifier: MonikerIdentifier,
+ pub kind: MonikerKind,
+ pub package_information: PackageInformation,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct PackageInformation {
+ pub name: String,
+ pub repo: String,
+ pub version: String,
+}
+
+/// Returns the crate that owns `file_id`: the first relevant crate with a
+/// module whose file is exactly `file_id`, or `None` if there is no such crate.
+pub(crate) fn crate_for_file(db: &RootDatabase, file_id: FileId) -> Option<Crate> {
+    for &krate in db.relevant_crates(file_id).iter() {
+        let crate_def_map = db.crate_def_map(krate);
+        for (_, data) in crate_def_map.modules() {
+            if data.origin.file_id() == Some(file_id) {
+                return Some(krate.into());
+            }
+        }
+    }
+    None
+}
+
+/// Returns the moniker(s) for the definition(s) referenced at `offset`.
+///
+/// Doc-comment tokens are resolved through their doc-link target; all other
+/// tokens are first descended into macro expansions, then classified, with
+/// duplicate results removed.
+pub(crate) fn moniker(
+    db: &RootDatabase,
+    FilePosition { file_id, offset }: FilePosition,
+) -> Option<RangeInfo<Vec<MonikerResult>>> {
+    let sema = &Semantics::new(db);
+    let file = sema.parse(file_id).syntax().clone();
+    let current_crate = crate_for_file(db, file_id)?;
+    // Prefer identifier-like tokens over punctuation, and skip trivia entirely.
+    let original_token = pick_best_token(file.token_at_offset(offset), |kind| match kind {
+        IDENT
+        | INT_NUMBER
+        | LIFETIME_IDENT
+        | T![self]
+        | T![super]
+        | T![crate]
+        | T![Self]
+        | COMMENT => 2,
+        kind if kind.is_trivia() => 0,
+        _ => 1,
+    })?;
+    if let Some(doc_comment) = token_as_doc_comment(&original_token) {
+        return doc_comment.get_definition_with_descend_at(sema, offset, |def, _, _| {
+            let m = def_to_moniker(db, def, current_crate)?;
+            Some(RangeInfo::new(original_token.text_range(), vec![m]))
+        });
+    }
+    let navs = sema
+        .descend_into_macros(original_token.clone())
+        .into_iter()
+        .filter_map(|token| {
++            IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops).map(|it| {
+                it.into_iter().flat_map(|def| def_to_moniker(sema.db, def, current_crate))
+            })
+        })
+        .flatten()
+        .unique()
+        .collect::<Vec<_>>();
+    Some(RangeInfo::new(original_token.text_range(), navs))
+}
+
+/// Builds the moniker for `def`, if it is a nameable, non-local definition.
+///
+/// Locals, generic parameters and `Self` types get no moniker. The kind is
+/// `Export` when `def` lives in `from_crate` itself and `Import` otherwise.
+pub(crate) fn def_to_moniker(
+    db: &RootDatabase,
+    def: Definition,
+    from_crate: Crate,
+) -> Option<MonikerResult> {
+    if matches!(def, Definition::GenericParam(_) | Definition::SelfType(_) | Definition::Local(_)) {
+        return None;
+    }
+    let module = def.module(db)?;
+    let krate = module.krate();
+    let mut path = vec![];
+    path.extend(module.path_to_root(db).into_iter().filter_map(|x| x.name(db)));
+
+    // Handle associated items within a trait
+    if let Some(assoc) = def.as_assoc_item(db) {
+        let container = assoc.container(db);
+        match container {
+            AssocItemContainer::Trait(trait_) => {
+                // Because different traits can have functions with the same name,
+                // we have to include the trait name as part of the moniker for uniqueness.
+                path.push(trait_.name(db));
+            }
+            AssocItemContainer::Impl(impl_) => {
+                // Because a struct can implement multiple traits, for implementations
+                // we add both the struct name and the trait name to the path
+                if let Some(adt) = impl_.self_ty(db).as_adt() {
+                    path.push(adt.name(db));
+                }
+
+                if let Some(trait_) = impl_.trait_(db) {
+                    path.push(trait_.name(db));
+                }
+            }
+        }
+    }
+
+    // Fields are qualified by their parent struct/enum/union.
+    if let Definition::Field(it) = def {
+        path.push(it.parent_def(db).name(db));
+    }
+
+    path.push(def.name(db)?);
+    Some(MonikerResult {
+        identifier: MonikerIdentifier {
+            crate_name: krate.display_name(db)?.crate_name().to_string(),
+            path,
+        },
+        kind: if krate == from_crate { MonikerKind::Export } else { MonikerKind::Import },
+        package_information: {
+            let name = krate.display_name(db)?.to_string();
+            // crates.io crates carry their own repo/version; lang crates point
+            // at the rust-lang/rust library tree.
+            let (repo, version) = match krate.origin(db) {
+                CrateOrigin::CratesIo { repo } => (repo?, krate.version(db)?),
+                CrateOrigin::Lang(lang) => (
+                    "https://github.com/rust-lang/rust/".to_string(),
+                    match lang {
+                        LangCrateOrigin::Other => {
+                            "https://github.com/rust-lang/rust/library/".into()
+                        }
+                        lang => format!("https://github.com/rust-lang/rust/library/{lang}",),
+                    },
+                ),
+            };
+            PackageInformation { name, repo, version }
+        },
+    })
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::fixture;
+
+ use super::MonikerKind;
+
+    /// Asserts that the `$0` position in `ra_fixture` yields no moniker.
+    #[track_caller]
+    fn no_moniker(ra_fixture: &str) {
+        let (analysis, position) = fixture::position(ra_fixture);
+        if let Some(x) = analysis.moniker(position).unwrap() {
+            // Message wording fixed: "founded" -> "found".
+            assert_eq!(x.info.len(), 0, "Moniker found but no moniker expected: {:?}", x);
+        }
+    }
+
+    /// Asserts that the `$0` position in `ra_fixture` yields exactly one
+    /// moniker with the given identifier, package debug string, and kind.
+    #[track_caller]
+    fn check_moniker(ra_fixture: &str, identifier: &str, package: &str, kind: MonikerKind) {
+        let (analysis, position) = fixture::position(ra_fixture);
+        let x = analysis.moniker(position).unwrap().expect("no moniker found").info;
+        assert_eq!(x.len(), 1);
+        let x = x.into_iter().next().unwrap();
+        assert_eq!(identifier, x.identifier.to_string());
+        assert_eq!(package, format!("{:?}", x.package_information));
+        assert_eq!(kind, x.kind);
+    }
+
+ #[test]
+ fn basic() {
+ check_moniker(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::module::func;
+fn main() {
+ func$0();
+}
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub fn func() {}
+}
+"#,
+ "foo::module::func",
+ r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+ MonikerKind::Import,
+ );
+ check_moniker(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::module::func;
+fn main() {
+ func();
+}
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub fn func$0() {}
+}
+"#,
+ "foo::module::func",
+ r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+ MonikerKind::Export,
+ );
+ }
+
+ #[test]
+ fn moniker_for_trait() {
+ check_moniker(
+ r#"
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub trait MyTrait {
+ pub fn func$0() {}
+ }
+}
+"#,
+ "foo::module::MyTrait::func",
+ r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+ MonikerKind::Export,
+ );
+ }
+
+ #[test]
+ fn moniker_for_trait_constant() {
+ check_moniker(
+ r#"
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub trait MyTrait {
+ const MY_CONST$0: u8;
+ }
+}
+"#,
+ "foo::module::MyTrait::MY_CONST",
+ r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+ MonikerKind::Export,
+ );
+ }
+
+ #[test]
+ fn moniker_for_trait_type() {
+ check_moniker(
+ r#"
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub trait MyTrait {
+ type MyType$0;
+ }
+}
+"#,
+ "foo::module::MyTrait::MyType",
+ r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+ MonikerKind::Export,
+ );
+ }
+
+ #[test]
+ fn moniker_for_trait_impl_function() {
+ check_moniker(
+ r#"
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub trait MyTrait {
+ pub fn func() {}
+ }
+
+ struct MyStruct {}
+
+ impl MyTrait for MyStruct {
+ pub fn func$0() {}
+ }
+}
+"#,
+ "foo::module::MyStruct::MyTrait::func",
+ r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+ MonikerKind::Export,
+ );
+ }
+
+ #[test]
+ fn moniker_for_field() {
+ check_moniker(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::St;
+fn main() {
+ let x = St { a$0: 2 };
+}
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub struct St {
+ pub a: i32,
+}
+"#,
+ "foo::St::a",
+ r#"PackageInformation { name: "foo", repo: "https://a.b/foo.git", version: "0.1.0" }"#,
+ MonikerKind::Import,
+ );
+ }
+
+ #[test]
+ fn no_moniker_for_local() {
+ no_moniker(
+ r#"
+//- /lib.rs crate:main deps:foo
+use foo::module::func;
+fn main() {
+ func();
+}
+//- /foo/lib.rs crate:foo@CratesIo:0.1.0,https://a.b/foo.git
+pub mod module {
+ pub fn func() {
+ let x$0 = 2;
+ }
+}
+"#,
+ );
+ }
+}
--- /dev/null
- // | VS Code | **Rust Analyzer: Move item up**
- // | VS Code | **Rust Analyzer: Move item down**
+use std::{iter::once, mem};
+
+use hir::Semantics;
+use ide_db::{base_db::FileRange, helpers::pick_best_token, RootDatabase};
+use itertools::Itertools;
+use syntax::{algo, ast, match_ast, AstNode, SyntaxElement, SyntaxKind, SyntaxNode, TextRange};
+use text_edit::{TextEdit, TextEditBuilder};
+
+/// Direction in which "Move Item" shifts the item under the cursor.
+#[derive(Copy, Clone, Debug)]
+pub enum Direction {
+    // Swap with the previous sibling.
+    Up,
+    // Swap with the next sibling.
+    Down,
+}
+
+// Feature: Move Item
+//
+// Move item under cursor or selection up and down.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **rust-analyzer: Move item up**
+// | VS Code | **rust-analyzer: Move item down**
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113065576-04298180-91b1-11eb-91ce-4505e99ed598.gif[]
+pub(crate) fn move_item(
+    db: &RootDatabase,
+    range: FileRange,
+    direction: Direction,
+) -> Option<TextEdit> {
+    let sema = Semantics::new(db);
+    let source_file = sema.parse(range.file_id);
+    let cursor = range.range;
+
+    // An empty selection means "the token under the cursor"; otherwise take
+    // the smallest syntax element covering the whole selection.
+    let element = match cursor.is_empty() {
+        true => {
+            let token = pick_best_token(
+                source_file.syntax().token_at_offset(cursor.start()),
+                |kind| {
+                    if kind.is_trivia() {
+                        0
+                    } else if matches!(kind, SyntaxKind::IDENT | SyntaxKind::LIFETIME_IDENT) {
+                        2
+                    } else {
+                        1
+                    }
+                },
+            )?;
+            SyntaxElement::Token(token)
+        }
+        false => source_file.syntax().covering_element(cursor),
+    };
+
+    find_ancestors(element, direction, cursor)
+}
+
+/// Walks upwards from `item` and produces the move edit for the first
+/// ancestor node whose kind is in the movable set below. Returns `None`
+/// when nothing movable encloses `item`.
+fn find_ancestors(item: SyntaxElement, direction: Direction, range: TextRange) -> Option<TextEdit> {
+    let root = match item {
+        SyntaxElement::Node(node) => node,
+        SyntaxElement::Token(token) => token.parent()?,
+    };
+
+    // Node kinds that "Move Item" knows how to move. Keep this list free of
+    // duplicates (`MACRO_DEF` used to be listed twice).
+    let movable = [
+        SyntaxKind::ARG_LIST,
+        SyntaxKind::GENERIC_PARAM_LIST,
+        SyntaxKind::GENERIC_ARG_LIST,
+        SyntaxKind::VARIANT_LIST,
+        SyntaxKind::TYPE_BOUND_LIST,
+        SyntaxKind::MATCH_ARM,
+        SyntaxKind::PARAM,
+        SyntaxKind::LET_STMT,
+        SyntaxKind::EXPR_STMT,
+        SyntaxKind::IF_EXPR,
+        SyntaxKind::FOR_EXPR,
+        SyntaxKind::LOOP_EXPR,
+        SyntaxKind::WHILE_EXPR,
+        SyntaxKind::RETURN_EXPR,
+        SyntaxKind::MATCH_EXPR,
+        SyntaxKind::MACRO_CALL,
+        SyntaxKind::TYPE_ALIAS,
+        SyntaxKind::TRAIT,
+        SyntaxKind::IMPL,
+        SyntaxKind::MACRO_DEF,
+        SyntaxKind::STRUCT,
+        SyntaxKind::UNION,
+        SyntaxKind::ENUM,
+        SyntaxKind::FN,
+        SyntaxKind::MODULE,
+        SyntaxKind::USE,
+        SyntaxKind::STATIC,
+        SyntaxKind::CONST,
+        SyntaxKind::MACRO_RULES,
+    ];
+
+    let ancestor = once(root.clone())
+        .chain(root.ancestors())
+        .find(|ancestor| movable.contains(&ancestor.kind()))?;
+
+    move_in_direction(&ancestor, direction, range)
+}
+
+/// Produces the edit that moves `node` in `direction`. List-like nodes are
+/// handled element-wise via `swap_sibling_in_list`; everything else is
+/// swapped with its previous/next sibling node.
+fn move_in_direction(
+    node: &SyntaxNode,
+    direction: Direction,
+    range: TextRange,
+) -> Option<TextEdit> {
+    match_ast! {
+        match node {
+            ast::ArgList(it) => swap_sibling_in_list(node, it.args(), range, direction),
+            ast::GenericParamList(it) => swap_sibling_in_list(node, it.generic_params(), range, direction),
+            ast::GenericArgList(it) => swap_sibling_in_list(node, it.generic_args(), range, direction),
+            ast::VariantList(it) => swap_sibling_in_list(node, it.variants(), range, direction),
+            ast::TypeBoundList(it) => swap_sibling_in_list(node, it.bounds(), range, direction),
+            // Non-list node: swap with the adjacent sibling; `None` from
+            // prev/next_sibling (already first/last) aborts the whole edit.
+            _ => Some(replace_nodes(range, node, &match direction {
+                Direction::Up => node.prev_sibling(),
+                Direction::Down => node.next_sibling(),
+            }?))
+        }
+    }
+}
+
+/// Swaps the list element that contains `range` with its neighbour in
+/// `direction`; falls back to moving an enclosing node when the cursor is
+/// not on any element of `list`.
+fn swap_sibling_in_list<A: AstNode + Clone, I: Iterator<Item = A>>(
+    node: &SyntaxNode,
+    list: I,
+    range: TextRange,
+    direction: Direction,
+) -> Option<TextEdit> {
+    let neighbours = list.tuple_windows().find(|(left, right)| {
+        let anchor = match direction {
+            Direction::Up => right,
+            Direction::Down => left,
+        };
+        anchor.syntax().text_range().contains_range(range)
+    });
+
+    match neighbours {
+        Some((left, right)) => Some(replace_nodes(range, left.syntax(), right.syntax())),
+        None => {
+            // Cursor is beyond any movable list item (for example, on curly brace in enum).
+            // It's not necessary, that parent of list is movable (arg list's parent is not, for example),
+            // and we have to continue tree traversal to find suitable node.
+            find_ancestors(SyntaxElement::Node(node.parent()?), direction, range)
+        }
+    }
+}
+
+/// Builds the text edit that exchanges `first` and `second`, re-inserting a
+/// `$0` snippet cursor into whichever node contained the (empty) selection.
+fn replace_nodes<'a>(
+    range: TextRange,
+    mut first: &'a SyntaxNode,
+    mut second: &'a SyntaxNode,
+) -> TextEdit {
+    let cursor_offset = if range.is_empty() {
+        // FIXME: `applySnippetTextEdits` does not support non-empty selection ranges
+        if first.text_range().contains_range(range) {
+            Some(range.start() - first.text_range().start())
+        } else if second.text_range().contains_range(range) {
+            // Normalize so that `first` is always the node carrying the cursor.
+            mem::swap(&mut first, &mut second);
+            Some(range.start() - first.text_range().start())
+        } else {
+            None
+        }
+    } else {
+        None
+    };
+
+    let first_with_cursor = match cursor_offset {
+        Some(offset) => {
+            // Splice the snippet marker into the node's text at the cursor.
+            let mut item_text = first.text().to_string();
+            item_text.insert_str(offset.into(), "$0");
+            item_text
+        }
+        None => first.text().to_string(),
+    };
+
+    let mut edit = TextEditBuilder::default();
+
+    // Replace `first`'s range with `second`'s text via a diff, then overwrite
+    // `second`'s range with `first`'s (cursor-annotated) text.
+    algo::diff(first, second).into_text_edit(&mut edit);
+    edit.replace(second.text_range(), first_with_cursor);
+
+    edit.finish()
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::fixture;
+    use expect_test::{expect, Expect};
+
+    use crate::Direction;
+
+    /// Runs `move_item` over the fixture's selection (`$0$0` marks an empty
+    /// selection, `$0…$0` a non-empty one) in `direction`, applies the
+    /// resulting edit (or none) and compares the file against `expect`.
+    fn check(ra_fixture: &str, expect: Expect, direction: Direction) {
+        let (analysis, range) = fixture::range(ra_fixture);
+        let edit = analysis.move_item(range, direction).unwrap().unwrap_or_default();
+        let mut file = analysis.file_text(range.file_id).unwrap().to_string();
+        edit.apply(&mut file);
+        expect.assert_eq(&file);
+    }
+
+    #[test]
+    fn test_moves_match_arm_up() {
+        check(
+            r#"
+fn main() {
+    match true {
+        true => {
+            println!("Hello, world");
+        },
+        false =>$0$0 {
+            println!("Test");
+        }
+    };
+}
+"#,
+            expect![[r#"
+                fn main() {
+                    match true {
+                        false =>$0 {
+                            println!("Test");
+                        }
+                        true => {
+                            println!("Hello, world");
+                        },
+                    };
+                }
+            "#]],
+            Direction::Up,
+        );
+    }
+
+    #[test]
+    fn test_moves_match_arm_down() {
+        check(
+            r#"
+fn main() {
+    match true {
+        true =>$0$0 {
+            println!("Hello, world");
+        },
+        false => {
+            println!("Test");
+        }
+    };
+}
+"#,
+            expect![[r#"
+                fn main() {
+                    match true {
+                        false => {
+                            println!("Test");
+                        }
+                        true =>$0 {
+                            println!("Hello, world");
+                        },
+                    };
+                }
+            "#]],
+            Direction::Down,
+        );
+    }
+
+    #[test]
+    fn test_nowhere_to_move() {
+        check(
+            r#"
+fn main() {
+    match true {
+        true =>$0$0 {
+            println!("Hello, world");
+        },
+        false => {
+            println!("Test");
+        }
+    };
+}
+"#,
+            expect![[r#"
+                fn main() {
+                    match true {
+                        true => {
+                            println!("Hello, world");
+                        },
+                        false => {
+                            println!("Test");
+                        }
+                    };
+                }
+            "#]],
+            Direction::Up,
+        );
+    }
+
+    #[test]
+    fn test_moves_let_stmt_up() {
+        check(
+            r#"
+fn main() {
+    let test = 123;
+    let test2$0$0 = 456;
+}
+"#,
+            expect![[r#"
+                fn main() {
+                    let test2$0 = 456;
+                    let test = 123;
+                }
+            "#]],
+            Direction::Up,
+        );
+    }
+
+    #[test]
+    fn test_moves_expr_up() {
+        check(
+            r#"
+fn main() {
+    println!("Hello, world");
+    println!("All I want to say is...");$0$0
+}
+"#,
+            expect![[r#"
+                fn main() {
+                    println!("All I want to say is...");$0
+                    println!("Hello, world");
+                }
+            "#]],
+            Direction::Up,
+        );
+        check(
+            r#"
+fn main() {
+    println!("Hello, world");
+
+    if true {
+        println!("Test");
+    }$0$0
+}
+"#,
+            expect![[r#"
+                fn main() {
+                    if true {
+                        println!("Test");
+                    }$0
+
+                    println!("Hello, world");
+                }
+            "#]],
+            Direction::Up,
+        );
+        check(
+            r#"
+fn main() {
+    println!("Hello, world");
+
+    for i in 0..10 {
+        println!("Test");
+    }$0$0
+}
+"#,
+            expect![[r#"
+                fn main() {
+                    for i in 0..10 {
+                        println!("Test");
+                    }$0
+
+                    println!("Hello, world");
+                }
+            "#]],
+            Direction::Up,
+        );
+        check(
+            r#"
+fn main() {
+    println!("Hello, world");
+
+    loop {
+        println!("Test");
+    }$0$0
+}
+"#,
+            expect![[r#"
+                fn main() {
+                    loop {
+                        println!("Test");
+                    }$0
+
+                    println!("Hello, world");
+                }
+            "#]],
+            Direction::Up,
+        );
+        check(
+            r#"
+fn main() {
+    println!("Hello, world");
+
+    while true {
+        println!("Test");
+    }$0$0
+}
+"#,
+            expect![[r#"
+                fn main() {
+                    while true {
+                        println!("Test");
+                    }$0
+
+                    println!("Hello, world");
+                }
+            "#]],
+            Direction::Up,
+        );
+        check(
+            r#"
+fn main() {
+    println!("Hello, world");
+
+    return 123;$0$0
+}
+"#,
+            expect![[r#"
+                fn main() {
+                    return 123;$0
+
+                    println!("Hello, world");
+                }
+            "#]],
+            Direction::Up,
+        );
+    }
+
+    #[test]
+    fn test_nowhere_to_move_stmt() {
+        check(
+            r#"
+fn main() {
+    println!("All I want to say is...");$0$0
+    println!("Hello, world");
+}
+"#,
+            expect![[r#"
+                fn main() {
+                    println!("All I want to say is...");
+                    println!("Hello, world");
+                }
+            "#]],
+            Direction::Up,
+        );
+    }
+
+    #[test]
+    fn test_move_item() {
+        check(
+            r#"
+fn main() {}
+
+fn foo() {}$0$0
+"#,
+            expect![[r#"
+                fn foo() {}$0
+
+                fn main() {}
+            "#]],
+            Direction::Up,
+        );
+    }
+
+    #[test]
+    fn test_move_impl_up() {
+        check(
+            r#"
+struct Yay;
+
+trait Wow {}
+
+impl Wow for Yay $0$0{}
+"#,
+            expect![[r#"
+                struct Yay;
+
+                impl Wow for Yay $0{}
+
+                trait Wow {}
+            "#]],
+            Direction::Up,
+        );
+    }
+
+    #[test]
+    fn test_move_use_up() {
+        check(
+            r#"
+use std::vec::Vec;
+use std::collections::HashMap$0$0;
+"#,
+            expect![[r#"
+                use std::collections::HashMap$0;
+                use std::vec::Vec;
+            "#]],
+            Direction::Up,
+        );
+    }
+
+    #[test]
+    fn test_moves_match_expr_up() {
+        check(
+            r#"
+fn main() {
+    let test = 123;
+
+    $0match test {
+        456 => {},
+        _ => {}
+    };$0
+}
+"#,
+            expect![[r#"
+                fn main() {
+                    match test {
+                        456 => {},
+                        _ => {}
+                    };
+
+                    let test = 123;
+                }
+            "#]],
+            Direction::Up,
+        );
+    }
+
+    #[test]
+    fn test_moves_param() {
+        check(
+            r#"
+fn test(one: i32, two$0$0: u32) {}
+
+fn main() {
+    test(123, 456);
+}
+"#,
+            expect![[r#"
+                fn test(two$0: u32, one: i32) {}
+
+                fn main() {
+                    test(123, 456);
+                }
+            "#]],
+            Direction::Up,
+        );
+        check(
+            r#"
+fn f($0$0arg: u8, arg2: u16) {}
+"#,
+            expect![[r#"
+                fn f(arg2: u16, $0arg: u8) {}
+            "#]],
+            Direction::Down,
+        );
+    }
+
+    #[test]
+    fn test_moves_arg_up() {
+        check(
+            r#"
+fn test(one: i32, two: u32) {}
+
+fn main() {
+    test(123, 456$0$0);
+}
+"#,
+            expect![[r#"
+                fn test(one: i32, two: u32) {}
+
+                fn main() {
+                    test(456$0, 123);
+                }
+            "#]],
+            Direction::Up,
+        );
+    }
+
+    #[test]
+    fn test_moves_arg_down() {
+        check(
+            r#"
+fn test(one: i32, two: u32) {}
+
+fn main() {
+    test(123$0$0, 456);
+}
+"#,
+            expect![[r#"
+                fn test(one: i32, two: u32) {}
+
+                fn main() {
+                    test(456, 123$0);
+                }
+            "#]],
+            Direction::Down,
+        );
+    }
+
+    #[test]
+    fn test_nowhere_to_move_arg() {
+        check(
+            r#"
+fn test(one: i32, two: u32) {}
+
+fn main() {
+    test(123$0$0, 456);
+}
+"#,
+            expect![[r#"
+                fn test(one: i32, two: u32) {}
+
+                fn main() {
+                    test(123, 456);
+                }
+            "#]],
+            Direction::Up,
+        );
+    }
+
+    #[test]
+    fn test_moves_generic_param_up() {
+        check(
+            r#"
+struct Test<A, B$0$0>(A, B);
+
+fn main() {}
+"#,
+            expect![[r#"
+                struct Test<B$0, A>(A, B);
+
+                fn main() {}
+            "#]],
+            Direction::Up,
+        );
+    }
+
+    #[test]
+    fn test_moves_generic_arg_up() {
+        check(
+            r#"
+struct Test<A, B>(A, B);
+
+fn main() {
+    let t = Test::<i32, &str$0$0>(123, "yay");
+}
+"#,
+            expect![[r#"
+                struct Test<A, B>(A, B);
+
+                fn main() {
+                    let t = Test::<&str$0, i32>(123, "yay");
+                }
+            "#]],
+            Direction::Up,
+        );
+    }
+
+    #[test]
+    fn test_moves_variant_up() {
+        check(
+            r#"
+enum Hello {
+    One,
+    Two$0$0
+}
+
+fn main() {}
+"#,
+            expect![[r#"
+                enum Hello {
+                    Two$0,
+                    One
+                }
+
+                fn main() {}
+            "#]],
+            Direction::Up,
+        );
+    }
+
+    #[test]
+    fn test_moves_type_bound_up() {
+        check(
+            r#"
+trait One {}
+
+trait Two {}
+
+fn test<T: One + Two$0$0>(t: T) {}
+
+fn main() {}
+"#,
+            expect![[r#"
+                trait One {}
+
+                trait Two {}
+
+                fn test<T: Two$0 + One>(t: T) {}
+
+                fn main() {}
+            "#]],
+            Direction::Up,
+        );
+    }
+
+    #[test]
+    fn test_prioritizes_trait_items() {
+        check(
+            r#"
+struct Test;
+
+trait Yay {
+    type One;
+
+    type Two;
+
+    fn inner();
+}
+
+impl Yay for Test {
+    type One = i32;
+
+    type Two = u32;
+
+    fn inner() {$0$0
+        println!("Mmmm");
+    }
+}
+"#,
+            expect![[r#"
+                struct Test;
+
+                trait Yay {
+                    type One;
+
+                    type Two;
+
+                    fn inner();
+                }
+
+                impl Yay for Test {
+                    type One = i32;
+
+                    fn inner() {$0
+                        println!("Mmmm");
+                    }
+
+                    type Two = u32;
+                }
+            "#]],
+            Direction::Up,
+        );
+    }
+
+    #[test]
+    fn test_weird_nesting() {
+        check(
+            r#"
+fn test() {
+    mod hello {
+        fn inner() {}
+    }
+
+    mod hi {$0$0
+        fn inner() {}
+    }
+}
+"#,
+            expect![[r#"
+                fn test() {
+                    mod hi {$0
+                        fn inner() {}
+                    }
+
+                    mod hello {
+                        fn inner() {}
+                    }
+                }
+            "#]],
+            Direction::Up,
+        );
+    }
+
+    #[test]
+    fn test_cursor_at_item_start() {
+        check(
+            r#"
+$0$0#[derive(Debug)]
+enum FooBar {
+    Foo,
+    Bar,
+}
+
+fn main() {}
+"#,
+            expect![[r##"
+                fn main() {}
+
+                $0#[derive(Debug)]
+                enum FooBar {
+                    Foo,
+                    Bar,
+                }
+            "##]],
+            Direction::Down,
+        );
+        check(
+            r#"
+$0$0enum FooBar {
+    Foo,
+    Bar,
+}
+
+fn main() {}
+"#,
+            expect![[r#"
+                fn main() {}
+
+                $0enum FooBar {
+                    Foo,
+                    Bar,
+                }
+            "#]],
+            Direction::Down,
+        );
+        check(
+            r#"
+struct Test;
+
+trait SomeTrait {}
+
+$0$0impl SomeTrait for Test {}
+
+fn main() {}
+"#,
+            expect![[r#"
+                struct Test;
+
+                $0impl SomeTrait for Test {}
+
+                trait SomeTrait {}
+
+                fn main() {}
+            "#]],
+            Direction::Up,
+        );
+    }
+
+    #[test]
+    fn test_cursor_at_item_end() {
+        check(
+            r#"
+enum FooBar {
+    Foo,
+    Bar,
+}$0$0
+
+fn main() {}
+"#,
+            expect![[r#"
+                fn main() {}
+
+                enum FooBar {
+                    Foo,
+                    Bar,
+                }$0
+            "#]],
+            Direction::Down,
+        );
+        check(
+            r#"
+struct Test;
+
+trait SomeTrait {}
+
+impl SomeTrait for Test {}$0$0
+
+fn main() {}
+"#,
+            expect![[r#"
+                struct Test;
+
+                impl SomeTrait for Test {}$0
+
+                trait SomeTrait {}
+
+                fn main() {}
+            "#]],
+            Direction::Up,
+        );
+    }
+
+    #[test]
+    fn handles_empty_file() {
+        check(r#"$0$0"#, expect![[r#""#]], Direction::Up);
+    }
+}
--- /dev/null
- // | VS Code | **Rust Analyzer: Locate parent module**
+use hir::Semantics;
+use ide_db::{
+ base_db::{CrateId, FileId, FilePosition},
+ RootDatabase,
+};
+use itertools::Itertools;
+use syntax::{
+ algo::find_node_at_offset,
+ ast::{self, AstNode},
+};
+
+use crate::NavigationTarget;
+
+// Feature: Parent Module
+//
+// Navigates to the parent module of the current module.
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **rust-analyzer: Locate parent module**
+// |===
+//
+// image::https://user-images.githubusercontent.com/48062697/113065580-04c21800-91b1-11eb-9a32-00086161c0bd.gif[]
+
+/// This returns `Vec` because a module may be included from several places.
+pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec<NavigationTarget> {
+    let sema = Semantics::new(db);
+    let source_file = sema.parse(position.file_id);
+
+    let mut module = find_node_at_offset::<ast::Module>(source_file.syntax(), position.offset);
+
+    // If cursor is literally on `mod foo`, go to the grandpa.
+    if let Some(m) = &module {
+        if !m
+            .item_list()
+            .map_or(false, |it| it.syntax().text_range().contains_inclusive(position.offset))
+        {
+            cov_mark::hit!(test_resolve_parent_module_on_module_decl);
+            // Skip the module node itself and resolve to its closest enclosing module.
+            module = m.syntax().ancestors().skip(1).find_map(ast::Module::cast);
+        }
+    }
+
+    match module {
+        // Cursor is inside an inline module: navigate to that module's declaration.
+        Some(module) => sema
+            .to_def(&module)
+            .into_iter()
+            .map(|module| NavigationTarget::from_module_to_decl(db, module))
+            .collect(),
+        // No enclosing `mod` node: use the module(s) this file defines.
+        None => sema
+            .to_module_defs(position.file_id)
+            .map(|module| NavigationTarget::from_module_to_decl(db, module))
+            .collect(),
+    }
+}
+
+/// Resolves the crate(s) that `file_id` belongs to.
+///
+/// Returns a `Vec` for the same reason as `parent_module`: a file can be
+/// included into more than one module, hence more than one crate.
+pub(crate) fn crate_for(db: &RootDatabase, file_id: FileId) -> Vec<CrateId> {
+    let sema = Semantics::new(db);
+    let modules = sema.to_module_defs(file_id);
+    modules.map(|module| CrateId::from(module.krate())).unique().collect()
+}
+
+#[cfg(test)]
+mod tests {
+    use ide_db::base_db::FileRange;
+
+    use crate::fixture;
+
+    /// Runs `parent_module` at the `$0` position and asserts the returned
+    /// navigation targets match the fixture's `//^^^` annotations, in order.
+    fn check(ra_fixture: &str) {
+        let (analysis, position, expected) = fixture::annotations(ra_fixture);
+        let navs = analysis.parent_module(position).unwrap();
+        let navs = navs
+            .iter()
+            .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() })
+            .collect::<Vec<_>>();
+        assert_eq!(expected.into_iter().map(|(fr, _)| fr).collect::<Vec<_>>(), navs);
+    }
+
+    #[test]
+    fn test_resolve_parent_module() {
+        check(
+            r#"
+//- /lib.rs
+mod foo;
+  //^^^
+
+//- /foo.rs
+$0// empty
+"#,
+        );
+    }
+
+    #[test]
+    fn test_resolve_parent_module_on_module_decl() {
+        cov_mark::check!(test_resolve_parent_module_on_module_decl);
+        check(
+            r#"
+//- /lib.rs
+mod foo;
+  //^^^
+//- /foo.rs
+mod $0bar;
+
+//- /foo/bar.rs
+// empty
+"#,
+        );
+    }
+
+    #[test]
+    fn test_resolve_parent_module_for_inline() {
+        check(
+            r#"
+//- /lib.rs
+mod foo {
+    mod bar {
+        mod baz { $0 }
+    }     //^^^
+}
+"#,
+        );
+    }
+
+    #[test]
+    fn test_resolve_multi_parent_module() {
+        check(
+            r#"
+//- /main.rs
+mod foo;
+  //^^^
+#[path = "foo.rs"]
+mod bar;
+  //^^^
+//- /foo.rs
+$0
+"#,
+        );
+    }
+
+    #[test]
+    fn test_resolve_crate_root() {
+        let (analysis, file_id) = fixture::file(
+            r#"
+//- /foo.rs
+$0
+//- /main.rs
+mod foo;
+"#,
+        );
+        assert_eq!(analysis.crate_for(file_id).unwrap().len(), 1);
+    }
+
+    #[test]
+    fn test_resolve_multi_parent_crate() {
+        let (analysis, file_id) = fixture::file(
+            r#"
+//- /baz.rs
+$0
+//- /foo.rs crate:foo
+mod baz;
+//- /bar.rs crate:bar
+mod baz;
+"#,
+        );
+        assert_eq!(analysis.crate_for(file_id).unwrap().len(), 2);
+    }
+}
--- /dev/null
- // | VS Code | **Rust Analyzer: Run**
+use std::fmt;
+
+use ast::HasName;
+use cfg::CfgExpr;
+use hir::{AsAssocItem, HasAttrs, HasSource, HirDisplay, Semantics};
+use ide_assists::utils::test_related_attribute;
+use ide_db::{
+ base_db::{FilePosition, FileRange},
+ defs::Definition,
+ helpers::visit_file_defs,
+ search::SearchScope,
+ FxHashMap, FxHashSet, RootDatabase, SymbolKind,
+};
+use itertools::Itertools;
+use stdx::{always, format_to};
+use syntax::{
+ ast::{self, AstNode, HasAttrs as _},
+ SmolStr, SyntaxNode,
+};
+
+use crate::{references, FileId, NavigationTarget, ToNav, TryToNav};
+
+/// Something the client can execute: a test, test module, bench, doctest or binary.
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+pub struct Runnable {
+    // When set, `title()` prefixes the navigation target's name; used to
+    // disambiguate multiple runnables emitted from one macro expansion.
+    pub use_name_in_title: bool,
+    pub nav: NavigationTarget,
+    pub kind: RunnableKind,
+    // `cfg` condition guarding the item, if any.
+    pub cfg: Option<CfgExpr>,
+}
+
+/// Identifier handed to the test runner to select a single test.
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+pub enum TestId {
+    // Bare name; used when no canonical module path is available.
+    Name(SmolStr),
+    // Fully qualified path, e.g. `module::test_name`.
+    Path(String),
+}
+
+impl fmt::Display for TestId {
+    /// Renders the id as the bare name or the full path, whichever it holds.
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        let text: &dyn fmt::Display = match self {
+            TestId::Name(name) => name,
+            TestId::Path(path) => path,
+        };
+        text.fmt(f)
+    }
+}
+
+/// What kind of thing a `Runnable` executes.
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+pub enum RunnableKind {
+    // A function with a test-related attribute.
+    Test { test_id: TestId, attr: TestAttr },
+    // A module containing tests, run as a whole.
+    TestMod { path: String },
+    // A function with a `bench` attribute.
+    Bench { test_id: TestId },
+    // A documentation test.
+    DocTest { test_id: TestId },
+    // `main` in the crate root.
+    Bin,
+}
+
+/// Payload-free mirror of `RunnableKind`, letting tests assert on the kind of
+/// a runnable without spelling out its fields.
+#[cfg(test)]
+#[derive(Debug, Clone, Hash, PartialEq, Eq)]
+enum RunnableTestKind {
+    Test,
+    TestMod,
+    DocTest,
+    Bench,
+    Bin,
+}
+
+impl Runnable {
+    // test package::module::testname
+    /// Renders a command-like label, e.g. `test foo::bar` or `run <target>`;
+    /// binaries fall back to `"run binary"` when no `target` is given.
+    pub fn label(&self, target: Option<String>) -> String {
+        match &self.kind {
+            RunnableKind::Test { test_id, .. } => format!("test {}", test_id),
+            RunnableKind::TestMod { path } => format!("test-mod {}", path),
+            RunnableKind::Bench { test_id } => format!("bench {}", test_id),
+            RunnableKind::DocTest { test_id, .. } => format!("doctest {}", test_id),
+            RunnableKind::Bin => {
+                target.map_or_else(|| "run binary".to_string(), |t| format!("run {}", t))
+            }
+        }
+    }
+
+    /// Renders the lens/title text, e.g. `▶ Run Test`, optionally including
+    /// the target's name when `use_name_in_title` is set.
+    pub fn title(&self) -> String {
+        let mut s = String::from("▶\u{fe0e} Run ");
+        if self.use_name_in_title {
+            format_to!(s, "{}", self.nav.name);
+            if !matches!(self.kind, RunnableKind::Bin) {
+                s.push(' ');
+            }
+        }
+        let suffix = match &self.kind {
+            RunnableKind::TestMod { .. } => "Tests",
+            RunnableKind::Test { .. } => "Test",
+            RunnableKind::DocTest { .. } => "Doctest",
+            RunnableKind::Bench { .. } => "Bench",
+            RunnableKind::Bin => return s,
+        };
+        s.push_str(suffix);
+        s
+    }
+
+    /// Test-only projection of `kind` onto the payload-free `RunnableTestKind`.
+    #[cfg(test)]
+    fn test_kind(&self) -> RunnableTestKind {
+        match &self.kind {
+            RunnableKind::TestMod { .. } => RunnableTestKind::TestMod,
+            RunnableKind::Test { .. } => RunnableTestKind::Test,
+            RunnableKind::DocTest { .. } => RunnableTestKind::DocTest,
+            RunnableKind::Bench { .. } => RunnableTestKind::Bench,
+            RunnableKind::Bin => RunnableTestKind::Bin,
+        }
+    }
+}
+
+// Feature: Run
+//
+// Shows a popup suggesting to run a test/benchmark/binary **at the current cursor
+// location**. Super useful for repeatedly running just a single test. Do bind this
+// to a shortcut!
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **rust-analyzer: Run**
+// |===
+// image::https://user-images.githubusercontent.com/48062697/113065583-055aae80-91b1-11eb-958f-d67efcaf6a2f.gif[]
+/// Collects every runnable (tests, test mods, benches, doctests, binary)
+/// defined in `file_id`.
+pub(crate) fn runnables(db: &RootDatabase, file_id: FileId) -> Vec<Runnable> {
+    let sema = Semantics::new(db);
+
+    let mut res = Vec::new();
+    // Record all runnables that come from macro expansions here instead.
+    // In case an expansion creates multiple runnables we want to name them to avoid emitting a bunch of equally named runnables.
+    let mut in_macro_expansion = FxHashMap::<hir::HirFileId, Vec<Runnable>>::default();
+    let mut add_opt = |runnable: Option<Runnable>, def| {
+        if let Some(runnable) = runnable.filter(|runnable| {
+            // Sanity check: skip (and log) runnables that point elsewhere.
+            always!(
+                runnable.nav.file_id == file_id,
+                "tried adding a runnable pointing to a different file: {:?} for {:?}",
+                runnable.kind,
+                file_id
+            )
+        }) {
+            if let Some(def) = def {
+                // Route macro-generated runnables into `in_macro_expansion`.
+                let file_id = match def {
+                    Definition::Module(it) => it.declaration_source(db).map(|src| src.file_id),
+                    Definition::Function(it) => it.source(db).map(|src| src.file_id),
+                    _ => None,
+                };
+                if let Some(file_id) = file_id.filter(|file| file.call_node(db).is_some()) {
+                    in_macro_expansion.entry(file_id).or_default().push(runnable);
+                    return;
+                }
+            }
+            res.push(runnable);
+        }
+    };
+    visit_file_defs(&sema, file_id, &mut |def| {
+        let runnable = match def {
+            Definition::Module(it) => runnable_mod(&sema, it),
+            Definition::Function(it) => runnable_fn(&sema, it),
+            Definition::SelfType(impl_) => runnable_impl(&sema, &impl_),
+            _ => None,
+        };
+        add_opt(
+            runnable
+                .or_else(|| module_def_doctest(sema.db, def))
+                // #[macro_export] mbe macros are declared in the root, while their definition may reside in a different module
+                .filter(|it| it.nav.file_id == file_id),
+            Some(def),
+        );
+        if let Definition::SelfType(impl_) = def {
+            // Also visit the impl's associated items for tests/doctests.
+            impl_.items(db).into_iter().for_each(|assoc| {
+                let runnable = match assoc {
+                    hir::AssocItem::Function(it) => {
+                        runnable_fn(&sema, it).or_else(|| module_def_doctest(sema.db, it.into()))
+                    }
+                    hir::AssocItem::Const(it) => module_def_doctest(sema.db, it.into()),
+                    hir::AssocItem::TypeAlias(it) => module_def_doctest(sema.db, it.into()),
+                };
+                add_opt(runnable, Some(assoc.into()))
+            });
+        }
+    });
+
+    sema.to_module_defs(file_id)
+        .map(|it| runnable_mod_outline_definition(&sema, it))
+        .for_each(|it| add_opt(it, None));
+
+    // Only keep names in titles when an expansion produced several runnables.
+    res.extend(in_macro_expansion.into_iter().flat_map(|(_, runnables)| {
+        let use_name_in_title = runnables.len() != 1;
+        runnables.into_iter().map(move |mut r| {
+            r.use_name_in_title = use_name_in_title;
+            r
+        })
+    }));
+    res
+}
+
+// Feature: Related Tests
+//
+// Provides a sneak peek of all tests where the current item is used.
+//
+// The simplest way to use this feature is via the context menu:
+// - Right-click on the selected item. The context menu opens.
+// - Select **Peek related tests**
+//
+// |===
+// | Editor | Action Name
+//
+// | VS Code | **rust-analyzer: Peek related tests**
+// |===
+pub(crate) fn related_tests(
+    db: &RootDatabase,
+    position: FilePosition,
+    search_scope: Option<SearchScope>,
+) -> Vec<Runnable> {
+    let sema = Semantics::new(db);
+    let source = sema.parse(position.file_id).syntax().clone();
+
+    // Accumulate into a set so a test discovered twice is reported once.
+    let mut found: FxHashSet<Runnable> = FxHashSet::default();
+    find_related_tests(&sema, &source, position, search_scope, &mut found);
+
+    found.into_iter().collect()
+}
+
+/// Collects, into `tests`, runnables for the tests that reference the
+/// definition at `position` — either directly, or via an enclosing test module.
+fn find_related_tests(
+    sema: &Semantics<'_, RootDatabase>,
+    syntax: &SyntaxNode,
+    position: FilePosition,
+    search_scope: Option<SearchScope>,
+    tests: &mut FxHashSet<Runnable>,
+) {
+    // FIXME: why is this using references::find_defs, this should use ide_db::search
+    let defs = match references::find_defs(sema, syntax, position.offset) {
+        Some(defs) => defs,
+        None => return,
+    };
+    for def in defs {
+        // Every usage of the definition within the (optional) search scope.
+        let defs = def
+            .usages(sema)
+            .set_scope(search_scope.clone())
+            .all()
+            .references
+            .into_values()
+            .flatten();
+        for ref_ in defs {
+            let name_ref = match ref_.name {
+                ast::NameLike::NameRef(name_ref) => name_ref,
+                _ => continue,
+            };
+            // Find the function that contains the reference.
+            if let Some(fn_def) =
+                sema.ancestors_with_macros(name_ref.syntax().clone()).find_map(ast::Fn::cast)
+            {
+                if let Some(runnable) = as_test_runnable(sema, &fn_def) {
+                    // direct test
+                    tests.insert(runnable);
+                } else if let Some(module) = parent_test_module(sema, &fn_def) {
+                    // indirect test
+                    find_related_tests_in_module(sema, syntax, &fn_def, &module, tests);
+                }
+            }
+        }
+    }
+}
+
+/// Restricts the related-test search to `parent_module`'s text range and
+/// recurses from `fn_def`'s name position, feeding results into `tests`.
+fn find_related_tests_in_module(
+    sema: &Semantics<'_, RootDatabase>,
+    syntax: &SyntaxNode,
+    fn_def: &ast::Fn,
+    parent_module: &hir::Module,
+    tests: &mut FxHashSet<Runnable>,
+) {
+    let fn_name = match fn_def.name() {
+        Some(it) => it,
+        _ => return,
+    };
+    let mod_source = parent_module.definition_source(sema.db);
+    // The module may be an inline `mod`, a block or a whole file; take the
+    // corresponding syntax range in each case.
+    let range = match &mod_source.value {
+        hir::ModuleSource::Module(m) => m.syntax().text_range(),
+        hir::ModuleSource::BlockExpr(b) => b.syntax().text_range(),
+        hir::ModuleSource::SourceFile(f) => f.syntax().text_range(),
+    };
+
+    let file_id = mod_source.file_id.original_file(sema.db);
+    let mod_scope = SearchScope::file_range(FileRange { file_id, range });
+    let fn_pos = FilePosition { file_id, offset: fn_name.syntax().text_range().start() };
+    find_related_tests(sema, syntax, fn_pos, Some(mod_scope), tests)
+}
+
+/// Converts `fn_def` into a runnable iff it carries a test-related attribute
+/// (as determined by `test_related_attribute`).
+fn as_test_runnable(sema: &Semantics<'_, RootDatabase>, fn_def: &ast::Fn) -> Option<Runnable> {
+    test_related_attribute(fn_def)?;
+    let function = sema.to_def(fn_def)?;
+    runnable_fn(sema, function)
+}
+
+/// Finds the closest enclosing module of `fn_def` that qualifies as a test
+/// module (contains test functions or multiple test submodules).
+fn parent_test_module(sema: &Semantics<'_, RootDatabase>, fn_def: &ast::Fn) -> Option<hir::Module> {
+    fn_def.syntax().ancestors().find_map(|node| {
+        let module = sema.to_def(&ast::Module::cast(node)?)?;
+        has_test_function_or_multiple_test_submodules(sema, &module).then(|| module)
+    })
+}
+
+/// Creates a runnable for `def` when it is the binary entry point, a test or
+/// a bench; returns `None` for every other function.
+pub(crate) fn runnable_fn(
+    sema: &Semantics<'_, RootDatabase>,
+    def: hir::Function,
+) -> Option<Runnable> {
+    let func = def.source(sema.db)?;
+    let name = def.name(sema.db).to_smol_str();
+
+    let root = def.module(sema.db).krate().root_module(sema.db);
+
+    // `main` in the crate root is the binary entry point.
+    let kind = if name == "main" && def.module(sema.db) == root {
+        RunnableKind::Bin
+    } else {
+        // Prefer the canonical module path as the test id; fall back to the bare name.
+        let test_id = || {
+            let canonical_path = {
+                let def: hir::ModuleDef = def.into();
+                def.canonical_path(sema.db)
+            };
+            canonical_path.map(TestId::Path).unwrap_or(TestId::Name(name))
+        };
+
+        if test_related_attribute(&func.value).is_some() {
+            let attr = TestAttr::from_fn(&func.value);
+            RunnableKind::Test { test_id: test_id(), attr }
+        } else if func.value.has_atom_attr("bench") {
+            RunnableKind::Bench { test_id: test_id() }
+        } else {
+            return None;
+        }
+    };
+
+    let nav = NavigationTarget::from_named(
+        sema.db,
+        func.as_ref().map(|it| it as &dyn ast::HasName),
+        SymbolKind::Function,
+    );
+    let cfg = def.attrs(sema.db).cfg();
+    Some(Runnable { use_name_in_title: false, nav, kind, cfg })
+}
+
+/// Creates a test-mod runnable for `def`, provided it has test functions or
+/// multiple test submodules.
+pub(crate) fn runnable_mod(
+    sema: &Semantics<'_, RootDatabase>,
+    def: hir::Module,
+) -> Option<Runnable> {
+    if !has_test_function_or_multiple_test_submodules(sema, &def) {
+        return None;
+    }
+    // Full module path from the crate root, e.g. `foo::bar`.
+    let path =
+        def.path_to_root(sema.db).into_iter().rev().filter_map(|it| it.name(sema.db)).join("::");
+
+    let attrs = def.attrs(sema.db);
+    let cfg = attrs.cfg();
+    let nav = NavigationTarget::from_module_to_decl(sema.db, def);
+    Some(Runnable { use_name_in_title: false, nav, kind: RunnableKind::TestMod { path }, cfg })
+}
+
+/// Creates a doctest runnable for an `impl` block whose doc comments contain
+/// runnable examples.
+pub(crate) fn runnable_impl(
+    sema: &Semantics<'_, RootDatabase>,
+    def: &hir::Impl,
+) -> Option<Runnable> {
+    let attrs = def.attrs(sema.db);
+    if !has_runnable_doc_test(&attrs) {
+        return None;
+    }
+    let cfg = attrs.cfg();
+    let nav = def.try_to_nav(sema.db)?;
+    let ty = def.self_ty(sema.db);
+    let adt_name = ty.as_adt()?.name(sema.db);
+    let mut ty_args = ty.type_arguments().peekable();
+    // Render type arguments without separating spaces, e.g. `Foo<A,B>`.
+    let params = if ty_args.peek().is_some() {
+        format!("<{}>", ty_args.format_with(",", |ty, cb| cb(&ty.display(sema.db))))
+    } else {
+        String::new()
+    };
+    // Strip any remaining spaces so the rendered path stays whitespace-free.
+    // NOTE(review): presumably required by the doctest runner's naming — confirm.
+    let mut test_id = format!("{}{}", adt_name, params);
+    test_id.retain(|c| c != ' ');
+    let test_id = TestId::Path(test_id);
+
+    Some(Runnable { use_name_in_title: false, nav, kind: RunnableKind::DocTest { test_id }, cfg })
+}
+
+/// Creates a test mod runnable for outline modules at the top of their definition.
+fn runnable_mod_outline_definition(
+    sema: &Semantics<'_, RootDatabase>,
+    def: hir::Module,
+) -> Option<Runnable> {
+    if !has_test_function_or_multiple_test_submodules(sema, &def) {
+        return None;
+    }
+    // Full module path from the crate root, e.g. `foo::bar`.
+    let path =
+        def.path_to_root(sema.db).into_iter().rev().filter_map(|it| it.name(sema.db)).join("::");
+
+    let attrs = def.attrs(sema.db);
+    let cfg = attrs.cfg();
+    // Only outline modules (whose definition is a whole source file) qualify;
+    // inline/block modules are covered by `runnable_mod`.
+    match def.definition_source(sema.db).value {
+        hir::ModuleSource::SourceFile(_) => Some(Runnable {
+            use_name_in_title: false,
+            nav: def.to_nav(sema.db),
+            kind: RunnableKind::TestMod { path },
+            cfg,
+        }),
+        _ => None,
+    }
+}
+
+fn module_def_doctest(db: &RootDatabase, def: Definition) -> Option<Runnable> {
+ let attrs = match def {
+ Definition::Module(it) => it.attrs(db),
+ Definition::Function(it) => it.attrs(db),
+ Definition::Adt(it) => it.attrs(db),
+ Definition::Variant(it) => it.attrs(db),
+ Definition::Const(it) => it.attrs(db),
+ Definition::Static(it) => it.attrs(db),
+ Definition::Trait(it) => it.attrs(db),
+ Definition::TypeAlias(it) => it.attrs(db),
+ Definition::Macro(it) => it.attrs(db),
+ Definition::SelfType(it) => it.attrs(db),
+ _ => return None,
+ };
+ if !has_runnable_doc_test(&attrs) {
+ return None;
+ }
+ let def_name = def.name(db)?;
+ let path = (|| {
+ let mut path = String::new();
+ def.canonical_module_path(db)?
+ .flat_map(|it| it.name(db))
+ .for_each(|name| format_to!(path, "{}::", name));
+ // This probably belongs to canonical_path?
+ if let Some(assoc_item) = def.as_assoc_item(db) {
+ if let hir::AssocItemContainer::Impl(imp) = assoc_item.container(db) {
+ let ty = imp.self_ty(db);
+ if let Some(adt) = ty.as_adt() {
+ let name = adt.name(db);
+ let mut ty_args = ty.type_arguments().peekable();
+ format_to!(path, "{}", name);
+ if ty_args.peek().is_some() {
+ format_to!(
+ path,
+ "<{}>",
- &[DocTest],
++ ty_args.format_with(",", |ty, cb| cb(&ty.display(db)))
+ );
+ }
+ format_to!(path, "::{}", def_name);
++ path.retain(|c| c != ' ');
+ return Some(path);
+ }
+ }
+ }
+ format_to!(path, "{}", def_name);
+ Some(path)
+ })();
+
+ let test_id = path.map_or_else(|| TestId::Name(def_name.to_smol_str()), TestId::Path);
+
+ let mut nav = match def {
+ Definition::Module(def) => NavigationTarget::from_module_to_decl(db, def),
+ def => def.try_to_nav(db)?,
+ };
+ nav.focus_range = None;
+ nav.description = None;
+ nav.docs = None;
+ nav.kind = None;
+ let res = Runnable {
+ use_name_in_title: false,
+ nav,
+ kind: RunnableKind::DocTest { test_id },
+ cfg: attrs.cfg(),
+ };
+ Some(res)
+}
+
+#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
+pub struct TestAttr {
+ pub ignore: bool,
+}
+
+impl TestAttr {
+ fn from_fn(fn_def: &ast::Fn) -> TestAttr {
+ let ignore = fn_def
+ .attrs()
+ .filter_map(|attr| attr.simple_name())
+ .any(|attribute_text| attribute_text == "ignore");
+ TestAttr { ignore }
+ }
+}
+
+const RUSTDOC_FENCES: [&str; 2] = ["```", "~~~"];
+const RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE: &[&str] =
+ &["", "rust", "should_panic", "edition2015", "edition2018", "edition2021"];
+
+fn has_runnable_doc_test(attrs: &hir::Attrs) -> bool {
+ attrs.docs().map_or(false, |doc| {
+ let mut in_code_block = false;
+
+ for line in String::from(doc).lines() {
+ if let Some(header) =
+ RUSTDOC_FENCES.into_iter().find_map(|fence| line.strip_prefix(fence))
+ {
+ in_code_block = !in_code_block;
+
+ if in_code_block
+ && header
+ .split(',')
+ .all(|sub| RUSTDOC_CODE_BLOCK_ATTRIBUTES_RUNNABLE.contains(&sub.trim()))
+ {
+ return true;
+ }
+ }
+ }
+
+ false
+ })
+}
+
+// We could create runnables for modules with number_of_test_submodules > 0,
+// but that bloats the runnables for no real benefit, since all tests can be run by the submodule already
+fn has_test_function_or_multiple_test_submodules(
+ sema: &Semantics<'_, RootDatabase>,
+ module: &hir::Module,
+) -> bool {
+ let mut number_of_test_submodules = 0;
+
+ for item in module.declarations(sema.db) {
+ match item {
+ hir::ModuleDef::Function(f) => {
+ if let Some(it) = f.source(sema.db) {
+ if test_related_attribute(&it.value).is_some() {
+ return true;
+ }
+ }
+ }
+ hir::ModuleDef::Module(submodule) => {
+ if has_test_function_or_multiple_test_submodules(sema, &submodule) {
+ number_of_test_submodules += 1;
+ }
+ }
+ _ => (),
+ }
+ }
+
+ number_of_test_submodules > 1
+}
+
+#[cfg(test)]
+mod tests {
+ use expect_test::{expect, Expect};
+
+ use crate::fixture;
+
+ use super::{RunnableTestKind::*, *};
+
+ fn check(
+ ra_fixture: &str,
+ // FIXME: fold this into `expect` as well
+ actions: &[RunnableTestKind],
+ expect: Expect,
+ ) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let mut runnables = analysis.runnables(position.file_id).unwrap();
+ runnables.sort_by_key(|it| (it.nav.full_range.start(), it.nav.name.clone()));
+ expect.assert_debug_eq(&runnables);
+ assert_eq!(
+ actions,
+ runnables.into_iter().map(|it| it.test_kind()).collect::<Vec<_>>().as_slice()
+ );
+ }
+
+ fn check_tests(ra_fixture: &str, expect: Expect) {
+ let (analysis, position) = fixture::position(ra_fixture);
+ let tests = analysis.related_tests(position, None).unwrap();
+ expect.assert_debug_eq(&tests);
+ }
+
+ #[test]
+ fn test_runnables() {
+ check(
+ r#"
+//- /lib.rs
+$0
+fn main() {}
+
+#[test]
+fn test_foo() {}
+
+#[test]
+#[ignore]
+fn test_foo() {}
+
+#[bench]
+fn bench() {}
+
+mod not_a_root {
+ fn main() {}
+}
+"#,
+ &[TestMod, Bin, Test, Test, Bench],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..137,
+ name: "",
+ kind: Module,
+ },
+ kind: TestMod {
+ path: "",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..13,
+ focus_range: 4..8,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 15..39,
+ focus_range: 26..34,
+ name: "test_foo",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "test_foo",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 41..75,
+ focus_range: 62..70,
+ name: "test_foo",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "test_foo",
+ ),
+ attr: TestAttr {
+ ignore: true,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 77..99,
+ focus_range: 89..94,
+ name: "bench",
+ kind: Function,
+ },
+ kind: Bench {
+ test_id: Path(
+ "bench",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_doc_test() {
+ check(
+ r#"
+//- /lib.rs
+$0
+fn main() {}
+
+/// ```
+/// let x = 5;
+/// ```
+fn should_have_runnable() {}
+
+/// ```edition2018
+/// let x = 5;
+/// ```
+fn should_have_runnable_1() {}
+
+/// ```
+/// let z = 55;
+/// ```
+///
+/// ```ignore
+/// let z = 56;
+/// ```
+fn should_have_runnable_2() {}
+
+/**
+```rust
+let z = 55;
+```
+*/
+fn should_have_no_runnable_3() {}
+
+/**
+ ```rust
+ let z = 55;
+ ```
+*/
+fn should_have_no_runnable_4() {}
+
+/// ```no_run
+/// let z = 55;
+/// ```
+fn should_have_no_runnable() {}
+
+/// ```ignore
+/// let z = 55;
+/// ```
+fn should_have_no_runnable_2() {}
+
+/// ```compile_fail
+/// let z = 55;
+/// ```
+fn should_have_no_runnable_3() {}
+
+/// ```text
+/// arbitrary plain text
+/// ```
+fn should_have_no_runnable_4() {}
+
+/// ```text
+/// arbitrary plain text
+/// ```
+///
+/// ```sh
+/// $ shell code
+/// ```
+fn should_have_no_runnable_5() {}
+
+/// ```rust,no_run
+/// let z = 55;
+/// ```
+fn should_have_no_runnable_6() {}
+
+/// ```
+/// let x = 5;
+/// ```
+struct StructWithRunnable(String);
+
+/// ```
+/// let x = 5;
+/// ```
+impl StructWithRunnable {}
+
+trait Test {
+ fn test() -> usize {
+ 5usize
+ }
+}
+
+/// ```
+/// let x = 5;
+/// ```
+impl Test for StructWithRunnable {}
+"#,
+ &[Bin, DocTest, DocTest, DocTest, DocTest, DocTest, DocTest, DocTest, DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..13,
+ focus_range: 4..8,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 15..74,
+ name: "should_have_runnable",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "should_have_runnable",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 76..148,
+ name: "should_have_runnable_1",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "should_have_runnable_1",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 150..254,
+ name: "should_have_runnable_2",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "should_have_runnable_2",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 256..320,
+ name: "should_have_no_runnable_3",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "should_have_no_runnable_3",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 322..398,
+ name: "should_have_no_runnable_4",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "should_have_no_runnable_4",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 900..965,
+ name: "StructWithRunnable",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "StructWithRunnable",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 967..1024,
+ focus_range: 1003..1021,
+ name: "impl",
+ kind: Impl,
+ },
+ kind: DocTest {
+ test_id: Path(
+ "StructWithRunnable",
+ ),
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1088..1154,
+ focus_range: 1133..1151,
+ name: "impl",
+ kind: Impl,
+ },
+ kind: DocTest {
+ test_id: Path(
+ "StructWithRunnable",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_doc_test_in_impl() {
+ check(
+ r#"
+//- /lib.rs
+$0
+fn main() {}
+
+struct Data;
+impl Data {
+ /// ```
+ /// let x = 5;
+ /// ```
+ fn foo() {}
+}
+"#,
+ &[Bin, DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..13,
+ focus_range: 4..8,
+ name: "main",
+ kind: Function,
+ },
+ kind: Bin,
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 44..98,
+ name: "foo",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "Data::foo",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_module() {
+ check(
+ r#"
+//- /lib.rs
+$0
+mod test_mod {
+ #[test]
+ fn test_foo1() {}
+}
+"#,
+ &[TestMod, Test],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..51,
+ focus_range: 5..13,
+ name: "test_mod",
+ kind: Module,
+ description: "mod test_mod",
+ },
+ kind: TestMod {
+ path: "test_mod",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 20..49,
+ focus_range: 35..44,
+ name: "test_foo1",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "test_mod::test_foo1",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn only_modules_with_test_functions_or_more_than_one_test_submodule_have_runners() {
+ check(
+ r#"
+//- /lib.rs
+$0
+mod root_tests {
+ mod nested_tests_0 {
+ mod nested_tests_1 {
+ #[test]
+ fn nested_test_11() {}
+
+ #[test]
+ fn nested_test_12() {}
+ }
+
+ mod nested_tests_2 {
+ #[test]
+ fn nested_test_2() {}
+ }
+
+ mod nested_tests_3 {}
+ }
+
+ mod nested_tests_4 {}
+}
+"#,
+ &[TestMod, TestMod, Test, Test, TestMod, Test],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 22..323,
+ focus_range: 26..40,
+ name: "nested_tests_0",
+ kind: Module,
+ description: "mod nested_tests_0",
+ },
+ kind: TestMod {
+ path: "root_tests::nested_tests_0",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 51..192,
+ focus_range: 55..69,
+ name: "nested_tests_1",
+ kind: Module,
+ description: "mod nested_tests_1",
+ },
+ kind: TestMod {
+ path: "root_tests::nested_tests_0::nested_tests_1",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 84..126,
+ focus_range: 107..121,
+ name: "nested_test_11",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "root_tests::nested_tests_0::nested_tests_1::nested_test_11",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 140..182,
+ focus_range: 163..177,
+ name: "nested_test_12",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "root_tests::nested_tests_0::nested_tests_1::nested_test_12",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 202..286,
+ focus_range: 206..220,
+ name: "nested_tests_2",
+ kind: Module,
+ description: "mod nested_tests_2",
+ },
+ kind: TestMod {
+ path: "root_tests::nested_tests_0::nested_tests_2",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 235..276,
+ focus_range: 258..271,
+ name: "nested_test_2",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "root_tests::nested_tests_0::nested_tests_2::nested_test_2",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_with_feature() {
+ check(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo
+$0
+#[test]
+#[cfg(feature = "foo")]
+fn test_foo1() {}
+"#,
+ &[TestMod, Test],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..51,
+ name: "",
+ kind: Module,
+ },
+ kind: TestMod {
+ path: "",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..50,
+ focus_range: 36..45,
+ name: "test_foo1",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "test_foo1",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: Some(
+ Atom(
+ KeyValue {
+ key: "feature",
+ value: "foo",
+ },
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_with_features() {
+ check(
+ r#"
+//- /lib.rs crate:foo cfg:feature=foo,feature=bar
+$0
+#[test]
+#[cfg(all(feature = "foo", feature = "bar"))]
+fn test_foo1() {}
+"#,
+ &[TestMod, Test],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..73,
+ name: "",
+ kind: Module,
+ },
+ kind: TestMod {
+ path: "",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..72,
+ focus_range: 58..67,
+ name: "test_foo1",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "test_foo1",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: Some(
+ All(
+ [
+ Atom(
+ KeyValue {
+ key: "feature",
+ value: "foo",
+ },
+ ),
+ Atom(
+ KeyValue {
+ key: "feature",
+ value: "bar",
+ },
+ ),
+ ],
+ ),
+ ),
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_no_test_function_in_module() {
+ check(
+ r#"
+//- /lib.rs
+$0
+mod test_mod {
+ fn foo1() {}
+}
+"#,
+ &[],
+ expect![[r#"
+ []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_doc_runnables_impl_mod() {
+ check(
+ r#"
+//- /lib.rs
+mod foo;
+//- /foo.rs
+struct Foo;$0
+impl Foo {
+ /// ```
+ /// let x = 5;
+ /// ```
+ fn foo() {}
+}
+ "#,
+ &[DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 1,
+ ),
+ full_range: 27..81,
+ name: "foo",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "foo::Foo::foo",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn test_runnables_in_macro() {
+ check(
+ r#"
+//- /lib.rs
+$0
+macro_rules! gen {
+ () => {
+ #[test]
+ fn foo_test() {}
+ }
+}
+macro_rules! gen2 {
+ () => {
+ mod tests2 {
+ #[test]
+ fn foo_test2() {}
+ }
+ }
+}
+mod tests {
+ gen!();
+}
+gen2!();
+"#,
+ &[TestMod, TestMod, Test, Test, TestMod],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 0..237,
+ name: "",
+ kind: Module,
+ },
+ kind: TestMod {
+ path: "",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 202..227,
+ focus_range: 206..211,
+ name: "tests",
+ kind: Module,
+ description: "mod tests",
+ },
+ kind: TestMod {
+ path: "tests",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 218..225,
+ name: "foo_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::foo_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 228..236,
+ name: "foo_test2",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests2::foo_test2",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 228..236,
+ name: "tests2",
+ kind: Module,
+ description: "mod tests2",
+ },
+ kind: TestMod {
+ path: "tests2",
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn big_mac() {
+ check(
+ r#"
+//- /lib.rs
+$0
+macro_rules! foo {
+ () => {
+ mod foo_tests {
+ #[test]
+ fn foo0() {}
+ #[test]
+ fn foo1() {}
+ #[test]
+ fn foo2() {}
+ }
+ };
+}
+foo!();
+"#,
+ &[Test, Test, Test, TestMod],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 210..217,
+ name: "foo0",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "foo_tests::foo0",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 210..217,
+ name: "foo1",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "foo_tests::foo1",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 210..217,
+ name: "foo2",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "foo_tests::foo2",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 210..217,
+ name: "foo_tests",
+ kind: Module,
+ description: "mod foo_tests",
+ },
+ kind: TestMod {
+ path: "foo_tests",
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn dont_recurse_in_outline_submodules() {
+ check(
+ r#"
+//- /lib.rs
+$0
+mod m;
+//- /m.rs
+mod tests {
+ #[test]
+ fn t() {}
+}
+"#,
+ &[],
+ expect![[r#"
+ []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn outline_submodule1() {
+ check(
+ r#"
+//- /lib.rs
+$0
+mod m;
+//- /m.rs
+#[test]
+fn t0() {}
+#[test]
+fn t1() {}
+"#,
+ &[TestMod],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..7,
+ focus_range: 5..6,
+ name: "m",
+ kind: Module,
+ description: "mod m",
+ },
+ kind: TestMod {
+ path: "m",
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn outline_submodule2() {
+ check(
+ r#"
+//- /lib.rs
+mod m;
+//- /m.rs
+$0
+#[test]
+fn t0() {}
+#[test]
+fn t1() {}
+"#,
+ &[TestMod, Test, Test],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 1,
+ ),
+ full_range: 0..39,
+ name: "m",
+ kind: Module,
+ },
+ kind: TestMod {
+ path: "m",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 1,
+ ),
+ full_range: 1..19,
+ focus_range: 12..14,
+ name: "t0",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "m::t0",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 1,
+ ),
+ full_range: 20..38,
+ focus_range: 31..33,
+ name: "t1",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "m::t1",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn attributed_module() {
+ check(
+ r#"
+//- proc_macros: identity
+//- /lib.rs
+$0
+#[proc_macros::identity]
+mod module {
+ #[test]
+ fn t0() {}
+ #[test]
+ fn t1() {}
+}
+"#,
+ &[TestMod, Test, Test],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 26..94,
+ focus_range: 30..36,
+ name: "module",
+ kind: Module,
+ description: "mod module",
+ },
+ kind: TestMod {
+ path: "module",
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 43..65,
+ focus_range: 58..60,
+ name: "t0",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "module::t0",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: true,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 70..92,
+ focus_range: 85..87,
+ name: "t1",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "module::t1",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn find_no_tests() {
+ check_tests(
+ r#"
+//- /lib.rs
+fn foo$0() { };
+"#,
+ expect![[r#"
+ []
+ "#]],
+ );
+ }
+
+ #[test]
+ fn find_direct_fn_test() {
+ check_tests(
+ r#"
+//- /lib.rs
+fn foo$0() { };
+
+mod tests {
+ #[test]
+ fn foo_test() {
+ super::foo()
+ }
+}
+"#,
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 31..85,
+ focus_range: 46..54,
+ name: "foo_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::foo_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn find_direct_struct_test() {
+ check_tests(
+ r#"
+//- /lib.rs
+struct Fo$0o;
+fn foo(arg: &Foo) { };
+
+mod tests {
+ use super::*;
+
+ #[test]
+ fn foo_test() {
+ foo(Foo);
+ }
+}
+"#,
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 71..122,
+ focus_range: 86..94,
+ name: "foo_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::foo_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn find_indirect_fn_test() {
+ check_tests(
+ r#"
+//- /lib.rs
+fn foo$0() { };
+
+mod tests {
+ use super::foo;
+
+ fn check1() {
+ check2()
+ }
+
+ fn check2() {
+ foo()
+ }
+
+ #[test]
+ fn foo_test() {
+ check1()
+ }
+}
+"#,
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 133..183,
+ focus_range: 148..156,
+ name: "foo_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::foo_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn tests_are_unique() {
+ check_tests(
+ r#"
+//- /lib.rs
+fn foo$0() { };
+
+mod tests {
+ use super::foo;
+
+ #[test]
+ fn foo_test() {
+ foo();
+ foo();
+ }
+
+ #[test]
+ fn foo2_test() {
+ foo();
+ foo();
+ }
+
+}
+"#,
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 52..115,
+ focus_range: 67..75,
+ name: "foo_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::foo_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 121..185,
+ focus_range: 136..145,
+ name: "foo2_test",
+ kind: Function,
+ },
+ kind: Test {
+ test_id: Path(
+ "tests::foo2_test",
+ ),
+ attr: TestAttr {
+ ignore: false,
+ },
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn doc_test_type_params() {
+ check(
+ r#"
+//- /lib.rs
+$0
+struct Foo<T, U>;
+
++/// ```
++/// ```
+impl<T, U> Foo<T, U> {
+ /// ```rust
+ /// ````
+ fn t() {}
+}
++
++/// ```
++/// ```
++impl Foo<Foo<(), ()>, ()> {
++ /// ```
++ /// ```
++ fn t() {}
++}
+"#,
- full_range: 47..85,
++ &[DocTest, DocTest, DocTest, DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
- "Foo<T, U>::t",
++ full_range: 20..103,
++ focus_range: 47..56,
++ name: "impl",
++ kind: Impl,
++ },
++ kind: DocTest {
++ test_id: Path(
++ "Foo<T,U>",
++ ),
++ },
++ cfg: None,
++ },
++ Runnable {
++ use_name_in_title: false,
++ nav: NavigationTarget {
++ file_id: FileId(
++ 0,
++ ),
++ full_range: 63..101,
++ name: "t",
++ },
++ kind: DocTest {
++ test_id: Path(
++ "Foo<T,U>::t",
++ ),
++ },
++ cfg: None,
++ },
++ Runnable {
++ use_name_in_title: false,
++ nav: NavigationTarget {
++ file_id: FileId(
++ 0,
++ ),
++ full_range: 105..188,
++ focus_range: 126..146,
++ name: "impl",
++ kind: Impl,
++ },
++ kind: DocTest {
++ test_id: Path(
++ "Foo<Foo<(),()>,()>",
++ ),
++ },
++ cfg: None,
++ },
++ Runnable {
++ use_name_in_title: false,
++ nav: NavigationTarget {
++ file_id: FileId(
++ 0,
++ ),
++ full_range: 153..186,
+ name: "t",
+ },
+ kind: DocTest {
+ test_id: Path(
++ "Foo<Foo<(),()>,()>::t",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+
+ #[test]
+ fn doc_test_macro_export_mbe() {
+ check(
+ r#"
+//- /lib.rs
+$0
+mod foo;
+
+//- /foo.rs
+/// ```
+/// fn foo() {
+/// }
+/// ```
+#[macro_export]
+macro_rules! foo {
+ () => {
+
+ };
+}
+"#,
+ &[],
+ expect![[r#"
+ []
+ "#]],
+ );
+ check(
+ r#"
+//- /lib.rs
+$0
+/// ```
+/// fn foo() {
+/// }
+/// ```
+#[macro_export]
+macro_rules! foo {
+ () => {
+
+ };
+}
+"#,
+ &[DocTest],
+ expect![[r#"
+ [
+ Runnable {
+ use_name_in_title: false,
+ nav: NavigationTarget {
+ file_id: FileId(
+ 0,
+ ),
+ full_range: 1..94,
+ name: "foo",
+ },
+ kind: DocTest {
+ test_id: Path(
+ "foo",
+ ),
+ },
+ cfg: None,
+ },
+ ]
+ "#]],
+ );
+ }
+}
--- /dev/null
- // | VS Code | **Rust Analyzer: Shuffle Crate Graph**
+use std::sync::Arc;
+
+use ide_db::{
+ base_db::{salsa::Durability, CrateGraph, SourceDatabase},
+ FxHashMap, RootDatabase,
+};
+
+// Feature: Shuffle Crate Graph
+//
+// Randomizes all crate IDs in the crate graph, for debugging.
+//
+// |===
+// | Editor | Action Name
+//
++// | VS Code | **rust-analyzer: Shuffle Crate Graph**
+// |===
+pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) {
+ let crate_graph = db.crate_graph();
+
+ let mut shuffled_ids = crate_graph.iter().collect::<Vec<_>>();
+ shuffle(&mut shuffled_ids);
+
+ let mut new_graph = CrateGraph::default();
+
+ let mut map = FxHashMap::default();
+ for old_id in shuffled_ids.iter().copied() {
+ let data = &crate_graph[old_id];
+ let new_id = new_graph.add_crate_root(
+ data.root_file_id,
+ data.edition,
+ data.display_name.clone(),
+ data.version.clone(),
+ data.cfg_options.clone(),
+ data.potential_cfg_options.clone(),
+ data.env.clone(),
+ data.proc_macro.clone(),
+ data.is_proc_macro,
+ data.origin.clone(),
+ );
+ map.insert(old_id, new_id);
+ }
+
+ for old_id in shuffled_ids.iter().copied() {
+ let data = &crate_graph[old_id];
+ for dep in &data.dependencies {
+ let mut new_dep = dep.clone();
+ new_dep.crate_id = map[&dep.crate_id];
+ new_graph.add_dep(map[&old_id], new_dep).unwrap();
+ }
+ }
+
+ db.set_crate_graph_with_durability(Arc::new(new_graph), Durability::HIGH);
+}
+
+fn shuffle<T>(slice: &mut [T]) {
+ let mut rng = oorandom::Rand32::new(seed());
+
+ let mut remaining = slice.len() - 1;
+ while remaining > 0 {
+ let index = rng.rand_range(0..remaining as u32);
+ slice.swap(remaining, index as usize);
+ remaining -= 1;
+ }
+}
+
+fn seed() -> u64 {
+ use std::collections::hash_map::RandomState;
+ use std::hash::{BuildHasher, Hasher};
+
+ RandomState::new().build_hasher().finish()
+}
--- /dev/null
- let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions);
+//! This module provides `StaticIndex` which is used for powering
+//! read-only code browsers and emitting LSIF
+
+use std::collections::HashMap;
+
+use hir::{db::HirDatabase, Crate, Module, Semantics};
+use ide_db::{
+ base_db::{FileId, FileRange, SourceDatabaseExt},
+ defs::{Definition, IdentClass},
+ FxHashSet, RootDatabase,
+};
+use syntax::{AstNode, SyntaxKind::*, SyntaxToken, TextRange, T};
+
+use crate::{
+ hover::hover_for_definition,
+ moniker::{crate_for_file, def_to_moniker, MonikerResult},
+ Analysis, Fold, HoverConfig, HoverDocFormat, HoverResult, InlayHint, InlayHintsConfig,
+ TryToNav,
+};
+
+/// A static representation of fully analyzed source code.
+///
+/// The intended use-case is powering read-only code browsers and emitting LSIF
+#[derive(Debug)]
+pub struct StaticIndex<'a> {
+ pub files: Vec<StaticIndexedFile>,
+ pub tokens: TokenStore,
+ analysis: &'a Analysis,
+ db: &'a RootDatabase,
+ def_map: HashMap<Definition, TokenId>,
+}
+
+#[derive(Debug)]
+pub struct ReferenceData {
+ pub range: FileRange,
+ pub is_definition: bool,
+}
+
+#[derive(Debug)]
+pub struct TokenStaticData {
+ pub hover: Option<HoverResult>,
+ pub definition: Option<FileRange>,
+ pub references: Vec<ReferenceData>,
+ pub moniker: Option<MonikerResult>,
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
+pub struct TokenId(usize);
+
+impl TokenId {
+ pub fn raw(self) -> usize {
+ self.0
+ }
+}
+
+#[derive(Default, Debug)]
+pub struct TokenStore(Vec<TokenStaticData>);
+
+impl TokenStore {
+ pub fn insert(&mut self, data: TokenStaticData) -> TokenId {
+ let id = TokenId(self.0.len());
+ self.0.push(data);
+ id
+ }
+
+ pub fn get_mut(&mut self, id: TokenId) -> Option<&mut TokenStaticData> {
+ self.0.get_mut(id.0)
+ }
+
+ pub fn get(&self, id: TokenId) -> Option<&TokenStaticData> {
+ self.0.get(id.0)
+ }
+
+ pub fn iter(self) -> impl Iterator<Item = (TokenId, TokenStaticData)> {
+ self.0.into_iter().enumerate().map(|(i, x)| (TokenId(i), x))
+ }
+}
+
+#[derive(Debug)]
+pub struct StaticIndexedFile {
+ pub file_id: FileId,
+ pub folds: Vec<Fold>,
+ pub inlay_hints: Vec<InlayHint>,
+ pub tokens: Vec<(TextRange, TokenId)>,
+}
+
+fn all_modules(db: &dyn HirDatabase) -> Vec<Module> {
+ let mut worklist: Vec<_> =
+ Crate::all(db).into_iter().map(|krate| krate.root_module(db)).collect();
+ let mut modules = Vec::new();
+
+ while let Some(module) = worklist.pop() {
+ modules.push(module);
+ worklist.extend(module.children(db));
+ }
+
+ modules
+}
+
+impl StaticIndex<'_> {
+ /// Indexes a single file: records its folding ranges and inlay hints and,
+ /// for every identifier-like token, resolves a definition and registers it
+ /// in the shared token store (deduplicated via `def_map`), together with a
+ /// reference entry for this particular occurrence.
+ fn add_file(&mut self, file_id: FileId) {
+ let current_crate = crate_for_file(self.db, file_id);
+ let folds = self.analysis.folding_ranges(file_id).unwrap();
+ let inlay_hints = self
+ .analysis
+ .inlay_hints(
+ &InlayHintsConfig {
+ render_colons: true,
+ type_hints: true,
+ parameter_hints: true,
+ chaining_hints: true,
+ closure_return_type_hints: crate::ClosureReturnTypeHints::WithBlock,
+ lifetime_elision_hints: crate::LifetimeElisionHints::Never,
+ reborrow_hints: crate::ReborrowHints::Never,
+ hide_named_constructor_hints: false,
+ hide_closure_initialization_hints: false,
+ param_names_for_lifetime_elision_hints: false,
+ binding_mode_hints: false,
+ max_length: Some(25),
+ closing_brace_hints_min_lines: Some(25),
+ },
+ file_id,
+ None,
+ )
+ .unwrap();
+ // hovers
+ let sema = hir::Semantics::new(self.db);
+ let tokens_or_nodes = sema.parse(file_id).syntax().clone();
+ // Keep only leaf tokens; interior nodes are skipped.
+ let tokens = tokens_or_nodes.descendants_with_tokens().filter_map(|x| match x {
+ syntax::NodeOrToken::Node(_) => None,
+ syntax::NodeOrToken::Token(x) => Some(x),
+ });
+ let hover_config =
+ HoverConfig { links_in_hover: true, documentation: Some(HoverDocFormat::Markdown) };
+ // Only identifier-like token kinds are considered for resolution.
+ let tokens = tokens.filter(|token| {
+ matches!(
+ token.kind(),
+ IDENT | INT_NUMBER | LIFETIME_IDENT | T![self] | T![super] | T![crate] | T![Self]
+ )
+ });
+ let mut result = StaticIndexedFile { file_id, inlay_hints, folds, tokens: vec![] };
+ for token in tokens {
+ let range = token.text_range();
+ let node = token.parent().unwrap();
+ let def = match get_definition(&sema, token.clone()) {
+ Some(x) => x,
+ None => continue,
+ };
+ // One `TokenStaticData` per definition: reuse the cached id when the
+ // definition was seen before, otherwise compute hover/nav/moniker
+ // data once and remember the fresh id in `def_map`.
+ let id = if let Some(x) = self.def_map.get(&def) {
+ *x
+ } else {
+ let x = self.tokens.insert(TokenStaticData {
+ hover: hover_for_definition(&sema, file_id, def, &node, &hover_config),
+ definition: def
+ .try_to_nav(self.db)
+ .map(|x| FileRange { file_id: x.file_id, range: x.focus_or_full_range() }),
+ references: vec![],
+ moniker: current_crate.and_then(|cc| def_to_moniker(self.db, def, cc)),
+ });
+ self.def_map.insert(def, x);
+ x
+ };
+ let token = self.tokens.get_mut(id).unwrap();
+ // Record this occurrence; it counts as the definition site iff its
+ // range equals the definition's focus range in the same file.
+ token.references.push(ReferenceData {
+ range: FileRange { range, file_id },
+ is_definition: match def.try_to_nav(self.db) {
+ Some(x) => x.file_id == file_id && x.focus_or_full_range() == range,
+ None => false,
+ },
+ });
+ result.tokens.push((range, id));
+ }
+ self.files.push(result);
+ }
+
+ /// Builds the static index for the whole workspace, indexing the defining
+ /// file of every non-library module exactly once.
+ pub fn compute(analysis: &Analysis) -> StaticIndex<'_> {
+ let db = &*analysis.db;
+ // Skip modules whose file lives in a library source root.
+ let work = all_modules(db).into_iter().filter(|module| {
+ let file_id = module.definition_source(db).file_id.original_file(db);
+ let source_root = db.file_source_root(file_id);
+ let source_root = db.source_root(source_root);
+ !source_root.is_library
+ });
+ let mut this = StaticIndex {
+ files: vec![],
+ tokens: Default::default(),
+ analysis,
+ db,
+ def_map: Default::default(),
+ };
+ let mut visited_files = FxHashSet::default();
+ for module in work {
+ let file_id = module.definition_source(db).file_id.original_file(db);
+ // Several modules may originate from the same file; index it once.
+ if visited_files.contains(&file_id) {
+ continue;
+ }
+ this.add_file(file_id);
+ // mark the file
+ visited_files.insert(file_id);
+ }
+ this
+ }
+}
+
+/// Resolves `token` to a `Definition`, descending into macro expansions.
+/// Returns `Some` only when classification yields exactly one definition;
+/// unclassifiable or multi-definition tokens produce `None`.
+fn get_definition(sema: &Semantics<'_, RootDatabase>, token: SyntaxToken) -> Option<Definition> {
+ for token in sema.descend_into_macros(token) {
++ let def = IdentClass::classify_token(sema, &token).map(IdentClass::definitions_no_ops);
+ if let Some(&[x]) = def.as_deref() {
+ return Some(x);
+ } else {
+ continue;
+ };
+ }
+ None
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{fixture, StaticIndex};
+ use ide_db::base_db::FileRange;
+ use std::collections::HashSet;
+ use syntax::TextSize;
+
+ /// Asserts that the set of indexed token ranges equals exactly the
+ /// annotated ranges of the fixture — panics on extras or omissions.
+ fn check_all_ranges(ra_fixture: &str) {
+ let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
+ let s = StaticIndex::compute(&analysis);
+ let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
+ for f in s.files {
+ for (range, _) in f.tokens {
+ let x = FileRange { file_id: f.file_id, range };
+ if !range_set.contains(&x) {
+ panic!("additional range {:?}", x);
+ }
+ range_set.remove(&x);
+ }
+ }
+ if !range_set.is_empty() {
+ panic!("unfound ranges {:?}", range_set);
+ }
+ }
+
+ /// Asserts that the definition ranges recorded in the token store match
+ /// the fixture annotations exactly, skipping definitions whose range
+ /// starts at offset 0 (whole-of-file definitions).
+ fn check_definitions(ra_fixture: &str) {
+ let (analysis, ranges) = fixture::annotations_without_marker(ra_fixture);
+ let s = StaticIndex::compute(&analysis);
+ let mut range_set: HashSet<_> = ranges.iter().map(|x| x.0).collect();
+ for (_, t) in s.tokens.iter() {
+ if let Some(x) = t.definition {
+ if x.range.start() == TextSize::from(0) {
+ // ignore definitions that are whole of file
+ continue;
+ }
+ if !range_set.contains(&x) {
+ panic!("additional definition {:?}", x);
+ }
+ range_set.remove(&x);
+ }
+ }
+ if !range_set.is_empty() {
+ panic!("unfound definitions {:?}", range_set);
+ }
+ }
+
+ #[test]
+ fn struct_and_enum() {
+ check_all_ranges(
+ r#"
+struct Foo;
+ //^^^
+enum E { X(Foo) }
+ //^ ^ ^^^
+"#,
+ );
+ check_definitions(
+ r#"
+struct Foo;
+ //^^^
+enum E { X(Foo) }
+ //^ ^
+"#,
+ );
+ }
+
+ // NOTE(review): the dependency fixture below declares `pub func()` without
+ // `fn` — presumably deliberate, relying on parser error recovery; confirm
+ // before "fixing" the fixture.
+ #[test]
+ fn multi_crate() {
+ check_definitions(
+ r#"
+//- /main.rs crate:main deps:foo
+
+
+use foo::func;
+
+fn main() {
+ //^^^^
+ func();
+}
+//- /foo/lib.rs crate:foo
+
+pub func() {
+
+}
+"#,
+ );
+ }
+
+ #[test]
+ fn derives() {
+ check_all_ranges(
+ r#"
+//- minicore:derive
+#[rustc_builtin_macro]
+//^^^^^^^^^^^^^^^^^^^
+pub macro Copy {}
+ //^^^^
+#[derive(Copy)]
+//^^^^^^ ^^^^
+struct Hello(i32);
+ //^^^^^ ^^^
+"#,
+ );
+ }
+}
--- /dev/null
- // | VS Code | **Rust Analyzer: Status**
+use std::{fmt, iter::FromIterator, sync::Arc};
+
+use hir::{ExpandResult, MacroFile};
+use ide_db::base_db::{
+ salsa::debug::{DebugQueryTable, TableEntry},
+ CrateId, FileId, FileTextQuery, SourceDatabase, SourceRootId,
+};
+use ide_db::{
+ symbol_index::{LibrarySymbolsQuery, SymbolIndex},
+ RootDatabase,
+};
+use itertools::Itertools;
+use profile::{memory_usage, Bytes};
+use std::env;
+use stdx::format_to;
+use syntax::{ast, Parse, SyntaxNode};
+
+/// Aggregates parse-tree statistics for on-disk files from the `Parse` query table.
+fn syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats {
+ ide_db::base_db::ParseQuery.in_db(db).entries::<SyntaxTreeStats>()
+}
+/// Aggregates parse-tree statistics for macro expansions.
+fn macro_syntax_tree_stats(db: &RootDatabase) -> SyntaxTreeStats {
+ hir::db::ParseMacroExpansionQuery.in_db(db).entries::<SyntaxTreeStats>()
+}
+
+// Feature: Status
+//
+// Shows internal statistic about memory usage of rust-analyzer.
+//
+// |===
+// | Editor | Action Name
+//
++// | VS Code | **rust-analyzer: Status**
+// |===
+// image::https://user-images.githubusercontent.com/48062697/113065584-05f34500-91b1-11eb-98cc-5c196f76be7f.gif[]
+pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
+ let mut buf = String::new();
+ format_to!(buf, "{}\n", FileTextQuery.in_db(db).entries::<FilesStats>());
+ format_to!(buf, "{}\n", LibrarySymbolsQuery.in_db(db).entries::<LibrarySymbolsStats>());
+ format_to!(buf, "{}\n", syntax_tree_stats(db));
+ format_to!(buf, "{} (Macros)\n", macro_syntax_tree_stats(db));
+ format_to!(buf, "{} in total\n", memory_usage());
+ // Per-type instance counters are opt-in via the RA_COUNT env var.
+ if env::var("RA_COUNT").is_ok() {
+ format_to!(buf, "\nCounts:\n{}", profile::countme::get_all());
+ }
+
+ // Optional per-file section: the crate(s) owning the file and their deps.
+ if let Some(file_id) = file_id {
+ format_to!(buf, "\nFile info:\n");
+ let crates = crate::parent_module::crate_for(db, file_id);
+ if crates.is_empty() {
+ format_to!(buf, "Does not belong to any crate");
+ }
+ let crate_graph = db.crate_graph();
+ for krate in crates {
+ let display_crate = |krate: CrateId| match &crate_graph[krate].display_name {
+ Some(it) => format!("{}({:?})", it, krate),
+ None => format!("{:?}", krate),
+ };
+ format_to!(buf, "Crate: {}\n", display_crate(krate));
+ let deps = crate_graph[krate]
+ .dependencies
+ .iter()
+ .map(|dep| format!("{}={:?}", dep.name, dep.crate_id))
+ .format(", ");
+ format_to!(buf, "Dependencies: {}\n", deps);
+ }
+ }
+
+ buf.trim().to_string()
+}
+
+/// Count and accumulated byte size of all file texts in the database.
+#[derive(Default)]
+struct FilesStats {
+ total: usize,
+ size: Bytes,
+}
+
+impl fmt::Display for FilesStats {
+ // NOTE: only `size` is rendered; `total` is accumulated but not shown here.
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(fmt, "{} of files", self.size)
+ }
+}
+
+/// Folds `FileTextQuery` table entries into file count and total byte size.
+impl FromIterator<TableEntry<FileId, Arc<String>>> for FilesStats {
+ fn from_iter<T>(iter: T) -> FilesStats
+ where
+ T: IntoIterator<Item = TableEntry<FileId, Arc<String>>>,
+ {
+ let mut res = FilesStats::default();
+ for entry in iter {
+ res.total += 1;
+ res.size += entry.value.unwrap().len();
+ }
+ res
+ }
+}
+
+/// Parse-tree counts: how many trees were created and how many values are
+/// still retained (cached) in the query table.
+#[derive(Default)]
+pub(crate) struct SyntaxTreeStats {
+ total: usize,
+ pub(crate) retained: usize,
+}
+
+impl fmt::Display for SyntaxTreeStats {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(fmt, "{} trees, {} preserved", self.total, self.retained)
+ }
+}
+
+/// Folds file-parse table entries; an entry counts as retained when its
+/// cached value is still present (`Some`).
+impl FromIterator<TableEntry<FileId, Parse<ast::SourceFile>>> for SyntaxTreeStats {
+ fn from_iter<T>(iter: T) -> SyntaxTreeStats
+ where
+ T: IntoIterator<Item = TableEntry<FileId, Parse<ast::SourceFile>>>,
+ {
+ let mut res = SyntaxTreeStats::default();
+ for entry in iter {
+ res.total += 1;
+ res.retained += entry.value.is_some() as usize;
+ }
+ res
+ }
+}
+
+/// Same folding, for macro-expansion parse table entries.
+impl<M> FromIterator<TableEntry<MacroFile, ExpandResult<Option<(Parse<SyntaxNode>, M)>>>>
+ for SyntaxTreeStats
+{
+ fn from_iter<T>(iter: T) -> SyntaxTreeStats
+ where
+ T: IntoIterator<Item = TableEntry<MacroFile, ExpandResult<Option<(Parse<SyntaxNode>, M)>>>>,
+ {
+ let mut res = SyntaxTreeStats::default();
+ for entry in iter {
+ res.total += 1;
+ res.retained += entry.value.is_some() as usize;
+ }
+ res
+ }
+}
+
+/// Count and memory footprint of library symbol-index entries.
+#[derive(Default)]
+struct LibrarySymbolsStats {
+ total: usize,
+ size: Bytes,
+}
+
+impl fmt::Display for LibrarySymbolsStats {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(fmt, "{} of index symbols ({})", self.size, self.total)
+ }
+}
+
+/// Folds symbol-index table entries into symbol count and memory size.
+impl FromIterator<TableEntry<SourceRootId, Arc<SymbolIndex>>> for LibrarySymbolsStats {
+ fn from_iter<T>(iter: T) -> LibrarySymbolsStats
+ where
+ T: IntoIterator<Item = TableEntry<SourceRootId, Arc<SymbolIndex>>>,
+ {
+ let mut res = LibrarySymbolsStats::default();
+ for entry in iter {
+ let symbols = entry.value.unwrap();
+ res.total += symbols.len();
+ res.size += symbols.memory_size();
+ }
+ res
+ }
+}
--- /dev/null
- all(unix, not(target_pointer_width = "64")),
+use std::time::Instant;
+
+use expect_test::{expect_file, ExpectFile};
+use ide_db::SymbolKind;
+use test_utils::{bench, bench_fixture, skip_slow_tests, AssertLinear};
+
+use crate::{fixture, FileRange, HlTag, TextRange};
+
+/// Snapshot test: highlighting of attributes (incl. proc-macro and derive
+/// attributes) interleaved with doc and normal comments.
+#[test]
+fn attributes() {
+ check_highlighting(
+ r#"
+//- proc_macros: identity
+//- minicore: derive, copy
+#[allow(dead_code)]
+#[rustfmt::skip]
+#[proc_macros::identity]
+#[derive(Copy)]
+/// This is a doc comment
+// This is a normal comment
+/// This is a doc comment
+#[derive(Copy)]
+// This is another normal comment
+/// This is another doc comment
+// This is another normal comment
+#[derive(Copy)]
+// The reason for these being here is to test AttrIds
+struct Foo;
+"#,
+ expect_file!["./test_data/highlight_attributes.html"],
+ false,
+ );
+}
+
+#[test]
+fn macros() {
+ check_highlighting(
+ r#"
+//- proc_macros: mirror
+proc_macros::mirror! {
+ {
+ ,i32 :x pub
+ ,i32 :y pub
+ } Foo struct
+}
+macro_rules! def_fn {
+ ($($tt:tt)*) => {$($tt)*}
+}
+
+def_fn! {
+ fn bar() -> u32 {
+ 100
+ }
+}
+
+macro_rules! dont_color_me_braces {
+ () => {0}
+}
+
+macro_rules! noop {
+ ($expr:expr) => {
+ $expr
+ }
+}
+
+/// textually shadow previous definition
+macro_rules! noop {
+ ($expr:expr) => {
+ $expr
+ }
+}
+
+macro_rules! keyword_frag {
+ ($type:ty) => ($type)
+}
+
+macro with_args($i:ident) {
+ $i
+}
+
+macro without_args {
+ ($i:ident) => {
+ $i
+ }
+}
+
+fn main() {
+ println!("Hello, {}!", 92);
+ dont_color_me_braces!();
+ noop!(noop!(1));
+}
+"#,
+ expect_file!["./test_data/highlight_macros.html"],
+ false,
+ );
+}
+
+/// If what you want to test feels like a specific entity consider making a new test instead,
+/// this test fixture here in fact should shrink instead of grow ideally.
+#[test]
+fn test_highlighting() {
+ check_highlighting(
+ r#"
+//- minicore: derive, copy
+//- /main.rs crate:main deps:foo
+use inner::{self as inner_mod};
+mod inner {}
+
+pub mod ops {
+ #[lang = "fn_once"]
+ pub trait FnOnce<Args> {}
+
+ #[lang = "fn_mut"]
+ pub trait FnMut<Args>: FnOnce<Args> {}
+
+ #[lang = "fn"]
+ pub trait Fn<Args>: FnMut<Args> {}
+}
+
+struct Foo {
+ x: u32,
+}
+
+trait Bar {
+ fn bar(&self) -> i32;
+}
+
+impl Bar for Foo {
+ fn bar(&self) -> i32 {
+ self.x
+ }
+}
+
+impl Foo {
+ fn baz(mut self, f: Foo) -> i32 {
+ f.baz(self)
+ }
+
+ fn qux(&mut self) {
+ self.x = 0;
+ }
+
+ fn quop(&self) -> i32 {
+ self.x
+ }
+}
+
+use self::FooCopy::{self as BarCopy};
+
+#[derive(Copy)]
+struct FooCopy {
+ x: u32,
+}
+
+impl FooCopy {
+ fn baz(self, f: FooCopy) -> u32 {
+ f.baz(self)
+ }
+
+ fn qux(&mut self) {
+ self.x = 0;
+ }
+
+ fn quop(&self) -> u32 {
+ self.x
+ }
+}
+
+fn str() {
+ str();
+}
+
+fn foo<'a, T>() -> T {
+ foo::<'a, i32>()
+}
+
+fn never() -> ! {
+ loop {}
+}
+
+fn const_param<const FOO: usize>() -> usize {
+ const_param::<{ FOO }>();
+ FOO
+}
+
+use ops::Fn;
+fn baz<F: Fn() -> ()>(f: F) {
+ f()
+}
+
+fn foobar() -> impl Copy {}
+
+fn foo() {
+ let bar = foobar();
+}
+
+// comment
+fn main() {
+ let mut x = 42;
+ x += 1;
+ let y = &mut x;
+ let z = &y;
+
+ let Foo { x: z, y } = Foo { x: z, y };
+
+ y;
+
+ let mut foo = Foo { x, y: x };
+ let foo2 = Foo { x, y: x };
+ foo.quop();
+ foo.qux();
+ foo.baz(foo2);
+
+ let mut copy = FooCopy { x };
+ copy.quop();
+ copy.qux();
+ copy.baz(copy);
+
+ let a = |x| x;
+ let bar = Foo::baz;
+
+ let baz = (-42,);
+ let baz = -baz.0;
+
+ let _ = !true;
+
+ 'foo: loop {
+ break 'foo;
+ continue 'foo;
+ }
+}
+
+enum Option<T> {
+ Some(T),
+ None,
+}
+use Option::*;
+
+impl<T> Option<T> {
+ fn and<U>(self, other: Option<U>) -> Option<(T, U)> {
+ match other {
+ None => unimplemented!(),
+ Nope => Nope,
+ }
+ }
+}
+
+async fn learn_and_sing() {
+ let song = learn_song().await;
+ sing_song(song).await;
+}
+
+async fn async_main() {
+ let f1 = learn_and_sing();
+ let f2 = dance();
+ futures::join!(f1, f2);
+}
+
+fn use_foo_items() {
+ let bob = foo::Person {
+ name: "Bob",
+ age: foo::consts::NUMBER,
+ };
+
+ let control_flow = foo::identity(foo::ControlFlow::Continue);
+
+ if control_flow.should_die() {
+ foo::die!();
+ }
+}
+
+pub enum Bool { True, False }
+
+impl Bool {
+ pub const fn to_primitive(self) -> bool {
+ true
+ }
+}
+const USAGE_OF_BOOL:bool = Bool::True.to_primitive();
+
+trait Baz {
+ type Qux;
+}
+
+fn baz<T>(t: T)
+where
+ T: Baz,
+ <T as Baz>::Qux: Bar {}
+
+fn gp_shadows_trait<Baz: Bar>() {
+ Baz::bar;
+}
+
+//- /foo.rs crate:foo
+pub struct Person {
+ pub name: &'static str,
+ pub age: u8,
+}
+
+pub enum ControlFlow {
+ Continue,
+ Die,
+}
+
+impl ControlFlow {
+ pub fn should_die(self) -> bool {
+ matches!(self, ControlFlow::Die)
+ }
+}
+
+pub fn identity<T>(x: T) -> T { x }
+
+pub mod consts {
+ pub const NUMBER: i64 = 92;
+}
+
+macro_rules! die {
+ () => {
+ panic!();
+ };
+}
+"#,
+ expect_file!["./test_data/highlight_general.html"],
+ false,
+ );
+}
+
+/// Snapshot test: highlighting of named, anonymous (`'_`) and `'static`
+/// lifetimes in struct, impl and where-clause positions.
+#[test]
+fn test_lifetime_highlighting() {
+ check_highlighting(
+ r#"
+//- minicore: derive
+
+#[derive()]
+struct Foo<'a, 'b, 'c> where 'a: 'a, 'static: 'static {
+ field: &'a (),
+ field2: &'static (),
+}
+impl<'a> Foo<'_, 'a, 'static>
+where
+ 'a: 'a,
+ 'static: 'static
+{}
+"#,
+ expect_file!["./test_data/highlight_lifetimes.html"],
+ false,
+ );
+}
+
+/// Snapshot test: highlighting of path keywords (`self`, `super`, `crate`,
+/// `Self`) in both keyword and identifier-like positions.
+#[test]
+fn test_keyword_highlighting() {
+ check_highlighting(
+ r#"
+extern crate self;
+
+use crate;
+use self;
+mod __ {
+ use super::*;
+}
+
+macro_rules! void {
+ ($($tt:tt)*) => {}
+}
+void!(Self);
+struct __ where Self:;
+fn __(_: Self) {}
+"#,
+ expect_file!["./test_data/highlight_keywords.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_string_highlighting() {
+ // The format string detection is based on macro-expansion,
+ // thus, we have to copy the macro definition from `std`
+ check_highlighting(
+ r#"
+macro_rules! println {
+ ($($arg:tt)*) => ({
+ $crate::io::_print($crate::format_args_nl!($($arg)*));
+ })
+}
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args {}
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! const_format_args {}
+#[rustc_builtin_macro]
+#[macro_export]
+macro_rules! format_args_nl {}
+
+mod panic {
+ pub macro panic_2015 {
+ () => (
+ $crate::panicking::panic("explicit panic")
+ ),
+ ($msg:literal $(,)?) => (
+ $crate::panicking::panic($msg)
+ ),
+ // Use `panic_str` instead of `panic_display::<&str>` for non_fmt_panic lint.
+ ($msg:expr $(,)?) => (
+ $crate::panicking::panic_str($msg)
+ ),
+ // Special-case the single-argument case for const_panic.
+ ("{}", $arg:expr $(,)?) => (
+ $crate::panicking::panic_display(&$arg)
+ ),
+ ($fmt:expr, $($arg:tt)+) => (
+ $crate::panicking::panic_fmt($crate::const_format_args!($fmt, $($arg)+))
+ ),
+ }
+}
+
+#[rustc_builtin_macro(std_panic)]
+#[macro_export]
+macro_rules! panic {}
+#[rustc_builtin_macro]
+macro_rules! assert {}
+#[rustc_builtin_macro]
+macro_rules! asm {}
+
+macro_rules! toho {
+ () => ($crate::panic!("not yet implemented"));
+ ($($arg:tt)+) => ($crate::panic!("not yet implemented: {}", $crate::format_args!($($arg)+)));
+}
+
+fn main() {
+ println!("Hello {{Hello}}");
+ // from https://doc.rust-lang.org/std/fmt/index.html
+ println!("Hello"); // => "Hello"
+ println!("Hello, {}!", "world"); // => "Hello, world!"
+ println!("The number is {}", 1); // => "The number is 1"
+ println!("{:?}", (3, 4)); // => "(3, 4)"
+ println!("{value}", value=4); // => "4"
+ println!("{} {}", 1, 2); // => "1 2"
+ println!("{:04}", 42); // => "0042" with leading zerosV
+ println!("{1} {} {0} {}", 1, 2); // => "2 1 1 2"
+ println!("{argument}", argument = "test"); // => "test"
+ println!("{name} {}", 1, name = 2); // => "2 1"
+ println!("{a} {c} {b}", a="a", b='b', c=3); // => "a 3 b"
+ println!("{{{}}}", 2); // => "{2}"
+ println!("Hello {:5}!", "x");
+ println!("Hello {:1$}!", "x", 5);
+ println!("Hello {1:0$}!", 5, "x");
+ println!("Hello {:width$}!", "x", width = 5);
+ println!("Hello {:<5}!", "x");
+ println!("Hello {:-<5}!", "x");
+ println!("Hello {:^5}!", "x");
+ println!("Hello {:>5}!", "x");
+ println!("Hello {:+}!", 5);
+ println!("{:#x}!", 27);
+ println!("Hello {:05}!", 5);
+ println!("Hello {:05}!", -5);
+ println!("{:#010x}!", 27);
+ println!("Hello {0} is {1:.5}", "x", 0.01);
+ println!("Hello {1} is {2:.0$}", 5, "x", 0.01);
+ println!("Hello {0} is {2:.1$}", "x", 5, 0.01);
+ println!("Hello {} is {:.*}", "x", 5, 0.01);
+ println!("Hello {} is {2:.*}", "x", 5, 0.01);
+ println!("Hello {} is {number:.prec$}", "x", prec = 5, number = 0.01);
+ println!("{}, `{name:.*}` has 3 fractional digits", "Hello", 3, name=1234.56);
+ println!("{}, `{name:.*}` has 3 characters", "Hello", 3, name="1234.56");
+ println!("{}, `{name:>8.*}` has 3 right-aligned characters", "Hello", 3, name="1234.56");
+
+ let _ = "{}"
+ let _ = "{{}}";
+
+ println!("Hello {{}}");
+ println!("{{ Hello");
+ println!("Hello }}");
+ println!("{{Hello}}");
+ println!("{{ Hello }}");
+ println!("{{Hello }}");
+ println!("{{ Hello}}");
+
+ println!(r"Hello, {}!", "world");
+
+ // escape sequences
+ println!("Hello\nWorld");
+ println!("\u{48}\x65\x6C\x6C\x6F World");
+
+ let _ = "\x28\x28\x00\x63\n";
+ let _ = b"\x28\x28\x00\x63\n";
+
+ println!("{\x41}", A = 92);
+ println!("{ничоси}", ничоси = 92);
+
+ println!("{:x?} {} ", thingy, n2);
+ panic!("{}", 0);
+ panic!("more {}", 1);
+ assert!(true, "{}", 1);
+ assert!(true, "{} asdasd", 1);
+ toho!("{}fmt", 0);
+ asm!("mov eax, {0}");
+ format_args!(concat!("{}"), "{}");
+}"#,
+ expect_file!["./test_data/highlight_strings.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_unsafe_highlighting() {
+ check_highlighting(
+ r#"
+macro_rules! id {
+ ($($tt:tt)*) => {
+ $($tt)*
+ };
+}
+macro_rules! unsafe_deref {
+ () => {
+ *(&() as *const ())
+ };
+}
+static mut MUT_GLOBAL: Struct = Struct { field: 0 };
+static GLOBAL: Struct = Struct { field: 0 };
+unsafe fn unsafe_fn() {}
+
+union Union {
+ a: u32,
+ b: f32,
+}
+
+struct Struct { field: i32 }
+impl Struct {
+ unsafe fn unsafe_method(&self) {}
+}
+
+#[repr(packed)]
+struct Packed {
+ a: u16,
+}
+
+unsafe trait UnsafeTrait {}
+unsafe impl UnsafeTrait for Packed {}
+impl !UnsafeTrait for () {}
+
+fn unsafe_trait_bound<T: UnsafeTrait>(_: T) {}
+
+trait DoTheAutoref {
+ fn calls_autoref(&self);
+}
+
+impl DoTheAutoref for u16 {
+ fn calls_autoref(&self) {}
+}
+
+fn main() {
+ let x = &5 as *const _ as *const usize;
+ let u = Union { b: 0 };
+
+ id! {
+ unsafe { unsafe_deref!() }
+ };
+
+ unsafe {
+ unsafe_deref!();
+ id! { unsafe_deref!() };
+
+ // unsafe fn and method calls
+ unsafe_fn();
+ let b = u.b;
+ match u {
+ Union { b: 0 } => (),
+ Union { a } => (),
+ }
+ Struct { field: 0 }.unsafe_method();
+
+ // unsafe deref
+ *x;
+
+ // unsafe access to a static mut
+ MUT_GLOBAL.field;
+ GLOBAL.field;
+
+ // unsafe ref of packed fields
+ let packed = Packed { a: 0 };
+ let a = &packed.a;
+ let ref a = packed.a;
+ let Packed { ref a } = packed;
+ let Packed { a: ref _a } = packed;
+
+ // unsafe auto ref of packed field
+ packed.a.calls_autoref();
+ }
+}
+"#,
+ expect_file!["./test_data/highlight_unsafe.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_highlight_doc_comment() {
+ check_highlighting(
+ r#"
+//- /main.rs
+//! This is a module to test doc injection.
+//! ```
+//! fn test() {}
+//! ```
+
+mod outline_module;
+
+/// ```
+/// let _ = "early doctests should not go boom";
+/// ```
+struct Foo {
+ bar: bool,
+}
+
+/// This is an impl with a code block.
+///
+/// ```
+/// fn foo() {
+///
+/// }
+/// ```
+impl Foo {
+ /// ```
+ /// let _ = "Call me
+ // KILLER WHALE
+ /// Ishmael.";
+ /// ```
+ pub const bar: bool = true;
+
+ /// Constructs a new `Foo`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// # #![allow(unused_mut)]
+ /// let mut foo: Foo = Foo::new();
+ /// ```
+ pub const fn new() -> Foo {
+ Foo { bar: true }
+ }
+
+ /// `bar` method on `Foo`.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// use x::y;
+ ///
+ /// let foo = Foo::new();
+ ///
+ /// // calls bar on foo
+ /// assert!(foo.bar());
+ ///
+ /// let bar = foo.bar || Foo::bar;
+ ///
+ /// /* multi-line
+ /// comment */
+ ///
+ /// let multi_line_string = "Foo
+ /// bar\n
+ /// ";
+ ///
+ /// ```
+ ///
+ /// ```rust,no_run
+ /// let foobar = Foo::new().bar();
+ /// ```
+ ///
+ /// ~~~rust,no_run
+ /// // code block with tilde.
+ /// let foobar = Foo::new().bar();
+ /// ~~~
+ ///
+ /// ```
+ /// // functions
+ /// fn foo<T, const X: usize>(arg: i32) {
+ /// let x: T = X;
+ /// }
+ /// ```
+ ///
+ /// ```sh
+ /// echo 1
+ /// ```
+ pub fn foo(&self) -> bool {
+ true
+ }
+}
+
+/// [`Foo`](Foo) is a struct
+/// This function is > [`all_the_links`](all_the_links) <
+/// [`noop`](noop) is a macro below
+/// [`Item`] is a struct in the module [`module`]
+///
+/// [`Item`]: module::Item
+/// [mix_and_match]: ThisShouldntResolve
+pub fn all_the_links() {}
+
+pub mod module {
+ pub struct Item;
+}
+
+/// ```
+/// macro_rules! noop { ($expr:expr) => { $expr }}
+/// noop!(1);
+/// ```
+macro_rules! noop {
+ ($expr:expr) => {
+ $expr
+ }
+}
+
+/// ```rust
+/// let _ = example(&[1, 2, 3]);
+/// ```
+///
+/// ```
+/// loop {}
+#[cfg_attr(not(feature = "false"), doc = "loop {}")]
+#[doc = "loop {}"]
+/// ```
+///
+#[cfg_attr(feature = "alloc", doc = "```rust")]
+#[cfg_attr(not(feature = "alloc"), doc = "```ignore")]
+/// let _ = example(&alloc::vec![1, 2, 3]);
+/// ```
+pub fn mix_and_match() {}
+
+/**
+It is beyond me why you'd use these when you got ///
+```rust
+let _ = example(&[1, 2, 3]);
+```
+[`block_comments2`] tests these with indentation
+ */
+pub fn block_comments() {}
+
+/**
+ Really, I don't get it
+ ```rust
+ let _ = example(&[1, 2, 3]);
+ ```
+ [`block_comments`] tests these without indentation
+*/
+pub fn block_comments2() {}
+
+//- /outline_module.rs
+//! This is an outline module whose purpose is to test that its inline attribute injection does not
+//! spill into its parent.
+//! ```
+//! fn test() {}
+//! ```
+"#,
+ expect_file!["./test_data/highlight_doctest.html"],
+ false,
+ );
+}
+
+/// Snapshot test: highlighting of `extern crate` items, including renames.
+#[test]
+fn test_extern_crate() {
+ check_highlighting(
+ r#"
+//- /main.rs crate:main deps:std,alloc
+extern crate std;
+extern crate alloc as abc;
+//- /std/lib.rs crate:std
+pub struct S;
+//- /alloc/lib.rs crate:alloc
+pub struct A
+"#,
+ expect_file!["./test_data/highlight_extern_crate.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_crate_root() {
+ check_highlighting(
+ r#"
+//- minicore: iterators
+//- /main.rs crate:main deps:foo
+extern crate foo;
+use core::iter;
+
+pub const NINETY_TWO: u8 = 92;
+
+use foo as foooo;
+
+pub(crate) fn main() {
+ let baz = iter::repeat(92);
+}
+
+mod bar {
+ pub(in super) const FORTY_TWO: u8 = 42;
+
+ mod baz {
+ use super::super::NINETY_TWO;
+ use crate::foooo::Point;
+
+ pub(in super::super) const TWENTY_NINE: u8 = 29;
+ }
+}
+//- /foo.rs crate:foo
+struct Point {
+ x: u8,
+ y: u8,
+}
+
+mod inner {
+ pub(super) fn swap(p: crate::Point) -> crate::Point {
+ crate::Point { x: p.y, y: p.x }
+ }
+}
+"#,
+ expect_file!["./test_data/highlight_crate_root.html"],
+ false,
+ );
+}
+
+/// Snapshot test: items coming from the (minicore) standard library get
+/// library-specific highlighting.
+#[test]
+fn test_default_library() {
+ check_highlighting(
+ r#"
+//- minicore: option, iterators
+use core::iter;
+
+fn main() {
+ let foo = Some(92);
+ let nums = iter::repeat(foo.unwrap());
+}
+"#,
+ expect_file!["./test_data/highlight_default_library.html"],
+ false,
+ );
+}
+
+/// Snapshot test: associated functions with and without `self` (static vs.
+/// method) in inherent impls, traits and trait impls.
+#[test]
+fn test_associated_function() {
+ check_highlighting(
+ r#"
+fn not_static() {}
+
+struct foo {}
+
+impl foo {
+ pub fn is_static() {}
+ pub fn is_not_static(&self) {}
+}
+
+trait t {
+ fn t_is_static() {}
+ fn t_is_not_static(&self) {}
+}
+
+impl t for foo {
+ pub fn is_static() {}
+ pub fn is_not_static(&self) {}
+}
+"#,
+ expect_file!["./test_data/highlight_assoc_functions.html"],
+ false,
+ )
+}
+
+#[test]
+fn test_injection() {
+ check_highlighting(
+ r##"
+fn fixture(ra_fixture: &str) {}
+
+fn main() {
+ fixture(r#"
+trait Foo {
+ fn foo() {
+ println!("2 + 2 = {}", 4);
+ }
+}"#
+ );
+ fixture(r"
+fn foo() {
+ foo(\$0{
+ 92
+ }\$0)
+}"
+ );
+}
+"##,
+ expect_file!["./test_data/highlight_injection.html"],
+ false,
+ );
+}
+
+/// Snapshot test: highlighting of arithmetic, bitwise, shift and compound
+/// assignment operators.
+#[test]
+fn test_operators() {
+ check_highlighting(
+ r##"
+fn main() {
+ 1 + 1 - 1 * 1 / 1 % 1 | 1 & 1 ! 1 ^ 1 >> 1 << 1;
+ let mut a = 0;
+ a += 1;
+ a -= 1;
+ a *= 1;
+ a /= 1;
+ a %= 1;
+ a |= 1;
+ a &= 1;
+ a ^= 1;
+ a >>= 1;
+ a <<= 1;
+}
+"##,
+ expect_file!["./test_data/highlight_operators.html"],
+ false,
+ );
+}
+
+#[test]
+fn test_mod_hl_injection() {
+ check_highlighting(
+ r##"
+//- /foo.rs
+//! [Struct]
+//! This is an intra doc injection test for modules
+//! [Struct]
+//! This is an intra doc injection test for modules
+
+pub struct Struct;
+//- /lib.rs crate:foo
+/// [crate::foo::Struct]
+/// This is an intra doc injection test for modules
+/// [crate::foo::Struct]
+/// This is an intra doc injection test for modules
+mod foo;
+"##,
+ expect_file!["./test_data/highlight_module_docs_inline.html"],
+ false,
+ );
+ check_highlighting(
+ r##"
+//- /lib.rs crate:foo
+/// [crate::foo::Struct]
+/// This is an intra doc injection test for modules
+/// [crate::foo::Struct]
+/// This is an intra doc injection test for modules
+mod foo;
+//- /foo.rs
+//! [Struct]
+//! This is an intra doc injection test for modules
+//! [Struct]
+//! This is an intra doc injection test for modules
+
+pub struct Struct;
+"##,
+ expect_file!["./test_data/highlight_module_docs_outline.html"],
+ false,
+ );
+}
+
+// Runs with `rainbow: true`; per the `ignore` reason below, the expected
+// output depends on `DefaultHasher` results, so the test only runs where
+// those are stable (64-bit unix).
+#[test]
+#[cfg_attr(
++ not(all(unix, target_pointer_width = "64")),
+ ignore = "depends on `DefaultHasher` outputs"
+)]
+fn test_rainbow_highlighting() {
+ check_highlighting(
+ r#"
+fn main() {
+ let hello = "hello";
+ let x = hello.to_string();
+ let y = hello.to_string();
+
+ let x = "other color please!";
+ let y = x.to_string();
+}
+
+fn bar() {
+ let mut hello = "hello";
+}
+"#,
+ expect_file!["./test_data/highlight_rainbow.html"],
+ true,
+ );
+}
+
+/// `highlight_range` over a single byte (the field `x`) must return that
+/// element's highlight (`field.declaration.public`) first.
+#[test]
+fn test_ranges() {
+ let (analysis, file_id) = fixture::file(
+ r#"
+#[derive(Clone, Debug)]
+struct Foo {
+ pub x: i32,
+ pub y: i32,
+}
+"#,
+ );
+
+ // The "x"
+ let highlights = &analysis
+ .highlight_range(FileRange { file_id, range: TextRange::at(45.into(), 1.into()) })
+ .unwrap();
+
+ assert_eq!(&highlights[0].highlight.to_string(), "field.declaration.public");
+}
+
+/// Exercises `highlight` on malformed input (note the stray closing brace in
+/// the fixture); only checks that highlighting completes without error.
+#[test]
+fn ranges_sorted() {
+ let (analysis, file_id) = fixture::file(
+ r#"
+#[foo(bar = "bar")]
+macro_rules! test {}
+}"#
+ .trim(),
+ );
+ let _ = analysis.highlight(file_id).unwrap();
+}
+
+/// Highlights the code given by the `ra_fixture` argument, renders the
+/// result as HTML, and compares it with the HTML file given as `snapshot`.
+/// Note that the `snapshot` file is overwritten by the rendered HTML.
+///
+/// `rainbow` is forwarded to `highlight_as_html`.
+fn check_highlighting(ra_fixture: &str, expect: ExpectFile, rainbow: bool) {
+ let (analysis, file_id) = fixture::file(ra_fixture.trim());
+ let actual_html = &analysis.highlight_as_html(file_id, rainbow).unwrap();
+ expect.assert_eq(actual_html)
+}
+
+/// Benchmark (skipped unless slow tests are enabled): highlights a large
+/// generated struct and sanity-checks the number of struct-symbol highlights.
+#[test]
+fn benchmark_syntax_highlighting_long_struct() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let fixture = bench_fixture::big_struct();
+ let (analysis, file_id) = fixture::file(&fixture);
+
+ let hash = {
+ let _pt = bench("syntax highlighting long struct");
+ analysis
+ .highlight(file_id)
+ .unwrap()
+ .iter()
+ .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct))
+ .count()
+ };
+ assert_eq!(hash, 2001);
+}
+
+/// Asserts (via `AssertLinear`) that highlighting time grows linearly in
+/// input size, sampling generated structs of n = 64..=1024 fields.
+#[test]
+fn syntax_highlighting_not_quadratic() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let mut al = AssertLinear::default();
+ while al.next_round() {
+ for i in 6..=10 {
+ let n = 1 << i;
+
+ let fixture = bench_fixture::big_struct_n(n);
+ let (analysis, file_id) = fixture::file(&fixture);
+
+ let time = Instant::now();
+
+ let hash = analysis
+ .highlight(file_id)
+ .unwrap()
+ .iter()
+ .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Struct))
+ .count();
+ // Sanity check that highlighting actually processed the input.
+ assert!(hash > n as usize);
+
+ let elapsed = time.elapsed();
+ al.sample(n as f64, elapsed.as_millis() as f64);
+ }
+ }
+}
+
+/// Benchmark (skipped unless slow tests are enabled): highlights the old
+/// rustc parser source and sanity-checks the function-symbol highlight count.
+#[test]
+fn benchmark_syntax_highlighting_parser() {
+ if skip_slow_tests() {
+ return;
+ }
+
+ let fixture = bench_fixture::glorious_old_parser();
+ let (analysis, file_id) = fixture::file(&fixture);
+
+ let hash = {
+ let _pt = bench("syntax highlighting parser");
+ analysis
+ .highlight(file_id)
+ .unwrap()
+ .iter()
+ .filter(|it| it.highlight.tag == HlTag::Symbol(SymbolKind::Function))
+ .count()
+ };
+ assert_eq!(hash, 1609);
+}
--- /dev/null
- // | VS Code | **Rust Analyzer: Show Syntax Tree**
+use ide_db::base_db::{FileId, SourceDatabase};
+use ide_db::RootDatabase;
+use syntax::{
+ AstNode, NodeOrToken, SourceFile, SyntaxKind::STRING, SyntaxToken, TextRange, TextSize,
+};
+
+// Feature: Show Syntax Tree
+//
+// Shows the parse tree of the current file. It exists mostly for debugging
+// rust-analyzer itself.
+//
+// |===
+// | Editor | Action Name
+//
++// | VS Code | **rust-analyzer: Show Syntax Tree**
+// |===
+// image::https://user-images.githubusercontent.com/48062697/113065586-068bdb80-91b1-11eb-9507-fee67f9f45a0.gif[]
+pub(crate) fn syntax_tree(
+ db: &RootDatabase,
+ file_id: FileId,
+ text_range: Option<TextRange>,
+) -> String {
+ let parse = db.parse(file_id);
+ if let Some(text_range) = text_range {
+ // For a ranged request, debug-print the covering node; a string token
+ // that itself parses as Rust takes precedence (see
+ // `syntax_tree_for_string`).
+ let node = match parse.tree().syntax().covering_element(text_range) {
+ NodeOrToken::Node(node) => node,
+ NodeOrToken::Token(token) => {
+ if let Some(tree) = syntax_tree_for_string(&token, text_range) {
+ return tree;
+ }
+ token.parent().unwrap()
+ }
+ };
+
+ format!("{:#?}", node)
+ } else {
+ // No range: print the whole file's tree.
+ format!("{:#?}", parse.tree().syntax())
+ }
+}
+
+/// Attempts parsing the selected contents of a string literal
+/// as rust syntax and returns its syntax tree
+fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option<String> {
+ // When the range is inside a string
+ // we'll attempt parsing it as rust syntax
+ // to provide the syntax tree of the contents of the string
+ match token.kind() {
+ STRING => syntax_tree_for_token(token, text_range),
+ _ => None,
+ }
+}
+
+/// Slices the selected part out of a string-literal token, strips raw-string
+/// markers/quotes and `$0` cursor markers, and returns the debug-printed
+/// syntax tree if the remainder parses as error-free Rust.
+fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<String> {
+ // Range of the full node
+ let node_range = node.text_range();
+ let text = node.text().to_string();
+
+ // We start at some point inside the node
+ // Either we have selected the whole string
+ // or our selection is inside it
+ let start = text_range.start() - node_range.start();
+
+ // how many characters we have selected
+ let len = text_range.len();
+
+ let node_len = node_range.len();
+
+ // NOTE(review): this rebinding is a no-op — presumably left over from an
+ // earlier refactoring.
+ let start = start;
+
+ // We want to cap our length
+ let len = len.min(node_len);
+
+ // Ensure our slice is inside the actual string
+ // NOTE(review): the else branch yields `total_len - start`, not
+ // `total_len`; verify this cannot make `end < start` (which would panic
+ // in the slice below) for selections near the end of the token.
+ let end =
+ if start + len < TextSize::of(&text) { start + len } else { TextSize::of(&text) - start };
+
+ let text = &text[TextRange::new(start, end)];
+
+ // Remove possible extra string quotes from the start
+ // and the end of the string
+ let text = text
+ .trim_start_matches('r')
+ .trim_start_matches('#')
+ .trim_start_matches('"')
+ .trim_end_matches('#')
+ .trim_end_matches('"')
+ .trim()
+ // Remove custom markers
+ .replace("$0", "");
+
+ let parsed = SourceFile::parse(&text);
+
+ // If the "file" parsed without errors,
+ // return its syntax
+ if parsed.errors().is_empty() {
+ return Some(format!("{:#?}", parsed.tree().syntax()));
+ }
+
+ None
+}
+
+#[cfg(test)]
+mod tests {
+    use expect_test::expect;
+
+    use crate::fixture;
+
+    // Renders the full syntax tree of the fixture file and compares it
+    // against the expected debug dump.
+    fn check(ra_fixture: &str, expect: expect_test::Expect) {
+        let (analysis, file_id) = fixture::file(ra_fixture);
+        let syn = analysis.syntax_tree(file_id, None).unwrap();
+        expect.assert_eq(&syn)
+    }
+    // Same, but renders only the `$0…$0`-selected range of the fixture.
+    fn check_range(ra_fixture: &str, expect: expect_test::Expect) {
+        let (analysis, frange) = fixture::range(ra_fixture);
+        let syn = analysis.syntax_tree(frange.file_id, Some(frange.range)).unwrap();
+        expect.assert_eq(&syn)
+    }
+
+    #[test]
+    fn test_syntax_tree_without_range() {
+        // Basic syntax
+        check(
+            r#"fn foo() {}"#,
+            expect![[r#"
+            SOURCE_FILE@0..11
+              FN@0..11
+                FN_KW@0..2 "fn"
+                WHITESPACE@2..3 " "
+                NAME@3..6
+                  IDENT@3..6 "foo"
+                PARAM_LIST@6..8
+                  L_PAREN@6..7 "("
+                  R_PAREN@7..8 ")"
+                WHITESPACE@8..9 " "
+                BLOCK_EXPR@9..11
+                  STMT_LIST@9..11
+                    L_CURLY@9..10 "{"
+                    R_CURLY@10..11 "}"
+        "#]],
+        );
+
+        check(
+            r#"
+fn test() {
+    assert!("
+    fn foo() {
+    }
+    ", "");
+}"#,
+            expect![[r#"
+            SOURCE_FILE@0..60
+              FN@0..60
+                FN_KW@0..2 "fn"
+                WHITESPACE@2..3 " "
+                NAME@3..7
+                  IDENT@3..7 "test"
+                PARAM_LIST@7..9
+                  L_PAREN@7..8 "("
+                  R_PAREN@8..9 ")"
+                WHITESPACE@9..10 " "
+                BLOCK_EXPR@10..60
+                  STMT_LIST@10..60
+                    L_CURLY@10..11 "{"
+                    WHITESPACE@11..16 "\n    "
+                    EXPR_STMT@16..58
+                      MACRO_EXPR@16..57
+                        MACRO_CALL@16..57
+                          PATH@16..22
+                            PATH_SEGMENT@16..22
+                              NAME_REF@16..22
+                                IDENT@16..22 "assert"
+                          BANG@22..23 "!"
+                          TOKEN_TREE@23..57
+                            L_PAREN@23..24 "("
+                            STRING@24..52 "\"\n    fn foo() {\n    ..."
+                            COMMA@52..53 ","
+                            WHITESPACE@53..54 " "
+                            STRING@54..56 "\"\""
+                            R_PAREN@56..57 ")"
+                      SEMICOLON@57..58 ";"
+                    WHITESPACE@58..59 "\n"
+                    R_CURLY@59..60 "}"
+        "#]],
+        )
+    }
+
+    #[test]
+    fn test_syntax_tree_with_range() {
+        check_range(
+            r#"$0fn foo() {}$0"#,
+            expect![[r#"
+            FN@0..11
+              FN_KW@0..2 "fn"
+              WHITESPACE@2..3 " "
+              NAME@3..6
+                IDENT@3..6 "foo"
+              PARAM_LIST@6..8
+                L_PAREN@6..7 "("
+                R_PAREN@7..8 ")"
+              WHITESPACE@8..9 " "
+              BLOCK_EXPR@9..11
+                STMT_LIST@9..11
+                  L_CURLY@9..10 "{"
+                  R_CURLY@10..11 "}"
+        "#]],
+        );
+
+        check_range(
+            r#"
+fn test() {
+    $0assert!("
+    fn foo() {
+    }
+    ", "");$0
+}"#,
+            expect![[r#"
+            EXPR_STMT@16..58
+              MACRO_EXPR@16..57
+                MACRO_CALL@16..57
+                  PATH@16..22
+                    PATH_SEGMENT@16..22
+                      NAME_REF@16..22
+                        IDENT@16..22 "assert"
+                  BANG@22..23 "!"
+                  TOKEN_TREE@23..57
+                    L_PAREN@23..24 "("
+                    STRING@24..52 "\"\n    fn foo() {\n    ..."
+                    COMMA@52..53 ","
+                    WHITESPACE@53..54 " "
+                    STRING@54..56 "\"\""
+                    R_PAREN@56..57 ")"
+              SEMICOLON@57..58 ";"
+        "#]],
+        );
+    }
+
+    #[test]
+    fn test_syntax_tree_inside_string() {
+        check_range(
+            r#"fn test() {
+    assert!("
+$0fn foo() {
+}$0
+fn bar() {
+}
+    ", "");
+}"#,
+            expect![[r#"
+            SOURCE_FILE@0..12
+              FN@0..12
+                FN_KW@0..2 "fn"
+                WHITESPACE@2..3 " "
+                NAME@3..6
+                  IDENT@3..6 "foo"
+                PARAM_LIST@6..8
+                  L_PAREN@6..7 "("
+                  R_PAREN@7..8 ")"
+                WHITESPACE@8..9 " "
+                BLOCK_EXPR@9..12
+                  STMT_LIST@9..12
+                    L_CURLY@9..10 "{"
+                    WHITESPACE@10..11 "\n"
+                    R_CURLY@11..12 "}"
+        "#]],
+        );
+
+        // With a raw string
+        check_range(
+            r###"fn test() {
+    assert!(r#"
+$0fn foo() {
+}$0
+fn bar() {
+}
+    "#, "");
+}"###,
+            expect![[r#"
+            SOURCE_FILE@0..12
+              FN@0..12
+                FN_KW@0..2 "fn"
+                WHITESPACE@2..3 " "
+                NAME@3..6
+                  IDENT@3..6 "foo"
+                PARAM_LIST@6..8
+                  L_PAREN@6..7 "("
+                  R_PAREN@7..8 ")"
+                WHITESPACE@8..9 " "
+                BLOCK_EXPR@9..12
+                  STMT_LIST@9..12
+                    L_CURLY@9..10 "{"
+                    WHITESPACE@10..11 "\n"
+                    R_CURLY@11..12 "}"
+        "#]],
+        );
+
+        // With a raw string, selecting across the quote/hash characters:
+        // the trimming logic strips them before parsing.
+        check_range(
+            r###"fn test() {
+    assert!(r$0#"
+fn foo() {
+}
+fn bar() {
+}"$0#, "");
+}"###,
+            expect![[r#"
+            SOURCE_FILE@0..25
+              FN@0..12
+                FN_KW@0..2 "fn"
+                WHITESPACE@2..3 " "
+                NAME@3..6
+                  IDENT@3..6 "foo"
+                PARAM_LIST@6..8
+                  L_PAREN@6..7 "("
+                  R_PAREN@7..8 ")"
+                WHITESPACE@8..9 " "
+                BLOCK_EXPR@9..12
+                  STMT_LIST@9..12
+                    L_CURLY@9..10 "{"
+                    WHITESPACE@10..11 "\n"
+                    R_CURLY@11..12 "}"
+              WHITESPACE@12..13 "\n"
+              FN@13..25
+                FN_KW@13..15 "fn"
+                WHITESPACE@15..16 " "
+                NAME@16..19
+                  IDENT@16..19 "bar"
+                PARAM_LIST@19..21
+                  L_PAREN@19..20 "("
+                  R_PAREN@20..21 ")"
+                WHITESPACE@21..22 " "
+                BLOCK_EXPR@22..25
+                  STMT_LIST@22..25
+                    L_CURLY@22..23 "{"
+                    WHITESPACE@23..24 "\n"
+                    R_CURLY@24..25 "}"
+        "#]],
+        );
+    }
+}
--- /dev/null
- // | VS Code | **Rust Analyzer: View Crate Graph**
+use std::sync::Arc;
+
+use dot::{Id, LabelText};
+use ide_db::{
+ base_db::{CrateGraph, CrateId, Dependency, SourceDatabase, SourceDatabaseExt},
+ FxHashSet, RootDatabase,
+};
+
+// Feature: View Crate Graph
+//
+// Renders the currently loaded crate graph as an SVG graphic. Requires the `dot` tool, which
+// is part of graphviz, to be installed.
+//
+// Only workspace crates are included, no crates.io dependencies or sysroot crates.
+//
+// |===
+// | Editor | Action Name
+//
++// | VS Code | **rust-analyzer: View Crate Graph**
+// |===
+/// Renders the crate graph in `dot` syntax. With `full == false`, crates
+/// whose root file lives in a library source root are filtered out, leaving
+/// only workspace crates.
+pub(crate) fn view_crate_graph(db: &RootDatabase, full: bool) -> Result<String, String> {
+    let crate_graph = db.crate_graph();
+    let crates_to_render = crate_graph
+        .iter()
+        .filter(|krate| {
+            // Render everything when the full graph was requested; otherwise
+            // only crates outside library source roots.
+            full || {
+                let root_id = db.file_source_root(crate_graph[*krate].root_file_id);
+                !db.source_root(root_id).is_library
+            }
+        })
+        .collect();
+    let graph = DotCrateGraph { graph: crate_graph, crates_to_render };
+
+    let mut dot = Vec::new();
+    dot::render(&graph, &mut dot).unwrap();
+    Ok(String::from_utf8(dot).unwrap())
+}
+
+/// Adapter that feeds the crate graph to the `dot` crate's renderer.
+struct DotCrateGraph {
+    graph: Arc<CrateGraph>,
+    /// Subset of `graph` that actually appears in the output.
+    crates_to_render: FxHashSet<CrateId>,
+}
+
+/// A dependency edge: source crate plus the dependency record naming the target.
+type Edge<'a> = (CrateId, &'a Dependency);
+
+impl<'a> dot::GraphWalk<'a, CrateId, Edge<'a>> for DotCrateGraph {
+    fn nodes(&'a self) -> dot::Nodes<'a, CrateId> {
+        self.crates_to_render.iter().copied().collect()
+    }
+
+    // Only edges between two rendered crates are emitted; dependencies on
+    // filtered-out crates are dropped.
+    fn edges(&'a self) -> dot::Edges<'a, Edge<'a>> {
+        self.crates_to_render
+            .iter()
+            .flat_map(|krate| {
+                self.graph[*krate]
+                    .dependencies
+                    .iter()
+                    .filter(|dep| self.crates_to_render.contains(&dep.crate_id))
+                    .map(move |dep| (*krate, dep))
+            })
+            .collect()
+    }
+
+    fn source(&'a self, edge: &Edge<'a>) -> CrateId {
+        edge.0
+    }
+
+    fn target(&'a self, edge: &Edge<'a>) -> CrateId {
+        edge.1.crate_id
+    }
+}
+
+impl<'a> dot::Labeller<'a, CrateId, Edge<'a>> for DotCrateGraph {
+    fn graph_id(&'a self) -> Id<'a> {
+        Id::new("rust_analyzer_crate_graph").unwrap()
+    }
+
+    // Crate ids are numeric; the `_` prefix is presumably required for the id
+    // to be accepted by `dot::Id::new` — confirm against its requirements.
+    fn node_id(&'a self, n: &CrateId) -> Id<'a> {
+        Id::new(format!("_{}", n.0)).unwrap()
+    }
+
+    fn node_shape(&'a self, _node: &CrateId) -> Option<LabelText<'a>> {
+        Some(LabelText::LabelStr("box".into()))
+    }
+
+    fn node_label(&'a self, n: &CrateId) -> LabelText<'a> {
+        let name = self.graph[*n].display_name.as_ref().map_or("(unnamed crate)", |name| &*name);
+        LabelText::LabelStr(name.into())
+    }
+}
--- /dev/null
- // | VS Code | **Rust Analyzer: View Hir**
+use hir::{Function, Semantics};
+use ide_db::base_db::FilePosition;
+use ide_db::RootDatabase;
+use syntax::{algo::find_node_at_offset, ast, AstNode};
+
+// Feature: View Hir
+//
+// |===
+// | Editor | Action Name
+//
++// | VS Code | **rust-analyzer: View Hir**
+// |===
+// image::https://user-images.githubusercontent.com/48062697/113065588-068bdb80-91b1-11eb-9a78-0b4ef1e972fb.gif[]
+/// Renders the HIR of the function containing `position`, or a fallback
+/// message when the cursor is not inside a function body.
+pub(crate) fn view_hir(db: &RootDatabase, position: FilePosition) -> String {
+    match body_hir(db, position) {
+        Some(hir) => hir,
+        None => "Not inside a function body".to_string(),
+    }
+}
+
+/// Computes the HIR debug dump for the `fn` whose syntax covers `position`;
+/// `None` when the offset is not inside a function.
+fn body_hir(db: &RootDatabase, position: FilePosition) -> Option<String> {
+    let sema = Semantics::new(db);
+    let source_file = sema.parse(position.file_id);
+
+    // Locate the enclosing `fn` syntactically, then resolve it to its
+    // semantic `Function` before asking HIR for the dump.
+    let function: Function =
+        sema.to_def(&find_node_at_offset::<ast::Fn>(source_file.syntax(), position.offset)?)?;
+    Some(function.debug_hir(db))
+}
--- /dev/null
- // | VS Code | **Rust Analyzer: Debug ItemTree**
+use hir::db::DefDatabase;
+use ide_db::base_db::FileId;
+use ide_db::RootDatabase;
+
+// Feature: Debug ItemTree
+//
+// Displays the ItemTree of the currently open file, for debugging.
+//
+// |===
+// | Editor | Action Name
+//
++// | VS Code | **rust-analyzer: Debug ItemTree**
+// |===
+/// Lowers `file_id` to its `ItemTree` and pretty-prints it for debugging.
+pub(crate) fn view_item_tree(db: &RootDatabase, file_id: FileId) -> String {
+    let item_tree = db.file_item_tree(file_id.into());
+    item_tree.pretty_print()
+}
--- /dev/null
+//! Thin wrappers around `std::path`, distinguishing between absolute and
+//! relative paths.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use std::{
+ borrow::Borrow,
+ ffi::OsStr,
+ ops,
+ path::{Component, Path, PathBuf},
+};
+
+/// Wrapper around an absolute [`PathBuf`].
+///
+/// Invariant: the wrapped path is absolute — every constructor in this file
+/// checks `Path::is_absolute` before wrapping.
+#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
+pub struct AbsPathBuf(PathBuf);
+
+// Unwraps back into the underlying `PathBuf`, discarding the "absolute" guarantee.
+impl From<AbsPathBuf> for PathBuf {
+    fn from(AbsPathBuf(path_buf): AbsPathBuf) -> PathBuf {
+        path_buf
+    }
+}
+
+// Deref to the borrowed form, mirroring `PathBuf: Deref<Target = Path>`.
+impl ops::Deref for AbsPathBuf {
+    type Target = AbsPath;
+    fn deref(&self) -> &AbsPath {
+        self.as_path()
+    }
+}
+
+impl AsRef<Path> for AbsPathBuf {
+    fn as_ref(&self) -> &Path {
+        self.0.as_path()
+    }
+}
+
+impl AsRef<AbsPath> for AbsPathBuf {
+    fn as_ref(&self) -> &AbsPath {
+        self.as_path()
+    }
+}
+
+// Lets `AbsPath` act as the borrowed form of `AbsPathBuf` (e.g. as a map key).
+impl Borrow<AbsPath> for AbsPathBuf {
+    fn borrow(&self) -> &AbsPath {
+        self.as_path()
+    }
+}
+
+impl TryFrom<PathBuf> for AbsPathBuf {
+    type Error = PathBuf;
+    /// Succeeds only for absolute paths; a relative path is handed back
+    /// unchanged as the error.
+    fn try_from(path_buf: PathBuf) -> Result<AbsPathBuf, PathBuf> {
+        match path_buf.is_absolute() {
+            true => Ok(AbsPathBuf(path_buf)),
+            false => Err(path_buf),
+        }
+    }
+}
+
+impl TryFrom<&str> for AbsPathBuf {
+    type Error = PathBuf;
+    // Convenience: parses the string into a `PathBuf` and defers to the
+    // `TryFrom<PathBuf>` impl for the absoluteness check.
+    fn try_from(path: &str) -> Result<AbsPathBuf, PathBuf> {
+        AbsPathBuf::try_from(PathBuf::from(path))
+    }
+}
+
+// Allows comparing the owned and borrowed forms directly.
+impl PartialEq<AbsPath> for AbsPathBuf {
+    fn eq(&self, other: &AbsPath) -> bool {
+        self.as_path() == other
+    }
+}
+
+impl AbsPathBuf {
+    /// Wrap the given absolute path in `AbsPathBuf`
+    ///
+    /// # Panics
+    ///
+    /// Panics if `path` is not absolute.
+    pub fn assert(path: PathBuf) -> AbsPathBuf {
+        AbsPathBuf::try_from(path)
+            .unwrap_or_else(|path| panic!("expected absolute path, got {}", path.display()))
+    }
+
+    /// Coerces to an `AbsPath` slice.
+    ///
+    /// Equivalent of [`PathBuf::as_path`] for `AbsPathBuf`.
+    // `AbsPath::assert` cannot panic here: `self.0` is absolute by invariant.
+    pub fn as_path(&self) -> &AbsPath {
+        AbsPath::assert(self.0.as_path())
+    }
+
+    /// Equivalent of [`PathBuf::pop`] for `AbsPathBuf`.
+    ///
+    /// Note that this won't remove the root component, so `self` will still be
+    /// absolute.
+    pub fn pop(&mut self) -> bool {
+        self.0.pop()
+    }
+}
+
+/// Wrapper around an absolute [`Path`].
+///
+/// `#[repr(transparent)]` guarantees the same layout as `Path`, which the
+/// reference cast in [`AbsPath::assert`] relies on.
+#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
+#[repr(transparent)]
+pub struct AbsPath(Path);
+
+impl AsRef<Path> for AbsPath {
+    fn as_ref(&self) -> &Path {
+        &self.0
+    }
+}
+
++// `ToOwned` pairs with the `Borrow<AbsPath>` impl on `AbsPathBuf`, enabling
++// `.to_owned()` and `Cow<AbsPath>` on borrowed paths.
++impl ToOwned for AbsPath {
++    type Owned = AbsPathBuf;
++
++    fn to_owned(&self) -> Self::Owned {
++        AbsPathBuf(self.0.to_owned())
++    }
++}
++
+impl<'a> TryFrom<&'a Path> for &'a AbsPath {
+    type Error = &'a Path;
+    /// Borrows `path` as an `AbsPath`, or returns it unchanged as the error
+    /// when it is relative.
+    fn try_from(path: &'a Path) -> Result<&'a AbsPath, &'a Path> {
+        match path.is_absolute() {
+            true => Ok(AbsPath::assert(path)),
+            false => Err(path),
+        }
+    }
+}
+
+impl AbsPath {
+    /// Wrap the given absolute path in `AbsPath`
+    ///
+    /// # Panics
+    ///
+    /// Panics if `path` is not absolute.
+    pub fn assert(path: &Path) -> &AbsPath {
+        assert!(path.is_absolute());
+        // SAFETY: `AbsPath` is `#[repr(transparent)]` over `Path`, so the
+        // pointer cast preserves layout; the assert above upholds the
+        // "absolute" invariant.
+        unsafe { &*(path as *const Path as *const AbsPath) }
+    }
+
+    /// Equivalent of [`Path::parent`] for `AbsPath`.
+    pub fn parent(&self) -> Option<&AbsPath> {
+        self.0.parent().map(AbsPath::assert)
+    }
+
+    /// Equivalent of [`Path::join`] for `AbsPath`.
+    // Joining onto an absolute base always yields an absolute path (an
+    // absolute `path` argument replaces the base), so `try_into` cannot fail.
+    pub fn join(&self, path: impl AsRef<Path>) -> AbsPathBuf {
+        self.as_ref().join(path).try_into().unwrap()
+    }
+
+    /// Normalize the given path:
+    /// - Removes repeated separators: `/a//b` becomes `/a/b`
+    /// - Removes occurrences of `.` and resolves `..`.
+    /// - Removes trailing slashes: `/a/b/` becomes `/a/b`.
+    ///
+    /// # Example
+    /// ```
+    /// # use paths::AbsPathBuf;
+    /// let abs_path_buf = AbsPathBuf::assert("/a/../../b/.//c//".into());
+    /// let normalized = abs_path_buf.normalize();
+    /// assert_eq!(normalized, AbsPathBuf::assert("/b/c".into()));
+    /// ```
+    pub fn normalize(&self) -> AbsPathBuf {
+        AbsPathBuf(normalize_path(&self.0))
+    }
+
+    /// Equivalent of [`Path::to_path_buf`] for `AbsPath`.
+    pub fn to_path_buf(&self) -> AbsPathBuf {
+        AbsPathBuf::try_from(self.0.to_path_buf()).unwrap()
+    }
+
+    /// Equivalent of [`Path::strip_prefix`] for `AbsPath`.
+    ///
+    /// Returns a relative path.
+    // Stripping an absolute prefix from an absolute path leaves a relative
+    // remainder, hence `new_unchecked` is fine here.
+    pub fn strip_prefix(&self, base: &AbsPath) -> Option<&RelPath> {
+        self.0.strip_prefix(base).ok().map(RelPath::new_unchecked)
+    }
+    /// Equivalent of [`Path::starts_with`] for `AbsPath`.
+    pub fn starts_with(&self, base: &AbsPath) -> bool {
+        self.0.starts_with(&base.0)
+    }
+    /// Equivalent of [`Path::ends_with`], taking a relative suffix.
+    pub fn ends_with(&self, suffix: &RelPath) -> bool {
+        self.0.ends_with(&suffix.0)
+    }
+
+    // region:delegate-methods
+
+    // Note that we deliberately don't implement `Deref<Target = Path>` here.
+    //
+    // The problem with `Path` is that it directly exposes convenience IO-ing
+    // methods. For example, `Path::exists` delegates to `fs::metadata`.
+    //
+    // For `AbsPath`, we want to make sure that this is a POD type, and that all
+    // IO goes via `fs`. That way, it becomes easier to mock IO when we need it.
+
+    pub fn file_name(&self) -> Option<&OsStr> {
+        self.0.file_name()
+    }
+    pub fn extension(&self) -> Option<&OsStr> {
+        self.0.extension()
+    }
+    pub fn file_stem(&self) -> Option<&OsStr> {
+        self.0.file_stem()
+    }
+    pub fn as_os_str(&self) -> &OsStr {
+        self.0.as_os_str()
+    }
+    pub fn display(&self) -> std::path::Display<'_> {
+        self.0.display()
+    }
+    #[deprecated(note = "use std::fs::metadata().is_ok() instead")]
+    pub fn exists(&self) -> bool {
+        self.0.exists()
+    }
+    // endregion:delegate-methods
+}
+
+/// Wrapper around a relative [`PathBuf`].
+///
+/// Invariant: the wrapped path is relative (checked by the `TryFrom` impls).
+#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
+pub struct RelPathBuf(PathBuf);
+
+// Unwraps back into the underlying `PathBuf`.
+impl From<RelPathBuf> for PathBuf {
+    fn from(RelPathBuf(path_buf): RelPathBuf) -> PathBuf {
+        path_buf
+    }
+}
+
+// Deref to the borrowed form, mirroring `PathBuf: Deref<Target = Path>`.
+impl ops::Deref for RelPathBuf {
+    type Target = RelPath;
+    fn deref(&self) -> &RelPath {
+        self.as_path()
+    }
+}
+
+impl AsRef<Path> for RelPathBuf {
+    fn as_ref(&self) -> &Path {
+        self.0.as_path()
+    }
+}
+
+impl TryFrom<PathBuf> for RelPathBuf {
+    type Error = PathBuf;
+    /// Succeeds only for relative paths; an absolute path is handed back
+    /// unchanged as the error.
+    fn try_from(path_buf: PathBuf) -> Result<RelPathBuf, PathBuf> {
+        match path_buf.is_relative() {
+            true => Ok(RelPathBuf(path_buf)),
+            false => Err(path_buf),
+        }
+    }
+}
+
+impl TryFrom<&str> for RelPathBuf {
+    type Error = PathBuf;
+    // Convenience: parses the string and defers to the `TryFrom<PathBuf>` impl.
+    fn try_from(path: &str) -> Result<RelPathBuf, PathBuf> {
+        RelPathBuf::try_from(PathBuf::from(path))
+    }
+}
+
+impl RelPathBuf {
+    /// Coerces to a `RelPath` slice.
+    ///
+    /// Equivalent of [`PathBuf::as_path`] for `RelPathBuf`.
+    pub fn as_path(&self) -> &RelPath {
+        RelPath::new_unchecked(self.0.as_path())
+    }
+}
+
+/// Wrapper around a relative [`Path`].
+///
+/// `#[repr(transparent)]` guarantees the same layout as `Path`, which the
+/// reference cast in [`RelPath::new_unchecked`] relies on.
+#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
+#[repr(transparent)]
+pub struct RelPath(Path);
+
+impl AsRef<Path> for RelPath {
+    fn as_ref(&self) -> &Path {
+        &self.0
+    }
+}
+
+impl RelPath {
+    /// Creates a new `RelPath` from `path`, without checking if it is relative.
+    pub fn new_unchecked(path: &Path) -> &RelPath {
+        // SAFETY: `RelPath` is `#[repr(transparent)]` over `Path`, so the
+        // pointer cast preserves layout.
+        unsafe { &*(path as *const Path as *const RelPath) }
+    }
+}
+
+/// Taken from <https://github.com/rust-lang/cargo/blob/79c769c3d7b4c2cf6a93781575b7f592ef974255/src/cargo/util/paths.rs#L60-L85>
+///
+/// Lexically normalizes a path: collapses `.`, resolves `..` against the
+/// accumulated prefix, and drops repeated/trailing separators. Purely
+/// textual — the filesystem is never consulted.
+fn normalize_path(path: &Path) -> PathBuf {
+    let mut parts = path.components().peekable();
+
+    // A Windows-style prefix (`C:`, `\\server\share`, ...) must seed the
+    // result, since it can never be popped by `..`.
+    let mut normalized = match parts.peek().copied() {
+        Some(prefix @ Component::Prefix(..)) => {
+            parts.next();
+            PathBuf::from(prefix.as_os_str())
+        }
+        _ => PathBuf::new(),
+    };
+
+    for part in parts {
+        match part {
+            // The prefix, if any, was consumed above.
+            Component::Prefix(..) => unreachable!(),
+            Component::RootDir => normalized.push(part.as_os_str()),
+            Component::CurDir => {}
+            Component::ParentDir => {
+                // `pop` on a bare root is a no-op, so `..` never escapes `/`.
+                normalized.pop();
+            }
+            Component::Normal(name) => normalized.push(name),
+        }
+    }
+    normalized
+}
--- /dev/null
- let msg = format!("proc-macro {} built by {:#?} is not supported by Rust Analyzer, please update your rust version.", path.display(), info);
+//! Client-side Proc-Macro crate
+//!
+//! We separate proc-macro expanding logic to an extern program to allow
+//! different implementations (e.g. wasm or dylib loading). And this crate
+//! is used to provide basic infrastructure for communication between two
+//! processes: Client (RA itself), Server (the external program)
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+pub mod msg;
+mod process;
+mod version;
+
+use paths::AbsPathBuf;
+use std::{
+ ffi::OsStr,
+ fmt, io,
+ sync::{Arc, Mutex},
+};
+
+use serde::{Deserialize, Serialize};
+use tt::Subtree;
+
+use crate::{
+ msg::{ExpandMacro, FlatTree, PanicMessage},
+ process::ProcMacroProcessSrv,
+};
+
+pub use version::{read_dylib_info, read_version, RustCInfo};
+
+/// The kind of a proc-macro exported by a dylib, as reported by the expander.
+#[derive(Copy, Clone, Eq, PartialEq, Debug, Serialize, Deserialize)]
+pub enum ProcMacroKind {
+    CustomDerive,
+    FuncLike,
+    Attr,
+}
+
+/// A handle to an external process which load dylibs with macros (.so or .dll)
+/// and runs actual macro expansion functions.
+#[derive(Debug)]
+pub struct ProcMacroServer {
+    /// Currently, the proc macro process expands all procedural macros sequentially.
+    ///
+    /// That means that concurrent salsa requests may block each other when expanding proc macros,
+    /// which is unfortunate, but simple and good enough for the time being.
+    ///
+    /// Therefore, we just wrap the `ProcMacroProcessSrv` in a mutex here.
+    process: Arc<Mutex<ProcMacroProcessSrv>>,
+}
+
+/// A macro dylib whose rustc version passed the check in [`MacroDylib::new`].
+pub struct MacroDylib {
+    path: AbsPathBuf,
+}
+
+impl MacroDylib {
+    // FIXME: this is buggy due to TOCTOU, we should check the version in the
+    // macro process instead.
+    /// Wraps `path` as a macro dylib, rejecting libraries built by a rustc
+    /// older than 1.47.
+    ///
+    /// # Errors
+    ///
+    /// Propagates failures to read the dylib's version info, and returns an
+    /// `InvalidData` error for unsupported compiler versions.
+    pub fn new(path: AbsPathBuf) -> io::Result<MacroDylib> {
+        let _p = profile::span("MacroDylib::new");
+
+        let info = version::read_dylib_info(&path)?;
+        // Require rustc >= 1.47. The previous condition
+        // (`version.0 < 1 || version.1 < 47`) also rejected any future major
+        // version whose minor component happened to be below 47.
+        if info.version.0 < 1 || (info.version.0 == 1 && info.version.1 < 47) {
++            let msg = format!("proc-macro {} built by {:#?} is not supported by rust-analyzer, please update your Rust version.", path.display(), info);
+            return Err(io::Error::new(io::ErrorKind::InvalidData, msg));
+        }
+
+        Ok(MacroDylib { path })
+    }
+}
+
+/// A handle to a specific macro (a `#[proc_macro]` annotated function).
+///
+/// It exists within a context of a specific [`ProcMacroProcess`] -- currently
+/// we share a single expander process for all macros.
+#[derive(Debug, Clone)]
+pub struct ProcMacro {
+    process: Arc<Mutex<ProcMacroProcessSrv>>,
+    dylib_path: AbsPathBuf,
+    name: String,
+    kind: ProcMacroKind,
+}
+
+// Manual impl: two handles are equal when they name the same macro in the
+// same dylib and are served by the same expander process (pointer equality).
+impl Eq for ProcMacro {}
+impl PartialEq for ProcMacro {
+    fn eq(&self, other: &Self) -> bool {
+        self.name == other.name
+            && self.kind == other.kind
+            && self.dylib_path == other.dylib_path
+            && Arc::ptr_eq(&self.process, &other.process)
+    }
+}
+
+/// An error reported by the proc-macro server, optionally carrying the
+/// underlying I/O error.
+pub struct ServerError {
+    pub message: String,
+    pub io: Option<io::Error>,
+}
+
+impl fmt::Display for ServerError {
+    // Renders as `message` or `message: io-error` when an I/O cause is set.
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        self.message.fmt(f)?;
+        if let Some(io) = &self.io {
+            f.write_str(": ")?;
+            io.fmt(f)?;
+        }
+        Ok(())
+    }
+}
+
+/// A panic message originating from a macro expansion.
+pub struct MacroPanic {
+    pub message: String,
+}
+
+impl ProcMacroServer {
+    /// Spawns an external process as the proc macro server and returns a client connected to it.
+    pub fn spawn(
+        process_path: AbsPathBuf,
+        args: impl IntoIterator<Item = impl AsRef<OsStr>>,
+    ) -> io::Result<ProcMacroServer> {
+        let process = ProcMacroProcessSrv::run(process_path, args)?;
+        Ok(ProcMacroServer { process: Arc::new(Mutex::new(process)) })
+    }
+
+    /// Asks the server which macros `dylib` exports and returns a `ProcMacro`
+    /// handle for each of them.
+    pub fn load_dylib(&self, dylib: MacroDylib) -> Result<Vec<ProcMacro>, ServerError> {
+        let _p = profile::span("ProcMacroClient::by_dylib_path");
+        // `into_inner` on a poisoned lock: keep using the server even if
+        // another thread panicked while holding the mutex.
+        let macros =
+            self.process.lock().unwrap_or_else(|e| e.into_inner()).find_proc_macros(&dylib.path)?;
+
+        match macros {
+            Ok(macros) => Ok(macros
+                .into_iter()
+                .map(|(name, kind)| ProcMacro {
+                    process: self.process.clone(),
+                    name,
+                    kind,
+                    dylib_path: dylib.path.clone(),
+                })
+                .collect()),
+            Err(message) => Err(ServerError { message, io: None }),
+        }
+    }
+}
+
+impl ProcMacro {
+    /// Name of the `#[proc_macro]` function this handle refers to.
+    pub fn name(&self) -> &str {
+        &self.name
+    }
+
+    pub fn kind(&self) -> ProcMacroKind {
+        self.kind
+    }
+
+    /// Expands `subtree` (plus the optional `attr` input of attribute macros)
+    /// in the server process.
+    ///
+    /// The outer `Result` is a transport/server failure; the inner one is a
+    /// panic raised by the macro itself.
+    pub fn expand(
+        &self,
+        subtree: &Subtree,
+        attr: Option<&Subtree>,
+        env: Vec<(String, String)>,
+    ) -> Result<Result<Subtree, PanicMessage>, ServerError> {
+        // Forward CARGO_MANIFEST_DIR from `env` as the expansion's working
+        // directory, when the caller provided one.
+        let current_dir = env
+            .iter()
+            .find(|(name, _)| name == "CARGO_MANIFEST_DIR")
+            .map(|(_, value)| value.clone());
+
+        let task = ExpandMacro {
+            macro_body: FlatTree::new(subtree),
+            macro_name: self.name.to_string(),
+            attributes: attr.map(FlatTree::new),
+            lib: self.dylib_path.to_path_buf().into(),
+            env,
+            current_dir,
+        };
+
+        let request = msg::Request::ExpandMacro(task);
+        // Ignore mutex poisoning, as in `ProcMacroServer::load_dylib`.
+        let response = self.process.lock().unwrap_or_else(|e| e.into_inner()).send_task(request)?;
+        match response {
+            msg::Response::ExpandMacro(it) => Ok(it.map(FlatTree::to_subtree)),
+            msg::Response::ListMacros { .. } => {
+                Err(ServerError { message: "unexpected response".to_string(), io: None })
+            }
+        }
+    }
+}
--- /dev/null
- //! interface the rest of rust analyzer can use to talk to the macro
+//! Procedural macros are implemented by compiling the macro providing crate
+//! to a dynamic library with a particular ABI which the compiler uses to expand
+//! macros. Unfortunately this ABI is not specified and can change from version
+//! to version of the compiler. To support this we copy the ABI from the rust
+//! compiler into submodules of this module (e.g. `proc_macro_srv::abis::abi_1_58`).
+//!
+//! All of these ABIs are subsumed in the `Abi` enum, which exposes a simple
++//! interface the rest of rust-analyzer can use to talk to the macro
+//! provider.
+//!
+//! # Adding a new ABI
+//!
+//! To add a new ABI you'll need to copy the source of the target proc_macro
+//! crate from the source tree of the Rust compiler into this directory tree.
+//! Then you'll need to modify it
+//! - Remove any feature! or other things which won't compile on stable
+//! - change any absolute imports to relative imports within the ABI tree
+//!
+//! Then you'll need to add a branch to the `Abi` enum and an implementation of
+//! `Abi::expand`, `Abi::list_macros` and `Abi::from_lib` for the new ABI. See
+//! `proc_macro_srv/src/abis/abi_1_58/mod.rs` for an example. Finally you'll
+//! need to update the conditionals in `Abi::from_lib` to return your new ABI
+//! for the relevant versions of the rust compiler
+//!
+
+mod abi_1_58;
+mod abi_1_63;
+mod abi_1_64;
+#[cfg(feature = "sysroot-abi")]
+mod abi_sysroot;
+
+// see `build.rs`
+include!(concat!(env!("OUT_DIR"), "/rustc_version.rs"));
+
+// Used by `test/utils.rs`
+#[cfg(test)]
+pub(crate) use abi_1_64::TokenStream as TestTokenStream;
+
+use super::dylib::LoadProcMacroDylibError;
+pub(crate) use abi_1_58::Abi as Abi_1_58;
+pub(crate) use abi_1_63::Abi as Abi_1_63;
+pub(crate) use abi_1_64::Abi as Abi_1_64;
+#[cfg(feature = "sysroot-abi")]
+pub(crate) use abi_sysroot::Abi as Abi_Sysroot;
+use libloading::Library;
+use proc_macro_api::{ProcMacroKind, RustCInfo};
+
+/// Wrapper around the (optional) message of a panic that occurred while a
+/// macro was expanding.
+pub struct PanicMessage {
+    message: Option<String>,
+}
+
+impl PanicMessage {
+    /// Returns a copy of the panic message, if one was captured.
+    /// (Despite the name, this clones into a fresh `String`.)
+    pub fn as_str(&self) -> Option<String> {
+        self.message.clone()
+    }
+}
+
+/// One loaded proc-macro ABI; each variant wraps the expander for the
+/// corresponding rustc version range (selected in `Abi::from_lib`).
+pub(crate) enum Abi {
+    Abi1_58(Abi_1_58),
+    Abi1_63(Abi_1_63),
+    Abi1_64(Abi_1_64),
+    #[cfg(feature = "sysroot-abi")]
+    AbiSysroot(Abi_Sysroot),
+}
+
+impl Abi {
+    /// Load a new ABI.
+    ///
+    /// # Arguments
+    ///
+    /// *`lib` - The dynamic library containing the macro implementations
+    /// *`symbol_name` - The symbol name under which the macros can be found
+    /// *`info` - RustCInfo about the compiler that was used to compile the
+    /// macro crate. This is the information we use to figure out
+    /// which ABI to return
+    pub fn from_lib(
+        lib: &Library,
+        symbol_name: String,
+        info: RustCInfo,
+    ) -> Result<Abi, LoadProcMacroDylibError> {
+        // the sysroot ABI relies on `extern proc_macro` with unstable features,
+        // instead of a snapshot of the proc macro bridge's source code. it's only
+        // enabled if we have an exact version match.
+        #[cfg(feature = "sysroot-abi")]
+        {
+            if info.version_string == RUSTC_VERSION_STRING {
+                let inner = unsafe { Abi_Sysroot::from_lib(lib, symbol_name) }?;
+                return Ok(Abi::AbiSysroot(inner));
+            }
+
+            // if we reached this point, versions didn't match. in testing, we
+            // want that to panic - this could mean that the format of `rustc
+            // --version` no longer matches the format of the version string
+            // stored in the `.rustc` section, and we want to catch that in-tree
+            // with `x.py test`
+            #[cfg(test)]
+            {
+                let allow_mismatch = std::env::var("PROC_MACRO_SRV_ALLOW_SYSROOT_MISMATCH");
+                if let Ok("1") = allow_mismatch.as_deref() {
+                    // only used by rust-analyzer developers, when working on the
+                    // sysroot ABI from the rust-analyzer repository - which should
+                    // only happen pre-subtree. this can be removed later.
+                } else {
+                    panic!(
+                        "sysroot ABI mismatch: dylib rustc version (read from .rustc section): {:?} != proc-macro-srv version (read from 'rustc --version'): {:?}",
+                        info.version_string, RUSTC_VERSION_STRING
+                    );
+                }
+            }
+        }
+
+        // Select the snapshot ABI by the rustc minor version that built the dylib.
+        // FIXME: this should use exclusive ranges when they're stable
+        // https://github.com/rust-lang/rust/issues/37854
+        match (info.version.0, info.version.1) {
+            (1, 58..=62) => {
+                let inner = unsafe { Abi_1_58::from_lib(lib, symbol_name) }?;
+                Ok(Abi::Abi1_58(inner))
+            }
+            (1, 63) => {
+                let inner = unsafe { Abi_1_63::from_lib(lib, symbol_name) }?;
+                Ok(Abi::Abi1_63(inner))
+            }
+            (1, 64..) => {
+                let inner = unsafe { Abi_1_64::from_lib(lib, symbol_name) }?;
+                Ok(Abi::Abi1_64(inner))
+            }
+            _ => Err(LoadProcMacroDylibError::UnsupportedABI),
+        }
+    }
+
+    // Dispatches to whichever ABI was selected at load time.
+    pub fn expand(
+        &self,
+        macro_name: &str,
+        macro_body: &tt::Subtree,
+        attributes: Option<&tt::Subtree>,
+    ) -> Result<tt::Subtree, PanicMessage> {
+        match self {
+            Self::Abi1_58(abi) => abi.expand(macro_name, macro_body, attributes),
+            Self::Abi1_63(abi) => abi.expand(macro_name, macro_body, attributes),
+            Self::Abi1_64(abi) => abi.expand(macro_name, macro_body, attributes),
+            #[cfg(feature = "sysroot-abi")]
+            Self::AbiSysroot(abi) => abi.expand(macro_name, macro_body, attributes),
+        }
+    }
+
+    // Dispatches to whichever ABI was selected at load time.
+    pub fn list_macros(&self) -> Vec<(String, ProcMacroKind)> {
+        match self {
+            Self::Abi1_58(abi) => abi.list_macros(),
+            Self::Abi1_63(abi) => abi.list_macros(),
+            Self::Abi1_64(abi) => abi.list_macros(),
+            #[cfg(feature = "sysroot-abi")]
+            Self::AbiSysroot(abi) => abi.list_macros(),
+        }
+    }
+}
--- /dev/null
- /// Note that internally, rust analyzer uses a different structure:
+//! See [`CargoWorkspace`].
+
+use std::iter;
+use std::path::PathBuf;
+use std::{ops, process::Command};
+
+use anyhow::{Context, Result};
+use base_db::Edition;
+use cargo_metadata::{CargoOpt, MetadataCommand};
+use la_arena::{Arena, Idx};
+use paths::{AbsPath, AbsPathBuf};
+use rustc_hash::FxHashMap;
+use serde::Deserialize;
+use serde_json::from_value;
+
+use crate::CfgOverrides;
+use crate::{utf8_stdout, ManifestPath};
+
+/// [`CargoWorkspace`] represents the logical structure of, well, a Cargo
+/// workspace. It pretty closely mirrors `cargo metadata` output.
+///
++/// Note that internally, rust-analyzer uses a different structure:
+/// `CrateGraph`. `CrateGraph` is lower-level: it knows only about the crates,
+/// while this knows about `Packages` & `Targets`: purely cargo-related
+/// concepts.
+///
+/// We use absolute paths here, `cargo metadata` guarantees to always produce
+/// abs paths.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct CargoWorkspace {
+    packages: Arena<PackageData>,
+    targets: Arena<TargetData>,
+    workspace_root: AbsPathBuf,
+}
+
+// Allows `workspace[package]` indexing with the arena handle.
+impl ops::Index<Package> for CargoWorkspace {
+    type Output = PackageData;
+    fn index(&self, index: Package) -> &PackageData {
+        &self.packages[index]
+    }
+}
+
+// Allows `workspace[target]` indexing with the arena handle.
+impl ops::Index<Target> for CargoWorkspace {
+    type Output = TargetData;
+    fn index(&self, index: Target) -> &TargetData {
+        &self.targets[index]
+    }
+}
+
+/// Describes how to set the rustc source directory.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum RustcSource {
+    /// Explicit path for the rustc source directory.
+    Path(AbsPathBuf),
+    /// Try to automatically detect where the rustc source directory is.
+    Discover,
+}
+
+/// Crates to disable `#[cfg(test)]` on.
+#[derive(Clone, Debug, PartialEq, Eq)]
+pub enum UnsetTestCrates {
+    None,
+    Only(Vec<String>),
+    All,
+}
+
+// By default, no crate has `#[cfg(test)]` disabled.
+impl Default for UnsetTestCrates {
+    fn default() -> Self {
+        Self::None
+    }
+}
+
+#[derive(Default, Clone, Debug, PartialEq, Eq)]
+pub struct CargoConfig {
+    /// Do not activate the `default` feature.
+    pub no_default_features: bool,
+
+    /// Activate all available features
+    pub all_features: bool,
+
+    /// List of features to activate.
+    /// This will be ignored if `cargo_all_features` is true.
+    pub features: Vec<String>,
+
+    /// rustc target
+    pub target: Option<String>,
+
+    /// Don't load sysroot crates (`std`, `core` & friends). Might be useful
+    /// when debugging isolated issues.
+    pub no_sysroot: bool,
+
+    /// rustc private crate source
+    pub rustc_source: Option<RustcSource>,
+
+    /// crates to disable `#[cfg(test)]` on
+    pub unset_test_crates: UnsetTestCrates,
+
+    /// Wrap `rustc` invocations of build scripts in rust-analyzer's wrapper.
+    pub wrap_rustc_in_build_scripts: bool,
+
+    /// Custom command to run instead of `cargo check` for build scripts.
+    pub run_build_script_command: Option<Vec<String>>,
+}
+
+impl CargoConfig {
+    /// Translates `unset_test_crates` into the `CfgOverrides` form used by
+    /// crate-graph construction: each affected crate gets `cfg(test)` disabled.
+    pub fn cfg_overrides(&self) -> CfgOverrides {
+        match &self.unset_test_crates {
+            UnsetTestCrates::None => CfgOverrides::Selective(iter::empty().collect()),
+            UnsetTestCrates::Only(unset_test_crates) => CfgOverrides::Selective(
+                unset_test_crates
+                    .iter()
+                    .cloned()
+                    .zip(iter::repeat_with(|| {
+                        cfg::CfgDiff::new(Vec::new(), vec![cfg::CfgAtom::Flag("test".into())])
+                            .unwrap()
+                    }))
+                    .collect(),
+            ),
+            UnsetTestCrates::All => CfgOverrides::Wildcard(
+                cfg::CfgDiff::new(Vec::new(), vec![cfg::CfgAtom::Flag("test".into())]).unwrap(),
+            ),
+        }
+    }
+}
+
+/// Handle to a [`PackageData`] stored in the workspace's package arena.
+pub type Package = Idx<PackageData>;
+
+/// Handle to a [`TargetData`] stored in the workspace's target arena.
+pub type Target = Idx<TargetData>;
+
+/// Information associated with a cargo crate
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct PackageData {
+    /// Version given in the `Cargo.toml`
+    pub version: semver::Version,
+    /// Name as given in the `Cargo.toml`
+    pub name: String,
+    /// Repository as given in the `Cargo.toml`
+    pub repository: Option<String>,
+    /// Path containing the `Cargo.toml`
+    pub manifest: ManifestPath,
+    /// Targets provided by the crate (lib, bin, example, test, ...)
+    pub targets: Vec<Target>,
+    /// Does this package come from the local filesystem (and is editable)?
+    pub is_local: bool,
+    /// Whether this package is a member of the workspace
+    pub is_member: bool,
+    /// List of packages this package depends on
+    pub dependencies: Vec<PackageDependency>,
+    /// Rust edition for this package
+    pub edition: Edition,
+    /// Features provided by the crate, mapped to the features required by that feature.
+    pub features: FxHashMap<String, Vec<String>>,
+    /// List of features enabled on this package
+    pub active_features: Vec<String>,
+    /// String representation of package id
+    pub id: String,
+    /// The contents of [package.metadata.rust-analyzer]
+    pub metadata: RustAnalyzerPackageMetaData,
+}
+
+/// The `[package.metadata.rust-analyzer]` table of a `Cargo.toml`.
+#[derive(Deserialize, Default, Debug, Clone, Eq, PartialEq)]
+pub struct RustAnalyzerPackageMetaData {
+    pub rustc_private: bool,
+}
+
+/// One edge in the package dependency graph.
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct PackageDependency {
+    pub pkg: Package,
+    pub name: String,
+    pub kind: DepKind,
+}
+
+#[derive(Debug, Clone, Eq, PartialEq, PartialOrd, Ord)]
+pub enum DepKind {
+    /// Available to the library, binary, and dev targets in the package (but not the build script).
+    Normal,
+    /// Available only to test and bench targets (and the library target, when built with `cfg(test)`).
+    Dev,
+    /// Available only to the build script target.
+    Build,
+}
+
+impl DepKind {
+    /// Yields the distinct, sorted dependency kinds present in `list`.
+    ///
+    /// An empty `list` is cargo's shorthand for a plain normal dependency,
+    /// so it yields exactly [`DepKind::Normal`]. Unknown kinds are skipped.
+    fn iter(list: &[cargo_metadata::DepKindInfo]) -> impl Iterator<Item = Self> + '_ {
+        let mut kinds: Vec<Self> =
+            if list.is_empty() { vec![Self::Normal] } else { Vec::new() };
+        kinds.extend(list.iter().filter_map(|info| match info.kind {
+            cargo_metadata::DependencyKind::Normal => Some(Self::Normal),
+            cargo_metadata::DependencyKind::Development => Some(Self::Dev),
+            cargo_metadata::DependencyKind::Build => Some(Self::Build),
+            cargo_metadata::DependencyKind::Unknown => None,
+        }));
+        // Deduplicate entries that appear under several kinds.
+        kinds.sort_unstable();
+        kinds.dedup();
+        kinds.into_iter()
+    }
+}
+
+/// Information associated with a package's target
+#[derive(Debug, Clone, Eq, PartialEq)]
+pub struct TargetData {
+    /// Package that provided this target
+    pub package: Package,
+    /// Name as given in the `Cargo.toml` or generated from the file name
+    pub name: String,
+    /// Path to the main source file of the target
+    pub root: AbsPathBuf,
+    /// Kind of target
+    pub kind: TargetKind,
+    /// Is this target a proc-macro
+    pub is_proc_macro: bool,
+    /// Required features of the target without which it won't build
+    pub required_features: Vec<String>,
+}
+
+/// Classification of a cargo target, derived from its `kind` strings.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum TargetKind {
+    /// A `[[bin]]` target.
+    Bin,
+    /// Any kind of Cargo lib crate-type (dylib, rlib, proc-macro, ...).
+    Lib,
+    /// A `[[example]]` target.
+    Example,
+    /// A `[[test]]` target.
+    Test,
+    /// A `[[bench]]` target.
+    Bench,
+    /// The package's build script (`build.rs`).
+    BuildScript,
+    /// None of the recognized kind strings matched.
+    Other,
+}
+
+impl TargetKind {
+    /// Maps cargo's `kind` strings to a [`TargetKind`], taking the first
+    /// string that is recognized and falling back to [`TargetKind::Other`].
+    fn new(kinds: &[String]) -> TargetKind {
+        kinds
+            .iter()
+            .find_map(|kind| match kind.as_str() {
+                "bin" => Some(TargetKind::Bin),
+                "test" => Some(TargetKind::Test),
+                "bench" => Some(TargetKind::Bench),
+                "example" => Some(TargetKind::Example),
+                "custom-build" => Some(TargetKind::BuildScript),
+                "proc-macro" => Some(TargetKind::Lib),
+                // Catches "lib", "rlib", "dylib", "cdylib", "staticlib", ...
+                k if k.contains("lib") => Some(TargetKind::Lib),
+                _ => None,
+            })
+            .unwrap_or(TargetKind::Other)
+    }
+}
+
+#[derive(Deserialize, Default)]
+// Deserialization helper for the `package.metadata` table of `cargo metadata` output.
+struct PackageMetadata {
+    #[serde(rename = "rust-analyzer")]
+    rust_analyzer: Option<RustAnalyzerPackageMetaData>,
+}
+
+impl CargoWorkspace {
+    /// Shells out to `cargo metadata` for `cargo_toml`, honoring the feature
+    /// and target configuration in `config`. `progress` receives coarse
+    /// status strings while the (slow) command runs.
+    pub fn fetch_metadata(
+        cargo_toml: &ManifestPath,
+        current_dir: &AbsPath,
+        config: &CargoConfig,
+        progress: &dyn Fn(String),
+    ) -> Result<cargo_metadata::Metadata> {
+        // Prefer an explicitly configured target, then cargo's own
+        // `build.target`, then the rustc host triple.
+        let target = config
+            .target
+            .clone()
+            .or_else(|| cargo_config_build_target(cargo_toml))
+            .or_else(|| rustc_discover_host_triple(cargo_toml));
+
+        let mut meta = MetadataCommand::new();
+        meta.cargo_path(toolchain::cargo());
+        meta.manifest_path(cargo_toml.to_path_buf());
+        if config.all_features {
+            meta.features(CargoOpt::AllFeatures);
+        } else {
+            if config.no_default_features {
+                // FIXME: `NoDefaultFeatures` is mutually exclusive with `SomeFeatures`
+                // https://github.com/oli-obk/cargo_metadata/issues/79
+                meta.features(CargoOpt::NoDefaultFeatures);
+            }
+            if !config.features.is_empty() {
+                meta.features(CargoOpt::SomeFeatures(config.features.clone()));
+            }
+        }
+        meta.current_dir(current_dir.as_os_str());
+
+        if let Some(target) = target {
+            meta.other_options(vec![String::from("--filter-platform"), target]);
+        }
+
+        // FIXME: Fetching metadata is a slow process, as it might require
+        // calling crates.io. We should be reporting progress here, but it's
+        // unclear whether cargo itself supports it.
+        progress("metadata".to_string());
+
+        let meta =
+            meta.exec().with_context(|| format!("Failed to run `{:?}`", meta.cargo_command()))?;
+
+        Ok(meta)
+    }
+
+    /// Builds the package/target graph from raw `cargo metadata` output.
+    pub fn new(mut meta: cargo_metadata::Metadata) -> CargoWorkspace {
+        let mut pkg_by_id = FxHashMap::default();
+        let mut packages = Arena::default();
+        let mut targets = Arena::default();
+
+        let ws_members = &meta.workspace_members;
+
+        // Sort for deterministic arena indices across runs.
+        meta.packages.sort_by(|a, b| a.id.cmp(&b.id));
+        for meta_pkg in &meta.packages {
+            let cargo_metadata::Package {
+                id,
+                edition,
+                name,
+                manifest_path,
+                version,
+                metadata,
+                repository,
+                ..
+            } = meta_pkg;
+            let meta = from_value::<PackageMetadata>(metadata.clone()).unwrap_or_default();
+            let edition = match edition {
+                cargo_metadata::Edition::E2015 => Edition::Edition2015,
+                cargo_metadata::Edition::E2018 => Edition::Edition2018,
+                cargo_metadata::Edition::E2021 => Edition::Edition2021,
+                _ => {
+                    tracing::error!("Unsupported edition `{:?}`", edition);
+                    Edition::CURRENT
+                }
+            };
+            // We treat packages without source as "local" packages. That includes all members of
+            // the current workspace, as well as any path dependency outside the workspace.
+            let is_local = meta_pkg.source.is_none();
+            let is_member = ws_members.contains(id);
+
+            let pkg = packages.alloc(PackageData {
+                id: id.repr.clone(),
+                name: name.clone(),
+                version: version.clone(),
+                manifest: AbsPathBuf::assert(PathBuf::from(&manifest_path)).try_into().unwrap(),
+                targets: Vec::new(),
+                is_local,
+                is_member,
+                edition,
+                repository: repository.clone(),
+                dependencies: Vec::new(),
+                features: meta_pkg.features.clone().into_iter().collect(),
+                active_features: Vec::new(),
+                metadata: meta.rust_analyzer.unwrap_or_default(),
+            });
+            let pkg_data = &mut packages[pkg];
+            pkg_by_id.insert(id, pkg);
+            for meta_tgt in &meta_pkg.targets {
+                let is_proc_macro = meta_tgt.kind.as_slice() == ["proc-macro"];
+                let tgt = targets.alloc(TargetData {
+                    package: pkg,
+                    name: meta_tgt.name.clone(),
+                    root: AbsPathBuf::assert(PathBuf::from(&meta_tgt.src_path)),
+                    kind: TargetKind::new(meta_tgt.kind.as_slice()),
+                    is_proc_macro,
+                    required_features: meta_tgt.required_features.clone(),
+                });
+                pkg_data.targets.push(tgt);
+            }
+        }
+        // Wire up the dependency edges from the resolve graph.
+        let resolve = meta.resolve.expect("metadata executed with deps");
+        for mut node in resolve.nodes {
+            let source = match pkg_by_id.get(&node.id) {
+                Some(&src) => src,
+                // FIXME: replace this and a similar branch below with `.unwrap`, once
+                // https://github.com/rust-lang/cargo/issues/7841
+                // is fixed and hits stable (around 1.43-ish probably?).
+                None => {
+                    tracing::error!("Node id do not match in cargo metadata, ignoring {}", node.id);
+                    continue;
+                }
+            };
+            node.deps.sort_by(|a, b| a.pkg.cmp(&b.pkg));
+            for (dep_node, kind) in node
+                .deps
+                .iter()
+                .flat_map(|dep| DepKind::iter(&dep.dep_kinds).map(move |kind| (dep, kind)))
+            {
+                let pkg = match pkg_by_id.get(&dep_node.pkg) {
+                    Some(&pkg) => pkg,
+                    None => {
+                        tracing::error!(
+                            "Dep node id do not match in cargo metadata, ignoring {}",
+                            dep_node.pkg
+                        );
+                        continue;
+                    }
+                };
+                let dep = PackageDependency { name: dep_node.name.clone(), pkg, kind };
+                packages[source].dependencies.push(dep);
+            }
+            packages[source].active_features.extend(node.features);
+        }
+
+        let workspace_root =
+            AbsPathBuf::assert(PathBuf::from(meta.workspace_root.into_os_string()));
+
+        CargoWorkspace { packages, targets, workspace_root }
+    }
+
+    /// Iterates over all packages known to this workspace.
+    pub fn packages<'a>(&'a self) -> impl Iterator<Item = Package> + ExactSizeIterator + 'a {
+        self.packages.iter().map(|(id, _pkg)| id)
+    }
+
+    /// Finds the workspace-member target whose main source file is `root`.
+    pub fn target_by_root(&self, root: &AbsPath) -> Option<Target> {
+        self.packages()
+            .filter(|&pkg| self[pkg].is_member)
+            .find_map(|pkg| self[pkg].targets.iter().find(|&&it| &self[it].root == root))
+            .copied()
+    }
+
+    /// The root directory of the workspace.
+    pub fn workspace_root(&self) -> &AbsPath {
+        &self.workspace_root
+    }
+
+    /// The `--package` spec for `package`: the bare name when unambiguous,
+    /// otherwise disambiguated with the version.
+    pub fn package_flag(&self, package: &PackageData) -> String {
+        if self.is_unique(&*package.name) {
+            package.name.clone()
+        } else {
+            format!("{}:{}", package.name, package.version)
+        }
+    }
+
+    /// Manifests of the packages that depend on the package at
+    /// `manifest_path`; falls back to the workspace-root manifest when the
+    /// package is a member but nothing depends on it. `None` when the
+    /// package is not in this workspace at all.
+    pub fn parent_manifests(&self, manifest_path: &ManifestPath) -> Option<Vec<ManifestPath>> {
+        let mut found = false;
+        let parent_manifests = self
+            .packages()
+            .filter_map(|pkg| {
+                if !found && &self[pkg].manifest == manifest_path {
+                    found = true
+                }
+                self[pkg].dependencies.iter().find_map(|dep| {
+                    if &self[dep.pkg].manifest == manifest_path {
+                        return Some(self[pkg].manifest.clone());
+                    }
+                    None
+                })
+            })
+            .collect::<Vec<ManifestPath>>();
+
+        // some packages have this pkg as a dep. return their manifests
+        if parent_manifests.len() > 0 {
+            return Some(parent_manifests);
+        }
+
+        // this pkg is inside this cargo workspace, fallback to workspace root
+        if found {
+            return Some(vec![
+                ManifestPath::try_from(self.workspace_root().join("Cargo.toml")).ok()?
+            ]);
+        }
+
+        // not in this workspace
+        None
+    }
+
+    /// True when exactly one package in the workspace has this name.
+    fn is_unique(&self, name: &str) -> bool {
+        self.packages.iter().filter(|(_, v)| v.name == name).count() == 1
+    }
+}
+
+/// Asks `rustc -vV` for the host target triple; `None` when the command
+/// fails or its output lacks a `host: ` line.
+fn rustc_discover_host_triple(cargo_toml: &ManifestPath) -> Option<String> {
+    let mut rustc = Command::new(toolchain::rustc());
+    rustc.current_dir(cargo_toml.parent()).arg("-vV");
+    tracing::debug!("Discovering host platform by {:?}", rustc);
+    let stdout = match utf8_stdout(rustc) {
+        Ok(it) => it,
+        Err(e) => {
+            tracing::warn!("Failed to discover host platform: {}", e);
+            return None;
+        }
+    };
+    match stdout.lines().find_map(|line| line.strip_prefix("host: ")) {
+        Some(triple) => Some(triple.to_string()),
+        None => {
+            // If we fail to resolve the host platform, it's not the end of the world.
+            tracing::info!("rustc -vV did not report host platform, got:\n{}", stdout);
+            None
+        }
+    }
+}
+
+/// Reads cargo's `build.target` configuration value, if any is set.
+fn cargo_config_build_target(cargo_toml: &ManifestPath) -> Option<String> {
+    let mut cargo_config = Command::new(toolchain::cargo());
+    cargo_config
+        .current_dir(cargo_toml.parent())
+        .args(&["-Z", "unstable-options", "config", "get", "build.target"])
+        .env("RUSTC_BOOTSTRAP", "1");
+    // if successful we receive `build.target = "target-triple"`
+    tracing::debug!("Discovering cargo config target by {:?}", cargo_config);
+    utf8_stdout(cargo_config).ok().and_then(|stdout| {
+        stdout
+            .strip_prefix("build.target = \"")
+            .and_then(|rest| rest.strip_suffix('"'))
+            .map(ToOwned::to_owned)
+    })
+}
--- /dev/null
- pub(crate) type CheckFixes = Arc<FxHashMap<FileId, Vec<Fix>>>;
+//! Book keeping for keeping diagnostics easily in sync with the client.
+pub(crate) mod to_proto;
+
+use std::{mem, sync::Arc};
+
+use ide::FileId;
+use rustc_hash::{FxHashMap, FxHashSet};
+
+use crate::lsp_ext;
+
- pub(crate) check: FxHashMap<FileId, Vec<lsp_types::Diagnostic>>,
++pub(crate) type CheckFixes = Arc<FxHashMap<usize, FxHashMap<FileId, Vec<Fix>>>>;
+
+/// User configuration applied when mapping `cargo check` diagnostics to LSP.
+#[derive(Debug, Default, Clone)]
+pub struct DiagnosticsMapConfig {
+    /// Path-prefix remappings applied to diagnostic file paths.
+    pub remap_prefix: FxHashMap<String, String>,
+    /// Warnings matching these entries are reported with info severity.
+    pub warnings_as_info: Vec<String>,
+    /// Warnings matching these entries are reported with hint severity.
+    pub warnings_as_hint: Vec<String>,
+}
+
+#[derive(Debug, Default, Clone)]
+pub(crate) struct DiagnosticCollection {
+ // FIXME: should be FxHashMap<FileId, Vec<ra_id::Diagnostic>>
+ pub(crate) native: FxHashMap<FileId, Vec<lsp_types::Diagnostic>>,
+ // FIXME: should be Vec<flycheck::Diagnostic>
- pub(crate) fn clear_check(&mut self) {
++ pub(crate) check: FxHashMap<usize, FxHashMap<FileId, Vec<lsp_types::Diagnostic>>>,
+ pub(crate) check_fixes: CheckFixes,
+ changes: FxHashSet<FileId>,
+}
+
+/// A quick-fix attached to a `cargo check` diagnostic.
+#[derive(Debug, Clone)]
+pub(crate) struct Fix {
+    // Fixes may be triggerable from multiple ranges.
+    pub(crate) ranges: Vec<lsp_types::Range>,
+    /// The code action offered to the client for this fix.
+    pub(crate) action: lsp_ext::CodeAction,
+}
+
+impl DiagnosticCollection {
- self.changes.extend(self.check.drain().map(|(key, _value)| key))
++ pub(crate) fn clear_check(&mut self, flycheck_id: usize) {
++ if let Some(it) = Arc::make_mut(&mut self.check_fixes).get_mut(&flycheck_id) {
++ it.clear();
++ }
++ if let Some(it) = self.check.get_mut(&flycheck_id) {
++ self.changes.extend(it.drain().map(|(key, _value)| key));
++ }
++ }
++
++ pub(crate) fn clear_check_all(&mut self) {
+ Arc::make_mut(&mut self.check_fixes).clear();
- let diagnostics = self.check.entry(file_id).or_default();
++ self.changes
++ .extend(self.check.values_mut().flat_map(|it| it.drain().map(|(key, _value)| key)))
+ }
+
+    /// Drops the native (in-process) diagnostics for `file_id` and marks the
+    /// file as changed so the client gets an update.
+    pub(crate) fn clear_native_for(&mut self, file_id: FileId) {
+        self.native.remove(&file_id);
+        self.changes.insert(file_id);
+    }
+
+ pub(crate) fn add_check_diagnostic(
+ &mut self,
++ flycheck_id: usize,
+ file_id: FileId,
+ diagnostic: lsp_types::Diagnostic,
+ fix: Option<Fix>,
+ ) {
- check_fixes.entry(file_id).or_default().extend(fix);
++ let diagnostics = self.check.entry(flycheck_id).or_default().entry(file_id).or_default();
+ for existing_diagnostic in diagnostics.iter() {
+ if are_diagnostics_equal(existing_diagnostic, &diagnostic) {
+ return;
+ }
+ }
+
+ let check_fixes = Arc::make_mut(&mut self.check_fixes);
- let check = self.check.get(&file_id).into_iter().flatten();
++ check_fixes.entry(flycheck_id).or_default().entry(file_id).or_default().extend(fix);
+ diagnostics.push(diagnostic);
+ self.changes.insert(file_id);
+ }
+
+    /// Replaces the native diagnostics for `file_id`, skipping both the
+    /// update and the change notification when the new set is identical to
+    /// what is already stored.
+    pub(crate) fn set_native_diagnostics(
+        &mut self,
+        file_id: FileId,
+        diagnostics: Vec<lsp_types::Diagnostic>,
+    ) {
+        if let Some(existing) = self.native.get(&file_id) {
+            let unchanged = existing.len() == diagnostics.len()
+                && diagnostics
+                    .iter()
+                    .zip(existing)
+                    .all(|(new, old)| are_diagnostics_equal(new, old));
+            if unchanged {
+                return;
+            }
+        }
+
+        self.native.insert(file_id, diagnostics);
+        self.changes.insert(file_id);
+    }
+
+    /// All diagnostics for `file_id`: native ones first, followed by the
+    /// `cargo check` diagnostics of every flycheck instance.
+    pub(crate) fn diagnostics_for(
+        &self,
+        file_id: FileId,
+    ) -> impl Iterator<Item = &lsp_types::Diagnostic> {
+        let native = self.native.get(&file_id).into_iter().flatten();
++        // `filter_map` already yields an iterator; the former `.into_iter()` was a no-op.
++        let check = self.check.values().filter_map(move |it| it.get(&file_id)).flatten();
+        native.chain(check)
+    }
+
+    /// Takes and clears the accumulated set of files whose diagnostics
+    /// changed; `None` when nothing changed since the last call.
+    pub(crate) fn take_changes(&mut self) -> Option<FxHashSet<FileId>> {
+        if self.changes.is_empty() {
+            return None;
+        }
+        Some(mem::take(&mut self.changes))
+    }
+}
+
+/// Structural equality on the fields that identify a diagnostic to the user
+/// (source, severity, range, message); other fields are ignored.
+fn are_diagnostics_equal(left: &lsp_types::Diagnostic, right: &lsp_types::Diagnostic) -> bool {
+    (&left.source, &left.severity, &left.range, &left.message)
+        == (&right.source, &right.severity, &right.range, &right.message)
+}
--- /dev/null
- use ide_db::base_db::{CrateId, FileLoader, SourceDatabase};
+//! The context or environment in which the language server functions. In our
+//! server implementation this is know as the `WorldState`.
+//!
+//! Each tick provides an immutable snapshot of the state as `WorldSnapshot`.
+
+use std::{sync::Arc, time::Instant};
+
+use crossbeam_channel::{unbounded, Receiver, Sender};
+use flycheck::FlycheckHandle;
+use ide::{Analysis, AnalysisHost, Cancellable, Change, FileId};
- let mut fs_changes = Vec::new();
++use ide_db::base_db::{CrateId, FileLoader, SourceDatabase, SourceDatabaseExt};
+use lsp_types::{SemanticTokens, Url};
+use parking_lot::{Mutex, RwLock};
+use proc_macro_api::ProcMacroServer;
+use project_model::{CargoWorkspace, ProjectWorkspace, Target, WorkspaceBuildScripts};
+use rustc_hash::FxHashMap;
+use vfs::AnchoredPathBuf;
+
+use crate::{
+ config::Config,
+ diagnostics::{CheckFixes, DiagnosticCollection},
+ from_proto,
+ line_index::{LineEndings, LineIndex},
+ lsp_ext,
+ main_loop::Task,
+ mem_docs::MemDocs,
+ op_queue::OpQueue,
+ reload::{self, SourceRootConfig},
+ task_pool::TaskPool,
+ to_proto::url_from_abs_path,
+ Result,
+};
+
+// Enforces drop order
+pub(crate) struct Handle<H, C> {
+    /// The owned resource; declared first so it is dropped before `receiver`.
+    pub(crate) handle: H,
+    /// Channel receiving messages produced by `handle`.
+    pub(crate) receiver: C,
+}
+
+/// Callback invoked with the client's response to an outgoing LSP request.
+pub(crate) type ReqHandler = fn(&mut GlobalState, lsp_server::Response);
+/// Request queue pairing each request with its method name and receipt time.
+pub(crate) type ReqQueue = lsp_server::ReqQueue<(String, Instant), ReqHandler>;
+
+/// `GlobalState` is the primary mutable state of the language server
+///
+/// The most interesting components are `vfs`, which stores a consistent
+/// snapshot of the file systems, and `analysis_host`, which stores our
+/// incremental salsa database.
+///
+/// Note that this struct has more than one impl in various modules!
+pub(crate) struct GlobalState {
+    /// Channel carrying outgoing LSP messages to the client.
+    sender: Sender<lsp_server::Message>,
+    /// In-flight incoming/outgoing LSP requests.
+    req_queue: ReqQueue,
+    pub(crate) task_pool: Handle<TaskPool<Task>, Receiver<Task>>,
+    pub(crate) loader: Handle<Box<dyn vfs::loader::Handle>, Receiver<vfs::loader::Message>>,
+    pub(crate) config: Arc<Config>,
+    pub(crate) analysis_host: AnalysisHost,
+    pub(crate) diagnostics: DiagnosticCollection,
+    pub(crate) mem_docs: MemDocs,
+    pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
+    pub(crate) shutdown_requested: bool,
+    pub(crate) proc_macro_changed: bool,
+    pub(crate) last_reported_status: Option<lsp_ext::ServerStatusParams>,
+    pub(crate) source_root_config: SourceRootConfig,
+    pub(crate) proc_macro_clients: Vec<Result<ProcMacroServer, String>>,
+
+    // Flycheck (`cargo check`) instances and their message channel.
+    pub(crate) flycheck: Vec<FlycheckHandle>,
+    pub(crate) flycheck_sender: Sender<flycheck::Message>,
+    pub(crate) flycheck_receiver: Receiver<flycheck::Message>,
+
+    // VFS state plus progress counters for the initial load.
+    pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
+    pub(crate) vfs_config_version: u32,
+    pub(crate) vfs_progress_config_version: u32,
+    pub(crate) vfs_progress_n_total: usize,
+    pub(crate) vfs_progress_n_done: usize,
+
+    /// `workspaces` field stores the data we actually use, while the `OpQueue`
+    /// stores the result of the last fetch.
+    ///
+    /// If the fetch (partially) fails, we do not update the current value.
+    ///
+    /// The handling of build data is subtle. We fetch workspace in two phases:
+    ///
+    /// *First*, we run `cargo metadata`, which gives us fast results for
+    /// initial analysis.
+    ///
+    /// *Second*, we run `cargo check` which runs build scripts and compiles
+    /// proc macros.
+    ///
+    /// We need both for the precise analysis, but we want rust-analyzer to be
+    /// at least partially available just after the first phase. That's because
+    /// first phase is much faster, and is much less likely to fail.
+    ///
+    /// This creates a complication -- by the time the second phase completes,
+    /// the results of the fist phase could be invalid. That is, while we run
+    /// `cargo check`, the user edits `Cargo.toml`, we notice this, and the new
+    /// `cargo metadata` completes before `cargo check`.
+    ///
+    /// An additional complication is that we want to avoid needless work. When
+    /// the user just adds comments or whitespace to Cargo.toml, we do not want
+    /// to invalidate any salsa caches.
+    pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
+    pub(crate) fetch_workspaces_queue: OpQueue<Vec<anyhow::Result<ProjectWorkspace>>>,
+    pub(crate) fetch_build_data_queue:
+        OpQueue<(Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)>,
+
+    pub(crate) prime_caches_queue: OpQueue<()>,
+}
+
+/// An immutable snapshot of the world's state at a point in time.
+pub(crate) struct GlobalStateSnapshot {
+    pub(crate) config: Arc<Config>,
+    pub(crate) analysis: Analysis,
+    /// Quick-fixes from the latest flycheck runs, shared with `GlobalState`.
+    pub(crate) check_fixes: CheckFixes,
+    mem_docs: MemDocs,
+    pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
+    vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
+    pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
+}
+
+// NOTE(review): manual UnwindSafe assertion — presumably snapshots cross a
+// `catch_unwind` boundary in request handling; confirm in the main loop.
+impl std::panic::UnwindSafe for GlobalStateSnapshot {}
+
+impl GlobalState {
+    /// Creates a fresh `GlobalState`: spawns the VFS loader and task pool,
+    /// builds the analysis host, then applies `config`.
+    pub(crate) fn new(sender: Sender<lsp_server::Message>, config: Config) -> GlobalState {
+        // VFS loader thread forwarding file events into `receiver`.
+        let loader = {
+            let (sender, receiver) = unbounded::<vfs::loader::Message>();
+            let handle: vfs_notify::NotifyHandle =
+                vfs::loader::Handle::spawn(Box::new(move |msg| sender.send(msg).unwrap()));
+            let handle = Box::new(handle) as Box<dyn vfs::loader::Handle>;
+            Handle { handle, receiver }
+        };
+
+        // Worker pool for background tasks; results arrive on `receiver`.
+        let task_pool = {
+            let (sender, receiver) = unbounded();
+            let handle = TaskPool::new(sender);
+            Handle { handle, receiver }
+        };
+
+        let analysis_host = AnalysisHost::new(config.lru_capacity());
+        let (flycheck_sender, flycheck_receiver) = unbounded();
+        let mut this = GlobalState {
+            sender,
+            req_queue: ReqQueue::default(),
+            task_pool,
+            loader,
+            config: Arc::new(config.clone()),
+            analysis_host,
+            diagnostics: Default::default(),
+            mem_docs: MemDocs::default(),
+            semantic_tokens_cache: Arc::new(Default::default()),
+            shutdown_requested: false,
+            proc_macro_changed: false,
+            last_reported_status: None,
+            source_root_config: SourceRootConfig::default(),
+            proc_macro_clients: vec![],
+
+            flycheck: Vec::new(),
+            flycheck_sender,
+            flycheck_receiver,
+
+            vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))),
+            vfs_config_version: 0,
+            vfs_progress_config_version: 0,
+            vfs_progress_n_total: 0,
+            vfs_progress_n_done: 0,
+
+            workspaces: Arc::new(Vec::new()),
+            fetch_workspaces_queue: OpQueue::default(),
+            prime_caches_queue: OpQueue::default(),
+
+            fetch_build_data_queue: OpQueue::default(),
+        };
+        // Apply any required database inputs from the config.
+        this.update_configuration(config);
+        this
+    }
+
+ pub(crate) fn process_changes(&mut self) -> bool {
+ let _p = profile::span("GlobalState::process_changes");
- self.fetch_workspaces_queue
- .request_op(format!("vfs file change: {}", path.display()));
++ let mut fs_refresh_changes = Vec::new();
+ // A file was added or deleted
+ let mut has_structure_changes = false;
+
+ let (change, changed_files) = {
+ let mut change = Change::new();
+ let (vfs, line_endings_map) = &mut *self.vfs.write();
+ let changed_files = vfs.take_changes();
+ if changed_files.is_empty() {
+ return false;
+ }
+
+ for file in &changed_files {
+ if let Some(path) = vfs.file_path(file.file_id).as_path() {
+ let path = path.to_path_buf();
+ if reload::should_refresh_for_change(&path, file.change_kind) {
- fs_changes.push((path, file.change_kind));
++ fs_refresh_changes.push((path, file.file_id));
+ }
- let raw_database = &self.analysis_host.raw_database();
- self.proc_macro_changed =
- changed_files.iter().filter(|file| !file.is_created_or_deleted()).any(|file| {
- let crates = raw_database.relevant_crates(file.file_id);
- let crate_graph = raw_database.crate_graph();
+ if file.is_created_or_deleted() {
+ has_structure_changes = true;
+ }
+ }
+
++ // Clear native diagnostics when their file gets deleted
+ if !file.exists() {
+ self.diagnostics.clear_native_for(file.file_id);
+ }
+
+ let text = if file.exists() {
+ let bytes = vfs.file_contents(file.file_id).to_vec();
+ String::from_utf8(bytes).ok().and_then(|text| {
+ let (text, line_endings) = LineEndings::normalize(text);
+ line_endings_map.insert(file.file_id, line_endings);
+ Some(Arc::new(text))
+ })
+ } else {
+ None
+ };
+ change.change_file(file.file_id, text);
+ }
+ if has_structure_changes {
+ let roots = self.source_root_config.partition(vfs);
+ change.set_roots(roots);
+ }
+ (change, changed_files)
+ };
+
+ self.analysis_host.apply_change(change);
+
- crates.iter().any(|&krate| crate_graph[krate].is_proc_macro)
- });
++ {
++ let raw_database = self.analysis_host.raw_database();
++ let workspace_structure_change =
++ fs_refresh_changes.into_iter().find(|&(_, file_id)| {
++ !raw_database.source_root(raw_database.file_source_root(file_id)).is_library
++ });
++ if let Some((path, _)) = workspace_structure_change {
++ self.fetch_workspaces_queue
++ .request_op(format!("workspace vfs file change: {}", path.display()));
++ }
++ self.proc_macro_changed =
++ changed_files.iter().filter(|file| !file.is_created_or_deleted()).any(|file| {
++ let crates = raw_database.relevant_crates(file.file_id);
++ let crate_graph = raw_database.crate_graph();
++
++ crates.iter().any(|&krate| crate_graph[krate].is_proc_macro)
++ });
++ }
+
+ true
+ }
+
+    /// Produces an immutable snapshot of the current state for use by
+    /// request handlers; shared pieces are `Arc`-cloned, not copied.
+    pub(crate) fn snapshot(&self) -> GlobalStateSnapshot {
+        GlobalStateSnapshot {
+            config: Arc::clone(&self.config),
+            workspaces: Arc::clone(&self.workspaces),
+            analysis: self.analysis_host.analysis(),
+            vfs: Arc::clone(&self.vfs),
+            check_fixes: Arc::clone(&self.diagnostics.check_fixes),
+            mem_docs: self.mem_docs.clone(),
+            semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
+        }
+    }
+
+    /// Sends an outgoing LSP request to the client, registering `handler` to
+    /// be invoked when the response arrives.
+    pub(crate) fn send_request<R: lsp_types::request::Request>(
+        &mut self,
+        params: R::Params,
+        handler: ReqHandler,
+    ) {
+        let request = self.req_queue.outgoing.register(R::METHOD.to_string(), params, handler);
+        self.send(request.into());
+    }
+
+    /// Routes a response from the client to the handler registered when the
+    /// request was sent. Panics if no matching request is in flight.
+    pub(crate) fn complete_request(&mut self, response: lsp_server::Response) {
+        let handler = self
+            .req_queue
+            .outgoing
+            .complete(response.id.clone())
+            .expect("received response for unknown request");
+        handler(self, response)
+    }
+
+    /// Sends an LSP notification (fire-and-forget; no response expected).
+    pub(crate) fn send_notification<N: lsp_types::notification::Notification>(
+        &mut self,
+        params: N::Params,
+    ) {
+        let not = lsp_server::Notification::new(N::METHOD.to_string(), params);
+        self.send(not.into());
+    }
+
+    /// Records an incoming request together with its method name and arrival
+    /// time; the timestamp is used to log handling latency in `respond`.
+    pub(crate) fn register_request(
+        &mut self,
+        request: &lsp_server::Request,
+        request_received: Instant,
+    ) {
+        self.req_queue
+            .incoming
+            .register(request.id.clone(), (request.method.clone(), request_received));
+    }
+
+    /// Sends `response` for a previously registered incoming request,
+    /// logging handling time. If the handler panicked, also nudges the user
+    /// to report the bug. Responses to unknown (e.g. cancelled) requests are
+    /// silently dropped.
+    pub(crate) fn respond(&mut self, response: lsp_server::Response) {
+        if let Some((method, start)) = self.req_queue.incoming.complete(response.id.clone()) {
+            if let Some(err) = &response.error {
+                if err.message.starts_with("server panicked") {
+                    self.poke_rust_analyzer_developer(format!("{}, check the log", err.message))
+                }
+            }
+
+            let duration = start.elapsed();
+            tracing::debug!("handled {} - ({}) in {:0.2?}", method, response.id, duration);
+            self.send(response.into());
+        }
+    }
+
+    /// Cancels an in-flight incoming request, sending the cancellation
+    /// response the queue produces for it (if any).
+    pub(crate) fn cancel(&mut self, request_id: lsp_server::RequestId) {
+        if let Some(response) = self.req_queue.incoming.cancel(request_id) {
+            self.send(response.into());
+        }
+    }
+
+    /// Pushes a message onto the outgoing channel to the client.
+    fn send(&mut self, message: lsp_server::Message) {
+        self.sender.send(message).unwrap()
+    }
+}
+
+impl Drop for GlobalState {
+    fn drop(&mut self) {
+        // Cancel in-flight analysis before the state is torn down.
+        self.analysis_host.request_cancellation();
+    }
+}
+
+impl GlobalStateSnapshot {
+    /// Resolves an LSP URL to its VFS file id.
+    pub(crate) fn url_to_file_id(&self, url: &Url) -> Result<FileId> {
+        url_to_file_id(&self.vfs.read().0, url)
+    }
+
+    /// Converts a VFS file id back to an LSP URL.
+    pub(crate) fn file_id_to_url(&self, id: FileId) -> Url {
+        file_id_to_url(&self.vfs.read().0, id)
+    }
+
+    /// Line index for `file_id`, including its recorded line endings and the
+    /// configured offset encoding.
+    pub(crate) fn file_line_index(&self, file_id: FileId) -> Cancellable<LineIndex> {
+        let endings = self.vfs.read().1[&file_id];
+        let index = self.analysis.file_line_index(file_id)?;
+        let res = LineIndex { index, endings, encoding: self.config.offset_encoding() };
+        Ok(res)
+    }
+
+    /// Document version of an open in-memory document, if the URL maps to one.
+    pub(crate) fn url_file_version(&self, url: &Url) -> Option<i32> {
+        let path = from_proto::vfs_path(url).ok()?;
+        Some(self.mem_docs.get(&path)?.version)
+    }
+
+    /// Resolves a path anchored to another file into an absolute URL.
+    pub(crate) fn anchored_path(&self, path: &AnchoredPathBuf) -> Url {
+        let mut base = self.vfs.read().0.file_path(path.anchor);
+        base.pop();
+        let path = base.join(&path.path).unwrap();
+        let path = path.as_path().unwrap();
+        url_from_abs_path(path)
+    }
+
+    /// Finds the cargo workspace and target whose root file is the root of
+    /// `crate_id`; `None` for non-cargo workspaces.
+    pub(crate) fn cargo_target_for_crate_root(
+        &self,
+        crate_id: CrateId,
+    ) -> Option<(&CargoWorkspace, Target)> {
+        let file_id = self.analysis.crate_root(crate_id).ok()?;
+        let path = self.vfs.read().0.file_path(file_id);
+        let path = path.as_path()?;
+        self.workspaces.iter().find_map(|ws| match ws {
+            ProjectWorkspace::Cargo { cargo, .. } => {
+                cargo.target_by_root(path).map(|it| (cargo, it))
+            }
+            ProjectWorkspace::Json { .. } => None,
+            ProjectWorkspace::DetachedFiles { .. } => None,
+        })
+    }
+}
+
+/// Converts a VFS file id to an LSP URL. Panics if the file has no
+/// absolute path in the VFS.
+pub(crate) fn file_id_to_url(vfs: &vfs::Vfs, id: FileId) -> Url {
+    let path = vfs.file_path(id);
+    let path = path.as_path().unwrap();
+    url_from_abs_path(path)
+}
+
+/// Resolves an LSP URL to a VFS file id, erroring when the file is unknown.
+pub(crate) fn url_to_file_id(vfs: &vfs::Vfs, url: &Url) -> Result<FileId> {
+    let path = from_proto::vfs_path(url)?;
+    let res = vfs.file_id(&path).ok_or_else(|| format!("file not found: {}", path))?;
+    Ok(res)
+}
--- /dev/null
- for fix in snap.check_fixes.get(&frange.file_id).into_iter().flatten() {
+//! This module is responsible for implementing handlers for Language Server
+//! Protocol. The majority of requests are fulfilled by calling into the
+//! `ide` crate.
+
+use std::{
+ io::Write as _,
+ process::{self, Stdio},
+};
+
+use anyhow::Context;
+use ide::{
+ AnnotationConfig, AssistKind, AssistResolveStrategy, FileId, FilePosition, FileRange,
+ HoverAction, HoverGotoTypeData, Query, RangeInfo, Runnable, RunnableKind, SingleResolve,
+ SourceChange, TextEdit,
+};
+use ide_db::SymbolKind;
+use lsp_server::ErrorCode;
+use lsp_types::{
+ CallHierarchyIncomingCall, CallHierarchyIncomingCallsParams, CallHierarchyItem,
+ CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams,
+ CodeLens, CompletionItem, Diagnostic, DiagnosticTag, DocumentFormattingParams, FoldingRange,
+ FoldingRangeParams, HoverContents, InlayHint, InlayHintParams, Location, LocationLink,
+ NumberOrString, Position, PrepareRenameResponse, Range, RenameParams,
+ SemanticTokensDeltaParams, SemanticTokensFullDeltaResult, SemanticTokensParams,
+ SemanticTokensRangeParams, SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation,
+ SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
+};
+use project_model::{ManifestPath, ProjectWorkspace, TargetKind};
+use serde_json::json;
+use stdx::{format_to, never};
+use syntax::{algo, ast, AstNode, TextRange, TextSize, T};
+use vfs::AbsPathBuf;
+
+use crate::{
+ cargo_target_spec::CargoTargetSpec,
+ config::{RustfmtConfig, WorkspaceSymbolConfig},
+ diff::diff,
+ from_proto,
+ global_state::{GlobalState, GlobalStateSnapshot},
+ line_index::LineEndings,
+ lsp_ext::{self, PositionOrRange, ViewCrateGraphParams, WorkspaceSymbolParams},
+ lsp_utils::{all_edits_are_disjoint, invalid_params_error},
+ to_proto, LspError, Result,
+};
+
+/// Handles the custom `rust-analyzer/reloadWorkspace` request: drops cached
+/// proc-macro servers and queues a full workspace + build-data refetch.
+pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> Result<()> {
+    state.proc_macro_clients.clear();
+    state.proc_macro_changed = false;
+    state.fetch_workspaces_queue.request_op("reload workspace request".to_string());
+    state.fetch_build_data_queue.request_op("reload workspace request".to_string());
+    Ok(())
+}
+
+/// Handles `rust-analyzer/analyzerStatus`: renders a human-readable summary
+/// of loaded workspaces and analysis state, optionally scoped to one file.
+pub(crate) fn handle_analyzer_status(
+    snap: GlobalStateSnapshot,
+    params: lsp_ext::AnalyzerStatusParams,
+) -> Result<String> {
+    let _p = profile::span("handle_analyzer_status");
+
+    let mut buf = String::new();
+
+    let mut file_id = None;
+    if let Some(tdi) = params.text_document {
+        match from_proto::file_id(&snap, &tdi.uri) {
+            Ok(it) => file_id = Some(it),
+            Err(_) => format_to!(buf, "file {} not found in vfs", tdi.uri),
+        }
+    }
+
+    if snap.workspaces.is_empty() {
+        buf.push_str("No workspaces\n")
+    } else {
+        buf.push_str("Workspaces:\n");
+        format_to!(
+            buf,
+            // Display (`{}`) is the right specifier for this `usize` count;
+            // the previous `{:?}` Debug-formatted it (same output, wrong intent).
+            "Loaded {} packages across {} workspace{}.\n",
+            snap.workspaces.iter().map(|w| w.n_packages()).sum::<usize>(),
+            snap.workspaces.len(),
+            if snap.workspaces.len() == 1 { "" } else { "s" }
+        );
+    }
+    buf.push_str("\nAnalysis:\n");
+    buf.push_str(
+        &snap
+            .analysis
+            .status(file_id)
+            .unwrap_or_else(|_| "Analysis retrieval was cancelled".to_owned()),
+    );
+    Ok(buf)
+}
+
+/// Handles `rust-analyzer/memoryUsage`: reports per-query memory usage of
+/// the analysis host plus a "Remaining" line for everything else allocated.
+pub(crate) fn handle_memory_usage(state: &mut GlobalState, _: ()) -> Result<String> {
+    let _p = profile::span("handle_memory_usage");
+    let mut usage = state.analysis_host.per_query_memory_usage();
+    usage.push(("Remaining".into(), profile::memory_usage().allocated));
+
+    let mut report = String::new();
+    for (name, bytes) in usage {
+        format_to!(report, "{:>8} {}\n", bytes, name);
+    }
+    Ok(report)
+}
+
+/// Handles the internal `rust-analyzer/shuffleCrateGraph` request (used to
+/// shake out order-dependence bugs in analysis).
+pub(crate) fn handle_shuffle_crate_graph(state: &mut GlobalState, _: ()) -> Result<()> {
+    state.analysis_host.shuffle_crate_graph();
+    Ok(())
+}
+
+/// Handles `rust-analyzer/syntaxTree`: renders the syntax tree of a file,
+/// optionally restricted to the requested range.
+pub(crate) fn handle_syntax_tree(
+    snap: GlobalStateSnapshot,
+    params: lsp_ext::SyntaxTreeParams,
+) -> Result<String> {
+    let _p = profile::span("handle_syntax_tree");
+    let id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let line_index = snap.file_line_index(id)?;
+    // An unconvertible range is silently ignored, falling back to the whole file.
+    let text_range = params.range.and_then(|r| from_proto::text_range(&line_index, r).ok());
+    let res = snap.analysis.syntax_tree(id, text_range)?;
+    Ok(res)
+}
+
+/// Handles `rust-analyzer/viewHir`: renders the HIR of the item at the
+/// given position.
+pub(crate) fn handle_view_hir(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::TextDocumentPositionParams,
+) -> Result<String> {
+    let _p = profile::span("handle_view_hir");
+    let position = from_proto::file_position(&snap, params)?;
+    let res = snap.analysis.view_hir(position)?;
+    Ok(res)
+}
+
+/// Handles `rust-analyzer/viewFileText`: returns the server's view of the
+/// file's text (useful for debugging sync issues).
+pub(crate) fn handle_view_file_text(
+    snap: GlobalStateSnapshot,
+    params: lsp_types::TextDocumentIdentifier,
+) -> Result<String> {
+    let file_id = from_proto::file_id(&snap, &params.uri)?;
+    Ok(snap.analysis.file_text(file_id)?.to_string())
+}
+
+/// Handles `rust-analyzer/viewItemTree`: renders the item tree of a file.
+pub(crate) fn handle_view_item_tree(
+    snap: GlobalStateSnapshot,
+    params: lsp_ext::ViewItemTreeParams,
+) -> Result<String> {
+    let _p = profile::span("handle_view_item_tree");
+    let file_id = from_proto::file_id(&snap, &params.text_document.uri)?;
+    let res = snap.analysis.view_item_tree(file_id)?;
+    Ok(res)
+}
+
+/// Handles `rust-analyzer/viewCrateGraph`: renders the crate graph as a
+/// `dot` document.
+pub(crate) fn handle_view_crate_graph(
+    snap: GlobalStateSnapshot,
+    params: ViewCrateGraphParams,
+) -> Result<String> {
+    let _p = profile::span("handle_view_crate_graph");
+    // `??`: unwraps the cancellation layer, then the rendering result.
+    let dot = snap.analysis.view_crate_graph(params.full)??;
+    Ok(dot)
+}
+
+pub(crate) fn handle_expand_macro(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::ExpandMacroParams,
+) -> Result<Option<lsp_ext::ExpandedMacro>> {
+ let _p = profile::span("handle_expand_macro");
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let offset = from_proto::offset(&line_index, params.position)?;
+
+ let res = snap.analysis.expand_macro(FilePosition { file_id, offset })?;
+ Ok(res.map(|it| lsp_ext::ExpandedMacro { name: it.name, expansion: it.expansion }))
+}
+
+pub(crate) fn handle_selection_range(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::SelectionRangeParams,
+) -> Result<Option<Vec<lsp_types::SelectionRange>>> {
+ let _p = profile::span("handle_selection_range");
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let res: Result<Vec<lsp_types::SelectionRange>> = params
+ .positions
+ .into_iter()
+ .map(|position| {
+ let offset = from_proto::offset(&line_index, position)?;
+ let mut ranges = Vec::new();
+ {
+ let mut range = TextRange::new(offset, offset);
+ loop {
+ ranges.push(range);
+ let frange = FileRange { file_id, range };
+ let next = snap.analysis.extend_selection(frange)?;
+ if next == range {
+ break;
+ } else {
+ range = next
+ }
+ }
+ }
+ let mut range = lsp_types::SelectionRange {
+ range: to_proto::range(&line_index, *ranges.last().unwrap()),
+ parent: None,
+ };
+ for &r in ranges.iter().rev().skip(1) {
+ range = lsp_types::SelectionRange {
+ range: to_proto::range(&line_index, r),
+ parent: Some(Box::new(range)),
+ }
+ }
+ Ok(range)
+ })
+ .collect();
+
+ Ok(Some(res?))
+}
+
+pub(crate) fn handle_matching_brace(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::MatchingBraceParams,
+) -> Result<Vec<Position>> {
+ let _p = profile::span("handle_matching_brace");
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ params
+ .positions
+ .into_iter()
+ .map(|position| {
+ let offset = from_proto::offset(&line_index, position);
+ offset.map(|offset| {
+ let offset = match snap.analysis.matching_brace(FilePosition { file_id, offset }) {
+ Ok(Some(matching_brace_offset)) => matching_brace_offset,
+ Err(_) | Ok(None) => offset,
+ };
+ to_proto::position(&line_index, offset)
+ })
+ })
+ .collect()
+}
+
+pub(crate) fn handle_join_lines(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::JoinLinesParams,
+) -> Result<Vec<lsp_types::TextEdit>> {
+ let _p = profile::span("handle_join_lines");
+
+ let config = snap.config.join_lines();
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+
+ let mut res = TextEdit::default();
+ for range in params.ranges {
+ let range = from_proto::text_range(&line_index, range)?;
+ let edit = snap.analysis.join_lines(&config, FileRange { file_id, range })?;
+ match res.union(edit) {
+ Ok(()) => (),
+ Err(_edit) => {
+ // just ignore overlapping edits
+ }
+ }
+ }
+
+ Ok(to_proto::text_edit_vec(&line_index, res))
+}
+
+pub(crate) fn handle_on_enter(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
+ let _p = profile::span("handle_on_enter");
+ let position = from_proto::file_position(&snap, params)?;
+ let edit = match snap.analysis.on_enter(position)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+ let line_index = snap.file_line_index(position.file_id)?;
+ let edit = to_proto::snippet_text_edit_vec(&line_index, true, edit);
+ Ok(Some(edit))
+}
+
/// Handler for `textDocument/onTypeFormatting`.
///
/// Computes edits to apply immediately after the user types one of the
/// registered trigger characters. Returns `None` when nothing should happen.
pub(crate) fn handle_on_type_formatting(
    snap: GlobalStateSnapshot,
    params: lsp_types::DocumentOnTypeFormattingParams,
) -> Result<Option<Vec<lsp_ext::SnippetTextEdit>>> {
    let _p = profile::span("handle_on_type_formatting");
    let mut position = from_proto::file_position(&snap, params.text_document_position)?;
    let line_index = snap.file_line_index(position.file_id)?;

    // in `ide`, the `on_type` invariant is that
    // `text.char_at(position) == typed_char`.
    // Note: `'.'` is used purely for its one-byte width here; all trigger
    // characters are single-byte, so this steps back over the typed char.
    position.offset -= TextSize::of('.');
    let char_typed = params.ch.chars().next().unwrap_or('\0');

    // Sanity check: the character at the (adjusted) position must be the one
    // the client claims was typed; bail out quietly otherwise.
    let text = snap.analysis.file_text(position.file_id)?;
    if stdx::never!(!text[usize::from(position.offset)..].starts_with(char_typed)) {
        return Ok(None);
    }

    // We have an assist that inserts ` ` after typing `->` in `fn foo() ->{`,
    // but it requires precise cursor positioning to work, and one can't
    // position the cursor with on_type formatting. So, let's just toggle this
    // feature off here, hoping that we'll enable it one day, 😿.
    if char_typed == '>' {
        return Ok(None);
    }

    let edit =
        snap.analysis.on_char_typed(position, char_typed, snap.config.typing_autoclose_angle())?;
    let edit = match edit {
        Some(it) => it,
        None => return Ok(None),
    };

    // This should be a single-file edit
    let (_, text_edit) = edit.source_file_edits.into_iter().next().unwrap();

    let change = to_proto::snippet_text_edit_vec(&line_index, edit.is_snippet, text_edit);
    Ok(Some(change))
}
+
+pub(crate) fn handle_document_symbol(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::DocumentSymbolParams,
+) -> Result<Option<lsp_types::DocumentSymbolResponse>> {
+ let _p = profile::span("handle_document_symbol");
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+
+ let mut parents: Vec<(lsp_types::DocumentSymbol, Option<usize>)> = Vec::new();
+
+ for symbol in snap.analysis.file_structure(file_id)? {
+ let mut tags = Vec::new();
+ if symbol.deprecated {
+ tags.push(SymbolTag::DEPRECATED)
+ };
+
+ #[allow(deprecated)]
+ let doc_symbol = lsp_types::DocumentSymbol {
+ name: symbol.label,
+ detail: symbol.detail,
+ kind: to_proto::structure_node_kind(symbol.kind),
+ tags: Some(tags),
+ deprecated: Some(symbol.deprecated),
+ range: to_proto::range(&line_index, symbol.node_range),
+ selection_range: to_proto::range(&line_index, symbol.navigation_range),
+ children: None,
+ };
+ parents.push((doc_symbol, symbol.parent));
+ }
+
+ // Builds hierarchy from a flat list, in reverse order (so that indices
+ // makes sense)
+ let document_symbols = {
+ let mut acc = Vec::new();
+ while let Some((mut node, parent_idx)) = parents.pop() {
+ if let Some(children) = &mut node.children {
+ children.reverse();
+ }
+ let parent = match parent_idx {
+ None => &mut acc,
+ Some(i) => parents[i].0.children.get_or_insert_with(Vec::new),
+ };
+ parent.push(node);
+ }
+ acc.reverse();
+ acc
+ };
+
+ let res = if snap.config.hierarchical_symbols() {
+ document_symbols.into()
+ } else {
+ let url = to_proto::url(&snap, file_id);
+ let mut symbol_information = Vec::<SymbolInformation>::new();
+ for symbol in document_symbols {
+ flatten_document_symbol(&symbol, None, &url, &mut symbol_information);
+ }
+ symbol_information.into()
+ };
+ return Ok(Some(res));
+
+ fn flatten_document_symbol(
+ symbol: &lsp_types::DocumentSymbol,
+ container_name: Option<String>,
+ url: &Url,
+ res: &mut Vec<SymbolInformation>,
+ ) {
+ let mut tags = Vec::new();
+
+ #[allow(deprecated)]
+ if let Some(true) = symbol.deprecated {
+ tags.push(SymbolTag::DEPRECATED)
+ }
+
+ #[allow(deprecated)]
+ res.push(SymbolInformation {
+ name: symbol.name.clone(),
+ kind: symbol.kind,
+ tags: Some(tags),
+ deprecated: symbol.deprecated,
+ location: Location::new(url.clone(), symbol.range),
+ container_name,
+ });
+
+ for child in symbol.children.iter().flatten() {
+ flatten_document_symbol(child, Some(symbol.name.clone()), url, res);
+ }
+ }
+}
+
/// Handler for `workspace/symbol`.
///
/// Searches for symbols matching `params.query`. Scope (workspace vs. also
/// dependencies) and kind (types only vs. all symbols) come from in-query
/// markers (`*`, `#`), request params, or global config — in that priority.
pub(crate) fn handle_workspace_symbol(
    snap: GlobalStateSnapshot,
    params: WorkspaceSymbolParams,
) -> Result<Option<Vec<SymbolInformation>>> {
    let _p = profile::span("handle_workspace_symbol");

    let config = snap.config.workspace_symbol();
    let (all_symbols, libs) = decide_search_scope_and_kind(&params, &config);
    let limit = config.search_limit;

    let query = {
        // Strip the legacy marker characters from the query text itself.
        let query: String = params.query.chars().filter(|&c| c != '#' && c != '*').collect();
        let mut q = Query::new(query);
        if !all_symbols {
            q.only_types();
        }
        if libs {
            q.libs();
        }
        q.limit(limit);
        q
    };
    let mut res = exec_query(&snap, query)?;
    // If a types-only search found nothing, retry with the raw query and no
    // kind restriction as a fallback.
    if res.is_empty() && !all_symbols {
        let mut query = Query::new(params.query);
        query.limit(limit);
        res = exec_query(&snap, query)?;
    }

    return Ok(Some(res));

    /// Returns `(all_symbols, libs)`: whether to search all symbol kinds
    /// (vs. only types) and whether to include dependencies.
    fn decide_search_scope_and_kind(
        params: &WorkspaceSymbolParams,
        config: &WorkspaceSymbolConfig,
    ) -> (bool, bool) {
        // Support old-style parsing of markers in the query.
        let mut all_symbols = params.query.contains('#');
        let mut libs = params.query.contains('*');

        // If no explicit marker was set, check request params. If that's also empty
        // use global config.
        if !all_symbols {
            let search_kind = match params.search_kind {
                Some(ref search_kind) => search_kind,
                None => &config.search_kind,
            };
            all_symbols = match search_kind {
                lsp_ext::WorkspaceSymbolSearchKind::OnlyTypes => false,
                lsp_ext::WorkspaceSymbolSearchKind::AllSymbols => true,
            }
        }

        if !libs {
            let search_scope = match params.search_scope {
                Some(ref search_scope) => search_scope,
                None => &config.search_scope,
            };
            libs = match search_scope {
                lsp_ext::WorkspaceSymbolSearchScope::Workspace => false,
                lsp_ext::WorkspaceSymbolSearchScope::WorkspaceAndDependencies => true,
            }
        }

        (all_symbols, libs)
    }

    /// Runs the symbol search and converts each hit to `SymbolInformation`.
    fn exec_query(snap: &GlobalStateSnapshot, query: Query) -> Result<Vec<SymbolInformation>> {
        let mut res = Vec::new();
        for nav in snap.analysis.symbol_search(query)? {
            let container_name = nav.container_name.as_ref().map(|v| v.to_string());

            #[allow(deprecated)]
            let info = SymbolInformation {
                name: nav.name.to_string(),
                kind: nav
                    .kind
                    .map(to_proto::symbol_kind)
                    .unwrap_or(lsp_types::SymbolKind::VARIABLE),
                tags: None,
                location: to_proto::location_from_nav(snap, nav)?,
                container_name,
                deprecated: None,
            };
            res.push(info);
        }
        Ok(res)
    }
}
+
/// Handler for `workspace/willRenameFiles`.
///
/// When the client is about to rename files/folders, computes the source
/// edits needed to rename the corresponding Rust modules. Only same-parent
/// ("single level") renames are handled; file-system edits are dropped since
/// the client itself performs the move.
pub(crate) fn handle_will_rename_files(
    snap: GlobalStateSnapshot,
    params: lsp_types::RenameFilesParams,
) -> Result<Option<lsp_types::WorkspaceEdit>> {
    let _p = profile::span("handle_will_rename_files");

    let source_changes: Vec<SourceChange> = params
        .files
        .into_iter()
        .filter_map(|file_rename| {
            let from = Url::parse(&file_rename.old_uri).ok()?;
            let to = Url::parse(&file_rename.new_uri).ok()?;

            let from_path = from.to_file_path().ok()?;
            let to_path = to.to_file_path().ok()?;

            // Limit to single-level moves for now.
            match (from_path.parent(), to_path.parent()) {
                (Some(p1), Some(p2)) if p1 == p2 => {
                    if from_path.is_dir() {
                        // A folder rename is modeled as renaming its `mod.rs`:
                        // add '/' to end of url -- from `file://path/to/folder` to `file://path/to/folder/`
                        let mut old_folder_name = from_path.file_stem()?.to_str()?.to_string();
                        old_folder_name.push('/');
                        let from_with_trailing_slash = from.join(&old_folder_name).ok()?;

                        let imitate_from_url = from_with_trailing_slash.join("mod.rs").ok()?;
                        let new_file_name = to_path.file_name()?.to_str()?;
                        Some((
                            snap.url_to_file_id(&imitate_from_url).ok()?,
                            new_file_name.to_string(),
                        ))
                    } else {
                        // Renames to or from `mod.rs` change module structure,
                        // not just a name, so they are skipped.
                        let old_name = from_path.file_stem()?.to_str()?;
                        let new_name = to_path.file_stem()?.to_str()?;
                        match (old_name, new_name) {
                            ("mod", _) => None,
                            (_, "mod") => None,
                            _ => Some((snap.url_to_file_id(&from).ok()?, new_name.to_string())),
                        }
                    }
                }
                _ => None,
            }
        })
        .filter_map(|(file_id, new_name)| {
            snap.analysis.will_rename_file(file_id, &new_name).ok()?
        })
        .collect();

    // Drop file system edits since we're just renaming things on the same level
    let mut source_changes = source_changes.into_iter();
    let mut source_change = source_changes.next().unwrap_or_default();
    source_change.file_system_edits.clear();
    // no collect here because we want to merge text edits on same file ids
    source_change.extend(source_changes.flat_map(|it| it.source_file_edits));
    if source_change.source_file_edits.is_empty() {
        Ok(None)
    } else {
        to_proto::workspace_edit(&snap, source_change).map(Some)
    }
}
+
+pub(crate) fn handle_goto_definition(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::GotoDefinitionParams,
+) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
+ let _p = profile::span("handle_goto_definition");
+ let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let nav_info = match snap.analysis.goto_definition(position)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+ let src = FileRange { file_id: position.file_id, range: nav_info.range };
+ let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_goto_declaration(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::request::GotoDeclarationParams,
+) -> Result<Option<lsp_types::request::GotoDeclarationResponse>> {
+ let _p = profile::span("handle_goto_declaration");
+ let position = from_proto::file_position(&snap, params.text_document_position_params.clone())?;
+ let nav_info = match snap.analysis.goto_declaration(position)? {
+ None => return handle_goto_definition(snap, params),
+ Some(it) => it,
+ };
+ let src = FileRange { file_id: position.file_id, range: nav_info.range };
+ let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_goto_implementation(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::request::GotoImplementationParams,
+) -> Result<Option<lsp_types::request::GotoImplementationResponse>> {
+ let _p = profile::span("handle_goto_implementation");
+ let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let nav_info = match snap.analysis.goto_implementation(position)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+ let src = FileRange { file_id: position.file_id, range: nav_info.range };
+ let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_goto_type_definition(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::request::GotoTypeDefinitionParams,
+) -> Result<Option<lsp_types::request::GotoTypeDefinitionResponse>> {
+ let _p = profile::span("handle_goto_type_definition");
+ let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let nav_info = match snap.analysis.goto_type_definition(position)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+ let src = FileRange { file_id: position.file_id, range: nav_info.range };
+ let res = to_proto::goto_definition_response(&snap, Some(src), nav_info.info)?;
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_parent_module(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
+ let _p = profile::span("handle_parent_module");
+ if let Ok(file_path) = ¶ms.text_document.uri.to_file_path() {
+ if file_path.file_name().unwrap_or_default() == "Cargo.toml" {
+ // search workspaces for parent packages or fallback to workspace root
+ let abs_path_buf = match AbsPathBuf::try_from(file_path.to_path_buf()).ok() {
+ Some(abs_path_buf) => abs_path_buf,
+ None => return Ok(None),
+ };
+
+ let manifest_path = match ManifestPath::try_from(abs_path_buf).ok() {
+ Some(manifest_path) => manifest_path,
+ None => return Ok(None),
+ };
+
+ let links: Vec<LocationLink> = snap
+ .workspaces
+ .iter()
+ .filter_map(|ws| match ws {
+ ProjectWorkspace::Cargo { cargo, .. } => cargo.parent_manifests(&manifest_path),
+ _ => None,
+ })
+ .flatten()
+ .map(|parent_manifest_path| LocationLink {
+ origin_selection_range: None,
+ target_uri: to_proto::url_from_abs_path(&parent_manifest_path),
+ target_range: Range::default(),
+ target_selection_range: Range::default(),
+ })
+ .collect::<_>();
+ return Ok(Some(links.into()));
+ }
+
+ // check if invoked at the crate root
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let crate_id = match snap.analysis.crate_for(file_id)?.first() {
+ Some(&crate_id) => crate_id,
+ None => return Ok(None),
+ };
+ let cargo_spec = match CargoTargetSpec::for_file(&snap, file_id)? {
+ Some(it) => it,
+ None => return Ok(None),
+ };
+
+ if snap.analysis.crate_root(crate_id)? == file_id {
+ let cargo_toml_url = to_proto::url_from_abs_path(&cargo_spec.cargo_toml);
+ let res = vec![LocationLink {
+ origin_selection_range: None,
+ target_uri: cargo_toml_url,
+ target_range: Range::default(),
+ target_selection_range: Range::default(),
+ }]
+ .into();
+ return Ok(Some(res));
+ }
+ }
+
+ // locate parent module by semantics
+ let position = from_proto::file_position(&snap, params)?;
+ let navs = snap.analysis.parent_module(position)?;
+ let res = to_proto::goto_definition_response(&snap, None, navs)?;
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_runnables(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::RunnablesParams,
+) -> Result<Vec<lsp_ext::Runnable>> {
+ let _p = profile::span("handle_runnables");
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let offset = params.position.and_then(|it| from_proto::offset(&line_index, it).ok());
+ let cargo_spec = CargoTargetSpec::for_file(&snap, file_id)?;
+
+ let expect_test = match offset {
+ Some(offset) => {
+ let source_file = snap.analysis.parse(file_id)?;
+ algo::find_node_at_offset::<ast::MacroCall>(source_file.syntax(), offset)
+ .and_then(|it| it.path()?.segment()?.name_ref())
+ .map_or(false, |it| it.text() == "expect" || it.text() == "expect_file")
+ }
+ None => false,
+ };
+
+ let mut res = Vec::new();
+ for runnable in snap.analysis.runnables(file_id)? {
+ if let Some(offset) = offset {
+ if !runnable.nav.full_range.contains_inclusive(offset) {
+ continue;
+ }
+ }
+ if should_skip_target(&runnable, cargo_spec.as_ref()) {
+ continue;
+ }
+ let mut runnable = to_proto::runnable(&snap, runnable)?;
+ if expect_test {
+ runnable.label = format!("{} + expect", runnable.label);
+ runnable.args.expect_test = Some(true);
+ }
+ res.push(runnable);
+ }
+
+ // Add `cargo check` and `cargo test` for all targets of the whole package
+ let config = snap.config.runnables();
+ match cargo_spec {
+ Some(spec) => {
+ for cmd in ["check", "test"] {
+ res.push(lsp_ext::Runnable {
+ label: format!("cargo {} -p {} --all-targets", cmd, spec.package),
+ location: None,
+ kind: lsp_ext::RunnableKind::Cargo,
+ args: lsp_ext::CargoRunnable {
+ workspace_root: Some(spec.workspace_root.clone().into()),
+ override_cargo: config.override_cargo.clone(),
+ cargo_args: vec![
+ cmd.to_string(),
+ "--package".to_string(),
+ spec.package.clone(),
+ "--all-targets".to_string(),
+ ],
+ cargo_extra_args: config.cargo_extra_args.clone(),
+ executable_args: Vec::new(),
+ expect_test: None,
+ },
+ })
+ }
+ }
+ None => {
+ if !snap.config.linked_projects().is_empty()
+ || !snap
+ .config
+ .discovered_projects
+ .as_ref()
+ .map(|projects| projects.is_empty())
+ .unwrap_or(true)
+ {
+ res.push(lsp_ext::Runnable {
+ label: "cargo check --workspace".to_string(),
+ location: None,
+ kind: lsp_ext::RunnableKind::Cargo,
+ args: lsp_ext::CargoRunnable {
+ workspace_root: None,
+ override_cargo: config.override_cargo,
+ cargo_args: vec!["check".to_string(), "--workspace".to_string()],
+ cargo_extra_args: config.cargo_extra_args,
+ executable_args: Vec::new(),
+ expect_test: None,
+ },
+ });
+ }
+ }
+ }
+ Ok(res)
+}
+
+pub(crate) fn handle_related_tests(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> Result<Vec<lsp_ext::TestInfo>> {
+ let _p = profile::span("handle_related_tests");
+ let position = from_proto::file_position(&snap, params)?;
+
+ let tests = snap.analysis.related_tests(position, None)?;
+ let mut res = Vec::new();
+ for it in tests {
+ if let Ok(runnable) = to_proto::runnable(&snap, it) {
+ res.push(lsp_ext::TestInfo { runnable })
+ }
+ }
+
+ Ok(res)
+}
+
+pub(crate) fn handle_completion(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::CompletionParams,
+) -> Result<Option<lsp_types::CompletionResponse>> {
+ let _p = profile::span("handle_completion");
+ let text_document_position = params.text_document_position.clone();
+ let position = from_proto::file_position(&snap, params.text_document_position)?;
+ let completion_trigger_character =
+ params.context.and_then(|ctx| ctx.trigger_character).and_then(|s| s.chars().next());
+
+ if Some(':') == completion_trigger_character {
+ let source_file = snap.analysis.parse(position.file_id)?;
+ let left_token = source_file.syntax().token_at_offset(position.offset).left_biased();
+ let completion_triggered_after_single_colon = match left_token {
+ Some(left_token) => left_token.kind() == T![:],
+ None => true,
+ };
+ if completion_triggered_after_single_colon {
+ return Ok(None);
+ }
+ }
+
+ let completion_config = &snap.config.completion();
+ let items = match snap.analysis.completions(
+ completion_config,
+ position,
+ completion_trigger_character,
+ )? {
+ None => return Ok(None),
+ Some(items) => items,
+ };
+ let line_index = snap.file_line_index(position.file_id)?;
+
+ let items =
+ to_proto::completion_items(&snap.config, &line_index, text_document_position, items);
+
+ let completion_list = lsp_types::CompletionList { is_incomplete: true, items };
+ Ok(Some(completion_list.into()))
+}
+
/// Handler for `completionItem/resolve`.
///
/// Lazily computes the additional (import) edits for a previously returned
/// completion item, using the position and import info stashed in its `data`
/// field. Items without `data` are returned unchanged.
pub(crate) fn handle_completion_resolve(
    snap: GlobalStateSnapshot,
    mut original_completion: CompletionItem,
) -> Result<CompletionItem> {
    let _p = profile::span("handle_completion_resolve");

    // LSP forbids overlapping edits within a single completion item.
    if !all_edits_are_disjoint(&original_completion, &[]) {
        return Err(invalid_params_error(
            "Received a completion with overlapping edits, this is not LSP-compliant".to_string(),
        )
        .into());
    }

    // No `data` means there is nothing to resolve.
    let data = match original_completion.data.take() {
        Some(it) => it,
        None => return Ok(original_completion),
    };

    let resolve_data: lsp_ext::CompletionResolveData = serde_json::from_value(data)?;

    let file_id = from_proto::file_id(&snap, &resolve_data.position.text_document.uri)?;
    let line_index = snap.file_line_index(file_id)?;
    let offset = from_proto::offset(&line_index, resolve_data.position.position)?;

    // Compute the import edits for the stashed imports at the original
    // completion position.
    let additional_edits = snap
        .analysis
        .resolve_completion_edits(
            &snap.config.completion(),
            FilePosition { file_id, offset },
            resolve_data
                .imports
                .into_iter()
                .map(|import| (import.full_import_path, import.imported_name)),
        )?
        .into_iter()
        .flat_map(|edit| edit.into_iter().map(|indel| to_proto::text_edit(&line_index, indel)))
        .collect::<Vec<_>>();

    // The newly computed edits must not overlap the item's existing edits
    // either; that would be our bug, hence InternalError rather than
    // InvalidParams.
    if !all_edits_are_disjoint(&original_completion, &additional_edits) {
        return Err(LspError::new(
            ErrorCode::InternalError as i32,
            "Import edit overlaps with the original completion edits, this is not LSP-compliant"
                .into(),
        )
        .into());
    }

    if let Some(original_additional_edits) = original_completion.additional_text_edits.as_mut() {
        original_additional_edits.extend(additional_edits.into_iter())
    } else {
        original_completion.additional_text_edits = Some(additional_edits);
    }

    Ok(original_completion)
}
+
+pub(crate) fn handle_folding_range(
+ snap: GlobalStateSnapshot,
+ params: FoldingRangeParams,
+) -> Result<Option<Vec<FoldingRange>>> {
+ let _p = profile::span("handle_folding_range");
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let folds = snap.analysis.folding_ranges(file_id)?;
+ let text = snap.analysis.file_text(file_id)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let line_folding_only = snap.config.line_folding_only();
+ let res = folds
+ .into_iter()
+ .map(|it| to_proto::folding_range(&*text, &line_index, line_folding_only, it))
+ .collect();
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_signature_help(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::SignatureHelpParams,
+) -> Result<Option<lsp_types::SignatureHelp>> {
+ let _p = profile::span("handle_signature_help");
+ let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let help = match snap.analysis.signature_help(position)? {
+ Some(it) => it,
+ None => return Ok(None),
+ };
+ let config = snap.config.call_info();
+ let res = to_proto::signature_help(help, config, snap.config.signature_help_label_offsets());
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_hover(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::HoverParams,
+) -> Result<Option<lsp_ext::Hover>> {
+ let _p = profile::span("handle_hover");
+ let range = match params.position {
+ PositionOrRange::Position(position) => Range::new(position, position),
+ PositionOrRange::Range(range) => range,
+ };
+
+ let file_range = from_proto::file_range(&snap, params.text_document, range)?;
+ let info = match snap.analysis.hover(&snap.config.hover(), file_range)? {
+ None => return Ok(None),
+ Some(info) => info,
+ };
+
+ let line_index = snap.file_line_index(file_range.file_id)?;
+ let range = to_proto::range(&line_index, info.range);
+ let markup_kind =
+ snap.config.hover().documentation.map_or(ide::HoverDocFormat::Markdown, |kind| kind);
+ let hover = lsp_ext::Hover {
+ hover: lsp_types::Hover {
+ contents: HoverContents::Markup(to_proto::markup_content(
+ info.info.markup,
+ markup_kind,
+ )),
+ range: Some(range),
+ },
+ actions: if snap.config.hover_actions().none() {
+ Vec::new()
+ } else {
+ prepare_hover_actions(&snap, &info.info.actions)
+ },
+ };
+
+ Ok(Some(hover))
+}
+
+pub(crate) fn handle_prepare_rename(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> Result<Option<PrepareRenameResponse>> {
+ let _p = profile::span("handle_prepare_rename");
+ let position = from_proto::file_position(&snap, params)?;
+
+ let change = snap.analysis.prepare_rename(position)?.map_err(to_proto::rename_error)?;
+
+ let line_index = snap.file_line_index(position.file_id)?;
+ let range = to_proto::range(&line_index, change.range);
+ Ok(Some(PrepareRenameResponse::Range(range)))
+}
+
+pub(crate) fn handle_rename(
+ snap: GlobalStateSnapshot,
+ params: RenameParams,
+) -> Result<Option<WorkspaceEdit>> {
+ let _p = profile::span("handle_rename");
+ let position = from_proto::file_position(&snap, params.text_document_position)?;
+
+ let mut change =
+ snap.analysis.rename(position, &*params.new_name)?.map_err(to_proto::rename_error)?;
+
+ // this is kind of a hack to prevent double edits from happening when moving files
+ // When a module gets renamed by renaming the mod declaration this causes the file to move
+ // which in turn will trigger a WillRenameFiles request to the server for which we reply with a
+ // a second identical set of renames, the client will then apply both edits causing incorrect edits
+ // with this we only emit source_file_edits in the WillRenameFiles response which will do the rename instead
+ // See https://github.com/microsoft/vscode-languageserver-node/issues/752 for more info
+ if !change.file_system_edits.is_empty() && snap.config.will_rename() {
+ change.source_file_edits.clear();
+ }
+ let workspace_edit = to_proto::workspace_edit(&snap, change)?;
+ Ok(Some(workspace_edit))
+}
+
+pub(crate) fn handle_references(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::ReferenceParams,
+) -> Result<Option<Vec<Location>>> {
+ let _p = profile::span("handle_references");
+ let position = from_proto::file_position(&snap, params.text_document_position)?;
+
+ let refs = match snap.analysis.find_all_refs(position, None)? {
+ None => return Ok(None),
+ Some(refs) => refs,
+ };
+
+ let include_declaration = params.context.include_declaration;
+ let locations = refs
+ .into_iter()
+ .flat_map(|refs| {
+ let decl = if include_declaration {
+ refs.declaration.map(|decl| FileRange {
+ file_id: decl.nav.file_id,
+ range: decl.nav.focus_or_full_range(),
+ })
+ } else {
+ None
+ };
+ refs.references
+ .into_iter()
+ .flat_map(|(file_id, refs)| {
+ refs.into_iter().map(move |(range, _)| FileRange { file_id, range })
+ })
+ .chain(decl)
+ })
+ .filter_map(|frange| to_proto::location(&snap, frange).ok())
+ .collect();
+
+ Ok(Some(locations))
+}
+
/// Handler for `textDocument/formatting`.
///
/// Delegates whole-document formatting to rustfmt.
pub(crate) fn handle_formatting(
    snap: GlobalStateSnapshot,
    params: DocumentFormattingParams,
) -> Result<Option<Vec<lsp_types::TextEdit>>> {
    let _p = profile::span("handle_formatting");

    run_rustfmt(&snap, params.text_document, None)
}
+
/// Handler for `textDocument/rangeFormatting`.
///
/// Delegates range formatting to rustfmt, restricted to `params.range`.
pub(crate) fn handle_range_formatting(
    snap: GlobalStateSnapshot,
    params: lsp_types::DocumentRangeFormattingParams,
) -> Result<Option<Vec<lsp_types::TextEdit>>> {
    let _p = profile::span("handle_range_formatting");

    run_rustfmt(&snap, params.text_document, Some(params.range))
}
+
+pub(crate) fn handle_code_action(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::CodeActionParams,
+) -> Result<Option<Vec<lsp_ext::CodeAction>>> {
+ let _p = profile::span("handle_code_action");
+
+ if !snap.config.code_action_literals() {
+ // We intentionally don't support command-based actions, as those either
+ // require either custom client-code or server-initiated edits. Server
+ // initiated edits break causality, so we avoid those.
+ return Ok(None);
+ }
+
+ let line_index =
+ snap.file_line_index(from_proto::file_id(&snap, ¶ms.text_document.uri)?)?;
+ let frange = from_proto::file_range(&snap, params.text_document.clone(), params.range)?;
+
+ let mut assists_config = snap.config.assist();
+ assists_config.allowed = params
+ .context
+ .only
+ .clone()
+ .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect());
+
+ let mut res: Vec<lsp_ext::CodeAction> = Vec::new();
+
+ let code_action_resolve_cap = snap.config.code_action_resolve();
+ let resolve = if code_action_resolve_cap {
+ AssistResolveStrategy::None
+ } else {
+ AssistResolveStrategy::All
+ };
+ let assists = snap.analysis.assists_with_fixes(
+ &assists_config,
+ &snap.config.diagnostics(),
+ resolve,
+ frange,
+ )?;
+ for (index, assist) in assists.into_iter().enumerate() {
+ let resolve_data =
+ if code_action_resolve_cap { Some((index, params.clone())) } else { None };
+ let code_action = to_proto::code_action(&snap, assist, resolve_data)?;
+ res.push(code_action)
+ }
+
+ // Fixes from `cargo check`.
++ for fix in
++ snap.check_fixes.values().filter_map(|it| it.get(&frange.file_id)).into_iter().flatten()
++ {
+ // FIXME: this mapping is awkward and shouldn't exist. Refactor
+ // `snap.check_fixes` to not convert to LSP prematurely.
+ let intersect_fix_range = fix
+ .ranges
+ .iter()
+ .copied()
+ .filter_map(|range| from_proto::text_range(&line_index, range).ok())
+ .any(|fix_range| fix_range.intersect(frange.range).is_some());
+ if intersect_fix_range {
+ res.push(fix.action.clone());
+ }
+ }
+
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_code_action_resolve(
+ snap: GlobalStateSnapshot,
+ mut code_action: lsp_ext::CodeAction,
+) -> Result<lsp_ext::CodeAction> {
+ let _p = profile::span("handle_code_action_resolve");
+ let params = match code_action.data.take() {
+ Some(it) => it,
+ None => return Err(invalid_params_error("code action without data".to_string()).into()),
+ };
+
+ let file_id = from_proto::file_id(&snap, ¶ms.code_action_params.text_document.uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let range = from_proto::text_range(&line_index, params.code_action_params.range)?;
+ let frange = FileRange { file_id, range };
+
+ let mut assists_config = snap.config.assist();
+ assists_config.allowed = params
+ .code_action_params
+ .context
+ .only
+ .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect());
+
+ let (assist_index, assist_resolve) = match parse_action_id(¶ms.id) {
+ Ok(parsed_data) => parsed_data,
+ Err(e) => {
+ return Err(invalid_params_error(format!(
+ "Failed to parse action id string '{}': {}",
+ params.id, e
+ ))
+ .into())
+ }
+ };
+
+ let expected_assist_id = assist_resolve.assist_id.clone();
+ let expected_kind = assist_resolve.assist_kind;
+
+ let assists = snap.analysis.assists_with_fixes(
+ &assists_config,
+ &snap.config.diagnostics(),
+ AssistResolveStrategy::Single(assist_resolve),
+ frange,
+ )?;
+
+ let assist = match assists.get(assist_index) {
+ Some(assist) => assist,
+ None => return Err(invalid_params_error(format!(
+ "Failed to find the assist for index {} provided by the resolve request. Resolve request assist id: {}",
+ assist_index, params.id,
+ ))
+ .into())
+ };
+ if assist.id.0 != expected_assist_id || assist.id.1 != expected_kind {
+ return Err(invalid_params_error(format!(
+ "Mismatching assist at index {} for the resolve parameters given. Resolve request assist id: {}, actual id: {:?}.",
+ assist_index, params.id, assist.id
+ ))
+ .into());
+ }
+ let ca = to_proto::code_action(&snap, assist.clone(), None)?;
+ code_action.edit = ca.edit;
+ code_action.command = ca.command;
+ Ok(code_action)
+}
+
+fn parse_action_id(action_id: &str) -> Result<(usize, SingleResolve), String> {
+ let id_parts = action_id.split(':').collect::<Vec<_>>();
+ match id_parts.as_slice() {
+ [assist_id_string, assist_kind_string, index_string] => {
+ let assist_kind: AssistKind = assist_kind_string.parse()?;
+ let index: usize = match index_string.parse() {
+ Ok(index) => index,
+ Err(e) => return Err(format!("Incorrect index string: {}", e)),
+ };
+ Ok((index, SingleResolve { assist_id: assist_id_string.to_string(), assist_kind }))
+ }
+ _ => Err("Action id contains incorrect number of segments".to_string()),
+ }
+}
+
+pub(crate) fn handle_code_lens(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::CodeLensParams,
+) -> Result<Option<Vec<CodeLens>>> {
+ let _p = profile::span("handle_code_lens");
+
+ let lens_config = snap.config.lens();
+ if lens_config.none() {
+ // early return before any db query!
+ return Ok(Some(Vec::default()));
+ }
+
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let cargo_target_spec = CargoTargetSpec::for_file(&snap, file_id)?;
+
+ let annotations = snap.analysis.annotations(
+ &AnnotationConfig {
+ binary_target: cargo_target_spec
+ .map(|spec| {
+ matches!(
+ spec.target_kind,
+ TargetKind::Bin | TargetKind::Example | TargetKind::Test
+ )
+ })
+ .unwrap_or(false),
+ annotate_runnables: lens_config.runnable(),
+ annotate_impls: lens_config.implementations,
+ annotate_references: lens_config.refs_adt,
+ annotate_method_references: lens_config.method_refs,
+ annotate_enum_variant_references: lens_config.enum_variant_refs,
+ },
+ file_id,
+ )?;
+
+ let mut res = Vec::new();
+ for a in annotations {
+ to_proto::code_lens(&mut res, &snap, a)?;
+ }
+
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_code_lens_resolve(
+ snap: GlobalStateSnapshot,
+ code_lens: CodeLens,
+) -> Result<CodeLens> {
+ let annotation = from_proto::annotation(&snap, code_lens.clone())?;
+ let annotation = snap.analysis.resolve_annotation(annotation)?;
+
+ let mut acc = Vec::new();
+ to_proto::code_lens(&mut acc, &snap, annotation)?;
+
+ let res = match acc.pop() {
+ Some(it) if acc.is_empty() => it,
+ _ => {
+ never!();
+ code_lens
+ }
+ };
+
+ Ok(res)
+}
+
+pub(crate) fn handle_document_highlight(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::DocumentHighlightParams,
+) -> Result<Option<Vec<lsp_types::DocumentHighlight>>> {
+ let _p = profile::span("handle_document_highlight");
+ let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+ let line_index = snap.file_line_index(position.file_id)?;
+
+ let refs = match snap.analysis.highlight_related(snap.config.highlight_related(), position)? {
+ None => return Ok(None),
+ Some(refs) => refs,
+ };
+ let res = refs
+ .into_iter()
+ .map(|ide::HighlightedRange { range, category }| lsp_types::DocumentHighlight {
+ range: to_proto::range(&line_index, range),
+ kind: category.map(to_proto::document_highlight_kind),
+ })
+ .collect();
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_ssr(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::SsrParams,
+) -> Result<lsp_types::WorkspaceEdit> {
+ let _p = profile::span("handle_ssr");
+ let selections = params
+ .selections
+ .iter()
+ .map(|range| from_proto::file_range(&snap, params.position.text_document.clone(), *range))
+ .collect::<Result<Vec<_>, _>>()?;
+ let position = from_proto::file_position(&snap, params.position)?;
+ let source_change = snap.analysis.structural_search_replace(
+ ¶ms.query,
+ params.parse_only,
+ position,
+ selections,
+ )??;
+ to_proto::workspace_edit(&snap, source_change)
+}
+
+pub(crate) fn publish_diagnostics(
+ snap: &GlobalStateSnapshot,
+ file_id: FileId,
+) -> Result<Vec<Diagnostic>> {
+ let _p = profile::span("publish_diagnostics");
+ let line_index = snap.file_line_index(file_id)?;
+
+ let diagnostics: Vec<Diagnostic> = snap
+ .analysis
+ .diagnostics(&snap.config.diagnostics(), AssistResolveStrategy::None, file_id)?
+ .into_iter()
+ .map(|d| Diagnostic {
+ range: to_proto::range(&line_index, d.range),
+ severity: Some(to_proto::diagnostic_severity(d.severity)),
+ code: Some(NumberOrString::String(d.code.as_str().to_string())),
+ code_description: Some(lsp_types::CodeDescription {
+ href: lsp_types::Url::parse(&format!(
+ "https://rust-analyzer.github.io/manual.html#{}",
+ d.code.as_str()
+ ))
+ .unwrap(),
+ }),
+ source: Some("rust-analyzer".to_string()),
+ // https://github.com/rust-lang/rust-analyzer/issues/11404
+ message: if !d.message.is_empty() { d.message } else { " ".to_string() },
+ related_information: None,
+ tags: if d.unused { Some(vec![DiagnosticTag::UNNECESSARY]) } else { None },
+ data: None,
+ })
+ .collect();
+ Ok(diagnostics)
+}
+
+pub(crate) fn handle_inlay_hints(
+ snap: GlobalStateSnapshot,
+ params: InlayHintParams,
+) -> Result<Option<Vec<InlayHint>>> {
+ let _p = profile::span("handle_inlay_hints");
+ let document_uri = ¶ms.text_document.uri;
+ let file_id = from_proto::file_id(&snap, document_uri)?;
+ let line_index = snap.file_line_index(file_id)?;
+ let range = from_proto::file_range(
+ &snap,
+ TextDocumentIdentifier::new(document_uri.to_owned()),
+ params.range,
+ )?;
+ let inlay_hints_config = snap.config.inlay_hints();
+ Ok(Some(
+ snap.analysis
+ .inlay_hints(&inlay_hints_config, file_id, Some(range))?
+ .into_iter()
+ .map(|it| {
+ to_proto::inlay_hint(&snap, &line_index, inlay_hints_config.render_colons, it)
+ })
+ .collect(),
+ ))
+}
+
+pub(crate) fn handle_inlay_hints_resolve(
+ snap: GlobalStateSnapshot,
+ mut hint: InlayHint,
+) -> Result<InlayHint> {
+ let _p = profile::span("handle_inlay_hints_resolve");
+ let data = match hint.data.take() {
+ Some(it) => it,
+ None => return Ok(hint),
+ };
+
+ let resolve_data: lsp_ext::InlayHintResolveData = serde_json::from_value(data)?;
+
+ let file_range = from_proto::file_range(
+ &snap,
+ resolve_data.text_document,
+ match resolve_data.position {
+ PositionOrRange::Position(pos) => Range::new(pos, pos),
+ PositionOrRange::Range(range) => range,
+ },
+ )?;
+ let info = match snap.analysis.hover(&snap.config.hover(), file_range)? {
+ None => return Ok(hint),
+ Some(info) => info,
+ };
+
+ let markup_kind =
+ snap.config.hover().documentation.map_or(ide::HoverDocFormat::Markdown, |kind| kind);
+
+ // FIXME: hover actions?
+ hint.tooltip = Some(lsp_types::InlayHintTooltip::MarkupContent(to_proto::markup_content(
+ info.info.markup,
+ markup_kind,
+ )));
+ Ok(hint)
+}
+
+pub(crate) fn handle_call_hierarchy_prepare(
+ snap: GlobalStateSnapshot,
+ params: CallHierarchyPrepareParams,
+) -> Result<Option<Vec<CallHierarchyItem>>> {
+ let _p = profile::span("handle_call_hierarchy_prepare");
+ let position = from_proto::file_position(&snap, params.text_document_position_params)?;
+
+ let nav_info = match snap.analysis.call_hierarchy(position)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+
+ let RangeInfo { range: _, info: navs } = nav_info;
+ let res = navs
+ .into_iter()
+ .filter(|it| it.kind == Some(SymbolKind::Function))
+ .map(|it| to_proto::call_hierarchy_item(&snap, it))
+ .collect::<Result<Vec<_>>>()?;
+
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_call_hierarchy_incoming(
+ snap: GlobalStateSnapshot,
+ params: CallHierarchyIncomingCallsParams,
+) -> Result<Option<Vec<CallHierarchyIncomingCall>>> {
+ let _p = profile::span("handle_call_hierarchy_incoming");
+ let item = params.item;
+
+ let doc = TextDocumentIdentifier::new(item.uri);
+ let frange = from_proto::file_range(&snap, doc, item.selection_range)?;
+ let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
+
+ let call_items = match snap.analysis.incoming_calls(fpos)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+
+ let mut res = vec![];
+
+ for call_item in call_items.into_iter() {
+ let file_id = call_item.target.file_id;
+ let line_index = snap.file_line_index(file_id)?;
+ let item = to_proto::call_hierarchy_item(&snap, call_item.target)?;
+ res.push(CallHierarchyIncomingCall {
+ from: item,
+ from_ranges: call_item
+ .ranges
+ .into_iter()
+ .map(|it| to_proto::range(&line_index, it))
+ .collect(),
+ });
+ }
+
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_call_hierarchy_outgoing(
+ snap: GlobalStateSnapshot,
+ params: CallHierarchyOutgoingCallsParams,
+) -> Result<Option<Vec<CallHierarchyOutgoingCall>>> {
+ let _p = profile::span("handle_call_hierarchy_outgoing");
+ let item = params.item;
+
+ let doc = TextDocumentIdentifier::new(item.uri);
+ let frange = from_proto::file_range(&snap, doc, item.selection_range)?;
+ let fpos = FilePosition { file_id: frange.file_id, offset: frange.range.start() };
+
+ let call_items = match snap.analysis.outgoing_calls(fpos)? {
+ None => return Ok(None),
+ Some(it) => it,
+ };
+
+ let mut res = vec![];
+
+ for call_item in call_items.into_iter() {
+ let file_id = call_item.target.file_id;
+ let line_index = snap.file_line_index(file_id)?;
+ let item = to_proto::call_hierarchy_item(&snap, call_item.target)?;
+ res.push(CallHierarchyOutgoingCall {
+ to: item,
+ from_ranges: call_item
+ .ranges
+ .into_iter()
+ .map(|it| to_proto::range(&line_index, it))
+ .collect(),
+ });
+ }
+
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_semantic_tokens_full(
+ snap: GlobalStateSnapshot,
+ params: SemanticTokensParams,
+) -> Result<Option<SemanticTokensResult>> {
+ let _p = profile::span("handle_semantic_tokens_full");
+
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let text = snap.analysis.file_text(file_id)?;
+ let line_index = snap.file_line_index(file_id)?;
+
+ let highlights = snap.analysis.highlight(file_id)?;
+ let highlight_strings = snap.config.highlighting_strings();
+ let semantic_tokens =
+ to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
+
+ // Unconditionally cache the tokens
+ snap.semantic_tokens_cache.lock().insert(params.text_document.uri, semantic_tokens.clone());
+
+ Ok(Some(semantic_tokens.into()))
+}
+
+pub(crate) fn handle_semantic_tokens_full_delta(
+ snap: GlobalStateSnapshot,
+ params: SemanticTokensDeltaParams,
+) -> Result<Option<SemanticTokensFullDeltaResult>> {
+ let _p = profile::span("handle_semantic_tokens_full_delta");
+
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let text = snap.analysis.file_text(file_id)?;
+ let line_index = snap.file_line_index(file_id)?;
+
+ let highlights = snap.analysis.highlight(file_id)?;
+ let highlight_strings = snap.config.highlighting_strings();
+ let semantic_tokens =
+ to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
+
+ let mut cache = snap.semantic_tokens_cache.lock();
+ let cached_tokens = cache.entry(params.text_document.uri).or_default();
+
+ if let Some(prev_id) = &cached_tokens.result_id {
+ if *prev_id == params.previous_result_id {
+ let delta = to_proto::semantic_token_delta(cached_tokens, &semantic_tokens);
+ *cached_tokens = semantic_tokens;
+ return Ok(Some(delta.into()));
+ }
+ }
+
+ *cached_tokens = semantic_tokens.clone();
+
+ Ok(Some(semantic_tokens.into()))
+}
+
+pub(crate) fn handle_semantic_tokens_range(
+ snap: GlobalStateSnapshot,
+ params: SemanticTokensRangeParams,
+) -> Result<Option<SemanticTokensRangeResult>> {
+ let _p = profile::span("handle_semantic_tokens_range");
+
+ let frange = from_proto::file_range(&snap, params.text_document, params.range)?;
+ let text = snap.analysis.file_text(frange.file_id)?;
+ let line_index = snap.file_line_index(frange.file_id)?;
+
+ let highlights = snap.analysis.highlight_range(frange)?;
+ let highlight_strings = snap.config.highlighting_strings();
+ let semantic_tokens =
+ to_proto::semantic_tokens(&text, &line_index, highlights, highlight_strings);
+ Ok(Some(semantic_tokens.into()))
+}
+
+pub(crate) fn handle_open_docs(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::TextDocumentPositionParams,
+) -> Result<Option<lsp_types::Url>> {
+ let _p = profile::span("handle_open_docs");
+ let position = from_proto::file_position(&snap, params)?;
+
+ let remote = snap.analysis.external_docs(position)?;
+
+ Ok(remote.and_then(|remote| Url::parse(&remote).ok()))
+}
+
+pub(crate) fn handle_open_cargo_toml(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::OpenCargoTomlParams,
+) -> Result<Option<lsp_types::GotoDefinitionResponse>> {
+ let _p = profile::span("handle_open_cargo_toml");
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+
+ let cargo_spec = match CargoTargetSpec::for_file(&snap, file_id)? {
+ Some(it) => it,
+ None => return Ok(None),
+ };
+
+ let cargo_toml_url = to_proto::url_from_abs_path(&cargo_spec.cargo_toml);
+ let res: lsp_types::GotoDefinitionResponse =
+ Location::new(cargo_toml_url, Range::default()).into();
+ Ok(Some(res))
+}
+
+pub(crate) fn handle_move_item(
+ snap: GlobalStateSnapshot,
+ params: lsp_ext::MoveItemParams,
+) -> Result<Vec<lsp_ext::SnippetTextEdit>> {
+ let _p = profile::span("handle_move_item");
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let range = from_proto::file_range(&snap, params.text_document, params.range)?;
+
+ let direction = match params.direction {
+ lsp_ext::MoveItemDirection::Up => ide::Direction::Up,
+ lsp_ext::MoveItemDirection::Down => ide::Direction::Down,
+ };
+
+ match snap.analysis.move_item(range, direction)? {
+ Some(text_edit) => {
+ let line_index = snap.file_line_index(file_id)?;
+ Ok(to_proto::snippet_text_edit_vec(&line_index, true, text_edit))
+ }
+ None => Ok(vec![]),
+ }
+}
+
+fn to_command_link(command: lsp_types::Command, tooltip: String) -> lsp_ext::CommandLink {
+ lsp_ext::CommandLink { tooltip: Some(tooltip), command }
+}
+
+fn show_impl_command_link(
+ snap: &GlobalStateSnapshot,
+ position: &FilePosition,
+) -> Option<lsp_ext::CommandLinkGroup> {
+ if snap.config.hover_actions().implementations && snap.config.client_commands().show_reference {
+ if let Some(nav_data) = snap.analysis.goto_implementation(*position).unwrap_or(None) {
+ let uri = to_proto::url(snap, position.file_id);
+ let line_index = snap.file_line_index(position.file_id).ok()?;
+ let position = to_proto::position(&line_index, position.offset);
+ let locations: Vec<_> = nav_data
+ .info
+ .into_iter()
+ .filter_map(|nav| to_proto::location_from_nav(snap, nav).ok())
+ .collect();
+ let title = to_proto::implementation_title(locations.len());
+ let command = to_proto::command::show_references(title, &uri, position, locations);
+
+ return Some(lsp_ext::CommandLinkGroup {
+ commands: vec![to_command_link(command, "Go to implementations".into())],
+ ..Default::default()
+ });
+ }
+ }
+ None
+}
+
+fn show_ref_command_link(
+ snap: &GlobalStateSnapshot,
+ position: &FilePosition,
+) -> Option<lsp_ext::CommandLinkGroup> {
+ if snap.config.hover_actions().references && snap.config.client_commands().show_reference {
+ if let Some(ref_search_res) = snap.analysis.find_all_refs(*position, None).unwrap_or(None) {
+ let uri = to_proto::url(snap, position.file_id);
+ let line_index = snap.file_line_index(position.file_id).ok()?;
+ let position = to_proto::position(&line_index, position.offset);
+ let locations: Vec<_> = ref_search_res
+ .into_iter()
+ .flat_map(|res| res.references)
+ .flat_map(|(file_id, ranges)| {
+ ranges.into_iter().filter_map(move |(range, _)| {
+ to_proto::location(snap, FileRange { file_id, range }).ok()
+ })
+ })
+ .collect();
+ let title = to_proto::reference_title(locations.len());
+ let command = to_proto::command::show_references(title, &uri, position, locations);
+
+ return Some(lsp_ext::CommandLinkGroup {
+ commands: vec![to_command_link(command, "Go to references".into())],
+ ..Default::default()
+ });
+ }
+ }
+ None
+}
+
+fn runnable_action_links(
+ snap: &GlobalStateSnapshot,
+ runnable: Runnable,
+) -> Option<lsp_ext::CommandLinkGroup> {
+ let hover_actions_config = snap.config.hover_actions();
+ if !hover_actions_config.runnable() {
+ return None;
+ }
+
+ let cargo_spec = CargoTargetSpec::for_file(snap, runnable.nav.file_id).ok()?;
+ if should_skip_target(&runnable, cargo_spec.as_ref()) {
+ return None;
+ }
+
+ let client_commands_config = snap.config.client_commands();
+ if !(client_commands_config.run_single || client_commands_config.debug_single) {
+ return None;
+ }
+
+ let title = runnable.title();
+ let r = to_proto::runnable(snap, runnable).ok()?;
+
+ let mut group = lsp_ext::CommandLinkGroup::default();
+
+ if hover_actions_config.run && client_commands_config.run_single {
+ let run_command = to_proto::command::run_single(&r, &title);
+ group.commands.push(to_command_link(run_command, r.label.clone()));
+ }
+
+ if hover_actions_config.debug && client_commands_config.debug_single {
+ let dbg_command = to_proto::command::debug_single(&r);
+ group.commands.push(to_command_link(dbg_command, r.label));
+ }
+
+ Some(group)
+}
+
+fn goto_type_action_links(
+ snap: &GlobalStateSnapshot,
+ nav_targets: &[HoverGotoTypeData],
+) -> Option<lsp_ext::CommandLinkGroup> {
+ if !snap.config.hover_actions().goto_type_def
+ || nav_targets.is_empty()
+ || !snap.config.client_commands().goto_location
+ {
+ return None;
+ }
+
+ Some(lsp_ext::CommandLinkGroup {
+ title: Some("Go to ".into()),
+ commands: nav_targets
+ .iter()
+ .filter_map(|it| {
+ to_proto::command::goto_location(snap, &it.nav)
+ .map(|cmd| to_command_link(cmd, it.mod_path.clone()))
+ })
+ .collect(),
+ })
+}
+
+fn prepare_hover_actions(
+ snap: &GlobalStateSnapshot,
+ actions: &[HoverAction],
+) -> Vec<lsp_ext::CommandLinkGroup> {
+ actions
+ .iter()
+ .filter_map(|it| match it {
+ HoverAction::Implementation(position) => show_impl_command_link(snap, position),
+ HoverAction::Reference(position) => show_ref_command_link(snap, position),
+ HoverAction::Runnable(r) => runnable_action_links(snap, r.clone()),
+ HoverAction::GoToType(targets) => goto_type_action_links(snap, targets),
+ })
+ .collect()
+}
+
+fn should_skip_target(runnable: &Runnable, cargo_spec: Option<&CargoTargetSpec>) -> bool {
+ match runnable.kind {
+ RunnableKind::Bin => {
+ // Do not suggest binary run on other target than binary
+ match &cargo_spec {
+ Some(spec) => !matches!(
+ spec.target_kind,
+ TargetKind::Bin | TargetKind::Example | TargetKind::Test
+ ),
+ None => true,
+ }
+ }
+ _ => false,
+ }
+}
+
+fn run_rustfmt(
+ snap: &GlobalStateSnapshot,
+ text_document: TextDocumentIdentifier,
+ range: Option<lsp_types::Range>,
+) -> Result<Option<Vec<lsp_types::TextEdit>>> {
+ let file_id = from_proto::file_id(snap, &text_document.uri)?;
+ let file = snap.analysis.file_text(file_id)?;
+ let crate_ids = snap.analysis.crate_for(file_id)?;
+
+ let line_index = snap.file_line_index(file_id)?;
+
+ let mut rustfmt = match snap.config.rustfmt() {
+ RustfmtConfig::Rustfmt { extra_args, enable_range_formatting } => {
+ let mut cmd = process::Command::new(toolchain::rustfmt());
+ cmd.args(extra_args);
+ // try to chdir to the file so we can respect `rustfmt.toml`
+ // FIXME: use `rustfmt --config-path` once
+ // https://github.com/rust-lang/rustfmt/issues/4660 gets fixed
+ match text_document.uri.to_file_path() {
+ Ok(mut path) => {
+ // pop off file name
+ if path.pop() && path.is_dir() {
+ cmd.current_dir(path);
+ }
+ }
+ Err(_) => {
+ tracing::error!(
+ "Unable to get file path for {}, rustfmt.toml might be ignored",
+ text_document.uri
+ );
+ }
+ }
+ if let Some(&crate_id) = crate_ids.first() {
+ // Assume all crates are in the same edition
+ let edition = snap.analysis.crate_edition(crate_id)?;
+ cmd.arg("--edition");
+ cmd.arg(edition.to_string());
+ }
+
+ if let Some(range) = range {
+ if !enable_range_formatting {
+ return Err(LspError::new(
+ ErrorCode::InvalidRequest as i32,
+ String::from(
+ "rustfmt range formatting is unstable. \
+ Opt-in by using a nightly build of rustfmt and setting \
+ `rustfmt.rangeFormatting.enable` to true in your LSP configuration",
+ ),
+ )
+ .into());
+ }
+
+ let frange = from_proto::file_range(snap, text_document, range)?;
+ let start_line = line_index.index.line_col(frange.range.start()).line;
+ let end_line = line_index.index.line_col(frange.range.end()).line;
+
+ cmd.arg("--unstable-features");
+ cmd.arg("--file-lines");
+ cmd.arg(
+ json!([{
+ "file": "stdin",
+ "range": [start_line, end_line]
+ }])
+ .to_string(),
+ );
+ }
+
+ cmd
+ }
+ RustfmtConfig::CustomCommand { command, args } => {
+ let mut cmd = process::Command::new(command);
+ cmd.args(args);
+ cmd
+ }
+ };
+
+ let mut rustfmt = rustfmt
+ .stdin(Stdio::piped())
+ .stdout(Stdio::piped())
+ .stderr(Stdio::piped())
+ .spawn()
+ .context(format!("Failed to spawn {:?}", rustfmt))?;
+
+ rustfmt.stdin.as_mut().unwrap().write_all(file.as_bytes())?;
+
+ let output = rustfmt.wait_with_output()?;
+ let captured_stdout = String::from_utf8(output.stdout)?;
+ let captured_stderr = String::from_utf8(output.stderr).unwrap_or_default();
+
+ if !output.status.success() {
+ let rustfmt_not_installed =
+ captured_stderr.contains("not installed") || captured_stderr.contains("not available");
+
+ return match output.status.code() {
+ Some(1) if !rustfmt_not_installed => {
+ // While `rustfmt` doesn't have a specific exit code for parse errors this is the
+ // likely cause exiting with 1. Most Language Servers swallow parse errors on
+ // formatting because otherwise an error is surfaced to the user on top of the
+ // syntax error diagnostics they're already receiving. This is especially jarring
+ // if they have format on save enabled.
+ tracing::info!("rustfmt exited with status 1, assuming parse error and ignoring");
+ Ok(None)
+ }
+ _ => {
+ // Something else happened - e.g. `rustfmt` is missing or caught a signal
+ Err(LspError::new(
+ -32900,
+ format!(
+ r#"rustfmt exited with:
+ Status: {}
+ stdout: {}
+ stderr: {}"#,
+ output.status, captured_stdout, captured_stderr,
+ ),
+ )
+ .into())
+ }
+ };
+ }
+
+ let (new_text, new_line_endings) = LineEndings::normalize(captured_stdout);
+
+ if line_index.endings != new_line_endings {
+ // If line endings are different, send the entire file.
+ // Diffing would not work here, as the line endings might be the only
+ // difference.
+ Ok(Some(to_proto::text_edit_vec(
+ &line_index,
+ TextEdit::replace(TextRange::up_to(TextSize::of(&*file)), new_text),
+ )))
+ } else if *file == new_text {
+ // The document is already formatted correctly -- no edits needed.
+ Ok(None)
+ } else {
+ Ok(Some(to_proto::text_edit_vec(&line_index, diff(&file, &new_text))))
+ }
+}
--- /dev/null
- //! Note that "Rust Analyzer: Run" action does not allow running a single test
- //! in release mode in VS Code. There's however "Rust Analyzer: Copy Run Command Line"
+//! Fully integrated benchmarks for rust-analyzer, which load real cargo
+//! projects.
+//!
+//! The benchmark here is used to debug specific performance regressions. If you
+//! notice that, eg, completion is slow in some specific case, you can modify
+//! code here exercise this specific completion, and thus have a fast
+//! edit/compile/test cycle.
+//!
++//! Note that "rust-analyzer: Run" action does not allow running a single test
++//! in release mode in VS Code. There's however "rust-analyzer: Copy Run Command Line"
+//! which you can use to paste the command in terminal and add `--release` manually.
+
+use std::sync::Arc;
+
+use ide::{CallableSnippets, Change, CompletionConfig, FilePosition, TextSize};
+use ide_db::{
+ imports::insert_use::{ImportGranularity, InsertUseConfig},
+ SnippetCap,
+};
+use project_model::CargoConfig;
+use test_utils::project_root;
+use vfs::{AbsPathBuf, VfsPath};
+
+use crate::cli::load_cargo::{load_workspace_at, LoadCargoConfig};
+
+#[test]
+fn integrated_highlighting_benchmark() {
+ if std::env::var("RUN_SLOW_BENCHES").is_err() {
+ return;
+ }
+
+ // Load rust-analyzer itself.
+ let workspace_to_load = project_root();
+ let file = "./crates/ide-db/src/apply_change.rs";
+
+ let cargo_config = CargoConfig::default();
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: true,
+ with_proc_macro: false,
+ prefill_caches: false,
+ };
+
+ let (mut host, vfs, _proc_macro) = {
+ let _it = stdx::timeit("workspace loading");
+ load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
+ };
+
+ let file_id = {
+ let file = workspace_to_load.join(file);
+ let path = VfsPath::from(AbsPathBuf::assert(file));
+ vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path))
+ };
+
+ {
+ let _it = stdx::timeit("initial");
+ let analysis = host.analysis();
+ analysis.highlight_as_html(file_id, false).unwrap();
+ }
+
+ profile::init_from("*>100");
+ // let _s = profile::heartbeat_span();
+
+ {
+ let _it = stdx::timeit("change");
+ let mut text = host.analysis().file_text(file_id).unwrap().to_string();
+ text.push_str("\npub fn _dummy() {}\n");
+ let mut change = Change::new();
+ change.change_file(file_id, Some(Arc::new(text)));
+ host.apply_change(change);
+ }
+
+ {
+ let _it = stdx::timeit("after change");
+ let _span = profile::cpu_span();
+ let analysis = host.analysis();
+ analysis.highlight_as_html(file_id, false).unwrap();
+ }
+}
+
+#[test]
+fn integrated_completion_benchmark() {
+ if std::env::var("RUN_SLOW_BENCHES").is_err() {
+ return;
+ }
+
+ // Load rust-analyzer itself.
+ let workspace_to_load = project_root();
+ let file = "./crates/hir/src/lib.rs";
+
+ let cargo_config = CargoConfig::default();
+ let load_cargo_config = LoadCargoConfig {
+ load_out_dirs_from_check: true,
+ with_proc_macro: false,
+ prefill_caches: true,
+ };
+
+ let (mut host, vfs, _proc_macro) = {
+ let _it = stdx::timeit("workspace loading");
+ load_workspace_at(&workspace_to_load, &cargo_config, &load_cargo_config, &|_| {}).unwrap()
+ };
+
+ let file_id = {
+ let file = workspace_to_load.join(file);
+ let path = VfsPath::from(AbsPathBuf::assert(file));
+ vfs.file_id(&path).unwrap_or_else(|| panic!("can't find virtual file for {}", path))
+ };
+
+ {
+ let _it = stdx::timeit("initial");
+ let analysis = host.analysis();
+ analysis.highlight_as_html(file_id, false).unwrap();
+ }
+
+ profile::init_from("*>5");
+ // let _s = profile::heartbeat_span();
+
+ let completion_offset = {
+ let _it = stdx::timeit("change");
+ let mut text = host.analysis().file_text(file_id).unwrap().to_string();
+ let completion_offset =
+ patch(&mut text, "db.struct_data(self.id)", "sel;\ndb.struct_data(self.id)")
+ + "sel".len();
+ let mut change = Change::new();
+ change.change_file(file_id, Some(Arc::new(text)));
+ host.apply_change(change);
+ completion_offset
+ };
+
+ {
+ let _p = profile::span("unqualified path completion");
+ let _span = profile::cpu_span();
+ let analysis = host.analysis();
+ let config = CompletionConfig {
+ enable_postfix_completions: true,
+ enable_imports_on_the_fly: true,
+ enable_self_on_the_fly: true,
+ enable_private_editable: true,
+ callable: Some(CallableSnippets::FillArguments),
+ snippet_cap: SnippetCap::new(true),
+ insert_use: InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ prefix_kind: hir::PrefixKind::ByCrate,
+ enforce_granularity: true,
+ group: true,
+ skip_glob_imports: true,
+ },
+ snippets: Vec::new(),
+ };
+ let position =
+ FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
+ analysis.completions(&config, position, None).unwrap();
+ }
+
+ let completion_offset = {
+ let _it = stdx::timeit("change");
+ let mut text = host.analysis().file_text(file_id).unwrap().to_string();
+ let completion_offset =
+ patch(&mut text, "sel;\ndb.struct_data(self.id)", "self.;\ndb.struct_data(self.id)")
+ + "self.".len();
+ let mut change = Change::new();
+ change.change_file(file_id, Some(Arc::new(text)));
+ host.apply_change(change);
+ completion_offset
+ };
+
+ {
+ let _p = profile::span("dot completion");
+ let _span = profile::cpu_span();
+ let analysis = host.analysis();
+ let config = CompletionConfig {
+ enable_postfix_completions: true,
+ enable_imports_on_the_fly: true,
+ enable_self_on_the_fly: true,
+ enable_private_editable: true,
+ callable: Some(CallableSnippets::FillArguments),
+ snippet_cap: SnippetCap::new(true),
+ insert_use: InsertUseConfig {
+ granularity: ImportGranularity::Crate,
+ prefix_kind: hir::PrefixKind::ByCrate,
+ enforce_granularity: true,
+ group: true,
+ skip_glob_imports: true,
+ },
+ snippets: Vec::new(),
+ };
+ let position =
+ FilePosition { file_id, offset: TextSize::try_from(completion_offset).unwrap() };
+ analysis.completions(&config, position, None).unwrap();
+ }
+}
+
+fn patch(what: &mut String, from: &str, to: &str) -> usize {
+ let idx = what.find(from).unwrap();
+ *what = what.replacen(from, to, 1);
+ idx
+}
--- /dev/null
- use ide_db::base_db::{SourceDatabaseExt, VfsPath};
+//! The main loop of `rust-analyzer` responsible for dispatching LSP
+//! requests/replies and notifications back to the client.
+use std::{
+ fmt,
++ ops::Deref,
+ sync::Arc,
+ time::{Duration, Instant},
+};
+
+use always_assert::always;
+use crossbeam_channel::{select, Receiver};
- flycheck::Message::AddDiagnostic { workspace_root, diagnostic } => {
++use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath};
++use itertools::Itertools;
+use lsp_server::{Connection, Notification, Request};
+use lsp_types::notification::Notification as _;
+use vfs::{ChangeKind, FileId};
+
+use crate::{
+ config::Config,
+ dispatch::{NotificationDispatcher, RequestDispatcher},
+ from_proto,
+ global_state::{file_id_to_url, url_to_file_id, GlobalState},
+ handlers, lsp_ext,
+ lsp_utils::{apply_document_changes, notification_is, Progress},
+ mem_docs::DocumentData,
+ reload::{self, BuildDataProgress, ProjectWorkspaceProgress},
+ Result,
+};
+
+pub fn main_loop(config: Config, connection: Connection) -> Result<()> {
+ tracing::info!("initial config: {:#?}", config);
+
+ // Windows scheduler implements priority boosts: if thread waits for an
+ // event (like a condvar), and event fires, priority of the thread is
+ // temporary bumped. This optimization backfires in our case: each time the
+ // `main_loop` schedules a task to run on a threadpool, the worker threads
+ // gets a higher priority, and (on a machine with fewer cores) displaces the
+ // main loop! We work-around this by marking the main loop as a
+ // higher-priority thread.
+ //
+ // https://docs.microsoft.com/en-us/windows/win32/procthread/scheduling-priorities
+ // https://docs.microsoft.com/en-us/windows/win32/procthread/priority-boosts
+ // https://github.com/rust-lang/rust-analyzer/issues/2835
+ #[cfg(windows)]
+ unsafe {
+ use winapi::um::processthreadsapi::*;
+ let thread = GetCurrentThread();
+ let thread_priority_above_normal = 1;
+ SetThreadPriority(thread, thread_priority_above_normal);
+ }
+
+ GlobalState::new(connection.sender, config).run(connection.receiver)
+}
+
+enum Event {
+ Lsp(lsp_server::Message),
+ Task(Task),
+ Vfs(vfs::loader::Message),
+ Flycheck(flycheck::Message),
+}
+
+#[derive(Debug)]
+pub(crate) enum Task {
+ Response(lsp_server::Response),
+ Retry(lsp_server::Request),
+ Diagnostics(Vec<(FileId, Vec<lsp_types::Diagnostic>)>),
+ PrimeCaches(PrimeCachesProgress),
+ FetchWorkspace(ProjectWorkspaceProgress),
+ FetchBuildData(BuildDataProgress),
+}
+
+#[derive(Debug)]
+pub(crate) enum PrimeCachesProgress {
+ Begin,
+ Report(ide::ParallelPrimeCachesProgress),
+ End { cancelled: bool },
+}
+
+impl fmt::Debug for Event {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let debug_verbose_not = |not: &Notification, f: &mut fmt::Formatter<'_>| {
+ f.debug_struct("Notification").field("method", ¬.method).finish()
+ };
+
+ match self {
+ Event::Lsp(lsp_server::Message::Notification(not)) => {
+ if notification_is::<lsp_types::notification::DidOpenTextDocument>(not)
+ || notification_is::<lsp_types::notification::DidChangeTextDocument>(not)
+ {
+ return debug_verbose_not(not, f);
+ }
+ }
+ Event::Task(Task::Response(resp)) => {
+ return f
+ .debug_struct("Response")
+ .field("id", &resp.id)
+ .field("error", &resp.error)
+ .finish();
+ }
+ _ => (),
+ }
+ match self {
+ Event::Lsp(it) => fmt::Debug::fmt(it, f),
+ Event::Task(it) => fmt::Debug::fmt(it, f),
+ Event::Vfs(it) => fmt::Debug::fmt(it, f),
+ Event::Flycheck(it) => fmt::Debug::fmt(it, f),
+ }
+ }
+}
+
+impl GlobalState {
+ /// Runs the server: performs one-time startup work (workspace-discovery
+ /// warning, dynamic `didSave` registration, initial workspace fetch) and
+ /// then processes events until the client sends `exit`.
+ ///
+ /// Returns an error if the client disconnects without the shutdown/exit
+ /// handshake.
+ fn run(mut self, inbox: Receiver<lsp_server::Message>) -> Result<()> {
+ if self.config.linked_projects().is_empty()
+ && self.config.detached_files().is_empty()
+ && self.config.notifications().cargo_toml_not_found
+ {
+ self.show_and_log_error("rust-analyzer failed to discover workspace".to_string(), None);
+ };
+
+ // Register interest in saves of Rust sources and Cargo manifests, with
+ // `include_text: false` since the text is already in the VFS.
+ if self.config.did_save_text_document_dynamic_registration() {
+ let save_registration_options = lsp_types::TextDocumentSaveRegistrationOptions {
+ include_text: Some(false),
+ text_document_registration_options: lsp_types::TextDocumentRegistrationOptions {
+ document_selector: Some(vec![
+ lsp_types::DocumentFilter {
+ language: None,
+ scheme: None,
+ pattern: Some("**/*.rs".into()),
+ },
+ lsp_types::DocumentFilter {
+ language: None,
+ scheme: None,
+ pattern: Some("**/Cargo.toml".into()),
+ },
+ lsp_types::DocumentFilter {
+ language: None,
+ scheme: None,
+ pattern: Some("**/Cargo.lock".into()),
+ },
+ ]),
+ },
+ };
+
+ let registration = lsp_types::Registration {
+ id: "textDocument/didSave".to_string(),
+ method: "textDocument/didSave".to_string(),
+ register_options: Some(serde_json::to_value(save_registration_options).unwrap()),
+ };
+ self.send_request::<lsp_types::request::RegisterCapability>(
+ lsp_types::RegistrationParams { registrations: vec![registration] },
+ |_, _| (),
+ );
+ }
+
+ self.fetch_workspaces_queue.request_op("startup".to_string());
+ if let Some(cause) = self.fetch_workspaces_queue.should_start_op() {
+ self.fetch_workspaces(cause);
+ }
+
+ while let Some(event) = self.next_event(&inbox) {
+ // `exit` is handled here rather than in `handle_event` so the loop
+ // can return cleanly.
+ if let Event::Lsp(lsp_server::Message::Notification(not)) = &event {
+ if not.method == lsp_types::notification::Exit::METHOD {
+ return Ok(());
+ }
+ }
+ self.handle_event(event)?
+ }
+
+ Err("client exited without proper shutdown sequence".into())
+ }
+
+ /// Blocks until the next event arrives on any of the four input channels.
+ ///
+ /// Returns `None` only when the client channel is closed; the internal
+ /// channels (`task_pool`, `loader`, `flycheck`) are expected to outlive the
+ /// loop, hence the `unwrap`s.
+ fn next_event(&self, inbox: &Receiver<lsp_server::Message>) -> Option<Event> {
+ select! {
+ recv(inbox) -> msg =>
+ msg.ok().map(Event::Lsp),
+
+ recv(self.task_pool.receiver) -> task =>
+ Some(Event::Task(task.unwrap())),
+
+ recv(self.loader.receiver) -> task =>
+ Some(Event::Vfs(task.unwrap())),
+
+ recv(self.flycheck_receiver) -> task =>
+ Some(Event::Flycheck(task.unwrap())),
+ }
+ }
+
+ /// Handles one event (a single "turn" of the main loop), then runs the
+ /// follow-up work that depends on the resulting state: publishing
+ /// diagnostics, scheduling workspace/build-data fetches, cache priming,
+ /// and status reporting. Events from the same channel are coalesced into
+ /// one turn where possible.
+ fn handle_event(&mut self, event: Event) -> Result<()> {
+ let loop_start = Instant::now();
+ // NOTE: don't count blocking select! call as a loop-turn time
+ let _p = profile::span("GlobalState::handle_event");
+
+ tracing::debug!("handle_event({:?})", event);
+ let task_queue_len = self.task_pool.handle.len();
+ if task_queue_len > 0 {
+ tracing::info!("task queue len: {}", task_queue_len);
+ }
+
+ let was_quiescent = self.is_quiescent();
+ match event {
+ Event::Lsp(msg) => match msg {
+ lsp_server::Message::Request(req) => self.on_new_request(loop_start, req),
+ lsp_server::Message::Notification(not) => {
+ self.on_notification(not)?;
+ }
+ lsp_server::Message::Response(resp) => self.complete_request(resp),
+ },
+ Event::Task(mut task) => {
+ let _p = profile::span("GlobalState::handle_event/task");
+ let mut prime_caches_progress = Vec::new();
+ loop {
+ match task {
+ Task::Response(response) => self.respond(response),
+ Task::Retry(req) => self.on_request(req),
+ Task::Diagnostics(diagnostics_per_file) => {
+ for (file_id, diagnostics) in diagnostics_per_file {
+ self.diagnostics.set_native_diagnostics(file_id, diagnostics)
+ }
+ }
+ Task::PrimeCaches(progress) => match progress {
+ PrimeCachesProgress::Begin => prime_caches_progress.push(progress),
+ PrimeCachesProgress::Report(_) => {
+ match prime_caches_progress.last_mut() {
+ Some(last @ PrimeCachesProgress::Report(_)) => {
+ // Coalesce subsequent update events.
+ *last = progress;
+ }
+ _ => prime_caches_progress.push(progress),
+ }
+ }
+ PrimeCachesProgress::End { .. } => prime_caches_progress.push(progress),
+ },
+ Task::FetchWorkspace(progress) => {
+ let (state, msg) = match progress {
+ ProjectWorkspaceProgress::Begin => (Progress::Begin, None),
+ ProjectWorkspaceProgress::Report(msg) => {
+ (Progress::Report, Some(msg))
+ }
+ ProjectWorkspaceProgress::End(workspaces) => {
+ self.fetch_workspaces_queue.op_completed(workspaces);
+
+ // Pointer comparison detects whether switching actually
+ // replaced the workspace set.
+ let old = Arc::clone(&self.workspaces);
+ self.switch_workspaces("fetched workspace".to_string());
+ let workspaces_updated = !Arc::ptr_eq(&old, &self.workspaces);
+
+ if self.config.run_build_scripts() && workspaces_updated {
+ self.fetch_build_data_queue
+ .request_op(format!("workspace updated"));
+ }
+
+ (Progress::End, None)
+ }
+ };
+
+ self.report_progress("Fetching", state, msg, None);
+ }
+ Task::FetchBuildData(progress) => {
+ let (state, msg) = match progress {
+ BuildDataProgress::Begin => (Some(Progress::Begin), None),
+ BuildDataProgress::Report(msg) => {
+ (Some(Progress::Report), Some(msg))
+ }
+ BuildDataProgress::End(build_data_result) => {
+ self.fetch_build_data_queue.op_completed(build_data_result);
+
+ self.switch_workspaces("fetched build data".to_string());
+
+ (Some(Progress::End), None)
+ }
+ };
+
+ if let Some(state) = state {
+ self.report_progress("Loading", state, msg, None);
+ }
+ }
+ }
+
+ // Coalesce multiple task events into one loop turn
+ task = match self.task_pool.receiver.try_recv() {
+ Ok(task) => task,
+ Err(_) => break,
+ };
+ }
+
+ // Report the (coalesced) prime-caches progress collected above.
+ for progress in prime_caches_progress {
+ let (state, message, fraction);
+ match progress {
+ PrimeCachesProgress::Begin => {
+ state = Progress::Begin;
+ message = None;
+ fraction = 0.0;
+ }
+ PrimeCachesProgress::Report(report) => {
+ state = Progress::Report;
+
+ message = match &report.crates_currently_indexing[..] {
+ [crate_name] => Some(format!(
+ "{}/{} ({})",
+ report.crates_done, report.crates_total, crate_name
+ )),
+ [crate_name, rest @ ..] => Some(format!(
+ "{}/{} ({} + {} more)",
+ report.crates_done,
+ report.crates_total,
+ crate_name,
+ rest.len()
+ )),
+ _ => None,
+ };
+
+ fraction = Progress::fraction(report.crates_done, report.crates_total);
+ }
+ PrimeCachesProgress::End { cancelled } => {
+ state = Progress::End;
+ message = None;
+ fraction = 1.0;
+
+ self.prime_caches_queue.op_completed(());
+ if cancelled {
+ self.prime_caches_queue
+ .request_op("restart after cancellation".to_string());
+ }
+ }
+ };
+
+ self.report_progress("Indexing", state, message, Some(fraction));
+ }
+ }
+ Event::Vfs(mut task) => {
+ let _p = profile::span("GlobalState::handle_event/vfs");
+ loop {
+ match task {
+ vfs::loader::Message::Loaded { files } => {
+ let vfs = &mut self.vfs.write().0;
+ for (path, contents) in files {
+ let path = VfsPath::from(path);
+ // Open documents are authoritative; don't clobber them
+ // with on-disk contents.
+ if !self.mem_docs.contains(&path) {
+ vfs.set_file_contents(path, contents);
+ }
+ }
+ }
+ vfs::loader::Message::Progress { n_total, n_done, config_version } => {
+ always!(config_version <= self.vfs_config_version);
+
+ self.vfs_progress_config_version = config_version;
+ self.vfs_progress_n_total = n_total;
+ self.vfs_progress_n_done = n_done;
+
+ let state = if n_done == 0 {
+ Progress::Begin
+ } else if n_done < n_total {
+ Progress::Report
+ } else {
+ assert_eq!(n_done, n_total);
+ Progress::End
+ };
+ self.report_progress(
+ "Roots Scanned",
+ state,
+ Some(format!("{}/{}", n_done, n_total)),
+ Some(Progress::fraction(n_done, n_total)),
+ )
+ }
+ }
+ // Coalesce many VFS event into a single loop turn
+ task = match self.loader.receiver.try_recv() {
+ Ok(task) => task,
+ Err(_) => break,
+ }
+ }
+ }
+ Event::Flycheck(mut task) => {
+ let _p = profile::span("GlobalState::handle_event/flycheck");
+ loop {
+ match task {
- self.diagnostics.clear_check();
++ flycheck::Message::AddDiagnostic { id, workspace_root, diagnostic } => {
+ let snap = self.snapshot();
+ let diagnostics =
+ crate::diagnostics::to_proto::map_rust_diagnostic_to_lsp(
+ &self.config.diagnostics_map(),
+ &diagnostic,
+ &workspace_root,
+ &snap,
+ );
+ for diag in diagnostics {
+ match url_to_file_id(&self.vfs.read().0, &diag.url) {
+ Ok(file_id) => self.diagnostics.add_check_diagnostic(
++ id,
+ file_id,
+ diag.diagnostic,
+ diag.fix,
+ ),
+ Err(err) => {
+ tracing::error!(
+ "File with cargo diagnostic not found in VFS: {}",
+ err
+ );
+ }
+ };
+ }
+ }
+
+ flycheck::Message::Progress { id, progress } => {
+ let (state, message) = match progress {
+ flycheck::Progress::DidStart => {
- if !was_quiescent {
++ self.diagnostics.clear_check(id);
+ (Progress::Begin, None)
+ }
+ flycheck::Progress::DidCheckCrate(target) => {
+ (Progress::Report, Some(target))
+ }
+ flycheck::Progress::DidCancel => (Progress::End, None),
+ flycheck::Progress::DidFinish(result) => {
+ if let Err(err) = result {
+ self.show_and_log_error(
+ "cargo check failed".to_string(),
+ Some(err.to_string()),
+ );
+ }
+ (Progress::End, None)
+ }
+ };
+
+ // When we're running multiple flychecks, we have to include a disambiguator in
+ // the title, or the editor complains. Note that this is a user-facing string.
+ let title = if self.flycheck.len() == 1 {
+ match self.config.flycheck() {
+ Some(config) => format!("{}", config),
+ None => "cargo check".to_string(),
+ }
+ } else {
+ format!("cargo check (#{})", id + 1)
+ };
+ self.report_progress(&title, state, message, None);
+ }
+ }
+ // Coalesce many flycheck updates into a single loop turn
+ task = match self.flycheck_receiver.try_recv() {
+ Ok(task) => task,
+ Err(_) => break,
+ }
+ }
+ }
+ }
+
+ // Post-event follow-up work below.
+ let state_changed = self.process_changes();
+ let memdocs_added_or_removed = self.mem_docs.take_changes();
+
+ if self.is_quiescent() {
- for flycheck in &this.flycheck {
- flycheck.update();
++ if !was_quiescent
++ && !self.fetch_workspaces_queue.op_requested()
++ && !self.fetch_build_data_queue.op_requested()
++ {
+ for flycheck in &self.flycheck {
+ flycheck.update();
+ }
+ if self.config.prefill_caches() {
+ self.prime_caches_queue.request_op("became quiescent".to_string());
+ }
+ }
+
+ if !was_quiescent || state_changed {
+ // Refresh semantic tokens if the client supports it.
+ if self.config.semantic_tokens_refresh() {
+ self.semantic_tokens_cache.lock().clear();
+ self.send_request::<lsp_types::request::SemanticTokensRefresh>((), |_, _| ());
+ }
+
+ // Refresh code lens if the client supports it.
+ if self.config.code_lens_refresh() {
+ self.send_request::<lsp_types::request::CodeLensRefresh>((), |_, _| ());
+ }
+ }
+
+ if !was_quiescent || state_changed || memdocs_added_or_removed {
+ if self.config.publish_diagnostics() {
+ self.update_diagnostics()
+ }
+ }
+ }
+
+ if let Some(diagnostic_changes) = self.diagnostics.take_changes() {
+ for file_id in diagnostic_changes {
+ let db = self.analysis_host.raw_database();
+ let source_root = db.file_source_root(file_id);
+ if db.source_root(source_root).is_library {
+ // Only publish diagnostics for files in the workspace, not from crates.io deps
+ // or the sysroot.
+ // While theoretically these should never have errors, we have quite a few false
+ // positives particularly in the stdlib, and those diagnostics would stay around
+ // forever if we emitted them here.
+ continue;
+ }
+
+ let url = file_id_to_url(&self.vfs.read().0, file_id);
+ let mut diagnostics =
+ self.diagnostics.diagnostics_for(file_id).cloned().collect::<Vec<_>>();
+ // https://github.com/rust-lang/rust-analyzer/issues/11404
+ for d in &mut diagnostics {
+ if d.message.is_empty() {
+ d.message = " ".to_string();
+ }
+ if let Some(rds) = d.related_information.as_mut() {
+ for rd in rds {
+ if rd.message.is_empty() {
+ rd.message = " ".to_string();
+ }
+ }
+ }
+ }
+ let version = from_proto::vfs_path(&url)
+ .map(|path| self.mem_docs.get(&path).map(|it| it.version))
+ .unwrap_or_default();
+
+ self.send_notification::<lsp_types::notification::PublishDiagnostics>(
+ lsp_types::PublishDiagnosticsParams { uri: url, diagnostics, version },
+ );
+ }
+ }
+
+ if self.config.cargo_autoreload() {
+ if let Some(cause) = self.fetch_workspaces_queue.should_start_op() {
+ self.fetch_workspaces(cause);
+ }
+ }
+
+ if !self.fetch_workspaces_queue.op_in_progress() {
+ if let Some(cause) = self.fetch_build_data_queue.should_start_op() {
+ self.fetch_build_data(cause);
+ }
+ }
+
+ if let Some(cause) = self.prime_caches_queue.should_start_op() {
+ tracing::debug!(%cause, "will prime caches");
+ let num_worker_threads = self.config.prime_caches_num_threads();
+
+ self.task_pool.handle.spawn_with_sender({
+ let analysis = self.snapshot().analysis;
+ move |sender| {
+ sender.send(Task::PrimeCaches(PrimeCachesProgress::Begin)).unwrap();
+ let res = analysis.parallel_prime_caches(num_worker_threads, |progress| {
+ let report = PrimeCachesProgress::Report(progress);
+ sender.send(Task::PrimeCaches(report)).unwrap();
+ });
+ sender
+ .send(Task::PrimeCaches(PrimeCachesProgress::End {
+ cancelled: res.is_err(),
+ }))
+ .unwrap();
+ }
+ });
+ }
+
+ // Only notify the client when the status actually changed.
+ let status = self.current_status();
+ if self.last_reported_status.as_ref() != Some(&status) {
+ self.last_reported_status = Some(status.clone());
+
+ if let (lsp_ext::Health::Error, Some(message)) = (status.health, &status.message) {
+ self.show_message(lsp_types::MessageType::ERROR, message.clone());
+ }
+
+ if self.config.server_status_notification() {
+ self.send_notification::<lsp_ext::ServerStatusNotification>(status);
+ }
+ }
+
+ let loop_duration = loop_start.elapsed();
+ if loop_duration > Duration::from_millis(100) && was_quiescent {
+ tracing::warn!("overly long loop turn: {:?}", loop_duration);
+ self.poke_rust_analyzer_developer(format!(
+ "overly long loop turn: {:?}",
+ loop_duration
+ ));
+ }
+ Ok(())
+ }
+
+ /// Records a freshly received request (for latency tracking / cancellation)
+ /// and dispatches it.
+ fn on_new_request(&mut self, request_received: Instant, req: Request) {
+ self.register_request(&req, request_received);
+ self.on_request(req);
+ }
+
+ /// Dispatches an LSP request to its handler.
+ ///
+ /// Rejects everything after shutdown was requested, and answers
+ /// `ContentModified` while the initial workspace load is still in flight
+ /// (to avoid flashing bogus "unresolved" results at the client).
+ fn on_request(&mut self, req: Request) {
+ if self.shutdown_requested {
+ self.respond(lsp_server::Response::new_err(
+ req.id,
+ lsp_server::ErrorCode::InvalidRequest as i32,
+ "Shutdown already requested.".to_owned(),
+ ));
+ return;
+ }
+
+ // Avoid flashing a bunch of unresolved references during initial load.
+ if self.workspaces.is_empty() && !self.is_quiescent() {
+ self.respond(lsp_server::Response::new_err(
+ req.id,
+ lsp_server::ErrorCode::ContentModified as i32,
+ "waiting for cargo metadata or cargo check".to_owned(),
+ ));
+ return;
+ }
+
+ // `on_sync_mut` handlers run on the main thread with `&mut self`;
+ // `on_sync` and `on` run against a snapshot.
+ RequestDispatcher { req: Some(req), global_state: self }
+ .on_sync_mut::<lsp_types::request::Shutdown>(|s, ()| {
+ s.shutdown_requested = true;
+ Ok(())
+ })
+ .on_sync_mut::<lsp_ext::ReloadWorkspace>(handlers::handle_workspace_reload)
+ .on_sync_mut::<lsp_ext::MemoryUsage>(handlers::handle_memory_usage)
+ .on_sync_mut::<lsp_ext::ShuffleCrateGraph>(handlers::handle_shuffle_crate_graph)
+ .on_sync::<lsp_ext::JoinLines>(handlers::handle_join_lines)
+ .on_sync::<lsp_ext::OnEnter>(handlers::handle_on_enter)
+ .on_sync::<lsp_types::request::SelectionRangeRequest>(handlers::handle_selection_range)
+ .on_sync::<lsp_ext::MatchingBrace>(handlers::handle_matching_brace)
+ .on::<lsp_ext::AnalyzerStatus>(handlers::handle_analyzer_status)
+ .on::<lsp_ext::SyntaxTree>(handlers::handle_syntax_tree)
+ .on::<lsp_ext::ViewHir>(handlers::handle_view_hir)
+ .on::<lsp_ext::ViewFileText>(handlers::handle_view_file_text)
+ .on::<lsp_ext::ViewCrateGraph>(handlers::handle_view_crate_graph)
+ .on::<lsp_ext::ViewItemTree>(handlers::handle_view_item_tree)
+ .on::<lsp_ext::ExpandMacro>(handlers::handle_expand_macro)
+ .on::<lsp_ext::ParentModule>(handlers::handle_parent_module)
+ .on::<lsp_ext::Runnables>(handlers::handle_runnables)
+ .on::<lsp_ext::RelatedTests>(handlers::handle_related_tests)
+ .on::<lsp_ext::CodeActionRequest>(handlers::handle_code_action)
+ .on::<lsp_ext::CodeActionResolveRequest>(handlers::handle_code_action_resolve)
+ .on::<lsp_ext::HoverRequest>(handlers::handle_hover)
+ .on::<lsp_ext::ExternalDocs>(handlers::handle_open_docs)
+ .on::<lsp_ext::OpenCargoToml>(handlers::handle_open_cargo_toml)
+ .on::<lsp_ext::MoveItem>(handlers::handle_move_item)
+ .on::<lsp_ext::WorkspaceSymbol>(handlers::handle_workspace_symbol)
+ .on::<lsp_ext::OnTypeFormatting>(handlers::handle_on_type_formatting)
+ .on::<lsp_types::request::DocumentSymbolRequest>(handlers::handle_document_symbol)
+ .on::<lsp_types::request::GotoDefinition>(handlers::handle_goto_definition)
+ .on::<lsp_types::request::GotoDeclaration>(handlers::handle_goto_declaration)
+ .on::<lsp_types::request::GotoImplementation>(handlers::handle_goto_implementation)
+ .on::<lsp_types::request::GotoTypeDefinition>(handlers::handle_goto_type_definition)
+ .on::<lsp_types::request::InlayHintRequest>(handlers::handle_inlay_hints)
+ .on::<lsp_types::request::InlayHintResolveRequest>(handlers::handle_inlay_hints_resolve)
+ .on::<lsp_types::request::Completion>(handlers::handle_completion)
+ .on::<lsp_types::request::ResolveCompletionItem>(handlers::handle_completion_resolve)
+ .on::<lsp_types::request::CodeLensRequest>(handlers::handle_code_lens)
+ .on::<lsp_types::request::CodeLensResolve>(handlers::handle_code_lens_resolve)
+ .on::<lsp_types::request::FoldingRangeRequest>(handlers::handle_folding_range)
+ .on::<lsp_types::request::SignatureHelpRequest>(handlers::handle_signature_help)
+ .on::<lsp_types::request::PrepareRenameRequest>(handlers::handle_prepare_rename)
+ .on::<lsp_types::request::Rename>(handlers::handle_rename)
+ .on::<lsp_types::request::References>(handlers::handle_references)
+ .on::<lsp_types::request::Formatting>(handlers::handle_formatting)
+ .on::<lsp_types::request::RangeFormatting>(handlers::handle_range_formatting)
+ .on::<lsp_types::request::DocumentHighlightRequest>(handlers::handle_document_highlight)
+ .on::<lsp_types::request::CallHierarchyPrepare>(handlers::handle_call_hierarchy_prepare)
+ .on::<lsp_types::request::CallHierarchyIncomingCalls>(
+ handlers::handle_call_hierarchy_incoming,
+ )
+ .on::<lsp_types::request::CallHierarchyOutgoingCalls>(
+ handlers::handle_call_hierarchy_outgoing,
+ )
+ .on::<lsp_types::request::SemanticTokensFullRequest>(
+ handlers::handle_semantic_tokens_full,
+ )
+ .on::<lsp_types::request::SemanticTokensFullDeltaRequest>(
+ handlers::handle_semantic_tokens_full_delta,
+ )
+ .on::<lsp_types::request::SemanticTokensRangeRequest>(
+ handlers::handle_semantic_tokens_range,
+ )
+ .on::<lsp_types::request::WillRenameFiles>(handlers::handle_will_rename_files)
+ .on::<lsp_ext::Ssr>(handlers::handle_ssr)
+ .finish();
+ }
+
+ fn on_notification(&mut self, not: Notification) -> Result<()> {
+ NotificationDispatcher { not: Some(not), global_state: self }
+ .on::<lsp_types::notification::Cancel>(|this, params| {
+ let id: lsp_server::RequestId = match params.id {
+ lsp_types::NumberOrString::Number(id) => id.into(),
+ lsp_types::NumberOrString::String(id) => id.into(),
+ };
+ this.cancel(id);
+ Ok(())
+ })?
+ .on::<lsp_types::notification::WorkDoneProgressCancel>(|_this, _params| {
+ // Just ignore this. It is OK to continue sending progress
+ // notifications for this token, as the client can't know when
+ // we accepted notification.
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidOpenTextDocument>(|this, params| {
+ if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) {
+ let already_exists = this
+ .mem_docs
+ .insert(path.clone(), DocumentData::new(params.text_document.version))
+ .is_err();
+ if already_exists {
+ tracing::error!("duplicate DidOpenTextDocument: {}", path)
+ }
+ this.vfs
+ .write()
+ .0
+ .set_file_contents(path, Some(params.text_document.text.into_bytes()));
+ }
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidChangeTextDocument>(|this, params| {
+ if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) {
+ match this.mem_docs.get_mut(&path) {
+ Some(doc) => {
+ // The version passed in DidChangeTextDocument is the version after all edits are applied
+ // so we should apply it before the vfs is notified.
+ doc.version = params.text_document.version;
+ }
+ None => {
+ tracing::error!("unexpected DidChangeTextDocument: {}", path);
+ return Ok(());
+ }
+ };
+
+ let vfs = &mut this.vfs.write().0;
+ let file_id = vfs.file_id(&path).unwrap();
+ let mut text = String::from_utf8(vfs.file_contents(file_id).to_vec()).unwrap();
+ apply_document_changes(&mut text, params.content_changes);
+
+ vfs.set_file_contents(path, Some(text.into_bytes()));
+ }
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidCloseTextDocument>(|this, params| {
+ if let Ok(path) = from_proto::vfs_path(¶ms.text_document.uri) {
+ if this.mem_docs.remove(&path).is_err() {
+ tracing::error!("orphan DidCloseTextDocument: {}", path);
+ }
+
+ this.semantic_tokens_cache.lock().remove(¶ms.text_document.uri);
+
+ if let Some(path) = path.as_path() {
+ this.loader.handle.invalidate(path.to_path_buf());
+ }
+ }
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidSaveTextDocument>(|this, params| {
- if let Ok(abs_path) = from_proto::abs_path(¶ms.text_document.uri) {
- if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) {
- this.fetch_workspaces_queue
- .request_op(format!("DidSaveTextDocument {}", abs_path.display()));
++ let mut updated = false;
++ if let Ok(vfs_path) = from_proto::vfs_path(¶ms.text_document.uri) {
++ let (vfs, _) = &*this.vfs.read();
++ if let Some(file_id) = vfs.file_id(&vfs_path) {
++ let analysis = this.analysis_host.analysis();
++ // Crates containing or depending on the saved file
++ let crate_ids: Vec<_> = analysis
++ .crate_for(file_id)?
++ .into_iter()
++ .flat_map(|id| {
++ this.analysis_host
++ .raw_database()
++ .crate_graph()
++ .transitive_rev_deps(id)
++ })
++ .sorted()
++ .unique()
++ .collect();
++
++ let crate_root_paths: Vec<_> = crate_ids
++ .iter()
++ .filter_map(|&crate_id| {
++ analysis
++ .crate_root(crate_id)
++ .map(|file_id| {
++ vfs.file_path(file_id).as_path().map(ToOwned::to_owned)
++ })
++ .transpose()
++ })
++ .collect::<ide::Cancellable<_>>()?;
++ let crate_root_paths: Vec<_> =
++ crate_root_paths.iter().map(Deref::deref).collect();
++
++ // Find all workspaces that have at least one target containing the saved file
++ let workspace_ids =
++ this.workspaces.iter().enumerate().filter(|(_, ws)| match ws {
++ project_model::ProjectWorkspace::Cargo { cargo, .. } => {
++ cargo.packages().any(|pkg| {
++ cargo[pkg].targets.iter().any(|&it| {
++ crate_root_paths.contains(&cargo[it].root.as_path())
++ })
++ })
++ }
++ project_model::ProjectWorkspace::Json { project, .. } => project
++ .crates()
++ .any(|(c, _)| crate_ids.iter().any(|&crate_id| crate_id == c)),
++ project_model::ProjectWorkspace::DetachedFiles { .. } => false,
++ });
++
++ // Find and trigger corresponding flychecks
++ for flycheck in &this.flycheck {
++ for (id, _) in workspace_ids.clone() {
++ if id == flycheck.id() {
++ updated = true;
++ flycheck.update();
++ continue;
++ }
++ }
++ }
++ }
++ if let Some(abs_path) = vfs_path.as_path() {
++ if reload::should_refresh_for_change(&abs_path, ChangeKind::Modify) {
++ this.fetch_workspaces_queue
++ .request_op(format!("DidSaveTextDocument {}", abs_path.display()));
++ }
++ }
+ }
++ if !updated {
++ for flycheck in &this.flycheck {
++ flycheck.update();
+ }
+ }
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidChangeConfiguration>(|this, _params| {
+ // As stated in https://github.com/microsoft/language-server-protocol/issues/676,
+ // this notification's parameters should be ignored and the actual config queried separately.
+ this.send_request::<lsp_types::request::WorkspaceConfiguration>(
+ lsp_types::ConfigurationParams {
+ items: vec![lsp_types::ConfigurationItem {
+ scope_uri: None,
+ section: Some("rust-analyzer".to_string()),
+ }],
+ },
+ |this, resp| {
+ tracing::debug!("config update response: '{:?}", resp);
+ let lsp_server::Response { error, result, .. } = resp;
+
+ match (error, result) {
+ (Some(err), _) => {
+ tracing::error!("failed to fetch the server settings: {:?}", err)
+ }
+ (None, Some(mut configs)) => {
+ if let Some(json) = configs.get_mut(0) {
+ // Note that json can be null according to the spec if the client can't
+ // provide a configuration. This is handled in Config::update below.
+ let mut config = Config::clone(&*this.config);
+ if let Err(error) = config.update(json.take()) {
+ this.show_message(
+ lsp_types::MessageType::WARNING,
+ error.to_string(),
+ );
+ }
+ this.update_configuration(config);
+ }
+ }
+ (None, None) => tracing::error!(
+ "received empty server settings response from the client"
+ ),
+ }
+ },
+ );
+
+ Ok(())
+ })?
+ .on::<lsp_types::notification::DidChangeWatchedFiles>(|this, params| {
+ for change in params.changes {
+ if let Ok(path) = from_proto::abs_path(&change.uri) {
+ this.loader.handle.invalidate(path);
+ }
+ }
+ Ok(())
+ })?
+ .finish();
+ Ok(())
+ }
+
+ /// Recomputes native diagnostics for all open documents on the task pool;
+ /// results come back to the main loop as `Task::Diagnostics`.
+ fn update_diagnostics(&mut self) {
+ // Only open (in-memory) documents are subscribed to diagnostics.
+ let subscriptions = self
+ .mem_docs
+ .iter()
+ .map(|path| self.vfs.read().0.file_id(path).unwrap())
+ .collect::<Vec<_>>();
+
+ tracing::trace!("updating notifications for {:?}", subscriptions);
+
+ let snapshot = self.snapshot();
+ self.task_pool.handle.spawn(move || {
+ let diagnostics = subscriptions
+ .into_iter()
+ .filter_map(|file_id| {
+ // `ok()` drops files whose diagnostics computation was cancelled.
+ handlers::publish_diagnostics(&snapshot, file_id)
+ .ok()
+ .map(|diags| (file_id, diags))
+ })
+ .collect::<Vec<_>>();
+ Task::Diagnostics(diagnostics)
+ })
+ }
+}
--- /dev/null
- self.show_and_log_error(
- "rust-analyzer failed to run build scripts".to_string(),
- Some(error),
- );
+//! Project loading & configuration updates.
+//!
+//! This is quite tricky. The main problem is time and changes -- there's no
+//! fixed "project" rust-analyzer is working with, "current project" is itself
+//! mutable state. For example, when the user edits `Cargo.toml` by adding a new
+//! dependency, project model changes. What's more, switching project model is
+//! not instantaneous -- it takes time to run `cargo metadata` and (for proc
+//! macros) `cargo check`.
+//!
+//! The main guiding principle here is, as elsewhere in rust-analyzer,
+//! robustness. We try not to assume that the project model exists or is
+//! correct. Instead, we try to provide a best-effort service. Even if the
+//! project is currently loading and we don't have a full project model, we
+//! still want to respond to various requests.
+use std::{mem, sync::Arc};
+
+use flycheck::{FlycheckConfig, FlycheckHandle};
+use hir::db::DefDatabase;
+use ide::Change;
+use ide_db::base_db::{
+ CrateGraph, Env, ProcMacro, ProcMacroExpander, ProcMacroExpansionError, ProcMacroKind,
+ ProcMacroLoadResult, SourceRoot, VfsPath,
+};
+use proc_macro_api::{MacroDylib, ProcMacroServer};
+use project_model::{ProjectWorkspace, WorkspaceBuildScripts};
+use syntax::SmolStr;
+use vfs::{file_set::FileSetConfig, AbsPath, AbsPathBuf, ChangeKind};
+
+use crate::{
+ config::{Config, FilesWatcher, LinkedProject},
+ global_state::GlobalState,
+ lsp_ext,
+ main_loop::Task,
+ op_queue::Cause,
+};
+
+/// Progress states of the background workspace (`cargo metadata`) fetch.
+#[derive(Debug)]
+pub(crate) enum ProjectWorkspaceProgress {
+ Begin,
+ Report(String),
+ End(Vec<anyhow::Result<ProjectWorkspace>>),
+}
+
+/// Progress states of the background build-script (`cargo check`) run.
+#[derive(Debug)]
+pub(crate) enum BuildDataProgress {
+ Begin,
+ Report(String),
+ // The workspaces the scripts were run for, paired with per-workspace results.
+ End((Arc<Vec<ProjectWorkspace>>, Vec<anyhow::Result<WorkspaceBuildScripts>>)),
+}
+
+impl GlobalState {
+ /// True when no workspace fetch, build-script run, or VFS scan is in
+ /// flight — i.e. analysis results are as up to date as they can be.
+ pub(crate) fn is_quiescent(&self) -> bool {
+ !(self.fetch_workspaces_queue.op_in_progress()
+ || self.fetch_build_data_queue.op_in_progress()
+ || self.vfs_progress_config_version < self.vfs_config_version
+ || self.vfs_progress_n_done < self.vfs_progress_n_total)
+ }
+
+ /// Installs a new configuration and reacts to the parts that changed:
+ /// LRU capacity, linked projects (triggers a workspace refetch), flycheck
+ /// settings, and proc-attr-macro expansion.
+ pub(crate) fn update_configuration(&mut self, config: Config) {
+ let _p = profile::span("GlobalState::update_configuration");
+ let old_config = mem::replace(&mut self.config, Arc::new(config));
+ if self.config.lru_capacity() != old_config.lru_capacity() {
+ self.analysis_host.update_lru_capacity(self.config.lru_capacity());
+ }
+ // A project change implies a flycheck reload anyway, hence `else if`.
+ if self.config.linked_projects() != old_config.linked_projects() {
+ self.fetch_workspaces_queue.request_op("linked projects changed".to_string())
+ } else if self.config.flycheck() != old_config.flycheck() {
+ self.reload_flycheck();
+ }
+
+ if self.analysis_host.raw_database().enable_proc_attr_macros()
+ != self.config.expand_proc_attr_macros()
+ {
+ self.analysis_host
+ .raw_database_mut()
+ .set_enable_proc_attr_macros(self.config.expand_proc_attr_macros());
+ }
+ }
+
+ /// Computes the server-status payload shown to the client.
+ ///
+ /// Later checks override earlier ones, so an `Error` (workspace fetch
+ /// failure) wins over any `Warning`.
+ pub(crate) fn current_status(&self) -> lsp_ext::ServerStatusParams {
+ let mut status = lsp_ext::ServerStatusParams {
+ health: lsp_ext::Health::Ok,
+ quiescent: self.is_quiescent(),
+ message: None,
+ };
+
+ if self.proc_macro_changed {
+ status.health = lsp_ext::Health::Warning;
+ status.message =
+ Some("Reload required due to source changes of a procedural macro.".into())
+ }
+ if let Err(_) = self.fetch_build_data_error() {
+ status.health = lsp_ext::Health::Warning;
+ status.message =
+ Some("Failed to run build scripts of some packages, check the logs.".to_string());
+ }
+ // Autoreload disabled + a pending fetch means the user must reload manually.
+ if !self.config.cargo_autoreload()
+ && self.is_quiescent()
+ && self.fetch_workspaces_queue.op_requested()
+ {
+ status.health = lsp_ext::Health::Warning;
+ status.message = Some("Workspace reload required".to_string())
+ }
+
+ if let Err(error) = self.fetch_workspace_error() {
+ status.health = lsp_ext::Health::Error;
+ status.message = Some(error)
+ }
+ status
+ }
+
+ /// Spawns a background task that loads every linked project (plus any
+ /// detached files) and streams `Task::FetchWorkspace` progress back to the
+ /// main loop.
+ pub(crate) fn fetch_workspaces(&mut self, cause: Cause) {
+ tracing::info!(%cause, "will fetch workspaces");
+
+ self.task_pool.handle.spawn_with_sender({
+ // Snapshot config values so the closure owns everything it needs.
+ let linked_projects = self.config.linked_projects();
+ let detached_files = self.config.detached_files().to_vec();
+ let cargo_config = self.config.cargo();
+
+ move |sender| {
+ let progress = {
+ let sender = sender.clone();
+ move |msg| {
+ sender
+ .send(Task::FetchWorkspace(ProjectWorkspaceProgress::Report(msg)))
+ .unwrap()
+ }
+ };
+
+ sender.send(Task::FetchWorkspace(ProjectWorkspaceProgress::Begin)).unwrap();
+
+ let mut workspaces = linked_projects
+ .iter()
+ .map(|project| match project {
+ LinkedProject::ProjectManifest(manifest) => {
+ project_model::ProjectWorkspace::load(
+ manifest.clone(),
+ &cargo_config,
+ &progress,
+ )
+ }
+ LinkedProject::InlineJsonProject(it) => {
+ project_model::ProjectWorkspace::load_inline(
+ it.clone(),
+ cargo_config.target.as_deref(),
+ )
+ }
+ })
+ .collect::<Vec<_>>();
+
+ if !detached_files.is_empty() {
+ workspaces
+ .push(project_model::ProjectWorkspace::load_detached_files(detached_files));
+ }
+
+ tracing::info!("did fetch workspaces {:?}", workspaces);
+ sender
+ .send(Task::FetchWorkspace(ProjectWorkspaceProgress::End(workspaces)))
+ .unwrap();
+ }
+ });
+ }
+
+ /// Spawns a background task that runs build scripts for every current
+ /// workspace and streams `Task::FetchBuildData` progress back to the main
+ /// loop.
+ pub(crate) fn fetch_build_data(&mut self, cause: Cause) {
+ tracing::info!(%cause, "will fetch build data");
+ // The workspace set is sent along with the results so the receiver can
+ // detect that build data belongs to a stale workspace (Arc identity).
+ let workspaces = Arc::clone(&self.workspaces);
+ let config = self.config.cargo();
+ self.task_pool.handle.spawn_with_sender(move |sender| {
+ sender.send(Task::FetchBuildData(BuildDataProgress::Begin)).unwrap();
+
+ let progress = {
+ let sender = sender.clone();
+ move |msg| {
+ sender.send(Task::FetchBuildData(BuildDataProgress::Report(msg))).unwrap()
+ }
+ };
+ let mut res = Vec::new();
+ for ws in workspaces.iter() {
+ res.push(ws.run_build_scripts(&config, &progress));
+ }
+ sender.send(Task::FetchBuildData(BuildDataProgress::End((workspaces, res)))).unwrap();
+ });
+ }
+
+ pub(crate) fn switch_workspaces(&mut self, cause: Cause) {
+ let _p = profile::span("GlobalState::switch_workspaces");
+ tracing::info!(%cause, "will switch workspaces");
+
+ if let Err(error_message) = self.fetch_workspace_error() {
+ self.show_and_log_error(error_message, None);
+ if !self.workspaces.is_empty() {
+ // It only makes sense to switch to a partially broken workspace
+ // if we don't have any workspace at all yet.
+ return;
+ }
+ }
+
+ if let Err(error) = self.fetch_build_data_error() {
- tracing::info!("Found a cargo workspace...");
++ self.show_and_log_error("failed to run build scripts".to_string(), Some(error));
+ }
+
+ let workspaces = self
+ .fetch_workspaces_queue
+ .last_op_result()
+ .iter()
+ .filter_map(|res| res.as_ref().ok().cloned())
+ .collect::<Vec<_>>();
+
+ fn eq_ignore_build_data<'a>(
+ left: &'a ProjectWorkspace,
+ right: &'a ProjectWorkspace,
+ ) -> bool {
+ let key = |p: &'a ProjectWorkspace| match p {
+ ProjectWorkspace::Cargo {
+ cargo,
+ sysroot,
+ rustc,
+ rustc_cfg,
+ cfg_overrides,
+
+ build_scripts: _,
+ } => Some((cargo, sysroot, rustc, rustc_cfg, cfg_overrides)),
+ _ => None,
+ };
+ match (key(left), key(right)) {
+ (Some(lk), Some(rk)) => lk == rk,
+ _ => left == right,
+ }
+ }
+
+ let same_workspaces = workspaces.len() == self.workspaces.len()
+ && workspaces
+ .iter()
+ .zip(self.workspaces.iter())
+ .all(|(l, r)| eq_ignore_build_data(l, r));
+
+ if same_workspaces {
+ let (workspaces, build_scripts) = self.fetch_build_data_queue.last_op_result();
+ if Arc::ptr_eq(workspaces, &self.workspaces) {
+ tracing::debug!("set build scripts to workspaces");
+
+ let workspaces = workspaces
+ .iter()
+ .cloned()
+ .zip(build_scripts)
+ .map(|(mut ws, bs)| {
+ ws.set_build_scripts(bs.as_ref().ok().cloned().unwrap_or_default());
+ ws
+ })
+ .collect::<Vec<_>>();
+
+ // Workspaces are the same, but we've updated build data.
+ self.workspaces = Arc::new(workspaces);
+ } else {
+ tracing::info!("build scripts do not match the version of the active workspace");
+ // Current build scripts do not match the version of the active
+ // workspace, so there's nothing for us to update.
+ return;
+ }
+ } else {
+ tracing::debug!("abandon build scripts for workspaces");
+
+ // Here, we completely changed the workspace (Cargo.toml edit), so
+ // we don't care about build-script results, they are stale.
+ self.workspaces = Arc::new(workspaces)
+ }
+
+ if let FilesWatcher::Client = self.config.files().watcher {
+ let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions {
+ watchers: self
+ .workspaces
+ .iter()
+ .flat_map(|ws| ws.to_roots())
+ .filter(|it| it.is_local)
+ .flat_map(|root| {
+ root.include.into_iter().flat_map(|it| {
+ [
+ format!("{}/**/*.rs", it.display()),
+ format!("{}/**/Cargo.toml", it.display()),
+ format!("{}/**/Cargo.lock", it.display()),
+ ]
+ })
+ })
+ .map(|glob_pattern| lsp_types::FileSystemWatcher { glob_pattern, kind: None })
+ .collect(),
+ };
+ let registration = lsp_types::Registration {
+ id: "workspace/didChangeWatchedFiles".to_string(),
+ method: "workspace/didChangeWatchedFiles".to_string(),
+ register_options: Some(serde_json::to_value(registration_options).unwrap()),
+ };
+ self.send_request::<lsp_types::request::RegisterCapability>(
+ lsp_types::RegistrationParams { registrations: vec![registration] },
+ |_, _| (),
+ );
+ }
+
+ let mut change = Change::new();
+
+ let files_config = self.config.files();
+ let project_folders = ProjectFolders::new(&self.workspaces, &files_config.exclude);
+
+ let standalone_server_name =
+ format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX);
+
+ if self.proc_macro_clients.is_empty() {
+ if let Some((path, args)) = self.config.proc_macro_srv() {
++ tracing::info!("Spawning proc-macro servers");
+ self.proc_macro_clients = self
+ .workspaces
+ .iter()
+ .map(|ws| {
+ let mut args = args.clone();
+ let mut path = path.clone();
+
+ if let ProjectWorkspace::Cargo { sysroot, .. } = ws {
- tracing::info!("Found a cargo workspace with a sysroot...");
++ tracing::debug!("Found a cargo workspace...");
+ if let Some(sysroot) = sysroot.as_ref() {
- tracing::info!(
++ tracing::debug!("Found a cargo workspace with a sysroot...");
+ let server_path =
+ sysroot.root().join("libexec").join(&standalone_server_name);
+ if std::fs::metadata(&server_path).is_ok() {
- tracing::info!(
++ tracing::debug!(
+ "And the server exists at {}",
+ server_path.display()
+ );
+ path = server_path;
+ args = vec![];
+ } else {
- tracing::info!(
- "Using proc-macro server at {} with args {:?}",
- path.display(),
- args
- );
++ tracing::debug!(
+ "And the server does not exist at {}",
+ server_path.display()
+ );
+ }
+ }
+ }
+
- "Failed to run proc_macro_srv from path {}, error: {:?}",
++ tracing::info!(?args, "Using proc-macro server at {}", path.display(),);
+ ProcMacroServer::spawn(path.clone(), args.clone()).map_err(|err| {
+ let error = format!(
- self.diagnostics.clear_check();
++ "Failed to run proc-macro server from path {}, error: {:?}",
+ path.display(),
+ err
+ );
+ tracing::error!(error);
+ error
+ })
+ })
+ .collect();
+ }
+ }
+
+ let watch = match files_config.watcher {
+ FilesWatcher::Client => vec![],
+ FilesWatcher::Server => project_folders.watch,
+ };
+ self.vfs_config_version += 1;
+ self.loader.handle.set_config(vfs::loader::Config {
+ load: project_folders.load,
+ watch,
+ version: self.vfs_config_version,
+ });
+
+ // Create crate graph from all the workspaces
+ let crate_graph = {
+ let dummy_replacements = self.config.dummy_replacements();
+
+ let vfs = &mut self.vfs.write().0;
+ let loader = &mut self.loader;
+ let mem_docs = &self.mem_docs;
+ let mut load = move |path: &AbsPath| {
+ let _p = profile::span("GlobalState::load");
+ let vfs_path = vfs::VfsPath::from(path.to_path_buf());
+ if !mem_docs.contains(&vfs_path) {
+ let contents = loader.handle.load_sync(path);
+ vfs.set_file_contents(vfs_path.clone(), contents);
+ }
+ let res = vfs.file_id(&vfs_path);
+ if res.is_none() {
+ tracing::warn!("failed to load {}", path.display())
+ }
+ res
+ };
+
+ let mut crate_graph = CrateGraph::default();
+ for (idx, ws) in self.workspaces.iter().enumerate() {
+ let proc_macro_client = match self.proc_macro_clients.get(idx) {
+ Some(res) => res.as_ref().map_err(|e| &**e),
+ None => Err("Proc macros are disabled"),
+ };
+ let mut load_proc_macro = move |crate_name: &str, path: &AbsPath| {
+ load_proc_macro(
+ proc_macro_client,
+ path,
+ dummy_replacements.get(crate_name).map(|v| &**v).unwrap_or_default(),
+ )
+ };
+ crate_graph.extend(ws.to_crate_graph(&mut load_proc_macro, &mut load));
+ }
+ crate_graph
+ };
+ change.set_crate_graph(crate_graph);
+
+ self.source_root_config = project_folders.source_root_config;
+
+ self.analysis_host.apply_change(change);
+ self.process_changes();
+ self.reload_flycheck();
+ tracing::info!("did switch workspaces");
+ }
+
+ fn fetch_workspace_error(&self) -> Result<(), String> {
+ let mut buf = String::new();
+
+ for ws in self.fetch_workspaces_queue.last_op_result() {
+ if let Err(err) = ws {
+ stdx::format_to!(buf, "rust-analyzer failed to load workspace: {:#}\n", err);
+ }
+ }
+
+ if buf.is_empty() {
+ return Ok(());
+ }
+
+ Err(buf)
+ }
+
+ fn fetch_build_data_error(&self) -> Result<(), String> {
+ let mut buf = String::new();
+
+ for ws in &self.fetch_build_data_queue.last_op_result().1 {
+ match ws {
+ Ok(data) => match data.error() {
+ Some(stderr) => stdx::format_to!(buf, "{:#}\n", stderr),
+ _ => (),
+ },
+ // io errors
+ Err(err) => stdx::format_to!(buf, "{:#}\n", err),
+ }
+ }
+
+ if buf.is_empty() {
+ Ok(())
+ } else {
+ Err(buf)
+ }
+ }
+
+ fn reload_flycheck(&mut self) {
+ let _p = profile::span("GlobalState::reload_flycheck");
+ let config = match self.config.flycheck() {
+ Some(it) => it,
+ None => {
+ self.flycheck = Vec::new();
- Arc::new(DummyExpander)
++ self.diagnostics.clear_check_all();
+ return;
+ }
+ };
+
+ let sender = self.flycheck_sender.clone();
+ self.flycheck = self
+ .workspaces
+ .iter()
+ .enumerate()
+ .filter_map(|(id, w)| match w {
+ ProjectWorkspace::Cargo { cargo, .. } => Some((id, cargo.workspace_root())),
+ ProjectWorkspace::Json { project, .. } => {
+ // Enable flychecks for json projects if a custom flycheck command was supplied
+ // in the workspace configuration.
+ match config {
+ FlycheckConfig::CustomCommand { .. } => Some((id, project.path())),
+ _ => None,
+ }
+ }
+ ProjectWorkspace::DetachedFiles { .. } => None,
+ })
+ .map(|(id, root)| {
+ let sender = sender.clone();
+ FlycheckHandle::spawn(
+ id,
+ Box::new(move |msg| sender.send(msg).unwrap()),
+ config.clone(),
+ root.to_path_buf(),
+ )
+ })
+ .collect();
+ }
+}
+
+#[derive(Default)]
+pub(crate) struct ProjectFolders {
+ pub(crate) load: Vec<vfs::loader::Entry>,
+ pub(crate) watch: Vec<usize>,
+ pub(crate) source_root_config: SourceRootConfig,
+}
+
+impl ProjectFolders {
+ pub(crate) fn new(
+ workspaces: &[ProjectWorkspace],
+ global_excludes: &[AbsPathBuf],
+ ) -> ProjectFolders {
+ let mut res = ProjectFolders::default();
+ let mut fsc = FileSetConfig::builder();
+ let mut local_filesets = vec![];
+
+ for root in workspaces.iter().flat_map(|ws| ws.to_roots()) {
+ let file_set_roots: Vec<VfsPath> =
+ root.include.iter().cloned().map(VfsPath::from).collect();
+
+ let entry = {
+ let mut dirs = vfs::loader::Directories::default();
+ dirs.extensions.push("rs".into());
+ dirs.include.extend(root.include);
+ dirs.exclude.extend(root.exclude);
+ for excl in global_excludes {
+ if dirs
+ .include
+ .iter()
+ .any(|incl| incl.starts_with(excl) || excl.starts_with(incl))
+ {
+ dirs.exclude.push(excl.clone());
+ }
+ }
+
+ vfs::loader::Entry::Directories(dirs)
+ };
+
+ if root.is_local {
+ res.watch.push(res.load.len());
+ }
+ res.load.push(entry);
+
+ if root.is_local {
+ local_filesets.push(fsc.len());
+ }
+ fsc.add_file_set(file_set_roots)
+ }
+
+ let fsc = fsc.build();
+ res.source_root_config = SourceRootConfig { fsc, local_filesets };
+
+ res
+ }
+}
+
+#[derive(Default, Debug)]
+pub(crate) struct SourceRootConfig {
+ pub(crate) fsc: FileSetConfig,
+ pub(crate) local_filesets: Vec<usize>,
+}
+
+impl SourceRootConfig {
+ pub(crate) fn partition(&self, vfs: &vfs::Vfs) -> Vec<SourceRoot> {
+ let _p = profile::span("SourceRootConfig::partition");
+ self.fsc
+ .partition(vfs)
+ .into_iter()
+ .enumerate()
+ .map(|(idx, file_set)| {
+ let is_local = self.local_filesets.contains(&idx);
+ if is_local {
+ SourceRoot::new_local(file_set)
+ } else {
+ SourceRoot::new_library(file_set)
+ }
+ })
+ .collect()
+ }
+}
+
+/// Load the proc-macros for the given lib path, replacing all expanders whose names are in `dummy_replace`
+/// with an identity dummy expander.
+pub(crate) fn load_proc_macro(
+ server: Result<&ProcMacroServer, &str>,
+ path: &AbsPath,
+ dummy_replace: &[Box<str>],
+) -> ProcMacroLoadResult {
+ let res: Result<Vec<_>, String> = (|| {
+ let dylib = MacroDylib::new(path.to_path_buf())
+ .map_err(|io| format!("Proc-macro dylib loading failed: {io}"))?;
+ let server = server.map_err(ToOwned::to_owned)?;
+ let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
+ if vec.is_empty() {
+ return Err("proc macro library returned no proc macros".to_string());
+ }
+ Ok(vec
+ .into_iter()
+ .map(|expander| expander_to_proc_macro(expander, dummy_replace))
+ .collect())
+ })();
+ return match res {
+ Ok(proc_macros) => {
+ tracing::info!(
+ "Loaded proc-macros for {}: {:?}",
+ path.display(),
+ proc_macros.iter().map(|it| it.name.clone()).collect::<Vec<_>>()
+ );
+ Ok(proc_macros)
+ }
+ Err(e) => {
+ tracing::warn!("proc-macro loading for {} failed: {e}", path.display());
+ Err(e)
+ }
+ };
+
+ fn expander_to_proc_macro(
+ expander: proc_macro_api::ProcMacro,
+ dummy_replace: &[Box<str>],
+ ) -> ProcMacro {
+ let name = SmolStr::from(expander.name());
+ let kind = match expander.kind() {
+ proc_macro_api::ProcMacroKind::CustomDerive => ProcMacroKind::CustomDerive,
+ proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike,
+ proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr,
+ };
+ let expander: Arc<dyn ProcMacroExpander> =
+ if dummy_replace.iter().any(|replace| &**replace == name) {
- /// Dummy identity expander, used for proc-macros that are deliberately ignored by the user.
++ match kind {
++ ProcMacroKind::Attr => Arc::new(IdentityExpander),
++ _ => Arc::new(EmptyExpander),
++ }
+ } else {
+ Arc::new(Expander(expander))
+ };
+ ProcMacro { name, kind, expander }
+ }
+
+ #[derive(Debug)]
+ struct Expander(proc_macro_api::ProcMacro);
+
+ impl ProcMacroExpander for Expander {
+ fn expand(
+ &self,
+ subtree: &tt::Subtree,
+ attrs: Option<&tt::Subtree>,
+ env: &Env,
+ ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+ let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
+ match self.0.expand(subtree, attrs, env) {
+ Ok(Ok(subtree)) => Ok(subtree),
+ Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),
+ Err(err) => Err(ProcMacroExpansionError::System(err.to_string())),
+ }
+ }
+ }
+
- struct DummyExpander;
++ /// Dummy identity expander, used for attribute proc-macros that are deliberately ignored by the user.
+ #[derive(Debug)]
- impl ProcMacroExpander for DummyExpander {
++ struct IdentityExpander;
+
- let file_name = path.file_name().unwrap_or_default();
++ impl ProcMacroExpander for IdentityExpander {
+ fn expand(
+ &self,
+ subtree: &tt::Subtree,
+ _: Option<&tt::Subtree>,
+ _: &Env,
+ ) -> Result<tt::Subtree, ProcMacroExpansionError> {
+ Ok(subtree.clone())
+ }
+ }
++
++ /// Empty expander, used for proc-macros that are deliberately ignored by the user.
++ #[derive(Debug)]
++ struct EmptyExpander;
++
++ impl ProcMacroExpander for EmptyExpander {
++ fn expand(
++ &self,
++ _: &tt::Subtree,
++ _: Option<&tt::Subtree>,
++ _: &Env,
++ ) -> Result<tt::Subtree, ProcMacroExpansionError> {
++ Ok(tt::Subtree::default())
++ }
++ }
+}
+
+pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool {
+ const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"];
+ const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"];
- if file_name == "Cargo.toml" || file_name == "Cargo.lock" {
+
- if (file_name == "config.toml" || file_name == "config")
- && path.parent().map(|parent| parent.as_ref().ends_with(".cargo")) == Some(true)
- {
- return true;
- }
- return false;
++ let file_name = match path.file_name().unwrap_or_default().to_str() {
++ Some(it) => it,
++ None => return false,
++ };
++
++ if let "Cargo.toml" | "Cargo.lock" = file_name {
+ return true;
+ }
+ if change_kind == ChangeKind::Modify {
+ return false;
+ }
++
++ // .cargo/config{.toml}
+ if path.extension().unwrap_or_default() != "rs" {
++ let is_cargo_config = matches!(file_name, "config.toml" | "config")
++ && path.parent().map(|parent| parent.as_ref().ends_with(".cargo")).unwrap_or(false);
++ return is_cargo_config;
+ }
++
+ if IMPLICIT_TARGET_FILES.iter().any(|it| path.as_ref().ends_with(it)) {
+ return true;
+ }
+ let parent = match path.parent() {
+ Some(it) => it,
+ None => return false,
+ };
+ if IMPLICIT_TARGET_DIRS.iter().any(|it| parent.as_ref().ends_with(it)) {
+ return true;
+ }
+ if file_name == "main.rs" {
+ let grand_parent = match parent.parent() {
+ Some(it) => it,
+ None => return false,
+ };
+ if IMPLICIT_TARGET_DIRS.iter().any(|it| grand_parent.as_ref().ends_with(it)) {
+ return true;
+ }
+ }
+ false
+}
--- /dev/null
- sh.set_var("RUSTUP_TOOLCHAIN", "stable");
+use std::{
+ collections::HashSet,
+ path::{Path, PathBuf},
+};
+
+use xshell::Shell;
+
+#[cfg(not(feature = "in-rust-tree"))]
+use xshell::cmd;
+
+#[cfg(not(feature = "in-rust-tree"))]
+#[test]
+fn check_code_formatting() {
+ let sh = &Shell::new().unwrap();
+ sh.change_dir(sourcegen::project_root());
- let out = cmd!(sh, "rustfmt --version").read().unwrap();
+
- let res = cmd!(sh, "cargo fmt -- --check").run();
++ let out = cmd!(sh, "rustup run stable rustfmt --version").read().unwrap();
+ if !out.contains("stable") {
+ panic!(
+ "Failed to run rustfmt from toolchain 'stable'. \
+ Please run `rustup component add rustfmt --toolchain stable` to install it.",
+ )
+ }
+
- let _ = cmd!(sh, "cargo fmt").run();
++ let res = cmd!(sh, "rustup run stable cargo fmt -- --check").run();
+ if res.is_err() {
++ let _ = cmd!(sh, "rustup run stable cargo fmt").run();
+ }
+ res.unwrap()
+}
+
+#[test]
+fn check_lsp_extensions_docs() {
+ let sh = &Shell::new().unwrap();
+
+ let expected_hash = {
+ let lsp_ext_rs = sh
+ .read_file(sourcegen::project_root().join("crates/rust-analyzer/src/lsp_ext.rs"))
+ .unwrap();
+ stable_hash(lsp_ext_rs.as_str())
+ };
+
+ let actual_hash = {
+ let lsp_extensions_md =
+ sh.read_file(sourcegen::project_root().join("docs/dev/lsp-extensions.md")).unwrap();
+ let text = lsp_extensions_md
+ .lines()
+ .find_map(|line| line.strip_prefix("lsp_ext.rs hash:"))
+ .unwrap()
+ .trim();
+ u64::from_str_radix(text, 16).unwrap()
+ };
+
+ if actual_hash != expected_hash {
+ panic!(
+ "
+lsp_ext.rs was changed without touching lsp-extensions.md.
+
+Expected hash: {:x}
+Actual hash: {:x}
+
+Please adjust docs/dev/lsp-extensions.md.
+",
+ expected_hash, actual_hash
+ )
+ }
+}
+
+#[test]
+fn files_are_tidy() {
+ let sh = &Shell::new().unwrap();
+
+ let files = sourcegen::list_files(&sourcegen::project_root().join("crates"));
+
+ let mut tidy_docs = TidyDocs::default();
+ let mut tidy_marks = TidyMarks::default();
+ for path in files {
+ let extension = path.extension().unwrap_or_default().to_str().unwrap_or_default();
+ match extension {
+ "rs" => {
+ let text = sh.read_file(&path).unwrap();
+ check_todo(&path, &text);
+ check_dbg(&path, &text);
+ check_test_attrs(&path, &text);
+ check_trailing_ws(&path, &text);
+ deny_clippy(&path, &text);
+ tidy_docs.visit(&path, &text);
+ tidy_marks.visit(&path, &text);
+ }
+ "toml" => {
+ let text = sh.read_file(&path).unwrap();
+ check_cargo_toml(&path, text);
+ }
+ _ => (),
+ }
+ }
+
+ tidy_docs.finish();
+ tidy_marks.finish();
+}
+
+fn check_cargo_toml(path: &Path, text: String) {
+ let mut section = None;
+ for (line_no, text) in text.lines().enumerate() {
+ let text = text.trim();
+ if text.starts_with('[') {
+ if !text.ends_with(']') {
+ panic!(
+ "\nplease don't add comments or trailing whitespace in section lines.\n\
+ {}:{}\n",
+ path.display(),
+ line_no + 1
+ )
+ }
+ section = Some(text);
+ continue;
+ }
+ let text: String = text.split_whitespace().collect();
+ if !text.contains("path=") {
+ continue;
+ }
+ match section {
+ Some(s) if s.contains("dev-dependencies") => {
+ if text.contains("version") {
+ panic!(
+ "\ncargo internal dev-dependencies should not have a version.\n\
+ {}:{}\n",
+ path.display(),
+ line_no + 1
+ );
+ }
+ }
+ Some(s) if s.contains("dependencies") => {
+ if !text.contains("version") {
+ panic!(
+ "\ncargo internal dependencies should have a version.\n\
+ {}:{}\n",
+ path.display(),
+ line_no + 1
+ );
+ }
+ }
+ _ => {}
+ }
+ }
+}
+
+fn deny_clippy(path: &Path, text: &str) {
+ let ignore = &[
+ // The documentation in string literals may contain anything for its own purposes
+ "ide-db/src/generated/lints.rs",
+ // The tests test clippy lint hovers
+ "ide/src/hover/tests.rs",
+ // The tests test clippy lint completions
+ "ide-completion/src/tests/attribute.rs",
+ ];
+ if ignore.iter().any(|p| path.ends_with(p)) {
+ return;
+ }
+
+ if text.contains("\u{61}llow(clippy") {
+ panic!(
+ "\n\nallowing lints is forbidden: {}.
+rust-analyzer intentionally doesn't check clippy on CI.
+You can allow lint globally via `xtask clippy`.
+See https://github.com/rust-lang/rust-clippy/issues/5537 for discussion.
+
+",
+ path.display()
+ )
+ }
+}
+
+#[cfg(not(feature = "in-rust-tree"))]
+#[test]
+fn check_licenses() {
+ let sh = &Shell::new().unwrap();
+
+ let expected = "
+0BSD OR MIT OR Apache-2.0
+Apache-2.0
+Apache-2.0 OR BSL-1.0
+Apache-2.0 OR MIT
+Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT
+Apache-2.0/MIT
+BSD-3-Clause
+BlueOak-1.0.0 OR MIT OR Apache-2.0
+CC0-1.0 OR Artistic-2.0
+ISC
+MIT
+MIT / Apache-2.0
+MIT OR Apache-2.0
+MIT OR Apache-2.0 OR Zlib
+MIT OR Zlib OR Apache-2.0
+MIT/Apache-2.0
+Unlicense/MIT
+Zlib OR Apache-2.0 OR MIT
+"
+ .lines()
+ .filter(|it| !it.is_empty())
+ .collect::<Vec<_>>();
+
+ let meta = cmd!(sh, "cargo metadata --format-version 1").read().unwrap();
+ let mut licenses = meta
+ .split(|c| c == ',' || c == '{' || c == '}')
+ .filter(|it| it.contains(r#""license""#))
+ .map(|it| it.trim())
+ .map(|it| it[r#""license":"#.len()..].trim_matches('"'))
+ .collect::<Vec<_>>();
+ licenses.sort_unstable();
+ licenses.dedup();
+ if licenses != expected {
+ let mut diff = String::new();
+
+ diff.push_str("New Licenses:\n");
+ for &l in licenses.iter() {
+ if !expected.contains(&l) {
+ diff += &format!(" {}\n", l)
+ }
+ }
+
+ diff.push_str("\nMissing Licenses:\n");
+ for &l in expected.iter() {
+ if !licenses.contains(&l) {
+ diff += &format!(" {}\n", l)
+ }
+ }
+
+ panic!("different set of licenses!\n{}", diff);
+ }
+ assert_eq!(licenses, expected);
+}
+
+fn check_todo(path: &Path, text: &str) {
+ let need_todo = &[
+ // This file itself obviously needs to use todo (<- like this!).
+ "tests/tidy.rs",
+ // Some of our assists generate `todo!()`.
+ "handlers/add_turbo_fish.rs",
+ "handlers/generate_function.rs",
+ "handlers/add_missing_match_arms.rs",
+ "handlers/replace_derive_with_manual_impl.rs",
+ // To support generating `todo!()` in assists, we have `expr_todo()` in
+ // `ast::make`.
+ "ast/make.rs",
+ // The documentation in string literals may contain anything for its own purposes
+ "ide-db/src/generated/lints.rs",
+ "ide-assists/src/utils/gen_trait_fn_body.rs",
+ "ide-assists/src/tests/generated.rs",
+ // The tests for missing fields
+ "ide-diagnostics/src/handlers/missing_fields.rs",
+ ];
+ if need_todo.iter().any(|p| path.ends_with(p)) {
+ return;
+ }
+ if text.contains("TODO") || text.contains("TOOD") || text.contains("todo!") {
+ // Generated by an assist
+ if text.contains("${0:todo!()}") {
+ return;
+ }
+
+ panic!(
+ "\nTODO markers or todo! macros should not be committed to the master branch,\n\
+ use FIXME instead\n\
+ {}\n",
+ path.display(),
+ )
+ }
+}
+
+fn check_dbg(path: &Path, text: &str) {
+ let need_dbg = &[
+ // This file itself obviously needs to use dbg.
+ "slow-tests/tidy.rs",
+ // Assists to remove `dbg!()`
+ "handlers/remove_dbg.rs",
+ // We have .dbg postfix
+ "ide-completion/src/completions/postfix.rs",
+ "ide-completion/src/completions/keyword.rs",
+ "ide-completion/src/tests/proc_macros.rs",
+ // The documentation in string literals may contain anything for its own purposes
+ "ide-completion/src/lib.rs",
+ "ide-db/src/generated/lints.rs",
+ // test for doc test for remove_dbg
+ "src/tests/generated.rs",
+ ];
+ if need_dbg.iter().any(|p| path.ends_with(p)) {
+ return;
+ }
+ if text.contains("dbg!") {
+ panic!(
+ "\ndbg! macros should not be committed to the master branch,\n\
+ {}\n",
+ path.display(),
+ )
+ }
+}
+
+fn check_test_attrs(path: &Path, text: &str) {
+ let ignore_rule =
+ "https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#ignore";
+ let need_ignore: &[&str] = &[
+ // This file.
+ "slow-tests/tidy.rs",
+ // Special case to run `#[ignore]` tests.
+ "ide/src/runnables.rs",
+ // A legit test which needs to be ignored, as it takes too long to run
+ // :(
+ "hir-def/src/nameres/collector.rs",
+ // Long sourcegen test to generate lint completions.
+ "ide-db/src/tests/sourcegen_lints.rs",
+ // Obviously needs ignore.
+ "ide-assists/src/handlers/toggle_ignore.rs",
+ // See above.
+ "ide-assists/src/tests/generated.rs",
+ ];
+ if text.contains("#[ignore") && !need_ignore.iter().any(|p| path.ends_with(p)) {
+ panic!("\ndon't `#[ignore]` tests, see:\n\n {}\n\n {}\n", ignore_rule, path.display(),)
+ }
+
+ let panic_rule =
+ "https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#should_panic";
+ let need_panic: &[&str] = &[
+ // This file.
+ "slow-tests/tidy.rs",
+ "test-utils/src/fixture.rs",
+ ];
+ if text.contains("#[should_panic") && !need_panic.iter().any(|p| path.ends_with(p)) {
+ panic!(
+ "\ndon't add `#[should_panic]` tests, see:\n\n {}\n\n {}\n",
+ panic_rule,
+ path.display(),
+ )
+ }
+}
+
+fn check_trailing_ws(path: &Path, text: &str) {
+ if is_exclude_dir(path, &["test_data"]) {
+ return;
+ }
+ for (line_number, line) in text.lines().enumerate() {
+ if line.chars().last().map(char::is_whitespace) == Some(true) {
+ panic!("Trailing whitespace in {} at line {}", path.display(), line_number + 1)
+ }
+ }
+}
+
+#[derive(Default)]
+struct TidyDocs {
+ missing_docs: Vec<String>,
+ contains_fixme: Vec<PathBuf>,
+}
+
+impl TidyDocs {
+ fn visit(&mut self, path: &Path, text: &str) {
+ // Tests and diagnostic fixes don't need module level comments.
+ if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar"]) {
+ return;
+ }
+
+ if is_exclude_file(path) {
+ return;
+ }
+
+ let first_line = match text.lines().next() {
+ Some(it) => it,
+ None => return,
+ };
+
+ if first_line.starts_with("//!") {
+ if first_line.contains("FIXME") {
+ self.contains_fixme.push(path.to_path_buf());
+ }
+ } else {
+ if text.contains("// Feature:")
+ || text.contains("// Assist:")
+ || text.contains("// Diagnostic:")
+ {
+ return;
+ }
+ self.missing_docs.push(path.display().to_string());
+ }
+
+ fn is_exclude_file(d: &Path) -> bool {
+ let file_names = ["tests.rs", "famous_defs_fixture.rs"];
+
+ d.file_name()
+ .unwrap_or_default()
+ .to_str()
+ .map(|f_n| file_names.iter().any(|name| *name == f_n))
+ .unwrap_or(false)
+ }
+ }
+
+ fn finish(self) {
+ if !self.missing_docs.is_empty() {
+ panic!(
+ "\nMissing docs strings\n\n\
+ modules:\n{}\n\n",
+ self.missing_docs.join("\n")
+ )
+ }
+
+ for path in self.contains_fixme {
+ panic!("FIXME doc in a fully-documented crate: {}", path.display())
+ }
+ }
+}
+
+fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool {
+ p.strip_prefix(sourcegen::project_root())
+ .unwrap()
+ .components()
+ .rev()
+ .skip(1)
+ .filter_map(|it| it.as_os_str().to_str())
+ .any(|it| dirs_to_exclude.contains(&it))
+}
+
+#[derive(Default)]
+struct TidyMarks {
+ hits: HashSet<String>,
+ checks: HashSet<String>,
+}
+
+impl TidyMarks {
+ fn visit(&mut self, _path: &Path, text: &str) {
+ find_marks(&mut self.hits, text, "hit");
+ find_marks(&mut self.checks, text, "check");
+ find_marks(&mut self.checks, text, "check_count");
+ }
+
+ fn finish(self) {
+ assert!(!self.hits.is_empty());
+
+ let diff: Vec<_> =
+ self.hits.symmetric_difference(&self.checks).map(|it| it.as_str()).collect();
+
+ if !diff.is_empty() {
+ panic!("unpaired marks: {:?}", diff)
+ }
+ }
+}
+
+#[allow(deprecated)]
+fn stable_hash(text: &str) -> u64 {
+ use std::hash::{Hash, Hasher, SipHasher};
+
+ let text = text.replace('\r', "");
+ let mut hasher = SipHasher::default();
+ text.hash(&mut hasher);
+ hasher.finish()
+}
+
+fn find_marks(set: &mut HashSet<String>, text: &str, mark: &str) {
+ let mut text = text;
+ let mut prev_text = "";
+ while text != prev_text {
+ prev_text = text;
+ if let Some(idx) = text.find(mark) {
+ text = &text[idx + mark.len()..];
+ if let Some(stripped_text) = text.strip_prefix("!(") {
+ text = stripped_text.trim_start();
+ if let Some(idx2) = text.find(|c: char| !(c.is_alphanumeric() || c == '_')) {
+ let mark_text = &text[..idx2];
+ set.insert(mark_text.to_string());
+ text = &text[idx2..];
+ }
+ }
+ }
+ }
+}
--- /dev/null
- let version = cmd!(sh, "rustfmt --version").read().unwrap_or_default();
+//! rust-analyzer relies heavily on source code generation.
+//!
+//! Things like feature documentation or assist tests are implemented by
+//! processing rust-analyzer's own source code and generating the appropriate
+//! output. See `sourcegen_` tests in various crates.
+//!
+//! This crate contains utilities to make this kind of source-gen easy.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use std::{
+ fmt, fs, mem,
+ path::{Path, PathBuf},
+};
+
+use xshell::{cmd, Shell};
+
+pub fn list_rust_files(dir: &Path) -> Vec<PathBuf> {
+ let mut res = list_files(dir);
+ res.retain(|it| {
+ it.file_name().unwrap_or_default().to_str().unwrap_or_default().ends_with(".rs")
+ });
+ res
+}
+
+pub fn list_files(dir: &Path) -> Vec<PathBuf> {
+ let mut res = Vec::new();
+ let mut work = vec![dir.to_path_buf()];
+ while let Some(dir) = work.pop() {
+ for entry in dir.read_dir().unwrap() {
+ let entry = entry.unwrap();
+ let file_type = entry.file_type().unwrap();
+ let path = entry.path();
+ let is_hidden =
+ path.file_name().unwrap_or_default().to_str().unwrap_or_default().starts_with('.');
+ if !is_hidden {
+ if file_type.is_dir() {
+ work.push(path);
+ } else if file_type.is_file() {
+ res.push(path);
+ }
+ }
+ }
+ }
+ res
+}
+
+#[derive(Clone)]
+pub struct CommentBlock {
+ pub id: String,
+ pub line: usize,
+ pub contents: Vec<String>,
+ is_doc: bool,
+}
+
+impl CommentBlock {
+ pub fn extract(tag: &str, text: &str) -> Vec<CommentBlock> {
+ assert!(tag.starts_with(char::is_uppercase));
+
+ let tag = format!("{}:", tag);
+ // Would be nice if we had `.retain_mut` here!
+ CommentBlock::extract_untagged(text)
+ .into_iter()
+ .filter_map(|mut block| {
+ let first = block.contents.remove(0);
+ first.strip_prefix(&tag).map(|id| {
+ if block.is_doc {
+ panic!(
+ "Use plain (non-doc) comments with tags like {}:\n {}",
+ tag, first
+ );
+ }
+
+ block.id = id.trim().to_string();
+ block
+ })
+ })
+ .collect()
+ }
+
+ pub fn extract_untagged(text: &str) -> Vec<CommentBlock> {
+ let mut res = Vec::new();
+
+ let lines = text.lines().map(str::trim_start);
+
+ let dummy_block =
+ CommentBlock { id: String::new(), line: 0, contents: Vec::new(), is_doc: false };
+ let mut block = dummy_block.clone();
+ for (line_num, line) in lines.enumerate() {
+ match line.strip_prefix("//") {
+ Some(mut contents) => {
+ if let Some('/' | '!') = contents.chars().next() {
+ contents = &contents[1..];
+ block.is_doc = true;
+ }
+ if let Some(' ') = contents.chars().next() {
+ contents = &contents[1..];
+ }
+ block.contents.push(contents.to_string());
+ }
+ None => {
+ if !block.contents.is_empty() {
+ let block = mem::replace(&mut block, dummy_block.clone());
+ res.push(block);
+ }
+ block.line = line_num + 2;
+ }
+ }
+ }
+ if !block.contents.is_empty() {
+ res.push(block);
+ }
+ res
+ }
+}
+
+#[derive(Debug)]
+pub struct Location {
+ pub file: PathBuf,
+ pub line: usize,
+}
+
+impl fmt::Display for Location {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let path = self.file.strip_prefix(&project_root()).unwrap().display().to_string();
+ let path = path.replace('\\', "/");
+ let name = self.file.file_name().unwrap();
+ write!(
+ f,
+ "https://github.com/rust-lang/rust-analyzer/blob/master/{}#L{}[{}]",
+ path,
+ self.line,
+ name.to_str().unwrap()
+ )
+ }
+}
+
+fn ensure_rustfmt(sh: &Shell) {
- sh.set_var("RUSTUP_TOOLCHAIN", "stable");
++ let version = cmd!(sh, "rustup run stable rustfmt --version").read().unwrap_or_default();
+ if !version.contains("stable") {
+ panic!(
+ "Failed to run rustfmt from toolchain 'stable'. \
+ Please run `rustup component add rustfmt --toolchain stable` to install it.",
+ );
+ }
+}
+
+pub fn reformat(text: String) -> String {
+ let sh = Shell::new().unwrap();
- let mut stdout = cmd!(sh, "rustfmt --config-path {rustfmt_toml} --config fn_single_line=true")
- .stdin(text)
- .read()
- .unwrap();
+ ensure_rustfmt(&sh);
+ let rustfmt_toml = project_root().join("rustfmt.toml");
++ let mut stdout = cmd!(
++ sh,
++ "rustup run stable rustfmt --config-path {rustfmt_toml} --config fn_single_line=true"
++ )
++ .stdin(text)
++ .read()
++ .unwrap();
+ if !stdout.ends_with('\n') {
+ stdout.push('\n');
+ }
+ stdout
+}
+
+pub fn add_preamble(generator: &'static str, mut text: String) -> String {
+ let preamble = format!("//! Generated by `{}`, do not edit by hand.\n\n", generator);
+ text.insert_str(0, &preamble);
+ text
+}
+
/// Checks that the `file` has the specified `contents`. If that is not the
/// case, updates the file and then fails the test.
///
/// Comparison is newline-insensitive (CRLF is normalized to LF) so checked
/// out line endings don't cause spurious failures.
pub fn ensure_file_contents(file: &Path, contents: &str) {
    if let Ok(old_contents) = fs::read_to_string(file) {
        if normalize_newlines(&old_contents) == normalize_newlines(contents) {
            // File is already up to date.
            return;
        }
    }

    let display_path = file.strip_prefix(&project_root()).unwrap_or(file);
    eprintln!(
        "\n\x1b[31;1merror\x1b[0m: {} was not up-to-date, updating\n",
        display_path.display()
    );
    // On CI the updated file can't be committed from here, so tell the user
    // how to fix it locally.
    if std::env::var("CI").is_ok() {
        eprintln!("    NOTE: run `cargo test` locally and commit the updated files\n");
    }
    if let Some(parent) = file.parent() {
        // Best-effort: the following `write` will report any real problem.
        let _ = fs::create_dir_all(parent);
    }
    fs::write(file, contents).unwrap();
    panic!("some file was not up to date and has been updated, simply re-run the tests");
}
+
/// Canonicalizes CRLF line endings to LF so content comparisons are
/// insensitive to the platform the file was written on.
fn normalize_newlines(s: &str) -> String {
    s.split("\r\n").collect::<Vec<_>>().join("\n")
}
+
/// Returns the repository root: two directories above this crate's
/// manifest. Sanity-checked by asserting `triagebot.toml` exists there.
pub fn project_root() -> PathBuf {
    // Resolved at compile time, so this works regardless of the cwd the
    // tests are run from.
    let dir = env!("CARGO_MANIFEST_DIR");
    let res = PathBuf::from(dir).parent().unwrap().parent().unwrap().to_owned();
    assert!(res.join("triagebot.toml").exists());
    res
}
--- /dev/null
- SyntaxNode,
+//! Structural editing for ast.
+
+use std::iter::{empty, successors};
+
+use parser::{SyntaxKind, T};
+use rowan::SyntaxElement;
+
+use crate::{
+ algo::{self, neighbor},
+ ast::{self, edit::IndentLevel, make, HasGenericParams},
+ ted::{self, Position},
+ AstNode, AstToken, Direction,
+ SyntaxKind::{ATTR, COMMENT, WHITESPACE},
- let comma = match last_field
- .syntax()
- .siblings_with_tokens(Direction::Next)
- .filter_map(|it| it.into_token())
- .find(|it| it.kind() == T![,])
- {
- Some(it) => it,
- None => {
- let comma = ast::make::token(T![,]);
- ted::insert(Position::after(last_field.syntax()), &comma);
- comma
- }
- };
++ SyntaxNode, SyntaxToken,
+};
+
+use super::HasName;
+
/// Extension trait for nodes that can carry generic parameters and a
/// `where` clause, providing get-or-create accessors that mutate the
/// syntax tree in place.
pub trait GenericParamsOwnerEdit: ast::HasGenericParams {
    /// Returns the node's `<...>` parameter list, inserting an empty one
    /// at the appropriate position if it is missing.
    fn get_or_create_generic_param_list(&self) -> ast::GenericParamList;
    /// Returns the node's `where` clause, inserting an empty one at the
    /// appropriate position if it is missing.
    fn get_or_create_where_clause(&self) -> ast::WhereClause;
}
+
impl GenericParamsOwnerEdit for ast::Fn {
    fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
        match self.generic_param_list() {
            Some(it) => it,
            None => {
                // Prefer inserting right after the function name, falling
                // back to after `fn`, before the parameter list, or at the
                // very end for progressively more broken sources.
                let position = if let Some(name) = self.name() {
                    Position::after(name.syntax)
                } else if let Some(fn_token) = self.fn_token() {
                    Position::after(fn_token)
                } else if let Some(param_list) = self.param_list() {
                    Position::before(param_list.syntax)
                } else {
                    Position::last_child_of(self.syntax())
                };
                create_generic_param_list(position)
            }
        }
    }

    fn get_or_create_where_clause(&self) -> ast::WhereClause {
        if self.where_clause().is_none() {
            // After the return type if any, otherwise after the `(...)`.
            let position = if let Some(ty) = self.ret_type() {
                Position::after(ty.syntax())
            } else if let Some(param_list) = self.param_list() {
                Position::after(param_list.syntax())
            } else {
                Position::last_child_of(self.syntax())
            };
            create_where_clause(position);
        }
        // Safe: we just created it when absent.
        self.where_clause().unwrap()
    }
}
+
impl GenericParamsOwnerEdit for ast::Impl {
    fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
        match self.generic_param_list() {
            Some(it) => it,
            None => {
                // `impl<...>` — the list goes directly after the `impl`
                // keyword when present.
                let position = match self.impl_token() {
                    Some(imp_token) => Position::after(imp_token),
                    None => Position::last_child_of(self.syntax()),
                };
                create_generic_param_list(position)
            }
        }
    }

    fn get_or_create_where_clause(&self) -> ast::WhereClause {
        if self.where_clause().is_none() {
            // The where clause precedes the `{ ... }` item list.
            let position = match self.assoc_item_list() {
                Some(items) => Position::before(items.syntax()),
                None => Position::last_child_of(self.syntax()),
            };
            create_where_clause(position);
        }
        self.where_clause().unwrap()
    }
}
+
impl GenericParamsOwnerEdit for ast::Trait {
    fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
        match self.generic_param_list() {
            Some(it) => it,
            None => {
                // `trait Name<...>` — after the name, falling back to after
                // the `trait` keyword.
                let position = if let Some(name) = self.name() {
                    Position::after(name.syntax)
                } else if let Some(trait_token) = self.trait_token() {
                    Position::after(trait_token)
                } else {
                    Position::last_child_of(self.syntax())
                };
                create_generic_param_list(position)
            }
        }
    }

    fn get_or_create_where_clause(&self) -> ast::WhereClause {
        if self.where_clause().is_none() {
            // The where clause precedes the `{ ... }` item list.
            let position = match self.assoc_item_list() {
                Some(items) => Position::before(items.syntax()),
                None => Position::last_child_of(self.syntax()),
            };
            create_where_clause(position);
        }
        self.where_clause().unwrap()
    }
}
+
impl GenericParamsOwnerEdit for ast::Struct {
    fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
        match self.generic_param_list() {
            Some(it) => it,
            None => {
                // `struct Name<...>` — after the name, falling back to after
                // the `struct` keyword.
                let position = if let Some(name) = self.name() {
                    Position::after(name.syntax)
                } else if let Some(struct_token) = self.struct_token() {
                    Position::after(struct_token)
                } else {
                    Position::last_child_of(self.syntax())
                };
                create_generic_param_list(position)
            }
        }
    }

    fn get_or_create_where_clause(&self) -> ast::WhereClause {
        if self.where_clause().is_none() {
            // For tuple structs (`struct S(T) where ...;`) the clause goes
            // after the tuple field list; record structs place it before the
            // braces, i.e. after the generics or the name.
            let tfl = self.field_list().and_then(|fl| match fl {
                ast::FieldList::RecordFieldList(_) => None,
                ast::FieldList::TupleFieldList(it) => Some(it),
            });
            let position = if let Some(tfl) = tfl {
                Position::after(tfl.syntax())
            } else if let Some(gpl) = self.generic_param_list() {
                Position::after(gpl.syntax())
            } else if let Some(name) = self.name() {
                Position::after(name.syntax())
            } else {
                Position::last_child_of(self.syntax())
            };
            create_where_clause(position);
        }
        self.where_clause().unwrap()
    }
}
+
impl GenericParamsOwnerEdit for ast::Enum {
    fn get_or_create_generic_param_list(&self) -> ast::GenericParamList {
        match self.generic_param_list() {
            Some(it) => it,
            None => {
                // `enum Name<...>` — after the name, falling back to after
                // the `enum` keyword.
                let position = if let Some(name) = self.name() {
                    Position::after(name.syntax)
                } else if let Some(enum_token) = self.enum_token() {
                    Position::after(enum_token)
                } else {
                    Position::last_child_of(self.syntax())
                };
                create_generic_param_list(position)
            }
        }
    }

    fn get_or_create_where_clause(&self) -> ast::WhereClause {
        if self.where_clause().is_none() {
            // After the generics when present, otherwise after the name.
            let position = if let Some(gpl) = self.generic_param_list() {
                Position::after(gpl.syntax())
            } else if let Some(name) = self.name() {
                Position::after(name.syntax())
            } else {
                Position::last_child_of(self.syntax())
            };
            create_where_clause(position);
        }
        self.where_clause().unwrap()
    }
}
+
+fn create_where_clause(position: Position) {
+ let where_clause = make::where_clause(empty()).clone_for_update();
+ ted::insert(position, where_clause.syntax());
+}
+
+fn create_generic_param_list(position: Position) -> ast::GenericParamList {
+ let gpl = make::generic_param_list(empty()).clone_for_update();
+ ted::insert_raw(position, gpl.syntax());
+ gpl
+}
+
/// Extension trait: strips all attributes and comments (including doc
/// comments) from a node, in place.
pub trait AttrsOwnerEdit: ast::HasAttrs {
    fn remove_attrs_and_docs(&self) {
        remove_attrs_and_docs(self.syntax());

        fn remove_attrs_and_docs(node: &SyntaxNode) {
            // Also drop the whitespace token that directly follows each
            // removed attr/comment, so no blank lines are left behind.
            let mut remove_next_ws = false;
            for child in node.children_with_tokens() {
                match child.kind() {
                    ATTR | COMMENT => {
                        remove_next_ws = true;
                        child.detach();
                        continue;
                    }
                    WHITESPACE if remove_next_ws => {
                        child.detach();
                    }
                    _ => (),
                }
                remove_next_ws = false;
            }
        }
    }
}

impl<T: ast::HasAttrs> AttrsOwnerEdit for T {}
+
impl ast::GenericParamList {
    /// Appends `generic_param` to the list, adding `, ` after the last
    /// existing parameter, or placing it right after `<` when the list is
    /// empty.
    pub fn add_generic_param(&self, generic_param: ast::GenericParam) {
        match self.generic_params().last() {
            Some(last_param) => {
                let position = Position::after(last_param.syntax());
                let elements = vec![
                    make::token(T![,]).into(),
                    make::tokens::single_space().into(),
                    generic_param.syntax().clone().into(),
                ];
                ted::insert_all(position, elements);
            }
            None => {
                // Panics if the list has no `<`; an existing
                // GenericParamList is expected to have one.
                let after_l_angle = Position::after(self.l_angle_token().unwrap());
                ted::insert(after_l_angle, generic_param.syntax());
            }
        }
    }
}
+
impl ast::WhereClause {
    /// Appends `predicate` to the clause, first making sure the previous
    /// predicate is followed by a `,`.
    pub fn add_predicate(&self, predicate: ast::WherePred) {
        if let Some(pred) = self.predicates().last() {
            if !pred.syntax().siblings_with_tokens(Direction::Next).any(|it| it.kind() == T![,]) {
                ted::append_child_raw(self.syntax(), make::token(T![,]));
            }
        }
        ted::append_child(self.syntax(), predicate.syntax());
    }
}
+
impl ast::TypeBoundList {
    /// Removes the bound list, including the preceding `:` when present
    /// (e.g. `T: Bound` -> `T`).
    pub fn remove(&self) {
        match self.syntax().siblings_with_tokens(Direction::Prev).find(|it| it.kind() == T![:]) {
            Some(colon) => ted::remove_all(colon..=self.syntax().clone().into()),
            None => ted::remove(self.syntax()),
        }
    }
}
+
+impl ast::PathSegment {
+ pub fn get_or_create_generic_arg_list(&self) -> ast::GenericArgList {
+ if self.generic_arg_list().is_none() {
+ let arg_list = make::generic_arg_list().clone_for_update();
+ ted::append_child(self.syntax(), arg_list.syntax());
+ }
+ self.generic_arg_list().unwrap()
+ }
+}
+
impl ast::UseTree {
    /// Removes this use tree together with the separators (`,` and
    /// whitespace) linking it to its nearest sibling tree, if any.
    pub fn remove(&self) {
        for dir in [Direction::Next, Direction::Prev] {
            if let Some(next_use_tree) = neighbor(self, dir) {
                // Everything between `self` and the neighbouring tree is a
                // separator and goes away with us.
                let separators = self
                    .syntax()
                    .siblings_with_tokens(dir)
                    .skip(1)
                    .take_while(|it| it.as_node() != Some(next_use_tree.syntax()));
                ted::remove_all_iter(separators);
                break;
            }
        }
        ted::remove(self.syntax());
    }

    /// Returns the `{...}` subtree list, appending an empty one (preceded
    /// by `::` if not already present) when missing.
    pub fn get_or_create_use_tree_list(&self) -> ast::UseTreeList {
        match self.use_tree_list() {
            Some(it) => it,
            None => {
                let position = Position::last_child_of(self.syntax());
                let use_tree_list = make::use_tree_list(empty()).clone_for_update();
                let mut elements = Vec::with_capacity(2);
                if self.coloncolon_token().is_none() {
                    elements.push(make::token(T![::]).into());
                }
                elements.push(use_tree_list.syntax().clone().into());
                ted::insert_all_raw(position, elements);
                use_tree_list
            }
        }
    }

    /// Splits off the given prefix, making it the path component of the use tree,
    /// appending the rest of the path to all UseTreeList items.
    ///
    /// # Examples
    ///
    /// `prefix$0::suffix` -> `prefix::{suffix}`
    ///
    /// `prefix$0` -> `prefix::{self}`
    ///
    /// `prefix$0::*` -> `prefix::{*}`
    pub fn split_prefix(&self, prefix: &ast::Path) {
        debug_assert_eq!(self.path(), Some(prefix.top_path()));
        let path = self.path().unwrap();
        if &path == prefix && self.use_tree_list().is_none() {
            if self.star_token().is_some() {
                // path$0::* -> *
                self.coloncolon_token().map(ted::remove);
                ted::remove(prefix.syntax());
            } else {
                // path$0 -> self
                let self_suffix =
                    make::path_unqualified(make::path_segment_self()).clone_for_update();
                ted::replace(path.syntax(), self_suffix.syntax());
            }
        } else if split_path_prefix(prefix).is_none() {
            return;
        }
        // At this point, prefix path is detached; _self_ use tree has suffix path.
        // Next, transform 'suffix' use tree into 'prefix::{suffix}'
        let subtree = self.clone_subtree().clone_for_update();
        ted::remove_all_iter(self.syntax().children_with_tokens());
        ted::insert(Position::first_child_of(self.syntax()), prefix.syntax());
        self.get_or_create_use_tree_list().add_use_tree(subtree);

        // Detaches `prefix` (and its leading `::`) from its parent path.
        // Returns `None` (leaving the tree untouched) when the path is
        // malformed or has erroneous segments.
        fn split_path_prefix(prefix: &ast::Path) -> Option<()> {
            let parent = prefix.parent_path()?;
            let segment = parent.segment()?;
            if algo::has_errors(segment.syntax()) {
                return None;
            }
            for p in successors(parent.parent_path(), |it| it.parent_path()) {
                p.segment()?;
            }
            prefix.parent_path().and_then(|p| p.coloncolon_token()).map(ted::remove);
            ted::remove(prefix.syntax());
            Some(())
        }
    }
}
+
impl ast::UseTreeList {
    /// Appends `use_tree` to the `{...}` list, adding `, ` after the last
    /// existing tree, or placing it right after `{` when the list is empty.
    pub fn add_use_tree(&self, use_tree: ast::UseTree) {
        let (position, elements) = match self.use_trees().last() {
            Some(last_tree) => (
                Position::after(last_tree.syntax()),
                vec![
                    make::token(T![,]).into(),
                    make::tokens::single_space().into(),
                    use_tree.syntax.into(),
                ],
            ),
            None => {
                let position = match self.l_curly_token() {
                    Some(l_curly) => Position::after(l_curly),
                    None => Position::last_child_of(self.syntax()),
                };
                (position, vec![use_tree.syntax.into()])
            }
        };
        ted::insert_all_raw(position, elements);
    }
}
+
impl ast::Use {
    /// Removes the whole `use ...;` item, also consuming the newline that
    /// follows it so no empty line remains.
    pub fn remove(&self) {
        let next_ws = self
            .syntax()
            .next_sibling_or_token()
            .and_then(|it| it.into_token())
            .and_then(ast::Whitespace::cast);
        if let Some(next_ws) = next_ws {
            let ws_text = next_ws.syntax().text();
            if let Some(rest) = ws_text.strip_prefix('\n') {
                if rest.is_empty() {
                    // Whitespace was exactly one newline: drop it entirely.
                    ted::remove(next_ws.syntax());
                } else {
                    // Keep the remaining indentation, minus the newline.
                    ted::replace(next_ws.syntax(), make::tokens::whitespace(rest));
                }
            }
        }
        ted::remove(self.syntax());
    }
}
+
+impl ast::Impl {
+ pub fn get_or_create_assoc_item_list(&self) -> ast::AssocItemList {
+ if self.assoc_item_list().is_none() {
+ let assoc_item_list = make::assoc_item_list().clone_for_update();
+ ted::append_child(self.syntax(), assoc_item_list.syntax());
+ }
+ self.assoc_item_list().unwrap()
+ }
+}
+
impl ast::AssocItemList {
    /// Appends `item` to the list on its own line: separated from the
    /// previous item by a blank line, or opening a fresh line after `{`
    /// when the list is empty.
    pub fn add_item(&self, item: ast::AssocItem) {
        let (indent, position, whitespace) = match self.assoc_items().last() {
            Some(last_item) => (
                IndentLevel::from_node(last_item.syntax()),
                Position::after(last_item.syntax()),
                "\n\n",
            ),
            None => match self.l_curly_token() {
                Some(l_curly) => {
                    normalize_ws_between_braces(self.syntax());
                    (IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly), "\n")
                }
                None => (IndentLevel::single(), Position::last_child_of(self.syntax()), "\n"),
            },
        };
        let elements: Vec<SyntaxElement<_>> = vec![
            make::tokens::whitespace(&format!("{}{}", whitespace, indent)).into(),
            item.syntax().clone().into(),
        ];
        ted::insert_all(position, elements);
    }
}
+
impl ast::Fn {
    /// Returns the function's body block, creating an empty `{}` one if the
    /// function has none (replacing a trailing `;` when present).
    pub fn get_or_create_body(&self) -> ast::BlockExpr {
        if self.body().is_none() {
            let body = make::ext::empty_block_expr().clone_for_update();
            match self.semicolon_token() {
                Some(semi) => {
                    // `fn f();` -> `fn f() {}`
                    ted::replace(semi, body.syntax());
                    ted::insert(Position::before(body.syntax), make::tokens::single_space());
                }
                None => ted::append_child(self.syntax(), body.syntax()),
            }
        }
        self.body().unwrap()
    }
}
+
impl ast::MatchArm {
    /// Removes the arm together with its leading whitespace and trailing
    /// comma, if present.
    pub fn remove(&self) {
        if let Some(sibling) = self.syntax().prev_sibling_or_token() {
            if sibling.kind() == SyntaxKind::WHITESPACE {
                ted::remove(sibling);
            }
        }
        if let Some(sibling) = self.syntax().next_sibling_or_token() {
            if sibling.kind() == T![,] {
                ted::remove(sibling);
            }
        }
        ted::remove(self.syntax());
    }
}
+
+impl ast::MatchArmList {
+ pub fn add_arm(&self, arm: ast::MatchArm) {
+ normalize_ws_between_braces(self.syntax());
+ let mut elements = Vec::new();
+ let position = match self.arms().last() {
+ Some(last_arm) => {
+ if needs_comma(&last_arm) {
+ ted::append_child(last_arm.syntax(), make::token(SyntaxKind::COMMA));
+ }
+ Position::after(last_arm.syntax().clone())
+ }
+ None => match self.l_curly_token() {
+ Some(it) => Position::after(it),
+ None => Position::last_child_of(self.syntax()),
+ },
+ };
+ let indent = IndentLevel::from_node(self.syntax()) + 1;
+ elements.push(make::tokens::whitespace(&format!("\n{}", indent)).into());
+ elements.push(arm.syntax().clone().into());
+ if needs_comma(&arm) {
+ ted::append_child(arm.syntax(), make::token(SyntaxKind::COMMA));
+ }
+ ted::insert_all(position, elements);
+
+ fn needs_comma(arm: &ast::MatchArm) -> bool {
+ arm.expr().map_or(false, |e| !e.is_block_like()) && arm.comma_token().is_none()
+ }
+ }
+}
+
impl ast::RecordExprFieldList {
    /// Appends `field` to the record literal's field list.
    ///
    /// Multi-line literals get the field on its own indented line followed
    /// by a trailing comma; single-line literals separate fields with a
    /// single space.
    pub fn add_field(&self, field: ast::RecordExprField) {
        let is_multiline = self.syntax().text().contains_char('\n');
        let whitespace = if is_multiline {
            let indent = IndentLevel::from_node(self.syntax()) + 1;
            make::tokens::whitespace(&format!("\n{}", indent))
        } else {
            make::tokens::single_space()
        };

        if is_multiline {
            normalize_ws_between_braces(self.syntax());
        }

        let position = match self.fields().last() {
            Some(last_field) => {
                // Insert after the previous field's (possibly newly created)
                // comma.
                let comma = get_or_insert_comma_after(last_field.syntax());
                Position::after(comma)
            }
            None => match self.l_curly_token() {
                Some(it) => Position::after(it),
                None => Position::last_child_of(self.syntax()),
            },
        };

        ted::insert_all(position, vec![whitespace.into(), field.syntax().clone().into()]);
        if is_multiline {
            ted::insert(Position::after(field.syntax()), ast::make::token(T![,]));
        }
    }
}
+
impl ast::RecordExprField {
    /// This will either replace the initializer, or in the case that this is a shorthand convert
    /// the initializer into the name ref and insert the expr as the new initializer.
    pub fn replace_expr(&self, expr: ast::Expr) {
        if self.name_ref().is_some() {
            // `name: old` form — just swap (or append) the initializer.
            match self.expr() {
                Some(prev) => ted::replace(prev.syntax(), expr.syntax()),
                None => ted::append_child(self.syntax(), expr.syntax()),
            }
            return;
        }
        // this is a shorthand: `name` becomes `name: expr`
        if let Some(ast::Expr::PathExpr(path_expr)) = self.expr() {
            if let Some(path) = path_expr.path() {
                if let Some(name_ref) = path.as_single_name_ref() {
                    path_expr.syntax().detach();
                    let children = vec![
                        name_ref.syntax().clone().into(),
                        ast::make::token(T![:]).into(),
                        ast::make::tokens::single_space().into(),
                        expr.syntax().clone().into(),
                    ];
                    ted::insert_all_raw(Position::last_child_of(self.syntax()), children);
                }
            }
        }
    }
}
+
+impl ast::RecordPatFieldList {
+ pub fn add_field(&self, field: ast::RecordPatField) {
+ let is_multiline = self.syntax().text().contains_char('\n');
+ let whitespace = if is_multiline {
+ let indent = IndentLevel::from_node(self.syntax()) + 1;
+ make::tokens::whitespace(&format!("\n{}", indent))
+ } else {
+ make::tokens::single_space()
+ };
+
+ if is_multiline {
+ normalize_ws_between_braces(self.syntax());
+ }
+
+ let position = match self.fields().last() {
+ Some(last_field) => {
++ let syntax = last_field.syntax();
++ let comma = get_or_insert_comma_after(syntax);
+ Position::after(comma)
+ }
+ None => match self.l_curly_token() {
+ Some(it) => Position::after(it),
+ None => Position::last_child_of(self.syntax()),
+ },
+ };
+
+ ted::insert_all(position, vec![whitespace.into(), field.syntax().clone().into()]);
+ if is_multiline {
+ ted::insert(Position::after(field.syntax()), ast::make::token(T![,]));
+ }
+ }
+}
++
++fn get_or_insert_comma_after(syntax: &SyntaxNode) -> SyntaxToken {
++ let comma = match syntax
++ .siblings_with_tokens(Direction::Next)
++ .filter_map(|it| it.into_token())
++ .find(|it| it.kind() == T![,])
++ {
++ Some(it) => it,
++ None => {
++ let comma = ast::make::token(T![,]);
++ ted::insert(Position::after(syntax), &comma);
++ comma
++ }
++ };
++ comma
++}
++
impl ast::StmtList {
    /// Inserts `statement` as the first statement of the block, right after
    /// the opening `{` (which is assumed to exist — panics otherwise).
    pub fn push_front(&self, statement: ast::Stmt) {
        ted::insert(Position::after(self.l_curly_token().unwrap()), statement.syntax());
    }
}
+
++impl ast::VariantList {
++ pub fn add_variant(&self, variant: ast::Variant) {
++ let (indent, position) = match self.variants().last() {
++ Some(last_item) => (
++ IndentLevel::from_node(last_item.syntax()),
++ Position::after(get_or_insert_comma_after(last_item.syntax())),
++ ),
++ None => match self.l_curly_token() {
++ Some(l_curly) => {
++ normalize_ws_between_braces(self.syntax());
++ (IndentLevel::from_token(&l_curly) + 1, Position::after(&l_curly))
++ }
++ None => (IndentLevel::single(), Position::last_child_of(self.syntax())),
++ },
++ };
++ let elements: Vec<SyntaxElement<_>> = vec![
++ make::tokens::whitespace(&format!("{}{}", "\n", indent)).into(),
++ variant.syntax().clone().into(),
++ ast::make::token(T![,]).into(),
++ ];
++ ted::insert_all(position, elements);
++ }
++}
++
/// Normalizes the whitespace between a node's `{` and `}` so that
/// subsequent insertions start on a fresh, correctly indented line.
/// Returns `None` when either brace is missing.
fn normalize_ws_between_braces(node: &SyntaxNode) -> Option<()> {
    let l = node
        .children_with_tokens()
        .filter_map(|it| it.into_token())
        .find(|it| it.kind() == T!['{'])?;
    let r = node
        .children_with_tokens()
        .filter_map(|it| it.into_token())
        .find(|it| it.kind() == T!['}'])?;

    let indent = IndentLevel::from_node(node);

    match l.next_sibling_or_token() {
        Some(ws) if ws.kind() == SyntaxKind::WHITESPACE => {
            // `{   }` — only whitespace between the braces: replace it with
            // a single newline at the node's indent.
            if ws.next_sibling_or_token()?.into_token()? == r {
                ted::replace(ws, make::tokens::whitespace(&format!("\n{}", indent)));
            }
        }
        Some(ws) if ws.kind() == T!['}'] => {
            // `{}` — braces directly adjacent: open them up with a newline.
            ted::insert(Position::after(l), make::tokens::whitespace(&format!("\n{}", indent)));
        }
        _ => (),
    }
    Some(())
}
+
/// Extension trait with convenience methods for adjusting a node's
/// indentation in place. Blanket-implemented for all AST nodes below.
pub trait Indent: AstNode + Clone + Sized {
    /// Current indentation level of the node.
    fn indent_level(&self) -> IndentLevel {
        IndentLevel::from_node(self.syntax())
    }
    /// Increases the node's indentation by `by` levels.
    fn indent(&self, by: IndentLevel) {
        by.increase_indent(self.syntax());
    }
    /// Decreases the node's indentation by `by` levels.
    fn dedent(&self, by: IndentLevel) {
        by.decrease_indent(self.syntax());
    }
    /// Re-indents the node from its current level to `target_level`.
    fn reindent_to(&self, target_level: IndentLevel) {
        let current_level = IndentLevel::from_node(self.syntax());
        self.dedent(current_level);
        self.indent(target_level);
    }
}

impl<N: AstNode + Clone> Indent for N {}
+
#[cfg(test)]
mod tests {
    use std::fmt;

    use stdx::trim_indent;
    use test_utils::assert_eq_text;

    use crate::SourceFile;

    use super::*;

    // Parses `text` and returns a mutable (clone_for_update) handle to the
    // first node of type `N` found in it.
    fn ast_mut_from_text<N: AstNode>(text: &str) -> N {
        let parse = SourceFile::parse(text);
        parse.tree().syntax().descendants().find_map(N::cast).unwrap().clone_for_update()
    }

    #[test]
    fn test_create_generic_param_list() {
        fn check_create_gpl<N: GenericParamsOwnerEdit + fmt::Display>(before: &str, after: &str) {
            let gpl_owner = ast_mut_from_text::<N>(before);
            gpl_owner.get_or_create_generic_param_list();
            assert_eq!(gpl_owner.to_string(), after);
        }

        check_create_gpl::<ast::Fn>("fn foo", "fn foo<>");
        check_create_gpl::<ast::Fn>("fn foo() {}", "fn foo<>() {}");

        check_create_gpl::<ast::Impl>("impl", "impl<>");
        check_create_gpl::<ast::Impl>("impl Struct {}", "impl<> Struct {}");
        check_create_gpl::<ast::Impl>("impl Trait for Struct {}", "impl<> Trait for Struct {}");

        check_create_gpl::<ast::Trait>("trait Trait<>", "trait Trait<>");
        check_create_gpl::<ast::Trait>("trait Trait<> {}", "trait Trait<> {}");

        check_create_gpl::<ast::Struct>("struct A", "struct A<>");
        check_create_gpl::<ast::Struct>("struct A;", "struct A<>;");
        check_create_gpl::<ast::Struct>("struct A();", "struct A<>();");
        check_create_gpl::<ast::Struct>("struct A {}", "struct A<> {}");

        check_create_gpl::<ast::Enum>("enum E", "enum E<>");
        check_create_gpl::<ast::Enum>("enum E {", "enum E<> {");
    }

    #[test]
    fn test_increase_indent() {
        let arm_list = ast_mut_from_text::<ast::Fn>(
            "fn foo() {
    ;
    ;
}",
        );
        arm_list.indent(IndentLevel(2));
        assert_eq!(
            arm_list.to_string(),
            "fn foo() {
            ;
            ;
        }",
        );
    }

    #[test]
    fn add_variant_to_empty_enum() {
        let variant = make::variant(make::name("Bar"), None).clone_for_update();

        check_add_variant(
            r#"
enum Foo {}
"#,
            r#"
enum Foo {
    Bar,
}
"#,
            variant,
        );
    }

    #[test]
    fn add_variant_to_non_empty_enum() {
        let variant = make::variant(make::name("Baz"), None).clone_for_update();

        check_add_variant(
            r#"
enum Foo {
    Bar,
}
"#,
            r#"
enum Foo {
    Bar,
    Baz,
}
"#,
            variant,
        );
    }

    #[test]
    fn add_variant_with_tuple_field_list() {
        let variant = make::variant(
            make::name("Baz"),
            Some(ast::FieldList::TupleFieldList(make::tuple_field_list(std::iter::once(
                make::tuple_field(None, make::ty("bool")),
            )))),
        )
        .clone_for_update();

        check_add_variant(
            r#"
enum Foo {
    Bar,
}
"#,
            r#"
enum Foo {
    Bar,
    Baz(bool),
}
"#,
            variant,
        );
    }

    #[test]
    fn add_variant_with_record_field_list() {
        let variant = make::variant(
            make::name("Baz"),
            Some(ast::FieldList::RecordFieldList(make::record_field_list(std::iter::once(
                make::record_field(None, make::name("x"), make::ty("bool")),
            )))),
        )
        .clone_for_update();

        check_add_variant(
            r#"
enum Foo {
    Bar,
}
"#,
            r#"
enum Foo {
    Bar,
    Baz { x: bool },
}
"#,
            variant,
        );
    }

    // Shared driver: parses `before`, appends `variant` via `add_variant`,
    // and compares the result against `expected` (both trim-indented).
    fn check_add_variant(before: &str, expected: &str, variant: ast::Variant) {
        let enum_ = ast_mut_from_text::<ast::Enum>(before);
        enum_.variant_list().map(|it| it.add_variant(variant));
        let after = enum_.to_string();
        assert_eq_text!(&trim_indent(expected.trim()), &trim_indent(&after.trim()));
    }
}
--- /dev/null
- Some(it) => format!("{}", it),
+//! This module contains free-standing functions for creating AST fragments out
+//! of smaller pieces.
+//!
+//! Note that all functions here intended to be stupid constructors, which just
+//! assemble a finish node from immediate children. If you want to do something
+//! smarter than that, it belongs to the `ext` submodule.
+//!
+//! Keep in mind that `from_text` functions should be kept private. The public
+//! API should require to assemble every node piecewise. The trick of
+//! `parse(format!())` we use internally is an implementation detail -- long
+//! term, it will be replaced with direct tree manipulation.
+use itertools::Itertools;
+use stdx::{format_to, never};
+
+use crate::{ast, AstNode, SourceFile, SyntaxKind, SyntaxToken};
+
+/// While the parent module defines basic atomic "constructors", the `ext`
+/// module defines shortcuts for common things.
+///
+/// It's named `ext` rather than `shortcuts` just to keep it short.
/// While the parent module defines basic atomic "constructors", the `ext`
/// module defines shortcuts for common things.
///
/// It's named `ext` rather than `shortcuts` just to keep it short.
pub mod ext {
    use super::*;

    /// A plain `name` binding pattern (no `ref`/`mut`).
    pub fn simple_ident_pat(name: ast::Name) -> ast::IdentPat {
        return from_text(&name.text());

        fn from_text(text: &str) -> ast::IdentPat {
            ast_from_text(&format!("fn f({}: ())", text))
        }
    }
    /// A single-segment path from a raw identifier string.
    pub fn ident_path(ident: &str) -> ast::Path {
        path_unqualified(path_segment(name_ref(ident)))
    }

    /// Builds `a::b::c` from the given identifier parts; `None` when
    /// `parts` is empty.
    pub fn path_from_idents<'a>(
        parts: impl std::iter::IntoIterator<Item = &'a str>,
    ) -> Option<ast::Path> {
        let mut iter = parts.into_iter();
        let base = ext::ident_path(iter.next()?);
        let path = iter.fold(base, |base, s| {
            let path = ext::ident_path(s);
            path_concat(base, path)
        });
        Some(path)
    }

    /// Builds a field-access chain `a.b.c` from the given identifier parts;
    /// `None` when `parts` is empty.
    pub fn field_from_idents<'a>(
        parts: impl std::iter::IntoIterator<Item = &'a str>,
    ) -> Option<ast::Expr> {
        let mut iter = parts.into_iter();
        let base = expr_path(ext::ident_path(iter.next()?));
        let expr = iter.fold(base, expr_field);
        Some(expr)
    }

    pub fn expr_unreachable() -> ast::Expr {
        expr_from_text("unreachable!()")
    }
    pub fn expr_todo() -> ast::Expr {
        expr_from_text("todo!()")
    }
    pub fn expr_ty_default(ty: &ast::Type) -> ast::Expr {
        expr_from_text(&format!("{}::default()", ty))
    }
    pub fn expr_ty_new(ty: &ast::Type) -> ast::Expr {
        expr_from_text(&format!("{}::new()", ty))
    }

    pub fn zero_number() -> ast::Expr {
        expr_from_text("0")
    }
    pub fn zero_float() -> ast::Expr {
        expr_from_text("0.0")
    }
    pub fn empty_str() -> ast::Expr {
        expr_from_text(r#""""#)
    }
    pub fn empty_char() -> ast::Expr {
        expr_from_text("'\x00'")
    }
    pub fn default_bool() -> ast::Expr {
        expr_from_text("false")
    }
    pub fn option_none() -> ast::Expr {
        expr_from_text("None")
    }
    pub fn empty_block_expr() -> ast::BlockExpr {
        block_expr(None, None)
    }

    pub fn ty_bool() -> ast::Type {
        ty_path(ident_path("bool"))
    }
    pub fn ty_option(t: ast::Type) -> ast::Type {
        ty_from_text(&format!("Option<{}>", t))
    }
    pub fn ty_result(t: ast::Type, e: ast::Type) -> ast::Type {
        ty_from_text(&format!("Result<{}, {}>", t, e))
    }
}
+
/// A `Name` (definition site identifier), raw-escaped when needed.
pub fn name(text: &str) -> ast::Name {
    ast_from_text(&format!("mod {}{};", raw_ident_esc(text), text))
}
/// A `NameRef` (use site identifier), raw-escaped when needed.
pub fn name_ref(text: &str) -> ast::NameRef {
    ast_from_text(&format!("fn f() {{ {}{}; }}", raw_ident_esc(text), text))
}
+fn raw_ident_esc(ident: &str) -> &'static str {
+ let is_keyword = parser::SyntaxKind::from_keyword(ident).is_some();
+ if is_keyword && !matches!(ident, "self" | "crate" | "super" | "Self") {
+ "r#"
+ } else {
+ ""
+ }
+}
+
/// A lifetime from `text`, prepending the leading `'` if it is missing
/// (with a `never!` report, since callers are expected to pass it).
pub fn lifetime(text: &str) -> ast::Lifetime {
    let mut text = text;
    let tmp;
    if never!(!text.starts_with('\'')) {
        tmp = format!("'{}", text);
        text = &tmp;
    }
    ast_from_text(&format!("fn f<{}>() {{ }}", text))
}

// FIXME: replace stringly-typed constructor with a family of typed ctors, a-la
// `expr_xxx`.
pub fn ty(text: &str) -> ast::Type {
    ty_from_text(text)
}
/// The inference placeholder type `_`.
pub fn ty_placeholder() -> ast::Type {
    ty_from_text("_")
}
/// The unit type `()`.
pub fn ty_unit() -> ast::Type {
    ty_from_text("()")
}
/// A tuple type; a single-element tuple gets the mandatory trailing comma
/// (`(T,)`) to keep it a tuple rather than a parenthesized type.
pub fn ty_tuple(types: impl IntoIterator<Item = ast::Type>) -> ast::Type {
    let mut count: usize = 0;
    let mut contents = types.into_iter().inspect(|_| count += 1).join(", ");
    if count == 1 {
        contents.push(',');
    }

    ty_from_text(&format!("({})", contents))
}
/// `&T` or `&mut T` depending on `exclusive`.
pub fn ty_ref(target: ast::Type, exclusive: bool) -> ast::Type {
    ty_from_text(&if exclusive { format!("&mut {}", target) } else { format!("&{}", target) })
}
pub fn ty_path(path: ast::Path) -> ast::Type {
    ty_from_text(&path.to_string())
}
fn ty_from_text(text: &str) -> ast::Type {
    ast_from_text(&format!("type _T = {};", text))
}
+
/// An empty `{ }` associated-item list.
pub fn assoc_item_list() -> ast::AssocItemList {
    ast_from_text("impl C for D {}")
}

/// An inherent impl: `impl<params> ty<ty_params> {}`.
pub fn impl_(
    ty: ast::Path,
    params: Option<ast::GenericParamList>,
    ty_params: Option<ast::GenericParamList>,
) -> ast::Impl {
    let params = match params {
        Some(params) => params.to_string(),
        None => String::new(),
    };
    let ty_params = match ty_params {
        Some(params) => params.to_string(),
        None => String::new(),
    };
    ast_from_text(&format!("impl{} {}{} {{}}", params, ty, ty_params))
}

/// A trait impl: `impl<P> trait_ for ty<P> {}`.
pub fn impl_trait(
    trait_: ast::Path,
    ty: ast::Path,
    ty_params: Option<ast::GenericParamList>,
) -> ast::Impl {
    let ty_params = ty_params.map_or_else(String::new, |params| params.to_string());
    // Note: positional `{2}` splices `ty_params` twice — once after `impl`
    // and once after the self type.
    ast_from_text(&format!("impl{2} {} for {}{2} {{}}", trait_, ty, ty_params))
}

/// An empty `<>` generic argument list.
pub(crate) fn generic_arg_list() -> ast::GenericArgList {
    ast_from_text("const S: T<> = ();")
}
+
/// A path segment from a plain name reference.
pub fn path_segment(name_ref: ast::NameRef) -> ast::PathSegment {
    ast_from_text(&format!("type __ = {};", name_ref))
}

/// A qualified-path segment: `<Type as Trait>` (or `<Type>` without a
/// trait).
pub fn path_segment_ty(type_ref: ast::Type, trait_ref: Option<ast::PathType>) -> ast::PathSegment {
    let text = match trait_ref {
        Some(trait_ref) => format!("fn f(x: <{} as {}>) {{}}", type_ref, trait_ref),
        None => format!("fn f(x: <{}>) {{}}", type_ref),
    };
    ast_from_text(&text)
}

/// The `self` path segment.
pub fn path_segment_self() -> ast::PathSegment {
    ast_from_text("use self;")
}

/// The `super` path segment.
pub fn path_segment_super() -> ast::PathSegment {
    ast_from_text("use super;")
}

/// The `crate` path segment.
pub fn path_segment_crate() -> ast::PathSegment {
    ast_from_text("use crate;")
}

/// A one-segment path.
pub fn path_unqualified(segment: ast::PathSegment) -> ast::Path {
    ast_from_text(&format!("type __ = {};", segment))
}

/// `qual::segment`.
pub fn path_qualified(qual: ast::Path, segment: ast::PathSegment) -> ast::Path {
    ast_from_text(&format!("{}::{}", qual, segment))
}
// FIXME: path concatenation operation doesn't make sense as AST op.
pub fn path_concat(first: ast::Path, second: ast::Path) -> ast::Path {
    ast_from_text(&format!("type __ = {}::{};", first, second))
}
+
/// Joins `segments` with `::`, optionally prefixed by `::` for an absolute
/// path.
pub fn path_from_segments(
    segments: impl IntoIterator<Item = ast::PathSegment>,
    is_abs: bool,
) -> ast::Path {
    let segments = segments.into_iter().map(|it| it.syntax().clone()).join("::");
    ast_from_text(&if is_abs {
        format!("fn f(x: ::{}) {{}}", segments)
    } else {
        format!("fn f(x: {}) {{}}", segments)
    })
}

/// Joins whole paths with `::`.
pub fn join_paths(paths: impl IntoIterator<Item = ast::Path>) -> ast::Path {
    let paths = paths.into_iter().map(|it| it.syntax().clone()).join("::");
    ast_from_text(&format!("type __ = {};", paths))
}

// FIXME: should not be pub
pub fn path_from_text(text: &str) -> ast::Path {
    ast_from_text(&format!("fn main() {{ let test = {}; }}", text))
}
+
/// The glob use tree `*`.
pub fn use_tree_glob() -> ast::UseTree {
    ast_from_text("use *;")
}
/// A use tree `path::{use_tree_list}::* as alias`, with every part after
/// `path` optional.
pub fn use_tree(
    path: ast::Path,
    use_tree_list: Option<ast::UseTreeList>,
    alias: Option<ast::Rename>,
    add_star: bool,
) -> ast::UseTree {
    let mut buf = "use ".to_string();
    buf += &path.syntax().to_string();
    if let Some(use_tree_list) = use_tree_list {
        format_to!(buf, "::{}", use_tree_list);
    }
    if add_star {
        buf += "::*";
    }

    if let Some(alias) = alias {
        format_to!(buf, " {}", alias);
    }
    ast_from_text(&buf)
}

/// A `{a, b, c}` use tree list.
pub fn use_tree_list(use_trees: impl IntoIterator<Item = ast::UseTree>) -> ast::UseTreeList {
    let use_trees = use_trees.into_iter().map(|it| it.syntax().clone()).join(", ");
    ast_from_text(&format!("use {{{}}};", use_trees))
}

/// A whole `use` item, with optional visibility.
pub fn use_(visibility: Option<ast::Visibility>, use_tree: ast::UseTree) -> ast::Use {
    let visibility = match visibility {
        None => String::new(),
        Some(it) => format!("{} ", it),
    };
    ast_from_text(&format!("{}use {};", visibility, use_tree))
}
+
+// `Path { fields }` record literal expression.
+pub fn record_expr(path: ast::Path, fields: ast::RecordExprFieldList) -> ast::RecordExpr {
+ ast_from_text(&format!("fn f() {{ {} {} }}", path, fields))
+}
+
+pub fn record_expr_field_list(
+ fields: impl IntoIterator<Item = ast::RecordExprField>,
+) -> ast::RecordExprFieldList {
+ let fields = fields.into_iter().join(", ");
+ ast_from_text(&format!("fn f() {{ S {{ {} }} }}", fields))
+}
+
+// `name: expr`, or shorthand `name` when `expr` is `None`.
+pub fn record_expr_field(name: ast::NameRef, expr: Option<ast::Expr>) -> ast::RecordExprField {
+ return match expr {
+ Some(expr) => from_text(&format!("{}: {}", name, expr)),
+ None => from_text(&name.to_string()),
+ };
+
+ fn from_text(text: &str) -> ast::RecordExprField {
+ ast_from_text(&format!("fn f() {{ S {{ {}, }} }}", text))
+ }
+}
+
+// `[vis ]name: ty` struct field declaration.
+pub fn record_field(
+ visibility: Option<ast::Visibility>,
+ name: ast::Name,
+ ty: ast::Type,
+) -> ast::RecordField {
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+ ast_from_text(&format!("struct S {{ {}{}: {}, }}", visibility, name, ty))
+}
+
+// Builds a `{ ... }` block from statements plus an optional tail expression.
+// Each item is rendered on its own four-space-indented line.
+pub fn block_expr(
+ stmts: impl IntoIterator<Item = ast::Stmt>,
+ tail_expr: Option<ast::Expr>,
+) -> ast::BlockExpr {
+ let mut buf = "{\n".to_string();
+ for stmt in stmts.into_iter() {
+ format_to!(buf, " {}\n", stmt);
+ }
+ if let Some(tail_expr) = tail_expr {
+ format_to!(buf, " {}\n", tail_expr);
+ }
+ buf += "}";
+ ast_from_text(&format!("fn f() {}", buf))
+}
+
+/// Ideally this function wouldn't exist since it involves manual indenting.
+/// It differs from `make::block_expr` by also supporting comments.
+///
+/// FIXME: replace usages of this with the mutable syntax tree API
+pub fn hacky_block_expr_with_comments(
+ elements: impl IntoIterator<Item = crate::SyntaxElement>,
+ tail_expr: Option<ast::Expr>,
+) -> ast::BlockExpr {
+ let mut buf = "{\n".to_string();
+ for node_or_token in elements.into_iter() {
+ match node_or_token {
+ rowan::NodeOrToken::Node(n) => format_to!(buf, " {}\n", n),
+ rowan::NodeOrToken::Token(t) if t.kind() == SyntaxKind::COMMENT => {
+ format_to!(buf, " {}\n", t)
+ }
+ // Non-comment tokens (whitespace etc.) are dropped.
+ _ => (),
+ }
+ }
+ if let Some(tail_expr) = tail_expr {
+ format_to!(buf, " {}\n", tail_expr);
+ }
+ buf += "}";
+ ast_from_text(&format!("fn f() {}", buf))
+}
+
+// Simple expression constructors; all funnel through `expr_from_text`.
+pub fn expr_unit() -> ast::Expr {
+ expr_from_text("()")
+}
+// `text` must be pre-trimmed; returns the literal node, not an `Expr`.
+pub fn expr_literal(text: &str) -> ast::Literal {
+ assert_eq!(text.trim(), text);
+ ast_from_text(&format!("fn f() {{ let _ = {}; }}", text))
+}
+
+pub fn expr_empty_block() -> ast::Expr {
+ expr_from_text("{}")
+}
+pub fn expr_path(path: ast::Path) -> ast::Expr {
+ expr_from_text(&path.to_string())
+}
+pub fn expr_continue(label: Option<ast::Lifetime>) -> ast::Expr {
+ match label {
+ Some(label) => expr_from_text(&format!("continue {}", label)),
+ None => expr_from_text("continue"),
+ }
+}
+// Consider `op: SyntaxKind` instead for nicer syntax at the call-site?
+pub fn expr_bin_op(lhs: ast::Expr, op: ast::BinaryOp, rhs: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("{} {} {}", lhs, op, rhs))
+}
+pub fn expr_break(label: Option<ast::Lifetime>, expr: Option<ast::Expr>) -> ast::Expr {
+ let mut s = String::from("break");
+
+ if let Some(label) = label {
+ format_to!(s, " {}", label);
+ }
+
+ if let Some(expr) = expr {
+ format_to!(s, " {}", expr);
+ }
+
+ expr_from_text(&s)
+}
+pub fn expr_return(expr: Option<ast::Expr>) -> ast::Expr {
+ match expr {
+ Some(expr) => expr_from_text(&format!("return {}", expr)),
+ None => expr_from_text("return"),
+ }
+}
+pub fn expr_try(expr: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("{}?", expr))
+}
+pub fn expr_await(expr: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("{}.await", expr))
+}
+pub fn expr_match(expr: ast::Expr, match_arm_list: ast::MatchArmList) -> ast::Expr {
+ expr_from_text(&format!("match {} {}", expr, match_arm_list))
+}
+pub fn expr_if(
+ condition: ast::Expr,
+ then_branch: ast::BlockExpr,
+ else_branch: Option<ast::ElseBranch>,
+) -> ast::Expr {
+ let else_branch = match else_branch {
+ Some(ast::ElseBranch::Block(block)) => format!("else {}", block),
+ Some(ast::ElseBranch::IfExpr(if_expr)) => format!("else {}", if_expr),
+ None => String::new(),
+ };
+ expr_from_text(&format!("if {} {} {}", condition, then_branch, else_branch))
+}
+pub fn expr_for_loop(pat: ast::Pat, expr: ast::Expr, block: ast::BlockExpr) -> ast::Expr {
+ expr_from_text(&format!("for {} in {} {}", pat, expr, block))
+}
+
+pub fn expr_loop(block: ast::BlockExpr) -> ast::Expr {
+ expr_from_text(&format!("loop {}", block))
+}
+
+// Prefix/unary expression: `op` is the operator's `SyntaxKind` token.
+pub fn expr_prefix(op: SyntaxKind, expr: ast::Expr) -> ast::Expr {
+ let token = token(op);
+ expr_from_text(&format!("{}{}", token, expr))
+}
+pub fn expr_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::Expr {
+ expr_from_text(&format!("{}{}", f, arg_list))
+}
+pub fn expr_method_call(
+ receiver: ast::Expr,
+ method: ast::NameRef,
+ arg_list: ast::ArgList,
+) -> ast::Expr {
+ expr_from_text(&format!("{}.{}{}", receiver, method, arg_list))
+}
+pub fn expr_macro_call(f: ast::Expr, arg_list: ast::ArgList) -> ast::Expr {
+ expr_from_text(&format!("{}!{}", f, arg_list))
+}
+// `&expr`, or `&mut expr` when `exclusive`.
+pub fn expr_ref(expr: ast::Expr, exclusive: bool) -> ast::Expr {
+ expr_from_text(&if exclusive { format!("&mut {}", expr) } else { format!("&{}", expr) })
+}
+pub fn expr_closure(pats: impl IntoIterator<Item = ast::Param>, expr: ast::Expr) -> ast::Expr {
+ let params = pats.into_iter().join(", ");
+ expr_from_text(&format!("|{}| {}", params, expr))
+}
+pub fn expr_field(receiver: ast::Expr, field: &str) -> ast::Expr {
+ expr_from_text(&format!("{}.{}", receiver, field))
+}
+pub fn expr_paren(expr: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("({})", expr))
+}
+// NOTE(review): a single-element tuple would render without a trailing comma,
+// i.e. as a parenthesized expression — confirm callers never pass one element.
+pub fn expr_tuple(elements: impl IntoIterator<Item = ast::Expr>) -> ast::Expr {
+ let expr = elements.into_iter().format(", ");
+ expr_from_text(&format!("({})", expr))
+}
+pub fn expr_assignment(lhs: ast::Expr, rhs: ast::Expr) -> ast::Expr {
+ expr_from_text(&format!("{} = {}", lhs, rhs))
+}
+// Shared template: parse the text as a const initializer expression.
+fn expr_from_text(text: &str) -> ast::Expr {
+ ast_from_text(&format!("const C: () = {};", text))
+}
+// `let pat = expr` condition, extracted from a `while let` template.
+pub fn expr_let(pattern: ast::Pat, expr: ast::Expr) -> ast::LetExpr {
+ ast_from_text(&format!("const _: () = while let {} = {} {{}};", pattern, expr))
+}
+
+pub fn arg_list(args: impl IntoIterator<Item = ast::Expr>) -> ast::ArgList {
+ ast_from_text(&format!("fn main() {{ ()({}) }}", args.into_iter().format(", ")))
+}
+
+// `[ref ][mut ]name` binding pattern, built via a fn-parameter template.
+pub fn ident_pat(ref_: bool, mut_: bool, name: ast::Name) -> ast::IdentPat {
+ let mut s = String::from("fn f(");
+ if ref_ {
+ s.push_str("ref ");
+ }
+ if mut_ {
+ s.push_str("mut ");
+ }
+ format_to!(s, "{}", name);
+ s.push_str(": ())");
+ ast_from_text(&s)
+}
+
+// The `_` wildcard pattern.
+pub fn wildcard_pat() -> ast::WildcardPat {
+ return from_text("_");
+
+ fn from_text(text: &str) -> ast::WildcardPat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
+
+// Literal pattern; extracted from a match-arm template since literals are
+// not valid in parameter position.
+pub fn literal_pat(lit: &str) -> ast::LiteralPat {
+ return from_text(lit);
+
+ fn from_text(text: &str) -> ast::LiteralPat {
+ ast_from_text(&format!("fn f() {{ match x {{ {} => {{}} }} }}", text))
+ }
+}
+
+/// Creates a tuple of patterns from an iterator of patterns.
+///
+/// Invariant: `pats` must be length > 0
+pub fn tuple_pat(pats: impl IntoIterator<Item = ast::Pat>) -> ast::TuplePat {
+ let mut count: usize = 0;
+ let mut pats_str = pats.into_iter().inspect(|_| count += 1).join(", ");
+ // A one-element tuple needs the trailing comma to stay a tuple.
+ if count == 1 {
+ pats_str.push(',');
+ }
+ return from_text(&format!("({})", pats_str));
+
+ fn from_text(text: &str) -> ast::TuplePat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
+
+pub fn tuple_struct_pat(
+ path: ast::Path,
+ pats: impl IntoIterator<Item = ast::Pat>,
+) -> ast::TupleStructPat {
+ let pats_str = pats.into_iter().join(", ");
+ return from_text(&format!("{}({})", path, pats_str));
+
+ fn from_text(text: &str) -> ast::TupleStructPat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
+
+pub fn record_pat(path: ast::Path, pats: impl IntoIterator<Item = ast::Pat>) -> ast::RecordPat {
+ let pats_str = pats.into_iter().join(", ");
+ return from_text(&format!("{} {{ {} }}", path, pats_str));
+
+ fn from_text(text: &str) -> ast::RecordPat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
+
+// NOTE(review): the templates below contain an extra `)`; the parser is
+// error-resilient and the wanted node is still found — confirm intended.
+pub fn record_pat_with_fields(path: ast::Path, fields: ast::RecordPatFieldList) -> ast::RecordPat {
+ ast_from_text(&format!("fn f({} {}: ()))", path, fields))
+}
+
+pub fn record_pat_field_list(
+ fields: impl IntoIterator<Item = ast::RecordPatField>,
+) -> ast::RecordPatFieldList {
+ let fields = fields.into_iter().join(", ");
+ ast_from_text(&format!("fn f(S {{ {} }}: ()))", fields))
+}
+
+pub fn record_pat_field(name_ref: ast::NameRef, pat: ast::Pat) -> ast::RecordPatField {
+ ast_from_text(&format!("fn f(S {{ {}: {} }}: ()))", name_ref, pat))
+}
+
+pub fn record_pat_field_shorthand(name_ref: ast::NameRef) -> ast::RecordPatField {
+ ast_from_text(&format!("fn f(S {{ {} }}: ()))", name_ref))
+}
+
+/// Returns a `BindPat` if the path has just one segment, a `PathPat` otherwise.
+pub fn path_pat(path: ast::Path) -> ast::Pat {
+ return from_text(&path.to_string());
+ fn from_text(text: &str) -> ast::Pat {
+ ast_from_text(&format!("fn f({}: ())", text))
+ }
+}
+
+// `pat1 | pat2 [if guard] => expr` match arm.
+pub fn match_arm(
+ pats: impl IntoIterator<Item = ast::Pat>,
+ guard: Option<ast::Expr>,
+ expr: ast::Expr,
+) -> ast::MatchArm {
+ let pats_str = pats.into_iter().join(" | ");
+ return match guard {
+ Some(guard) => from_text(&format!("{} if {} => {}", pats_str, guard, expr)),
+ None => from_text(&format!("{} => {}", pats_str, expr)),
+ };
+
+ fn from_text(text: &str) -> ast::MatchArm {
+ ast_from_text(&format!("fn f() {{ match () {{{}}} }}", text))
+ }
+}
+
+// Like `match_arm`, but the guard is mandatory.
+pub fn match_arm_with_guard(
+ pats: impl IntoIterator<Item = ast::Pat>,
+ guard: ast::Expr,
+ expr: ast::Expr,
+) -> ast::MatchArm {
+ let pats_str = pats.into_iter().join(" | ");
+ return from_text(&format!("{} if {} => {}", pats_str, guard, expr));
+
+ fn from_text(text: &str) -> ast::MatchArm {
+ ast_from_text(&format!("fn f() {{ match () {{{}}} }}", text))
+ }
+}
+
+pub fn match_arm_list(arms: impl IntoIterator<Item = ast::MatchArm>) -> ast::MatchArmList {
+ let arms_str = arms
+ .into_iter()
+ .map(|arm| {
+ // Block-like arm bodies (`{ .. }`) don't need a trailing comma.
+ let needs_comma = arm.expr().map_or(true, |it| !it.is_block_like());
+ let comma = if needs_comma { "," } else { "" };
+ format!(" {}{}\n", arm.syntax(), comma)
+ })
+ .collect::<String>();
+ return from_text(&arms_str);
+
+ fn from_text(text: &str) -> ast::MatchArmList {
+ ast_from_text(&format!("fn f() {{ match () {{\n{}}} }}", text))
+ }
+}
+
+// `path: bound1 + bound2` where-clause predicate.
+pub fn where_pred(
+ path: ast::Path,
+ bounds: impl IntoIterator<Item = ast::TypeBound>,
+) -> ast::WherePred {
+ let bounds = bounds.into_iter().join(" + ");
+ return from_text(&format!("{}: {}", path, bounds));
+
+ fn from_text(text: &str) -> ast::WherePred {
+ ast_from_text(&format!("fn f() where {} {{ }}", text))
+ }
+}
+
+pub fn where_clause(preds: impl IntoIterator<Item = ast::WherePred>) -> ast::WhereClause {
+ let preds = preds.into_iter().join(", ");
+ return from_text(preds.as_str());
+
+ fn from_text(text: &str) -> ast::WhereClause {
+ ast_from_text(&format!("fn f() where {} {{ }}", text))
+ }
+}
+
+// `let pat[: ty][ = initializer];` statement.
+pub fn let_stmt(
+ pattern: ast::Pat,
+ ty: Option<ast::Type>,
+ initializer: Option<ast::Expr>,
+) -> ast::LetStmt {
+ let mut text = String::new();
+ format_to!(text, "let {}", pattern);
+ if let Some(ty) = ty {
+ format_to!(text, ": {}", ty);
+ }
+ match initializer {
+ Some(it) => format_to!(text, " = {};", it),
+ None => format_to!(text, ";"),
+ };
+ ast_from_text(&format!("fn f() {{ {} }}", text))
+}
+// Expression statement; block-like expressions get no trailing `;`.
+pub fn expr_stmt(expr: ast::Expr) -> ast::ExprStmt {
+ let semi = if expr.is_block_like() { "" } else { ";" };
+ ast_from_text(&format!("fn f() {{ {}{} (); }}", expr, semi))
+}
+
+/// `[vis ]const NAME: TY = EXPR;` item.
+pub fn item_const(
+ visibility: Option<ast::Visibility>,
+ name: ast::Name,
+ ty: ast::Type,
+ expr: ast::Expr,
+) -> ast::Const {
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+ // `visibility` already carries its trailing space (same convention as
+ // `use_`/`record_field`/`fn_`), so the template must not add another one:
+ // `"{} const"` would render `pub  const` or a leading space for `None`.
+ ast_from_text(&format!("{}const {}: {} = {};", visibility, name, ty, expr))
+}
+
+// `pat: ty` function parameter.
+pub fn param(pat: ast::Pat, ty: ast::Type) -> ast::Param {
+ ast_from_text(&format!("fn f({}: {}) {{ }}", pat, ty))
+}
+
+// The `&self` receiver parameter.
+pub fn self_param() -> ast::SelfParam {
+ ast_from_text("fn f(&self) { }")
+}
+
+// `-> ty` return-type clause.
+pub fn ret_type(ty: ast::Type) -> ast::RetType {
+ ast_from_text(&format!("fn f() -> {} {{ }}", ty))
+}
+
+// `(self_param, pats...)` — the self parameter, if any, always comes first.
+pub fn param_list(
+ self_param: Option<ast::SelfParam>,
+ pats: impl IntoIterator<Item = ast::Param>,
+) -> ast::ParamList {
+ let args = pats.into_iter().join(", ");
+ let list = match self_param {
+ Some(self_param) if args.is_empty() => format!("fn f({}) {{ }}", self_param),
+ Some(self_param) => format!("fn f({}, {}) {{ }}", self_param, args),
+ None => format!("fn f({}) {{ }}", args),
+ };
+ ast_from_text(&list)
+}
+
+// `name[: bounds]` generic type parameter.
+pub fn type_param(name: ast::Name, ty: Option<ast::TypeBoundList>) -> ast::TypeParam {
+ let bound = match ty {
+ Some(it) => format!(": {}", it),
+ None => String::new(),
+ };
+ ast_from_text(&format!("fn f<{}{}>() {{ }}", name, bound))
+}
+
+pub fn lifetime_param(lifetime: ast::Lifetime) -> ast::LifetimeParam {
+ ast_from_text(&format!("fn f<{}>() {{ }}", lifetime))
+}
+
+pub fn generic_param_list(
+ pats: impl IntoIterator<Item = ast::GenericParam>,
+) -> ast::GenericParamList {
+ let args = pats.into_iter().join(", ");
+ ast_from_text(&format!("fn f<{}>() {{ }}", args))
+}
+
+pub fn visibility_pub_crate() -> ast::Visibility {
+ ast_from_text("pub(crate) struct S")
+}
+
+pub fn visibility_pub() -> ast::Visibility {
+ ast_from_text("pub struct S")
+}
+
+pub fn tuple_field_list(fields: impl IntoIterator<Item = ast::TupleField>) -> ast::TupleFieldList {
+ let fields = fields.into_iter().join(", ");
+ ast_from_text(&format!("struct f({});", fields))
+}
+
+pub fn record_field_list(
+ fields: impl IntoIterator<Item = ast::RecordField>,
+) -> ast::RecordFieldList {
+ let fields = fields.into_iter().join(", ");
+ ast_from_text(&format!("struct f {{ {} }}", fields))
+}
+
+// `[vis ]ty` tuple-struct field.
+pub fn tuple_field(visibility: Option<ast::Visibility>, ty: ast::Type) -> ast::TupleField {
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+ ast_from_text(&format!("struct f({}{});", visibility, ty))
+}
+
+// Enum variant; record field lists get a separating space, tuple lists don't.
+pub fn variant(name: ast::Name, field_list: Option<ast::FieldList>) -> ast::Variant {
+ let field_list = match field_list {
+ None => String::new(),
++ Some(it) => match it {
++ ast::FieldList::RecordFieldList(record) => format!(" {}", record),
++ ast::FieldList::TupleFieldList(tuple) => format!("{}", tuple),
++ },
+ };
+ ast_from_text(&format!("enum f {{ {}{} }}", name, field_list))
+}
+
+// `[vis ][async ]fn name[<params>](args)[ -> ret] body` item.
+pub fn fn_(
+ visibility: Option<ast::Visibility>,
+ fn_name: ast::Name,
+ type_params: Option<ast::GenericParamList>,
+ params: ast::ParamList,
+ body: ast::BlockExpr,
+ ret_type: Option<ast::RetType>,
+ is_async: bool,
+) -> ast::Fn {
+ let type_params = match type_params {
+ Some(type_params) => format!("{}", type_params),
+ None => "".into(),
+ };
+ let ret_type = match ret_type {
+ Some(ret_type) => format!("{} ", ret_type),
+ None => "".into(),
+ };
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+
+ let async_literal = if is_async { "async " } else { "" };
+
+ ast_from_text(&format!(
+ "{}{}fn {}{}{} {}{}",
+ visibility, async_literal, fn_name, type_params, params, ret_type, body
+ ))
+}
+
+// Struct item; tuple-struct bodies need the trailing `;`.
+pub fn struct_(
+ visibility: Option<ast::Visibility>,
+ strukt_name: ast::Name,
+ generic_param_list: Option<ast::GenericParamList>,
+ field_list: ast::FieldList,
+) -> ast::Struct {
+ let semicolon = if matches!(field_list, ast::FieldList::TupleFieldList(_)) { ";" } else { "" };
+ let type_params = generic_param_list.map_or_else(String::new, |it| it.to_string());
+ let visibility = match visibility {
+ None => String::new(),
+ Some(it) => format!("{} ", it),
+ };
+
+ ast_from_text(&format!(
+ "{}struct {}{}{}{}",
+ visibility, strukt_name, type_params, field_list, semicolon
+ ))
+}
+
+// Parses `text` and returns the first descendant node that casts to `N`,
+// detached into its own subtree. Panics when no such node is found.
+#[track_caller]
+fn ast_from_text<N: AstNode>(text: &str) -> N {
+ let parse = SourceFile::parse(text);
+ let node = match parse.tree().syntax().descendants().find_map(N::cast) {
+ Some(it) => it,
+ None => {
+ panic!("Failed to make ast node `{}` from text {}", std::any::type_name::<N>(), text)
+ }
+ };
+ // `clone_subtree` re-roots the node so its range starts at zero.
+ let node = node.clone_subtree();
+ assert_eq!(node.syntax().text_range().start(), 0.into());
+ node
+}
+
+// Fetches a mutable token of the given kind from the canonical SOURCE_FILE;
+// panics if the kind is not present in that fixture.
+pub fn token(kind: SyntaxKind) -> SyntaxToken {
+ tokens::SOURCE_FILE
+ .tree()
+ .syntax()
+ .clone_for_update()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == kind)
+ .unwrap_or_else(|| panic!("unhandled token: {:?}", kind))
+}
+
+// Helpers for obtaining individual tokens (whitespace, comments, literals).
+pub mod tokens {
+ use once_cell::sync::Lazy;
+
+ use crate::{ast, AstNode, Parse, SourceFile, SyntaxKind::*, SyntaxToken};
+
+ // Fixture parsed once; chosen so it contains one token of each kind that
+ // `make::token` and the helpers below need (operators, `\n`, `\n\n`, ...).
+ pub(super) static SOURCE_FILE: Lazy<Parse<SourceFile>> = Lazy::new(|| {
+ SourceFile::parse(
+ "const C: <()>::Item = (1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p)\n;\n\n",
+ )
+ });
+
+ pub fn single_space() -> SyntaxToken {
+ SOURCE_FILE
+ .tree()
+ .syntax()
+ .clone_for_update()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == WHITESPACE && it.text() == " ")
+ .unwrap()
+ }
+
+ // `text` must be pure whitespace.
+ pub fn whitespace(text: &str) -> SyntaxToken {
+ assert!(text.trim().is_empty());
+ let sf = SourceFile::parse(text).ok().unwrap();
+ sf.syntax().clone_for_update().first_child_or_token().unwrap().into_token().unwrap()
+ }
+
+ pub fn doc_comment(text: &str) -> SyntaxToken {
+ assert!(!text.trim().is_empty());
+ let sf = SourceFile::parse(text).ok().unwrap();
+ sf.syntax().first_child_or_token().unwrap().into_token().unwrap()
+ }
+
+ pub fn literal(text: &str) -> SyntaxToken {
+ assert_eq!(text.trim(), text);
+ let lit: ast::Literal = super::ast_from_text(&format!("fn f() {{ let _ = {}; }}", text));
+ lit.syntax().first_child_or_token().unwrap().into_token().unwrap()
+ }
+
+ pub fn single_newline() -> SyntaxToken {
+ let res = SOURCE_FILE
+ .tree()
+ .syntax()
+ .clone_for_update()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == WHITESPACE && it.text() == "\n")
+ .unwrap();
+ // Detach so the token can be inserted elsewhere.
+ res.detach();
+ res
+ }
+
+ pub fn blank_line() -> SyntaxToken {
+ SOURCE_FILE
+ .tree()
+ .syntax()
+ .clone_for_update()
+ .descendants_with_tokens()
+ .filter_map(|it| it.into_token())
+ .find(|it| it.kind() == WHITESPACE && it.text() == "\n\n")
+ .unwrap()
+ }
+
+ // Wraps a parsed file to hand out its first token as whitespace.
+ pub struct WsBuilder(SourceFile);
+
+ impl WsBuilder {
+ pub fn new(text: &str) -> WsBuilder {
+ WsBuilder(SourceFile::parse(text).ok().unwrap())
+ }
+ pub fn ws(&self) -> SyntaxToken {
+ self.0.syntax().first_child_or_token().unwrap().into_token().unwrap()
+ }
+ }
+}
--- /dev/null
- //! Syntax Tree library used throughout the rust analyzer.
++//! Syntax Tree library used throughout the rust-analyzer.
+//!
+//! Properties:
+//! - easy and fast incremental re-parsing
+//! - graceful handling of errors
+//! - full-fidelity representation (*any* text can be precisely represented as
+//! a syntax tree)
+//!
+//! For more information, see the [RFC]. Current implementation is inspired by
+//! the [Swift] one.
+//!
+//! The most interesting modules here are `syntax_node` (which defines concrete
+//! syntax tree) and `ast` (which defines abstract syntax tree on top of the
+//! CST). The actual parser live in a separate `parser` crate, though the
+//! lexer lives in this crate.
+//!
+//! See `api_walkthrough` test in this file for a quick API tour!
+//!
+//! [RFC]: <https://github.com/rust-lang/rfcs/pull/2256>
+//! [Swift]: <https://github.com/apple/swift/blob/13d593df6f359d0cb2fc81cfaac273297c539455/lib/Syntax/README.md>
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+// Shadow std's `eprintln!` with the `stdx` variant crate-wide.
+#[allow(unused)]
+macro_rules! eprintln {
+ ($($tt:tt)*) => { stdx::eprintln!($($tt)*) };
+}
+
+mod syntax_node;
+mod syntax_error;
+mod parsing;
+mod validation;
+mod ptr;
+mod token_text;
+#[cfg(test)]
+mod tests;
+
+pub mod algo;
+pub mod ast;
+#[doc(hidden)]
+pub mod fuzz;
+pub mod utils;
+pub mod ted;
+pub mod hacks;
+
+use std::{marker::PhantomData, sync::Arc};
+
+use stdx::format_to;
+use text_edit::Indel;
+
+pub use crate::{
+ ast::{AstNode, AstToken},
+ ptr::{AstPtr, SyntaxNodePtr},
+ syntax_error::SyntaxError,
+ syntax_node::{
+ PreorderWithTokens, RustLanguage, SyntaxElement, SyntaxElementChildren, SyntaxNode,
+ SyntaxNodeChildren, SyntaxToken, SyntaxTreeBuilder,
+ },
+ token_text::TokenText,
+};
+pub use parser::{SyntaxKind, T};
+pub use rowan::{
+ api::Preorder, Direction, GreenNode, NodeOrToken, SyntaxText, TextRange, TextSize,
+ TokenAtOffset, WalkEvent,
+};
+pub use smol_str::SmolStr;
+
+/// `Parse` is the result of the parsing: a syntax tree and a collection of
+/// errors.
+///
+/// Note that we always produce a syntax tree, even for completely invalid
+/// files.
+#[derive(Debug, PartialEq, Eq)]
+pub struct Parse<T> {
+ green: GreenNode,
+ errors: Arc<Vec<SyntaxError>>,
+ // Phantom marker for the root AST node type; `fn() -> T` keeps
+ // `Parse<T>` covariant and `Send`/`Sync` regardless of `T`.
+ _ty: PhantomData<fn() -> T>,
+}
+
+// Manual impl: `derive(Clone)` would require `T: Clone`, which is unneeded
+// since `T` is phantom.
+impl<T> Clone for Parse<T> {
+ fn clone(&self) -> Parse<T> {
+ Parse { green: self.green.clone(), errors: self.errors.clone(), _ty: PhantomData }
+ }
+}
+
+impl<T> Parse<T> {
+ fn new(green: GreenNode, errors: Vec<SyntaxError>) -> Parse<T> {
+ Parse { green, errors: Arc::new(errors), _ty: PhantomData }
+ }
+
+ // Builds a fresh root `SyntaxNode` over the shared green tree.
+ pub fn syntax_node(&self) -> SyntaxNode {
+ SyntaxNode::new_root(self.green.clone())
+ }
+ pub fn errors(&self) -> &[SyntaxError] {
+ &*self.errors
+ }
+}
+
+impl<T: AstNode> Parse<T> {
+ // Forget the typed root, keeping the raw `SyntaxNode` view.
+ pub fn to_syntax(self) -> Parse<SyntaxNode> {
+ Parse { green: self.green, errors: self.errors, _ty: PhantomData }
+ }
+
+ // The typed root; infallible because construction guarantees the cast.
+ pub fn tree(&self) -> T {
+ T::cast(self.syntax_node()).unwrap()
+ }
+
+ // Tree if error-free, otherwise the error list.
+ pub fn ok(self) -> Result<T, Arc<Vec<SyntaxError>>> {
+ if self.errors.is_empty() {
+ Ok(self.tree())
+ } else {
+ Err(self.errors)
+ }
+ }
+}
+
+impl Parse<SyntaxNode> {
+ // Re-type the parse to `N` if the root casts; `None` otherwise.
+ pub fn cast<N: AstNode>(self) -> Option<Parse<N>> {
+ if N::cast(self.syntax_node()).is_some() {
+ Some(Parse { green: self.green, errors: self.errors, _ty: PhantomData })
+ } else {
+ None
+ }
+ }
+}
+
+impl Parse<SourceFile> {
+ // Debug representation of the tree followed by all errors with ranges.
+ pub fn debug_dump(&self) -> String {
+ let mut buf = format!("{:#?}", self.tree().syntax());
+ for err in self.errors.iter() {
+ format_to!(buf, "error {:?}: {}\n", err.range(), err);
+ }
+ buf
+ }
+
+ // Applies `indel`, preferring the incremental path when possible.
+ pub fn reparse(&self, indel: &Indel) -> Parse<SourceFile> {
+ self.incremental_reparse(indel).unwrap_or_else(|| self.full_reparse(indel))
+ }
+
+ fn incremental_reparse(&self, indel: &Indel) -> Option<Parse<SourceFile>> {
+ // FIXME: validation errors are not handled here
+ parsing::incremental_reparse(self.tree().syntax(), indel, self.errors.to_vec()).map(
+ |(green_node, errors, _reparsed_range)| Parse {
+ green: green_node,
+ errors: Arc::new(errors),
+ _ty: PhantomData,
+ },
+ )
+ }
+
+ // Fallback: apply the edit textually and parse from scratch.
+ fn full_reparse(&self, indel: &Indel) -> Parse<SourceFile> {
+ let mut text = self.tree().syntax().text().to_string();
+ indel.apply(&mut text);
+ SourceFile::parse(&text)
+ }
+}
+
+/// `SourceFile` represents a parse tree for a single Rust file.
+pub use crate::ast::SourceFile;
+
+impl SourceFile {
+ // Parses text into a green tree plus parser errors, then appends
+ // validation errors computed over the resulting tree.
+ pub fn parse(text: &str) -> Parse<SourceFile> {
+ let (green, mut errors) = parsing::parse_text(text);
+ let root = SyntaxNode::new_root(green.clone());
+
+ errors.extend(validation::validate(&root));
+
+ assert_eq!(root.kind(), SyntaxKind::SOURCE_FILE);
+ Parse { green, errors: Arc::new(errors), _ty: PhantomData }
+ }
+}
+
+/// Matches a `SyntaxNode` against an `ast` type.
+///
+/// # Example:
+///
+/// ```ignore
+/// match_ast! {
+///     match node {
+///         ast::CallExpr(it) => { ... },
+///         ast::MethodCallExpr(it) => { ... },
+///         ast::MacroCall(it) => { ... },
+///         _ => None,
+///     }
+/// }
+/// ```
+#[macro_export]
+macro_rules! match_ast {
+ (match $node:ident { $($tt:tt)* }) => { match_ast!(match ($node) { $($tt)* }) };
+
+ (match ($node:expr) {
+ $( $( $path:ident )::+ ($it:pat) => $res:expr, )*
+ _ => $catch_all:expr $(,)?
+ }) => {{
+ // Expands to a chain of `if let Some(..) = Type::cast(..)` tests;
+ // the first successful cast wins, otherwise the catch-all runs.
+ $( if let Some($it) = $($path::)+cast($node.clone()) { $res } else )*
+ { $catch_all }
+ }};
+}
+
+/// This test does not assert anything and instead just shows off the crate's
+/// API.
+#[test]
+fn api_walkthrough() {
+ use ast::{HasModuleItem, HasName};
+
+ let source_code = "
+ fn foo() {
+ 1 + 1
+ }
+ ";
+ // `SourceFile` is the main entry point.
+ //
+ // The `parse` method returns a `Parse` -- a pair of syntax tree and a list
+ // of errors. That is, syntax tree is constructed even in presence of errors.
+ let parse = SourceFile::parse(source_code);
+ assert!(parse.errors().is_empty());
+
+ // The `tree` method returns an owned syntax node of type `SourceFile`.
+ // Owned nodes are cheap: inside, they are `Rc` handles to the underlying data.
+ let file: SourceFile = parse.tree();
+
+ // `SourceFile` is the root of the syntax tree. We can iterate file's items.
+ // Let's fetch the `foo` function.
+ let mut func = None;
+ for item in file.items() {
+ match item {
+ ast::Item::Fn(f) => func = Some(f),
+ _ => unreachable!(),
+ }
+ }
+ let func: ast::Fn = func.unwrap();
+
+ // Each AST node has a bunch of getters for children. All getters return
+ // `Option`s though, to account for incomplete code. Some getters are common
+ // for several kinds of node. In this case, a trait like `ast::HasName`
+ // usually exists. By convention, all ast types should be used with `ast::`
+ // qualifier.
+ let name: Option<ast::Name> = func.name();
+ let name = name.unwrap();
+ assert_eq!(name.text(), "foo");
+
+ // Let's get the `1 + 1` expression!
+ let body: ast::BlockExpr = func.body().unwrap();
+ let stmt_list: ast::StmtList = body.stmt_list().unwrap();
+ let expr: ast::Expr = stmt_list.tail_expr().unwrap();
+
+ // Enums are used to group related ast nodes together, and can be used for
+ // matching. However, because there are no public fields, it's possible to
+ // match only the top level enum: that is the price we pay for increased API
+ // flexibility
+ let bin_expr: &ast::BinExpr = match &expr {
+ ast::Expr::BinExpr(e) => e,
+ _ => unreachable!(),
+ };
+
+ // Besides the "typed" AST API, there's an untyped CST one as well.
+ // To switch from AST to CST, call `.syntax()` method:
+ let expr_syntax: &SyntaxNode = expr.syntax();
+
+ // Note how `expr` and `bin_expr` are in fact the same node underneath:
+ assert!(expr_syntax == bin_expr.syntax());
+
+ // To go from CST to AST, `AstNode::cast` function is used:
+ let _expr: ast::Expr = match ast::Expr::cast(expr_syntax.clone()) {
+ Some(e) => e,
+ None => unreachable!(),
+ };
+
+ // The two properties each syntax node has is a `SyntaxKind`:
+ assert_eq!(expr_syntax.kind(), SyntaxKind::BIN_EXPR);
+
+ // And text range:
+ assert_eq!(expr_syntax.text_range(), TextRange::new(32.into(), 37.into()));
+
+ // You can get node's text as a `SyntaxText` object, which will traverse the
+ // tree collecting token's text:
+ let text: SyntaxText = expr_syntax.text();
+ assert_eq!(text.to_string(), "1 + 1");
+
+ // There's a bunch of traversal methods on `SyntaxNode`:
+ assert_eq!(expr_syntax.parent().as_ref(), Some(stmt_list.syntax()));
+ assert_eq!(stmt_list.syntax().first_child_or_token().map(|it| it.kind()), Some(T!['{']));
+ assert_eq!(
+ expr_syntax.next_sibling_or_token().map(|it| it.kind()),
+ Some(SyntaxKind::WHITESPACE)
+ );
+
+ // As well as some iterator helpers:
+ let f = expr_syntax.ancestors().find_map(ast::Fn::cast);
+ assert_eq!(f, Some(func));
+ assert!(expr_syntax.siblings_with_tokens(Direction::Next).any(|it| it.kind() == T!['}']));
+ assert_eq!(
+ expr_syntax.descendants_with_tokens().count(),
+ 8, // 5 tokens `1`, ` `, `+`, ` `, `1`
+ // 2 child literal expressions: `1`, `1`
+ // 1 the node itself: `1 + 1`
+ );
+
+ // There's also a `preorder` method with a more fine-grained iteration control:
+ let mut buf = String::new();
+ let mut indent = 0;
+ for event in expr_syntax.preorder_with_tokens() {
+ match event {
+ WalkEvent::Enter(node) => {
+ let text = match &node {
+ NodeOrToken::Node(it) => it.text().to_string(),
+ NodeOrToken::Token(it) => it.text().to_string(),
+ };
+ format_to!(buf, "{:indent$}{:?} {:?}\n", " ", text, node.kind(), indent = indent);
+ indent += 2;
+ }
+ WalkEvent::Leave(_) => indent -= 2,
+ }
+ }
+ assert_eq!(indent, 0);
+ assert_eq!(
+ buf.trim(),
+ r#"
+"1 + 1" BIN_EXPR
+ "1" LITERAL
+ "1" INT_NUMBER
+ " " WHITESPACE
+ "+" PLUS
+ " " WHITESPACE
+ "1" LITERAL
+ "1" INT_NUMBER
+"#
+ .trim()
+ );
+
+ // To recursively process the tree, there are three approaches:
+ // 1. explicitly call getter methods on AST nodes.
+ // 2. use descendants and `AstNode::cast`.
+ // 3. use descendants and `match_ast!`.
+ //
+ // Here's how the first one looks like:
+ let exprs_cast: Vec<String> = file
+ .syntax()
+ .descendants()
+ .filter_map(ast::Expr::cast)
+ .map(|expr| expr.syntax().text().to_string())
+ .collect();
+
+ // An alternative is to use a macro.
+ let mut exprs_visit = Vec::new();
+ for node in file.syntax().descendants() {
+ match_ast! {
+ match node {
+ ast::Expr(it) => {
+ let res = it.syntax().text().to_string();
+ exprs_visit.push(res);
+ },
+ _ => (),
+ }
+ }
+ }
+ assert_eq!(exprs_cast, exprs_visit);
+}
--- /dev/null
- tracing::debug!("vfs-notify event: {:?}", event);
+//! An implementation of `loader::Handle`, based on `walkdir` and `notify`.
+//!
+//! The file watching bits here are untested and quite probably buggy. For this
+//! reason, by default we don't watch files and rely on editor's file watching
+//! capabilities.
+//!
+//! Hopefully, one day a reliable file watching/walking crate appears on
+//! crates.io, and we can reduce this to trivial glue code.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+
+use std::fs;
+
+use crossbeam_channel::{never, select, unbounded, Receiver, Sender};
+use notify::{RecommendedWatcher, RecursiveMode, Watcher};
+use paths::{AbsPath, AbsPathBuf};
+use vfs::loader;
+use walkdir::WalkDir;
+
+// Handle to the background VFS loader thread; owns the channel and thread.
+#[derive(Debug)]
+pub struct NotifyHandle {
+ // Relative order of fields below is significant.
+ sender: Sender<Message>,
+ _thread: jod_thread::JoinHandle,
+}
+
+// Requests sent from the handle to the actor thread.
+#[derive(Debug)]
+enum Message {
+ Config(loader::Config),
+ Invalidate(AbsPathBuf),
+}
+
+impl loader::Handle for NotifyHandle {
+ // Starts the actor on a dedicated thread and wires up the channel.
+ fn spawn(sender: loader::Sender) -> NotifyHandle {
+ let actor = NotifyActor::new(sender);
+ let (sender, receiver) = unbounded::<Message>();
+ let thread = jod_thread::Builder::new()
+ .name("VfsLoader".to_owned())
+ .spawn(move || actor.run(receiver))
+ .expect("failed to spawn thread");
+ NotifyHandle { sender, _thread: thread }
+ }
++
+ fn set_config(&mut self, config: loader::Config) {
+ self.sender.send(Message::Config(config)).unwrap();
+ }
++
+ fn invalidate(&mut self, path: AbsPathBuf) {
+ self.sender.send(Message::Invalidate(path)).unwrap();
+ }
++
+ // Synchronous read, bypassing the actor.
+ fn load_sync(&mut self, path: &AbsPath) -> Option<Vec<u8>> {
+ read(path)
+ }
+}
+
+type NotifyEvent = notify::Result<notify::Event>;
+
+// Actor state living on the loader thread.
+struct NotifyActor {
+ sender: loader::Sender,
+ watched_entries: Vec<loader::Entry>,
+ // Drop order is significant.
+ watcher: Option<(RecommendedWatcher, Receiver<NotifyEvent>)>,
+}
+
+// Union of the two event sources the actor selects over.
+#[derive(Debug)]
+enum Event {
+ Message(Message),
+ NotifyEvent(NotifyEvent),
+}
+
+impl NotifyActor {
+ // Fresh actor with no watcher; the watcher is created lazily on Config.
+ fn new(sender: loader::Sender) -> NotifyActor {
+ NotifyActor { sender, watched_entries: Vec::new(), watcher: None }
+ }
++
+ // Blocks on both the inbox and (if present) the watcher channel;
+ // `never()` stands in when there is no watcher. `None` means the inbox
+ // disconnected and the actor should exit.
+ fn next_event(&self, receiver: &Receiver<Message>) -> Option<Event> {
+ let watcher_receiver = self.watcher.as_ref().map(|(_, receiver)| receiver);
+ select! {
+ recv(receiver) -> it => it.ok().map(Event::Message),
+ recv(watcher_receiver.unwrap_or(&never())) -> it => Some(Event::NotifyEvent(it.unwrap())),
+ }
+ }
++
+ fn run(mut self, inbox: Receiver<Message>) {
+ while let Some(event) = self.next_event(&inbox) {
++ tracing::debug!(?event, "vfs-notify event");
+ match event {
+ Event::Message(msg) => match msg {
+ Message::Config(config) => {
+ self.watcher = None;
+ if !config.watch.is_empty() {
+ let (watcher_sender, watcher_receiver) = unbounded();
+ let watcher = log_notify_error(RecommendedWatcher::new(move |event| {
+ watcher_sender.send(event).unwrap();
+ }));
+ self.watcher = watcher.map(|it| (it, watcher_receiver));
+ }
+
+ let config_version = config.version;
+
+ let n_total = config.load.len();
+ self.send(loader::Message::Progress { n_total, n_done: 0, config_version });
+
+ self.watched_entries.clear();
+
+ for (i, entry) in config.load.into_iter().enumerate() {
+ let watch = config.watch.contains(&i);
+ if watch {
+ self.watched_entries.push(entry.clone());
+ }
+ let files = self.load_entry(entry, watch);
+ self.send(loader::Message::Loaded { files });
+ self.send(loader::Message::Progress {
+ n_total,
+ n_done: i + 1,
+ config_version,
+ });
+ }
+ }
+ Message::Invalidate(path) => {
+ let contents = read(path.as_path());
+ let files = vec![(path, contents)];
+ self.send(loader::Message::Loaded { files });
+ }
+ },
+ Event::NotifyEvent(event) => {
+ if let Some(event) = log_notify_error(event) {
+ let files = event
+ .paths
+ .into_iter()
+ .map(|path| AbsPathBuf::try_from(path).unwrap())
+ .filter_map(|path| {
+ let meta = fs::metadata(&path).ok()?;
+ if meta.file_type().is_dir()
+ && self
+ .watched_entries
+ .iter()
+ .any(|entry| entry.contains_dir(&path))
+ {
+ self.watch(path);
+ return None;
+ }
+
+ if !meta.file_type().is_file() {
+ return None;
+ }
+ if !self
+ .watched_entries
+ .iter()
+ .any(|entry| entry.contains_file(&path))
+ {
+ return None;
+ }
+
+ let contents = read(&path);
+ Some((path, contents))
+ })
+ .collect();
+ self.send(loader::Message::Loaded { files });
+ }
+ }
+ }
+ }
+ }
+ fn load_entry(
+ &mut self,
+ entry: loader::Entry,
+ watch: bool,
+ ) -> Vec<(AbsPathBuf, Option<Vec<u8>>)> {
+ match entry {
+ loader::Entry::Files(files) => files
+ .into_iter()
+ .map(|file| {
+ if watch {
+ self.watch(file.clone());
+ }
+ let contents = read(file.as_path());
+ (file, contents)
+ })
+ .collect::<Vec<_>>(),
+ loader::Entry::Directories(dirs) => {
+ let mut res = Vec::new();
+
+ for root in &dirs.include {
+ let walkdir =
+ WalkDir::new(root).follow_links(true).into_iter().filter_entry(|entry| {
+ if !entry.file_type().is_dir() {
+ return true;
+ }
+ let path = AbsPath::assert(entry.path());
+ root == path
+ || dirs.exclude.iter().chain(&dirs.include).all(|it| it != path)
+ });
+
+ let files = walkdir.filter_map(|it| it.ok()).filter_map(|entry| {
+ let is_dir = entry.file_type().is_dir();
+ let is_file = entry.file_type().is_file();
+ let abs_path = AbsPathBuf::assert(entry.into_path());
+ if is_dir && watch {
+ self.watch(abs_path.clone());
+ }
+ if !is_file {
+ return None;
+ }
+ let ext = abs_path.extension().unwrap_or_default();
+ if dirs.extensions.iter().all(|it| it.as_str() != ext) {
+ return None;
+ }
+ Some(abs_path)
+ });
+
+ res.extend(files.map(|file| {
+ let contents = read(file.as_path());
+ (file, contents)
+ }));
+ }
+ res
+ }
+ }
+ }
+
+ fn watch(&mut self, path: AbsPathBuf) {
+ if let Some((watcher, _)) = &mut self.watcher {
+ log_notify_error(watcher.watch(path.as_ref(), RecursiveMode::NonRecursive));
+ }
+ }
+ fn send(&mut self, msg: loader::Message) {
+ (self.sender)(msg);
+ }
+}
+
+/// Reads the file at `path`, returning `None` on any IO error.
+fn read(path: &AbsPath) -> Option<Vec<u8>> {
+ std::fs::read(path).ok()
+}
+
+/// Logs a `notify` error at warn level and converts the result to `Option`,
+/// discarding the error value.
+fn log_notify_error<T>(res: notify::Result<T>) -> Option<T> {
+ res.map_err(|err| tracing::warn!("notify error: {}", err)).ok()
+}
--- /dev/null
- So, I use **Rust Analyzer: Run** action in VS Code to run this single test, and then just do printf-driven development/debugging.
+# Contributing Quick Start
+
+rust-analyzer is an ordinary Rust project, which is organized as a Cargo workspace, builds on stable and doesn't depend on C libraries.
+So, just
+
+```
+$ cargo test
+```
+
+should be enough to get you started!
+
+To learn more about how rust-analyzer works, see [./architecture.md](./architecture.md).
+It also explains the high-level layout of the source code.
+Do skim through that document.
+
+We also publish rustdoc docs to pages: https://rust-lang.github.io/rust-analyzer/ide/.
+Note though, that the internal documentation is very incomplete.
+
+Various organizational and process issues are discussed in this document.
+
+# Getting in Touch
+
+rust-analyzer is a part of the [RLS-2.0 working
+group](https://github.com/rust-lang/compiler-team/tree/6a769c13656c0a6959ebc09e7b1f7c09b86fb9c0/working-groups/rls-2.0).
+Discussion happens in this Zulip stream:
+
+https://rust-lang.zulipchat.com/#narrow/stream/185405-t-compiler.2Frust-analyzer
+
+# Issue Labels
+
+* [good-first-issue](https://github.com/rust-lang/rust-analyzer/labels/good%20first%20issue)
+ are good issues to get into the project.
+* [E-has-instructions](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AE-has-instructions)
+ issues have links to the code in question and tests.
+* [Broken Window](https://github.com/rust-lang/rust-analyzer/issues?q=is:issue+is:open+label:%22Broken+Window%22)
+ are issues which are not necessarily critical by themselves, but which should be fixed ASAP regardless, to avoid accumulation of technical debt.
+* [E-easy](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AE-easy),
+ [E-medium](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AE-medium),
+ [E-hard](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AE-hard),
+ [E-unknown](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AE-unknown),
+ labels are *estimates* for how hard would be to write a fix. Each triaged issue should have one of these labels.
+* [S-actionable](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AS-actionable) and
+ [S-unactionable](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3AS-unactionable)
+ specify if there are concrete steps to resolve or advance an issue. Roughly, actionable issues need only work to be fixed,
+ while unactionable ones are blocked either on user feedback (providing a reproducible example), or on larger architectural
+ work or decisions. This classification is descriptive, not prescriptive, and might be wrong: Any unactionable issue might have a simple fix that we missed.
+ Each triaged issue should have one of these labels.
+* [fun](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3Afun)
+ is for cool, but probably hard stuff.
+* [Design](https://github.com/rust-lang/rust-analyzer/issues?q=is%3Aopen+is%3Aissue+label%3ADesign)
+ is for moderate/large scale architecture discussion.
+ Also a kind of fun.
+ These issues should generally include a link to a Zulip discussion thread.
+
+# Code Style & Review Process
+
+Do see [./style.md](./style.md).
+
+# Cookbook
+
+## CI
+
+We use GitHub Actions for CI.
+Most of the things, including formatting, are checked by `cargo test`.
+If `cargo test` passes locally, that's a good sign that CI will be green as well.
+The only exception is that some long-running tests are skipped locally by default.
+Use `env RUN_SLOW_TESTS=1 cargo test` to run the full suite.
+
+We use bors to enforce the [not rocket science](https://graydon2.dreamwidth.org/1597.html) rule.
+
+## Launching rust-analyzer
+
+Debugging the language server can be tricky.
+LSP is rather chatty, so driving it from the command line is not really feasible, driving it via VS Code requires interacting with two processes.
+
+For this reason, the best way to see how rust-analyzer works is to **find a relevant test and execute it**.
+VS Code & Emacs include an action for running a single test.
+
+Launching a VS Code instance with a locally built language server is also possible.
+There's **"Run Extension (Debug Build)"** launch configuration for this in VS Code.
+
+In general, I use one of the following workflows for fixing bugs and implementing features:
+
+If the problem concerns only internal parts of rust-analyzer (i.e. I don't need to touch the `rust-analyzer` crate or TypeScript code), there is a unit-test for it.
- * `Rust Analyzer: Status` shows some memory-usage statistics.
++So, I use **rust-analyzer: Run** action in VS Code to run this single test, and then just do printf-driven development/debugging.
+As a sanity check after I'm done, I use `cargo xtask install --server` and **Reload Window** action in VS Code to verify that the thing works as I expect.
+
+If the problem concerns only the VS Code extension, I use **Run Installed Extension** launch configuration from `launch.json`.
+Notably, this uses the usual `rust-analyzer` binary from `PATH`.
+For this, it is important to have the following in your `settings.json` file:
+```json
+{
+ "rust-analyzer.server.path": "rust-analyzer"
+}
+```
+After I am done with the fix, I use `cargo xtask install --client` to try the new extension for real.
+
+If I need to fix something in the `rust-analyzer` crate, I feel sad because it's on the boundary between the two processes, and working there is slow.
+I usually just `cargo xtask install --server` and poke changes from my live environment.
+Note that this uses `--release`, which is usually faster overall, because loading stdlib into debug version of rust-analyzer takes a lot of time.
+To speed things up, sometimes I open a temporary hello-world project which has `"rust-analyzer.cargo.noSysroot": true` in `.vscode/settings.json`.
+This flag causes rust-analyzer to skip loading the sysroot, which greatly reduces the amount of things rust-analyzer needs to do, and makes printf's more useful.
+Note that you should only use the `eprint!` family of macros for debugging: stdout is used for LSP communication, and `print!` would break it.
+
+If I need to fix something simultaneously in the server and in the client, I feel even more sad.
+I don't have a specific workflow for this case.
+
+Additionally, I use `cargo run --release -p rust-analyzer -- analysis-stats path/to/some/rust/crate` to run a batch analysis.
+This is primarily useful for performance optimizations, or for bug minimization.
+
+## TypeScript Tests
+
+If you change files under `editors/code` and would like to run the tests and linter, install npm and run:
+
+```bash
+cd editors/code
+npm ci
+npm run lint
+```
+## How to ...
+
+* ... add an assist? [#7535](https://github.com/rust-lang/rust-analyzer/pull/7535)
+* ... add a new protocol extension? [#4569](https://github.com/rust-lang/rust-analyzer/pull/4569)
+* ... add a new configuration option? [#7451](https://github.com/rust-lang/rust-analyzer/pull/7451)
+* ... add a new completion? [#6964](https://github.com/rust-lang/rust-analyzer/pull/6964)
+* ... allow new syntax in the parser? [#7338](https://github.com/rust-lang/rust-analyzer/pull/7338)
+
+## Logging
+
+Logging is done by both rust-analyzer and VS Code, so it might be tricky to figure out where logs go.
+
+Inside rust-analyzer, we use the [`tracing`](https://docs.rs/tracing/) crate for logging,
+and [`tracing-subscriber`](https://docs.rs/tracing-subscriber) for logging frontend.
+By default, log goes to stderr, but the stderr itself is processed by VS Code.
+`--log-file <PATH>` CLI argument allows logging to file.
+Setting the `RA_LOG_FILE=<PATH>` environment variable will also log to file, it will also override `--log-file`.
+
+To see stderr in the running VS Code instance, go to the "Output" tab of the panel and select `rust-analyzer`.
+This shows `eprintln!` as well.
+Note that `stdout` is used for the actual protocol, so `println!` will break things.
+
+To log all communication between the server and the client, there are two choices:
+
+* You can log on the server side, by running something like
+ ```
+ env RA_LOG=lsp_server=debug code .
+ ```
+* You can log on the client side, by enabling `"rust-analyzer.trace.server": "verbose"` workspace setting.
+ These logs are shown in a separate tab in the output and could be used with LSP inspector.
+ Kudos to [@DJMcNab](https://github.com/DJMcNab) for setting this awesome infra up!
+
+
+There are also several VS Code commands which might be of interest:
+
- * `Rust Analyzer: Syntax Tree` shows syntax tree of the current file/selection.
++* `rust-analyzer: Status` shows some memory-usage statistics.
+
- * `Rust Analyzer: View Hir` shows the HIR expressions within the function containing the cursor.
++* `rust-analyzer: Syntax Tree` shows syntax tree of the current file/selection.
+
++* `rust-analyzer: View Hir` shows the HIR expressions within the function containing the cursor.
+
+ You can hover over syntax nodes in the opened text file to see the appropriate
+ rust code that it refers to and the rust editor will also highlight the proper
+ text range.
+
+ If you trigger Go to Definition in the inspected Rust source file,
+ the syntax tree read-only editor should scroll to and select the
+ appropriate syntax node token.
+
+ ![demo](https://user-images.githubusercontent.com/36276403/78225773-6636a480-74d3-11ea-9d9f-1c9d42da03b0.png)
+
+## Profiling
+
+We have a built-in hierarchical profiler, you can enable it by using `RA_PROFILE` env-var:
+
+```
+RA_PROFILE=* // dump everything
+RA_PROFILE=foo|bar|baz // enabled only selected entries
+RA_PROFILE=*@3>10 // dump everything, up to depth 3, if it takes more than 10 ms
+```
+
+In particular, I have `export RA_PROFILE='*>10'` in my shell profile.
+
+We also have a "counting" profiler which counts number of instances of popular structs.
+It is enabled by `RA_COUNT=1`.
+
+To measure time for from-scratch analysis, use something like this:
+
+```
+$ cargo run --release -p rust-analyzer -- analysis-stats ../chalk/
+```
+
+For measuring time of incremental analysis, use either of these:
+
+```
+$ cargo run --release -p rust-analyzer -- analysis-bench ../chalk/ --highlight ../chalk/chalk-engine/src/logic.rs
+$ cargo run --release -p rust-analyzer -- analysis-bench ../chalk/ --complete ../chalk/chalk-engine/src/logic.rs:94:0
+```
+
+Look for `fn benchmark_xxx` tests for a quick way to reproduce performance problems.
+
+## Release Process
+
+Release process is handled by `release`, `dist` and `promote` xtasks, `release` being the main one.
+
+`release` assumes that you have checkouts of `rust-analyzer`, `rust-analyzer.github.io`, and `rust-lang/rust` in the same directory:
+
+```
+./rust-analyzer
+./rust-analyzer.github.io
+./rust-rust-analyzer # Note the name!
+```
+
+The remote for `rust-analyzer` must be called `upstream` (I use `origin` to point to my fork).
+In addition, for `xtask promote` (see below), `rust-rust-analyzer` must have a `rust-analyzer` remote pointing to this repository on GitHub.
+
+`release` calls the GitHub API calls to scrape pull request comments and categorize them in the changelog.
+This step uses the `curl` and `jq` applications, which need to be available in `PATH`.
+Finally, you need to obtain a GitHub personal access token and set the `GITHUB_TOKEN` environment variable.
+
+Release steps:
+
+1. Set the `GITHUB_TOKEN` environment variable.
+2. Inside rust-analyzer, run `cargo xtask release`. This will:
+ * checkout the `release` branch
+ * reset it to `upstream/nightly`
+ * push it to `upstream`. This triggers GitHub Actions which:
+ * runs `cargo xtask dist` to package binaries and VS Code extension
+ * makes a GitHub release
+ * publishes the VS Code extension to the marketplace
+ * call the GitHub API for PR details
+ * create a new changelog in `rust-analyzer.github.io`
+3. While the release is in progress, fill in the changelog
+4. Commit & push the changelog
+5. Tweet
+6. Inside `rust-analyzer`, run `cargo xtask promote` -- this will create a PR to rust-lang/rust updating rust-analyzer's subtree.
+ Self-approve the PR.
+
+If the GitHub Actions release fails because of a transient problem like a timeout, you can re-run the job from the Actions console.
+If it fails because of something that needs to be fixed, remove the release tag (if needed), fix the problem, then start over.
+Make sure to remove the new changelog post created when running `cargo xtask release` a second time.
+
+We release "nightly" every night automatically and promote the latest nightly to "stable" manually, every week.
+
+We don't do "patch" releases, unless something truly egregious comes up.
+To do a patch release, cherry-pick the fix on top of the current `release` branch and push the branch.
+There's no need to write a changelog for a patch release, it's OK to include the notes about the fix into the next weekly one.
+Note: we tag releases by dates, releasing a patch release on the same day should work (by overwriting a tag), but I am not 100% sure.
+
+## Permissions
+
+There are three sets of people with extra permissions:
+
+* rust-analyzer GitHub organization [**admins**](https://github.com/orgs/rust-analyzer/people?query=role:owner) (which include current t-compiler leads).
+ Admins have full access to the org.
+* [**review**](https://github.com/orgs/rust-analyzer/teams/review) team in the organization.
+ Reviewers have `r+` access to all of organization's repositories and publish rights on crates.io.
+ They also have direct commit access, but all changes should go via the bors queue.
+ It's ok to self-approve if you think you know what you are doing!
+ bors should automatically sync the permissions.
+ Feel free to request a review or assign any PR to a reviewer with the relevant expertise to bring the work to their attention.
+ Don't feel pressured to review assigned PRs though.
+ If you don't feel like reviewing for whatever reason, someone else will pick the review up!
+* [**triage**](https://github.com/orgs/rust-analyzer/teams/triage) team in the organization.
+ This team can label and close issues.
+
+Note that at the time being you need to be a member of the org yourself to view the links.
--- /dev/null
- Rust Analyzer has three interesting [system boundaries](https://www.tedinski.com/2018/04/10/making-tests-a-positive-influence-on-design.html) to concentrate tests on.
+# Architecture
+
+This document describes the high-level architecture of rust-analyzer.
+If you want to familiarize yourself with the code base, you are just in the right place!
+
+You might also enjoy ["Explaining Rust Analyzer"](https://www.youtube.com/playlist?list=PLhb66M_x9UmrqXhQuIpWC5VgTdrGxMx3y) series on YouTube.
+It goes deeper than what is covered in this document, but will take some time to watch.
+
+See also these implementation-related blog posts:
+
+* https://rust-analyzer.github.io/blog/2019/11/13/find-usages.html
+* https://rust-analyzer.github.io/blog/2020/07/20/three-architectures-for-responsive-ide.html
+* https://rust-analyzer.github.io/blog/2020/09/16/challeging-LR-parsing.html
+* https://rust-analyzer.github.io/blog/2020/09/28/how-to-make-a-light-bulb.html
+* https://rust-analyzer.github.io/blog/2020/10/24/introducing-ungrammar.html
+
+For older, by now mostly outdated stuff, see the [guide](./guide.md) and [another playlist](https://www.youtube.com/playlist?list=PL85XCvVPmGQho7MZkdW-wtPtuJcFpzycE).
+
+
+## Bird's Eye View
+
+![](https://user-images.githubusercontent.com/4789492/107129398-0ab70f00-687a-11eb-9bfc-d4eb023aec06.png)
+
+On the highest level, rust-analyzer is a thing which accepts input source code from the client and produces a structured semantic model of the code.
+
+More specifically, input data consists of a set of text files (`(PathBuf, String)` pairs) and information about project structure, captured in the so called `CrateGraph`.
+The crate graph specifies which files are crate roots, which cfg flags are specified for each crate and what dependencies exist between the crates.
+This is the input (ground) state.
+The analyzer keeps all this input data in memory and never does any IO.
+Because the input data is source code, which typically measures in tens of megabytes at most, keeping everything in memory is OK.
+
+A "structured semantic model" is basically an object-oriented representation of modules, functions and types which appear in the source code.
+This representation is fully "resolved": all expressions have types, all references are bound to declarations, etc.
+This is derived state.
+
+The client can submit a small delta of input data (typically, a change to a single file) and get a fresh code model which accounts for changes.
+
+The underlying engine makes sure that model is computed lazily (on-demand) and can be quickly updated for small modifications.
+
+## Entry Points
+
+`crates/rust-analyzer/src/bin/main.rs` contains the main function which spawns LSP.
+This is *the* entry point, but it front-loads a lot of complexity, so it's fine to just skim through it.
+
+`crates/rust-analyzer/src/handlers.rs` implements all LSP requests and is a great place to start if you are already familiar with LSP.
+
+`Analysis` and `AnalysisHost` types define the main API for consumers of IDE services.
+
+## Code Map
+
+This section talks briefly about various important directories and data structures.
+Pay attention to the **Architecture Invariant** sections.
+They often talk about things which are deliberately absent in the source code.
+
+Note also which crates are **API Boundaries**.
+Remember, [rules at the boundary are different](https://www.tedinski.com/2018/02/06/system-boundaries.html).
+
+### `xtask`
+
+This is rust-analyzer's "build system".
+We use cargo to compile rust code, but there are also various other tasks, like release management or local installation.
+They are handled by Rust code in the xtask directory.
+
+### `editors/code`
+
+VS Code plugin.
+
+### `lib/`
+
+rust-analyzer independent libraries which we publish to crates.io.
+It's not heavily utilized at the moment.
+
+### `crates/parser`
+
+It is a hand-written recursive descent parser, which produces a sequence of events like "start node X", "finish node Y".
+It works similarly to
+[kotlin's parser](https://github.com/JetBrains/kotlin/blob/4d951de616b20feca92f3e9cc9679b2de9e65195/compiler/frontend/src/org/jetbrains/kotlin/parsing/KotlinParsing.java),
+which is a good source of inspiration for dealing with syntax errors and incomplete input.
+Original [libsyntax parser](https://github.com/rust-lang/rust/blob/6b99adeb11313197f409b4f7c4083c2ceca8a4fe/src/libsyntax/parse/parser.rs) is what we use for the definition of the Rust language.
+`TreeSink` and `TokenSource` traits bridge the tree-agnostic parser from `grammar` with `rowan` trees.
+
+**Architecture Invariant:** the parser is independent of the particular tree structure and particular representation of the tokens.
+It transforms one flat stream of events into another flat stream of events.
+Token independence allows us to parse out both text-based source code and `tt`-based macro input.
+Tree independence allows us to more easily vary the syntax tree implementation.
+It should also unlock efficient light-parsing approaches.
+For example, you can extract the set of names defined in a file (for typo correction) without building a syntax tree.
+
+**Architecture Invariant:** parsing never fails, the parser produces `(T, Vec<Error>)` rather than `Result<T, Error>`.
+
+### `crates/syntax`
+
+Rust syntax tree structure and parser.
+See [RFC](https://github.com/rust-lang/rfcs/pull/2256) and [./syntax.md](./syntax.md) for some design notes.
+
+- [rowan](https://github.com/rust-analyzer/rowan) library is used for constructing syntax trees.
+- `ast` provides a type safe API on top of the raw `rowan` tree.
+- `ungrammar` description of the grammar, which is used to generate `syntax_kinds` and `ast` modules, using `cargo test -p xtask` command.
+
+Tests for the `syntax` crate are mostly data-driven.
+`test_data/parser` contains subdirectories with a bunch of `.rs` (test vectors) and `.txt` files with corresponding syntax trees.
+During testing, we check `.rs` against `.txt`.
+If the `.txt` file is missing, it is created (this is how you update tests).
+Additionally, running the xtask test suite with `cargo test -p xtask` will walk the grammar module and collect all `// test test_name` comments into files inside `test_data/parser/inline` directory.
+
+To update test data, run with `UPDATE_EXPECT` variable:
+
+```bash
+env UPDATE_EXPECT=1 cargo qt
+```
+
+After adding a new inline test you need to run `cargo test -p xtask` and also update the test data as described above.
+
+Note [`api_walkthrough`](https://github.com/rust-lang/rust-analyzer/blob/2fb6af89eb794f775de60b82afe56b6f986c2a40/crates/ra_syntax/src/lib.rs#L190-L348)
+in particular: it shows off various methods of working with syntax tree.
+
+See [#93](https://github.com/rust-lang/rust-analyzer/pull/93) for an example PR which fixes a bug in the grammar.
+
+**Architecture Invariant:** `syntax` crate is completely independent from the rest of rust-analyzer. It knows nothing about salsa or LSP.
+This is important because it is possible to make useful tooling using only the syntax tree.
+Without semantic information, you don't need to be able to _build_ code, which makes the tooling more robust.
+See also https://web.stanford.edu/~mlfbrown/paper.pdf.
+You can view the `syntax` crate as an entry point to rust-analyzer.
+`syntax` crate is an **API Boundary**.
+
+**Architecture Invariant:** syntax tree is a value type.
+The tree is fully determined by the contents of its syntax nodes, it doesn't need global context (like an interner) and doesn't store semantic info.
+Using the tree as a store for semantic info is convenient in traditional compilers, but doesn't work nicely in the IDE.
+Specifically, assists and refactors require transforming syntax trees, and that becomes awkward if you need to do something with the semantic info.
+
+**Architecture Invariant:** syntax tree is built for a single file.
+This is to enable parallel parsing of all files.
+
+**Architecture Invariant:** Syntax trees are by design incomplete and do not enforce well-formedness.
+If an AST method returns an `Option`, it *can* be `None` at runtime, even if this is forbidden by the grammar.
+
+### `crates/base_db`
+
+We use the [salsa](https://github.com/salsa-rs/salsa) crate for incremental and on-demand computation.
+Roughly, you can think of salsa as a key-value store, but it can also compute derived values using specified functions.
+The `base_db` crate provides basic infrastructure for interacting with salsa.
+Crucially, it defines most of the "input" queries: facts supplied by the client of the analyzer.
+Reading the docs of the `base_db::input` module should be useful: everything else is strictly derived from those inputs.
+
+**Architecture Invariant:** particularities of the build system are *not* the part of the ground state.
+In particular, `base_db` knows nothing about cargo.
+For example, `cfg` flags are a part of `base_db`, but `feature`s are not.
+A `foo` feature is a Cargo-level concept, which is lowered by Cargo to `--cfg feature=foo` argument on the command line.
+The `CrateGraph` structure is used to represent the dependencies between the crates abstractly.
+
+**Architecture Invariant:** `base_db` doesn't know about file system and file paths.
+Files are represented with opaque `FileId`, there's no operation to get an `std::path::Path` out of the `FileId`.
+
+### `crates/hir_expand`, `crates/hir_def`, `crates/hir_ty`
+
+These crates are the *brain* of rust-analyzer.
+This is the compiler part of the IDE.
+
+`hir_xxx` crates have a strong [ECS](https://en.wikipedia.org/wiki/Entity_component_system) flavor, in that they work with raw ids and directly query the database.
+There's little abstraction here.
+These crates integrate deeply with salsa and chalk.
+
+Name resolution, macro expansion and type inference all happen here.
+These crates also define various intermediate representations of the core.
+
+`ItemTree` condenses a single `SyntaxTree` into a "summary" data structure, which is stable over modifications to function bodies.
+
+`DefMap` contains the module tree of a crate and stores module scopes.
+
+`Body` stores information about expressions.
+
+**Architecture Invariant:** these crates are not, and will never be, an api boundary.
+
+**Architecture Invariant:** these crates explicitly care about being incremental.
+The core invariant we maintain is "typing inside a function's body never invalidates global derived data".
+i.e., if you change the body of `foo`, all facts about `bar` should remain intact.
+
+**Architecture Invariant:** hir exists only in context of particular crate instance with specific CFG flags.
+The same syntax may produce several instances of HIR if the crate participates in the crate graph more than once.
+
+### `crates/hir`
+
+The top-level `hir` crate is an **API Boundary**.
+If you think about "using rust-analyzer as a library", `hir` crate is most likely the façade you'll be talking to.
+
+It wraps ECS-style internal API into a more OO-flavored API (with an extra `db` argument for each call).
+
+**Architecture Invariant:** `hir` provides a static, fully resolved view of the code.
+While internal `hir_*` crates _compute_ things, `hir`, from the outside, looks like an inert data structure.
+
+`hir` also handles the delicate task of going from syntax to the corresponding `hir`.
+Remember that the mapping here is one-to-many.
+See `Semantics` type and `source_to_def` module.
+
+Note in particular a curious recursive structure in `source_to_def`.
+We first resolve the parent _syntax_ node to the parent _hir_ element.
+Then we ask the _hir_ parent what _syntax_ children does it have.
+Then we look for our node in the set of children.
+
+This is the heart of many IDE features, like goto definition, which start with figuring out the hir node at the cursor.
+This is some kind of (yet unnamed) uber-IDE pattern, as it is present in Roslyn and Kotlin as well.
+
+### `crates/ide`
+
+The `ide` crate builds on top of `hir` semantic model to provide high-level IDE features like completion or goto definition.
+It is an **API Boundary**.
+If you want to use IDE parts of rust-analyzer via LSP, custom flatbuffers-based protocol or just as a library in your text editor, this is the right API.
+
+**Architecture Invariant:** `ide` crate's API is built out of POD types with public fields.
+The API uses editor's terminology, it talks about offsets and string labels rather than in terms of definitions or types.
+It is effectively the view in MVC and viewmodel in [MVVM](https://en.wikipedia.org/wiki/Model%E2%80%93view%E2%80%93viewmodel).
+All arguments and return types are conceptually serializable.
+In particular, syntax trees and hir types are generally absent from the API (but are used heavily in the implementation).
+Shout outs to LSP developers for popularizing the idea that "UI" is a good place to draw a boundary at.
+
+`ide` is also the first crate which has the notion of change over time.
+`AnalysisHost` is a state to which you can transactionally `apply_change`.
+`Analysis` is an immutable snapshot of the state.
+
+Internally, `ide` is split across several crates. `ide_assists`, `ide_completion` and `ide_ssr` implement large isolated features.
+`ide_db` implements common IDE functionality (notably, reference search is implemented here).
+The `ide` contains a public API/façade, as well as implementation for a plethora of smaller features.
+
+**Architecture Invariant:** `ide` crate strives to provide a _perfect_ API.
+Although at the moment it has only one consumer, the LSP server, LSP *does not* influence its API design.
+Instead, we keep in mind a hypothetical _ideal_ client -- an IDE tailored specifically for rust, every nook and cranny of which is packed with Rust-specific goodies.
+
+### `crates/rust-analyzer`
+
+This crate defines the `rust-analyzer` binary, so it is the **entry point**.
+It implements the language server.
+
+**Architecture Invariant:** `rust-analyzer` is the only crate that knows about LSP and JSON serialization.
+If you want to expose a data structure `X` from ide to LSP, don't make it serializable.
+Instead, create a serializable counterpart in `rust-analyzer` crate and manually convert between the two.
+
+`GlobalState` is the state of the server.
+The `main_loop` defines the server event loop which accepts requests and sends responses.
+Requests that modify the state or might block user's typing are handled on the main thread.
+All other requests are processed in background.
+
+**Architecture Invariant:** the server is stateless, a-la HTTP.
+Sometimes state needs to be preserved between requests.
+For example, "what is the `edit` for the fifth completion item of the last completion edit?".
+For this, the second request should include enough info to re-create the context from scratch.
+This generally means including all the parameters of the original request.
+
+`reload` module contains the code that handles configuration and Cargo.toml changes.
+This is a tricky business.
+
+**Architecture Invariant:** `rust-analyzer` should be partially available even when the build is broken.
+Reloading process should not prevent IDE features from working.
+
+### `crates/toolchain`, `crates/project_model`, `crates/flycheck`
+
+These crates deal with invoking `cargo` to learn about project structure and get compiler errors for the "check on save" feature.
+
+They use `crates/paths` heavily instead of `std::path`.
+A single `rust-analyzer` process can serve many projects, so it is important that server's current directory does not leak.
+
+### `crates/mbe`, `crates/tt`, `crates/proc_macro_api`, `crates/proc_macro_srv`
+
+These crates implement macros as token tree -> token tree transforms.
+They are independent from the rest of the code.
+
+`tt` crate defines `TokenTree`, a single token or a delimited sequence of token trees.
+`mbe` crate contains tools for transforming between syntax trees and token tree.
+And it also handles the actual parsing and expansion of declarative macro (a-la "Macros By Example" or mbe).
+
+For proc macros, a client-server model is used.
+We pass an argument `--proc-macro` to `rust-analyzer` binary to start a separate process (`proc_macro_srv`).
+And the client (`proc_macro_api`) provides an interface to talk to that server separately.
+
+Token trees are passed from the client, and the server loads the corresponding dynamic library (which is built by `cargo`).
+Because the API for getting results from a proc macro is unstable in `rustc`,
+we maintain our own copy (and paste) of that part of the code to allow us to build the whole thing in stable Rust.
+
+**Architecture Invariant:**
+Bad proc macros may panic or segfault accidentally, so we run them in a separate process and recover from fatal errors.
+They may also be non-deterministic, which conflicts with how `salsa` works, so special attention is required.
+
+### `crates/cfg`
+
+This crate is responsible for parsing, evaluation and general definition of `cfg` attributes.
+
+### `crates/vfs`, `crates/vfs-notify`
+
+These crates implement a virtual file system.
+They provide consistent snapshots of the underlying file system and insulate messy OS paths.
+
+**Architecture Invariant:** vfs doesn't assume a single unified file system.
+i.e., a single rust-analyzer process can act as a remote server for two different machines, where the same `/tmp/foo.rs` path points to different files.
+For this reason, all path APIs generally take some existing path as a "file system witness".
+
+### `crates/stdx`
+
+This crate contains various non-rust-analyzer specific utils, which could have been in std, as well
+as copies of unstable std items we would like to make use of already, like `std::str::split_once`.
+
+### `crates/profile`
+
+This crate contains utilities for CPU and memory profiling.
+
+
+## Cross-Cutting Concerns
+
+This section talks about the things which are everywhere and nowhere in particular.
+
+### Stability Guarantees
+
+One of the reasons rust-analyzer moves relatively fast is that we don't introduce new stability guarantees.
+Instead, as much as possible we leverage existing ones.
+
+Examples:
+
+* The `ide` API of rust-analyzer is explicitly unstable, but the LSP interface is stable, and here we just implement a stable API managed by someone else.
+* Rust language and Cargo are stable, and they are the primary inputs to rust-analyzer.
+* The `rowan` library is published to crates.io, but it is deliberately kept under `1.0` and always makes semver-incompatible upgrades
+
+Another important example is that rust-analyzer isn't run on CI, so, unlike `rustc` and `clippy`, it is actually ok for us to change runtime behavior.
+
+At some point we might consider opening up APIs or allowing crates.io libraries to include rust-analyzer specific annotations, but that's going to be a big commitment on our side.
+
+Exceptions:
+
+* `rust-project.json` is a de-facto stable format for non-cargo build systems.
+ It is probably ok enough, but was definitely stabilized implicitly.
+ Lesson for the future: when designing API which could become a stability boundary, don't wait for the first users until you stabilize it.
+ By the time you have first users, it is already de-facto stable.
+ And the users will first use the thing, and *then* inform you that now you have users.
+ The sad thing is that stuff should be stable before someone uses it for the first time, or it should contain explicit opt-in.
+* We ship some LSP extensions, and we try to keep those somewhat stable.
+ Here, we need to work with a finite set of editor maintainers, so not providing rock-solid guarantees works.
+
+### Code generation
+
+Some components in this repository are generated through automatic processes.
+Generated code is updated automatically on `cargo test`.
+Generated code is generally committed to the git repository.
+
+In particular, we generate:
+
+* API for working with syntax trees (`syntax::ast`, the [`ungrammar`](https://github.com/rust-analyzer/ungrammar) crate).
+* Various sections of the manual:
+
+ * features
+ * assists
+ * config
+
+* Documentation tests for assists
+
+See the `sourcegen` crate for details.
+
+**Architecture Invariant:** we avoid bootstrapping.
+For codegen we need to parse Rust code.
+Using rust-analyzer for that would work and would be fun, but it would also complicate the build process a lot.
+For that reason, we use syn and manual string parsing.
+
+### Cancellation
+
+Let's say that the IDE is in the process of computing syntax highlighting, when the user types `foo`.
+What should happen?
+`rust-analyzer`s answer is that the highlighting process should be cancelled -- its results are now stale, and it also blocks modification of the inputs.
+
+The salsa database maintains a global revision counter.
+When applying a change, salsa bumps this counter and waits until all other threads using salsa finish.
+If a thread does salsa-based computation and notices that the counter is incremented, it panics with a special value (see `Canceled::throw`).
+That is, rust-analyzer requires unwinding.
+
+`ide` is the boundary where the panic is caught and transformed into a `Result<T, Cancelled>`.
+
+### Testing
+
++rust-analyzer has three interesting [system boundaries](https://www.tedinski.com/2018/04/10/making-tests-a-positive-influence-on-design.html) to concentrate tests on.
+
+The outermost boundary is the `rust-analyzer` crate, which defines an LSP interface in terms of stdio.
+We do integration testing of this component, by feeding it with a stream of LSP requests and checking responses.
+These tests are known as "heavy", because they interact with Cargo and read real files from disk.
+For this reason, we try to avoid writing too many tests on this boundary: in a statically typed language, it's hard to make an error in the protocol itself if messages are themselves typed.
+Heavy tests are only run when `RUN_SLOW_TESTS` env var is set.
+
+The middle, and most important, boundary is `ide`.
+Unlike `rust-analyzer`, which exposes API, `ide` uses Rust API and is intended for use by various tools.
+A typical test creates an `AnalysisHost`, calls some `Analysis` functions and compares the results against expectation.
+
+The innermost and most elaborate boundary is `hir`.
+It has a much richer vocabulary of types than `ide`, but the basic testing setup is the same: we create a database, run some queries, assert result.
+
+For comparisons, we use the `expect` crate for snapshot testing.
+
+To test various analysis corner cases and avoid forgetting about old tests, we use so-called marks.
+See the `marks` module in the `test_utils` crate for more.
+
+**Architecture Invariant:** rust-analyzer tests do not use libcore or libstd.
+All required library code must be a part of the tests.
+This ensures fast test execution.
+
+**Architecture Invariant:** tests are data driven and do not test the API.
+Tests which directly call various API functions are a liability, because they make refactoring the API significantly more complicated.
+So most of the tests look like this:
+
+```rust
+#[track_caller]
+fn check(input: &str, expect: expect_test::Expect) {
+ // The single place that actually exercises a particular API
+}
+
+#[test]
+fn foo() {
+ check("foo", expect![["bar"]]);
+}
+
+#[test]
+fn spam() {
+ check("spam", expect![["eggs"]]);
+}
+// ...and a hundred more tests that don't care about the specific API at all.
+```
+
+To specify input data, we use a single string literal in a special format, which can describe a set of rust files.
+See `Fixture` and its module for fixture examples and documentation.
+
+**Architecture Invariant:** all code invariants are tested by `#[test]` tests.
+There are no additional checks in CI; formatting and tidy tests are run with `cargo test`.
+
+**Architecture Invariant:** tests do not depend on any kind of external resources, they are perfectly reproducible.
+
+
+### Performance Testing
+
+TBA, take a look at the `metrics` xtask and `#[test] fn benchmark_xxx()` functions.
+
+### Error Handling
+
+**Architecture Invariant:** core parts of rust-analyzer (`ide`/`hir`) don't interact with the outside world and thus can't fail.
+Only parts touching LSP are allowed to do IO.
+
+Internals of rust-analyzer need to deal with broken code, but this is not an error condition.
+rust-analyzer is robust: various analysis compute `(T, Vec<Error>)` rather than `Result<T, Error>`.
+
+rust-analyzer is a complex long-running process.
+It will always have bugs and panics.
+But a panic in an isolated feature should not bring down the whole process.
+Each LSP-request is protected by a `catch_unwind`.
+We use `always` and `never` macros instead of `assert` to gracefully recover from impossible conditions.
+
+### Observability
+
+rust-analyzer is a long-running process, so it is important to understand what's going on inside.
+We have several instruments for that.
+
+The event loop that runs rust-analyzer is very explicit.
+Rather than spawning futures or scheduling callbacks (open), the event loop accepts an `enum` of possible events (closed).
+It's easy to see all the things that trigger rust-analyzer processing, together with their performance.
+
+rust-analyzer includes a simple hierarchical profiler (`hprof`).
+It is enabled with `RA_PROFILE='*>50'` env var (log all (`*`) actions which take more than `50` ms) and produces output like:
+
+```
+85ms - handle_completion
+ 68ms - import_on_the_fly
+ 67ms - import_assets::search_for_relative_paths
+ 0ms - crate_def_map:wait (804 calls)
+ 0ms - find_path (16 calls)
+ 2ms - find_similar_imports (1 calls)
+ 0ms - generic_params_query (334 calls)
+ 59ms - trait_solve_query (186 calls)
+ 0ms - Semantics::analyze_impl (1 calls)
+ 1ms - render_resolution (8 calls)
+ 0ms - Semantics::analyze_impl (5 calls)
+```
+
+This is cheap enough to enable in production.
+
+
+Similarly, we save live object counting (`RA_COUNT=1`).
+It is not cheap enough to enable in prod, and this is a bug which should be fixed.
+
+### Configurability
+
+rust-analyzer strives to be as configurable as possible while offering reasonable defaults where no configuration exists yet.
+There will always be features that some people find more annoying than helpful, so giving the users the ability to tweak or disable these is a big part of offering a good user experience.
+Mind the code--architecture gap: at the moment, we are using fewer feature flags than we really should.
+
+### Serialization
+
+In Rust, it is easy (often too easy) to add serialization to any type by adding `#[derive(Serialize)]`.
+This easiness is misleading -- serializable types impose significant backwards compatibility constraints.
+If a type is serializable, then it is a part of some IPC boundary.
+You often don't control the other side of this boundary, so changing serializable types is hard.
+
+For this reason, the types in `ide`, `base_db` and below are not serializable by design.
+If such types need to cross an IPC boundary, then the client of rust-analyzer needs to provide custom, client-specific serialization format.
+This isolates backwards compatibility and migration concerns to a specific client.
+
+For example, `rust-project.json` is its own format -- it doesn't include `CrateGraph` as is.
+Instead, it creates a `CrateGraph` by calling appropriate constructing functions.
--- /dev/null
- Rust Analyzer never does any I/O itself, all inputs get passed explicitly via
+# Guide to rust-analyzer
+
+## About the guide
+
+This guide describes the current state of rust-analyzer as of 2019-01-20 (git
+tag [guide-2019-01]). Its purpose is to document various problems and
+architectural solutions related to the problem of building IDE-first compiler
+for Rust. There is a video version of this guide as well:
+https://youtu.be/ANKBNiSWyfc.
+
+[guide-2019-01]: https://github.com/rust-lang/rust-analyzer/tree/guide-2019-01
+
+## The big picture
+
+On the highest possible level, rust-analyzer is a stateful component. A client may
+apply changes to the analyzer (new contents of `foo.rs` file is "fn main() {}")
+and it may ask semantic questions about the current state (what is the
+definition of the identifier with offset 92 in file `bar.rs`?). Two important
+properties hold:
+
+* Analyzer does not do any I/O. It starts in an empty state and all input data is
+ provided via `apply_change` API.
+
+* Only queries about the current state are supported. One can, of course,
+ simulate undo and redo by keeping a log of changes and inverse changes respectively.
+
+## IDE API
+
+To see the bigger picture of how the IDE features work, let's take a look at the [`AnalysisHost`] and
+[`Analysis`] pair of types. `AnalysisHost` has three methods:
+
+* `default()` for creating an empty analysis instance
+* `apply_change(&mut self)` to make changes (this is how you get from an empty
+ state to something interesting)
+* `analysis(&self)` to get an instance of `Analysis`
+
+`Analysis` has a ton of methods for IDEs, like `goto_definition`, or
+`completions`. Both inputs and outputs of `Analysis`' methods are formulated in
+terms of files and offsets, and **not** in terms of Rust concepts like structs,
+traits, etc. The "typed" API with Rust specific types is slightly lower in the
+stack, we'll talk about it later.
+
+[`AnalysisHost`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/lib.rs#L265-L284
+[`Analysis`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/lib.rs#L291-L478
+
+The reason for this separation of `Analysis` and `AnalysisHost` is that we want to apply
+changes "uniquely", but we might also want to fork an `Analysis` and send it to
+another thread for background processing. That is, there is only a single
+`AnalysisHost`, but there may be several (equivalent) `Analysis`.
+
+Note that all of the `Analysis` API return `Cancellable<T>`. This is required to
+be responsive in an IDE setting. Sometimes a long-running query is being computed
+and the user types something in the editor and asks for completion. In this
+case, we cancel the long-running computation (so it returns `Err(Cancelled)`),
+apply the change and execute request for completion. We never use stale data to
+answer requests. Under the cover, `AnalysisHost` "remembers" all outstanding
+`Analysis` instances. The `AnalysisHost::apply_change` method cancels all
+`Analysis`es, blocks until all of them are `Dropped` and then applies changes
+in-place. This may be familiar to Rustaceans who use read-write locks for interior
+mutability.
+
+Next, let's talk about what the inputs to the `Analysis` are, precisely.
+
+## Inputs
+
++rust-analyzer never does any I/O itself, all inputs get passed explicitly via
+the `AnalysisHost::apply_change` method, which accepts a single argument, a
+`Change`. [`Change`] is a builder for a single change
+"transaction", so it suffices to study its methods to understand all of the
+input data.
+
+[`Change`]: https://github.com/rust-lang/rust-analyzer/blob/master/crates/base_db/src/change.rs#L14-L89
+
+The `(add|change|remove)_file` methods control the set of the input files, where
+each file has an integer id (`FileId`, picked by the client), text (`String`)
+and a filesystem path. Paths are tricky; they'll be explained below, in source roots
+section, together with the `add_root` method. The `add_library` method allows us to add a
+group of files which are assumed to rarely change. It's mostly an optimization
+and does not change the fundamental picture.
+
+The `set_crate_graph` method allows us to control how the input files are partitioned
+into compilation units -- crates. It also controls (in theory, not implemented
+yet) `cfg` flags. `CrateGraph` is a directed acyclic graph of crates. Each crate
+has a root `FileId`, a set of active `cfg` flags and a set of dependencies. Each
+dependency is a pair of a crate and a name. It is possible to have two crates
+with the same root `FileId` but different `cfg`-flags/dependencies. This model
+is lower than Cargo's model of packages: each Cargo package consists of several
+targets, each of which is a separate crate (or several crates, if you try
+different feature combinations).
+
+Procedural macros should become inputs as well, but currently they are not
+supported. Procedural macro will be a black box `Box<dyn Fn(TokenStream) -> TokenStream>`
+function, and will be inserted into the crate graph just like dependencies.
+
+Soon we'll talk how we build an LSP server on top of `Analysis`, but first,
+let's deal with that paths issue.
+
+## Source roots (a.k.a. "Filesystems are horrible")
+
+This is a non-essential section, feel free to skip.
+
+The previous section said that the filesystem path is an attribute of a file,
+but this is not the whole truth. Making it an absolute `PathBuf` will be bad for
+several reasons. First, filesystems are full of (platform-dependent) edge cases:
+
+* It's hard (requires a syscall) to decide if two paths are equivalent.
+* Some filesystems are case-insensitive (e.g. macOS, by default).
+* Paths are not necessarily UTF-8.
+* Symlinks can form cycles.
+
+Second, this might hurt the reproducibility and hermeticity of builds. In theory,
+moving a project from `/foo/bar/my-project` to `/spam/eggs/my-project` should
+not change a bit in the output. However, if the absolute path is a part of the
+input, it is at least in theory observable, and *could* affect the output.
+
+Yet another problem is that we really *really* want to avoid doing I/O, but with
+Rust the set of "input" files is not necessarily known up-front. In theory, you
+can have `#[path="/dev/random"] mod foo;`.
+
+To solve (or explicitly refuse to solve) these problems rust-analyzer uses the
+concept of a "source root". Roughly speaking, source roots are the contents of a
+directory on a file systems, like `/home/matklad/projects/rustraytracer/**.rs`.
+
+More precisely, all files (`FileId`s) are partitioned into disjoint
+`SourceRoot`s. Each file has a relative UTF-8 path within the `SourceRoot`.
+`SourceRoot` has an identity (integer ID). Crucially, the root path of the
+source root itself is unknown to the analyzer: A client is supposed to maintain a
+mapping between `SourceRoot` IDs (which are assigned by the client) and actual
+`PathBuf`s. `SourceRoot`s give a sane tree model of the file system to the
+analyzer.
+
+Note that `mod`, `#[path]` and `include!()` can only reference files from the
+same source root. It is of course possible to explicitly add extra files to
+the source root, even `/dev/random`.
+
+## Language Server Protocol
+
+Now let's see how the `Analysis` API is exposed via the JSON RPC based language server protocol. The
+hard part here is managing changes (which can come either from the file system
+or from the editor) and concurrency (we want to spawn background jobs for things
+like syntax highlighting). We use the event loop pattern to manage the zoo, and
+the loop is the [`main_loop_inner`] function. The [`main_loop`] does a one-time
+initialization and tearing down of the resources.
+
+[`main_loop`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L51-L110
+[`main_loop_inner`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L156-L258
+
+
+Let's walk through a typical analyzer session!
+
+First, we need to figure out what to analyze. To do this, we run `cargo
+metadata` to learn about Cargo packages for current workspace and dependencies,
+and we run `rustc --print sysroot` and scan the "sysroot" (the directory containing the current Rust toolchain's files) to learn about crates like
+`std`. Currently we load this configuration once at the start of the server, but
+it should be possible to dynamically reconfigure it later without restart.
+
+[main_loop.rs#L62-L70](https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L62-L70)
+
+The [`ProjectModel`] we get after this step is very Cargo and sysroot specific,
+it needs to be lowered to get the input in the form of `Change`. This
+happens in [`ServerWorldState::new`] method. Specifically
+
+* Create a `SourceRoot` for each Cargo package and sysroot.
+* Schedule a filesystem scan of the roots.
+* Create an analyzer's `Crate` for each Cargo **target** and sysroot crate.
+* Setup dependencies between the crates.
+
+[`ProjectModel`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/project_model.rs#L16-L20
+[`ServerWorldState::new`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/server_world.rs#L38-L160
+
+The results of the scan (which may take a while) will be processed in the body
+of the main loop, just like any other change. Here's where we handle:
+
+* [File system changes](https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L194)
+* [Changes from the editor](https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L377)
+
+After a single loop's turn, we group the changes into one `Change` and
+[apply] it. This always happens on the main thread and blocks the loop.
+
+[apply]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/server_world.rs#L216
+
+To handle requests, like ["goto definition"], we create an instance of the
+`Analysis` and [`schedule`] the task (which consumes `Analysis`) on the
+threadpool. [The task] calls the corresponding `Analysis` method, while
+massaging the types into the LSP representation. Keep in mind that if we are
+executing "goto definition" on the threadpool and a new change comes in, the
+task will be canceled as soon as the main loop calls `apply_change` on the
+`AnalysisHost`.
+
+["goto definition"]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/server_world.rs#L216
+[`schedule`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L426-L455
+[The task]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop/handlers.rs#L205-L223
+
+This concludes the overview of the analyzer's programming *interface*. Next, let's
+dig into the implementation!
+
+## Salsa
+
+The most straightforward way to implement an "apply change, get analysis, repeat"
+API would be to maintain the input state and to compute all possible analysis
+information from scratch after every change. This works, but scales poorly with
+the size of the project. To make this fast, we need to take advantage of the
+fact that most of the changes are small, and that analysis results are unlikely
+to change significantly between invocations.
+
+To do this we use [salsa]: a framework for incremental on-demand computation.
+You can skip the rest of the section if you are familiar with `rustc`'s red-green
+algorithm (which is used for incremental compilation).
+
+[salsa]: https://github.com/salsa-rs/salsa
+
+It's better to refer to salsa's docs to learn about it. Here's a small excerpt:
+
+The key idea of salsa is that you define your program as a set of queries. Every
+query is used like a function `K -> V` that maps from some key of type `K` to a value
+of type `V`. Queries come in two basic varieties:
+
+* **Inputs**: the base inputs to your system. You can change these whenever you
+ like.
+
+* **Functions**: pure functions (no side effects) that transform your inputs
+ into other values. The results of queries are memoized to avoid recomputing
+ them a lot. When you make changes to the inputs, we'll figure out (fairly
+ intelligently) when we can re-use these memoized values and when we have to
+ recompute them.
+
+For further discussion, it's important to understand one bit of "fairly
+intelligently". Suppose we have two functions, `f1` and `f2`, and one input,
+`i`. We call `f1(X)` which in turn calls `f2(Y)` which inspects `i(Z)`. `i(Z)`
+returns some value `V1`, `f2` uses that and returns `R1`, `f1` uses that and
+returns `O`. Now, let's change `i` at `Z` to `V2` from `V1` and try to compute
+`f1(X)` again. Because `f1(X)` (transitively) depends on `i(Z)`, we can't just
+reuse its value as is. However, if `f2(Y)` is *still* equal to `R1` (despite
+`i`'s change), we, in fact, *can* reuse `O` as result of `f1(X)`. And that's how
+salsa works: it recomputes results in *reverse* order, starting from inputs and
+progressing towards outputs, stopping as soon as it sees an intermediate value
+that hasn't changed. If this sounds confusing to you, don't worry: it is
+confusing. This illustration by @killercup might help:
+
+<img alt="step 1" src="https://user-images.githubusercontent.com/1711539/51460907-c5484780-1d6d-11e9-9cd2-d6f62bd746e0.png" width="50%">
+
+<img alt="step 2" src="https://user-images.githubusercontent.com/1711539/51460915-c9746500-1d6d-11e9-9a77-27d33a0c51b5.png" width="50%">
+
+<img alt="step 3" src="https://user-images.githubusercontent.com/1711539/51460920-cda08280-1d6d-11e9-8d96-a782aa57a4d4.png" width="50%">
+
+<img alt="step 4" src="https://user-images.githubusercontent.com/1711539/51460927-d1340980-1d6d-11e9-851e-13c149d5c406.png" width="50%">
+
+## Salsa Input Queries
+
+All analyzer information is stored in a salsa database. `Analysis` and
+`AnalysisHost` types are newtype wrappers for [`RootDatabase`] -- a salsa
+database.
+
+[`RootDatabase`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/db.rs#L88-L134
+
+Salsa input queries are defined in [`FilesDatabase`] (which is a part of
+`RootDatabase`). They closely mirror the familiar `Change` structure:
+indeed, what `apply_change` does is it sets the values of input queries.
+
+[`FilesDatabase`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/base_db/src/input.rs#L150-L174
+
+## From text to semantic model
+
+The bulk of the rust-analyzer is transforming input text into a semantic model of
+Rust code: a web of entities like modules, structs, functions and traits.
+
+An important fact to realize is that (unlike most other languages like C# or
+Java) there is not a one-to-one mapping between the source code and the semantic model. A
+single function definition in the source code might result in several semantic
+functions: for example, the same source file might get included as a module in
+several crates or a single crate might be present in the compilation DAG
+several times, with different sets of `cfg`s enabled. The IDE-specific task of
+mapping source code into a semantic model is inherently imprecise for
+this reason and gets handled by the [`source_binder`].
+
+[`source_binder`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/source_binder.rs
+
+The semantic interface is declared in the [`code_model_api`] module. Each entity is
+identified by an integer ID and has a bunch of methods which take a salsa database
+as an argument and returns other entities (which are also IDs). Internally, these
+methods invoke various queries on the database to build the model on demand.
+Here's [the list of queries].
+
+[`code_model_api`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/code_model_api.rs
+[the list of queries]: https://github.com/rust-lang/rust-analyzer/blob/7e84440e25e19529e4ff8a66e521d1b06349c6ec/crates/hir/src/db.rs#L20-L106
+
+The first step of building the model is parsing the source code.
+
+## Syntax trees
+
+An important property of the Rust language is that each file can be parsed in
+isolation. Unlike, say, `C++`, an `include` can't change the meaning of the
+syntax. For this reason, rust-analyzer can build a syntax tree for each "source
+file", which could then be reused by several semantic models if this file
+happens to be a part of several crates.
+
+The representation of syntax trees that rust-analyzer uses is similar to that of `Roslyn`
+and Swift's new [libsyntax]. Swift's docs give an excellent overview of the
+approach, so I skip this part here and instead outline the main characteristics
+of the syntax trees:
+
+* Syntax trees are fully lossless. Converting **any** text to a syntax tree and
+ back is a total identity function. All whitespace and comments are explicitly
+ represented in the tree.
+
+* Syntax nodes have generic `(next|previous)_sibling`, `parent`,
+ `(first|last)_child` functions. You can get from any one node to any other
+ node in the file using only these functions.
+
+* Syntax nodes know their range (start offset and length) in the file.
+
+* Syntax nodes share the ownership of their syntax tree: if you keep a reference
+ to a single function, the whole enclosing file is alive.
+
+* Syntax trees are immutable and the cost of replacing the subtree is
+ proportional to the depth of the subtree. Read Swift's docs to learn how
+ immutable + parent pointers + cheap modification is possible.
+
+* Syntax trees are built on a best-effort basis. All accessor methods return
+ `Option`s. The tree for `fn foo` will contain a function declaration with
+ `None` for parameter list and body.
+
+* Syntax trees do not know the file they are built from, they only know about
+ the text.
+
+The implementation is based on the generic [rowan] crate on top of which a
+[rust-specific] AST is generated.
+
+[libsyntax]: https://github.com/apple/swift/tree/5e2c815edfd758f9b1309ce07bfc01c4bc20ec23/lib/Syntax
+[rowan]: https://github.com/rust-analyzer/rowan/tree/100a36dc820eb393b74abe0d20ddf99077b61f88
+[rust-specific]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_syntax/src/ast/generated.rs
+
+The next step in constructing the semantic model is ...
+
+## Building a Module Tree
+
+The algorithm for building a tree of modules is to start with a crate root
+(remember, each `Crate` from a `CrateGraph` has a `FileId`), collect all `mod`
+declarations and recursively process child modules. This is handled by the
+[`module_tree_query`], with two slight variations.
+
+[`module_tree_query`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/module_tree.rs#L116-L123
+
+First, rust-analyzer builds a module tree for all crates in a source root
+simultaneously. The main reason for this is historical (`module_tree` predates
+`CrateGraph`), but this approach also enables accounting for files which are not
+part of any crate. That is, if you create a file but do not include it as a
+submodule anywhere, you still get semantic completion, and you get a warning
+about a free-floating module (the actual warning is not implemented yet).
+
+The second difference is that `module_tree_query` does not *directly* depend on
+the "parse" query (which is confusingly called `source_file`). Why would calling
+the parse directly be bad? Suppose the user changes the file slightly, by adding
+an insignificant whitespace. Adding whitespace changes the parse tree (because
+it includes whitespace), and that means recomputing the whole module tree.
+
+We deal with this problem by introducing an intermediate [`submodules_query`].
+This query processes the syntax tree and extracts a set of declared submodule
+names. Now, changing the whitespace results in `submodules_query` being
+re-executed for a *single* module, but because the result of this query stays
+the same, we don't have to re-execute [`module_tree_query`]. In fact, we only
+need to re-execute it when we add/remove new files or when we change mod
+declarations.
+
+[`submodules_query`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/module_tree.rs#L41
+
+We store the resulting modules in a `Vec`-based indexed arena. The indices in
+the arena become module IDs. And this brings us to the next topic:
+assigning IDs in the general case.
+
+## Location Interner pattern
+
+One way to assign IDs is how we've dealt with modules: Collect all items into a
+single array in some specific order and use the index in the array as an ID. The
+main drawback of this approach is that these IDs are not stable: Adding a new item can
+shift the IDs of all other items. This works for modules, because adding a module is
+a comparatively rare operation, but would be less convenient for, for example,
+functions.
+
+Another solution here is positional IDs: We can identify a function as "the
+function with name `foo` in a ModuleId(92) module". Such locations are stable:
+adding a new function to the module (unless it is also named `foo`) does not
+change the location. However, such "ID" types cease to be a `Copy`able integer and in
+general can become pretty large if we account for nesting (for example: "third parameter of
+the `foo` function of the `bar` `impl` in the `baz` module").
+
+[`LocationInterner`] allows us to combine the benefits of positional and numeric
+IDs. It is a bidirectional append-only map between locations and consecutive
+integers which can "intern" a location and return an integer ID back. The salsa
+database we use includes a couple of [interners]. How to "garbage collect"
+unused locations is an open question.
+
+[`LocationInterner`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/base_db/src/loc2id.rs#L65-L71
+[interners]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/db.rs#L22-L23
+
+For example, we use `LocationInterner` to assign IDs to definitions of functions,
+structs, enums, etc. The location, [`DefLoc`] contains two bits of information:
+
+* the ID of the module which contains the definition,
+* the ID of the specific item in the module's source code.
+
+We "could" use a text offset for the location of a particular item, but that would play
+badly with salsa: offsets change after edits. So, as a rule of thumb, we avoid
+using offsets, text ranges or syntax trees as keys and values for queries. What
+we do instead is we store the "index" of the item among all of the items of a file
+(so, a position-based ID, but localized to a single file).
+
+[`DefLoc`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/ids.rs#L127-L139
+
+One thing we've glossed over for the time being is support for macros. We have
+only proof of concept handling of macros at the moment, but they are extremely
+interesting from an "assigning IDs" perspective.
+
+## Macros and recursive locations
+
+The tricky bit about macros is that they effectively create new source files.
+While we can use `FileId`s to refer to original files, we can't just assign them
+willy-nilly to the pseudo files of macro expansion. Instead, we use a special
+ID, [`HirFileId`] to refer to either a usual file or a macro-generated file:
+
+```rust
+enum HirFileId {
+ FileId(FileId),
+ Macro(MacroCallId),
+}
+```
+
+`MacroCallId` is an interned ID that specifies a particular macro invocation.
+Its `MacroCallLoc` contains:
+
+* `ModuleId` of the containing module
+* `HirFileId` of the containing file or pseudo file
+* an index of this particular macro invocation in this file (positional id
+ again).
+
+Note how `HirFileId` is defined in terms of `MacroCallLoc` which is defined in
+terms of `HirFileId`! This does not recur infinitely though: any chain of
+`HirFileId`s bottoms out in `HirFileId::FileId`, that is, some source file
+actually written by the user.
+
+[`HirFileId`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/ids.rs#L18-L125
+
+Now that we understand how to identify a definition, in a source or in a
+macro-generated file, we can discuss name resolution a bit.
+
+## Name resolution
+
+Name resolution faces the same problem as the module tree: if we look at the
+syntax tree directly, we'll have to recompute name resolution after every
+modification. The solution to the problem is the same: We [lower] the source code of
+each module into a position-independent representation which does not change if
+we modify bodies of the items. After that we [loop] resolving all imports until
+we've reached a fixed point.
+
+[lower]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L113-L117
+[loop]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres.rs#L186-L196
+
+And, given all our preparation with IDs and a position-independent representation,
+it is satisfying to [test] that typing inside function body does not invalidate
+name resolution results.
+
+[test]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/tests.rs#L376
+
+An interesting fact about name resolution is that it "erases" all of the
+intermediate paths from the imports: in the end, we know which items are defined
+and which items are imported in each module, but, if the import was `use
+foo::bar::baz`, we deliberately forget what modules `foo` and `bar` resolve to.
+
+To serve "goto definition" requests on intermediate segments we need this info
+in the IDE, however. Luckily, we need it only for a tiny fraction of imports, so we just ask
+the module explicitly, "What does the path `foo::bar` resolve to?". This is a
+general pattern: we try to compute the minimal possible amount of information
+during analysis while allowing IDE to ask for additional specific bits.
+
+Name resolution is also a good place to introduce another salsa pattern used
+throughout the analyzer:
+
+## Source Map pattern
+
+Due to an obscure edge case in completion, IDE needs to know the syntax node of
+a use statement which imported the given completion candidate. We can't just
+store the syntax node as a part of name resolution: this will break
+incrementality, due to the fact that syntax changes after every file
+modification.
+
+We solve this problem during the lowering step of name resolution. The lowering
+query actually produces a *pair* of outputs: `LoweredModule` and [`SourceMap`].
+The `LoweredModule` module contains [imports], but in a position-independent form.
+The `SourceMap` contains a mapping from position-independent imports to
+(position-dependent) syntax nodes.
+
+The result of this basic lowering query changes after every modification. But
+there's an intermediate [projection query] which returns only the first
+position-independent part of the lowering. The result of this query is stable.
+Naturally, name resolution [uses] this stable projection query.
+
+[imports]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L52-L59
+[`SourceMap`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L52-L59
+[projection query]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/nameres/lower.rs#L97-L103
+[uses]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/query_definitions.rs#L49
+
+## Type inference
+
+First of all, implementation of type inference in rust-analyzer was spearheaded
+by [@flodiebold]. [#327] was an awesome Christmas present, thank you, Florian!
+
+Type inference runs on per-function granularity and uses the patterns we've
+discussed previously.
+
+First, we [lower the AST] of a function body into a position-independent
+representation. In this representation, each expression is assigned a
+[positional ID]. Alongside the lowered expression, [a source map] is produced,
+which maps between expression ids and original syntax. This lowering step also
+deals with "incomplete" source trees by replacing missing expressions by an
+explicit `Missing` expression.
+
+Given the lowered body of the function, we can now run [type inference] and
+construct a mapping from `ExprId`s to types.
+
+[@flodiebold]: https://github.com/flodiebold
+[#327]: https://github.com/rust-lang/rust-analyzer/pull/327
+[lower the AST]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/expr.rs
+[positional ID]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/expr.rs#L13-L15
+[a source map]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/expr.rs#L41-L44
+[type inference]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/hir/src/ty.rs#L1208-L1223
+
+## Tying it all together: completion
+
+To conclude the overview of the rust-analyzer, let's trace the request for
+(type-inference powered!) code completion!
+
+We start by [receiving a message] from the language client. We decode the
+message as a request for completion and [schedule it on the threadpool]. This is
+the place where we [catch] canceled errors if, immediately after completion, the
+client sends some modification.
+
+In [the handler], we deserialize LSP requests into rust-analyzer specific data
+types (by converting a file url into a numeric `FileId`), [ask analysis for
+completion] and serialize results into the LSP.
+
+The [completion implementation] is finally the place where we start doing the actual
+work. The first step is to collect the `CompletionContext` -- a struct which
+describes the cursor position in terms of Rust syntax and semantics. For
+example, `function_syntax: Option<&'a ast::FnDef>` stores a reference to
+the enclosing function *syntax*, while `function: Option<hir::Function>` is the
+`Def` for this function.
+
+To construct the context, we first do an ["IntelliJ Trick"]: we insert a dummy
+identifier at the cursor's position and parse this modified file, to get a
+reasonably looking syntax tree. Then we do a bunch of "classification" routines
+to figure out the context. For example, we [find an ancestor `fn` node] and we get a
+[semantic model] for it (using the lossy `source_binder` infrastructure).
+
+The second step is to run a [series of independent completion routines]. Let's
+take a closer look at [`complete_dot`], which completes fields and methods in
+`foo.bar|`. First we extract a semantic function and a syntactic receiver
+expression out of the `Context`. Then we run type-inference for this single
+function and map our syntactic expression to `ExprId`. Using the ID, we figure
+out the type of the receiver expression. Then we add all fields & methods from
+the type to completion.
+
+[receiving a message]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L203
+[schedule it on the threadpool]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L428
+[catch]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ra_lsp_server/src/main_loop.rs#L436-L442
+[the handler]: https://salsa.zulipchat.com/#narrow/stream/181542-rfcs.2Fsalsa-query-group/topic/design.20next.20steps
+[ask analysis for completion]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/lib.rs#L439-L444
+[completion implementation]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion.rs#L46-L62
+[`CompletionContext`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L14-L37
+["IntelliJ Trick"]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L72-L75
+[find an ancestor `fn` node]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L116-L120
+[semantic model]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/completion_context.rs#L123
+[series of independent completion routines]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion.rs#L52-L59
+[`complete_dot`]: https://github.com/rust-lang/rust-analyzer/blob/guide-2019-01/crates/ide_api/src/completion/complete_dot.rs#L6-L22
--- /dev/null
- Try **Rust Analyzer: Show RA Version** in VS Code (using **Command Palette** feature typically activated by Ctrl+Shift+P) or `rust-analyzer --version` in the command line.
+= User Manual
+:toc: preamble
+:sectanchors:
+:page-layout: post
+:icons: font
+:source-highlighter: rouge
+:experimental:
+
+////
+IMPORTANT: the master copy of this document lives in the https://github.com/rust-lang/rust-analyzer repository
+////
+
+At its core, rust-analyzer is a *library* for semantic analysis of Rust code as it changes over time.
+This manual focuses on a specific usage of the library -- running it as part of a server that implements the
+https://microsoft.github.io/language-server-protocol/[Language Server Protocol] (LSP).
+The LSP allows various code editors, like VS Code, Emacs or Vim, to implement semantic features like completion or goto definition by talking to an external language server process.
+
+[TIP]
+====
+[.lead]
+To improve this document, send a pull request: +
+https://github.com/rust-lang/rust-analyzer/blob/master/docs/user/manual.adoc[https://github.com/rust-analyzer/.../manual.adoc]
+
+The manual is written in https://asciidoc.org[AsciiDoc] and includes some extra files which are generated from the source code. Run `cargo test` and `cargo test -p xtask` to create these and then `asciidoctor manual.adoc` to create an HTML copy.
+====
+
+If you have questions about using rust-analyzer, please ask them in the https://users.rust-lang.org/c/ide/14["`IDEs and Editors`"] topic of Rust users forum.
+
+== Installation
+
+In theory, one should be able to just install the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>> and have it automatically work with any editor.
+We are not there yet, so some editor specific setup is required.
+
+Additionally, rust-analyzer needs the sources of the standard library.
+If the source code is not present, rust-analyzer will attempt to install it automatically.
+
+To add the sources manually, run the following command:
+
+```bash
+$ rustup component add rust-src
+```
+
+=== Toolchain
+
+Only the latest stable standard library source is officially supported for use with rust-analyzer.
+If you are using an older toolchain or have an override set, rust-analyzer may fail to understand the Rust source.
+You will either need to update your toolchain or use an older version of rust-analyzer that is compatible with your toolchain.
+
+If you are using an override in your project, you can still force rust-analyzer to use the stable toolchain via the environment variable `RUSTUP_TOOLCHAIN`.
+For example, with VS Code or coc-rust-analyzer:
+
+[source,json]
+----
+{ "rust-analyzer.server.extraEnv": { "RUSTUP_TOOLCHAIN": "stable" } }
+----
+
+=== VS Code
+
+This is the best supported editor at the moment.
+The rust-analyzer plugin for VS Code is maintained
+https://github.com/rust-lang/rust-analyzer/tree/master/editors/code[in tree].
+
+You can install the latest release of the plugin from
+https://marketplace.visualstudio.com/items?itemName=rust-lang.rust-analyzer[the marketplace].
+
+Note that the plugin may cause conflicts with the
+https://marketplace.visualstudio.com/items?itemName=rust-lang.rust[official Rust plugin].
+It is recommended to disable the Rust plugin when using the rust-analyzer extension.
+
+By default, the plugin will prompt you to download the matching version of the server as well:
+
+image::https://user-images.githubusercontent.com/9021944/75067008-17502500-54ba-11ea-835a-f92aac50e866.png[]
+
+[NOTE]
+====
+To disable this notification put the following to `settings.json`
+
+[source,json]
+----
+{ "rust-analyzer.updates.askBeforeDownload": false }
+----
+====
+
+The server binary is stored in the extension install directory, which starts with `rust-lang.rust-analyzer-` and is located under:
+
+* Linux: `~/.vscode/extensions`
+* Linux (Remote, such as WSL): `~/.vscode-server/extensions`
+* macOS: `~/.vscode/extensions`
+* Windows: `%USERPROFILE%\.vscode\extensions`
+
+As an exception, on NixOS, the extension makes a copy of the server and stores it under `~/.config/Code/User/globalStorage/rust-lang.rust-analyzer`.
+
+Note that we only support the two most recent versions of VS Code.
+
+==== Updates
+
+The extension will be updated automatically as new versions become available.
+It will ask your permission to download the matching language server version binary if needed.
+
+===== Nightly
+
+We ship nightly releases for VS Code.
+To help us out by testing the newest code, you can enable pre-release versions in the Code extension page.
+
+==== Manual installation
+
+Alternatively, download a VSIX corresponding to your platform from the
+https://github.com/rust-lang/rust-analyzer/releases[releases] page.
+
+Install the extension with the `Extensions: Install from VSIX` command within VS Code, or from the command line via:
+[source]
+----
+$ code --install-extension /path/to/rust-analyzer.vsix
+----
+
+If you are running an unsupported platform, you can install `rust-analyzer-no-server.vsix` and compile or obtain a server binary.
+Copy the server anywhere, then add the path to your settings.json, for example:
+[source,json]
+----
+{ "rust-analyzer.server.path": "~/.local/bin/rust-analyzer-linux" }
+----
+
+==== Building From Source
+
+Both the server and the Code plugin can be installed from source:
+
+[source]
+----
+$ git clone https://github.com/rust-lang/rust-analyzer.git && cd rust-analyzer
+$ cargo xtask install
+----
+
+You'll need Cargo, nodejs (matching a supported version of VS Code) and npm for this.
+
+Note that installing via `xtask install` does not work for VS Code Remote, instead you'll need to install the `.vsix` manually.
+
+If you're not using Code, you can compile and install only the LSP server:
+
+[source]
+----
+$ cargo xtask install --server
+----
+
+=== rust-analyzer Language Server Binary
+
+Other editors generally require the `rust-analyzer` binary to be in `$PATH`.
+You can download pre-built binaries from the https://github.com/rust-lang/rust-analyzer/releases[releases] page.
+You will need to uncompress and rename the binary for your platform, e.g. from `rust-analyzer-aarch64-apple-darwin.gz` on Mac OS to `rust-analyzer`, make it executable, then move it into a directory in your `$PATH`.
+
+On Linux to install the `rust-analyzer` binary into `~/.local/bin`, these commands should work:
+
+[source,bash]
+----
+$ mkdir -p ~/.local/bin
+$ curl -L https://github.com/rust-lang/rust-analyzer/releases/latest/download/rust-analyzer-x86_64-unknown-linux-gnu.gz | gunzip -c - > ~/.local/bin/rust-analyzer
+$ chmod +x ~/.local/bin/rust-analyzer
+----
+
+Make sure that `~/.local/bin` is listed in the `$PATH` variable and use the appropriate URL if you're not on a `x86-64` system.
+
+You don't have to use `~/.local/bin`, any other path like `~/.cargo/bin` or `/usr/local/bin` will work just as well.
+
+Alternatively, you can install it from source using the command below.
+You'll need the latest stable version of the Rust toolchain.
+
+[source,bash]
+----
+$ git clone https://github.com/rust-lang/rust-analyzer.git && cd rust-analyzer
+$ cargo xtask install --server
+----
+
+If your editor can't find the binary even though the binary is on your `$PATH`, the likely explanation is that it doesn't see the same `$PATH` as the shell, see https://github.com/rust-lang/rust-analyzer/issues/1811[this issue].
+On Unix, running the editor from a shell or changing the `.desktop` file to set the environment should help.
+
+==== `rustup`
+
+`rust-analyzer` is available in `rustup`, but only in the nightly toolchain:
+
+[source,bash]
+----
+$ rustup +nightly component add rust-analyzer-preview
+----
+
+However, in contrast to `component add clippy` or `component add rustfmt`, this does not actually place a `rust-analyzer` binary in `~/.cargo/bin`, see https://github.com/rust-lang/rustup/issues/2411[this issue].
+
+==== Arch Linux
+
+The `rust-analyzer` binary can be installed from the repos or AUR (Arch User Repository):
+
+- https://www.archlinux.org/packages/community/x86_64/rust-analyzer/[`rust-analyzer`] (built from latest tagged source)
+- https://aur.archlinux.org/packages/rust-analyzer-git[`rust-analyzer-git`] (latest Git version)
+
+Install it with pacman, for example:
+
+[source,bash]
+----
+$ pacman -S rust-analyzer
+----
+
+==== Gentoo Linux
+
+`rust-analyzer` is available in the GURU repository:
+
+- https://gitweb.gentoo.org/repo/proj/guru.git/tree/dev-util/rust-analyzer?id=9895cea62602cfe599bd48e0fb02127411ca6e81[`dev-util/rust-analyzer`] builds from source
+- https://gitweb.gentoo.org/repo/proj/guru.git/tree/dev-util/rust-analyzer-bin?id=9895cea62602cfe599bd48e0fb02127411ca6e81[`dev-util/rust-analyzer-bin`] installs an official binary release
+
+If not already, GURU must be enabled (e.g. using `app-eselect/eselect-repository`) and sync'd before running `emerge`:
+
+[source,bash]
+----
+$ eselect repository enable guru && emaint sync -r guru
+$ emerge rust-analyzer-bin
+----
+
+==== macOS
+
+The `rust-analyzer` binary can be installed via https://brew.sh/[Homebrew].
+
+[source,bash]
+----
+$ brew install rust-analyzer
+----
+
+=== Emacs
+
+Note this excellent https://robert.kra.hn/posts/2021-02-07_rust-with-emacs/[guide] from https://github.com/rksm[@rksm].
+
+Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
+
+Emacs support is maintained as part of the https://github.com/emacs-lsp/lsp-mode[Emacs-LSP] package in https://github.com/emacs-lsp/lsp-mode/blob/master/lsp-rust.el[lsp-rust.el].
+
+1. Install the most recent version of `emacs-lsp` package by following the https://github.com/emacs-lsp/lsp-mode[Emacs-LSP instructions].
+2. Set `lsp-rust-server` to `'rust-analyzer`.
+3. Run `lsp` in a Rust buffer.
+4. (Optionally) bind commands like `lsp-rust-analyzer-join-lines`, `lsp-extend-selection` and `lsp-rust-analyzer-expand-macro` to keys.
+
+=== Vim/NeoVim
+
+Prerequisites: You have installed the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
+Not needed if the extension can install/update it on its own, coc-rust-analyzer is one example.
+
+There are several LSP client implementations for vim or neovim:
+
+==== coc-rust-analyzer
+
+1. Install coc.nvim by following the instructions at
+ https://github.com/neoclide/coc.nvim[coc.nvim]
+ (Node.js required)
+2. Run `:CocInstall coc-rust-analyzer` to install
+ https://github.com/fannheyward/coc-rust-analyzer[coc-rust-analyzer],
+ this extension implements _most_ of the features supported in the VSCode extension:
+ * automatically install and upgrade stable/nightly releases
+ * same configurations as VSCode extension, `rust-analyzer.server.path`, `rust-analyzer.cargo.features` etc.
+ * same commands too, `rust-analyzer.analyzerStatus`, `rust-analyzer.ssr` etc.
+ * inlay hints for variables and method chaining, _Neovim Only_
+
+Note: for code actions, use `coc-codeaction-cursor` and `coc-codeaction-selected`; `coc-codeaction` and `coc-codeaction-line` are unlikely to be useful.
+
+==== LanguageClient-neovim
+
+1. Install LanguageClient-neovim by following the instructions
+ https://github.com/autozimu/LanguageClient-neovim[here]
+ * The GitHub project wiki has extra tips on configuration
+
+2. Configure by adding this to your vim/neovim config file (replacing the existing Rust-specific line if it exists):
++
+[source,vim]
+----
+let g:LanguageClient_serverCommands = {
+\ 'rust': ['rust-analyzer'],
+\ }
+----
+
+==== YouCompleteMe
+
+Install YouCompleteMe by following the instructions
+ https://github.com/ycm-core/YouCompleteMe#installation[here].
+
+rust-analyzer is the default in ycm, it should work out of the box.
+
+==== ALE
+
+To use the LSP server in https://github.com/dense-analysis/ale[ale]:
+
+[source,vim]
+----
+let g:ale_linters = {'rust': ['analyzer']}
+----
+
+==== nvim-lsp
+
+NeoVim 0.5 has built-in language server support.
+For a quick start configuration of rust-analyzer, use https://github.com/neovim/nvim-lspconfig#rust_analyzer[neovim/nvim-lspconfig].
+Once `neovim/nvim-lspconfig` is installed, use `+lua require'lspconfig'.rust_analyzer.setup({})+` in your `init.vim`.
+
+You can also pass LSP settings to the server:
+
+[source,vim]
+----
+lua << EOF
+local nvim_lsp = require'lspconfig'
+
+local on_attach = function(client)
+ require'completion'.on_attach(client)
+end
+
+nvim_lsp.rust_analyzer.setup({
+ on_attach=on_attach,
+ settings = {
+ ["rust-analyzer"] = {
+ imports = {
+ granularity = {
+ group = "module",
+ },
+ prefix = "self",
+ },
+ cargo = {
+ buildScripts = {
+ enable = true,
+ },
+ },
+ procMacro = {
+ enable = true
+ },
+ }
+ }
+})
+EOF
+----
+
+See https://sharksforarms.dev/posts/neovim-rust/ for more tips on getting started.
+
+Check out https://github.com/simrat39/rust-tools.nvim for a batteries included rust-analyzer setup for neovim.
+
+==== vim-lsp
+
+vim-lsp is installed by following https://github.com/prabirshrestha/vim-lsp[the plugin instructions].
+It can be as simple as adding this line to your `.vimrc`:
+
+[source,vim]
+----
+Plug 'prabirshrestha/vim-lsp'
+----
+
+Next you need to register the `rust-analyzer` binary.
+If it is available in `$PATH`, you may want to add this to your `.vimrc`:
+
+[source,vim]
+----
+if executable('rust-analyzer')
+ au User lsp_setup call lsp#register_server({
+ \ 'name': 'Rust Language Server',
+ \ 'cmd': {server_info->['rust-analyzer']},
+ \ 'whitelist': ['rust'],
+ \ })
+endif
+----
+
+There is no dedicated UI for the server configuration, so you would need to send any options as a value of the `initialization_options` field, as described in the <<_configuration,Configuration>> section.
+Here is an example of how to enable the proc-macro support:
+
+[source,vim]
+----
+if executable('rust-analyzer')
+ au User lsp_setup call lsp#register_server({
+ \ 'name': 'Rust Language Server',
+ \ 'cmd': {server_info->['rust-analyzer']},
+ \ 'whitelist': ['rust'],
+ \ 'initialization_options': {
+ \ 'cargo': {
+ \ 'buildScripts': {
+ \ 'enable': v:true,
+ \ },
+ \ },
+ \ 'procMacro': {
+ \ 'enable': v:true,
+ \ },
+ \ },
+ \ })
+endif
+----
+
+=== Sublime Text
+
+==== Sublime Text 4:
+* Follow the instructions in link:https://github.com/sublimelsp/LSP-rust-analyzer[LSP-rust-analyzer].
+
+NOTE: Install link:https://packagecontrol.io/packages/LSP-file-watcher-chokidar[LSP-file-watcher-chokidar] to enable file watching (`workspace/didChangeWatchedFiles`).
+
+==== Sublime Text 3:
+* Install the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
+* Install the link:https://packagecontrol.io/packages/LSP[LSP package].
+* From the command palette, run `LSP: Enable Language Server Globally` and select `rust-analyzer`.
+
+If it worked, you should see "rust-analyzer, Line X, Column Y" on the left side of the status bar, and after waiting a bit, functionalities like tooltips on hovering over variables should become available.
+
+If you get an error saying `No such file or directory: 'rust-analyzer'`, see the <<rust-analyzer-language-server-binary,`rust-analyzer` binary>> section on installing the language server binary.
+
+=== GNOME Builder
+
+GNOME Builder 3.37.1 and newer has native `rust-analyzer` support.
+If the LSP binary is not available, GNOME Builder can install it when opening a Rust file.
+
+
+=== Eclipse IDE
+
+Support for Rust development in the Eclipse IDE is provided by link:https://github.com/eclipse/corrosion[Eclipse Corrosion].
+If available in PATH or in some standard location, `rust-analyzer` is detected and powers editing of Rust files without further configuration.
+If `rust-analyzer` is not detected, Corrosion will prompt you for configuration of your Rust toolchain and language server with a link to the __Window > Preferences > Rust__ preference page; from here a button allows to download and configure `rust-analyzer`, but you can also reference another installation.
+You'll need to close and reopen all .rs and Cargo files, or to restart the IDE, for this change to take effect.
+
+=== Kate Text Editor
+
+Support for the language server protocol is built into Kate through the LSP plugin, which is included by default.
+It is preconfigured to use rust-analyzer for Rust sources since Kate 21.12.
+
+Earlier versions allow you to use rust-analyzer through a simple settings change.
+In the LSP Client settings of Kate, copy the content of the third tab "default parameters" to the second tab "server configuration".
+Then in the configuration replace:
+[source,json]
+----
+ "rust": {
+ "command": ["rls"],
+ "rootIndicationFileNames": ["Cargo.lock", "Cargo.toml"],
+ "url": "https://github.com/rust-lang/rls",
+ "highlightingModeRegex": "^Rust$"
+ },
+----
+With
+[source,json]
+----
+ "rust": {
+ "command": ["rust-analyzer"],
+ "rootIndicationFileNames": ["Cargo.lock", "Cargo.toml"],
+ "url": "https://github.com/rust-lang/rust-analyzer",
+ "highlightingModeRegex": "^Rust$"
+ },
+----
+Then click on apply, and restart the LSP server for your rust project.
+
+=== juCi++
+
+https://gitlab.com/cppit/jucipp[juCi++] has built-in support for the language server protocol, and since version 1.7.0 offers installation of both Rust and rust-analyzer when opening a Rust file.
+
+=== Kakoune
+
+https://kakoune.org/[Kakoune] supports LSP with the help of https://github.com/kak-lsp/kak-lsp[`kak-lsp`].
+Follow the https://github.com/kak-lsp/kak-lsp#installation[instructions] to install `kak-lsp`.
+To configure `kak-lsp`, refer to the https://github.com/kak-lsp/kak-lsp#configuring-kak-lsp[configuration section] which is basically about copying the https://github.com/kak-lsp/kak-lsp/blob/master/kak-lsp.toml[configuration file] in the right place (latest versions should use `rust-analyzer` by default).
+
+Finally, you need to configure Kakoune to talk to `kak-lsp` (see https://github.com/kak-lsp/kak-lsp#usage[Usage section]).
+A basic configuration will only get you LSP but you can also activate inlay diagnostics and auto-formatting on save.
+The following might help you get all of this.
+
+[source,txt]
+----
+eval %sh{kak-lsp --kakoune -s $kak_session} # Not needed if you load it with plug.kak.
+hook global WinSetOption filetype=rust %{
+ # Enable LSP
+ lsp-enable-window
+
+ # Auto-formatting on save
+ hook window BufWritePre .* lsp-formatting-sync
+
+ # Configure inlay hints (only on save)
+ hook window -group rust-inlay-hints BufWritePost .* rust-analyzer-inlay-hints
+ hook -once -always window WinSetOption filetype=.* %{
+ remove-hooks window rust-inlay-hints
+ }
+}
+----
+
+=== Helix
+
+https://docs.helix-editor.com/[Helix] supports LSP by default.
+However, it won't install `rust-analyzer` automatically.
+You can follow instructions for installing <<rust-analyzer-language-server-binary,`rust-analyzer` binary>>.
+
+== Troubleshooting
+
+Start with looking at the rust-analyzer version.
- **Rust Analyzer: Status** prints dependency information for the current file.
++Try **rust-analyzer: Show RA Version** in VS Code (using **Command Palette** feature typically activated by Ctrl+Shift+P) or `rust-analyzer --version` in the command line.
+If the date is more than a week ago, it's better to update the rust-analyzer version.
+
+The next thing to check would be panic messages in rust-analyzer's log.
+Log messages are printed to stderr; in VS Code you can see them in the `Output > Rust Analyzer Language Server` tab of the panel.
+To see more logs, set the `RA_LOG=info` environment variable, this can be done either by setting the environment variable manually or by using `rust-analyzer.server.extraEnv`, note that both of these approaches require the server to be restarted.
+
+To fully capture LSP messages between the editor and the server, set `"rust-analyzer.trace.server": "verbose"` config and check
+`Output > Rust Analyzer Language Server Trace`.
+
+The root cause for many "`nothing works`" problems is that rust-analyzer fails to understand the project structure.
+To debug that, first note the `rust-analyzer` section in the status bar.
+If it has an error icon and is red, that's the problem (hovering will show a somewhat helpful error message).
++**rust-analyzer: Status** prints dependency information for the current file.
+Finally, `RA_LOG=project_model=debug` enables verbose logs during project loading.
+
+If rust-analyzer outright crashes, try running `rust-analyzer analysis-stats /path/to/project/directory/` on the command line.
+This command type checks the whole project in batch mode bypassing LSP machinery.
+
+When filing issues, it is useful (but not necessary) to try to minimize examples.
+An ideal bug reproduction looks like this:
+
+```bash
+$ git clone https://github.com/username/repo.git && cd repo && git switch --detach commit-hash
+$ rust-analyzer --version
+rust-analyzer dd12184e4 2021-05-08 dev
+$ rust-analyzer analysis-stats .
+💀 💀 💀
+```
+
+It is especially useful when the `repo` doesn't use external crates or the standard library.
+
+If you want to go as far as to modify the source code to debug the problem, be sure to take a look at the
+https://github.com/rust-lang/rust-analyzer/tree/master/docs/dev[dev docs]!
+
+== Configuration
+
+**Source:** https://github.com/rust-lang/rust-analyzer/blob/master/crates/rust-analyzer/src/config.rs[config.rs]
+
+The <<_installation,Installation>> section contains details on configuration for some of the editors.
+In general `rust-analyzer` is configured via LSP messages, which means that it's up to the editor to decide on the exact format and location of configuration files.
+
+Some clients, such as <<vs-code,VS Code>> or <<coc-rust-analyzer,COC plugin in Vim>> provide `rust-analyzer` specific configuration UIs. Others may require you to know a bit more about the interaction with `rust-analyzer`.
+
+For the latter category, it might help to know that the initial configuration is specified as a value of the `initializationOptions` field of the https://microsoft.github.io/language-server-protocol/specifications/specification-current/#initialize[`InitializeParams` message, in the LSP protocol].
+The spec says that the field type is `any?`, but `rust-analyzer` is looking for a JSON object that is constructed using settings from the list below.
+Name of the setting, ignoring the `rust-analyzer.` prefix, is used as a path, and value of the setting becomes the JSON property value.
+
+For example, a very common configuration is to enable proc-macro support; this can be achieved by sending this JSON:
+
+[source,json]
+----
+{
+ "cargo": {
+ "buildScripts": {
+ "enable": true,
+ },
+ },
+ "procMacro": {
+ "enable": true,
+ }
+}
+----
+
+Please consult your editor's documentation to learn more about how to configure https://microsoft.github.io/language-server-protocol/[LSP servers].
+
+To verify which configuration is actually used by `rust-analyzer`, set `RA_LOG` environment variable to `rust_analyzer=info` and look for config-related messages.
+Logs should show both the JSON that `rust-analyzer` sees as well as the updated config.
+
+This is the list of config options `rust-analyzer` supports:
+
+include::./generated_config.adoc[]
+
+== Non-Cargo Based Projects
+
+rust-analyzer does not require Cargo.
+However, if you use some other build system, you'll have to describe the structure of your project for rust-analyzer in the `rust-project.json` format:
+
+[source,TypeScript]
+----
+interface JsonProject {
+ /// Path to the directory with *source code* of
+ /// sysroot crates.
+ ///
+ /// It should point to the directory where std,
+ /// core, and friends can be found:
+ ///
+ /// https://github.com/rust-lang/rust/tree/master/library.
+ ///
+ /// If provided, rust-analyzer automatically adds
+ /// dependencies on sysroot crates. Conversely,
+ /// if you omit this path, you can specify sysroot
+ /// dependencies yourself and, for example, have
+ /// several different "sysroots" in one graph of
+ /// crates.
+ sysroot_src?: string;
+ /// The set of crates comprising the current
+ /// project. Must include all transitive
+ /// dependencies as well as sysroot crate (libstd,
+ /// libcore and such).
+ crates: Crate[];
+}
+
+interface Crate {
+ /// Optional crate name used for display purposes,
+ /// without affecting semantics. See the `deps`
+ /// key for semantically-significant crate names.
+ display_name?: string;
+ /// Path to the root module of the crate.
+ root_module: string;
+ /// Edition of the crate.
+ edition: "2015" | "2018" | "2021";
+ /// Dependencies
+ deps: Dep[];
+ /// Should this crate be treated as a member of
+ /// current "workspace".
+ ///
+ /// By default, inferred from the `root_module`
+ /// (members are the crates which reside inside
+ /// the directory opened in the editor).
+ ///
+ /// Set this to `false` for things like standard
+ /// library and 3rd party crates to enable
+ /// performance optimizations (rust-analyzer
+ /// assumes that non-member crates don't change).
+ is_workspace_member?: boolean;
+ /// Optionally specify the (super)set of `.rs`
+ /// files comprising this crate.
+ ///
+ /// By default, rust-analyzer assumes that only
+ /// files under `root_module.parent` can belong
+ /// to a crate. `include_dirs` are included
+ /// recursively, unless a subdirectory is in
+ /// `exclude_dirs`.
+ ///
+ /// Different crates can share the same `source`.
+ ///
+ /// If two crates share an `.rs` file in common,
+ /// they *must* have the same `source`.
+ /// rust-analyzer assumes that files from one
+ /// source can't refer to files in another source.
+ source?: {
+ include_dirs: string[],
+ exclude_dirs: string[],
+ },
+ /// The set of cfgs activated for a given crate, like
+ /// `["unix", "feature=\"foo\"", "feature=\"bar\""]`.
+ cfg: string[];
+ /// Target triple for this Crate.
+ ///
+ /// Used when running `rustc --print cfg`
+ /// to get target-specific cfgs.
+ target?: string;
+ /// Environment variables, used for
+ /// the `env!` macro
+ env: { [key: string]: string; },
+
+ /// Whether the crate is a proc-macro crate.
+ is_proc_macro: boolean;
+ /// For proc-macro crates, path to compiled
+ /// proc-macro (.so file).
+ proc_macro_dylib_path?: string;
+}
+
+interface Dep {
+ /// Index of a crate in the `crates` array.
+ crate: number,
+ /// Name as should appear in the (implicit)
+ /// `extern crate name` declaration.
+ name: string,
+}
+----
+
+This format is provisional and subject to change.
+Specifically, the `roots` setup will be different eventually.
+
+There are three ways to feed `rust-project.json` to rust-analyzer:
+
+* Place `rust-project.json` file at the root of the project, and rust-analyzer will discover it.
+* Specify `"rust-analyzer.linkedProjects": [ "path/to/rust-project.json" ]` in the settings (and make sure that your LSP client sends settings as a part of initialize request).
+* Specify `"rust-analyzer.linkedProjects": [ { "roots": [...], "crates": [...] }]` inline.
+
+Relative paths are interpreted relative to `rust-project.json` file location or (for inline JSON) relative to `rootUri`.
+
+See https://github.com/rust-analyzer/rust-project.json-example for a small example.
+
+You can set the `RA_LOG` environment variable to `rust_analyzer=info` to inspect how rust-analyzer handles config and project loading.
+
+Note that calls to `cargo check` are disabled when using `rust-project.json` by default, so compilation errors and warnings will no longer be sent to your LSP client. To enable these compilation errors you will need to specify explicitly what command rust-analyzer should run to perform the checks using the `checkOnSave.overrideCommand` configuration. As an example, the following configuration explicitly sets `cargo check` as the `checkOnSave` command.
+
+[source,json]
+----
+{ "rust-analyzer.checkOnSave.overrideCommand": ["cargo", "check", "--message-format=json"] }
+----
+
+The `checkOnSave.overrideCommand` requires the command specified to output json error messages for rust-analyzer to consume. The `--message-format=json` flag does this for `cargo check` so whichever command you use must also output errors in this format. See the <<Configuration>> section for more information.
+
+== Security
+
+At the moment, rust-analyzer assumes that all code is trusted.
+Here is a **non-exhaustive** list of ways to make rust-analyzer execute arbitrary code:
+
+* proc macros and build scripts are executed by default
+* `.cargo/config` can override `rustc` with an arbitrary executable
+* `rust-toolchain.toml` can override `rustc` with an arbitrary executable
+* VS Code plugin reads configuration from project directory, and that can be used to override paths to various executables, like `rustfmt` or `rust-analyzer` itself.
+* rust-analyzer's syntax trees library uses a lot of `unsafe` and hasn't been properly audited for memory safety.
+
+== Privacy
+
+The LSP server performs no network access in itself, but runs `cargo metadata` which will update or download the crate registry and the source code of the project dependencies.
+If enabled (the default), build scripts and procedural macros can do anything.
+
+The Code extension does not access the network.
+
+Any other editor plugins are not under the control of the `rust-analyzer` developers. For any privacy concerns, you should check with their respective developers.
+
+For `rust-analyzer` developers, `cargo xtask release` uses the GitHub API to put together the release notes.
+
+== Features
+
+include::./generated_features.adoc[]
+
+== Assists (Code Actions)
+
+Assists, or code actions, are small local refactorings, available in a particular context.
+They are usually triggered by a shortcut or by clicking a light bulb icon in the editor.
+Cursor position or selection is signified by `┃` character.
+
+include::./generated_assists.adoc[]
+
+== Diagnostics
+
+While most errors and warnings provided by rust-analyzer come from the `cargo check` integration, there's a growing number of diagnostics implemented using rust-analyzer's own analysis.
+Some of these diagnostics don't respect `\#[allow]` or `\#[deny]` attributes yet, but can be turned off using the `rust-analyzer.diagnostics.enable`, `rust-analyzer.diagnostics.experimental.enable` or `rust-analyzer.diagnostics.disabled` settings.
+
+include::./generated_diagnostic.adoc[]
+
+== Editor Features
+=== VS Code
+
+==== Color configurations
+
+It is possible to change the foreground/background color and font family/size of inlay hints.
+Just add this to your `settings.json`:
+
+[source,jsonc]
+----
+{
+ "editor.inlayHints.fontFamily": "Courier New",
+ "editor.inlayHints.fontSize": 11,
+
+ "workbench.colorCustomizations": {
+ // Name of the theme you are currently using
+ "[Default Dark+]": {
+ "editorInlayHint.foreground": "#868686f0",
+ "editorInlayHint.background": "#3d3d3d48",
+
+ // Overrides for specific kinds of inlay hints
+ "editorInlayHint.typeForeground": "#fdb6fdf0",
+ "editorInlayHint.parameterForeground": "#fdb6fdf0",
+ }
+ }
+}
+----
+
+==== Semantic style customizations
+
+You can customize the look of different semantic elements in the source code.
+For example, mutable bindings are underlined by default and you can override this behavior by adding the following section to your `settings.json`:
+
+[source,jsonc]
+----
+{
+ "editor.semanticTokenColorCustomizations": {
+ "rules": {
+ "*.mutable": {
+ "fontStyle": "", // underline is the default
+ },
+ }
+ },
+}
+----
+
+Most themes don't support styling unsafe operations differently yet. You can fix this by adding overrides for the rules `operator.unsafe`, `function.unsafe`, and `method.unsafe`:
+
+[source,jsonc]
+----
+{
+ "editor.semanticTokenColorCustomizations": {
+ "rules": {
+ "operator.unsafe": "#ff6600",
+ "function.unsafe": "#ff6600",
+ "method.unsafe": "#ff6600"
+ }
+ },
+}
+----
+
+In addition to the top-level rules you can specify overrides for specific themes. For example, if you wanted to use a darker text color on a specific light theme, you might write:
+
+[source,jsonc]
+----
+{
+ "editor.semanticTokenColorCustomizations": {
+ "rules": {
+ "operator.unsafe": "#ff6600"
+ },
+ "[Ayu Light]": {
+ "rules": {
+ "operator.unsafe": "#572300"
+ }
+ }
+ },
+}
+----
+
+Make sure you include the brackets around the theme name. For example, use `"[Ayu Light]"` to customize the theme Ayu Light.
+
+==== Special `when` clause context for keybindings.
+You may use `inRustProject` context to configure keybindings for rust projects only.
+For example:
+
+[source,json]
+----
+{
+ "key": "ctrl+alt+d",
+ "command": "rust-analyzer.openDocs",
+ "when": "inRustProject"
+}
+----
+More about `when` clause contexts https://code.visualstudio.com/docs/getstarted/keybindings#_when-clause-contexts[here].
+
+==== Setting runnable environment variables
+You can use "rust-analyzer.runnableEnv" setting to define runnable environment-specific substitution variables.
+The simplest way for all runnables in a bunch:
+```jsonc
+"rust-analyzer.runnableEnv": {
+ "RUN_SLOW_TESTS": "1"
+}
+```
+
+Or it is possible to specify vars more granularly:
+```jsonc
+"rust-analyzer.runnableEnv": [
+ {
+ // "mask": null, // null mask means that this rule will be applied for all runnables
+ env: {
+ "APP_ID": "1",
+ "APP_DATA": "asdf"
+ }
+ },
+ {
+ "mask": "test_name",
+ "env": {
+ "APP_ID": "2", // overwrites only APP_ID
+ }
+ }
+]
+```
+
+You can use any valid regular expression as a mask.
+Also note that a full runnable name is something like *run bin_or_example_name*, *test some::mod::test_name* or *test-mod some::mod*, so it is possible to distinguish binaries, single tests, and test modules with these masks: `"^run"`, `"^test "` (the trailing space matters!), and `"^test-mod"` respectively.
+
+==== Compiler feedback from external commands
+
+Instead of relying on the built-in `cargo check`, you can configure Code to run a command in the background and use the `$rustc-watch` problem matcher to generate inline error markers from its output.
+
+To do this you need to create a new https://code.visualstudio.com/docs/editor/tasks[VS Code Task] and set `rust-analyzer.checkOnSave.enable: false` in preferences.
+
+For example, if you want to run https://crates.io/crates/cargo-watch[`cargo watch`] instead, you might add the following to `.vscode/tasks.json`:
+
+```json
+{
+ "label": "Watch",
+ "group": "build",
+ "type": "shell",
+ "command": "cargo watch",
+ "problemMatcher": "$rustc-watch",
+ "isBackground": true
+}
+```
--- /dev/null
- "category": "Rust Analyzer"
+{
+ "name": "rust-analyzer",
+ "displayName": "rust-analyzer",
+ "description": "Rust language support for Visual Studio Code",
+ "private": true,
+ "icon": "icon.png",
+ "version": "0.5.0-dev",
+ "releaseTag": null,
+ "publisher": "rust-lang",
+ "repository": {
+ "url": "https://github.com/rust-lang/rust-analyzer.git",
+ "type": "git"
+ },
+ "homepage": "https://rust-analyzer.github.io/",
+ "license": "MIT OR Apache-2.0",
+ "keywords": [
+ "rust"
+ ],
+ "categories": [
+ "Programming Languages"
+ ],
+ "engines": {
+ "vscode": "^1.66.0"
+ },
+ "enabledApiProposals": [],
+ "scripts": {
+ "vscode:prepublish": "npm run build-base -- --minify",
+ "package": "vsce package -o rust-analyzer.vsix",
+ "build-base": "esbuild ./src/main.ts --bundle --outfile=out/main.js --external:vscode --format=cjs --platform=node --target=node16",
+ "build": "npm run build-base -- --sourcemap",
+ "watch": "npm run build-base -- --sourcemap --watch",
+ "lint": "prettier --check . && eslint -c .eslintrc.js --ext ts ./src ./tests",
+ "fix": "prettier --write . && eslint -c .eslintrc.js --ext ts ./src ./tests --fix",
+ "pretest": "tsc && npm run build",
+ "test": "cross-env TEST_VARIABLE=test node ./out/tests/runTests.js"
+ },
+ "dependencies": {
+ "d3": "^7.6.1",
+ "d3-graphviz": "^4.1.1",
+ "vscode-languageclient": "^8.0.0-next.14"
+ },
+ "devDependencies": {
+ "@types/node": "~16.11.7",
+ "@types/vscode": "~1.66.0",
+ "@typescript-eslint/eslint-plugin": "^5.30.5",
+ "@typescript-eslint/parser": "^5.30.5",
+ "@vscode/test-electron": "^2.1.5",
+ "cross-env": "^7.0.3",
+ "esbuild": "^0.14.48",
+ "eslint": "^8.19.0",
+ "eslint-config-prettier": "^8.5.0",
+ "ovsx": "^0.5.1",
+ "prettier": "^2.7.1",
+ "tslib": "^2.4.0",
+ "typescript": "^4.7.4",
+ "vsce": "^2.9.2"
+ },
+ "activationEvents": [
+ "onLanguage:rust",
+ "onCommand:rust-analyzer.analyzerStatus",
+ "onCommand:rust-analyzer.memoryUsage",
+ "onCommand:rust-analyzer.reloadWorkspace",
+ "workspaceContains:*/Cargo.toml",
+ "workspaceContains:*/rust-project.json"
+ ],
+ "main": "./out/main",
+ "contributes": {
+ "taskDefinitions": [
+ {
+ "type": "cargo",
+ "required": [
+ "command"
+ ],
+ "properties": {
+ "label": {
+ "type": "string"
+ },
+ "command": {
+ "type": "string"
+ },
+ "args": {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "env": {
+ "type": "object",
+ "patternProperties": {
+ ".+": {
+ "type": "string"
+ }
+ }
+ }
+ }
+ }
+ ],
+ "commands": [
+ {
+ "command": "rust-analyzer.syntaxTree",
+ "title": "Show Syntax Tree",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.viewHir",
+ "title": "View Hir",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.viewFileText",
+ "title": "View File Text (as seen by the server)",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.viewItemTree",
+ "title": "Debug ItemTree",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.viewCrateGraph",
+ "title": "View Crate Graph",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.viewFullCrateGraph",
+ "title": "View Crate Graph (Full)",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.expandMacro",
+ "title": "Expand macro recursively",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.matchingBrace",
+ "title": "Find matching brace",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.parentModule",
+ "title": "Locate parent module",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.joinLines",
+ "title": "Join lines",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.run",
+ "title": "Run",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.copyRunCommandLine",
+ "title": "Copy Run Command Line",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.debug",
+ "title": "Debug",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.newDebugConfig",
+ "title": "Generate launch configuration",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.analyzerStatus",
+ "title": "Status",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.memoryUsage",
+ "title": "Memory Usage (Clears Database)",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.shuffleCrateGraph",
+ "title": "Shuffle Crate Graph",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.reloadWorkspace",
+ "title": "Reload workspace",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.reload",
+ "title": "Restart server",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.onEnter",
+ "title": "Enhanced enter key",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.ssr",
+ "title": "Structural Search Replace",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.serverVersion",
+ "title": "Show RA Version",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.toggleInlayHints",
+ "title": "Toggle inlay hints",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.openDocs",
+ "title": "Open docs under cursor",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.openCargoToml",
+ "title": "Open Cargo.toml",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.peekTests",
+ "title": "Peek related tests",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.moveItemUp",
+ "title": "Move item up",
- "category": "Rust Analyzer"
++ "category": "rust-analyzer"
+ },
+ {
+ "command": "rust-analyzer.moveItemDown",
+ "title": "Move item down",
- "title": "Rust Analyzer",
++ "category": "rust-analyzer"
+ }
+ ],
+ "keybindings": [
+ {
+ "command": "rust-analyzer.parentModule",
+ "key": "ctrl+shift+u",
+ "when": "editorTextFocus && editorLangId == rust"
+ },
+ {
+ "command": "rust-analyzer.matchingBrace",
+ "key": "ctrl+shift+m",
+ "when": "editorTextFocus && editorLangId == rust"
+ },
+ {
+ "command": "rust-analyzer.joinLines",
+ "key": "ctrl+shift+j",
+ "when": "editorTextFocus && editorLangId == rust"
+ }
+ ],
+ "configuration": {
+ "type": "object",
++ "title": "rust-analyzer",
+ "properties": {
+ "rust-analyzer.cargoRunner": {
+ "type": [
+ "null",
+ "string"
+ ],
+ "default": null,
+ "description": "Custom cargo runner extension ID."
+ },
+ "rust-analyzer.runnableEnv": {
+ "anyOf": [
+ {
+ "type": "null"
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "properties": {
+ "mask": {
+ "type": "string",
+ "description": "Runnable name mask"
+ },
+ "env": {
+ "type": "object",
+ "description": "Variables in form of { \"key\": \"value\"}"
+ }
+ }
+ }
+ },
+ {
+ "type": "object",
+ "description": "Variables in form of { \"key\": \"value\"}"
+ }
+ ],
+ "default": null,
+ "markdownDescription": "Environment variables passed to the runnable launched using `Test` or `Debug` lens or `rust-analyzer.run` command."
+ },
+ "rust-analyzer.server.path": {
+ "type": [
+ "null",
+ "string"
+ ],
+ "scope": "machine-overridable",
+ "default": null,
+ "markdownDescription": "Path to rust-analyzer executable (points to bundled binary by default)."
+ },
+ "rust-analyzer.server.extraEnv": {
+ "type": [
+ "null",
+ "object"
+ ],
+ "additionalProperties": {
+ "type": [
+ "string",
+ "number"
+ ]
+ },
+ "default": null,
+ "markdownDescription": "Extra environment variables that will be passed to the rust-analyzer executable. Useful for passing e.g. `RA_LOG` for debugging."
+ },
+ "rust-analyzer.trace.server": {
+ "type": "string",
+ "scope": "window",
+ "enum": [
+ "off",
+ "messages",
+ "verbose"
+ ],
+ "enumDescriptions": [
+ "No traces",
+ "Error only",
+ "Full log"
+ ],
+ "default": "off",
+ "description": "Trace requests to the rust-analyzer (this is usually overly verbose and not recommended for regular users)."
+ },
+ "rust-analyzer.trace.extension": {
+ "description": "Enable logging of VS Code extensions itself.",
+ "type": "boolean",
+ "default": false
+ },
+ "rust-analyzer.debug.engine": {
+ "type": "string",
+ "enum": [
+ "auto",
+ "vadimcn.vscode-lldb",
+ "ms-vscode.cpptools"
+ ],
+ "default": "auto",
+ "description": "Preferred debug engine.",
+ "markdownEnumDescriptions": [
+ "First try to use [CodeLLDB](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb), if it's not installed try to use [MS C++ tools](https://marketplace.visualstudio.com/items?itemName=ms-vscode.cpptools).",
+ "Use [CodeLLDB](https://marketplace.visualstudio.com/items?itemName=vadimcn.vscode-lldb)",
+ "Use [MS C++ tools](https://marketplace.visualstudio.com/items?itemName=ms-vscode.cpptools)"
+ ]
+ },
+ "rust-analyzer.debug.sourceFileMap": {
+ "type": [
+ "object",
+ "string"
+ ],
+ "const": "auto",
+ "description": "Optional source file mappings passed to the debug engine.",
+ "default": {
+ "/rustc/<id>": "${env:USERPROFILE}/.rustup/toolchains/<toolchain-id>/lib/rustlib/src/rust"
+ }
+ },
+ "rust-analyzer.debug.openDebugPane": {
+ "markdownDescription": "Whether to open up the `Debug Panel` on debugging start.",
+ "type": "boolean",
+ "default": false
+ },
+ "rust-analyzer.debug.engineSettings": {
+ "type": "object",
+ "default": {},
+ "markdownDescription": "Optional settings passed to the debug engine. Example: `{ \"lldb\": { \"terminal\":\"external\"} }`"
+ },
+ "rust-analyzer.restartServerOnConfigChange": {
+ "markdownDescription": "Whether to restart the server automatically when certain settings that require a restart are changed.",
+ "default": false,
+ "type": "boolean"
+ },
++ "rust-analyzer.typing.continueCommentsOnNewline": {
++ "markdownDescription": "Whether to prefix newlines after comments with the corresponding comment prefix.",
++ "default": true,
++ "type": "boolean"
++ },
+ "$generated-start": {},
+ "rust-analyzer.assist.expressionFillDefault": {
+ "markdownDescription": "Placeholder expression to use for missing expressions in assists.",
+ "default": "todo",
+ "type": "string",
+ "enum": [
+ "todo",
+ "default"
+ ],
+ "enumDescriptions": [
+ "Fill missing expressions with the `todo` macro",
+ "Fill missing expressions with reasonable defaults, `new` or `default` constructors."
+ ]
+ },
+ "rust-analyzer.cachePriming.enable": {
+ "markdownDescription": "Warm up caches on project load.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.cachePriming.numThreads": {
+ "markdownDescription": "How many worker threads to handle priming caches. The default `0` means to pick automatically.",
+ "default": 0,
+ "type": "number",
+ "minimum": 0,
+ "maximum": 255
+ },
+ "rust-analyzer.cargo.autoreload": {
+ "markdownDescription": "Automatically refresh project info via `cargo metadata` on\n`Cargo.toml` or `.cargo/config.toml` changes.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.cargo.buildScripts.enable": {
+ "markdownDescription": "Run build scripts (`build.rs`) for more precise code analysis.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.cargo.buildScripts.overrideCommand": {
+ "markdownDescription": "Override the command rust-analyzer uses to run build scripts and\nbuild procedural macros. The command is required to output json\nand should therefore include `--message-format=json` or a similar\noption.\n\nBy default, a cargo invocation will be constructed for the configured\ntargets and features, with the following base command line:\n\n```bash\ncargo check --quiet --workspace --message-format=json --all-targets\n```\n.",
+ "default": null,
+ "type": [
+ "null",
+ "array"
+ ],
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.cargo.buildScripts.useRustcWrapper": {
+ "markdownDescription": "Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to\navoid checking unnecessary things.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.cargo.features": {
+ "markdownDescription": "List of features to activate.\n\nSet this to `\"all\"` to pass `--all-features` to cargo.",
+ "default": [],
+ "anyOf": [
+ {
+ "type": "string",
+ "enum": [
+ "all"
+ ],
+ "enumDescriptions": [
+ "Pass `--all-features` to cargo"
+ ]
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ }
+ ]
+ },
+ "rust-analyzer.cargo.noDefaultFeatures": {
+ "markdownDescription": "Whether to pass `--no-default-features` to cargo.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.cargo.noSysroot": {
+ "markdownDescription": "Internal config for debugging, disables loading of sysroot crates.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.cargo.target": {
+ "markdownDescription": "Compilation target override (target triple).",
+ "default": null,
+ "type": [
+ "null",
+ "string"
+ ]
+ },
+ "rust-analyzer.cargo.unsetTest": {
+ "markdownDescription": "Unsets `#[cfg(test)]` for the specified crates.",
+ "default": [
+ "core"
+ ],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.checkOnSave.allTargets": {
+ "markdownDescription": "Check all targets and tests (`--all-targets`).",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.checkOnSave.command": {
+ "markdownDescription": "Cargo command to use for `cargo check`.",
+ "default": "check",
+ "type": "string"
+ },
+ "rust-analyzer.checkOnSave.enable": {
+ "markdownDescription": "Run specified `cargo check` command for diagnostics on save.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.checkOnSave.extraArgs": {
+ "markdownDescription": "Extra arguments for `cargo check`.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.checkOnSave.features": {
+ "markdownDescription": "List of features to activate. Defaults to\n`#rust-analyzer.cargo.features#`.\n\nSet to `\"all\"` to pass `--all-features` to Cargo.",
+ "default": null,
+ "anyOf": [
+ {
+ "type": "string",
+ "enum": [
+ "all"
+ ],
+ "enumDescriptions": [
+ "Pass `--all-features` to cargo"
+ ]
+ },
+ {
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ {
+ "type": "null"
+ }
+ ]
+ },
+ "rust-analyzer.checkOnSave.noDefaultFeatures": {
+ "markdownDescription": "Whether to pass `--no-default-features` to Cargo. Defaults to\n`#rust-analyzer.cargo.noDefaultFeatures#`.",
+ "default": null,
+ "type": [
+ "null",
+ "boolean"
+ ]
+ },
+ "rust-analyzer.checkOnSave.overrideCommand": {
+                    "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option.\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.",
+ "default": null,
+ "type": [
+ "null",
+ "array"
+ ],
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.checkOnSave.target": {
+ "markdownDescription": "Check for a specific target. Defaults to\n`#rust-analyzer.cargo.target#`.",
+ "default": null,
+ "type": [
+ "null",
+ "string"
+ ]
+ },
+ "rust-analyzer.completion.autoimport.enable": {
+ "markdownDescription": "Toggles the additional completions that automatically add imports when completed.\nNote that your client must specify the `additionalTextEdits` LSP client capability to truly have this feature enabled.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.completion.autoself.enable": {
+ "markdownDescription": "Toggles the additional completions that automatically show method calls and field accesses\nwith `self` prefixed to them when inside a method.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.completion.callable.snippets": {
+ "markdownDescription": "Whether to add parenthesis and argument snippets when completing function.",
+ "default": "fill_arguments",
+ "type": "string",
+ "enum": [
+ "fill_arguments",
+ "add_parentheses",
+ "none"
+ ],
+ "enumDescriptions": [
+ "Add call parentheses and pre-fill arguments.",
+ "Add call parentheses.",
+ "Do no snippet completions for callables."
+ ]
+ },
+ "rust-analyzer.completion.postfix.enable": {
+ "markdownDescription": "Whether to show postfix snippets like `dbg`, `if`, `not`, etc.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.completion.privateEditable.enable": {
+ "markdownDescription": "Enables completions of private items and fields that are defined in the current workspace even if they are not visible at the current position.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.completion.snippets.custom": {
+ "markdownDescription": "Custom completion snippets.",
+ "default": {
+ "Arc::new": {
+ "postfix": "arc",
+ "body": "Arc::new(${receiver})",
+ "requires": "std::sync::Arc",
+ "description": "Put the expression into an `Arc`",
+ "scope": "expr"
+ },
+ "Rc::new": {
+ "postfix": "rc",
+ "body": "Rc::new(${receiver})",
+ "requires": "std::rc::Rc",
+ "description": "Put the expression into an `Rc`",
+ "scope": "expr"
+ },
+ "Box::pin": {
+ "postfix": "pinbox",
+ "body": "Box::pin(${receiver})",
+ "requires": "std::boxed::Box",
+ "description": "Put the expression into a pinned `Box`",
+ "scope": "expr"
+ },
+ "Ok": {
+ "postfix": "ok",
+ "body": "Ok(${receiver})",
+ "description": "Wrap the expression in a `Result::Ok`",
+ "scope": "expr"
+ },
+ "Err": {
+ "postfix": "err",
+ "body": "Err(${receiver})",
+ "description": "Wrap the expression in a `Result::Err`",
+ "scope": "expr"
+ },
+ "Some": {
+ "postfix": "some",
+ "body": "Some(${receiver})",
+ "description": "Wrap the expression in an `Option::Some`",
+ "scope": "expr"
+ }
+ },
+ "type": "object"
+ },
+ "rust-analyzer.diagnostics.disabled": {
+ "markdownDescription": "List of rust-analyzer diagnostics to disable.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ },
+ "uniqueItems": true
+ },
+ "rust-analyzer.diagnostics.enable": {
+ "markdownDescription": "Whether to show native rust-analyzer diagnostics.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.diagnostics.experimental.enable": {
+ "markdownDescription": "Whether to show experimental rust-analyzer diagnostics that might\nhave more false positives than usual.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.diagnostics.remapPrefix": {
+ "markdownDescription": "Map of prefixes to be substituted when parsing diagnostic file paths.\nThis should be the reverse mapping of what is passed to `rustc` as `--remap-path-prefix`.",
+ "default": {},
+ "type": "object"
+ },
+ "rust-analyzer.diagnostics.warningsAsHint": {
+ "markdownDescription": "List of warnings that should be displayed with hint severity.\n\nThe warnings will be indicated by faded text or three dots in code\nand will not show up in the `Problems Panel`.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.diagnostics.warningsAsInfo": {
+ "markdownDescription": "List of warnings that should be displayed with info severity.\n\nThe warnings will be indicated by a blue squiggly underline in code\nand a blue icon in the `Problems Panel`.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.files.excludeDirs": {
+ "markdownDescription": "These directories will be ignored by rust-analyzer. They are\nrelative to the workspace root, and globs are not supported. You may\nalso need to add the folders to Code's `files.watcherExclude`.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.files.watcher": {
+ "markdownDescription": "Controls file watching implementation.",
+ "default": "client",
+ "type": "string",
+ "enum": [
+ "client",
+ "server"
+ ],
+ "enumDescriptions": [
+ "Use the client (editor) to watch files for changes",
+ "Use server-side file watching"
+ ]
+ },
+ "rust-analyzer.highlightRelated.breakPoints.enable": {
+ "markdownDescription": "Enables highlighting of related references while the cursor is on `break`, `loop`, `while`, or `for` keywords.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.highlightRelated.exitPoints.enable": {
+ "markdownDescription": "Enables highlighting of all exit points while the cursor is on any `return`, `?`, `fn`, or return type arrow (`->`).",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.highlightRelated.references.enable": {
+ "markdownDescription": "Enables highlighting of related references while the cursor is on any identifier.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.highlightRelated.yieldPoints.enable": {
+ "markdownDescription": "Enables highlighting of all break points for a loop or block context while the cursor is on any `async` or `await` keywords.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.hover.actions.debug.enable": {
+ "markdownDescription": "Whether to show `Debug` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.hover.actions.enable": {
+ "markdownDescription": "Whether to show HoverActions in Rust files.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.hover.actions.gotoTypeDef.enable": {
+ "markdownDescription": "Whether to show `Go to Type Definition` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.hover.actions.implementations.enable": {
+ "markdownDescription": "Whether to show `Implementations` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.hover.actions.references.enable": {
+ "markdownDescription": "Whether to show `References` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.hover.actions.run.enable": {
+ "markdownDescription": "Whether to show `Run` action. Only applies when\n`#rust-analyzer.hover.actions.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.hover.documentation.enable": {
+ "markdownDescription": "Whether to show documentation on hover.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.hover.links.enable": {
+ "markdownDescription": "Use markdown syntax for links in hover.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.imports.granularity.enforce": {
+ "markdownDescription": "Whether to enforce the import granularity setting for all files. If set to false rust-analyzer will try to keep import styles consistent per file.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.imports.granularity.group": {
+ "markdownDescription": "How imports should be grouped into use statements.",
+ "default": "crate",
+ "type": "string",
+ "enum": [
+ "preserve",
+ "crate",
+ "module",
+ "item"
+ ],
+ "enumDescriptions": [
+ "Do not change the granularity of any imports and preserve the original structure written by the developer.",
+ "Merge imports from the same crate into a single use statement. Conversely, imports from different crates are split into separate statements.",
+ "Merge imports from the same module into a single use statement. Conversely, imports from different modules are split into separate statements.",
+ "Flatten imports so that each has its own use statement."
+ ]
+ },
+ "rust-analyzer.imports.group.enable": {
+ "markdownDescription": "Group inserted imports by the [following order](https://rust-analyzer.github.io/manual.html#auto-import). Groups are separated by newlines.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.imports.merge.glob": {
+ "markdownDescription": "Whether to allow import insertion to merge new imports into single path glob imports like `use std::fmt::*;`.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.imports.prefix": {
+ "markdownDescription": "The path structure for newly inserted paths to use.",
+ "default": "plain",
+ "type": "string",
+ "enum": [
+ "plain",
+ "self",
+ "crate"
+ ],
+ "enumDescriptions": [
+ "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item.",
+ "Insert import paths relative to the current module, using up to one `super` prefix if the parent module contains the requested item. Prefixes `self` in front of the path if it starts with a module.",
+ "Force import paths to be absolute by always starting them with `crate` or the extern crate name they come from."
+ ]
+ },
+ "rust-analyzer.inlayHints.bindingModeHints.enable": {
+ "markdownDescription": "Whether to show inlay type hints for binding modes.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.inlayHints.chainingHints.enable": {
+ "markdownDescription": "Whether to show inlay type hints for method chains.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.inlayHints.closingBraceHints.enable": {
+ "markdownDescription": "Whether to show inlay hints after a closing `}` to indicate what item it belongs to.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.inlayHints.closingBraceHints.minLines": {
+ "markdownDescription": "Minimum number of lines required before the `}` until the hint is shown (set to 0 or 1\nto always show them).",
+ "default": 25,
+ "type": "integer",
+ "minimum": 0
+ },
+ "rust-analyzer.inlayHints.closureReturnTypeHints.enable": {
+ "markdownDescription": "Whether to show inlay type hints for return types of closures.",
+ "default": "never",
+ "type": "string",
+ "enum": [
+ "always",
+ "never",
+ "with_block"
+ ],
+ "enumDescriptions": [
+ "Always show type hints for return types of closures.",
+ "Never show type hints for return types of closures.",
+ "Only show type hints for return types of closures with blocks."
+ ]
+ },
+ "rust-analyzer.inlayHints.lifetimeElisionHints.enable": {
+ "markdownDescription": "Whether to show inlay type hints for elided lifetimes in function signatures.",
+ "default": "never",
+ "type": "string",
+ "enum": [
+ "always",
+ "never",
+ "skip_trivial"
+ ],
+ "enumDescriptions": [
+ "Always show lifetime elision hints.",
+ "Never show lifetime elision hints.",
+ "Only show lifetime elision hints if a return type is involved."
+ ]
+ },
+ "rust-analyzer.inlayHints.lifetimeElisionHints.useParameterNames": {
+ "markdownDescription": "Whether to prefer using parameter names as the name for elided lifetime hints if possible.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.inlayHints.maxLength": {
+ "markdownDescription": "Maximum length for inlay hints. Set to null to have an unlimited length.",
+ "default": 25,
+ "type": [
+ "null",
+ "integer"
+ ],
+ "minimum": 0
+ },
+ "rust-analyzer.inlayHints.parameterHints.enable": {
+ "markdownDescription": "Whether to show function parameter name inlay hints at the call\nsite.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.inlayHints.reborrowHints.enable": {
+ "markdownDescription": "Whether to show inlay type hints for compiler inserted reborrows.",
+ "default": "never",
+ "type": "string",
+ "enum": [
+ "always",
+ "never",
+ "mutable"
+ ],
+ "enumDescriptions": [
+ "Always show reborrow hints.",
+ "Never show reborrow hints.",
+ "Only show mutable reborrow hints."
+ ]
+ },
+ "rust-analyzer.inlayHints.renderColons": {
+ "markdownDescription": "Whether to render leading colons for type hints, and trailing colons for parameter hints.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.inlayHints.typeHints.enable": {
+ "markdownDescription": "Whether to show inlay type hints for variables.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.inlayHints.typeHints.hideClosureInitialization": {
+ "markdownDescription": "Whether to hide inlay type hints for `let` statements that initialize to a closure.\nOnly applies to closures with blocks, same as `#rust-analyzer.inlayHints.closureReturnTypeHints.enable#`.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.inlayHints.typeHints.hideNamedConstructor": {
+ "markdownDescription": "Whether to hide inlay type hints for constructors.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.joinLines.joinAssignments": {
+ "markdownDescription": "Join lines merges consecutive declaration and initialization of an assignment.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.joinLines.joinElseIf": {
+ "markdownDescription": "Join lines inserts else between consecutive ifs.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.joinLines.removeTrailingComma": {
+ "markdownDescription": "Join lines removes trailing commas.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.joinLines.unwrapTrivialBlock": {
+ "markdownDescription": "Join lines unwraps trivial blocks.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.lens.debug.enable": {
+ "markdownDescription": "Whether to show `Debug` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.lens.enable": {
+ "markdownDescription": "Whether to show CodeLens in Rust files.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.lens.forceCustomCommands": {
+ "markdownDescription": "Internal config: use custom client-side commands even when the\nclient doesn't set the corresponding capability.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.lens.implementations.enable": {
+ "markdownDescription": "Whether to show `Implementations` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.lens.references.adt.enable": {
+ "markdownDescription": "Whether to show `References` lens for Struct, Enum, and Union.\nOnly applies when `#rust-analyzer.lens.enable#` is set.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.lens.references.enumVariant.enable": {
+ "markdownDescription": "Whether to show `References` lens for Enum Variants.\nOnly applies when `#rust-analyzer.lens.enable#` is set.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.lens.references.method.enable": {
+ "markdownDescription": "Whether to show `Method References` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.lens.references.trait.enable": {
+ "markdownDescription": "Whether to show `References` lens for Trait.\nOnly applies when `#rust-analyzer.lens.enable#` is set.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.lens.run.enable": {
+ "markdownDescription": "Whether to show `Run` lens. Only applies when\n`#rust-analyzer.lens.enable#` is set.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.linkedProjects": {
+ "markdownDescription": "Disable project auto-discovery in favor of explicitly specified set\nof projects.\n\nElements must be paths pointing to `Cargo.toml`,\n`rust-project.json`, or JSON objects in `rust-project.json` format.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": [
+ "string",
+ "object"
+ ]
+ }
+ },
+ "rust-analyzer.lru.capacity": {
+ "markdownDescription": "Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.",
+ "default": null,
+ "type": [
+ "null",
+ "integer"
+ ],
+ "minimum": 0
+ },
+ "rust-analyzer.notifications.cargoTomlNotFound": {
+ "markdownDescription": "Whether to show `can't find Cargo.toml` error message.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.procMacro.attributes.enable": {
+ "markdownDescription": "Expand attribute macros. Requires `#rust-analyzer.procMacro.enable#` to be set.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.procMacro.enable": {
+ "markdownDescription": "Enable support for procedural macros, implies `#rust-analyzer.cargo.buildScripts.enable#`.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.procMacro.ignored": {
+ "markdownDescription": "These proc-macros will be ignored when trying to expand them.\n\nThis config takes a map of crate names with the exported proc-macro names to ignore as values.",
+ "default": {},
+ "type": "object"
+ },
+ "rust-analyzer.procMacro.server": {
+ "markdownDescription": "Internal config, path to proc-macro server executable (typically,\nthis is rust-analyzer itself, but we override this in tests).",
+ "default": null,
+ "type": [
+ "null",
+ "string"
+ ]
+ },
+ "rust-analyzer.runnables.command": {
+ "markdownDescription": "Command to be executed instead of 'cargo' for runnables.",
+ "default": null,
+ "type": [
+ "null",
+ "string"
+ ]
+ },
+ "rust-analyzer.runnables.extraArgs": {
+ "markdownDescription": "Additional arguments to be passed to cargo for runnables such as\ntests or binaries. For example, it may be `--release`.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.rustc.source": {
+ "markdownDescription": "Path to the Cargo.toml of the rust compiler workspace, for usage in rustc_private\nprojects, or \"discover\" to try to automatically find it if the `rustc-dev` component\nis installed.\n\nAny project which uses rust-analyzer with the rustcPrivate\ncrates must set `[package.metadata.rust-analyzer] rustc_private=true` to use it.\n\nThis option does not take effect until rust-analyzer is restarted.",
+ "default": null,
+ "type": [
+ "null",
+ "string"
+ ]
+ },
+ "rust-analyzer.rustfmt.extraArgs": {
+ "markdownDescription": "Additional arguments to `rustfmt`.",
+ "default": [],
+ "type": "array",
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.rustfmt.overrideCommand": {
+ "markdownDescription": "Advanced option, fully override the command rust-analyzer uses for\nformatting.",
+ "default": null,
+ "type": [
+ "null",
+ "array"
+ ],
+ "items": {
+ "type": "string"
+ }
+ },
+ "rust-analyzer.rustfmt.rangeFormatting.enable": {
+ "markdownDescription": "Enables the use of rustfmt's unstable range formatting command for the\n`textDocument/rangeFormatting` request. The rustfmt option is unstable and only\navailable on a nightly build.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.semanticHighlighting.strings.enable": {
+ "markdownDescription": "Use semantic tokens for strings.\n\nIn some editors (e.g. vscode) semantic tokens override other highlighting grammars.\nBy disabling semantic tokens for strings, other grammars can be used to highlight\ntheir contents.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.signatureInfo.detail": {
+ "markdownDescription": "Show full signature of the callable. Only shows parameters if disabled.",
+ "default": "full",
+ "type": "string",
+ "enum": [
+ "full",
+ "parameters"
+ ],
+ "enumDescriptions": [
+ "Show the entire signature.",
+ "Show only the parameters."
+ ]
+ },
+ "rust-analyzer.signatureInfo.documentation.enable": {
+ "markdownDescription": "Show documentation.",
+ "default": true,
+ "type": "boolean"
+ },
+ "rust-analyzer.typing.autoClosingAngleBrackets.enable": {
+ "markdownDescription": "Whether to insert closing angle brackets when typing an opening angle bracket of a generic argument list.",
+ "default": false,
+ "type": "boolean"
+ },
+ "rust-analyzer.workspace.symbol.search.kind": {
+ "markdownDescription": "Workspace symbol search kind.",
+ "default": "only_types",
+ "type": "string",
+ "enum": [
+ "only_types",
+ "all_symbols"
+ ],
+ "enumDescriptions": [
+ "Search for types only.",
+ "Search for all symbols kinds."
+ ]
+ },
+ "rust-analyzer.workspace.symbol.search.limit": {
+ "markdownDescription": "Limits the number of items returned from a workspace symbol search (Defaults to 128).\nSome clients like vs-code issue new searches on result filtering and don't require all results to be returned in the initial search.\nOther clients requires all results upfront and might require a higher limit.",
+ "default": 128,
+ "type": "integer",
+ "minimum": 0
+ },
+ "rust-analyzer.workspace.symbol.search.scope": {
+ "markdownDescription": "Workspace symbol search scope.",
+ "default": "workspace",
+ "type": "string",
+ "enum": [
+ "workspace",
+ "workspace_and_dependencies"
+ ],
+ "enumDescriptions": [
+ "Search in current workspace only.",
+ "Search in current workspace and dependencies."
+ ]
+ },
+ "$generated-end": {}
+ }
+ },
+ "problemPatterns": [
+ {
+ "name": "rustc",
+ "patterns": [
+ {
+ "regexp": "^(warning|warn|error)(?:\\[(.*?)\\])?: (.*)$",
+ "severity": 1,
+ "code": 2,
+ "message": 3
+ },
+ {
+ "regexp": "^[\\s->=]*(.*?):(\\d*):(\\d*)\\s*$",
+ "file": 1,
+ "line": 2,
+ "column": 3
+ }
+ ]
+ },
+ {
+ "name": "rustc-json",
+ "patterns": [
+ {
+ "regexp": "^.*\"message\":{\"message\":\"([^\"]*).*?\"file_name\":\"([^\"]+).*?\"line_start\":(\\d+).*?\"line_end\":(\\d+).*?\"column_start\":(\\d+).*?\"column_end\":(\\d+).*}$",
+ "message": 1,
+ "file": 2,
+ "line": 3,
+ "endLine": 4,
+ "column": 5,
+ "endColumn": 6
+ }
+ ]
+ }
+ ],
+ "languages": [
+ {
+ "id": "ra_syntax_tree",
+ "extensions": [
+ ".rast"
+ ]
+ },
+ {
+ "id": "rust",
+ "extensions": [
+ ".rs"
+ ],
+ "aliases": [
+ "Rust",
+ "rs"
+ ],
+ "configuration": "language-configuration.json"
+ }
+ ],
+ "grammars": [
+ {
+ "language": "ra_syntax_tree",
+ "scopeName": "source.ra_syntax_tree",
+ "path": "ra_syntax_tree.tmGrammar.json"
+ }
+ ],
+ "problemMatchers": [
+ {
+ "name": "rustc",
+ "owner": "rustc",
+ "source": "rustc",
+ "fileLocation": [
+ "autoDetect",
+ "${workspaceRoot}"
+ ],
+ "pattern": "$rustc"
+ },
+ {
+ "name": "rustc-json",
+ "owner": "rustc",
+ "source": "rustc",
+ "fileLocation": [
+ "autoDetect",
+ "${workspaceRoot}"
+ ],
+ "pattern": "$rustc-json"
+ },
+ {
+ "name": "rustc-watch",
+ "owner": "rustc",
+ "source": "rustc",
+ "fileLocation": [
+ "autoDetect",
+ "${workspaceRoot}"
+ ],
+ "background": {
+ "beginsPattern": "^\\[Running\\b",
+ "endsPattern": "^\\[Finished running\\b"
+ },
+ "pattern": "$rustc"
+ }
+ ],
+ "colors": [
+ {
+ "id": "rust_analyzer.syntaxTreeBorder",
+ "description": "Color of the border displayed in the Rust source code for the selected syntax node (see \"Show Syntax Tree\" command)",
+ "defaults": {
+ "dark": "#ffffff",
+ "light": "#b700ff",
+ "highContrast": "#b700ff"
+ }
+ }
+ ],
+ "semanticTokenTypes": [
+ {
+ "id": "angle",
+ "description": "Style for < or >",
+ "superType": "punctuation"
+ },
+ {
+ "id": "arithmetic",
+ "description": "Style for arithmetic operators",
+ "superType": "operator"
+ },
+ {
+ "id": "attribute",
+ "description": "Style for attributes"
+ },
+ {
+ "id": "attributeBracket",
+ "description": "Style for attribute invocation brackets, that is the `#[` and `]` tokens",
+ "superType": "punctuation"
+ },
+ {
+ "id": "bitwise",
+ "description": "Style for bitwise operators",
+ "superType": "operator"
+ },
+ {
+ "id": "boolean",
+ "description": "Style for boolean literals",
+ "superType": "keyword"
+ },
+ {
+ "id": "brace",
+ "description": "Style for { or }",
+ "superType": "punctuation"
+ },
+ {
+ "id": "bracket",
+ "description": "Style for [ or ]",
+ "superType": "punctuation"
+ },
+ {
+ "id": "builtinAttribute",
+ "description": "Style for builtin attributes",
+ "superType": "attribute"
+ },
+ {
+ "id": "builtinType",
+ "description": "Style for builtin types",
+ "superType": "type"
+ },
+ {
+ "id": "character",
+ "description": "Style for character literals",
+ "superType": "string"
+ },
+ {
+ "id": "colon",
+ "description": "Style for :",
+ "superType": "punctuation"
+ },
+ {
+ "id": "comma",
+ "description": "Style for ,",
+ "superType": "punctuation"
+ },
+ {
+ "id": "comparison",
+ "description": "Style for comparison operators",
+ "superType": "operator"
+ },
+ {
+ "id": "constParameter",
+ "description": "Style for const generics"
+ },
+ {
+ "id": "derive",
+ "description": "Style for derives",
+ "superType": "attribute"
+ },
+ {
+ "id": "dot",
+ "description": "Style for .",
+ "superType": "punctuation"
+ },
+ {
+ "id": "escapeSequence",
+ "description": "Style for char escapes in strings"
+ },
+ {
+ "id": "formatSpecifier",
+ "description": "Style for {} placeholders in format strings"
+ },
+ {
+ "id": "label",
+ "description": "Style for labels"
+ },
+ {
+ "id": "lifetime",
+ "description": "Style for lifetimes"
+ },
+ {
+ "id": "logical",
+ "description": "Style for logic operators",
+ "superType": "operator"
+ },
+ {
+ "id": "macroBang",
+ "description": "Style for the ! token of macro calls",
+ "superType": "punctuation"
+ },
+ {
+ "id": "operator",
+ "description": "Style for operators",
+ "superType": "punctuation"
+ },
+ {
+ "id": "parenthesis",
+ "description": "Style for ( or )",
+ "superType": "punctuation"
+ },
+ {
+ "id": "punctuation",
+ "description": "Style for generic punctuation"
+ },
+ {
+ "id": "selfKeyword",
+ "description": "Style for the self keyword",
+ "superType": "keyword"
+ },
+ {
+ "id": "selfTypeKeyword",
+ "description": "Style for the self type keyword",
+ "superType": "keyword"
+ },
+ {
+ "id": "semicolon",
+ "description": "Style for ;",
+ "superType": "punctuation"
+ },
+ {
+ "id": "typeAlias",
+ "description": "Style for type aliases",
+ "superType": "type"
+ },
+ {
+ "id": "union",
+ "description": "Style for C-style untagged unions",
+ "superType": "type"
+ },
+ {
+ "id": "unresolvedReference",
+ "description": "Style for names which can not be resolved due to compilation errors"
+ }
+ ],
+ "semanticTokenModifiers": [
+ {
+ "id": "async",
+ "description": "Style for async functions and the `async` and `await` keywords"
+ },
+ {
+ "id": "attribute",
+ "description": "Style for elements within attributes"
+ },
+ {
+ "id": "callable",
+ "description": "Style for locals whose types implements one of the `Fn*` traits"
+ },
+ {
+ "id": "constant",
+ "description": "Style for compile-time constants"
+ },
+ {
+ "id": "consuming",
+ "description": "Style for locals that are being consumed when use in a function call"
+ },
+ {
+ "id": "controlFlow",
+ "description": "Style for control-flow related tokens, this includes the `?` operator"
+ },
+ {
+ "id": "crateRoot",
+ "description": "Style for names resolving to a crate root"
+ },
+ {
+ "id": "injected",
+ "description": "Style for doc-string injected highlighting like rust source blocks in documentation"
+ },
+ {
+ "id": "intraDocLink",
+ "description": "Style for intra doc links in doc-strings"
+ },
+ {
+ "id": "library",
+ "description": "Style for items that are defined outside of the current crate"
+ },
+ {
+ "id": "mutable",
+ "description": "Style for mutable locals and statics as well as functions taking `&mut self`"
+ },
+ {
+ "id": "public",
+ "description": "Style for items that are from the current crate and are `pub`"
+ },
+ {
+ "id": "reference",
+ "description": "Style for locals behind a reference and functions taking `self` by reference"
+ },
+ {
+ "id": "trait",
+ "description": "Style for associated trait items"
+ },
+ {
+ "id": "unsafe",
+ "description": "Style for unsafe operations, like unsafe function calls, as well as the `unsafe` token"
+ }
+ ],
+ "semanticTokenScopes": [
+ {
+ "language": "rust",
+ "scopes": {
+ "attribute": [
+ "meta.attribute.rust"
+ ],
+ "boolean": [
+ "constant.language.boolean.rust"
+ ],
+ "builtinType": [
+ "support.type.primitive.rust"
+ ],
+ "constParameter": [
+ "constant.other.caps.rust"
+ ],
+ "enum": [
+ "entity.name.type.enum.rust"
+ ],
+ "formatSpecifier": [
+ "punctuation.section.embedded.rust"
+ ],
+ "function": [
+ "entity.name.function.rust"
+ ],
+ "interface": [
+ "entity.name.type.trait.rust"
+ ],
+ "keyword": [
+ "keyword.other.rust"
+ ],
+ "keyword.controlFlow": [
+ "keyword.control.rust"
+ ],
+ "lifetime": [
+ "storage.modifier.lifetime.rust"
+ ],
+ "macroBang": [
+ "entity.name.function.macro.rust"
+ ],
+ "method": [
+ "entity.name.function.rust"
+ ],
+ "struct": [
+ "entity.name.type.struct.rust"
+ ],
+ "typeAlias": [
+ "entity.name.type.declaration.rust"
+ ],
+ "union": [
+ "entity.name.type.union.rust"
+ ],
+ "variable": [
+ "variable.other.rust"
+ ],
+ "variable.constant": [
+ "variable.other.constant.rust"
+ ],
+ "*.mutable": [
+ "markup.underline"
+ ]
+ }
+ }
+ ],
+ "menus": {
+ "commandPalette": [
+ {
+ "command": "rust-analyzer.syntaxTree",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.viewHir",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.viewFileText",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.expandMacro",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.matchingBrace",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.parentModule",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.joinLines",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.run",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.debug",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.newDebugConfig",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.analyzerStatus",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.memoryUsage",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.reloadWorkspace",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.reload",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.onEnter",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.ssr",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.serverVersion",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.toggleInlayHints",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.openDocs",
+ "when": "inRustProject"
+ },
+ {
+ "command": "rust-analyzer.openCargoToml",
+ "when": "inRustProject"
+ }
+ ],
+ "editor/context": [
+ {
+ "command": "rust-analyzer.peekTests",
+ "when": "inRustProject",
+ "group": "navigation@1000"
+ }
+ ]
+ }
+ }
+}
--- /dev/null
- private readonly requiresWorkspaceReloadOpts = ["serverPath", "server"].map(
- (opt) => `${this.rootSection}.${opt}`
- );
+import path = require("path");
+import * as vscode from "vscode";
+import { Env } from "./client";
+import { log } from "./util";
+
+export type UpdatesChannel = "stable" | "nightly";
+
+const NIGHTLY_TAG = "nightly";
+
+export type RunnableEnvCfg =
+ | undefined
+ | Record<string, string>
+ | { mask?: string; env: Record<string, string> }[];
+
+export class Config {
+ readonly extensionId = "rust-lang.rust-analyzer";
+
+ readonly rootSection = "rust-analyzer";
++ private readonly requiresWorkspaceReloadOpts = [
++ "serverPath",
++ "server",
++ // FIXME: This shouldn't be here, changing this setting should reload
++ // `continueCommentsOnNewline` behavior without restart
++ "typing",
++ ].map((opt) => `${this.rootSection}.${opt}`);
+ private readonly requiresReloadOpts = [
+ "cargo",
+ "procMacro",
+ "files",
+ "lens", // works as lens.*
+ ]
+ .map((opt) => `${this.rootSection}.${opt}`)
+ .concat(this.requiresWorkspaceReloadOpts);
+
+ readonly package: {
+ version: string;
+ releaseTag: string | null;
+ enableProposedApi: boolean | undefined;
+ } = vscode.extensions.getExtension(this.extensionId)!.packageJSON;
+
+ readonly globalStorageUri: vscode.Uri;
+
+ constructor(ctx: vscode.ExtensionContext) {
+ this.globalStorageUri = ctx.globalStorageUri;
+ vscode.workspace.onDidChangeConfiguration(
+ this.onDidChangeConfiguration,
+ this,
+ ctx.subscriptions
+ );
+ this.refreshLogging();
+ }
+
+ // Re-applies the `trace.extension` setting to the logger, then dumps the
+ // extension version and the effective configuration to the log.
+ private refreshLogging() {
+ log.setEnabled(this.traceExtension);
+ log.info("Extension version:", this.package.version);
+
+ // Filter out function-valued entries so only plain config values are logged.
+ const cfg = Object.entries(this.cfg).filter(([_, val]) => !(val instanceof Function));
+ log.info("Using configuration", Object.fromEntries(cfg));
+ }
+
+ // Reacts to configuration changes: refreshes logging, then — depending on
+ // which option changed — either restarts the server silently, or prompts
+ // the user for a server reload / full window reload.
+ private async onDidChangeConfiguration(event: vscode.ConfigurationChangeEvent) {
+ this.refreshLogging();
+
+ const requiresReloadOpt = this.requiresReloadOpts.find((opt) =>
+ event.affectsConfiguration(opt)
+ );
+
+ // Nothing reload-worthy changed.
+ if (!requiresReloadOpt) return;
+
+ const requiresWorkspaceReloadOpt = this.requiresWorkspaceReloadOpts.find((opt) =>
+ event.affectsConfiguration(opt)
+ );
+
+ // A plain server restart suffices and the user opted into automatic
+ // restarts — do it without prompting.
+ if (!requiresWorkspaceReloadOpt && this.restartServerOnConfigChange) {
+ await vscode.commands.executeCommand("rust-analyzer.reload");
+ return;
+ }
+
+ const message = requiresWorkspaceReloadOpt
+ ? `Changing "${requiresWorkspaceReloadOpt}" requires a window reload`
+ : `Changing "${requiresReloadOpt}" requires a reload`;
+ const userResponse = await vscode.window.showInformationMessage(message, "Reload now");
+
+ // Fix: the original re-tested `userResponse === "Reload now"` inside this
+ // branch; the inner duplicate check was dead code and has been removed.
+ if (userResponse === "Reload now") {
+ const command = requiresWorkspaceReloadOpt
+ ? "workbench.action.reloadWindow"
+ : "rust-analyzer.reload";
+ await vscode.commands.executeCommand(command);
+ }
+ }
+
+ // We don't do runtime config validation here for simplicity. More on stackoverflow:
+ // https://stackoverflow.com/questions/60135780/what-is-the-best-way-to-type-check-the-configuration-for-vscode-extension
+
+ private get cfg(): vscode.WorkspaceConfiguration {
+ return vscode.workspace.getConfiguration(this.rootSection);
+ }
+
+ /**
+ * Beware that postfix `!` operator erases both `null` and `undefined`.
+ * This is why the following doesn't work as expected:
+ *
+ * ```ts
+ * const nullableNum = vscode
+ * .workspace
+ * .getConfiguration("rust-analyzer")
+ * .get<number | null>(path)!;
+ *
+ * // What happens is that type of `nullableNum` is `number` but not `null | number`:
+ * const fullFledgedNum: number = nullableNum;
+ * ```
+ * So this getter handles this quirk by not requiring the caller to use postfix `!`
+ */
+ private get<T>(path: string): T {
+ return this.cfg.get<T>(path)!;
+ }
+
+ get serverPath() {
+ return this.get<null | string>("server.path") ?? this.get<null | string>("serverPath");
+ }
+ get serverExtraEnv(): Env {
+ const extraEnv =
+ this.get<{ [key: string]: string | number } | null>("server.extraEnv") ?? {};
+ return Object.fromEntries(
+ Object.entries(extraEnv).map(([k, v]) => [k, typeof v !== "string" ? v.toString() : v])
+ );
+ }
+ get traceExtension() {
+ return this.get<boolean>("trace.extension");
+ }
+
+ get cargoRunner() {
+ return this.get<string | undefined>("cargoRunner");
+ }
+
+ get runnableEnv() {
+ return this.get<RunnableEnvCfg>("runnableEnv");
+ }
+
+ get restartServerOnConfigChange() {
+ return this.get<boolean>("restartServerOnConfigChange");
+ }
+
++ get typingContinueCommentsOnNewline() {
++ return this.get<boolean>("typing.continueCommentsOnNewline");
++ }
++
+ get debug() {
+ let sourceFileMap = this.get<Record<string, string> | "auto">("debug.sourceFileMap");
+ if (sourceFileMap !== "auto") {
+ // "/rustc/<id>" used by suggestions only.
+ const { ["/rustc/<id>"]: _, ...trimmed } =
+ this.get<Record<string, string>>("debug.sourceFileMap");
+ sourceFileMap = trimmed;
+ }
+
+ return {
+ engine: this.get<string>("debug.engine"),
+ engineSettings: this.get<object>("debug.engineSettings"),
+ openDebugPane: this.get<boolean>("debug.openDebugPane"),
+ sourceFileMap: sourceFileMap,
+ };
+ }
+
+ get hoverActions() {
+ return {
+ enable: this.get<boolean>("hover.actions.enable"),
+ implementations: this.get<boolean>("hover.actions.implementations.enable"),
+ references: this.get<boolean>("hover.actions.references.enable"),
+ run: this.get<boolean>("hover.actions.run.enable"),
+ debug: this.get<boolean>("hover.actions.debug.enable"),
+ gotoTypeDef: this.get<boolean>("hover.actions.gotoTypeDef.enable"),
+ };
+ }
+
+ get currentExtensionIsNightly() {
+ return this.package.releaseTag === NIGHTLY_TAG;
+ }
+}
+
+export async function updateConfig(config: vscode.WorkspaceConfiguration) {
+ const renames = [
+ ["assist.allowMergingIntoGlobImports", "imports.merge.glob"],
+ ["assist.exprFillDefault", "assist.expressionFillDefault"],
+ ["assist.importEnforceGranularity", "imports.granularity.enforce"],
+ ["assist.importGranularity", "imports.granularity.group"],
+ ["assist.importMergeBehavior", "imports.granularity.group"],
+ ["assist.importMergeBehaviour", "imports.granularity.group"],
+ ["assist.importGroup", "imports.group.enable"],
+ ["assist.importPrefix", "imports.prefix"],
+ ["primeCaches.enable", "cachePriming.enable"],
+ ["cache.warmup", "cachePriming.enable"],
+ ["cargo.loadOutDirsFromCheck", "cargo.buildScripts.enable"],
+ ["cargo.runBuildScripts", "cargo.buildScripts.enable"],
+ ["cargo.runBuildScriptsCommand", "cargo.buildScripts.overrideCommand"],
+ ["cargo.useRustcWrapperForBuildScripts", "cargo.buildScripts.useRustcWrapper"],
+ ["completion.snippets", "completion.snippets.custom"],
+ ["diagnostics.enableExperimental", "diagnostics.experimental.enable"],
+ ["experimental.procAttrMacros", "procMacro.attributes.enable"],
+ ["highlighting.strings", "semanticHighlighting.strings.enable"],
+ ["highlightRelated.breakPoints", "highlightRelated.breakPoints.enable"],
+ ["highlightRelated.exitPoints", "highlightRelated.exitPoints.enable"],
+ ["highlightRelated.yieldPoints", "highlightRelated.yieldPoints.enable"],
+ ["highlightRelated.references", "highlightRelated.references.enable"],
+ ["hover.documentation", "hover.documentation.enable"],
+ ["hover.linksInHover", "hover.links.enable"],
+ ["hoverActions.linksInHover", "hover.links.enable"],
+ ["hoverActions.debug", "hover.actions.debug.enable"],
+ ["hoverActions.enable", "hover.actions.enable.enable"],
+ ["hoverActions.gotoTypeDef", "hover.actions.gotoTypeDef.enable"],
+ ["hoverActions.implementations", "hover.actions.implementations.enable"],
+ ["hoverActions.references", "hover.actions.references.enable"],
+ ["hoverActions.run", "hover.actions.run.enable"],
+ ["inlayHints.chainingHints", "inlayHints.chainingHints.enable"],
+ ["inlayHints.closureReturnTypeHints", "inlayHints.closureReturnTypeHints.enable"],
+ ["inlayHints.hideNamedConstructorHints", "inlayHints.typeHints.hideNamedConstructor"],
+ ["inlayHints.parameterHints", "inlayHints.parameterHints.enable"],
+ ["inlayHints.reborrowHints", "inlayHints.reborrowHints.enable"],
+ ["inlayHints.typeHints", "inlayHints.typeHints.enable"],
+ ["lruCapacity", "lru.capacity"],
+ ["runnables.cargoExtraArgs", "runnables.extraArgs"],
+ ["runnables.overrideCargo", "runnables.command"],
+ ["rustcSource", "rustc.source"],
+ ["rustfmt.enableRangeFormatting", "rustfmt.rangeFormatting.enable"],
+ ];
+
+ for (const [oldKey, newKey] of renames) {
+ const inspect = config.inspect(oldKey);
+ if (inspect !== undefined) {
+ const valMatrix = [
+ {
+ val: inspect.globalValue,
+ langVal: inspect.globalLanguageValue,
+ target: vscode.ConfigurationTarget.Global,
+ },
+ {
+ val: inspect.workspaceFolderValue,
+ langVal: inspect.workspaceFolderLanguageValue,
+ target: vscode.ConfigurationTarget.WorkspaceFolder,
+ },
+ {
+ val: inspect.workspaceValue,
+ langVal: inspect.workspaceLanguageValue,
+ target: vscode.ConfigurationTarget.Workspace,
+ },
+ ];
+ for (const { val, langVal, target } of valMatrix) {
+ const patch = (val: unknown) => {
+ // some of the updates we do only append "enable" or "custom"
+ // that means on the next run we would find these again, but as objects with
+ // these properties causing us to destroy the config
+ // so filter those already updated ones out
+ return (
+ val !== undefined &&
+ !(
+ typeof val === "object" &&
+ val !== null &&
+ (oldKey === "completion.snippets" || !val.hasOwnProperty("custom"))
+ )
+ );
+ };
+ if (patch(val)) {
+ await config.update(newKey, val, target, false);
+ await config.update(oldKey, undefined, target, false);
+ }
+ if (patch(langVal)) {
+ await config.update(newKey, langVal, target, true);
+ await config.update(oldKey, undefined, target, true);
+ }
+ }
+ }
+ }
+}
+
+// Expands `${...}` references inside the values of `env`.
+// Supported forms: references to other keys of `env` itself,
+// `${env:NAME}` (taken from `process.env`), and a handful of VS Code
+// variables resolved by `computeVscodeVar`. References with an unknown
+// `prefix:` are kept verbatim. Resolution runs to a fixpoint, so chains of
+// references work; circular references are left unresolved.
+export function substituteVariablesInEnv(env: Env): Env {
+ const missingDeps = new Set<string>();
+ // vscode uses `env:ENV_NAME` for env vars resolution, and it's easier
+ // to follow the same convention for our dependency tracking
+ const definedEnvKeys = new Set(Object.keys(env).map((key) => `env:${key}`));
+ // Build a dependency graph: for each env entry, record which `${...}`
+ // names its value mentions, and collect names not defined in `env`.
+ const envWithDeps = Object.fromEntries(
+ Object.entries(env).map(([key, value]) => {
+ const deps = new Set<string>();
+ const depRe = new RegExp(/\${(?<depName>.+?)}/g);
+ let match = undefined;
+ while ((match = depRe.exec(value))) {
+ const depName = match.groups!.depName;
+ deps.add(depName);
+ // `depName` at this point can have a form of `expression` or
+ // `prefix:expression`
+ if (!definedEnvKeys.has(depName)) {
+ missingDeps.add(depName);
+ }
+ }
+ return [`env:${key}`, { deps: [...deps], value }];
+ })
+ );
+
+ const resolved = new Set<string>();
+ // Seed the graph with values for every dependency not defined in `env`.
+ for (const dep of missingDeps) {
+ const match = /(?<prefix>.*?):(?<body>.+)/.exec(dep);
+ if (match) {
+ const { prefix, body } = match.groups!;
+ if (prefix === "env") {
+ // `${env:NAME}` — read from the process environment, "" if unset.
+ const envName = body;
+ envWithDeps[dep] = {
+ value: process.env[envName] ?? "",
+ deps: [],
+ };
+ resolved.add(dep);
+ } else {
+ // we can't handle other prefixes at the moment
+ // leave values as is, but still mark them as resolved
+ envWithDeps[dep] = {
+ value: "${" + dep + "}",
+ deps: [],
+ };
+ resolved.add(dep);
+ }
+ } else {
+ // No prefix — treat as a VS Code variable (e.g. `workspaceFolder`).
+ envWithDeps[dep] = {
+ value: computeVscodeVar(dep),
+ deps: [],
+ };
+ }
+ }
+ const toResolve = new Set(Object.keys(envWithDeps));
+
+ // Iterate to a fixpoint: substitute into every entry whose dependencies
+ // are all resolved. The `toResolve.size < leftToResolveSize` guard stops
+ // the loop when a pass makes no progress (i.e. circular references).
+ let leftToResolveSize;
+ do {
+ leftToResolveSize = toResolve.size;
+ for (const key of toResolve) {
+ if (envWithDeps[key].deps.every((dep) => resolved.has(dep))) {
+ envWithDeps[key].value = envWithDeps[key].value.replace(
+ /\${(?<depName>.+?)}/g,
+ (_wholeMatch, depName) => {
+ return envWithDeps[depName].value;
+ }
+ );
+ resolved.add(key);
+ toResolve.delete(key);
+ }
+ }
+ } while (toResolve.size > 0 && toResolve.size < leftToResolveSize);
+
+ // Project the resolved values back onto the original key names.
+ const resolvedEnv: Env = {};
+ for (const key of Object.keys(env)) {
+ resolvedEnv[key] = envWithDeps[`env:${key}`].value;
+ }
+ return resolvedEnv;
+}
+
+// Resolves a single VS Code variable name (e.g. `workspaceFolder`, `cwd`,
+// `execPath`, `pathSeparator`) to its value. Unsupported names are returned
+// as the original `${name}` expression so callers can leave them untouched.
+function computeVscodeVar(varName: string): string {
+ // https://code.visualstudio.com/docs/editor/variables-reference
+ const supportedVariables: { [k: string]: () => string } = {
+ workspaceFolder: () => {
+ const folders = vscode.workspace.workspaceFolders ?? [];
+ if (folders.length === 1) {
+ // TODO: support for remote workspaces?
+ return folders[0].uri.fsPath;
+ } else if (folders.length > 1) {
+ // could use currently opened document to detect the correct
+ // workspace. However, that would be determined by the document
+ // user has opened on Editor startup. Could lead to
+ // unpredictable workspace selection in practice.
+ // It's better to pick the first one
+ return folders[0].uri.fsPath;
+ } else {
+ // no workspace opened
+ return "";
+ }
+ },
+
+ workspaceFolderBasename: () => {
+ const workspaceFolder = computeVscodeVar("workspaceFolder");
+ if (workspaceFolder) {
+ return path.basename(workspaceFolder);
+ } else {
+ return "";
+ }
+ },
+
+ cwd: () => process.cwd(),
+
+ // see
+ // https://github.com/microsoft/vscode/blob/08ac1bb67ca2459496b272d8f4a908757f24f56f/src/vs/workbench/api/common/extHostVariableResolverService.ts#L81
+ // or
+ // https://github.com/microsoft/vscode/blob/29eb316bb9f154b7870eb5204ec7f2e7cf649bec/src/vs/server/node/remoteTerminalChannel.ts#L56
+ execPath: () => process.env.VSCODE_EXEC_PATH ?? process.execPath,
+
+ pathSeparator: () => path.sep,
+ };
+
+ if (varName in supportedVariables) {
+ return supportedVariables[varName]();
+ } else {
+ // can't resolve, keep the expression as is
+ return "${" + varName + "}";
+ }
+}
--- /dev/null
- ctx.pushCleanup(configureLanguage());
+import * as vscode from "vscode";
+import * as lc from "vscode-languageclient/node";
+import * as os from "os";
+
+import * as commands from "./commands";
+import { Ctx } from "./ctx";
+import { Config } from "./config";
+import { log, isValidExecutable, isRustDocument } from "./util";
+import { PersistentState } from "./persistent_state";
+import { activateTaskProvider } from "./tasks";
+import { setContextValue } from "./util";
+import { exec } from "child_process";
+
+let ctx: Ctx | undefined;
+
+const RUST_PROJECT_CONTEXT_NAME = "inRustProject";
+
+let TRACE_OUTPUT_CHANNEL: vscode.OutputChannel | null = null;
+export function traceOutputChannel() {
+ if (!TRACE_OUTPUT_CHANNEL) {
+ TRACE_OUTPUT_CHANNEL = vscode.window.createOutputChannel(
+ "Rust Analyzer Language Server Trace"
+ );
+ }
+ return TRACE_OUTPUT_CHANNEL;
+}
+let OUTPUT_CHANNEL: vscode.OutputChannel | null = null;
+export function outputChannel() {
+ if (!OUTPUT_CHANNEL) {
+ OUTPUT_CHANNEL = vscode.window.createOutputChannel("Rust Analyzer Language Server");
+ }
+ return OUTPUT_CHANNEL;
+}
+
+export interface RustAnalyzerExtensionApi {
+ client: lc.LanguageClient;
+}
+
+export async function activate(
+ context: vscode.ExtensionContext
+): Promise<RustAnalyzerExtensionApi> {
+ // VS Code doesn't show a notification when an extension fails to activate
+ // so we do it ourselves.
+ return await tryActivate(context).catch((err) => {
+ void vscode.window.showErrorMessage(`Cannot activate rust-analyzer: ${err.message}`);
+ throw err;
+ });
+}
+
+// Performs the actual activation work: locates/bootstraps the server binary,
+// creates the extension context (workspace-folder or detached-files mode),
+// registers commands, and wires up configuration-change forwarding.
+async function tryActivate(context: vscode.ExtensionContext): Promise<RustAnalyzerExtensionApi> {
+ const config = new Config(context);
+ const state = new PersistentState(context.globalState);
+ const serverPath = await bootstrap(context, config, state).catch((err) => {
+ let message = "bootstrap error. ";
+
+ // NOTE(review): this message points at "OUTPUT > Rust Analyzer Client",
+ // but the output channels created in this file are named
+ // "Rust Analyzer Language Server [Trace]" — confirm the channel name.
+ message += 'See the logs in "OUTPUT > Rust Analyzer Client" (should open automatically). ';
+ message += 'To enable verbose logs use { "rust-analyzer.trace.extension": true }';
+
+ log.error("Bootstrap error", err);
+ throw new Error(message);
+ });
+
+ // No workspace folders: fall back to "Detached Files" mode if any Rust
+ // documents are already open; otherwise refuse to activate.
+ if ((vscode.workspace.workspaceFolders || []).length === 0) {
+ const rustDocuments = vscode.workspace.textDocuments.filter((document) =>
+ isRustDocument(document)
+ );
+ if (rustDocuments.length > 0) {
+ ctx = await Ctx.create(config, context, serverPath, {
+ kind: "Detached Files",
+ files: rustDocuments,
+ });
+ } else {
+ throw new Error("no rust files are opened");
+ }
+ } else {
+ // Note: we try to start the server before we activate type hints so that it
+ // registers its `onDidChangeDocument` handler before us.
+ //
+ // This a horribly, horribly wrong way to deal with this problem.
+ ctx = await Ctx.create(config, context, serverPath, { kind: "Workspace Folder" });
+ ctx.pushCleanup(activateTaskProvider(ctx.config));
+ }
+ await initCommonContext(context, ctx);
+
+ warnAboutExtensionConflicts();
+
++ if (config.typingContinueCommentsOnNewline) {
++ ctx.pushCleanup(configureLanguage());
++ }
+
+ // Forward every configuration change to the server (it pulls the actual
+ // settings itself, hence the empty `settings` payload).
+ vscode.workspace.onDidChangeConfiguration(
+ (_) =>
+ ctx?.client
+ ?.sendNotification("workspace/didChangeConfiguration", { settings: "" })
+ .catch(log.error),
+ null,
+ ctx.subscriptions
+ );
+
+ return {
+ client: ctx.client,
+ };
+}
+
+async function initCommonContext(context: vscode.ExtensionContext, ctx: Ctx) {
+ // Register a "dumb" onEnter command for the case where server fails to
+ // start.
+ //
+ // FIXME: refactor command registration code such that commands are
+ // **always** registered, even if the server does not start. Use API like
+ // this perhaps?
+ //
+ // ```TypeScript
+ // registerCommand(
+ // factory: (Ctx) => ((Ctx) => any),
+ // fallback: () => any = () => vscode.window.showErrorMessage(
+ // "rust-analyzer is not available"
+ // ),
+ // )
+ const defaultOnEnter = vscode.commands.registerCommand("rust-analyzer.onEnter", () =>
+ vscode.commands.executeCommand("default:type", { text: "\n" })
+ );
+ context.subscriptions.push(defaultOnEnter);
+
+ await setContextValue(RUST_PROJECT_CONTEXT_NAME, true);
+
+ // Commands which invokes manually via command palette, shortcut, etc.
+
+ // Reloading is inspired by @DanTup maneuver: https://github.com/microsoft/vscode/issues/45774#issuecomment-373423895
+ ctx.registerCommand("reload", (_) => async () => {
+ void vscode.window.showInformationMessage("Reloading rust-analyzer...");
+ await doDeactivate();
+ while (context.subscriptions.length > 0) {
+ try {
+ context.subscriptions.pop()!.dispose();
+ } catch (err) {
+ log.error("Dispose error:", err);
+ }
+ }
+ await activate(context).catch(log.error);
+ });
+
+ ctx.registerCommand("analyzerStatus", commands.analyzerStatus);
+ ctx.registerCommand("memoryUsage", commands.memoryUsage);
+ ctx.registerCommand("shuffleCrateGraph", commands.shuffleCrateGraph);
+ ctx.registerCommand("reloadWorkspace", commands.reloadWorkspace);
+ ctx.registerCommand("matchingBrace", commands.matchingBrace);
+ ctx.registerCommand("joinLines", commands.joinLines);
+ ctx.registerCommand("parentModule", commands.parentModule);
+ ctx.registerCommand("syntaxTree", commands.syntaxTree);
+ ctx.registerCommand("viewHir", commands.viewHir);
+ ctx.registerCommand("viewFileText", commands.viewFileText);
+ ctx.registerCommand("viewItemTree", commands.viewItemTree);
+ ctx.registerCommand("viewCrateGraph", commands.viewCrateGraph);
+ ctx.registerCommand("viewFullCrateGraph", commands.viewFullCrateGraph);
+ ctx.registerCommand("expandMacro", commands.expandMacro);
+ ctx.registerCommand("run", commands.run);
+ ctx.registerCommand("copyRunCommandLine", commands.copyRunCommandLine);
+ ctx.registerCommand("debug", commands.debug);
+ ctx.registerCommand("newDebugConfig", commands.newDebugConfig);
+ ctx.registerCommand("openDocs", commands.openDocs);
+ ctx.registerCommand("openCargoToml", commands.openCargoToml);
+ ctx.registerCommand("peekTests", commands.peekTests);
+ ctx.registerCommand("moveItemUp", commands.moveItemUp);
+ ctx.registerCommand("moveItemDown", commands.moveItemDown);
+
+ defaultOnEnter.dispose();
+ ctx.registerCommand("onEnter", commands.onEnter);
+
+ ctx.registerCommand("ssr", commands.ssr);
+ ctx.registerCommand("serverVersion", commands.serverVersion);
+ ctx.registerCommand("toggleInlayHints", commands.toggleInlayHints);
+
+ // Internal commands which are invoked by the server.
+ ctx.registerCommand("runSingle", commands.runSingle);
+ ctx.registerCommand("debugSingle", commands.debugSingle);
+ ctx.registerCommand("showReferences", commands.showReferences);
+ ctx.registerCommand("applySnippetWorkspaceEdit", commands.applySnippetWorkspaceEditCommand);
+ ctx.registerCommand("resolveCodeAction", commands.resolveCodeAction);
+ ctx.registerCommand("applyActionGroup", commands.applyActionGroup);
+ ctx.registerCommand("gotoLocation", commands.gotoLocation);
+
+ ctx.registerCommand("linkToCommand", commands.linkToCommand);
+}
+
+export async function deactivate() {
+ TRACE_OUTPUT_CHANNEL?.dispose();
+ TRACE_OUTPUT_CHANNEL = null;
+ OUTPUT_CHANNEL?.dispose();
+ OUTPUT_CHANNEL = null;
+ await doDeactivate();
+}
+
+async function doDeactivate() {
+ await setContextValue(RUST_PROJECT_CONTEXT_NAME, undefined);
+ await ctx?.client.stop();
+ ctx = undefined;
+}
+
+async function bootstrap(
+ context: vscode.ExtensionContext,
+ config: Config,
+ state: PersistentState
+): Promise<string> {
+ const path = await getServer(context, config, state);
+ if (!path) {
+ throw new Error(
+ "Rust Analyzer Language Server is not available. " +
+ "Please, ensure its [proper installation](https://rust-analyzer.github.io/manual.html#installation)."
+ );
+ }
+
+ log.info("Using server binary at", path);
+
+ if (!isValidExecutable(path)) {
+ if (config.serverPath) {
+ throw new Error(`Failed to execute ${path} --version. \`config.server.path\` or \`config.serverPath\` has been set explicitly.\
+ Consider removing this config or making a valid server binary available at that path.`);
+ } else {
+ throw new Error(`Failed to execute ${path} --version`);
+ }
+ }
+
+ return path;
+}
+
+async function patchelf(dest: vscode.Uri): Promise<void> {
+ await vscode.window.withProgress(
+ {
+ location: vscode.ProgressLocation.Notification,
+ title: "Patching rust-analyzer for NixOS",
+ },
+ async (progress, _) => {
+ const expression = `
+ {srcStr, pkgs ? import <nixpkgs> {}}:
+ pkgs.stdenv.mkDerivation {
+ name = "rust-analyzer";
+ src = /. + srcStr;
+ phases = [ "installPhase" "fixupPhase" ];
+ installPhase = "cp $src $out";
+ fixupPhase = ''
+ chmod 755 $out
+ patchelf --set-interpreter "$(cat $NIX_CC/nix-support/dynamic-linker)" $out
+ '';
+ }
+ `;
+ const origFile = vscode.Uri.file(dest.fsPath + "-orig");
+ await vscode.workspace.fs.rename(dest, origFile, { overwrite: true });
+ try {
+ progress.report({ message: "Patching executable", increment: 20 });
+ await new Promise((resolve, reject) => {
+ const handle = exec(
+ `nix-build -E - --argstr srcStr '${origFile.fsPath}' -o '${dest.fsPath}'`,
+ (err, stdout, stderr) => {
+ if (err != null) {
+ reject(Error(stderr));
+ } else {
+ resolve(stdout);
+ }
+ }
+ );
+ handle.stdin?.write(expression);
+ handle.stdin?.end();
+ });
+ } finally {
+ await vscode.workspace.fs.delete(origFile);
+ }
+ }
+ );
+}
+
+async function getServer(
+ context: vscode.ExtensionContext,
+ config: Config,
+ state: PersistentState
+): Promise<string | undefined> {
+ const explicitPath = serverPath(config);
+ if (explicitPath) {
+ if (explicitPath.startsWith("~/")) {
+ return os.homedir() + explicitPath.slice("~".length);
+ }
+ return explicitPath;
+ }
+ if (config.package.releaseTag === null) return "rust-analyzer";
+
+ const ext = process.platform === "win32" ? ".exe" : "";
+ const bundled = vscode.Uri.joinPath(context.extensionUri, "server", `rust-analyzer${ext}`);
+ const bundledExists = await vscode.workspace.fs.stat(bundled).then(
+ () => true,
+ () => false
+ );
+ if (bundledExists) {
+ let server = bundled;
+ if (await isNixOs()) {
+ await vscode.workspace.fs.createDirectory(config.globalStorageUri).then();
+ const dest = vscode.Uri.joinPath(config.globalStorageUri, `rust-analyzer${ext}`);
+ let exists = await vscode.workspace.fs.stat(dest).then(
+ () => true,
+ () => false
+ );
+ if (exists && config.package.version !== state.serverVersion) {
+ await vscode.workspace.fs.delete(dest);
+ exists = false;
+ }
+ if (!exists) {
+ await vscode.workspace.fs.copy(bundled, dest);
+ await patchelf(dest);
+ }
+ server = dest;
+ }
+ await state.updateServerVersion(config.package.version);
+ return server.fsPath;
+ }
+
+ await state.updateServerVersion(undefined);
+ await vscode.window.showErrorMessage(
+ "Unfortunately we don't ship binaries for your platform yet. " +
+ "You need to manually clone the rust-analyzer repository and " +
+ "run `cargo xtask install --server` to build the language server from sources. " +
+ "If you feel that your platform should be supported, please create an issue " +
+ "about that [here](https://github.com/rust-lang/rust-analyzer/issues) and we " +
+ "will consider it."
+ );
+ return undefined;
+}
+
+function serverPath(config: Config): string | null {
+ return process.env.__RA_LSP_SERVER_DEBUG ?? config.serverPath;
+}
+
+// Detects NixOS by looking for "nixos" in the `ID=` line of
+// /etc/os-release; returns false if the file is unreadable (non-Linux,
+// sandboxed, etc.). Used to decide whether the bundled binary needs patchelf.
+async function isNixOs(): Promise<boolean> {
+ try {
+ const contents = (
+ await vscode.workspace.fs.readFile(vscode.Uri.file("/etc/os-release"))
+ ).toString();
+ // Default to "ID=linux" when no ID line exists, which fails the check.
+ const idString = contents.split("\n").find((a) => a.startsWith("ID=")) || "ID=linux";
+ return idString.indexOf("nixos") !== -1;
+ } catch {
+ return false;
+ }
+}
+
+function warnAboutExtensionConflicts() {
+ if (vscode.extensions.getExtension("rust-lang.rust")) {
+ vscode.window
+ .showWarningMessage(
+ `You have both the rust-analyzer (rust-lang.rust-analyzer) and Rust (rust-lang.rust) ` +
+ "plugins enabled. These are known to conflict and cause various functions of " +
+ "both plugins to not work correctly. You should disable one of them.",
+ "Got it"
+ )
+ .then(() => {}, console.error);
+ }
+}
+
+/**
+ * Sets up additional language configuration that's impossible to do via a
+ * separate language-configuration.json file. See [1] for more information.
+ *
+ * Registers on-enter rules that continue `///`, `//!` and `/* ... *` + `/`
+ * style comments on the next line. Returns the Disposable produced by
+ * `setLanguageConfiguration` so the caller can undo the registration.
+ *
+ * [1]: https://github.com/Microsoft/vscode/issues/11514#issuecomment-244707076
+ */
+function configureLanguage(): vscode.Disposable {
+ const indentAction = vscode.IndentAction.None;
+ return vscode.languages.setLanguageConfiguration("rust", {
+ onEnterRules: [
+ {
+ // Doc single-line comment
+ // e.g. ///|
+ beforeText: /^\s*\/{3}.*$/,
+ action: { indentAction, appendText: "/// " },
+ },
+ {
+ // Parent doc single-line comment
+ // e.g. //!|
+ beforeText: /^\s*\/{2}\!.*$/,
+ action: { indentAction, appendText: "//! " },
+ },
+ {
+ // Begins an auto-closed multi-line comment (standard or parent doc)
+ // e.g. /** | */ or /*! | */
+ beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/,
+ afterText: /^\s*\*\/$/,
+ action: { indentAction: vscode.IndentAction.IndentOutdent, appendText: " * " },
+ },
+ {
+ // Begins a multi-line comment (standard or parent doc)
+ // e.g. /** ...| or /*! ...|
+ beforeText: /^\s*\/\*(\*|\!)(?!\/)([^\*]|\*(?!\/))*$/,
+ action: { indentAction, appendText: " * " },
+ },
+ {
+ // Continues a multi-line comment
+ // e.g. * ...|
+ beforeText: /^(\ \ )*\ \*(\ ([^\*]|\*(?!\/))*)?$/,
+ action: { indentAction, appendText: "* " },
+ },
+ {
+ // Dedents after closing a multi-line comment
+ // e.g. */|
+ beforeText: /^(\ \ )*\ \*\/\s*$/,
+ action: { indentAction, removeText: 1 },
+ },
+ ],
+ });
+}
--- /dev/null
- pub use map::ArenaMap;
+//! Yet another index-based arena.
+
+#![warn(rust_2018_idioms, unused_lifetimes, semicolon_in_expressions_from_macros)]
+#![warn(missing_docs)]
+
+use std::{
+ fmt,
+ hash::{Hash, Hasher},
+ iter::FromIterator,
+ marker::PhantomData,
+ ops::{Index, IndexMut, Range, RangeInclusive},
+};
+
+mod map;
++pub use map::{ArenaMap, Entry, OccupiedEntry, VacantEntry};
+
+/// The raw index of a value in an arena.
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct RawIdx(u32);
+
+impl From<RawIdx> for u32 {
+ fn from(raw: RawIdx) -> u32 {
+ raw.0
+ }
+}
+
+impl From<u32> for RawIdx {
+ fn from(idx: u32) -> RawIdx {
+ RawIdx(idx)
+ }
+}
+
+impl fmt::Debug for RawIdx {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+impl fmt::Display for RawIdx {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ self.0.fmt(f)
+ }
+}
+
+/// The index of a value allocated in an arena that holds `T`s.
+pub struct Idx<T> {
+ raw: RawIdx,
+ _ty: PhantomData<fn() -> T>,
+}
+
+impl<T> Clone for Idx<T> {
+ fn clone(&self) -> Self {
+ *self
+ }
+}
+impl<T> Copy for Idx<T> {}
+
+impl<T> PartialEq for Idx<T> {
+ fn eq(&self, other: &Idx<T>) -> bool {
+ self.raw == other.raw
+ }
+}
+impl<T> Eq for Idx<T> {}
+
+impl<T> Hash for Idx<T> {
+ fn hash<H: Hasher>(&self, state: &mut H) {
+ self.raw.hash(state);
+ }
+}
+
+impl<T> fmt::Debug for Idx<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Strip the module path from the type name (keep everything after the
+ // last `:`), so the output reads `Idx::<Foo>(3)` rather than the
+ // fully qualified `Idx::<crate::module::Foo>(3)`.
+ let mut type_name = std::any::type_name::<T>();
+ if let Some(idx) = type_name.rfind(':') {
+ type_name = &type_name[idx + 1..];
+ }
+ write!(f, "Idx::<{}>({})", type_name, self.raw)
+ }
+}
+
+impl<T> Idx<T> {
+ /// Creates a new index from a [`RawIdx`].
+ pub fn from_raw(raw: RawIdx) -> Self {
+ Idx { raw, _ty: PhantomData }
+ }
+
+ /// Converts this index into the underlying [`RawIdx`].
+ pub fn into_raw(self) -> RawIdx {
+ self.raw
+ }
+}
+
+/// A range of densely allocated arena values.
+pub struct IdxRange<T> {
+ range: Range<u32>,
+ _p: PhantomData<T>,
+}
+
+impl<T> IdxRange<T> {
+ /// Creates a new index range
+ /// inclusive of the start value and exclusive of the end value.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// let a = arena.alloc("a");
+ /// let b = arena.alloc("b");
+ /// let c = arena.alloc("c");
+ /// let d = arena.alloc("d");
+ ///
+ /// let range = la_arena::IdxRange::new(b..d);
+ /// assert_eq!(&arena[range], &["b", "c"]);
+ /// ```
+ pub fn new(range: Range<Idx<T>>) -> Self {
+ Self { range: range.start.into_raw().into()..range.end.into_raw().into(), _p: PhantomData }
+ }
+
+ /// Creates a new index range
+ /// inclusive of the start value and end value.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// let foo = arena.alloc("foo");
+ /// let bar = arena.alloc("bar");
+ /// let baz = arena.alloc("baz");
+ ///
+ /// let range = la_arena::IdxRange::new_inclusive(foo..=baz);
+ /// assert_eq!(&arena[range], &["foo", "bar", "baz"]);
+ ///
+ /// let range = la_arena::IdxRange::new_inclusive(foo..=foo);
+ /// assert_eq!(&arena[range], &["foo"]);
+ /// ```
+ pub fn new_inclusive(range: RangeInclusive<Idx<T>>) -> Self {
+ Self {
+ range: u32::from(range.start().into_raw())..u32::from(range.end().into_raw()) + 1,
+ _p: PhantomData,
+ }
+ }
+
+ /// Returns whether the index range is empty.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// let one = arena.alloc(1);
+ /// let two = arena.alloc(2);
+ ///
+ /// assert!(la_arena::IdxRange::new(one..one).is_empty());
+ /// ```
+ pub fn is_empty(&self) -> bool {
+ self.range.is_empty()
+ }
+}
+
+impl<T> Iterator for IdxRange<T> {
+ type Item = Idx<T>;
+ fn next(&mut self) -> Option<Self::Item> {
+ self.range.next().map(|raw| Idx::from_raw(raw.into()))
+ }
+}
+
+impl<T> DoubleEndedIterator for IdxRange<T> {
+ fn next_back(&mut self) -> Option<Self::Item> {
+ self.range.next_back().map(|raw| Idx::from_raw(raw.into()))
+ }
+}
+
+impl<T> fmt::Debug for IdxRange<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_tuple(&format!("IdxRange::<{}>", std::any::type_name::<T>()))
+ .field(&self.range)
+ .finish()
+ }
+}
+
+impl<T> Clone for IdxRange<T> {
+ fn clone(&self) -> Self {
+ Self { range: self.range.clone(), _p: PhantomData }
+ }
+}
+
+impl<T> PartialEq for IdxRange<T> {
+ fn eq(&self, other: &Self) -> bool {
+ self.range == other.range
+ }
+}
+
+impl<T> Eq for IdxRange<T> {}
+
+/// Yet another index-based arena.
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub struct Arena<T> {
+ data: Vec<T>,
+}
+
+impl<T: fmt::Debug> fmt::Debug for Arena<T> {
+ fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt.debug_struct("Arena").field("len", &self.len()).field("data", &self.data).finish()
+ }
+}
+
+impl<T> Arena<T> {
+ /// Creates a new empty arena.
+ ///
+ /// ```
+ /// let arena: la_arena::Arena<i32> = la_arena::Arena::new();
+ /// assert!(arena.is_empty());
+ /// ```
+ pub const fn new() -> Arena<T> {
+ Arena { data: Vec::new() }
+ }
+
++ /// Create a new empty arena with specific capacity.
++ ///
++ /// ```
++ /// let arena: la_arena::Arena<i32> = la_arena::Arena::with_capacity(42);
++ /// assert!(arena.is_empty());
++ /// ```
++ pub fn with_capacity(capacity: usize) -> Arena<T> {
++ Arena { data: Vec::with_capacity(capacity) }
++ }
++
+ /// Empties the arena, removing all contained values.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ ///
+ /// arena.alloc(1);
+ /// arena.alloc(2);
+ /// arena.alloc(3);
+ /// assert_eq!(arena.len(), 3);
+ ///
+ /// arena.clear();
+ /// assert!(arena.is_empty());
+ /// ```
+ pub fn clear(&mut self) {
+ // Note: any `Idx` handed out earlier points past the end after this call.
+ self.data.clear();
+ }
+
+ /// Returns the length of the arena.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// assert_eq!(arena.len(), 0);
+ ///
+ /// arena.alloc("foo");
+ /// assert_eq!(arena.len(), 1);
+ ///
+ /// arena.alloc("bar");
+ /// assert_eq!(arena.len(), 2);
+ ///
+ /// arena.alloc("baz");
+ /// assert_eq!(arena.len(), 3);
+ /// ```
+ pub fn len(&self) -> usize {
+ self.data.len()
+ }
+
+ /// Returns whether the arena contains no elements.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// assert!(arena.is_empty());
+ ///
+ /// arena.alloc(0.5);
+ /// assert!(!arena.is_empty());
+ /// ```
+ pub fn is_empty(&self) -> bool {
+ self.data.is_empty()
+ }
+
+ /// Allocates a new value on the arena, returning the value’s index.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// let idx = arena.alloc(50);
+ ///
+ /// assert_eq!(arena[idx], 50);
+ /// ```
+ pub fn alloc(&mut self, value: T) -> Idx<T> {
+ // Take the index first: the new value lands exactly at the old `len`.
+ let idx = self.next_idx();
+ self.data.push(value);
+ idx
+ }
+
+ /// Returns an iterator over the arena’s elements.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// let idx1 = arena.alloc(20);
+ /// let idx2 = arena.alloc(40);
+ /// let idx3 = arena.alloc(60);
+ ///
+ /// let mut iterator = arena.iter();
+ /// assert_eq!(iterator.next(), Some((idx1, &20)));
+ /// assert_eq!(iterator.next(), Some((idx2, &40)));
+ /// assert_eq!(iterator.next(), Some((idx3, &60)));
+ /// ```
+ pub fn iter(
+ &self,
+ ) -> impl Iterator<Item = (Idx<T>, &T)> + ExactSizeIterator + DoubleEndedIterator {
+ // The `enumerate` position equals the raw index `alloc` handed out for
+ // that element, so the reconstructed `Idx` matches the original handle.
+ self.data.iter().enumerate().map(|(idx, value)| (Idx::from_raw(RawIdx(idx as u32)), value))
+ }
+
+ /// Returns an iterator over the arena’s mutable elements.
+ ///
+ /// ```
+ /// let mut arena = la_arena::Arena::new();
+ /// let idx1 = arena.alloc(20);
+ ///
+ /// assert_eq!(arena[idx1], 20);
+ ///
+ /// let mut iterator = arena.iter_mut();
+ /// *iterator.next().unwrap().1 = 10;
+ /// drop(iterator);
+ ///
+ /// assert_eq!(arena[idx1], 10);
+ /// ```
+ pub fn iter_mut(
+ &mut self,
+ ) -> impl Iterator<Item = (Idx<T>, &mut T)> + ExactSizeIterator + DoubleEndedIterator {
+ self.data
+ .iter_mut()
+ .enumerate()
+ .map(|(idx, value)| (Idx::from_raw(RawIdx(idx as u32)), value))
+ }
+
+ /// Reallocates the arena to make it take up as little space as possible.
+ pub fn shrink_to_fit(&mut self) {
+ self.data.shrink_to_fit();
+ }
+
+ /// Returns the index of the next value allocated on the arena.
+ ///
+ /// This method should remain private to make creating invalid `Idx`s harder.
+ fn next_idx(&self) -> Idx<T> {
+ // Raw indices are `u32`; the `as u32` cast assumes the arena never holds
+ // more than `u32::MAX` elements (it would silently truncate beyond that).
+ Idx::from_raw(RawIdx(self.data.len() as u32))
+ }
+}
+
+impl<T> Default for Arena<T> {
+ /// Returns an empty arena; equivalent to [`Arena::new`].
+ fn default() -> Arena<T> {
+ // Delegate to `new` so empty-arena construction lives in one place,
+ // mirroring `ArenaMap::default`, which delegates to `ArenaMap::new`.
+ Arena::new()
+ }
+}
+
+impl<T> Index<Idx<T>> for Arena<T> {
+ type Output = T;
+ /// Returns the value at `idx`; panics if `idx` is out of bounds.
+ fn index(&self, idx: Idx<T>) -> &T {
+ &self.data[idx.into_raw().0 as usize]
+ }
+}
+
+impl<T> IndexMut<Idx<T>> for Arena<T> {
+ /// Returns a mutable reference to the value at `idx`; panics if out of bounds.
+ fn index_mut(&mut self, idx: Idx<T>) -> &mut T {
+ &mut self.data[idx.into_raw().0 as usize]
+ }
+}
+
+impl<T> Index<IdxRange<T>> for Arena<T> {
+ type Output = [T];
+ // Indexing by a range of indices yields the contiguous slice of values;
+ // out-of-bounds ranges panic via the underlying slice indexing.
+ fn index(&self, range: IdxRange<T>) -> &[T] {
+ let start = range.range.start as usize;
+ let end = range.range.end as usize;
+ &self.data[start..end]
+ }
+}
+
+impl<T> FromIterator<T> for Arena<T> {
+ /// Collects the iterator’s items into a fresh arena, preserving order.
+ fn from_iter<I>(iter: I) -> Self
+ where
+ I: IntoIterator<Item = T>,
+ {
+ Arena { data: iter.into_iter().collect() }
+ }
+}
--- /dev/null
- pub fn insert(&mut self, idx: Idx<T>, t: V) {
+use std::marker::PhantomData;
+
+use crate::Idx;
+
+/// A map from arena indexes to some other type.
+/// Space requirement is O(highest index).
+///
+/// Backed by a `Vec<Option<V>>` addressed by the raw index value, so lookups
+/// are O(1) but sparse or high indices leave `None` gaps in the vector.
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct ArenaMap<IDX, V> {
+ v: Vec<Option<V>>,
+ _ty: PhantomData<IDX>,
+}
+
+impl<T, V> ArenaMap<Idx<T>, V> {
++ /// Creates a new empty map.
++ pub const fn new() -> Self {
++ Self { v: Vec::new(), _ty: PhantomData }
++ }
++
++ /// Create a new empty map with specific capacity.
++ pub fn with_capacity(capacity: usize) -> Self {
++ Self { v: Vec::with_capacity(capacity), _ty: PhantomData }
++ }
++
++ /// Reserves capacity for at least additional more elements to be inserted in the map.
++ pub fn reserve(&mut self, additional: usize) {
++ self.v.reserve(additional);
++ }
++
++ /// Clears the map, removing all elements.
++ pub fn clear(&mut self) {
++ self.v.clear();
++ }
++
++ /// Shrinks the capacity of the map as much as possible.
++ pub fn shrink_to_fit(&mut self) {
++ // Trailing `None` slots carry no entries, so truncate to the position
++ // just past the last `Some` before shrinking the allocation.
++ let min_len = self.v.iter().rposition(|slot| slot.is_some()).map_or(0, |i| i + 1);
++ self.v.truncate(min_len);
++ self.v.shrink_to_fit();
++ }
++
++ /// Returns whether the map contains a value for the specified index.
++ pub fn contains_idx(&self, idx: Idx<T>) -> bool {
++ matches!(self.v.get(Self::to_idx(idx)), Some(Some(_)))
++ }
++
++ /// Removes an index from the map, returning the value at the index if the index was previously in the map.
++ pub fn remove(&mut self, idx: Idx<T>) -> Option<V> {
++ // `take` leaves a `None` slot behind; the vector is never shrunk here.
++ self.v.get_mut(Self::to_idx(idx))?.take()
++ }
++
+ /// Inserts a value associated with a given arena index into the map.
- self.v[idx] = Some(t);
++ ///
++ /// If the map did not have this index present, None is returned.
++ /// Otherwise, the value is updated, and the old value is returned.
++ pub fn insert(&mut self, idx: Idx<T>, t: V) -> Option<V> {
+ let idx = Self::to_idx(idx);
+
+ // Grow the backing vec with `None` so `self.v[idx]` below is in bounds.
+ self.v.resize_with((idx + 1).max(self.v.len()), || None);
- ArenaMap { v: Vec::new(), _ty: PhantomData }
++ self.v[idx].replace(t)
+ }
+
+ /// Returns a reference to the value associated with the provided index
+ /// if it is present.
+ pub fn get(&self, idx: Idx<T>) -> Option<&V> {
+ self.v.get(Self::to_idx(idx)).and_then(|it| it.as_ref())
+ }
+
+ /// Returns a mutable reference to the value associated with the provided index
+ /// if it is present.
+ pub fn get_mut(&mut self, idx: Idx<T>) -> Option<&mut V> {
+ self.v.get_mut(Self::to_idx(idx)).and_then(|it| it.as_mut())
+ }
+
+ /// Returns an iterator over the values in the map.
+ pub fn values(&self) -> impl Iterator<Item = &V> {
+ self.v.iter().filter_map(|o| o.as_ref())
+ }
+
+ /// Returns an iterator over mutable references to the values in the map.
+ pub fn values_mut(&mut self) -> impl Iterator<Item = &mut V> {
+ self.v.iter_mut().filter_map(|o| o.as_mut())
+ }
+
+ /// Returns an iterator over the arena indexes and values in the map.
+ pub fn iter(&self) -> impl Iterator<Item = (Idx<T>, &V)> {
+ self.v.iter().enumerate().filter_map(|(idx, o)| Some((Self::from_idx(idx), o.as_ref()?)))
+ }
+
++ /// Gets the given key's corresponding entry in the map for in-place manipulation.
++ pub fn entry(&mut self, idx: Idx<T>) -> Entry<'_, Idx<T>, V> {
++ let idx = Self::to_idx(idx);
++ // Grow like `insert` so the slot exists before we classify it.
++ self.v.resize_with((idx + 1).max(self.v.len()), || None);
++ match &mut self.v[idx] {
++ slot @ Some(_) => Entry::Occupied(OccupiedEntry { slot, _ty: PhantomData }),
++ slot @ None => Entry::Vacant(VacantEntry { slot, _ty: PhantomData }),
++ }
++ }
++
+ // Converts a typed `Idx` into a position in the backing vector.
+ fn to_idx(idx: Idx<T>) -> usize {
+ u32::from(idx.into_raw()) as usize
+ }
+
+ // Inverse of `to_idx`: rebuilds the typed `Idx` from a vector position.
+ fn from_idx(idx: usize) -> Idx<T> {
+ Idx::from_raw((idx as u32).into())
+ }
+}
+
+impl<T, V> std::ops::Index<Idx<V>> for ArenaMap<Idx<V>, T> {
+ type Output = T;
+ // Panics (via `unwrap`) if no value is present for `idx`; use `get` for a
+ // non-panicking lookup.
+ fn index(&self, idx: Idx<V>) -> &T {
+ self.v[Self::to_idx(idx)].as_ref().unwrap()
+ }
+}
+
+impl<T, V> std::ops::IndexMut<Idx<V>> for ArenaMap<Idx<V>, T> {
+ // Panics (via `unwrap`) if no value is present for `idx`; use `get_mut`
+ // for a non-panicking lookup.
+ fn index_mut(&mut self, idx: Idx<V>) -> &mut T {
+ self.v[Self::to_idx(idx)].as_mut().unwrap()
+ }
+}
+
+impl<T, V> Default for ArenaMap<Idx<V>, T> {
+ // Equivalent to `ArenaMap::new`: an empty map.
+ fn default() -> Self {
++ Self::new()
++ }
++}
++
++impl<T, V> Extend<(Idx<V>, T)> for ArenaMap<Idx<V>, T> {
++ /// Inserts every `(index, value)` pair from `iter`; later pairs overwrite
++ /// earlier values at the same index.
++ fn extend<I: IntoIterator<Item = (Idx<V>, T)>>(&mut self, iter: I) {
++ for (idx, value) in iter {
++ self.insert(idx, value);
++ }
++ }
++}
++
++impl<T, V> FromIterator<(Idx<V>, T)> for ArenaMap<Idx<V>, T> {
++ /// Builds a map by inserting each `(index, value)` pair in iteration order.
++ fn from_iter<I: IntoIterator<Item = (Idx<V>, T)>>(iter: I) -> Self {
++ let mut map = Self::new();
++ for (idx, value) in iter {
++ map.insert(idx, value);
++ }
++ map
++ }
++}
++
++/// A view into a single entry in a map, which may either be vacant or occupied.
++///
++/// This `enum` is constructed from the [`entry`] method on [`ArenaMap`].
++/// Both variants hold a mutable borrow of the `Option<V>` slot inside the
++/// map's backing vector.
++///
++/// [`entry`]: ArenaMap::entry
++pub enum Entry<'a, IDX, V> {
++ /// A vacant entry.
++ Vacant(VacantEntry<'a, IDX, V>),
++ /// An occupied entry.
++ Occupied(OccupiedEntry<'a, IDX, V>),
++}
++
++impl<'a, IDX, V> Entry<'a, IDX, V> {
++ /// Ensures a value is in the entry by inserting the given default if empty, and returns
++ /// a mutable reference to the value in the entry.
++ pub fn or_insert(self, default: V) -> &'a mut V {
++ // Same semantics as `or_insert_with`, with an eagerly supplied value.
++ self.or_insert_with(|| default)
++ }
++
++ /// Ensures a value is in the entry by inserting the result of the default function if
++ /// empty, and returns a mutable reference to the value in the entry.
++ pub fn or_insert_with<F: FnOnce() -> V>(self, default: F) -> &'a mut V {
++ match self {
++ Entry::Occupied(entry) => entry.into_mut(),
++ Entry::Vacant(entry) => entry.insert(default()),
++ }
++ }
++
++ /// Provides in-place mutable access to an occupied entry before any potential inserts
++ /// into the map.
++ pub fn and_modify<F: FnOnce(&mut V)>(mut self, f: F) -> Self {
++ match &mut self {
++ Entry::Occupied(entry) => f(entry.get_mut()),
++ Entry::Vacant(_) => {}
++ }
++ self
++ }
++}
++
++impl<'a, IDX, V> Entry<'a, IDX, V>
++where
++ V: Default,
++{
++ /// Ensures a value is in the entry by inserting `V::default()` if empty, and returns
++ /// a mutable reference to the value in the entry.
++ pub fn or_default(self) -> &'a mut V {
++ self.or_insert_with(V::default)
++ }
++}
++
++/// A view into a vacant entry in an [`ArenaMap`]. It is part of the [`Entry`] enum.
++pub struct VacantEntry<'a, IDX, V> {
++ // Mutable borrow of the (currently `None`) slot in the map's backing vector.
++ slot: &'a mut Option<V>,
++ _ty: PhantomData<IDX>,
++}
++
++impl<'a, IDX, V> VacantEntry<'a, IDX, V> {
++ /// Sets the value of the entry with the `VacantEntry`’s key, and returns a mutable reference to it.
++ pub fn insert(self, value: V) -> &'a mut V {
++ // `Option::insert` stores `value` in the slot and returns a reference into it.
++ self.slot.insert(value)
++ }
++}
++
++/// A view into an occupied entry in an [`ArenaMap`]. It is part of the [`Entry`] enum.
++pub struct OccupiedEntry<'a, IDX, V> {
++ // Mutable borrow of the (currently `Some`) slot in the map's backing vector.
++ slot: &'a mut Option<V>,
++ _ty: PhantomData<IDX>,
++}
++
++// Invariant: `slot` is `Some` for as long as this entry exists (enforced at
++// construction in `ArenaMap::entry`); the `expect("Occupied")`s encode that.
++impl<'a, IDX, V> OccupiedEntry<'a, IDX, V> {
++ /// Gets a reference to the value in the entry.
++ pub fn get(&self) -> &V {
++ self.slot.as_ref().expect("Occupied")
++ }
++
++ /// Gets a mutable reference to the value in the entry.
++ pub fn get_mut(&mut self) -> &mut V {
++ self.slot.as_mut().expect("Occupied")
++ }
++
++ /// Converts the entry into a mutable reference to its value.
++ pub fn into_mut(self) -> &'a mut V {
++ self.slot.as_mut().expect("Occupied")
++ }
++
++ /// Sets the value of the entry with the `OccupiedEntry`’s key, and returns the entry’s old value.
++ pub fn insert(&mut self, value: V) -> V {
++ self.slot.replace(value).expect("Occupied")
++ }
++
++ /// Takes the value of the entry out of the map, and returns it.
++ pub fn remove(self) -> V {
++ // Leaves a `None` slot behind in the map's backing vector.
++ self.slot.take().expect("Occupied")
+ }
+}
--- /dev/null
- cmd!(sh, "git subtree pull -P src/tools/rust-analyzer rust-analyzer master").run()?;
+mod changelog;
+
+use xshell::{cmd, Shell};
+
+use crate::{date_iso, flags, is_release_tag, project_root};
+
+impl flags::Release {
+ // Cuts a release: resets the `release` branch to the `nightly` tag,
+ // regenerates the docs, and writes a changelog post into the website repo.
+ pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
+ if !self.dry_run {
+ cmd!(sh, "git switch release").run()?;
+ cmd!(sh, "git fetch upstream --tags --force").run()?;
+ cmd!(sh, "git reset --hard tags/nightly").run()?;
+ // The `release` branch sometimes has a couple of cherry-picked
+ // commits for patch releases. If that's the case, just overwrite
+ // it. As we are setting `release` branch to an up-to-date `nightly`
+ // tag, this shouldn't be problematic in general.
+ //
+ // Note that, as we tag releases, we don't worry about "losing"
+ // commits -- they'll be kept alive by the tag. More generally, we
+ // don't care about historic releases all that much, it's fine even
+ // to delete old tags.
+ cmd!(sh, "git push --force").run()?;
+ }
+
+ // Generates bits of manual.adoc.
+ cmd!(sh, "cargo test -p ide-assists -p ide-diagnostics -p rust-analyzer -- sourcegen_")
+ .run()?;
+
+ // The website repo is expected to be checked out next to this one.
+ let website_root = project_root().join("../rust-analyzer.github.io");
+ {
+ let _dir = sh.push_dir(&website_root);
+ cmd!(sh, "git switch src").run()?;
+ cmd!(sh, "git pull").run()?;
+ }
+ let changelog_dir = website_root.join("./thisweek/_posts");
+
+ let today = date_iso(sh)?;
+ let commit = cmd!(sh, "git rev-parse HEAD").read()?;
+ // Changelog posts are named `<YYYY-MM-DD>-changelog-<N>.adoc` (see the
+ // `write_file` below): split each stem on the first four dashes, take the
+ // remainder as the changelog number, and use one past the highest seen.
+ let changelog_n = sh
+ .read_dir(changelog_dir.as_path())?
+ .into_iter()
+ .filter_map(|p| p.file_stem().map(|s| s.to_string_lossy().to_string()))
+ .filter_map(|s| s.splitn(5, '-').last().map(|n| n.replace('-', ".")))
+ // Parsed as `f32` so a dash-suffixed remainder (turned into "N.M"
+ // above) still orders numerically; `floor` drops the fraction.
+ .filter_map(|s| s.parse::<f32>().ok())
+ .map(|n| 1 + n.floor() as usize)
+ .max()
+ // First ever post gets number 0.
+ .unwrap_or_default();
+
+ // Copy the generated manual pages into the website checkout.
+ for adoc in [
+ "manual.adoc",
+ "generated_assists.adoc",
+ "generated_config.adoc",
+ "generated_diagnostic.adoc",
+ "generated_features.adoc",
+ ] {
+ let src = project_root().join("./docs/user/").join(adoc);
+ let dst = website_root.join(adoc);
+
+ let contents = sh.read_file(src)?;
+ sh.write_file(dst, contents)?;
+ }
+
+ let tags = cmd!(sh, "git tag --list").read()?;
+ // NOTE(review): `unwrap` assumes at least one release tag already exists.
+ let prev_tag = tags.lines().filter(|line| is_release_tag(line)).last().unwrap();
+
+ let contents = changelog::get_changelog(sh, changelog_n, &commit, prev_tag, &today)?;
+ let path = changelog_dir.join(format!("{}-changelog-{}.adoc", today, changelog_n));
+ sh.write_file(&path, &contents)?;
+
+ Ok(())
+ }
+}
+
+impl flags::Promote {
+ // Promotes the released rust-analyzer into the rust-lang/rust fork via a
+ // subtree pull, then (unless dry-run) pushes a branch and opens a PR page.
+ pub(crate) fn run(self, sh: &Shell) -> anyhow::Result<()> {
+ // Operates on a sibling checkout of the rust-lang/rust fork.
+ let _dir = sh.push_dir("../rust-rust-analyzer");
+ cmd!(sh, "git switch master").run()?;
+ cmd!(sh, "git fetch upstream").run()?;
+ cmd!(sh, "git reset --hard upstream/master").run()?;
+
+ let date = date_iso(sh)?;
+ let branch = format!("rust-analyzer-{date}");
+ cmd!(sh, "git switch -c {branch}").run()?;
+ // Pull the rust-analyzer `release` branch into the subtree, with an
+ // explicit merge-commit message.
++ cmd!(sh, "git subtree pull -m ':arrow_up: rust-analyzer' -P src/tools/rust-analyzer rust-analyzer release").run()?;
+
+ if !self.dry_run {
+ cmd!(sh, "git push -u origin {branch}").run()?;
+ // Opens the pre-filled new-PR page in the browser (body is
+ // URL-encoded "r? @ghost").
+ cmd!(
+ sh,
+ "xdg-open https://github.com/matklad/rust/pull/new/{branch}?body=r%3F%20%40ghost"
+ )
+ .run()?;
+ }
+ Ok(())
+ }
+}