X-Git-Url: https://git.lizzy.rs/?a=blobdiff_plain;f=crates%2Frust-analyzer%2Fsrc%2Fconfig.rs;h=cac48e9117099074a33638e3137cdc66b32e3d2e;hb=96fc01a30b88d95619b26fd96c58627dd54cb339;hp=24e7936fc9ec38deadd8ed25063614d5128305bc;hpb=66c575659e17035a747d3e4e10bcf4f3669a7143;p=rust.git

diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs
index 24e7936fc9e..cac48e91170 100644
--- a/crates/rust-analyzer/src/config.rs
+++ b/crates/rust-analyzer/src/config.rs
@@ -7,40 +7,48 @@
 //! configure the server itself, feature flags are passed into analysis, and
 //! tweak things like automatic insertion of `()` in completions.
 
-use std::{convert::TryFrom, ffi::OsString, path::PathBuf};
+use std::{ffi::OsString, iter, path::PathBuf};
 
 use flycheck::FlycheckConfig;
 use hir::PrefixKind;
 use ide::{AssistConfig, CompletionConfig, DiagnosticsConfig, HoverConfig, InlayHintsConfig};
-use ide_db::helpers::insert_use::MergeBehavior;
+use ide_db::helpers::{
+    insert_use::{InsertUseConfig, MergeBehavior},
+    SnippetCap,
+};
 use itertools::Itertools;
 use lsp_types::{ClientCapabilities, MarkupKind};
-use project_model::{CargoConfig, ProjectJson, ProjectJsonData, ProjectManifest};
+use project_model::{CargoConfig, ProjectJson, ProjectJsonData, ProjectManifest, RustcSource};
 use rustc_hash::FxHashSet;
 use serde::{de::DeserializeOwned, Deserialize};
 use vfs::AbsPathBuf;
 
-use crate::{caps::enabled_completions_resolve_capabilities, diagnostics::DiagnosticsMapConfig};
+use crate::{
+    caps::completion_item_edit_resolve, diagnostics::DiagnosticsMapConfig,
+    line_index::OffsetEncoding, lsp_ext::supports_utf8,
+};
 
 config_data! {
     struct ConfigData {
         /// The strategy to use when inserting new imports or merging imports.
-        assist_importMergeBehaviour: MergeBehaviorDef = "\"full\"",
+        assist_importMergeBehavior |
+        assist_importMergeBehaviour: MergeBehaviorDef = "\"full\"",
         /// The path structure for newly inserted paths to use.
         assist_importPrefix: ImportPrefixDef = "\"plain\"",
-
+        /// Group inserted imports by the [following order](https://rust-analyzer.github.io/manual.html#auto-import). Groups are separated by newlines.
+        assist_importGroup: bool = "true",
         /// Show function name and docs in parameter hints.
         callInfo_full: bool = "true",
 
         /// Automatically refresh project info via `cargo metadata` on
         /// `Cargo.toml` changes.
         cargo_autoreload: bool = "true",
-        /// Activate all available features.
+        /// Activate all available features (`--all-features`).
        cargo_allFeatures: bool = "false",
         /// List of features to activate.
         cargo_features: Vec<String> = "[]",
-        /// Run `cargo check` on startup to get the correct value for package
-        /// OUT_DIRs.
+        /// Run build scripts (`build.rs`) for more precise code analysis.
+        cargo_runBuildScripts |
         cargo_loadOutDirsFromCheck: bool = "false",
         /// Do not activate the `default` feature.
         cargo_noDefaultFeatures: bool = "false",
@@ -51,10 +59,10 @@ struct ConfigData {
         /// Run specified `cargo check` command for diagnostics on save.
         checkOnSave_enable: bool = "true",
-        /// Check with all features (will be passed as `--all-features`).
+        /// Check with all features (`--all-features`).
         /// Defaults to `#rust-analyzer.cargo.allFeatures#`.
         checkOnSave_allFeatures: Option<bool> = "null",
-        /// Check all targets and tests (will be passed as `--all-targets`).
+        /// Check all targets and tests (`--all-targets`).
         checkOnSave_allTargets: bool = "true",
         /// Cargo command to use for `cargo check`.
         checkOnSave_command: String = "\"check\"",
@@ -101,6 +109,8 @@ struct ConfigData {
         /// Controls file watching implementation.
         files_watcher: String = "\"client\"",
+        /// These directories will be ignored by rust-analyzer.
+        files_excludeDirs: Vec<PathBuf> = "[]",
 
         /// Whether to show `Debug` action. Only applies when
         /// `#rust-analyzer.hoverActions.enable#` is set.
@@ -143,20 +153,22 @@ struct ConfigData {
         /// Whether to show `Method References` lens. Only applies when
         /// `#rust-analyzer.lens.enable#` is set.
         lens_methodReferences: bool = "false",
+        /// Whether to show `References` lens. Only applies when
+        /// `#rust-analyzer.lens.enable#` is set.
+        lens_references: bool = "false",
 
         /// Disable project auto-discovery in favor of explicitly specified set
         /// of projects.\n\nElements must be paths pointing to `Cargo.toml`,
         /// `rust-project.json`, or JSON objects in `rust-project.json` format.
         linkedProjects: Vec<ManifestOrProjectJson> = "[]",
-        /// Number of syntax trees rust-analyzer keeps in memory. Defaults to 128.
+        /// Number of syntax trees rust-analyzer keeps in memory.  Defaults to 128.
         lruCapacity: Option<usize> = "null",
 
         /// Whether to show `can't find Cargo.toml` error message.
         notifications_cargoTomlNotFound: bool = "true",
 
-        /// Enable Proc macro support, `#rust-analyzer.cargo.loadOutDirsFromCheck#` must be
-        /// enabled.
+        /// Enable support for procedural macros, implies `#rust-analyzer.cargo.runBuildScripts#`.
         procMacro_enable: bool = "false",
         /// Internal config, path to proc-macro server executable (typically,
         /// this is rust-analyzer itself, but we override this in tests).
@@ -168,8 +180,9 @@ struct ConfigData {
         /// tests or binaries.\nFor example, it may be `--release`.
         runnables_cargoExtraArgs: Vec<String> = "[]",
 
-        /// Path to the rust compiler sources, for usage in rustc_private projects.
-        rustcSource : Option<PathBuf> = "null",
+        /// Path to the rust compiler sources, for usage in rustc_private projects, or "discover"
+        /// to try to automatically find it.
+        rustcSource : Option<String> = "null",
 
         /// Additional arguments to `rustfmt`.
         rustfmt_extraArgs: Vec<String> = "[]",
@@ -217,6 +230,7 @@ pub struct LensConfig {
     pub debug: bool,
     pub implementations: bool,
     pub method_refs: bool,
+    pub refs: bool, // for Struct, Enum, Union and Trait
 }
 
 impl LensConfig {
@@ -233,14 +247,14 @@ pub fn runnable(&self) -> bool {
     }
 
     pub fn references(&self) -> bool {
-        self.method_refs
+        self.method_refs || self.refs
     }
 }
 
 #[derive(Debug, Clone)]
 pub struct FilesConfig {
     pub watcher: FilesWatcher,
-    pub exclude: Vec<String>,
+    pub exclude: Vec<AbsPathBuf>,
 }
 
 #[derive(Debug, Clone)]
@@ -330,6 +344,18 @@ pub fn linked_projects(&self) -> Vec<LinkedProject> {
         }
     }
 
+    pub fn did_save_text_document_dynamic_registration(&self) -> bool {
+        let caps =
+            try_or!(self.caps.text_document.as_ref()?.synchronization.clone()?, Default::default());
+        caps.did_save == Some(true) && caps.dynamic_registration == Some(true)
+    }
+    pub fn did_change_watched_files_dynamic_registration(&self) -> bool {
+        try_or!(
+            self.caps.workspace.as_ref()?.did_change_watched_files.as_ref()?.dynamic_registration?,
+            false
+        )
+    }
+
     pub fn location_link(&self) -> bool {
         try_or!(self.caps.text_document.as_ref()?.definition?.link_support?, false)
     }
@@ -392,6 +418,13 @@ pub fn signature_help_label_offsets(&self) -> bool {
             false
         )
     }
+    pub fn offset_encoding(&self) -> OffsetEncoding {
+        if supports_utf8(&self.caps) {
+            OffsetEncoding::Utf8
+        } else {
+            OffsetEncoding::Utf16
+        }
+    }
 
     fn experimental(&self, index: &'static str) -> bool {
         try_or!(self.caps.experimental.as_ref()?.get(index)?.as_bool()?, false)
@@ -438,7 +471,7 @@ pub fn files(&self) -> FilesConfig {
                 "notify" => FilesWatcher::Notify,
                 "client" | _ => FilesWatcher::Client,
             },
-            exclude: Vec::new(),
+            exclude: self.data.files_excludeDirs.iter().map(|it| self.root_path.join(it)).collect(),
         }
     }
     pub fn notifications(&self) -> NotificationsConfig {
@@ -447,18 +480,22 @@ pub fn notifications(&self) -> NotificationsConfig {
     pub fn cargo_autoreload(&self) -> bool {
         self.data.cargo_autoreload
     }
+    pub fn run_build_scripts(&self) -> bool {
+        self.data.cargo_runBuildScripts || self.data.procMacro_enable
+    }
     pub fn cargo(&self) -> CargoConfig {
-        let rustc_source = self.data.rustcSource.clone().and_then(|it| {
-            AbsPathBuf::try_from(it)
-                .map_err(|_| log::error!("rustc source directory must be an absolute path"))
-                .ok()
+        let rustc_source = self.data.rustcSource.as_ref().map(|rustc_src| {
+            if rustc_src == "discover" {
+                RustcSource::Discover
+            } else {
+                RustcSource::Path(self.root_path.join(rustc_src))
+            }
         });
 
         CargoConfig {
             no_default_features: self.data.cargo_noDefaultFeatures,
             all_features: self.data.cargo_allFeatures,
             features: self.data.cargo_features.clone(),
-            load_out_dirs_from_check: self.data.cargo_loadOutDirsFromCheck,
             target: self.data.cargo_target.clone(),
             rustc_source,
             no_sysroot: self.data.cargo_noSysroot,
@@ -492,7 +529,7 @@ pub fn flycheck(&self) -> Option<FlycheckConfig> {
                     .data
                     .checkOnSave_target
                     .clone()
-                    .or(self.data.cargo_target.clone()),
+                    .or_else(|| self.data.cargo_target.clone()),
                 all_targets: self.data.checkOnSave_allTargets,
                 no_default_features: self
                     .data
@@ -506,7 +543,7 @@ pub fn flycheck(&self) -> Option<FlycheckConfig> {
                     .data
                     .checkOnSave_features
                     .clone()
-                    .unwrap_or(self.data.cargo_features.clone()),
+                    .unwrap_or_else(|| self.data.cargo_features.clone()),
                 extra_args: self.data.checkOnSave_extraArgs.clone(),
             },
         };
@@ -526,46 +563,48 @@ pub fn inlay_hints(&self) -> InlayHintsConfig {
             max_length: self.data.inlayHints_maxLength,
         }
     }
-    fn merge_behavior(&self) -> Option<MergeBehavior> {
-        match self.data.assist_importMergeBehaviour {
-            MergeBehaviorDef::None => None,
-            MergeBehaviorDef::Full => Some(MergeBehavior::Full),
-            MergeBehaviorDef::Last => Some(MergeBehavior::Last),
+    fn insert_use_config(&self) -> InsertUseConfig {
+        InsertUseConfig {
+            merge: match self.data.assist_importMergeBehavior {
+                MergeBehaviorDef::None => None,
+                MergeBehaviorDef::Full => Some(MergeBehavior::Full),
+                MergeBehaviorDef::Last => Some(MergeBehavior::Last),
+            },
+            prefix_kind: match self.data.assist_importPrefix {
+                ImportPrefixDef::Plain => PrefixKind::Plain,
+                ImportPrefixDef::ByCrate => PrefixKind::ByCrate,
+                ImportPrefixDef::BySelf => PrefixKind::BySelf,
+            },
+            group: self.data.assist_importGroup,
         }
     }
     pub fn completion(&self) -> CompletionConfig {
-        let mut res = CompletionConfig::default();
-        res.enable_postfix_completions = self.data.completion_postfix_enable;
-        res.enable_autoimport_completions = self.data.completion_autoimport_enable;
-        res.add_call_parenthesis = self.data.completion_addCallParenthesis;
-        res.add_call_argument_snippets = self.data.completion_addCallArgumentSnippets;
-        res.merge = self.merge_behavior();
-        res.active_resolve_capabilities =
-            enabled_completions_resolve_capabilities(&self.caps).unwrap_or_default();
-
-        res.allow_snippets(try_or!(
-            self.caps
-                .text_document
-                .as_ref()?
-                .completion
-                .as_ref()?
-                .completion_item
-                .as_ref()?
-                .snippet_support?,
-            false
-        ));
-        res
+        CompletionConfig {
+            enable_postfix_completions: self.data.completion_postfix_enable,
+            enable_imports_on_the_fly: self.data.completion_autoimport_enable
+                && completion_item_edit_resolve(&self.caps),
+            add_call_parenthesis: self.data.completion_addCallParenthesis,
+            add_call_argument_snippets: self.data.completion_addCallArgumentSnippets,
+            insert_use: self.insert_use_config(),
+            snippet_cap: SnippetCap::new(try_or!(
+                self.caps
+                    .text_document
+                    .as_ref()?
+                    .completion
+                    .as_ref()?
+                    .completion_item
+                    .as_ref()?
+                    .snippet_support?,
+                false
+            )),
+        }
     }
     pub fn assist(&self) -> AssistConfig {
-        let mut res = AssistConfig::default();
-        res.insert_use.merge = self.merge_behavior();
-        res.insert_use.prefix_kind = match self.data.assist_importPrefix {
-            ImportPrefixDef::Plain => PrefixKind::Plain,
-            ImportPrefixDef::ByCrate => PrefixKind::ByCrate,
-            ImportPrefixDef::BySelf => PrefixKind::BySelf,
-        };
-        res.allow_snippets(self.experimental("snippetTextEdit"));
-        res
+        AssistConfig {
+            snippet_cap: SnippetCap::new(self.experimental("snippetTextEdit")),
+            allowed: None,
+            insert_use: self.insert_use_config(),
+        }
     }
     pub fn call_info_full(&self) -> bool {
         self.data.callInfo_full
@@ -576,6 +615,7 @@ pub fn lens(&self) -> LensConfig {
             debug: self.data.lens_enable && self.data.lens_debug,
             implementations: self.data.lens_enable && self.data.lens_implementations,
             method_refs: self.data.lens_enable && self.data.lens_methodReferences,
+            refs: self.data.lens_enable && self.data.lens_references,
         }
     }
     pub fn hover(&self) -> HoverConfig {
@@ -635,7 +675,7 @@ macro_rules! _config_data {
     (struct $name:ident {
         $(
             $(#[doc=$doc:literal])*
-            $field:ident: $ty:ty = $default:expr,
+            $field:ident $(| $alias:ident)?: $ty:ty = $default:expr,
         )*
     }) => {
         #[allow(non_snake_case)]
@@ -644,7 +684,12 @@ struct $name { $($field: $ty,)* }
         impl $name {
             fn from_json(mut json: serde_json::Value) -> $name {
                 $name {$(
-                    $field: get_field(&mut json, stringify!($field), $default),
+                    $field: get_field(
+                        &mut json,
+                        stringify!($field),
+                        None$(.or(Some(stringify!($alias))))?,
+                        $default,
+                    ),
                 )*}
             }
 
@@ -676,21 +721,28 @@ fn manual() -> String {
 fn get_field<T: DeserializeOwned>(
     json: &mut serde_json::Value,
     field: &'static str,
+    alias: Option<&'static str>,
     default: &str,
 ) -> T {
     let default = serde_json::from_str(default).unwrap();
-    let mut pointer = field.replace('_', "/");
-    pointer.insert(0, '/');
-    json.pointer_mut(&pointer)
-        .and_then(|it| serde_json::from_value(it.take()).ok())
+    // XXX: check alias first, to work-around the VS Code where it pre-fills the
+    // defaults instead of sending an empty object.
+    alias
+        .into_iter()
+        .chain(iter::once(field))
+        .find_map(move |field| {
+            let mut pointer = field.replace('_', "/");
+            pointer.insert(0, '/');
+            json.pointer_mut(&pointer).and_then(|it| serde_json::from_value(it.take()).ok())
+        })
         .unwrap_or(default)
 }
 
 fn schema(fields: &[(&'static str, &'static str, &[&str], &str)]) -> serde_json::Value {
     for ((f1, ..), (f2, ..)) in fields.iter().zip(&fields[1..]) {
         fn key(f: &str) -> &str {
-            f.splitn(2, "_").next().unwrap()
+            f.splitn(2, '_').next().unwrap()
         }
         assert!(key(f1) <= key(f2), "wrong field order: {:?} {:?}", f1, f2);
     }
@@ -733,6 +785,10 @@ macro_rules! set {
             "type": "array",
             "items": { "type": "string" },
         },
+        "Vec<PathBuf>" => set! {
+            "type": "array",
+            "items": { "type": "string" },
+        },
         "FxHashSet<String>" => set! {
             "type": "array",
             "items": { "type": "string" },
@@ -810,15 +866,32 @@ mod tests {
     fn schema_in_sync_with_package_json() {
         let s = Config::json_schema();
         let schema = format!("{:#}", s);
-        let schema = schema.trim_start_matches('{').trim_end_matches('}');
-
-        let package_json = project_dir().join("editors/code/package.json");
-        let package_json = fs::read_to_string(&package_json).unwrap();
-
-        let p = remove_ws(&package_json);
+        let mut schema = schema
+            .trim_start_matches('{')
+            .trim_end_matches('}')
+            .replace(" ", " ")
+            .replace("\n", "\n ")
+            .trim_start_matches('\n')
+            .trim_end()
+            .to_string();
+        schema.push_str(",\n");
+
+        let package_json_path = project_dir().join("editors/code/package.json");
+        let mut package_json = fs::read_to_string(&package_json_path).unwrap();
+
+        let start_marker = " \"$generated-start\": false,\n";
+        let end_marker = " \"$generated-end\": false\n";
+
+        let start = package_json.find(start_marker).unwrap() + start_marker.len();
+        let end = package_json.find(end_marker).unwrap();
+        let p = remove_ws(&package_json[start..end]);
         let s = remove_ws(&schema);
 
-        assert!(p.contains(&s), "update config in package.json. New config:\n{:#}", schema);
+        if !p.contains(&s) {
+            package_json.replace_range(start..end, &schema);
+            fs::write(&package_json_path, &mut package_json).unwrap();
+            panic!("new config, updating package.json")
+        }
     }
 
     #[test]
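
To illustrate the aliasing scheme the hunks above introduce (the renamed `assist.importMergeBehavior` and `cargo.runBuildScripts` settings keep their old names as fallbacks), here is a standalone sketch that mirrors the patched `get_field` lookup. It is not part of the patch; it only assumes the `serde` and `serde_json` crates, and the sample JSON in `main` is made up for demonstration.

use serde::de::DeserializeOwned;

// Mirrors the patched `get_field`: try the deprecated alias first, then the
// current field name, then fall back to the JSON-encoded default.
fn get_field<T: DeserializeOwned>(
    json: &mut serde_json::Value,
    field: &'static str,
    alias: Option<&'static str>,
    default: &str,
) -> T {
    let default = serde_json::from_str(default).unwrap();
    alias
        .into_iter()
        .chain(std::iter::once(field))
        .find_map(move |field| {
            // `cargo_runBuildScripts` becomes the JSON pointer `/cargo/runBuildScripts`.
            let mut pointer = field.replace('_', "/");
            pointer.insert(0, '/');
            json.pointer_mut(&pointer).and_then(|it| serde_json::from_value(it.take()).ok())
        })
        .unwrap_or(default)
}

fn main() {
    // A client that still sends only the old setting name...
    let mut json = serde_json::json!({ "cargo": { "loadOutDirsFromCheck": true } });
    // ...still turns the renamed `cargo_runBuildScripts` setting on.
    let run_build_scripts: bool = get_field(
        &mut json,
        "cargo_runBuildScripts",
        Some("cargo_loadOutDirsFromCheck"),
        "false",
    );
    assert!(run_build_scripts);
}

The alias is deliberately consulted before the new field name (see the XXX comment in the hunk above): VS Code pre-fills defaults for keys it knows about, so checking the new key first could shadow a value the user had set under the deprecated name.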