[[package]]
name = "chalk-derive"
-version = "0.43.0"
+version = "0.45.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2d9e0c8adcced1ab0fea5cb8a38647922893d5b495e363e1814299fd380469b"
+checksum = "ec7dacf94958d1a930b95d049d9443860859af59eadc77849392093eb577bcee"
dependencies = [
"proc-macro2",
"quote",
[[package]]
name = "chalk-ir"
-version = "0.43.0"
+version = "0.45.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c5218266a5709bc4943de997e64d3fab41c9e9f68efd54a898de53135e987bd3"
+checksum = "a1a5b38ede247def17da87f4badb62396a5753db6048e2011d3089d8b3796c67"
dependencies = [
+ "bitflags",
"chalk-derive",
"lazy_static",
]
[[package]]
name = "chalk-recursive"
-version = "0.43.0"
+version = "0.45.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ed8f34f13fd4f30251f9f6f1dc56f80363201390ecbcac2fdfc8e33036cd9c4a"
+checksum = "7a18db146d7a023edc20ad094e8c2284451f7888719645004979617d1f17c041"
dependencies = [
"chalk-derive",
"chalk-ir",
[[package]]
name = "chalk-solve"
-version = "0.43.0"
+version = "0.45.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "379c9f584488346044709d4c638c38d61a06fe593d4de2ac5f15fd2b0ba4cd9d"
+checksum = "7f73e0de04a0f394e47ed8118e00541bcf681d7c3c2ef500fa743eb4cf3a4850"
dependencies = [
"chalk-derive",
"chalk-ir",
[[package]]
name = "heck"
-version = "0.3.1"
+version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205"
+checksum = "87cbf45460356b7deeb5e3415b5563308c0a9b057c85e12b06ad551f98d0a6ac"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "parking_lot_core"
-version = "0.8.1"
+version = "0.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d7c6d9b8427445284a09c55be860a15855ab580a417ccad9da88f5a06787ced0"
+checksum = "9ccb628cad4f84851442432c60ad8e1f607e29752d0bf072cbd0baf28aa34272"
dependencies = [
"cfg-if 1.0.0",
"instant",
"rustc-hash",
"serde",
"serde_json",
+ "serde_path_to_error",
"ssr",
"stdx",
"syntax",
[[package]]
name = "rustc-ap-rustc_lexer"
-version = "691.0.0"
+version = "695.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "44bc89d9ca7a78fb82e103b389362c55f03800745f8ba14e068b805cfaf783ec"
+checksum = "390bad134705b0bff02cd9541ac66df751a91c3cc734c3369cd6151ca269caed"
dependencies = [
"unicode-xid",
]
"serde",
]
+[[package]]
+name = "serde_path_to_error"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42f6109f0506e20f7e0f910e51a0079acf41da8e0694e6442527c4ddf5a2b158"
+dependencies = [
+ "serde",
+]
+
[[package]]
name = "serde_repr"
version = "0.1.6"
let crate_def_map = db.crate_def_map(self.id.krate);
crate_def_map.add_diagnostics(db.upcast(), self.id.local_id, sink);
for decl in self.declarations(db) {
- decl.diagnostics(db, sink);
-
match decl {
crate::ModuleDef::Function(f) => f.diagnostics(db, sink),
crate::ModuleDef::Module(m) => {
m.diagnostics(db, sink)
}
}
- _ => (),
+ _ => {
+ decl.diagnostics(db, sink);
+ }
}
}
log = "0.4.8"
rustc-hash = "1.1.0"
scoped-tls = "1"
-chalk-solve = { version = "0.43", default-features = false }
-chalk-ir = "0.43"
-chalk-recursive = "0.43"
+chalk-solve = { version = "0.45", default-features = false }
+chalk-ir = "0.45"
+chalk-recursive = "0.45"
stdx = { path = "../stdx", version = "0.0.0" }
hir_def = { path = "../hir_def", version = "0.0.0" }
fn adt_datum(&self, struct_id: AdtId) -> Arc<StructDatum> {
self.db.struct_datum(self.krate, struct_id)
}
- fn adt_repr(&self, _struct_id: AdtId) -> rust_ir::AdtRepr {
- rust_ir::AdtRepr { repr_c: false, repr_packed: false }
+ fn adt_repr(&self, _struct_id: AdtId) -> Arc<rust_ir::AdtRepr<Interner>> {
+ // FIXME: keep track of these
+ Arc::new(rust_ir::AdtRepr { c: false, packed: false, int: None })
+ }
+ fn discriminant_type(&self, _ty: chalk_ir::Ty<Interner>) -> chalk_ir::Ty<Interner> {
+ // FIXME: keep track of this
+ chalk_ir::TyKind::Scalar(chalk_ir::Scalar::Uint(chalk_ir::UintTy::U32)).intern(&Interner)
}
fn impl_datum(&self, impl_id: ImplId) -> Arc<ImplDatum> {
self.db.impl_datum(self.krate, impl_id)
"fn" => WellKnownTrait::Fn,
"unsize" => WellKnownTrait::Unsize,
"coerce_unsized" => WellKnownTrait::CoerceUnsized,
+ "discriminant_kind" => WellKnownTrait::DiscriminantKind,
_ => return None,
})
}
WellKnownTrait::Unsize => "unsize",
WellKnownTrait::Unpin => "unpin",
WellKnownTrait::CoerceUnsized => "coerce_unsized",
+ WellKnownTrait::DiscriminantKind => "discriminant_kind",
}
}
"#,
);
}
+
+ // Regression test for issue #6970: an all-uppercase function name must
+ // produce exactly one incorrect-case diagnostic, and applying its fix
+ // must lowercase the name. `<|>` marks the cursor in the fixture.
+ #[test]
+ fn test_single_incorrect_case_diagnostic_in_function_name_issue_6970() {
+ let input = r#"fn FOO<|>() {}"#;
+ let expected = r#"fn foo() {}"#;
+
+ let (analysis, file_position) = fixture::position(input);
+ // Exactly one diagnostic is expected: the incorrect-case warning for `FOO`.
+ let diagnostics =
+ analysis.diagnostics(&DiagnosticsConfig::default(), file_position.file_id).unwrap();
+ assert_eq!(diagnostics.len(), 1);
+
+ check_fixes(input, expected);
+ }
}
self.with_db(|db| references::rename::prepare_rename(db, position))
}
+ /// Returns the `SourceChange` that would result from renaming the file
+ /// `file_id` so that its name stem becomes `new_name_stem`, or `None` when
+ /// no edits apply. Thin wrapper delegating to
+ /// `references::rename::will_rename_file` on the database snapshot.
+ pub fn will_rename_file(
+ &self,
+ file_id: FileId,
+ new_name_stem: &str,
+ ) -> Cancelable<Option<SourceChange>> {
+ self.with_db(|db| references::rename::will_rename_file(db, file_id, new_name_stem))
+ }
+
pub fn structural_search_replace(
&self,
query: &str,
};
use hir::{Module, ModuleDef, ModuleSource, Semantics};
-use ide_db::base_db::{AnchoredPathBuf, FileRange, SourceDatabaseExt};
+use ide_db::base_db::{AnchoredPathBuf, FileId, FileRange, SourceDatabaseExt};
use ide_db::{
defs::{Definition, NameClass, NameRefClass},
RootDatabase,
}
}
+// Computes the text edits needed when the file backing a module is renamed:
+// the `mod <name>;` declaration (found via `declaration_source`) is renamed
+// through `rename_mod`, so references stay consistent with the new file name.
+pub(crate) fn will_rename_file(
+ db: &RootDatabase,
+ file_id: FileId,
+ new_name_stem: &str,
+) -> Option<SourceChange> {
+ let sema = Semantics::new(db);
+ // Bail out (`None`) if the file does not correspond to a module.
+ let module = sema.to_module_def(file_id)?;
+
+ // Locate the `mod` declaration's name token; renaming is anchored there.
+ let decl = module.declaration_source(db)?;
+ let range = decl.value.name()?.syntax().text_range();
+
+ let position = FilePosition { file_id: decl.file_id.original_file(db), offset: range.start() };
+ let mut change = rename_mod(&sema, position, module, new_name_stem).ok()?.info;
+ // NOTE(review): file-system edits are discarded — presumably because the
+ // client performs the actual file rename itself; confirm at the call site.
+ change.file_system_edits.clear();
+ Some(change)
+}
+
fn find_module_at_offset(
sema: &Semantics<RootDatabase>,
position: FilePosition,
rustc-hash = "1.1.0"
serde = { version = "1.0.106", features = ["derive"] }
serde_json = { version = "1.0.48", features = ["preserve_order"] }
+serde_path_to_error = "0.1"
threadpool = "1.7.1"
rayon = "1.5"
mimalloc = { version = "0.1.19", default-features = false, optional = true }
use lsp_types::{
CallHierarchyServerCapability, ClientCapabilities, CodeActionKind, CodeActionOptions,
CodeActionProviderCapability, CodeLensOptions, CompletionOptions,
- DocumentOnTypeFormattingOptions, FoldingRangeProviderCapability, HoverProviderCapability,
- ImplementationProviderCapability, OneOf, RenameOptions, SaveOptions,
+ DocumentOnTypeFormattingOptions, FileOperationFilter, FileOperationPattern,
+ FileOperationPatternKind, FileOperationRegistrationOptions, FoldingRangeProviderCapability,
+ HoverProviderCapability, ImplementationProviderCapability, OneOf, RenameOptions, SaveOptions,
SelectionRangeProviderCapability, SemanticTokensFullOptions, SemanticTokensLegend,
SemanticTokensOptions, ServerCapabilities, SignatureHelpOptions, TextDocumentSyncCapability,
TextDocumentSyncKind, TextDocumentSyncOptions, TypeDefinitionProviderCapability,
- WorkDoneProgressOptions,
+ WorkDoneProgressOptions, WorkspaceFileOperationsServerCapabilities,
+ WorkspaceServerCapabilities,
};
use rustc_hash::FxHashSet;
use serde_json::json;
document_link_provider: None,
color_provider: None,
execute_command_provider: None,
- workspace: None,
+ workspace: Some(WorkspaceServerCapabilities {
+ workspace_folders: None,
+ file_operations: Some(WorkspaceFileOperationsServerCapabilities {
+ did_create: None,
+ will_create: None,
+ did_rename: None,
+ will_rename: Some(FileOperationRegistrationOptions {
+ filters: vec![FileOperationFilter {
+ scheme: Some(String::from("file")),
+ pattern: FileOperationPattern {
+ glob: String::from("**/*.rs"),
+ matches: Some(FileOperationPatternKind::File),
+ options: None,
+ },
+ }],
+ }),
+ did_delete: None,
+ will_delete: None,
+ }),
+ }),
call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)),
semantic_tokens_provider: Some(
SemanticTokensOptions {
use std::{
io::Write as _,
process::{self, Stdio},
+ sync::Arc,
};
use ide::{
- CompletionResolveCapability, FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData,
- NavigationTarget, Query, RangeInfo, Runnable, RunnableKind, SearchScope, SymbolKind, TextEdit,
+ AssistConfig, CompletionResolveCapability, FileId, FilePosition, FileRange, HoverAction,
+ HoverGotoTypeData, LineIndex, NavigationTarget, Query, RangeInfo, Runnable, RunnableKind,
+ SearchScope, SourceChange, SymbolKind, TextEdit,
};
use itertools::Itertools;
use lsp_server::ErrorCode;
}
}
+// Handler for the LSP `workspace/willRenameFiles` request: before the client
+// renames files on disk, compute the workspace text edits that keep the
+// source consistent (via `Analysis::will_rename_file` per renamed file).
+pub(crate) fn handle_will_rename_files(
+ snap: GlobalStateSnapshot,
+ params: lsp_types::RenameFilesParams,
+) -> Result<Option<lsp_types::WorkspaceEdit>> {
+ let _p = profile::span("handle_will_rename_files");
+
+ let source_changes: Vec<SourceChange> = params
+ .files
+ .into_iter()
+ .filter_map(|file_rename| {
+ // Skip entries whose URIs fail to parse or are not file paths.
+ let from = Url::parse(&file_rename.old_uri).ok()?;
+ let to = Url::parse(&file_rename.new_uri).ok()?;
+
+ let from_path = from.to_file_path().ok()?;
+ let to_path = to.to_file_path().ok()?;
+
+ // Limit to single-level moves for now.
+ match (from_path.parent(), to_path.parent()) {
+ (Some(p1), Some(p2)) if p1 == p2 => {
+ // Same parent directory: a pure rename. Use the new file
+ // stem (name without extension) as the new module name.
+ let new_name = to_path.file_stem()?;
+ let new_name = new_name.to_str()?;
+ Some((snap.url_to_file_id(&from).ok()?, new_name.to_string()))
+ }
+ _ => None,
+ }
+ })
+ .filter_map(|(file_id, new_name)| {
+ // `will_rename_file` returns Cancelable<Option<SourceChange>>;
+ // drop both cancellations (`.ok()`) and empty results (`?`).
+ snap.analysis.will_rename_file(file_id, &new_name).ok()?
+ })
+ .collect();
+
+ // Drop file system edits since we're just renaming things on the same level
+ let edits = source_changes.into_iter().map(|it| it.source_file_edits).flatten().collect();
+ let source_change = SourceChange::from_edits(edits, Vec::new());
+
+ let workspace_edit = to_proto::workspace_edit(&snap, source_change)?;
+ Ok(Some(workspace_edit))
+}
+
pub(crate) fn handle_goto_definition(
snap: GlobalStateSnapshot,
params: lsp_types::GotoDefinitionParams,
}
}
-fn handle_fixes(
- snap: &GlobalStateSnapshot,
- params: &lsp_types::CodeActionParams,
- res: &mut Vec<lsp_ext::CodeAction>,
-) -> Result<()> {
- let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
- let line_index = snap.analysis.file_line_index(file_id)?;
- let range = from_proto::text_range(&line_index, params.range);
-
- match ¶ms.context.only {
- Some(v) => {
- if !v.iter().any(|it| {
- it == &lsp_types::CodeActionKind::EMPTY
- || it == &lsp_types::CodeActionKind::QUICKFIX
- }) {
- return Ok(());
- }
- }
- None => {}
- };
-
- let diagnostics = snap.analysis.diagnostics(&snap.config.diagnostics, file_id)?;
-
- for fix in diagnostics
- .into_iter()
- .filter_map(|d| d.fix)
- .filter(|fix| fix.fix_trigger_range.intersect(range).is_some())
- {
- let edit = to_proto::snippet_workspace_edit(&snap, fix.source_change)?;
- let action = lsp_ext::CodeAction {
- title: fix.label.to_string(),
- group: None,
- kind: Some(CodeActionKind::QUICKFIX),
- edit: Some(edit),
- is_preferred: Some(false),
- data: None,
- };
- res.push(action);
- }
-
- for fix in snap.check_fixes.get(&file_id).into_iter().flatten() {
- let fix_range = from_proto::text_range(&line_index, fix.range);
- if fix_range.intersect(range).is_none() {
- continue;
- }
- res.push(fix.action.clone());
- }
- Ok(())
-}
-
pub(crate) fn handle_code_action(
- mut snap: GlobalStateSnapshot,
+ snap: GlobalStateSnapshot,
params: lsp_types::CodeActionParams,
) -> Result<Option<Vec<lsp_ext::CodeAction>>> {
let _p = profile::span("handle_code_action");
let range = from_proto::text_range(&line_index, params.range);
let frange = FileRange { file_id, range };
- snap.config.assist.allowed = params
- .clone()
- .context
- .only
- .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect());
+ let assists_config = AssistConfig {
+ allowed: params
+ .clone()
+ .context
+ .only
+ .map(|it| it.into_iter().filter_map(from_proto::assist_kind).collect()),
+ ..snap.config.assist
+ };
let mut res: Vec<lsp_ext::CodeAction> = Vec::new();
- handle_fixes(&snap, ¶ms, &mut res)?;
+ let include_quick_fixes = match ¶ms.context.only {
+ Some(v) => v.iter().any(|it| {
+ it == &lsp_types::CodeActionKind::EMPTY || it == &lsp_types::CodeActionKind::QUICKFIX
+ }),
+ None => true,
+ };
+ if include_quick_fixes {
+ add_quick_fixes(&snap, frange, &line_index, &mut res)?;
+ }
if snap.config.client_caps.code_action_resolve {
for (index, assist) in
- snap.analysis.unresolved_assists(&snap.config.assist, frange)?.into_iter().enumerate()
+ snap.analysis.unresolved_assists(&assists_config, frange)?.into_iter().enumerate()
{
res.push(to_proto::unresolved_code_action(&snap, params.clone(), assist, index)?);
}
} else {
- for assist in snap.analysis.resolved_assists(&snap.config.assist, frange)?.into_iter() {
+ for assist in snap.analysis.resolved_assists(&assists_config, frange)?.into_iter() {
res.push(to_proto::resolved_code_action(&snap, assist)?);
}
}
Ok(Some(res))
}
+// Collects quick-fix code actions for `frange` into `acc`: fixes attached to
+// analysis diagnostics, plus cached fixes from `snap.check_fixes`.
+fn add_quick_fixes(
+ snap: &GlobalStateSnapshot,
+ frange: FileRange,
+ line_index: &Arc<LineIndex>,
+ acc: &mut Vec<lsp_ext::CodeAction>,
+) -> Result<()> {
+ let diagnostics = snap.analysis.diagnostics(&snap.config.diagnostics, frange.file_id)?;
+
+ // Only diagnostics with a fix whose trigger range touches the requested
+ // range contribute an action.
+ for fix in diagnostics
+ .into_iter()
+ .filter_map(|d| d.fix)
+ .filter(|fix| fix.fix_trigger_range.intersect(frange.range).is_some())
+ {
+ let edit = to_proto::snippet_workspace_edit(&snap, fix.source_change)?;
+ let action = lsp_ext::CodeAction {
+ title: fix.label.to_string(),
+ group: None,
+ kind: Some(CodeActionKind::QUICKFIX),
+ edit: Some(edit),
+ is_preferred: Some(false),
+ data: None,
+ };
+ acc.push(action);
+ }
+
+ // Pre-built fixes cached per file (presumably from flycheck/`cargo check`
+ // — confirm), filtered by the same range intersection.
+ for fix in snap.check_fixes.get(&frange.file_id).into_iter().flatten() {
+ let fix_range = from_proto::text_range(&line_index, fix.range);
+ if fix_range.intersect(frange.range).is_some() {
+ acc.push(fix.action.clone());
+ }
+ }
+ Ok(())
+}
+
pub(crate) fn handle_code_action_resolve(
mut snap: GlobalStateSnapshot,
mut code_action: lsp_ext::CodeAction,
pub type Result<T, E = Error> = std::result::Result<T, E>;
pub fn from_json<T: DeserializeOwned>(what: &'static str, json: serde_json::Value) -> Result<T> {
-    let res = T::deserialize(&json)
+ // serde_path_to_error wraps the deserializer so the resulting error's
+ // Display output includes the JSON path to the offending field, making
+ // config/deserialization failures actionable.
+    let res = serde_path_to_error::deserialize(&json)
        .map_err(|e| format!("Failed to deserialize {}: {}; {}", what, e, json))?;
    Ok(res)
}
.on::<lsp_types::request::SemanticTokensRangeRequest>(
handlers::handle_semantic_tokens_range,
)
+ .on::<lsp_types::request::WillRenameFiles>(handlers::handle_will_rename_files)
.on::<lsp_ext::Ssr>(handlers::handle_ssr)
.finish();
Ok(())
[dependencies]
itertools = "0.9.0"
rowan = "0.10.0"
-rustc_lexer = { version = "691.0.0", package = "rustc-ap-rustc_lexer" }
+rustc_lexer = { version = "695.0.0", package = "rustc-ap-rustc_lexer" }
rustc-hash = "1.1.0"
arrayvec = "0.5.1"
once_cell = "1.3.1"