X-Git-Url: https://git.lizzy.rs/?a=blobdiff_plain;f=crates%2Frust-analyzer%2Fsrc%2Fto_proto.rs;h=e53cd3c7ba541b59440823d49f233713f8f78a69;hb=c9b4ac5be4daaabc062ab1ee663eba8594750003;hp=73dcba6940eaccf94116113960afd2e6237ffa11;hpb=ef6df1d994394e5f2097edfa12d0489762903099;p=rust.git

diff --git a/crates/rust-analyzer/src/to_proto.rs b/crates/rust-analyzer/src/to_proto.rs
index 73dcba6940e..e53cd3c7ba5 100644
--- a/crates/rust-analyzer/src/to_proto.rs
+++ b/crates/rust-analyzer/src/to_proto.rs
@@ -6,7 +6,7 @@
 };
 
 use ide::{
-    Annotation, AnnotationKind, Assist, AssistKind, CallInfo, Cancelable, CompletionItem,
+    Annotation, AnnotationKind, Assist, AssistKind, CallInfo, Cancellable, CompletionItem,
     CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange, FileSystemEdit,
     Fold, FoldKind, Highlight, HlMod, HlOperator, HlPunct, HlRange, HlTag, Indel, InlayHint,
     InlayKind, InsertTextFormat, Markup, NavigationTarget, ReferenceAccess, RenameError, Runnable,
@@ -270,9 +270,12 @@ fn set_score(res: &mut lsp_types::CompletionItem, relevance: CompletionRelevance
             set_score(&mut lsp_item_with_ref, relevance);
             lsp_item_with_ref.label =
                 format!("&{}{}", mutability.as_keyword_for_ref(), lsp_item_with_ref.label);
-            if let Some(lsp_types::CompletionTextEdit::Edit(it)) = &mut lsp_item_with_ref.text_edit
-            {
-                it.new_text = format!("&{}{}", mutability.as_keyword_for_ref(), it.new_text);
+            if let Some(it) = &mut lsp_item_with_ref.text_edit {
+                let new_text = match it {
+                    lsp_types::CompletionTextEdit::Edit(it) => &mut it.new_text,
+                    lsp_types::CompletionTextEdit::InsertAndReplace(it) => &mut it.new_text,
+                };
+                *new_text = format!("&{}{}", mutability.as_keyword_for_ref(), new_text);
             }
             vec![lsp_item_with_ref, lsp_item]
         }
@@ -381,6 +384,7 @@ pub(crate) fn semantic_tokens(
     text: &str,
     line_index: &LineIndex,
     highlights: Vec<HlRange>,
+    highlight_strings: bool,
 ) -> lsp_types::SemanticTokens {
     let id = TOKEN_RESULT_COUNTER.fetch_add(1, Ordering::SeqCst).to_string();
     let mut builder = semantic_tokens::SemanticTokensBuilder::new(id);
@@ -389,8 +393,11 @@ pub(crate) fn semantic_tokens(
         if highlight_range.highlight.is_empty() {
             continue;
         }
-        let (type_, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
-        let token_index = semantic_tokens::type_index(type_);
+        let (ty, mods) = semantic_token_type_and_modifiers(highlight_range.highlight);
+        if !highlight_strings && ty == lsp_types::SemanticTokenType::STRING {
+            continue;
+        }
+        let token_index = semantic_tokens::type_index(ty);
         let modifier_bitset = mods.0;
 
         for mut text_range in line_index.index.lines(highlight_range.range) {
@@ -398,7 +405,7 @@ pub(crate) fn semantic_tokens(
                 text_range =
                     TextRange::new(text_range.start(), text_range.end() - TextSize::of('\n'));
             }
-            let range = range(&line_index, text_range);
+            let range = range(line_index, text_range);
             builder.push(range, token_index, modifier_bitset);
         }
     }
@@ -459,7 +466,7 @@ fn semantic_token_type_and_modifiers(
         HlTag::BoolLiteral => semantic_tokens::BOOLEAN,
         HlTag::BuiltinType => semantic_tokens::BUILTIN_TYPE,
         HlTag::ByteLiteral | HlTag::NumericLiteral => lsp_types::SemanticTokenType::NUMBER,
-        HlTag::CharLiteral => semantic_tokens::CHAR_LITERAL,
+        HlTag::CharLiteral => semantic_tokens::CHAR,
        HlTag::Comment => lsp_types::SemanticTokenType::COMMENT,
         HlTag::EscapeSequence => semantic_tokens::ESCAPE_SEQUENCE,
         HlTag::FormatSpecifier => semantic_tokens::FORMAT_SPECIFIER,
@@ -497,6 +504,7 @@ fn semantic_token_type_and_modifiers(
             HlMod::Mutable => semantic_tokens::MUTABLE,
             HlMod::Consuming => semantic_tokens::CONSUMING,
             HlMod::Async => semantic_tokens::ASYNC,
+            HlMod::Library => semantic_tokens::LIBRARY,
             HlMod::Unsafe => semantic_tokens::UNSAFE,
             HlMod::Callable => semantic_tokens::CALLABLE,
             HlMod::Static => lsp_types::SemanticTokenModifier::STATIC,
@@ -526,6 +534,7 @@ pub(crate) fn folding_range(
         | FoldKind::Consts
         | FoldKind::Statics
         | FoldKind::WhereClause
+        | FoldKind::ReturnType
         | FoldKind::Array => None,
     };
 
@@ -597,7 +606,7 @@ pub(crate) fn url_from_abs_path(path: &Path) -> lsp_types::Url {
     // Note: lowercasing the `path` itself doesn't help, the `Url::parse`
     // machinery *also* canonicalizes the drive letter. So, just massage the
     // string in place.
-    let mut url = url.into_string();
+    let mut url: String = url.into();
     url[driver_letter_range].make_ascii_lowercase();
     lsp_types::Url::parse(&url).unwrap()
 }
@@ -718,7 +727,7 @@ pub(crate) fn snippet_text_document_edit(
 pub(crate) fn snippet_text_document_ops(
     snap: &GlobalStateSnapshot,
     file_system_edit: FileSystemEdit,
-) -> Cancelable<Vec<lsp_ext::SnippetDocumentChangeOperation>> {
+) -> Cancellable<Vec<lsp_ext::SnippetDocumentChangeOperation>> {
     let mut ops = Vec::new();
     match file_system_edit {
         FileSystemEdit::CreateFile { dst, initial_contents } => {
@@ -748,7 +757,7 @@ pub(crate) fn snippet_text_document_ops(
             let new_uri = snap.anchored_path(&dst);
             let mut rename_file =
                 lsp_types::RenameFile { old_uri, new_uri, options: None, annotation_id: None };
-            if snap.analysis.is_library_file(src) == Ok(true)
+            if snap.analysis.is_library_file(src).ok() == Some(true)
                 && snap.config.change_annotation_support()
             {
                 rename_file.annotation_id = Some(outside_workspace_annotation_id())
@@ -772,7 +781,7 @@ pub(crate) fn snippet_workspace_edit(
         document_changes.extend_from_slice(&ops);
     }
     for (file_id, edit) in source_change.source_file_edits {
-        let edit = snippet_text_document_edit(&snap, source_change.is_snippet, file_id, edit)?;
+        let edit = snippet_text_document_edit(snap, source_change.is_snippet, file_id, edit)?;
         document_changes.push(lsp_ext::SnippetDocumentChangeOperation::Edit(edit));
     }
     let mut workspace_edit = lsp_ext::SnippetWorkspaceEdit {
@@ -948,7 +957,7 @@ pub(crate) fn code_lens(
             let annotation_range = range(&line_index, annotation.range);
 
             let action = run.action();
-            let r = runnable(&snap, run)?;
+            let r = runnable(snap, run)?;
 
             let command = if debug {
                 command::debug_single(&r)
@@ -1141,7 +1150,7 @@ mod tests {
 
     use ide::Analysis;
     use ide_db::helpers::{
-        insert_use::{InsertUseConfig, PrefixKind},
+        insert_use::{ImportGranularity, InsertUseConfig, PrefixKind},
         SnippetCap,
     };
 
@@ -1169,12 +1178,14 @@ fn main() {
             &ide::CompletionConfig {
                 enable_postfix_completions: true,
                 enable_imports_on_the_fly: true,
+                enable_self_on_the_fly: true,
                 add_call_parenthesis: true,
                 add_call_argument_snippets: true,
                 snippet_cap: SnippetCap::new(true),
                 insert_use: InsertUseConfig {
-                    merge: None,
+                    granularity: ImportGranularity::Item,
                     prefix_kind: PrefixKind::Plain,
+                    enforce_granularity: true,
                     group: true,
                 },
             },
@@ -1225,12 +1236,12 @@ fn main() {
         assert_eq!(folds.len(), 4);
 
         let line_index = LineIndex {
-            index: Arc::new(ide::LineIndex::new(&text)),
+            index: Arc::new(ide::LineIndex::new(text)),
             endings: LineEndings::Unix,
             encoding: OffsetEncoding::Utf16,
         };
         let converted: Vec<lsp_types::FoldingRange> =
-            folds.into_iter().map(|it| folding_range(&text, &line_index, true, it)).collect();
+            folds.into_iter().map(|it| folding_range(text, &line_index, true, it)).collect();
 
         let expected_lines = [(0, 2), (4, 10), (5, 6), (7, 9)];
         assert_eq!(converted.len(), expected_lines.len());
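
Note (not part of the patch): the completion hunk above now prefixes `new_text` for both `CompletionTextEdit` variants instead of only `Edit`. A minimal standalone sketch of that borrow-then-prefix pattern, using only `lsp_types` items that appear in the diff; the function name and the `ref_prefix` parameter are made up for illustration:

use lsp_types::{CompletionItem, CompletionTextEdit};

// Prepend a reference prefix such as "&" or "&mut " to whichever text-edit
// variant the completion item carries, mirroring the match in the hunk above.
fn prefix_text_edit(item: &mut CompletionItem, ref_prefix: &str) {
    if let Some(edit) = &mut item.text_edit {
        // Both variants expose a `new_text: String` field.
        let new_text = match edit {
            CompletionTextEdit::Edit(it) => &mut it.new_text,
            CompletionTextEdit::InsertAndReplace(it) => &mut it.new_text,
        };
        *new_text = format!("{}{}", ref_prefix, new_text);
    }
}

Borrowing `new_text` once through the match keeps the `format!` call in a single place rather than duplicating it per variant, which is presumably why the patch restructures the `if let`.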
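
Similarly, the `semantic_tokens` hunks add a `highlight_strings` flag that skips `STRING` tokens before they reach the builder. A rough sketch of just that predicate, assuming the caller already has the resolved token types (the helper name is hypothetical):

use lsp_types::SemanticTokenType;

// Drop STRING token types when string highlighting is disabled; every other
// token type passes through unchanged.
fn filter_strings(
    token_types: Vec<SemanticTokenType>,
    highlight_strings: bool,
) -> Vec<SemanticTokenType> {
    token_types
        .into_iter()
        .filter(|ty| highlight_strings || *ty != SemanticTokenType::STRING)
        .collect()
}

In the patch itself the check happens inline as a `continue` before the token is pushed, so no separate pass is needed; the sketch only isolates the condition.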