//!
//! Each tick provides an immutable snapshot of the state as `WorldSnapshot`.
-use std::{sync::Arc, time::Instant};
+use std::{
+ sync::{Arc, Mutex},
+ time::Instant,
+};
use crossbeam_channel::{unbounded, Receiver, Sender};
use flycheck::FlycheckHandle;
-use lsp_types::Url;
+use lsp_types::{SemanticTokens, Url};
use parking_lot::RwLock;
use ra_db::{CrateId, VfsPath};
use ra_ide::{Analysis, AnalysisChange, AnalysisHost, FileId};
pub(crate) analysis_host: AnalysisHost,
pub(crate) diagnostics: DiagnosticCollection,
pub(crate) mem_docs: FxHashMap<VfsPath, DocumentData>,
+ pub(crate) semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
pub(crate) vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
pub(crate) status: Status,
pub(crate) source_root_config: SourceRootConfig,
pub(crate) check_fixes: CheckFixes,
pub(crate) latest_requests: Arc<RwLock<LatestRequests>>,
mem_docs: FxHashMap<VfsPath, DocumentData>,
+ pub semantic_tokens_cache: Arc<Mutex<FxHashMap<Url, SemanticTokens>>>,
vfs: Arc<RwLock<(vfs::Vfs, FxHashMap<FileId, LineEndings>)>>,
pub(crate) workspaces: Arc<Vec<ProjectWorkspace>>,
}
analysis_host,
diagnostics: Default::default(),
mem_docs: FxHashMap::default(),
+ semantic_tokens_cache: Arc::new(Default::default()),
vfs: Arc::new(RwLock::new((vfs::Vfs::default(), FxHashMap::default()))),
status: Status::default(),
source_root_config: SourceRootConfig::default(),
latest_requests: Arc::clone(&self.latest_requests),
check_fixes: Arc::clone(&self.diagnostics.check_fixes),
mem_docs: self.mem_docs.clone(),
+ semantic_tokens_cache: Arc::clone(&self.semantic_tokens_cache),
}
}
CallHierarchyOutgoingCall, CallHierarchyOutgoingCallsParams, CallHierarchyPrepareParams,
CodeActionKind, CodeLens, Command, CompletionItem, Diagnostic, DocumentFormattingParams,
DocumentHighlight, DocumentSymbol, FoldingRange, FoldingRangeParams, HoverContents, Location,
- Position, PrepareRenameResponse, Range, RenameParams, SemanticTokensParams,
- SemanticTokensRangeParams, SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation,
- SymbolTag, TextDocumentIdentifier, Url, WorkspaceEdit,
+ Position, PrepareRenameResponse, Range, RenameParams, SemanticTokensEditResult,
+ SemanticTokensEditsParams, SemanticTokensParams, SemanticTokensRangeParams,
+ SemanticTokensRangeResult, SemanticTokensResult, SymbolInformation, SymbolTag,
+ TextDocumentIdentifier, Url, WorkspaceEdit,
};
use ra_ide::{
FileId, FilePosition, FileRange, HoverAction, HoverGotoTypeData, NavigationTarget, Query,
let highlights = snap.analysis.highlight(file_id)?;
let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+
+ // Unconditionally cache the tokens
+ snap.semantic_tokens_cache
+ .lock()
+ .unwrap()
+ .insert(params.text_document.uri, semantic_tokens.clone());
+
+ Ok(Some(semantic_tokens.into()))
+}
+
+pub(crate) fn handle_semantic_tokens_edits(
+ snap: GlobalStateSnapshot,
+ params: SemanticTokensEditsParams,
+) -> Result<Option<SemanticTokensEditResult>> {
+ let _p = profile("handle_semantic_tokens_edits");
+
+ let file_id = from_proto::file_id(&snap, ¶ms.text_document.uri)?;
+ let text = snap.analysis.file_text(file_id)?;
+ let line_index = snap.analysis.file_line_index(file_id)?;
+
+ let highlights = snap.analysis.highlight(file_id)?;
+
+ let semantic_tokens = to_proto::semantic_tokens(&text, &line_index, highlights);
+
+ let mut cache = snap.semantic_tokens_cache.lock().unwrap();
+ let cached_tokens = cache.entry(params.text_document.uri).or_default();
+
+ if let Some(prev_id) = &cached_tokens.result_id {
+ if *prev_id == params.previous_result_id {
+ let edits = to_proto::semantic_token_edits(&cached_tokens, &semantic_tokens);
+ *cached_tokens = semantic_tokens;
+ return Ok(Some(edits.into()));
+ }
+ }
+
+ *cached_tokens = semantic_tokens.clone();
+
Ok(Some(semantic_tokens.into()))
}
use std::ops;
-use lsp_types::{Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens};
+use lsp_types::{
+ Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
+ SemanticTokensEdit,
+};
macro_rules! define_semantic_token_types {
($(($ident:ident, $string:literal)),*$(,)?) => {
/// Tokens are encoded relative to each other.
///
/// This is a direct port of https://github.com/microsoft/vscode-languageserver-node/blob/f425af9de46a0187adb78ec8a46b9b2ce80c5412/server/src/sematicTokens.proposed.ts#L45
-#[derive(Default)]
pub(crate) struct SemanticTokensBuilder {
+ id: String,
prev_line: u32,
prev_char: u32,
data: Vec<SemanticToken>,
}
impl SemanticTokensBuilder {
+ pub fn new(id: String) -> Self {
+ SemanticTokensBuilder { id, prev_line: 0, prev_char: 0, data: Default::default() }
+ }
+
/// Push a new token onto the builder
pub fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) {
let mut push_line = range.start.line as u32;
}
pub fn build(self) -> SemanticTokens {
- SemanticTokens { result_id: None, data: self.data }
+ SemanticTokens { result_id: Some(self.id), data: self.data }
+ }
+}
+
+/// Computes a minimal single-edit delta between two semantic-token streams
+/// by stripping the longest common prefix and suffix; everything in between
+/// is replaced in one `SemanticTokensEdit`.
+pub fn diff_tokens(old: &[SemanticToken], new: &[SemanticToken]) -> Vec<SemanticTokensEdit> {
+ // Number of leading tokens shared by both streams.
+ let offset = new.iter().zip(old.iter()).take_while(|&(n, p)| n == p).count();
+
+ // Drop the common prefix from both sides.
+ let (_, old) = old.split_at(offset);
+ let (_, new) = new.split_at(offset);
+
+ // Number of trailing tokens shared by the remainders.
+ let offset_from_end =
+ new.iter().rev().zip(old.iter().rev()).take_while(|&(n, p)| n == p).count();
+
+ // Drop the common suffix, leaving only the differing middle sections.
+ let (old, _) = old.split_at(old.len() - offset_from_end);
+ let (new, _) = new.split_at(new.len() - offset_from_end);
+
+ if old.is_empty() && new.is_empty() {
+ vec![]
+ } else {
+ // The lsp data field is actually a byte-diff but we
+ // travel in tokens so `start` and `delete_count` are in multiples of the
+ // serialized size of `SemanticToken`.
+ vec![SemanticTokensEdit {
+ start: 5 * offset as u32,
+ delete_count: 5 * old.len() as u32,
+ data: Some(new.into()),
+ }]
 }
}
pub fn type_index(type_: SemanticTokenType) -> u32 {
SUPPORTED_TYPES.iter().position(|it| *it == type_).unwrap() as u32
}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ // Exercises `diff_tokens` with insertions and removals at the beginning,
+ // middle and end; `start`/`delete_count` are in u32 units (5 per token).
+
+ // Builds a `SemanticToken` from a
+ // (delta_line, delta_start, length, token_type, token_modifiers_bitset) tuple.
+ fn from(t: (u32, u32, u32, u32, u32)) -> SemanticToken {
+ SemanticToken {
+ delta_line: t.0,
+ delta_start: t.1,
+ length: t.2,
+ token_type: t.3,
+ token_modifiers_bitset: t.4,
+ }
+ }
+
+ #[test]
+ fn test_diff_insert_at_end() {
+ let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+ let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
+
+ let edits = diff_tokens(&before, &after);
+ assert_eq!(
+ edits[0],
+ SemanticTokensEdit {
+ start: 10,
+ delete_count: 0,
+ data: Some(vec![from((11, 12, 13, 14, 15))])
+ }
+ );
+ }
+
+ #[test]
+ fn test_diff_insert_at_beginning() {
+ let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+ let after = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+ let edits = diff_tokens(&before, &after);
+ assert_eq!(
+ edits[0],
+ SemanticTokensEdit {
+ start: 0,
+ delete_count: 0,
+ data: Some(vec![from((11, 12, 13, 14, 15))])
+ }
+ );
+ }
+
+ #[test]
+ fn test_diff_insert_in_middle() {
+ let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+ let after = [
+ from((1, 2, 3, 4, 5)),
+ from((10, 20, 30, 40, 50)),
+ from((60, 70, 80, 90, 100)),
+ from((6, 7, 8, 9, 10)),
+ ];
+
+ let edits = diff_tokens(&before, &after);
+ assert_eq!(
+ edits[0],
+ SemanticTokensEdit {
+ start: 5,
+ delete_count: 0,
+ data: Some(vec![from((10, 20, 30, 40, 50)), from((60, 70, 80, 90, 100))])
+ }
+ );
+ }
+
+ #[test]
+ fn test_diff_remove_from_end() {
+ let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
+ let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+ let edits = diff_tokens(&before, &after);
+ assert_eq!(edits[0], SemanticTokensEdit { start: 10, delete_count: 5, data: Some(vec![]) });
+ }
+
+ #[test]
+ fn test_diff_remove_from_beginning() {
+ let before = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+ let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+ let edits = diff_tokens(&before, &after);
+ assert_eq!(edits[0], SemanticTokensEdit { start: 0, delete_count: 5, data: Some(vec![]) });
+ }
+
+ #[test]
+ fn test_diff_remove_from_middle() {
+ let before = [
+ from((1, 2, 3, 4, 5)),
+ from((10, 20, 30, 40, 50)),
+ from((60, 70, 80, 90, 100)),
+ from((6, 7, 8, 9, 10)),
+ ];
+ let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
+
+ let edits = diff_tokens(&before, &after);
+ assert_eq!(edits[0], SemanticTokensEdit { start: 5, delete_count: 10, data: Some(vec![]) });
+ }
+}