1 //! Semantic Tokens helpers
6 Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
// Declares the semantic token type constants used by the server: standard LSP
// types are re-exported under their own names, custom rust-analyzer types are
// constructed from camelCase string identifiers, and the standard set is
// collected into `SUPPORTED_TYPES` for the server capabilities.
macro_rules! define_semantic_token_types {
// Matcher fragment: comma-separated list of standard token type idents.
$($standard:ident),*$(,)?
// Matcher fragment: (CONST_NAME, "lspStringId") pairs for custom types.
$(($custom:ident, $string:literal)),*$(,)?
// One `const` per standard type, mirroring `SemanticTokenType`'s own consts.
$(pub(crate) const $standard: SemanticTokenType = SemanticTokenType::$standard;)*
// One `const` per custom type, built from its LSP string identifier.
$(pub(crate) const $custom: SemanticTokenType = SemanticTokenType::new($string);)*
// NOTE(review): only the standard entries are visible in this expansion;
// presumably the custom types are appended right after — confirm against
// the full macro body.
pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
$(SemanticTokenType::$standard,)*
// Instantiates the token-type constants. The pairs below are rust-analyzer
// specific extensions; each constant is mapped to the camelCase identifier
// sent to the client over LSP.
define_semantic_token_types![
(ARITHMETIC, "arithmetic"),
(ATTRIBUTE, "attribute"),
(ATTRIBUTE_BRACKET, "attributeBracket"),
(BUILTIN_ATTRIBUTE, "builtinAttribute"),
(BUILTIN_TYPE, "builtinType"),
(COMPARISON, "comparison"),
(CONST_PARAMETER, "constParameter"),
(DERIVE_HELPER, "deriveHelper"),
(ESCAPE_SEQUENCE, "escapeSequence"),
(FORMAT_SPECIFIER, "formatSpecifier"),
(LIFETIME, "lifetime"),
(MACRO_BANG, "macroBang"),
(PARENTHESIS, "parenthesis"),
(PUNCTUATION, "punctuation"),
(SELF_KEYWORD, "selfKeyword"),
(SELF_TYPE_KEYWORD, "selfTypeKeyword"),
(SEMICOLON, "semicolon"),
(TYPE_ALIAS, "typeAlias"),
(TOOL_MODULE, "toolModule"),
(UNRESOLVED_REFERENCE, "unresolvedReference"),
// Same scheme as `define_semantic_token_types`, but for token modifiers:
// standard LSP modifiers are re-exported, custom ones are created from string
// ids, and the standard set is collected into `SUPPORTED_MODIFIERS`.
macro_rules! define_semantic_token_modifiers {
// Matcher fragment: comma-separated list of standard modifier idents.
$($standard:ident),*$(,)?
// Matcher fragment: (CONST_NAME, "lspStringId") pairs for custom modifiers.
$(($custom:ident, $string:literal)),*$(,)?
$(pub(crate) const $standard: SemanticTokenModifier = SemanticTokenModifier::$standard;)*
$(pub(crate) const $custom: SemanticTokenModifier = SemanticTokenModifier::new($string);)*
// NOTE(review): the index of each modifier in this slice is its bit position
// in `ModifierSet` (see `BitOrAssign` below); only standard entries are
// visible here — confirm custom ones follow in the full macro body.
pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
$(SemanticTokenModifier::$standard,)*
// Instantiates the modifier constants. `ATTRIBUTE_MODIFIER` and
// `TRAIT_MODIFIER` carry a `_MODIFIER` suffix — presumably to avoid clashing
// with the like-named token *type* constants; confirm against the full file.
define_semantic_token_modifiers![
(ATTRIBUTE_MODIFIER, "attribute"),
(CALLABLE, "callable"),
(CONSTANT, "constant"),
(CONSUMING, "consuming"),
(CONTROL_FLOW, "controlFlow"),
(CRATE_ROOT, "crateRoot"),
(INJECTED, "injected"),
(INTRA_DOC_LINK, "intraDocLink"),
(LIBRARY, "library"),
(MUTABLE, "mutable"),
(REFERENCE, "reference"),
(TRAIT_MODIFIER, "trait"),
138 pub(crate) struct ModifierSet(pub(crate) u32);
140 impl ops::BitOrAssign<SemanticTokenModifier> for ModifierSet {
141 fn bitor_assign(&mut self, rhs: SemanticTokenModifier) {
142 let idx = SUPPORTED_MODIFIERS.iter().position(|it| it == &rhs).unwrap();
147 /// Tokens are encoded relative to each other.
149 /// This is a direct port of <https://github.com/microsoft/vscode-languageserver-node/blob/f425af9de46a0187adb78ec8a46b9b2ce80c5412/server/src/sematicTokens.proposed.ts#L45>
150 pub(crate) struct SemanticTokensBuilder {
154 data: Vec<SemanticToken>,
157 impl SemanticTokensBuilder {
158 pub(crate) fn new(id: String) -> Self {
159 SemanticTokensBuilder { id, prev_line: 0, prev_char: 0, data: Default::default() }
162 /// Push a new token onto the builder
163 pub(crate) fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) {
164 let mut push_line = range.start.line as u32;
165 let mut push_char = range.start.character as u32;
167 if !self.data.is_empty() {
168 push_line -= self.prev_line;
170 push_char -= self.prev_char;
174 // A token cannot be multiline
175 let token_len = range.end.character - range.start.character;
177 let token = SemanticToken {
178 delta_line: push_line,
179 delta_start: push_char,
180 length: token_len as u32,
181 token_type: token_index,
182 token_modifiers_bitset: modifier_bitset,
185 self.data.push(token);
187 self.prev_line = range.start.line as u32;
188 self.prev_char = range.start.character as u32;
191 pub(crate) fn build(self) -> SemanticTokens {
192 SemanticTokens { result_id: Some(self.id), data: self.data }
196 pub(crate) fn diff_tokens(old: &[SemanticToken], new: &[SemanticToken]) -> Vec<SemanticTokensEdit> {
197 let offset = new.iter().zip(old.iter()).take_while(|&(n, p)| n == p).count();
199 let (_, old) = old.split_at(offset);
200 let (_, new) = new.split_at(offset);
202 let offset_from_end =
203 new.iter().rev().zip(old.iter().rev()).take_while(|&(n, p)| n == p).count();
205 let (old, _) = old.split_at(old.len() - offset_from_end);
206 let (new, _) = new.split_at(new.len() - offset_from_end);
208 if old.is_empty() && new.is_empty() {
211 // The lsp data field is actually a byte-diff but we
212 // travel in tokens so `start` and `delete_count` are in multiples of the
213 // serialized size of `SemanticToken`.
214 vec![SemanticTokensEdit {
215 start: 5 * offset as u32,
216 delete_count: 5 * old.len() as u32,
217 data: Some(new.into()),
222 pub(crate) fn type_index(ty: SemanticTokenType) -> u32 {
223 SUPPORTED_TYPES.iter().position(|it| *it == ty).unwrap() as u32
230 fn from(t: (u32, u32, u32, u32, u32)) -> SemanticToken {
236 token_modifiers_bitset: t.4,
#[test]
fn test_diff_insert_at_end() {
    let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
    let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];

    let edits = diff_tokens(&before, &after);
    // Appending one token: pure insert (delete_count == 0) after the two
    // shared tokens (start == 2 tokens * 5 fields).
    assert_eq!(
        edits[0],
        SemanticTokensEdit {
            start: 10,
            delete_count: 0,
            data: Some(vec![from((11, 12, 13, 14, 15))])
        }
    );
}
#[test]
fn test_diff_insert_at_beginning() {
    let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
    let after = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

    let edits = diff_tokens(&before, &after);
    // Prepending one token: pure insert at the very start of the stream.
    assert_eq!(
        edits[0],
        SemanticTokensEdit {
            start: 0,
            delete_count: 0,
            data: Some(vec![from((11, 12, 13, 14, 15))])
        }
    );
}
#[test]
fn test_diff_insert_in_middle() {
    let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
    let after = [
        from((1, 2, 3, 4, 5)),
        from((10, 20, 30, 40, 50)),
        from((60, 70, 80, 90, 100)),
        from((6, 7, 8, 9, 10)),
    ];

    let edits = diff_tokens(&before, &after);
    // Two tokens inserted between the shared prefix (1 token) and suffix.
    assert_eq!(
        edits[0],
        SemanticTokensEdit {
            start: 5,
            delete_count: 0,
            data: Some(vec![from((10, 20, 30, 40, 50)), from((60, 70, 80, 90, 100))])
        }
    );
}
#[test]
fn test_diff_remove_from_end() {
    let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
    let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

    let edits = diff_tokens(&before, &after);
    // Pure deletion of the trailing token: 5 fields removed, nothing inserted.
    assert_eq!(edits[0], SemanticTokensEdit { start: 10, delete_count: 5, data: Some(vec![]) });
}
#[test]
fn test_diff_remove_from_beginning() {
    let before = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
    let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

    let edits = diff_tokens(&before, &after);
    // Pure deletion of the leading token.
    assert_eq!(edits[0], SemanticTokensEdit { start: 0, delete_count: 5, data: Some(vec![]) });
}
#[test]
fn test_diff_remove_from_middle() {
    let before = [
        from((1, 2, 3, 4, 5)),
        from((10, 20, 30, 40, 50)),
        from((60, 70, 80, 90, 100)),
        from((6, 7, 8, 9, 10)),
    ];
    let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

    let edits = diff_tokens(&before, &after);
    // Two middle tokens (10 fields) deleted between shared prefix and suffix.
    assert_eq!(edits[0], SemanticTokensEdit { start: 5, delete_count: 10, data: Some(vec![]) });
}