1 //! Semantic Tokens helpers
use std::ops;

use lsp_types::{
    Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
    SemanticTokensEdit,
};
10 macro_rules! define_semantic_token_types {
11 ($(($ident:ident, $string:literal)),*$(,)?) => {
12 $(pub(crate) const $ident: SemanticTokenType = SemanticTokenType::new($string);)*
14 pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
15 SemanticTokenType::COMMENT,
16 SemanticTokenType::KEYWORD,
17 SemanticTokenType::STRING,
18 SemanticTokenType::NUMBER,
19 SemanticTokenType::REGEXP,
20 SemanticTokenType::OPERATOR,
21 SemanticTokenType::NAMESPACE,
22 SemanticTokenType::TYPE,
23 SemanticTokenType::STRUCT,
24 SemanticTokenType::CLASS,
25 SemanticTokenType::INTERFACE,
26 SemanticTokenType::ENUM,
27 SemanticTokenType::ENUM_MEMBER,
28 SemanticTokenType::TYPE_PARAMETER,
29 SemanticTokenType::FUNCTION,
30 SemanticTokenType::METHOD,
31 SemanticTokenType::PROPERTY,
32 SemanticTokenType::MACRO,
33 SemanticTokenType::VARIABLE,
34 SemanticTokenType::PARAMETER,
40 define_semantic_token_types![
42 (ARITHMETIC, "arithmetic"),
43 (ATTRIBUTE, "attribute"),
44 (ATTRIBUTE_BRACKET, "attributeBracket"),
49 (BUILTIN_ATTRIBUTE, "builtinAttribute"),
50 (BUILTIN_TYPE, "builtinType"),
54 (COMPARISON, "comparison"),
55 (CONST_PARAMETER, "constParameter"),
57 (DERIVE_HELPER, "deriveHelper"),
59 (ESCAPE_SEQUENCE, "escapeSequence"),
60 (FORMAT_SPECIFIER, "formatSpecifier"),
63 (LIFETIME, "lifetime"),
65 (MACRO_BANG, "macroBang"),
66 (OPERATOR, "operator"),
67 (PARENTHESIS, "parenthesis"),
68 (PUNCTUATION, "punctuation"),
69 (SELF_KEYWORD, "selfKeyword"),
70 (SELF_TYPE_KEYWORD, "selfTypeKeyword"),
71 (SEMICOLON, "semicolon"),
72 (TYPE_ALIAS, "typeAlias"),
73 (TOOL_MODULE, "toolModule"),
75 (UNRESOLVED_REFERENCE, "unresolvedReference"),
78 macro_rules! define_semantic_token_modifiers {
79 ($(($ident:ident, $string:literal)),*$(,)?) => {
80 $(pub(crate) const $ident: SemanticTokenModifier = SemanticTokenModifier::new($string);)*
82 pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
83 SemanticTokenModifier::DOCUMENTATION,
84 SemanticTokenModifier::DECLARATION,
85 SemanticTokenModifier::DEFINITION,
86 SemanticTokenModifier::STATIC,
87 SemanticTokenModifier::ABSTRACT,
88 SemanticTokenModifier::DEPRECATED,
89 SemanticTokenModifier::READONLY,
90 SemanticTokenModifier::DEFAULT_LIBRARY,
96 define_semantic_token_modifiers![
98 (ATTRIBUTE_MODIFIER, "attribute"),
99 (CALLABLE, "callable"),
100 (CONSTANT, "constant"),
101 (CONSUMING, "consuming"),
102 (CONTROL_FLOW, "controlFlow"),
103 (CRATE_ROOT, "crateRoot"),
104 (INJECTED, "injected"),
105 (INTRA_DOC_LINK, "intraDocLink"),
106 (LIBRARY, "library"),
107 (MUTABLE, "mutable"),
109 (REFERENCE, "reference"),
110 (TRAIT_MODIFIER, "trait"),
115 pub(crate) struct ModifierSet(pub(crate) u32);
117 impl ops::BitOrAssign<SemanticTokenModifier> for ModifierSet {
118 fn bitor_assign(&mut self, rhs: SemanticTokenModifier) {
119 let idx = SUPPORTED_MODIFIERS.iter().position(|it| it == &rhs).unwrap();
124 /// Tokens are encoded relative to each other.
126 /// This is a direct port of <https://github.com/microsoft/vscode-languageserver-node/blob/f425af9de46a0187adb78ec8a46b9b2ce80c5412/server/src/sematicTokens.proposed.ts#L45>
127 pub(crate) struct SemanticTokensBuilder {
131 data: Vec<SemanticToken>,
134 impl SemanticTokensBuilder {
135 pub(crate) fn new(id: String) -> Self {
136 SemanticTokensBuilder { id, prev_line: 0, prev_char: 0, data: Default::default() }
139 /// Push a new token onto the builder
140 pub(crate) fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) {
141 let mut push_line = range.start.line as u32;
142 let mut push_char = range.start.character as u32;
144 if !self.data.is_empty() {
145 push_line -= self.prev_line;
147 push_char -= self.prev_char;
151 // A token cannot be multiline
152 let token_len = range.end.character - range.start.character;
154 let token = SemanticToken {
155 delta_line: push_line,
156 delta_start: push_char,
157 length: token_len as u32,
158 token_type: token_index,
159 token_modifiers_bitset: modifier_bitset,
162 self.data.push(token);
164 self.prev_line = range.start.line as u32;
165 self.prev_char = range.start.character as u32;
168 pub(crate) fn build(self) -> SemanticTokens {
169 SemanticTokens { result_id: Some(self.id), data: self.data }
173 pub(crate) fn diff_tokens(old: &[SemanticToken], new: &[SemanticToken]) -> Vec<SemanticTokensEdit> {
174 let offset = new.iter().zip(old.iter()).take_while(|&(n, p)| n == p).count();
176 let (_, old) = old.split_at(offset);
177 let (_, new) = new.split_at(offset);
179 let offset_from_end =
180 new.iter().rev().zip(old.iter().rev()).take_while(|&(n, p)| n == p).count();
182 let (old, _) = old.split_at(old.len() - offset_from_end);
183 let (new, _) = new.split_at(new.len() - offset_from_end);
185 if old.is_empty() && new.is_empty() {
188 // The lsp data field is actually a byte-diff but we
189 // travel in tokens so `start` and `delete_count` are in multiples of the
190 // serialized size of `SemanticToken`.
191 vec![SemanticTokensEdit {
192 start: 5 * offset as u32,
193 delete_count: 5 * old.len() as u32,
194 data: Some(new.into()),
199 pub(crate) fn type_index(ty: SemanticTokenType) -> u32 {
200 SUPPORTED_TYPES.iter().position(|it| *it == ty).unwrap() as u32
207 fn from(t: (u32, u32, u32, u32, u32)) -> SemanticToken {
213 token_modifiers_bitset: t.4,
218 fn test_diff_insert_at_end() {
219 let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
220 let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
222 let edits = diff_tokens(&before, &after);
228 data: Some(vec![from((11, 12, 13, 14, 15))])
234 fn test_diff_insert_at_beginning() {
235 let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
236 let after = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
238 let edits = diff_tokens(&before, &after);
244 data: Some(vec![from((11, 12, 13, 14, 15))])
250 fn test_diff_insert_in_middle() {
251 let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
253 from((1, 2, 3, 4, 5)),
254 from((10, 20, 30, 40, 50)),
255 from((60, 70, 80, 90, 100)),
256 from((6, 7, 8, 9, 10)),
259 let edits = diff_tokens(&before, &after);
265 data: Some(vec![from((10, 20, 30, 40, 50)), from((60, 70, 80, 90, 100))])
271 fn test_diff_remove_from_end() {
272 let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
273 let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
275 let edits = diff_tokens(&before, &after);
276 assert_eq!(edits[0], SemanticTokensEdit { start: 10, delete_count: 5, data: Some(vec![]) });
280 fn test_diff_remove_from_beginning() {
281 let before = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
282 let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
284 let edits = diff_tokens(&before, &after);
285 assert_eq!(edits[0], SemanticTokensEdit { start: 0, delete_count: 5, data: Some(vec![]) });
289 fn test_diff_remove_from_middle() {
291 from((1, 2, 3, 4, 5)),
292 from((10, 20, 30, 40, 50)),
293 from((60, 70, 80, 90, 100)),
294 from((6, 7, 8, 9, 10)),
296 let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
298 let edits = diff_tokens(&before, &after);
299 assert_eq!(edits[0], SemanticTokensEdit { start: 5, delete_count: 10, data: Some(vec![]) });