//! Semantic Tokens helpers

use std::ops;

use lsp_types::{
    Range, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
    SemanticTokensEdit,
};

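// Generates a `pub(crate)` constant for every listed token type, the
// `SUPPORTED_TYPES` slice containing all of them, and `standard_fallback_type`,
// which maps a custom type to its declared standard fallback (`None` if it has
// no fallback).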
macro_rules! define_semantic_token_types {
    (
        standard {
            $($standard:ident),*$(,)?
        }
        custom {
            $(($custom:ident, $string:literal) $(=> $fallback:ident)?),*$(,)?
        }

    ) => {
        $(pub(crate) const $standard: SemanticTokenType = SemanticTokenType::$standard;)*
        $(pub(crate) const $custom: SemanticTokenType = SemanticTokenType::new($string);)*

        pub(crate) const SUPPORTED_TYPES: &[SemanticTokenType] = &[
            $(SemanticTokenType::$standard,)*
            $($custom),*
        ];

        pub(crate) fn standard_fallback_type(token: SemanticTokenType) -> Option<SemanticTokenType> {
            $(
                if token == $custom {
                    None $(.or(Some(SemanticTokenType::$fallback)))?
                } else
            )*
            { Some(token) }
        }
    };
}

define_semantic_token_types![
    standard {
        COMMENT,
        DECORATOR,
        ENUM_MEMBER,
        ENUM,
        FUNCTION,
        INTERFACE,
        KEYWORD,
        MACRO,
        METHOD,
        NAMESPACE,
        NUMBER,
        OPERATOR,
        PARAMETER,
        PROPERTY,
        STRING,
        STRUCT,
        TYPE_PARAMETER,
        VARIABLE,
    }

    custom {
        (ANGLE, "angle"),
        (ARITHMETIC, "arithmetic") => OPERATOR,
        (ATTRIBUTE, "attribute") => DECORATOR,
        (ATTRIBUTE_BRACKET, "attributeBracket") => DECORATOR,
        (BITWISE, "bitwise") => OPERATOR,
        (BOOLEAN, "boolean"),
        (BRACE, "brace"),
        (BRACKET, "bracket"),
        (BUILTIN_ATTRIBUTE, "builtinAttribute") => DECORATOR,
        (BUILTIN_TYPE, "builtinType"),
        (CHAR, "character") => STRING,
        (COLON, "colon"),
        (COMMA, "comma"),
        (COMPARISON, "comparison") => OPERATOR,
        (CONST_PARAMETER, "constParameter"),
        (DERIVE, "derive") => DECORATOR,
        (DERIVE_HELPER, "deriveHelper") => DECORATOR,
        (DOT, "dot"),
        (ESCAPE_SEQUENCE, "escapeSequence") => STRING,
        (FORMAT_SPECIFIER, "formatSpecifier") => STRING,
        (GENERIC, "generic") => TYPE_PARAMETER,
        (LABEL, "label"),
        (LIFETIME, "lifetime"),
        (LOGICAL, "logical") => OPERATOR,
        (MACRO_BANG, "macroBang") => MACRO,
        (PARENTHESIS, "parenthesis"),
        (PUNCTUATION, "punctuation"),
        (SELF_KEYWORD, "selfKeyword") => KEYWORD,
        (SELF_TYPE_KEYWORD, "selfTypeKeyword") => KEYWORD,
        (SEMICOLON, "semicolon"),
        (TYPE_ALIAS, "typeAlias"),
        (TOOL_MODULE, "toolModule") => DECORATOR,
        (UNION, "union"),
        (UNRESOLVED_REFERENCE, "unresolvedReference"),
    }
];

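// Counts the token trees it is given; used below to compute how many standard
// modifiers are defined.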
macro_rules! count_tts {
    () => {0usize};
    ($_head:tt $($tail:tt)*) => {1usize + count_tts!($($tail)*)};
}
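
// The modifier analogue of `define_semantic_token_types!`: generates the modifier
// constants, the `SUPPORTED_MODIFIERS` slice, and `LAST_STANDARD_MOD`, the number
// of standard modifiers (every bit at or above that index is a custom modifier).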
macro_rules! define_semantic_token_modifiers {
    (
        standard {
            $($standard:ident),*$(,)?
        }
        custom {
            $(($custom:ident, $string:literal)),*$(,)?
        }

    ) => {

        $(pub(crate) const $standard: SemanticTokenModifier = SemanticTokenModifier::$standard;)*
        $(pub(crate) const $custom: SemanticTokenModifier = SemanticTokenModifier::new($string);)*

        pub(crate) const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
            $(SemanticTokenModifier::$standard,)*
            $($custom),*
        ];

        const LAST_STANDARD_MOD: usize = count_tts!($($standard)*);
    };
}

define_semantic_token_modifiers![
    standard {
        DOCUMENTATION,
        DECLARATION,
        STATIC,
        DEFAULT_LIBRARY,
    }
    custom {
        (ASYNC, "async"),
        (ATTRIBUTE_MODIFIER, "attribute"),
        (CALLABLE, "callable"),
        (CONSTANT, "constant"),
        (CONSUMING, "consuming"),
        (CONTROL_FLOW, "controlFlow"),
        (CRATE_ROOT, "crateRoot"),
        (INJECTED, "injected"),
        (INTRA_DOC_LINK, "intraDocLink"),
        (LIBRARY, "library"),
        (MACRO_MODIFIER, "macro"),
        (MUTABLE, "mutable"),
        (PUBLIC, "public"),
        (REFERENCE, "reference"),
        (TRAIT_MODIFIER, "trait"),
        (UNSAFE, "unsafe"),
    }
];

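/// A set of modifiers for a single token, stored as a bitset indexed by each
/// modifier's position in `SUPPORTED_MODIFIERS`.
///
/// A minimal usage sketch (illustrative only, not compiled as a doctest):
///
/// ```ignore
/// let mut mods = ModifierSet::default();
/// mods |= DOCUMENTATION; // standard modifier, low bit
/// mods |= MUTABLE; // custom modifier, above `LAST_STANDARD_MOD`
/// // Keep only the standard modifiers (the low `LAST_STANDARD_MOD` bits):
/// mods.standard_fallback();
/// ```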
#[derive(Default)]
pub(crate) struct ModifierSet(pub(crate) u32);

impl ModifierSet {
    pub(crate) fn standard_fallback(&mut self) {
        // Remove all non-standard modifiers (everything at or above `LAST_STANDARD_MOD`).
        self.0 = self.0 & !(!0u32 << LAST_STANDARD_MOD)
    }
}

impl ops::BitOrAssign<SemanticTokenModifier> for ModifierSet {
    fn bitor_assign(&mut self, rhs: SemanticTokenModifier) {
        let idx = SUPPORTED_MODIFIERS.iter().position(|it| it == &rhs).unwrap();
        self.0 |= 1 << idx;
    }
}

/// Tokens are encoded relative to each other.
///
/// This is a direct port of <https://github.com/microsoft/vscode-languageserver-node/blob/f425af9de46a0187adb78ec8a46b9b2ce80c5412/server/src/sematicTokens.proposed.ts#L45>
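///
/// Each pushed token is stored as a delta from the previous one: `delta_line`
/// relative to the previous token's line, and `delta_start` relative to the
/// previous token's start column when both tokens share a line (otherwise the
/// absolute column). A rough sketch of typical use (illustrative only;
/// `Position` is `lsp_types::Position`):
///
/// ```ignore
/// let mut builder = SemanticTokensBuilder::new("result-id".to_owned());
/// // Token at line 2, columns 5..8: stored as (delta_line 2, delta_start 5).
/// builder.push(Range::new(Position::new(2, 5), Position::new(2, 8)), 0, 0);
/// // Token on the same line, columns 10..12: stored as (delta_line 0, delta_start 5).
/// builder.push(Range::new(Position::new(2, 10), Position::new(2, 12)), 1, 0);
/// let tokens = builder.build();
/// ```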
pub(crate) struct SemanticTokensBuilder {
    id: String,
    prev_line: u32,
    prev_char: u32,
    data: Vec<SemanticToken>,
}

impl SemanticTokensBuilder {
    pub(crate) fn new(id: String) -> Self {
        SemanticTokensBuilder { id, prev_line: 0, prev_char: 0, data: Default::default() }
    }

    /// Push a new token onto the builder
    pub(crate) fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) {
        let mut push_line = range.start.line;
        let mut push_char = range.start.character;

        if !self.data.is_empty() {
            push_line -= self.prev_line;
            if push_line == 0 {
                push_char -= self.prev_char;
            }
        }

        // A token cannot be multiline
        let token_len = range.end.character - range.start.character;

        let token = SemanticToken {
            delta_line: push_line,
            delta_start: push_char,
            length: token_len,
            token_type: token_index,
            token_modifiers_bitset: modifier_bitset,
        };

        self.data.push(token);

        self.prev_line = range.start.line;
        self.prev_char = range.start.character;
    }

    pub(crate) fn build(self) -> SemanticTokens {
        SemanticTokens { result_id: Some(self.id), data: self.data }
    }
}

pub(crate) fn diff_tokens(old: &[SemanticToken], new: &[SemanticToken]) -> Vec<SemanticTokensEdit> {
    let offset = new.iter().zip(old.iter()).take_while(|&(n, p)| n == p).count();

    let (_, old) = old.split_at(offset);
    let (_, new) = new.split_at(offset);

    let offset_from_end =
        new.iter().rev().zip(old.iter().rev()).take_while(|&(n, p)| n == p).count();

    let (old, _) = old.split_at(old.len() - offset_from_end);
    let (new, _) = new.split_at(new.len() - offset_from_end);

    if old.is_empty() && new.is_empty() {
        vec![]
    } else {
        // The LSP `data` field is a flat array of `u32`s (five per token), while we
        // diff whole tokens here, so `start` and `delete_count` are expressed in
        // multiples of the serialized size of `SemanticToken`.
        vec![SemanticTokensEdit {
            start: 5 * offset as u32,
            delete_count: 5 * old.len() as u32,
            data: Some(new.into()),
        }]
    }
}

pub(crate) fn type_index(ty: SemanticTokenType) -> u32 {
    SUPPORTED_TYPES.iter().position(|it| *it == ty).unwrap() as u32
}

#[cfg(test)]
mod tests {
    use super::*;

    fn from(t: (u32, u32, u32, u32, u32)) -> SemanticToken {
        SemanticToken {
            delta_line: t.0,
            delta_start: t.1,
            length: t.2,
            token_type: t.3,
            token_modifiers_bitset: t.4,
        }
    }
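
    // Illustrative checks of the helpers above; the expected values follow from the
    // type and modifier orderings declared in this file.

    #[test]
    fn test_standard_fallback_type_mapping() {
        // A custom type with a declared fallback maps onto the standard type ...
        assert_eq!(standard_fallback_type(ARITHMETIC), Some(SemanticTokenType::OPERATOR));
        // ... a custom type without one is dropped entirely ...
        assert_eq!(standard_fallback_type(ANGLE), None);
        // ... and a standard type passes through unchanged.
        assert_eq!(standard_fallback_type(KEYWORD), Some(SemanticTokenType::KEYWORD));
    }

    #[test]
    fn test_modifier_set_standard_fallback() {
        let mut mods = ModifierSet::default();
        mods |= DOCUMENTATION; // standard modifier, bit 0
        mods |= MUTABLE; // custom modifier, above `LAST_STANDARD_MOD`
        mods.standard_fallback();
        // Only the standard bit survives.
        assert_eq!(mods.0, 1 << 0);
    }

    #[test]
    fn test_builder_delta_encoding() {
        use lsp_types::Position;

        let mut builder = SemanticTokensBuilder::new("id".to_owned());
        builder.push(Range::new(Position::new(2, 5), Position::new(2, 8)), 0, 0);
        builder.push(Range::new(Position::new(2, 10), Position::new(2, 12)), 1, 0);
        builder.push(Range::new(Position::new(4, 3), Position::new(4, 7)), 2, 0);

        // The first token is absolute, same-line tokens encode a column delta, and a
        // new line resets the column to an absolute value.
        assert_eq!(
            builder.build().data,
            vec![from((2, 5, 3, 0, 0)), from((0, 5, 2, 1, 0)), from((2, 3, 4, 2, 0))]
        );
    }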

    #[test]
    fn test_diff_insert_at_end() {
        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(
            edits[0],
            SemanticTokensEdit {
                start: 10,
                delete_count: 0,
                data: Some(vec![from((11, 12, 13, 14, 15))])
            }
        );
    }

    #[test]
    fn test_diff_insert_at_beginning() {
        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
        let after = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(
            edits[0],
            SemanticTokensEdit {
                start: 0,
                delete_count: 0,
                data: Some(vec![from((11, 12, 13, 14, 15))])
            }
        );
    }

    #[test]
    fn test_diff_insert_in_middle() {
        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
        let after = [
            from((1, 2, 3, 4, 5)),
            from((10, 20, 30, 40, 50)),
            from((60, 70, 80, 90, 100)),
            from((6, 7, 8, 9, 10)),
        ];

        let edits = diff_tokens(&before, &after);
        assert_eq!(
            edits[0],
            SemanticTokensEdit {
                start: 5,
                delete_count: 0,
                data: Some(vec![from((10, 20, 30, 40, 50)), from((60, 70, 80, 90, 100))])
            }
        );
    }

    #[test]
    fn test_diff_remove_from_end() {
        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(edits[0], SemanticTokensEdit { start: 10, delete_count: 5, data: Some(vec![]) });
    }

    #[test]
    fn test_diff_remove_from_beginning() {
        let before = [from((11, 12, 13, 14, 15)), from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];
        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(edits[0], SemanticTokensEdit { start: 0, delete_count: 5, data: Some(vec![]) });
    }

    #[test]
    fn test_diff_remove_from_middle() {
        let before = [
            from((1, 2, 3, 4, 5)),
            from((10, 20, 30, 40, 50)),
            from((60, 70, 80, 90, 100)),
            from((6, 7, 8, 9, 10)),
        ];
        let after = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(edits[0], SemanticTokensEdit { start: 5, delete_count: 10, data: Some(vec![]) });
    }
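
    // Two more edge cases for `diff_tokens` (illustrative): identical inputs produce
    // no edit, and a replacement in the middle becomes a single delete-and-insert.

    #[test]
    fn test_diff_no_change() {
        let tokens = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10))];

        assert!(diff_tokens(&tokens, &tokens).is_empty());
    }

    #[test]
    fn test_diff_replace_in_middle() {
        let before = [from((1, 2, 3, 4, 5)), from((6, 7, 8, 9, 10)), from((11, 12, 13, 14, 15))];
        let after = [from((1, 2, 3, 4, 5)), from((60, 70, 80, 90, 100)), from((11, 12, 13, 14, 15))];

        let edits = diff_tokens(&before, &after);
        assert_eq!(
            edits[0],
            SemanticTokensEdit {
                start: 5,
                delete_count: 5,
                data: Some(vec![from((60, 70, 80, 90, 100))])
            }
        );
    }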
}