tinymist_query/semantic_tokens_delta.rs

1use lsp_types::{SemanticToken, SemanticTokensEdit};
2
3use crate::prelude::*;
4
/// The [`textDocument/semanticTokens/full/delta`] request is sent from the
/// client to the server to resolve the semantic tokens of a given file,
/// **returning only the delta**.
///
/// [`textDocument/semanticTokens/full/delta`]: https://microsoft.github.io/language-server-protocol/specification#textDocument_semanticTokens
///
/// Similar to [`semantic_tokens_full`](crate::SemanticTokensFullRequest),
/// except it returns a sequence of [`lsp_types::SemanticTokensEdit`] to
/// transform a previous result into a new result.
///
/// # Compatibility
///
/// This request was introduced in specification version 3.16.0.
#[derive(Debug, Clone)]
pub struct SemanticTokensDeltaRequest {
    /// The path of the document to get semantic tokens for.
    pub path: PathBuf,
    /// The result id of the previous (client-held) token response that the
    /// delta should be computed against.
    pub previous_result_id: String,
}
25
26impl SemanticRequest for SemanticTokensDeltaRequest {
27    type Response = SemanticTokensFullDeltaResult;
28    /// Handles the request to compute the semantic tokens delta for a given
29    /// document.
30    fn request(self, ctx: &mut LocalContext) -> Option<Self::Response> {
31        let source = ctx.source_by_path(&self.path).ok()?;
32        let (tokens, result_id) = ctx.cached_tokens(&source);
33
34        Some(match ctx.tokens.as_ref().and_then(|t| t.prev.as_ref()) {
35            Some(cached) => SemanticTokensFullDeltaResult::TokensDelta(SemanticTokensDelta {
36                result_id,
37                edits: token_delta(cached, &tokens),
38            }),
39            None => {
40                log::warn!(
41                    "No previous tokens found for delta computation in {}, prev_id: {:?}",
42                    self.path.display(),
43                    self.previous_result_id
44                );
45                SemanticTokensFullDeltaResult::Tokens(SemanticTokens {
46                    result_id,
47                    data: tokens.as_ref().clone(),
48                })
49            }
50        })
51    }
52}
53
54fn token_delta(from: &[SemanticToken], to: &[SemanticToken]) -> Vec<SemanticTokensEdit> {
55    // Taken from `rust-analyzer`'s algorithm
56    // https://github.com/rust-lang/rust-analyzer/blob/master/crates/rust-analyzer/src/semantic_tokens.rs#L219
57
58    let start = from
59        .iter()
60        .zip(to.iter())
61        .take_while(|(x, y)| x == y)
62        .count();
63
64    let (_, from) = from.split_at(start);
65    let (_, to) = to.split_at(start);
66
67    let dist_from_end = from
68        .iter()
69        .rev()
70        .zip(to.iter().rev())
71        .take_while(|(x, y)| x == y)
72        .count();
73
74    let (from, _) = from.split_at(from.len() - dist_from_end);
75    let (to, _) = to.split_at(to.len() - dist_from_end);
76
77    if from.is_empty() && to.is_empty() {
78        vec![]
79    } else {
80        vec![SemanticTokensEdit {
81            start: 5 * start as u32,
82            delete_count: 5 * from.len() as u32,
83            data: Some(to.into()),
84        }]
85    }
86}