tinymist_query/semantic_tokens_delta.rs
use lsp_types::{SemanticToken, SemanticTokensEdit};

use crate::prelude::*;

/// The [`textDocument/semanticTokens/full/delta`] request is sent from the
/// client to the server to resolve the semantic tokens of a given file,
/// **returning only the delta**.
///
/// [`textDocument/semanticTokens/full/delta`]: https://microsoft.github.io/language-server-protocol/specification#textDocument_semanticTokens
///
/// Similar to [`semantic_tokens_full`](crate::SemanticTokensFullRequest),
/// except it returns a sequence of [`lsp_types::SemanticTokensEdit`] to
/// transform a previous result into a new result.
///
/// # Compatibility
///
/// This request was introduced in specification version 3.16.0.
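///
/// # Example
///
/// A minimal sketch of constructing the request; the server plumbing that
/// produces a `LocalContext` to hand to [`SemanticRequest::request`] is
/// assumed here and not shown.
///
/// ```ignore
/// let request = SemanticTokensDeltaRequest {
///     path: PathBuf::from("/project/main.typ"),
///     previous_result_id: "1".to_owned(),
/// };
/// ```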
#[derive(Debug, Clone)]
pub struct SemanticTokensDeltaRequest {
    /// The path of the document to get semantic tokens for.
    pub path: PathBuf,
    /// The previous result id to compute the delta from.
    pub previous_result_id: String,
}

impl SemanticRequest for SemanticTokensDeltaRequest {
    type Response = SemanticTokensFullDeltaResult;

    /// Handles the request to compute the semantic tokens delta for a given
    /// document.
    fn request(self, ctx: &mut LocalContext) -> Option<Self::Response> {
        let source = ctx.source_by_path(&self.path).ok()?;
        let (tokens, result_id) = ctx.cached_tokens(&source);

        Some(match ctx.tokens.as_ref().and_then(|t| t.prev.as_ref()) {
            // A previous result is cached, so only the edits transforming it
            // into the new result are sent back.
            Some(cached) => SemanticTokensFullDeltaResult::TokensDelta(SemanticTokensDelta {
                result_id,
                edits: token_delta(cached, &tokens),
            }),
            // No previous result to diff against: fall back to a full response.
            None => {
                log::warn!(
                    "No previous tokens found for delta computation in {}, prev_id: {:?}",
                    self.path.display(),
                    self.previous_result_id
                );
                SemanticTokensFullDeltaResult::Tokens(SemanticTokens {
                    result_id,
                    data: tokens.as_ref().clone(),
                })
            }
        })
    }
}
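
// For reference, a successful delta response of the shape produced above
// serializes to JSON roughly like this (values invented for illustration;
// see the LSP 3.16 specification for the exact schema):
//
//     { "resultId": "2", "edits": [ { "start": 5, "deleteCount": 5, "data": [/* ... */] } ] }
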
fn token_delta(from: &[SemanticToken], to: &[SemanticToken]) -> Vec<SemanticTokensEdit> {
    // Taken from `rust-analyzer`'s algorithm:
    // https://github.com/rust-lang/rust-analyzer/blob/master/crates/rust-analyzer/src/semantic_tokens.rs#L219

    // Skip the common prefix of unchanged tokens.
    let start = from
        .iter()
        .zip(to.iter())
        .take_while(|(x, y)| x == y)
        .count();
    let (_, from) = from.split_at(start);
    let (_, to) = to.split_at(start);

    // Skip the common suffix of unchanged tokens.
    let dist_from_end = from
        .iter()
        .rev()
        .zip(to.iter().rev())
        .take_while(|(x, y)| x == y)
        .count();
    let (from, _) = from.split_at(from.len() - dist_from_end);
    let (to, _) = to.split_at(to.len() - dist_from_end);

    if from.is_empty() && to.is_empty() {
        vec![]
    } else {
        // `start` and `delete_count` are measured in integers in the LSP data
        // array, and each `SemanticToken` encodes to five `u32`s, hence the
        // factor of 5.
        vec![SemanticTokensEdit {
            start: 5 * start as u32,
            delete_count: 5 * from.len() as u32,
            data: Some(to.into()),
        }]
    }
}
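
// A couple of sanity checks for `token_delta`, sketched against the behavior
// described above; the token values are invented purely for illustration.
#[cfg(test)]
mod tests {
    use super::*;

    /// Builds a token that varies only in line delta and length.
    fn tok(delta_line: u32, length: u32) -> SemanticToken {
        SemanticToken {
            delta_line,
            delta_start: 0,
            length,
            token_type: 0,
            token_modifiers_bitset: 0,
        }
    }

    #[test]
    fn identical_token_streams_produce_no_edits() {
        let tokens = [tok(0, 1), tok(1, 2)];
        assert!(token_delta(&tokens, &tokens).is_empty());
    }

    #[test]
    fn changed_token_in_the_middle_yields_a_single_edit() {
        let from = [tok(0, 1), tok(1, 2), tok(1, 3)];
        let to = [tok(0, 1), tok(1, 9), tok(1, 3)];

        let edits = token_delta(&from, &to);
        assert_eq!(edits.len(), 1);
        // One unchanged leading token, so the edit starts at 1 * 5 integers
        // and deletes exactly one encoded token (5 integers).
        assert_eq!(edits[0].start, 5);
        assert_eq!(edits[0].delete_count, 5);
        assert_eq!(edits[0].data.as_deref(), Some(&[tok(1, 9)][..]));
    }
}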