use lsp_types::{SemanticToken, SemanticTokensEdit};

use crate::prelude::*;

/// The [`textDocument/semanticTokens/full/delta`] request is sent from the
/// client to the server to resolve the semantic tokens of a given file,
/// **returning only the delta**.
///
/// [`textDocument/semanticTokens/full/delta`]: https://microsoft.github.io/language-server-protocol/specification#textDocument_semanticTokens
///
/// Similar to [`semantic_tokens_full`](crate::SemanticTokensFullRequest),
/// except it returns a sequence of [`lsp_types::SemanticTokensEdit`] to
/// transform a previous result into a new result.
///
/// # Compatibility
///
/// This request was introduced in specification version 3.16.0.
#[derive(Debug, Clone)]
pub struct SemanticTokensDeltaRequest {
    /// The path of the document to get semantic tokens for.
    pub path: PathBuf,
    /// The previous result id to compute the delta from.
    pub previous_result_id: String,
}

impl SemanticRequest for SemanticTokensDeltaRequest {
    type Response = SemanticTokensFullDeltaResult;

    /// Handles the request to compute the semantic tokens delta for a given
    /// document.
    fn request(self, ctx: &mut LocalContext) -> Option<Self::Response> {
        let source = ctx.source_by_path(&self.path).ok()?;
        let (tokens, result_id) = ctx.cached_tokens(&source);

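        // Return only the edits against the previously cached token snapshot
        // when one exists; otherwise fall back to sending the full token list.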
        Some(match ctx.tokens.as_ref().and_then(|t| t.prev.as_ref()) {
            Some(cached) => SemanticTokensFullDeltaResult::TokensDelta(SemanticTokensDelta {
                result_id,
                edits: token_delta(cached, &tokens),
            }),
            None => {
                log::warn!(
                    "No previous tokens found for delta computation in {}, prev_id: {:?}",
                    self.path.display(),
                    self.previous_result_id
                );
                SemanticTokensFullDeltaResult::Tokens(SemanticTokens {
                    result_id,
                    data: tokens.as_ref().clone(),
                })
            }
        })
    }
}

fn token_delta(from: &[SemanticToken], to: &[SemanticToken]) -> Vec<SemanticTokensEdit> {
    // Taken from `rust-analyzer`'s algorithm
    // https://github.com/rust-lang/rust-analyzer/blob/master/crates/rust-analyzer/src/semantic_tokens.rs#L219

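    // Length of the longest common prefix shared by both token streams.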
    let start = from
        .iter()
        .zip(to.iter())
        .take_while(|(x, y)| x == y)
        .count();

    let (_, from) = from.split_at(start);
    let (_, to) = to.split_at(start);

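    // Length of the longest common suffix of what remains after the prefix.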
    let dist_from_end = from
        .iter()
        .rev()
        .zip(to.iter().rev())
        .take_while(|(x, y)| x == y)
        .count();

    let (from, _) = from.split_at(from.len() - dist_from_end);
    let (to, _) = to.split_at(to.len() - dist_from_end);

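    // Everything identical at both ends has been trimmed away; if nothing is
    // left, the streams were equal and no edit is needed. Otherwise the
    // differing middle span is replaced with a single edit. Offsets and
    // lengths are scaled by 5 because each LSP semantic token is encoded as
    // five consecutive `u32` values.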
    if from.is_empty() && to.is_empty() {
        vec![]
    } else {
        vec![SemanticTokensEdit {
            start: 5 * start as u32,
            delete_count: 5 * from.len() as u32,
            data: Some(to.into()),
        }]
    }
}
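
// A minimal sketch of `token_delta`'s expected behavior, assuming the
// `lsp_types::SemanticToken` field layout used below; the `tok` helper is a
// hypothetical convenience for these examples only.
#[cfg(test)]
mod token_delta_tests {
    use super::*;

    // Hypothetical helper: build a token distinguished only by its line and
    // column deltas; the remaining fields are fixed placeholders.
    fn tok(delta_line: u32, delta_start: u32) -> SemanticToken {
        SemanticToken {
            delta_line,
            delta_start,
            length: 1,
            token_type: 0,
            token_modifiers_bitset: 0,
        }
    }

    #[test]
    fn identical_streams_yield_no_edits() {
        let tokens = vec![tok(0, 0), tok(0, 2)];
        assert!(token_delta(&tokens, &tokens).is_empty());
    }

    #[test]
    fn changed_middle_token_yields_one_edit() {
        let from = vec![tok(0, 0), tok(0, 2), tok(1, 0)];
        let to = vec![tok(0, 0), tok(0, 3), tok(1, 0)];

        let edits = token_delta(&from, &to);
        assert_eq!(edits.len(), 1);
        // The shared one-token prefix is kept (start = 1 token * 5 u32s), the
        // one differing token is deleted, and one replacement token is sent.
        assert_eq!(edits[0].start, 5);
        assert_eq!(edits[0].delete_count, 5);
        assert_eq!(edits[0].data.as_ref().map(Vec::len), Some(1));
    }
}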