mirror of https://github.com/Myriad-Dreamin/tinymist.git (synced 2025-07-24 05:05:00 +00:00)

feat: identify identifier kind for semantic highlighting (#741)

* feat: identify identifier kind for semantic highlighting
* test: update snapshot
parent 1d49e110e2
commit 8ca953dbed
17 changed files with 180 additions and 88 deletions
@@ -34,8 +34,8 @@ use crate::syntax::{
 };
 use crate::upstream::{tooltip_, Tooltip};
 use crate::{
-    lsp_to_typst, path_to_url, typst_to_lsp, LspPosition, LspRange, PositionEncoding, TypstRange,
-    VersionedDocument,
+    lsp_to_typst, path_to_url, typst_to_lsp, LspPosition, LspRange, PositionEncoding,
+    SemanticTokenContext, TypstRange, VersionedDocument,
 };

 use super::{analyze_expr_, definition, Definition};

@@ -53,6 +53,8 @@ pub struct Analysis {
     pub workers: Arc<AnalysisGlobalWorkers>,
     /// The global cache grid for analysis.
     pub cache_grid: Arc<Mutex<AnalysisGlobalCacheGrid>>,
+    /// The semantic token context.
+    pub tokens_ctx: Arc<SemanticTokenContext>,
 }

 impl Analysis {
@@ -3,4 +3,4 @@ source: crates/tinymist-query/src/semantic_tokens_full.rs
 expression: "serde_json::to_string(&result).unwrap()"
 input_file: crates/tinymist-query/src/fixtures/semantic_tokens/base.typ
 ---
-{"data":[0,0,1,2,0,0,1,3,2,0,0,3,1,20,0,0,1,1,18,0,0,1,1,20,0,0,1,1,3,0,0,1,1,20,0,0,1,1,4,0,0,1,1,8,0]}
+{"data":[0,0,1,2,0,0,1,3,2,0,0,3,1,22,0,0,1,1,20,0,0,1,1,22,0,0,1,1,3,0,0,1,1,22,0,0,1,1,4,0,0,1,1,10,0]}
@@ -3,4 +3,4 @@ source: crates/tinymist-query/src/semantic_tokens_full.rs
 expression: "serde_json::to_string(&result).unwrap()"
 input_file: crates/tinymist-query/src/fixtures/semantic_tokens/content-block.typ
 ---
-{"data":[0,0,1,8,0,0,1,1,8,0,0,1,1,8,0,0,1,1,20,0,1,0,1,8,0,0,1,1,8,0,0,1,1,20,0,0,1,1,8,0,0,1,1,20,0,1,0,1,8,0,0,1,1,8,0,0,1,1,8,0,0,1,1,8,0,0,1,1,20,0,0,1,1,8,0,0,1,1,8,0,0,1,1,8,0,0,1,1,8,0,0,1,1,8,0,0,1,3,20,0,0,3,1,8,0,0,1,1,8,0,0,1,1,8,0,0,1,1,20,0]}
+{"data":[0,0,1,10,0,0,1,1,10,0,0,1,1,10,0,0,1,1,22,0,1,0,1,10,0,0,1,1,10,0,0,1,1,22,0,0,1,1,10,0,0,1,1,22,0,1,0,1,10,0,0,1,1,10,0,0,1,1,10,0,0,1,1,10,0,0,1,1,22,0,0,1,1,10,0,0,1,1,10,0,0,1,1,10,0,0,1,1,10,0,0,1,1,10,0,0,1,3,22,0,0,3,1,10,0,0,1,1,10,0,0,1,1,10,0,0,1,1,22,0]}
@@ -3,4 +3,4 @@ source: crates/tinymist-query/src/semantic_tokens_full.rs
 expression: "serde_json::to_string(&result).unwrap()"
 input_file: crates/tinymist-query/src/fixtures/semantic_tokens/fn.typ
 ---
-{"data":[0,0,1,2,0,0,1,3,2,0,0,3,1,20,0,0,1,1,5,0,0,1,1,8,0,0,1,1,18,0,0,1,1,8,0,0,1,1,20,0,0,1,1,18,0,0,1,1,8,0,0,1,1,20,0,0,1,1,3,0,0,1,1,20,0,0,1,1,8,0,0,1,1,20,0,0,1,1,18,0,0,1,1,20,0,0,1,2,3,0,0,2,1,20,0,0,1,1,18,0,0,1,1,20,0,0,1,1,8,0]}
+{"data":[0,0,1,2,0,0,1,3,2,0,0,3,1,22,0,0,1,1,5,0,0,1,1,10,0,0,1,1,20,0,0,1,1,10,0,0,1,1,22,0,0,1,1,20,0,0,1,1,10,0,0,1,1,22,0,0,1,1,3,0,0,1,1,22,0,0,1,1,10,0,0,1,1,22,0,0,1,1,20,0,0,1,1,22,0,0,1,2,3,0,0,2,1,22,0,0,1,1,20,0,0,1,1,22,0,0,1,1,10,0]}
@@ -3,4 +3,4 @@ source: crates/tinymist-query/src/semantic_tokens_full.rs
 expression: "serde_json::to_string(&result).unwrap()"
 input_file: crates/tinymist-query/src/fixtures/semantic_tokens/fn2.typ
 ---
-{"data":[0,0,1,2,0,0,1,3,2,0,0,3,1,20,0,0,1,2,5,0,0,2,1,8,0,0,1,1,8,0,0,1,1,20,0,0,1,1,3,0,0,1,1,20,0,0,1,1,8,0,0,1,1,20,0,1,0,2,20,0,0,2,3,5,0,0,3,1,8,0,0,1,7,1,0,0,7,1,8,0,0,1,1,20,0,0,1,4,18,0,0,4,1,8,0,0,1,1,20,0,0,1,5,1,0,0,5,1,8,0,0,1,1,20,0,1,0,2,20,0,0,2,3,20,0,0,3,3,20,0,0,3,1,20,0,1,0,2,20,0,0,2,1,20,0,0,1,1,20,0,1,0,2,20,0,0,2,1,20,0,0,1,1,20,0,1,0,2,20,0,0,2,3,20,0,0,3,1,20,0,1,0,2,20,0,0,2,1,8,0,0,1,1,20,0,0,1,1,8,0,0,1,1,20,0,1,0,1,8,0]}
+{"data":[0,0,1,2,0,0,1,3,2,0,0,3,1,22,0,0,1,2,5,0,0,2,1,10,0,0,1,1,10,0,0,1,1,22,0,0,1,1,3,0,0,1,1,22,0,0,1,1,10,0,0,1,1,22,0,1,0,2,22,0,0,2,3,5,0,0,3,1,10,0,0,1,7,1,0,0,7,1,10,0,0,1,1,22,0,0,1,4,20,0,0,4,1,10,0,0,1,1,22,0,0,1,5,1,0,0,5,1,10,0,0,1,1,22,0,1,0,2,22,0,0,2,3,22,0,0,3,3,22,0,0,3,1,22,0,1,0,2,22,0,0,2,1,22,0,0,1,1,22,0,1,0,2,22,0,0,2,1,22,0,0,1,1,22,0,1,0,2,22,0,0,2,3,22,0,0,3,1,22,0,1,0,2,22,0,0,2,1,10,0,0,1,1,22,0,0,1,1,10,0,0,1,1,22,0,1,0,1,10,0]}
@@ -3,4 +3,4 @@ source: crates/tinymist-query/src/semantic_tokens_full.rs
 expression: "serde_json::to_string(&result).unwrap()"
 input_file: crates/tinymist-query/src/fixtures/semantic_tokens/for-loop.typ
 ---
-{"data":[0,0,1,2,0,0,1,3,2,0,0,3,1,20,0,0,1,1,18,0,0,1,1,20,0,0,1,2,2,0,0,2,1,20,0,0,1,5,5,0,0,5,1,8,0,0,1,1,4,0,0,1,1,8,0,0,1,1,20,0,0,1,1,8,0,0,1,1,20,0,0,1,1,18,0,0,1,1,20,0,0,1,2,3,0,0,2,1,20,0,0,1,1,4,0,0,1,1,20,0,0,1,1,8,0]}
+{"data":[0,0,1,2,0,0,1,3,2,0,0,3,1,22,0,0,1,1,20,0,0,1,1,22,0,0,1,2,2,0,0,2,1,22,0,0,1,5,5,0,0,5,1,10,0,0,1,1,4,0,0,1,1,10,0,0,1,1,22,0,0,1,1,10,0,0,1,1,22,0,0,1,1,20,0,0,1,1,22,0,0,1,2,3,0,0,2,1,22,0,0,1,1,4,0,0,1,1,22,0,0,1,1,10,0]}
@@ -3,4 +3,4 @@ source: crates/tinymist-query/src/semantic_tokens_full.rs
 expression: "serde_json::to_string(&result).unwrap()"
 input_file: crates/tinymist-query/src/fixtures/semantic_tokens/heading.typ
 ---
-{"data":[0,0,3,14,0,0,3,1,20,0,0,1,1,5,0,0,1,4,5,0,0,4,1,8,0,0,1,17,1,0,0,17,1,8,0]}
+{"data":[0,0,3,16,0,0,3,1,22,0,0,1,1,5,0,0,1,4,5,0,0,4,1,10,0,0,1,17,1,0,0,17,1,10,0]}
@@ -3,4 +3,4 @@ source: crates/tinymist-query/src/semantic_tokens_full.rs
 expression: "serde_json::to_string(&result).unwrap()"
 input_file: crates/tinymist-query/src/fixtures/semantic_tokens/tinymist_issue_601.typ
 ---
-{"data":[0,0,1,19,4,0,1,3,5,4,0,3,1,8,4,0,1,1,20,4,0,1,1,3,4,0,1,1,20,4,0,1,1,20,4,1,0,1,20,4,0,1,1,20,4,1,0,1,20,4]}
+{"data":[0,0,1,21,4,0,1,3,5,4,0,3,1,10,4,0,1,1,22,4,0,1,1,3,4,0,1,1,22,4,0,1,1,22,4,1,0,1,22,4,0,1,1,22,4,1,0,1,22,4]}
@@ -3,4 +3,4 @@ source: crates/tinymist-query/src/semantic_tokens_full.rs
 expression: "serde_json::to_string(&result).unwrap()"
 input_file: crates/tinymist-query/src/fixtures/semantic_tokens/tinymist_issue_638.typ
 ---
-{"data":[0,0,9,20,0,0,9,1,20,0]}
+{"data":[0,0,9,22,0,0,9,1,22,0]}
@@ -3,4 +3,4 @@ source: crates/tinymist-query/src/semantic_tokens_full.rs
 expression: "serde_json::to_string(&result).unwrap()"
 input_file: crates/tinymist-query/src/fixtures/semantic_tokens/typst_lsp_issue_264.typ
 ---
-{"data":[0,0,3,0,0,1,0,4,0,0,1,0,3,0,0,1,0,2,0,0,0,2,1,20,0,1,0,1,20,0,1,0,3,20,0,0,3,4,20,0,0,4,1,20,0,1,0,17,20,0,0,17,1,20,0,1,0,3,20,0]}
+{"data":[0,0,3,0,0,1,0,4,0,0,1,0,3,0,0,1,0,2,0,0,0,2,1,22,0,1,0,1,22,0,1,0,3,22,0,0,3,4,22,0,0,4,1,22,0,1,0,17,22,0,0,17,1,22,0,1,0,3,22,0]}
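A note on the snapshot churn above: in the LSP encoding each semantic token is five integers (delta line, delta start, length, token-type index, modifier bitset), so the only column that moves in these fixtures is the token-type index. This commit inserts `Type` and `Namespace` into `TokenType` right after `Decorator` (see the enum hunk further down), which appears to shift every later variant up by two — 8 becomes 10, 14 becomes 16, 18 becomes 20, 20 becomes 22 — while indices at or below `Decorator` and the modifier column stay put. A small stand-alone sketch for reading such a data array, illustrative only and not part of the commit:

// Illustrative only: decode the flat LSP "data" array from the snapshots above
// into absolute (line, start, length, token_type, modifiers) tuples. The five
// numbers per token are delta-line, delta-start, length, token-type index and
// a modifier bitset, as defined by the LSP specification.
fn decode_semantic_tokens(data: &[u32]) -> Vec<(u32, u32, u32, u32, u32)> {
    let mut out = Vec::new();
    let (mut line, mut start) = (0u32, 0u32);
    for tok in data.chunks_exact(5) {
        let (dl, ds, len, ty, mods) = (tok[0], tok[1], tok[2], tok[3], tok[4]);
        line += dl;
        // the start offset is relative to the previous token only while the
        // token stays on the same line
        start = if dl == 0 { start + ds } else { ds };
        out.push((line, start, len, ty, mods));
    }
    out
}

fn main() {
    // First tokens of the base.typ snapshot: the token-type index moves from
    // 20 to 22 once `Type` and `Namespace` are inserted into the enum.
    let old = [0, 0, 1, 2, 0, 0, 1, 3, 2, 0, 0, 3, 1, 20, 0];
    let new = [0, 0, 1, 2, 0, 0, 1, 3, 2, 0, 0, 3, 1, 22, 0];
    println!("{:?}", decode_semantic_tokens(&old));
    println!("{:?}", decode_semantic_tokens(&new));
}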
@@ -1,10 +1,14 @@
-use std::ops::Range;
+use std::{ops::Range, sync::Arc};

 use lsp_types::{SemanticToken, SemanticTokensEdit};
 use parking_lot::RwLock;
 use typst::syntax::{ast, LinkedNode, Source, SyntaxKind};

-use crate::{LspPosition, PositionEncoding};
+use crate::{
+    syntax::{Expr, ExprInfo},
+    ty::Ty,
+    LspPosition, PositionEncoding,
+};

 use self::delta::token_delta;
 use self::modifier_set::ModifierSet;
@@ -43,11 +47,16 @@ impl SemanticTokenContext {
     }

     /// Get the semantic tokens for a source.
-    pub fn get_semantic_tokens_full(&self, source: &Source) -> (Vec<SemanticToken>, String) {
+    pub fn semantic_tokens_full(
+        &self,
+        source: &Source,
+        ei: Arc<ExprInfo>,
+    ) -> (Vec<SemanticToken>, String) {
         let root = LinkedNode::new(source.root());

         let mut tokenizer = Tokenizer::new(
             source.clone(),
+            ei,
             self.allow_multiline_token,
             self.position_encoding,
         );
@@ -59,15 +68,16 @@ impl SemanticTokenContext {
     }

     /// Get the semantic tokens delta for a source.
-    pub fn try_semantic_tokens_delta_from_result_id(
+    pub fn semantic_tokens_delta(
         &self,
         source: &Source,
+        ei: Arc<ExprInfo>,
         result_id: &str,
     ) -> (Result<Vec<SemanticTokensEdit>, Vec<SemanticToken>>, String) {
         let cached = self.cache.write().try_take_result(result_id);

         // this call will overwrite the cache, so need to read from cache first
-        let (tokens, result_id) = self.get_semantic_tokens_full(source);
+        let (tokens, result_id) = self.semantic_tokens_full(source, ei);

         match cached {
             Some(cached) => (Ok(token_delta(&cached, &tokens)), result_id),
@@ -81,6 +91,7 @@ struct Tokenizer {
     pos_offset: usize,
     output: Vec<SemanticToken>,
     source: Source,
+    ei: Arc<ExprInfo>,
     encoding: PositionEncoding,

     allow_multiline_token: bool,
@@ -89,12 +100,18 @@ struct Tokenizer {
 }

 impl Tokenizer {
-    fn new(source: Source, allow_multiline_token: bool, encoding: PositionEncoding) -> Self {
+    fn new(
+        source: Source,
+        ei: Arc<ExprInfo>,
+        allow_multiline_token: bool,
+        encoding: PositionEncoding,
+    ) -> Self {
         Self {
             curr_pos: LspPosition::new(0, 0),
             pos_offset: 0,
             output: Vec::new(),
             source,
+            ei,
             allow_multiline_token,
             encoding,
@@ -105,10 +122,10 @@ impl Tokenizer {
     /// Tokenize a node and its children
     fn tokenize_tree(&mut self, root: &LinkedNode, modifiers: ModifierSet) {
         let is_leaf = root.get().children().len() == 0;
-        let modifiers = modifiers | modifiers_from_node(root);
+        let mut modifiers = modifiers | modifiers_from_node(root);

         let range = root.range();
-        let mut token = token_from_node(root)
+        let mut token = token_from_node(&self.ei, root, &mut modifiers)
            .or_else(|| is_leaf.then_some(TokenType::Text))
            .map(|token_type| Token::new(token_type, modifiers, range.clone()));
@@ -319,7 +336,11 @@ fn modifiers_from_node(node: &LinkedNode) -> ModifierSet {
 /// In tokenization, returning `Some` stops recursion, while returning `None`
 /// continues and attempts to tokenize each of `node`'s children. If there are
 /// no children, `Text` is taken as the default.
-fn token_from_node(node: &LinkedNode) -> Option<TokenType> {
+fn token_from_node(
+    ei: &ExprInfo,
+    node: &LinkedNode,
+    modifier: &mut ModifierSet,
+) -> Option<TokenType> {
     use SyntaxKind::*;

     match node.kind() {
@@ -329,8 +350,8 @@ fn token_from_node(node: &LinkedNode) -> Option<TokenType> {
         Underscore if node.parent_kind() == Some(Emph) => Some(TokenType::Punctuation),
         Underscore if node.parent_kind() == Some(MathAttach) => Some(TokenType::Operator),

-        MathIdent | Ident => Some(token_from_ident(node)),
-        Hash => token_from_hashtag(node),
+        MathIdent | Ident => Some(token_from_ident(ei, node, modifier)),
+        Hash => token_from_hashtag(ei, node, modifier),

         LeftBrace | RightBrace | LeftBracket | RightBracket | LeftParen | RightParen | Comma
         | Semicolon | Colon => Some(TokenType::Punctuation),
@@ -359,28 +380,99 @@ fn token_from_node(node: &LinkedNode) -> Option<TokenType> {
 }

-// TODO: differentiate also using tokens in scope, not just context
-fn is_function_ident(ident: &LinkedNode) -> bool {
-    let Some(next) = ident.next_leaf() else {
-        return false;
+fn token_from_ident(ei: &ExprInfo, ident: &LinkedNode, modifier: &mut ModifierSet) -> TokenType {
+    let resolved = ei.resolves.get(&ident.span());
+    let context = if let Some(resolved) = resolved {
+        match (&resolved.root, &resolved.val) {
+            (Some(e), t) => Some(token_from_decl_expr(e, t.as_ref(), modifier)),
+            (_, Some(t)) => Some(token_from_term(t, modifier)),
+            _ => None,
+        }
+    } else {
+        None
     };
-    let function_call = matches!(next.kind(), SyntaxKind::LeftParen)
-        && matches!(
-            next.parent_kind(),
-            Some(SyntaxKind::Args | SyntaxKind::Params)
-        );
-    let function_content = matches!(next.kind(), SyntaxKind::LeftBracket)
-        && matches!(next.parent_kind(), Some(SyntaxKind::ContentBlock));
-    function_call || function_content
+
+    if !matches!(context, None | Some(TokenType::Interpolated)) {
+        return context.unwrap_or(TokenType::Interpolated);
+    }
+
+    let next = ident.next_leaf();
+    let next_parent = next.as_ref().and_then(|n| n.parent_kind());
+    let next_kind = next.map(|n| n.kind());
+    let lexical_function_call = matches!(next_kind, Some(SyntaxKind::LeftParen))
+        && matches!(next_parent, Some(SyntaxKind::Args | SyntaxKind::Params));
+    if lexical_function_call {
+        return TokenType::Function;
+    }
+
+    let function_content = matches!(next_kind, Some(SyntaxKind::LeftBracket))
+        && matches!(next_parent, Some(SyntaxKind::ContentBlock));
+    if function_content {
+        return TokenType::Function;
+    }
+
+    TokenType::Interpolated
 }

-fn token_from_ident(ident: &LinkedNode) -> TokenType {
-    if is_function_ident(ident) {
-        TokenType::Function
-    } else {
-        TokenType::Interpolated
+fn token_from_term(t: &Ty, modifier: &mut ModifierSet) -> TokenType {
+    use typst::foundations::Value::*;
+    match t {
+        Ty::Func(..) => TokenType::Function,
+        Ty::Value(v) => {
+            match &v.val {
+                Func(..) => TokenType::Function,
+                Type(..) => {
+                    *modifier = *modifier | ModifierSet::new(&[Modifier::DefaultLibrary]);
+                    TokenType::Function
+                }
+                Module(..) => ns(modifier),
+                // todo: read only modifier
+                _ => TokenType::Interpolated,
+            }
+        }
+        _ => TokenType::Interpolated,
+    }
+}
+
+fn token_from_decl_expr(expr: &Expr, term: Option<&Ty>, modifier: &mut ModifierSet) -> TokenType {
+    use crate::syntax::Decl::*;
+    match expr {
+        Expr::Type(term) => token_from_term(term, modifier),
+        Expr::Decl(decl) => match decl.as_ref() {
+            Func(..) => TokenType::Function,
+            Var(..) => TokenType::Interpolated,
+            Module(..) => ns(modifier),
+            ModuleAlias(..) => ns(modifier),
+            PathStem(..) => ns(modifier),
+            ImportAlias(..) => TokenType::Interpolated,
+            IdentRef(..) => TokenType::Interpolated,
+            ImportPath(..) => TokenType::Interpolated,
+            IncludePath(..) => TokenType::Interpolated,
+            Import(..) => TokenType::Interpolated,
+            ContentRef(..) => TokenType::Interpolated,
+            Label(..) => TokenType::Interpolated,
+            StrName(..) => TokenType::Interpolated,
+            ModuleImport(..) => TokenType::Interpolated,
+            Closure(..) => TokenType::Interpolated,
+            Pattern(..) => TokenType::Interpolated,
+            Spread(..) => TokenType::Interpolated,
+            Content(..) => TokenType::Interpolated,
+            Constant(..) => TokenType::Interpolated,
+            BibEntry(..) => TokenType::Interpolated,
+            Docs(..) => TokenType::Interpolated,
+            Generated(..) => TokenType::Interpolated,
+        },
+        _ => term
+            .map(|term| token_from_term(term, modifier))
+            .unwrap_or(TokenType::Interpolated),
     }
 }

+fn ns(modifier: &mut ModifierSet) -> TokenType {
+    *modifier = *modifier | ModifierSet::new(&[Modifier::Static, Modifier::ReadOnly]);
+    TokenType::Namespace
+}
+
 fn get_expr_following_hashtag<'a>(hashtag: &LinkedNode<'a>) -> Option<LinkedNode<'a>> {
     hashtag
         .next_sibling()
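Taken together, the new `token_from_ident` prefers whatever the resolved definition recorded in `ExprInfo` says (via `token_from_decl_expr` / `token_from_term`) and only falls back to the old lexical lookahead — an identifier directly followed by `(...)` or `[...]` — when resolution yields nothing better than `Interpolated`. A rough, self-contained sketch of that precedence with made-up stand-in types (not the actual tinymist `ExprInfo`/`Ty`/`Decl` definitions):

// Toy illustration of the precedence implemented above, using made-up types.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Kind { Function, Namespace, Interpolated }

// What resolving the identifier (if anything) tells us about its definition.
#[derive(Clone, Copy)]
enum Resolved { Func, Module, Other }

fn classify(resolved: Option<Resolved>, followed_by_call_or_content: bool) -> Kind {
    // 1. Prefer the resolved definition kind, as token_from_decl_expr / token_from_term do.
    let from_resolution = match resolved {
        Some(Resolved::Func) => Some(Kind::Function),
        Some(Resolved::Module) => Some(Kind::Namespace),
        Some(Resolved::Other) => Some(Kind::Interpolated),
        None => None,
    };
    if let Some(kind) = from_resolution {
        if kind != Kind::Interpolated {
            return kind;
        }
    }
    // 2. Otherwise fall back to the old lexical heuristic: an identifier
    //    directly followed by `(...)` or `[...]` is treated as a function.
    if followed_by_call_or_content {
        return Kind::Function;
    }
    Kind::Interpolated
}

fn main() {
    assert_eq!(classify(Some(Resolved::Module), false), Kind::Namespace);
    assert_eq!(classify(None, true), Kind::Function);
    assert_eq!(classify(Some(Resolved::Other), true), Kind::Function);
    println!("classification sketch ok");
}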
@@ -388,8 +480,12 @@ fn get_expr_following_hashtag<'a>(hashtag: &LinkedNode<'a>) -> Option<LinkedNode
         .and_then(|node| node.leftmost_leaf())
 }

-fn token_from_hashtag(hashtag: &LinkedNode) -> Option<TokenType> {
+fn token_from_hashtag(
+    ei: &ExprInfo,
+    hashtag: &LinkedNode,
+    modifier: &mut ModifierSet,
+) -> Option<TokenType> {
     get_expr_following_hashtag(hashtag)
         .as_ref()
-        .and_then(token_from_node)
+        .and_then(|e| token_from_node(ei, e, modifier))
 }
@@ -34,6 +34,8 @@ pub enum TokenType {
     Number,
     Function,
     Decorator,
+    Type,
+    Namespace,
     // Custom types
     Bool,
     Punctuation,
@@ -72,6 +74,8 @@ impl From<TokenType> for SemanticTokenType {
             Number => Self::NUMBER,
             Function => Self::FUNCTION,
             Decorator => Self::DECORATOR,
+            Type => Self::TYPE,
+            Namespace => Self::NAMESPACE,
             Bool => BOOL,
             Punctuation => PUNCTUATION,
             Escape => ESCAPE,
@@ -101,14 +105,17 @@ pub enum Modifier {
     Strong,
     Emph,
     Math,
+    ReadOnly,
+    Static,
+    DefaultLibrary,
 }

 impl Modifier {
-    pub fn index(self) -> u8 {
+    pub const fn index(self) -> u8 {
         self as u8
     }

-    pub fn bitmask(self) -> u32 {
+    pub const fn bitmask(self) -> u32 {
         0b1 << self.index()
     }
 }
@@ -121,6 +128,9 @@ impl From<Modifier> for SemanticTokenModifier {
             Strong => STRONG,
             Emph => EMPH,
             Math => MATH,
+            ReadOnly => Self::READONLY,
+            Static => Self::STATIC,
+            DefaultLibrary => Self::DEFAULT_LIBRARY,
         }
     }
 }
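The three new modifiers reuse the existing encoding: `index` is the enum discriminant and `bitmask` is `1 << index`, and a token's modifier word is the OR of the bitmasks — this is how `ns` above attaches `Static | ReadOnly` to namespace-like identifiers and `token_from_term` attaches `DefaultLibrary` to types. A minimal stand-alone sketch of that scheme using a trimmed-down copy of the enum (variant order here is illustrative, not authoritative):

// Trimmed-down copy of the Modifier enum above, to show how the bitset that
// ends up in the snapshot data is built.
#[derive(Clone, Copy)]
enum Modifier { Strong, Emph, Math, ReadOnly, Static, DefaultLibrary }

impl Modifier {
    const fn index(self) -> u8 { self as u8 }
    const fn bitmask(self) -> u32 { 1 << self.index() }
}

// OR the bitmasks together, as ModifierSet::new(&[...]) does in the diff.
fn modifier_set(modifiers: &[Modifier]) -> u32 {
    modifiers.iter().fold(0, |acc, m| acc | m.bitmask())
}

fn main() {
    // The set attached to namespace-like identifiers by `ns`.
    let ns = modifier_set(&[Modifier::Static, Modifier::ReadOnly]);
    println!("Static | ReadOnly = 0b{ns:b}");
    // The set attached to types by token_from_term.
    let ty = modifier_set(&[Modifier::DefaultLibrary]);
    println!("DefaultLibrary = 0b{ty:b}");
}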
@@ -1,4 +1,4 @@
-use crate::{prelude::*, SemanticTokenContext};
+use crate::prelude::*;

 /// The [`textDocument/semanticTokens/full/delta`] request is sent from the
 /// client to the server to resolve the semantic tokens of a given file,
@@ -21,16 +21,17 @@ pub struct SemanticTokensDeltaRequest {
     pub previous_result_id: String,
 }

-impl SemanticTokensDeltaRequest {
+impl SemanticRequest for SemanticTokensDeltaRequest {
+    type Response = SemanticTokensFullDeltaResult;
     /// Handles the request to compute the semantic tokens delta for a given
     /// document.
-    pub fn request(
-        self,
-        ctx: &SemanticTokenContext,
-        source: Source,
-    ) -> Option<SemanticTokensFullDeltaResult> {
+    fn request(self, ctx: &mut AnalysisContext) -> Option<Self::Response> {
+        let source = ctx.source_by_path(&self.path).ok()?;
+        let ei = ctx.expr_stage(&source);
+
+        let token_ctx = &ctx.analysis.tokens_ctx;
         let (tokens, result_id) =
-            ctx.try_semantic_tokens_delta_from_result_id(&source, &self.previous_result_id);
+            token_ctx.semantic_tokens_delta(&source, ei, &self.previous_result_id);

         match tokens {
             Ok(edits) => Some(
@@ -1,4 +1,4 @@
-use crate::{prelude::*, SemanticTokenContext};
+use crate::prelude::*;

 /// The [`textDocument/semanticTokens/full`] request is sent from the client to
 /// the server to resolve the semantic tokens of a given file.
@@ -22,14 +22,15 @@ pub struct SemanticTokensFullRequest {
     pub path: PathBuf,
 }

-impl SemanticTokensFullRequest {
+impl SemanticRequest for SemanticTokensFullRequest {
+    type Response = SemanticTokensResult;
+
     /// Handles the request to compute the semantic tokens for a given document.
-    pub fn request(
-        self,
-        ctx: &SemanticTokenContext,
-        source: Source,
-    ) -> Option<SemanticTokensResult> {
-        let (tokens, result_id) = ctx.get_semantic_tokens_full(&source);
+    fn request(self, ctx: &mut AnalysisContext) -> Option<Self::Response> {
+        let source = ctx.source_by_path(&self.path).ok()?;
+        let ei = ctx.expr_stage(&source);
+        let token_ctx = &ctx.analysis.tokens_ctx;
+        let (tokens, result_id) = token_ctx.semantic_tokens_full(&source, ei);

         Some(
             SemanticTokens {
@@ -131,13 +132,9 @@ mod tests {
     #[test]
     fn test() {
         snapshot_testing("semantic_tokens", &|ctx, path| {
-            let source = ctx.source_by_path(&path).unwrap();
-
             let request = SemanticTokensFullRequest { path: path.clone() };

-            let cache = SemanticTokenContext::default();
-
-            let mut result = request.request(&cache, source).unwrap();
+            let mut result = request.request(ctx).unwrap();
             if let SemanticTokensResult::Tokens(tokens) = &mut result {
                 tokens.result_id.take();
             }
@@ -12,7 +12,7 @@ use reflexo::ImmutPath;
 use reflexo_typst::vfs::notify::{FileChangeSet, MemoryEvent};
 use reflexo_typst::world::EntryState;
 use tinymist_query::analysis::Analysis;
-use tinymist_query::ExportKind;
+use tinymist_query::{ExportKind, SemanticTokenContext};
 use tinymist_render::PeriscopeRenderer;
 use tokio::sync::mpsc;

@@ -111,6 +111,11 @@ impl LanguageState {
                 caches: Default::default(),
                 workers: Default::default(),
                 cache_grid: Default::default(),
+                tokens_ctx: Arc::new(SemanticTokenContext::new(
+                    const_config.position_encoding,
+                    const_config.tokens_overlapping_token_support,
+                    const_config.tokens_multiline_token_support,
+                )),
             }),
             periscope: PeriscopeRenderer::new(periscope_args.unwrap_or_default()),

@@ -24,7 +24,7 @@ use sync_lsp::*;
 use task::{CacheTask, ExportUserConfig, FormatTask, FormatUserConfig, UserActionTask};
 use tinymist_query::{
     get_semantic_tokens_options, get_semantic_tokens_registration,
-    get_semantic_tokens_unregistration, PageSelection, SemanticTokenContext,
+    get_semantic_tokens_unregistration, PageSelection,
 };
 use tinymist_query::{
     lsp_to_typst, CompilerQueryRequest, CompilerQueryResponse, FoldRequestFeature, OnExportRequest,
@@ -75,8 +75,6 @@ pub struct LanguageState {
     pub config: Config,

     // Resources
-    /// The semantic token context.
-    pub tokens_ctx: SemanticTokenContext,
     /// Source synchronized with client
     pub memory_changes: HashMap<Arc<Path>, MemoryFileMeta>,
     /// The preview state.
@@ -107,11 +105,6 @@ impl LanguageState {
         editor_tx: mpsc::UnboundedSender<EditorRequest>,
     ) -> Self {
         let const_config = &config.const_config;
-        let tokens_ctx = SemanticTokenContext::new(
-            const_config.position_encoding,
-            const_config.tokens_overlapping_token_support,
-            const_config.tokens_multiline_token_support,
-        );
         let formatter = FormatTask::new(FormatUserConfig {
             mode: config.formatter_mode,
             width: config.formatter_print_width.unwrap_or(120),
@@ -134,7 +127,6 @@ impl LanguageState {

             pinning: false,
             focusing: None,
-            tokens_ctx,
             formatter,
             user_action: Default::default(),
             cache: CacheTask::default(),
@@ -1002,17 +994,6 @@ macro_rules! query_source {
     }};
 }

-macro_rules! query_tokens_cache {
-    ($self:ident, $method:ident, $req:expr) => {{
-        let path: ImmutPath = $req.path.clone().into();
-
-        $self.query_source(path, |source| {
-            let res = $req.request(&$self.tokens_ctx, source);
-            Ok(CompilerQueryResponse::$method(res))
-        })
-    }};
-}
-
 impl LanguageState {
     /// Perform a language query.
     pub fn query(&mut self, query: CompilerQueryRequest) -> QueryFuture {
@@ -1022,8 +1003,6 @@ impl LanguageState {
         let is_pinning = self.pinning;
         just_ok(match query {
             InteractCodeContext(req) => query_source!(self, InteractCodeContext, req)?,
-            SemanticTokensFull(req) => query_tokens_cache!(self, SemanticTokensFull, req)?,
-            SemanticTokensDelta(req) => query_tokens_cache!(self, SemanticTokensDelta, req)?,
             FoldingRange(req) => query_source!(self, FoldingRange, req)?,
             SelectionRange(req) => query_source!(self, SelectionRange, req)?,
             DocumentSymbol(req) => query_source!(self, DocumentSymbol, req)?,
@@ -1067,6 +1046,8 @@ impl LanguageState {
         }

         let resp = match query {
+            SemanticTokensFull(req) => handle.run_semantic(snap, req, R::SemanticTokensFull),
+            SemanticTokensDelta(req) => handle.run_semantic(snap, req, R::SemanticTokensDelta),
             Hover(req) => handle.run_stateful(snap, req, R::Hover),
             GotoDefinition(req) => handle.run_stateful(snap, req, R::GotoDefinition),
             GotoDeclaration(req) => handle.run_semantic(snap, req, R::GotoDeclaration),
@@ -374,7 +374,7 @@ fn e2e() {
         });

         let hash = replay_log(&tinymist_binary, &root.join("neovim"));
-        insta::assert_snapshot!(hash, @"siphash128_13:3e22530e3933265ad0afd4a48d02ee45");
+        insta::assert_snapshot!(hash, @"siphash128_13:3032733ed0223012cd64233e662dcea2");
     }

     {
@@ -385,7 +385,7 @@ fn e2e() {
         });

         let hash = replay_log(&tinymist_binary, &root.join("vscode"));
-        insta::assert_snapshot!(hash, @"siphash128_13:f37a8869de8fb35d105aa93312714860");
+        insta::assert_snapshot!(hash, @"siphash128_13:57d7707bb9b4bae7109a830833934a6d");
     }
 }
