feat: nested document symbols

This commit is contained in:
Myriad-Dreamin 2024-03-07 19:31:24 +08:00
parent 50ca444915
commit 22a68003fc
10 changed files with 503 additions and 97 deletions

39
Cargo.lock generated
View file

@@ -570,6 +570,18 @@ dependencies = [
"syn 2.0.52", "syn 2.0.52",
] ]
[[package]]
name = "console"
version = "0.15.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb"
dependencies = [
"encode_unicode",
"lazy_static",
"libc",
"windows-sys 0.52.0",
]
[[package]] [[package]]
name = "core-foundation" name = "core-foundation"
version = "0.9.4" version = "0.9.4"
@@ -913,6 +925,12 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef1a6892d9eef45c8fa6b9e0086428a2cca8491aca8f787c534a3d6d0bcb3ced" checksum = "ef1a6892d9eef45c8fa6b9e0086428a2cca8491aca8f787c534a3d6d0bcb3ced"
[[package]]
name = "encode_unicode"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
[[package]] [[package]]
name = "encoding_rs" name = "encoding_rs"
version = "0.8.33" version = "0.8.33"
@@ -1730,6 +1748,19 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "insta"
version = "1.36.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0a7c22c4d34ef4788c351e971c52bfdfe7ea2766f8c5466bc175dd46e52ac22e"
dependencies = [
"console",
"lazy_static",
"linked-hash-map",
"similar",
"yaml-rust",
]
[[package]] [[package]]
name = "instant" name = "instant"
version = "0.1.12" version = "0.1.12"
@@ -2999,6 +3030,12 @@ version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a" checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a"
[[package]]
name = "similar"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32fea41aca09ee824cc9724996433064c89f7777e60762749a4170a14abbfa21"
[[package]] [[package]]
name = "simplecss" name = "simplecss"
version = "0.2.1" version = "0.2.1"
@@ -3388,9 +3425,11 @@ version = "0.1.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"comemo", "comemo",
"insta",
"itertools 0.12.1", "itertools 0.12.1",
"lazy_static", "lazy_static",
"log", "log",
"once_cell",
"parking_lot", "parking_lot",
"regex", "regex",
"serde", "serde",

View file

@@ -29,10 +29,17 @@ typst-ide.workspace = true
typst-ts-core = { version = "0.4.2-rc6", default-features = false, features = [ typst-ts-core = { version = "0.4.2-rc6", default-features = false, features = [
"flat-vector", "flat-vector",
"vector-bbox", "vector-bbox",
"no-content-hint",
] } ] }
typst-ts-compiler.workspace = true typst-ts-compiler.workspace = true
tower-lsp.workspace = true tower-lsp.workspace = true
[dev-dependencies]
once_cell.workspace = true
insta.workspace = true
serde.workspace = true
serde_json.workspace = true
# [lints] # [lints]
# workspace = true # workspace = true

View file

@@ -1,3 +1,7 @@
use std::ops::Range;
use typst_ts_core::typst::prelude::{eco_vec, EcoVec};
use crate::prelude::*; use crate::prelude::*;
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@@ -13,40 +17,146 @@ impl DocumentSymbolRequest {
) -> Option<DocumentSymbolResponse> { ) -> Option<DocumentSymbolResponse> {
let source = get_suitable_source_in_workspace(world, &self.path).ok()?; let source = get_suitable_source_in_workspace(world, &self.path).ok()?;
let uri = Url::from_file_path(self.path).unwrap(); let symbols = get_lexical_hierarchy(source.clone(), LexicalScopeGranularity::None);
let symbols = get_document_symbols(source, uri, position_encoding);
symbols.map(DocumentSymbolResponse::Flat) let symbols =
symbols.map(|symbols| filter_document_symbols(&symbols, &source, position_encoding));
symbols.map(DocumentSymbolResponse::Nested)
} }
} }
#[comemo::memoize] #[allow(deprecated)]
pub(crate) fn get_document_symbols( fn filter_document_symbols(
source: Source, symbols: &[LexicalHierarchy],
uri: Url, source: &Source,
position_encoding: PositionEncoding, position_encoding: PositionEncoding,
) -> Option<Vec<SymbolInformation>> { ) -> Vec<DocumentSymbol> {
struct DocumentSymbolWorker { symbols
symbols: Vec<SymbolInformation>, .iter()
.map(|e| {
let rng =
typst_to_lsp::range(e.info.range.clone(), source, position_encoding).raw_range;
DocumentSymbol {
name: e.info.name.clone(),
detail: None,
kind: match e.info.kind {
LexicalKind::Namespace(..) => SymbolKind::NAMESPACE,
LexicalKind::Variable => SymbolKind::VARIABLE,
LexicalKind::Function => SymbolKind::FUNCTION,
LexicalKind::Constant => SymbolKind::CONSTANT,
LexicalKind::Block => unreachable!(),
},
tags: None,
deprecated: None,
range: rng,
selection_range: rng,
// .raw_range,
children: e
.children
.as_ref()
.map(|ch| filter_document_symbols(ch, source, position_encoding)),
}
})
.collect()
}
#[derive(Debug, Clone, Hash)]
pub(crate) enum LexicalKind {
Namespace(i16),
Variable,
Function,
Constant,
Block,
}
#[derive(Debug, Clone, Copy, Hash, Default, PartialEq, Eq)]
pub(crate) enum LexicalScopeGranularity {
#[default]
None,
Block,
}
#[derive(Debug, Clone, Hash)]
pub(crate) struct LexicalInfo {
pub name: String,
pub kind: LexicalKind,
pub range: Range<usize>,
}
#[derive(Debug, Clone, Hash)]
pub(crate) struct LexicalHierarchy {
pub info: LexicalInfo,
pub children: Option<comemo::Prehashed<EcoVec<LexicalHierarchy>>>,
}
pub(crate) fn get_lexical_hierarchy(
source: Source,
g: LexicalScopeGranularity,
) -> Option<EcoVec<LexicalHierarchy>> {
fn symbreak(sym: LexicalInfo, curr: EcoVec<LexicalHierarchy>) -> LexicalHierarchy {
LexicalHierarchy {
info: sym,
children: if curr.is_empty() {
None
} else {
Some(comemo::Prehashed::new(curr))
},
}
} }
impl DocumentSymbolWorker { #[derive(Default)]
/// Get all symbols for a node recursively. struct LexicalHierarchyWorker {
pub fn get_symbols<'a>( g: LexicalScopeGranularity,
&mut self, stack: Vec<(LexicalInfo, EcoVec<LexicalHierarchy>)>,
node: LinkedNode<'a>, }
source: &'a Source,
uri: &'a Url,
position_encoding: PositionEncoding,
) -> anyhow::Result<()> {
let own_symbol = get_ident(&node, source, uri, position_encoding)?;
for child in node.children() { impl LexicalHierarchyWorker {
self.get_symbols(child, source, uri, position_encoding)?; fn symbreak(&mut self) {
} let (symbol, children) = self.stack.pop().unwrap();
let current = &mut self.stack.last_mut().unwrap().1;
current.push(symbreak(symbol, children));
}
/// Get all symbols for a node recursively.
fn get_symbols(&mut self, node: LinkedNode) -> anyhow::Result<()> {
let own_symbol = get_ident(&node, self.g)?;
if let Some(symbol) = own_symbol { if let Some(symbol) = own_symbol {
self.symbols.push(symbol); if let LexicalKind::Namespace(level) = symbol.kind {
'heading_break: while let Some((w, _)) = self.stack.last() {
match w.kind {
LexicalKind::Namespace(l) if l < level => break 'heading_break,
LexicalKind::Block => break 'heading_break,
_ if self.stack.len() <= 1 => break 'heading_break,
_ => {}
}
self.symbreak();
}
}
let is_heading = matches!(symbol.kind, LexicalKind::Namespace(..));
self.stack.push((symbol, eco_vec![]));
let stack_height = self.stack.len();
for child in node.children() {
self.get_symbols(child)?;
}
if is_heading {
while stack_height < self.stack.len() {
self.symbreak();
}
} else {
while stack_height <= self.stack.len() {
self.symbreak();
}
}
} else {
for child in node.children() {
self.get_symbols(child)?;
}
} }
Ok(()) Ok(())
@@ -58,29 +168,21 @@ pub(crate) fn get_document_symbols(
#[allow(deprecated)] #[allow(deprecated)]
fn get_ident( fn get_ident(
node: &LinkedNode, node: &LinkedNode,
source: &Source, g: LexicalScopeGranularity,
uri: &Url, ) -> anyhow::Result<Option<LexicalInfo>> {
position_encoding: PositionEncoding, let (name, kind) = match node.kind() {
) -> anyhow::Result<Option<SymbolInformation>> {
match node.kind() {
SyntaxKind::Label => { SyntaxKind::Label => {
let ast_node = node let ast_node = node
.cast::<ast::Label>() .cast::<ast::Label>()
.ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?; .ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?;
let name = ast_node.get().to_string(); let name = ast_node.get().to_string();
let symbol = SymbolInformation {
name, (name, LexicalKind::Constant)
kind: SymbolKind::CONSTANT, }
tags: None, SyntaxKind::CodeBlock | SyntaxKind::ContentBlock
deprecated: None, // do not use, deprecated, use `tags` instead if LexicalScopeGranularity::None != g =>
location: LspLocation { {
uri: uri.clone(), (String::new(), LexicalKind::Block)
range: typst_to_lsp::range(node.range(), source, position_encoding)
.raw_range,
},
container_name: None,
};
Ok(Some(symbol))
} }
SyntaxKind::Ident => { SyntaxKind::Ident => {
let ast_node = node let ast_node = node
@@ -92,7 +194,7 @@ pub(crate) fn get_document_symbols(
}; };
let kind = match parent.kind() { let kind = match parent.kind() {
// for variable definitions, the Let binding holds an Ident // for variable definitions, the Let binding holds an Ident
SyntaxKind::LetBinding => SymbolKind::VARIABLE, SyntaxKind::LetBinding => LexicalKind::Variable,
// for function definitions, the Let binding holds a Closure which holds the // for function definitions, the Let binding holds a Closure which holds the
// Ident // Ident
SyntaxKind::Closure => { SyntaxKind::Closure => {
@@ -100,25 +202,14 @@ pub(crate) fn get_document_symbols(
return Ok(None); return Ok(None);
}; };
match grand_parent.kind() { match grand_parent.kind() {
SyntaxKind::LetBinding => SymbolKind::FUNCTION, SyntaxKind::LetBinding => LexicalKind::Function,
_ => return Ok(None), _ => return Ok(None),
} }
} }
_ => return Ok(None), _ => return Ok(None),
}; };
let symbol = SymbolInformation {
name, (name, kind)
kind,
tags: None,
deprecated: None, // do not use, deprecated, use `tags` instead
location: LspLocation {
uri: uri.clone(),
range: typst_to_lsp::range(node.range(), source, position_encoding)
.raw_range,
},
container_name: None,
};
Ok(Some(symbol))
} }
SyntaxKind::Markup => { SyntaxKind::Markup => {
let name = node.get().to_owned().into_text().to_string(); let name = node.get().to_owned().into_text().to_string();
@@ -129,34 +220,114 @@ pub(crate) fn get_document_symbols(
return Ok(None); return Ok(None);
}; };
let kind = match parent.kind() { let kind = match parent.kind() {
SyntaxKind::Heading => SymbolKind::NAMESPACE, SyntaxKind::Heading => LexicalKind::Namespace(
parent.cast::<ast::Heading>().unwrap().level().get() as i16,
),
_ => return Ok(None), _ => return Ok(None),
}; };
let symbol = SymbolInformation {
name, (name, kind)
kind,
tags: None,
deprecated: None, // do not use, deprecated, use `tags` instead
location: LspLocation {
uri: uri.clone(),
range: typst_to_lsp::range(node.range(), source, position_encoding)
.raw_range,
},
container_name: None,
};
Ok(Some(symbol))
} }
_ => Ok(None), _ => return Ok(None),
} };
Ok(Some(LexicalInfo {
name,
kind,
range: node.range(),
}))
} }
let root = LinkedNode::new(source.root()); let root = LinkedNode::new(source.root());
let mut worker = DocumentSymbolWorker { symbols: vec![] }; let mut worker = LexicalHierarchyWorker {
g,
..LexicalHierarchyWorker::default()
};
worker.stack.push((
LexicalInfo {
name: "deadbeef".to_string(),
kind: LexicalKind::Namespace(-1),
range: 0..0,
},
eco_vec![],
));
let res = worker.get_symbols(root).ok();
let res = worker while worker.stack.len() > 1 {
.get_symbols(root, &source, &uri, position_encoding) worker.symbreak();
.ok(); }
res.map(|_| worker.stack.pop().unwrap().1)
res.map(|_| worker.symbols) }
#[cfg(test)]
mod tests {
use insta::assert_snapshot;
use super::*;
use crate::tests::*;
#[test]
fn test_get_document_symbols() {
run_with_source(
r#"
= Heading 1
#let a = 1;
== Heading 2
#let b = 1;
= Heading 3
#let c = 1;
#let d = {
#let e = 1;
0
}
"#,
|world, path| {
let request = DocumentSymbolRequest { path };
let result = request.request(world, PositionEncoding::Utf16);
assert_snapshot!(JsonRepr::new_redacted(result.unwrap(), &REDACT_LOC), @r###"
[
{
"children": [
{
"kind": 13,
"name": "a"
},
{
"children": [
{
"kind": 13,
"name": "b"
}
],
"kind": 3,
"name": "Heading 2"
}
],
"kind": 3,
"name": "Heading 1"
},
{
"children": [
{
"kind": 13,
"name": "c"
},
{
"kind": 13,
"name": "d"
},
{
"kind": 13,
"name": "e"
}
],
"kind": 3,
"name": "Heading 3"
}
]
"###);
},
);
}
} }

View file

@@ -0,0 +1,38 @@
use crate::{get_lexical_hierarchy, prelude::*, LexicalScopeGranularity};
#[derive(Debug, Clone)]
pub struct FoldingRangeRequest {
pub path: PathBuf,
}
impl FoldingRangeRequest {
pub fn request(
self,
world: &TypstSystemWorld,
position_encoding: PositionEncoding,
) -> Option<Vec<FoldingRange>> {
let source = get_suitable_source_in_workspace(world, &self.path).ok()?;
let symbols = get_lexical_hierarchy(source, LexicalScopeGranularity::Block)?;
let _ = symbols;
let _ = position_encoding;
None
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::tests::*;
#[test]
fn test_folding_range_request() {
run_with_source("let a = 1;", |world, path| {
let request = FoldingRangeRequest { path };
let result = request.request(world, PositionEncoding::Utf16);
assert_eq!(result, None);
});
}
}

View file

@@ -18,6 +18,8 @@ pub(crate) mod hover;
pub use hover::*; pub use hover::*;
pub(crate) mod completion; pub(crate) mod completion;
pub use completion::*; pub use completion::*;
pub(crate) mod folding_range;
pub use folding_range::*;
pub(crate) mod selection_range; pub(crate) mod selection_range;
pub use selection_range::*; pub use selection_range::*;
@@ -25,3 +27,106 @@ pub mod lsp_typst_boundary;
pub use lsp_typst_boundary::*; pub use lsp_typst_boundary::*;
mod prelude; mod prelude;
#[cfg(test)]
mod tests {
use core::fmt;
use std::{
collections::HashSet,
path::{Path, PathBuf},
};
use once_cell::sync::Lazy;
use serde::Serialize;
use serde_json::{ser::PrettyFormatter, Serializer, Value};
use typst_ts_compiler::ShadowApiExt;
pub use typst_ts_compiler::TypstSystemWorld;
use typst_ts_core::{config::CompileOpts, Bytes};
pub fn run_with_source<T>(
source: &str,
f: impl FnOnce(&mut TypstSystemWorld, PathBuf) -> T,
) -> T {
let root = if cfg!(windows) {
PathBuf::from("C:\\")
} else {
PathBuf::from("/")
};
let mut world = TypstSystemWorld::new(CompileOpts {
root_dir: root.clone(),
..Default::default()
})
.unwrap();
let pw = &root.join(Path::new("/main.typ"));
world
.with_shadow_file(pw, Bytes::from(source.as_bytes()), move |e| {
Ok(f(e, pw.to_owned()))
})
.unwrap()
}
// pub static REDACT_URI: Lazy<RedactFields> = Lazy::new(||
// RedactFields::from_iter(["uri"]));
pub static REDACT_LOC: Lazy<RedactFields> =
Lazy::new(|| RedactFields::from_iter(["location", "range", "selectionRange"]));
pub struct JsonRepr(Value);
impl JsonRepr {
// pub fn new(v: impl serde::Serialize) -> Self {
// let s = serde_json::to_value(v).unwrap();
// Self(REDACT_URI.redact(s))
// }
pub fn new_redacted(v: impl serde::Serialize, rm: &RedactFields) -> Self {
let s = serde_json::to_value(v).unwrap();
Self(rm.redact(s))
}
}
impl fmt::Display for JsonRepr {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let w = std::io::BufWriter::new(Vec::new());
let mut ser = Serializer::with_formatter(w, PrettyFormatter::with_indent(b" "));
self.0.serialize(&mut ser).unwrap();
f.write_str(&String::from_utf8(ser.into_inner().into_inner().unwrap()).unwrap())
}
}
pub trait Redact {
fn redact(&self, v: Value) -> Value;
}
pub struct RedactFields(HashSet<&'static str>);
impl FromIterator<&'static str> for RedactFields {
fn from_iter<T: IntoIterator<Item = &'static str>>(iter: T) -> Self {
Self(iter.into_iter().collect())
}
}
impl Redact for RedactFields {
fn redact(&self, v: Value) -> Value {
match v {
Value::Object(mut m) => {
for (_, v) in m.iter_mut() {
*v = self.redact(v.clone());
}
for k in self.0.iter() {
m.remove(*k);
}
Value::Object(m)
}
Value::Array(mut a) => {
for v in a.iter_mut() {
*v = self.redact(v.clone());
}
Value::Array(a)
}
Value::String(s) => Value::String(s),
v => v,
}
}
}
}

View file

@@ -9,10 +9,11 @@ pub use anyhow::anyhow;
pub use itertools::{Format, Itertools}; pub use itertools::{Format, Itertools};
pub use log::{error, trace}; pub use log::{error, trace};
pub use tower_lsp::lsp_types::{ pub use tower_lsp::lsp_types::{
CompletionResponse, DiagnosticRelatedInformation, DocumentSymbolResponse, Documentation, Hover, CompletionResponse, DiagnosticRelatedInformation, DocumentSymbol, DocumentSymbolResponse,
Location as LspLocation, MarkupContent, MarkupKind, Position as LspPosition, SelectionRange, Documentation, FoldingRange, Hover, Location as LspLocation, MarkupContent, MarkupKind,
SemanticTokens, SemanticTokensDelta, SemanticTokensFullDeltaResult, SemanticTokensResult, Position as LspPosition, SelectionRange, SemanticTokens, SemanticTokensDelta,
SignatureHelp, SignatureInformation, SymbolInformation, SymbolKind, Url, SemanticTokensFullDeltaResult, SemanticTokensResult, SignatureHelp, SignatureInformation,
SymbolInformation, SymbolKind, Url,
}; };
pub use typst::diag::{EcoString, FileError, FileResult, Tracepoint}; pub use typst::diag::{EcoString, FileError, FileResult, Tracepoint};
pub use typst::foundations::{Func, ParamInfo, Value}; pub use typst::foundations::{Func, ParamInfo, Value};

View file

@@ -1,7 +1,7 @@
use typst_ts_compiler::NotifyApi; use typst_ts_compiler::NotifyApi;
use crate::document_symbol::get_document_symbols; use crate::document_symbol::get_lexical_hierarchy;
use crate::prelude::*; use crate::{prelude::*, LexicalHierarchy, LexicalKind, LexicalScopeGranularity};
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct SymbolRequest { pub struct SymbolRequest {
@@ -23,11 +23,12 @@ impl SymbolRequest {
return; return;
}; };
let uri = Url::from_file_path(path).unwrap(); let uri = Url::from_file_path(path).unwrap();
let res = get_document_symbols(source, uri, position_encoding).and_then(|symbols| { let res = get_lexical_hierarchy(source.clone(), LexicalScopeGranularity::None)
self.pattern .and_then(|symbols| {
.as_ref() self.pattern.as_ref().map(|pattern| {
.map(|pattern| filter_document_symbols(symbols, pattern)) filter_document_symbols(&symbols, pattern, &source, &uri, position_encoding)
}); })
});
if let Some(mut res) = res { if let Some(mut res) = res {
symbols.append(&mut res) symbols.append(&mut res)
@@ -38,12 +39,42 @@ impl SymbolRequest {
} }
} }
#[allow(deprecated)]
fn filter_document_symbols( fn filter_document_symbols(
symbols: Vec<SymbolInformation>, symbols: &[LexicalHierarchy],
query_string: &str, query_string: &str,
source: &Source,
uri: &Url,
position_encoding: PositionEncoding,
) -> Vec<SymbolInformation> { ) -> Vec<SymbolInformation> {
symbols symbols
.into_iter() .iter()
.filter(|e| e.name.contains(query_string)) .flat_map(|e| {
[e].into_iter()
.chain(e.children.as_deref().into_iter().flatten())
})
.filter(|e| e.info.name.contains(query_string))
.map(|e| {
let rng =
typst_to_lsp::range(e.info.range.clone(), source, position_encoding).raw_range;
SymbolInformation {
name: e.info.name.clone(),
kind: match e.info.kind {
LexicalKind::Namespace(..) => SymbolKind::NAMESPACE,
LexicalKind::Variable => SymbolKind::VARIABLE,
LexicalKind::Function => SymbolKind::FUNCTION,
LexicalKind::Constant => SymbolKind::CONSTANT,
LexicalKind::Block => unreachable!(),
},
tags: None,
deprecated: None,
location: LspLocation {
uri: uri.clone(),
range: rng,
},
container_name: None,
}
})
.collect() .collect()
} }

View file

@@ -34,6 +34,7 @@ typst-assets = { workspace = true, features = ["fonts"] }
typst-ts-core = { version = "0.4.2-rc6", default-features = false, features = [ typst-ts-core = { version = "0.4.2-rc6", default-features = false, features = [
"flat-vector", "flat-vector",
"vector-bbox", "vector-bbox",
"no-content-hint",
] } ] }
typst-ts-compiler.workspace = true typst-ts-compiler.workspace = true
typst-preview.workspace = true typst-preview.workspace = true

View file

@@ -12,7 +12,7 @@ use tinymist_query::{
}; };
use tokio::sync::{broadcast, mpsc, watch, Mutex, RwLock}; use tokio::sync::{broadcast, mpsc, watch, Mutex, RwLock};
use tower_lsp::lsp_types::{ use tower_lsp::lsp_types::{
CompletionResponse, DocumentSymbolResponse, Hover, SelectionRange, CompletionResponse, DocumentSymbolResponse, FoldingRange, Hover, SelectionRange,
SemanticTokensFullDeltaResult, SemanticTokensResult, SignatureHelp, SymbolInformation, SemanticTokensFullDeltaResult, SemanticTokensResult, SignatureHelp, SymbolInformation,
TextDocumentContentChangeEvent, Url, TextDocumentContentChangeEvent, Url,
}; };
@@ -300,6 +300,7 @@ pub enum CompilerQueryRequest {
Symbol(tinymist_query::SymbolRequest), Symbol(tinymist_query::SymbolRequest),
SemanticTokensFull(tinymist_query::SemanticTokensFullRequest), SemanticTokensFull(tinymist_query::SemanticTokensFullRequest),
SemanticTokensDelta(tinymist_query::SemanticTokensDeltaRequest), SemanticTokensDelta(tinymist_query::SemanticTokensDeltaRequest),
FoldingRange(tinymist_query::FoldingRangeRequest),
SelectionRange(tinymist_query::SelectionRangeRequest), SelectionRange(tinymist_query::SelectionRangeRequest),
} }
@@ -313,6 +314,7 @@ pub enum CompilerQueryResponse {
Symbol(Option<Vec<SymbolInformation>>), Symbol(Option<Vec<SymbolInformation>>),
SemanticTokensFull(Option<SemanticTokensResult>), SemanticTokensFull(Option<SemanticTokensResult>),
SemanticTokensDelta(Option<SemanticTokensFullDeltaResult>), SemanticTokensDelta(Option<SemanticTokensFullDeltaResult>),
FoldingRange(Option<Vec<FoldingRange>>),
SelectionRange(Option<Vec<SelectionRange>>), SelectionRange(Option<Vec<SelectionRange>>),
} }
@@ -681,6 +683,7 @@ impl<H: CompilationHandle> CompileNode<H> {
SignatureHelp(req) => query_world!(self, SignatureHelp, req), SignatureHelp(req) => query_world!(self, SignatureHelp, req),
DocumentSymbol(req) => query_world!(self, DocumentSymbol, req), DocumentSymbol(req) => query_world!(self, DocumentSymbol, req),
Symbol(req) => query_world!(self, Symbol, req), Symbol(req) => query_world!(self, Symbol, req),
FoldingRange(req) => query_world!(self, FoldingRange, req),
SelectionRange(req) => query_world!(self, SelectionRange, req), SelectionRange(req) => query_world!(self, SelectionRange, req),
CompilerQueryRequest::SemanticTokensDelta(..) CompilerQueryRequest::SemanticTokensDelta(..)
| CompilerQueryRequest::SemanticTokensFull(..) => unreachable!(), | CompilerQueryRequest::SemanticTokensFull(..) => unreachable!(),

View file

@@ -16,9 +16,9 @@ use once_cell::sync::OnceCell;
use serde_json::Value as JsonValue; use serde_json::Value as JsonValue;
use tinymist_query::{ use tinymist_query::{
get_semantic_tokens_options, get_semantic_tokens_registration, get_semantic_tokens_options, get_semantic_tokens_registration,
get_semantic_tokens_unregistration, CompletionRequest, DocumentSymbolRequest, HoverRequest, get_semantic_tokens_unregistration, CompletionRequest, DocumentSymbolRequest,
PositionEncoding, SelectionRangeRequest, SemanticTokensDeltaRequest, SemanticTokensFullRequest, FoldingRangeRequest, HoverRequest, PositionEncoding, SelectionRangeRequest,
SignatureHelpRequest, SymbolRequest, SemanticTokensDeltaRequest, SemanticTokensFullRequest, SignatureHelpRequest, SymbolRequest,
}; };
use anyhow::bail; use anyhow::bail;
@@ -556,6 +556,16 @@ impl LanguageServer for TypstServer {
run_query!(self, Symbol, SymbolRequest { pattern }) run_query!(self, Symbol, SymbolRequest { pattern })
} }
async fn folding_range(
&self,
params: FoldingRangeParams,
) -> jsonrpc::Result<Option<Vec<FoldingRange>>> {
let uri = params.text_document.uri;
let path = uri.to_file_path().unwrap();
run_query!(self, FoldingRange, FoldingRangeRequest { path })
}
async fn selection_range( async fn selection_range(
&self, &self,
params: SelectionRangeParams, params: SelectionRangeParams,