feat: create query crate

This commit is contained in:
Myriad-Dreamin 2024-03-07 13:35:45 +08:00
parent 7e9bddb763
commit 9af8eb4b52
29 changed files with 1312 additions and 1341 deletions

28
Cargo.lock generated
View file

@ -3366,27 +3366,43 @@ dependencies = [
"env_logger",
"futures",
"itertools 0.12.1",
"lazy_static",
"log",
"once_cell",
"parking_lot",
"percent-encoding",
"regex",
"serde",
"serde_json",
"strum",
"thiserror",
"tinymist-query",
"tokio",
"tower-lsp",
"typst",
"typst-assets",
"typst-ide",
"typst-pdf",
"typst-preview",
"typst-ts-compiler",
"typst-ts-core",
]
[[package]]
name = "tinymist-query"
version = "0.1.0"
dependencies = [
"anyhow",
"comemo",
"itertools 0.12.1",
"lazy_static",
"log",
"parking_lot",
"regex",
"serde",
"serde_json",
"strum",
"tower-lsp",
"typst",
"typst-ide",
"typst-ts-compiler",
"typst-ts-core",
]
[[package]]
name = "tinystr"
version = "0.7.5"

View file

@ -0,0 +1,38 @@
[package]
name = "tinymist-query"
description = "Language queries for tinymist."
categories = ["compilers", "command-line-utilities"]
keywords = ["api", "language", "typst"]
authors.workspace = true
version.workspace = true
license.workspace = true
edition.workspace = true
homepage.workspace = true
repository.workspace = true
[dependencies]
anyhow.workspace = true
comemo.workspace = true
regex.workspace = true
itertools.workspace = true
lazy_static.workspace = true
strum.workspace = true
log.workspace = true
serde.workspace = true
serde_json.workspace = true
parking_lot.workspace = true
typst.workspace = true
typst-ide.workspace = true
typst-ts-core = { version = "0.4.2-rc6", default-features = false, features = [
"flat-vector",
"vector-bbox",
] }
typst-ts-compiler.workspace = true
tower-lsp.workspace = true
# [lints]
# workspace = true

View file

@ -0,0 +1,27 @@
use crate::prelude::*;
/// A request for code completions at a cursor position in a file.
#[derive(Debug, Clone)]
pub struct CompletionRequest {
    /// Path of the file to complete in (resolved against the workspace root).
    pub path: PathBuf,
    /// Cursor position, interpreted using `position_encoding`.
    pub position: LspPosition,
    /// The position encoding negotiated with the LSP client.
    pub position_encoding: PositionEncoding,
    /// Forwarded to `typst_ide::autocomplete`; marks the request as
    /// explicitly invoked rather than triggered while typing.
    pub explicit: bool,
}
/// Computes completions for `req`, using `doc` (the last compiled document,
/// if any) as context. Returns `None` when the source cannot be loaded or
/// `typst_ide` produces no completions.
pub fn completion(
    world: &TypstSystemWorld,
    doc: Option<Arc<TypstDocument>>,
    req: CompletionRequest,
) -> Option<CompletionResponse> {
    let source = get_suitable_source_in_workspace(world, &req.path).ok()?;
    let cursor = lsp_to_typst::position_to_offset(req.position, req.position_encoding, &source);
    let (completion_start, completions) =
        typst_ide::autocomplete(world, doc.as_deref(), &source, cursor, req.explicit)?;
    // The text to replace spans from the completion's start up to the cursor.
    let start_position =
        typst_to_lsp::offset_to_position(completion_start, req.position_encoding, &source);
    let replace_range = LspRawRange::new(start_position, req.position);
    Some(typst_to_lsp::completions(&completions, replace_range).into())
}

View file

@ -0,0 +1,143 @@
use crate::prelude::*;
pub type DiagnosticsMap = HashMap<Url, Vec<LspDiagnostic>>;
pub fn convert_diagnostics<'a>(
project: &TypstSystemWorld,
errors: impl IntoIterator<Item = &'a TypstDiagnostic>,
position_encoding: PositionEncoding,
) -> DiagnosticsMap {
errors
.into_iter()
.flat_map(|error| {
convert_diagnostic(project, error, position_encoding)
.map_err(move |conversion_err| {
error!("could not convert Typst error to diagnostic: {conversion_err:?} error to convert: {error:?}");
})
})
.collect::<Vec<_>>()
.into_iter()
.into_group_map()
}
/// Converts one Typst diagnostic into an `(uri, LspDiagnostic)` pair.
///
/// When neither the diagnostic nor its trace carries a resolvable span, the
/// diagnostic is attached to the workspace root with an empty range.
fn convert_diagnostic(
    project: &TypstSystemWorld,
    typst_diagnostic: &TypstDiagnostic,
    position_encoding: PositionEncoding,
) -> anyhow::Result<(Url, LspDiagnostic)> {
    let (uri, lsp_range) = match diagnostic_span_id(typst_diagnostic) {
        Some((id, span)) => {
            let uri = Url::from_file_path(project.path_for_id(id)?).unwrap();
            let source = project.source(id)?;
            let range = diagnostic_range(&source, span, position_encoding).raw_range;
            (uri, range)
        }
        None => (
            Url::from_file_path(project.root.clone()).unwrap(),
            LspRawRange::default(),
        ),
    };
    let lsp_severity = diagnostic_severity(typst_diagnostic.severity);
    // Message is the Typst message followed by any hints, each on its own
    // "Hint:" paragraph.
    let typst_message = &typst_diagnostic.message;
    let typst_hints = &typst_diagnostic.hints;
    let lsp_message = format!("{typst_message}{}", diagnostic_hints(typst_hints));
    let tracepoints = diagnostic_related_information(project, typst_diagnostic, position_encoding)?;
    Ok((
        uri,
        LspDiagnostic {
            range: lsp_range,
            severity: Some(lsp_severity),
            message: lsp_message,
            source: Some("typst".to_owned()),
            related_information: Some(tracepoints),
            ..Default::default()
        },
    ))
}
/// Converts one tracepoint into LSP "related information", or `Ok(None)` when
/// the tracepoint's span has no file id or no resolvable range.
fn tracepoint_to_relatedinformation(
    project: &TypstSystemWorld,
    tracepoint: &Spanned<Tracepoint>,
    position_encoding: PositionEncoding,
) -> anyhow::Result<Option<DiagnosticRelatedInformation>> {
    let Some(id) = tracepoint.span.id() else {
        return Ok(None);
    };
    let uri = Url::from_file_path(project.path_for_id(id)?).unwrap();
    let source = project.source(id)?;
    let Some(typst_range) = source.range(tracepoint.span) else {
        return Ok(None);
    };
    let lsp_range = typst_to_lsp::range(typst_range, &source, position_encoding);
    Ok(Some(DiagnosticRelatedInformation {
        location: LspLocation {
            uri,
            range: lsp_range.raw_range,
        },
        message: tracepoint.v.to_string(),
    }))
}
/// Collects related information for every tracepoint in the diagnostic's
/// trace, skipping tracepoints that resolve to nothing and propagating the
/// first conversion error.
fn diagnostic_related_information(
    project: &TypstSystemWorld,
    typst_diagnostic: &TypstDiagnostic,
    position_encoding: PositionEncoding,
) -> anyhow::Result<Vec<DiagnosticRelatedInformation>> {
    typst_diagnostic
        .trace
        .iter()
        .map(|tracepoint| tracepoint_to_relatedinformation(project, tracepoint, position_encoding))
        // Result<Option<T>> -> Option<Result<T>>: drop the Ok(None) entries,
        // keep errors so `collect` short-circuits on the first failure.
        .filter_map(Result::transpose)
        .collect()
}
/// Finds the first span with a file id: the diagnostic's own span first, then
/// each tracepoint's span in order.
fn diagnostic_span_id(typst_diagnostic: &TypstDiagnostic) -> Option<(FileId, TypstSpan)> {
    let candidates = iter::once(typst_diagnostic.span)
        .chain(typst_diagnostic.trace.iter().map(|trace| trace.span));
    for span in candidates {
        if let Some(id) = span.id() {
            return Some((id, span));
        }
    }
    None
}
/// Resolves a span to an LSP range within `source`.
///
/// Due to #241 and maybe typst/typst#2035, we sometimes fail to find the span.
/// In that case, we use a default (zero) range as a better alternative to
/// panicking.
///
/// This may have been fixed after Typst 0.7.0, but it's still nice to avoid
/// panics in case something similar reappears.
fn diagnostic_range(
    source: &Source,
    typst_span: TypstSpan,
    position_encoding: PositionEncoding,
) -> LspRange {
    if let Some(node) = source.find(typst_span) {
        typst_to_lsp::range(node.range(), source, position_encoding)
    } else {
        let zero = LspPosition::new(0, 0);
        LspRange::new(LspRawRange::new(zero, zero), position_encoding)
    }
}
/// Maps a Typst diagnostic severity onto the corresponding LSP severity.
/// Typst only distinguishes errors and warnings, so only those two LSP
/// severities are ever produced.
fn diagnostic_severity(typst_severity: TypstSeverity) -> LspSeverity {
    match typst_severity {
        TypstSeverity::Error => LspSeverity::ERROR,
        TypstSeverity::Warning => LspSeverity::WARNING,
    }
}
/// Renders the hints as a lazily-formatted suffix for the diagnostic message:
/// each hint is preceded by a "\n\nHint: " paragraph separator.
fn diagnostic_hints(typst_hints: &[EcoString]) -> Format<impl Iterator<Item = EcoString> + '_> {
    typst_hints
        .iter()
        .flat_map(|hint| [EcoString::from("\n\nHint: "), hint.clone()])
        .format("")
}

View file

@ -0,0 +1,160 @@
use crate::prelude::*;
/// A request for the document symbols (labels, bindings, headings) of a file.
#[derive(Debug, Clone)]
pub struct DocumentSymbolRequest {
    /// Path of the file to list symbols for.
    pub path: PathBuf,
    /// The position encoding negotiated with the LSP client.
    pub position_encoding: PositionEncoding,
}
/// Handles a document-symbol request by loading the source and delegating to
/// the (memoized) symbol collector. Always produces the flat response form.
pub fn document_symbol(
    world: &TypstSystemWorld,
    req: DocumentSymbolRequest,
) -> Option<DocumentSymbolResponse> {
    let source = get_suitable_source_in_workspace(world, &req.path).ok()?;
    let uri = Url::from_file_path(req.path).unwrap();
    get_document_symbols(source, uri, req.position_encoding).map(DocumentSymbolResponse::Flat)
}
/// Collects flat `SymbolInformation` for every symbol-bearing node in
/// `source`: labels, `let`-bound variables and functions, and heading markup.
/// Returns `None` when the recursive walk fails.
///
/// Memoized via `comemo`, so repeated queries against an unchanged source are
/// cheap.
#[comemo::memoize]
pub(crate) fn get_document_symbols(
    source: Source,
    uri: Url,
    position_encoding: PositionEncoding,
) -> Option<Vec<SymbolInformation>> {
    // Accumulates symbols during the recursive syntax-tree walk.
    struct DocumentSymbolWorker {
        symbols: Vec<SymbolInformation>,
    }
    impl DocumentSymbolWorker {
        /// Get all symbols for a node recursively.
        ///
        /// Note: children are pushed before the node's own symbol, so the
        /// resulting list is in post-order.
        pub fn get_symbols<'a>(
            &mut self,
            node: LinkedNode<'a>,
            source: &'a Source,
            uri: &'a Url,
            position_encoding: PositionEncoding,
        ) -> anyhow::Result<()> {
            let own_symbol = get_ident(&node, source, uri, position_encoding)?;
            for child in node.children() {
                self.get_symbols(child, source, uri, position_encoding)?;
            }
            if let Some(symbol) = own_symbol {
                self.symbols.push(symbol);
            }
            Ok(())
        }
    }
    /// Get symbol for a leaf node of a valid type, or `None` if the node is an
    /// invalid type.
    #[allow(deprecated)]
    fn get_ident(
        node: &LinkedNode,
        source: &Source,
        uri: &Url,
        position_encoding: PositionEncoding,
    ) -> anyhow::Result<Option<SymbolInformation>> {
        match node.kind() {
            // `<label>` — reported as a constant.
            SyntaxKind::Label => {
                let ast_node = node
                    .cast::<ast::Label>()
                    .ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?;
                let name = ast_node.get().to_string();
                let symbol = SymbolInformation {
                    name,
                    kind: SymbolKind::CONSTANT,
                    tags: None,
                    deprecated: None, // do not use, deprecated, use `tags` instead
                    location: LspLocation {
                        uri: uri.clone(),
                        range: typst_to_lsp::range(node.range(), source, position_encoding)
                            .raw_range,
                    },
                    container_name: None,
                };
                Ok(Some(symbol))
            }
            // Identifiers only count when introduced by a `let` binding,
            // either directly (variable) or through a closure (function).
            SyntaxKind::Ident => {
                let ast_node = node
                    .cast::<ast::Ident>()
                    .ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?;
                let name = ast_node.get().to_string();
                let Some(parent) = node.parent() else {
                    return Ok(None);
                };
                let kind = match parent.kind() {
                    // for variable definitions, the Let binding holds an Ident
                    SyntaxKind::LetBinding => SymbolKind::VARIABLE,
                    // for function definitions, the Let binding holds a Closure which holds the
                    // Ident
                    SyntaxKind::Closure => {
                        let Some(grand_parent) = parent.parent() else {
                            return Ok(None);
                        };
                        match grand_parent.kind() {
                            SyntaxKind::LetBinding => SymbolKind::FUNCTION,
                            _ => return Ok(None),
                        }
                    }
                    _ => return Ok(None),
                };
                let symbol = SymbolInformation {
                    name,
                    kind,
                    tags: None,
                    deprecated: None, // do not use, deprecated, use `tags` instead
                    location: LspLocation {
                        uri: uri.clone(),
                        range: typst_to_lsp::range(node.range(), source, position_encoding)
                            .raw_range,
                    },
                    container_name: None,
                };
                Ok(Some(symbol))
            }
            // Markup directly inside a heading becomes a namespace symbol
            // named after the heading's text.
            SyntaxKind::Markup => {
                let name = node.get().to_owned().into_text().to_string();
                if name.is_empty() {
                    return Ok(None);
                }
                let Some(parent) = node.parent() else {
                    return Ok(None);
                };
                let kind = match parent.kind() {
                    SyntaxKind::Heading => SymbolKind::NAMESPACE,
                    _ => return Ok(None),
                };
                let symbol = SymbolInformation {
                    name,
                    kind,
                    tags: None,
                    deprecated: None, // do not use, deprecated, use `tags` instead
                    location: LspLocation {
                        uri: uri.clone(),
                        range: typst_to_lsp::range(node.range(), source, position_encoding)
                            .raw_range,
                    },
                    container_name: None,
                };
                Ok(Some(symbol))
            }
            _ => Ok(None),
        }
    }
    let root = LinkedNode::new(source.root());
    let mut worker = DocumentSymbolWorker { symbols: vec![] };
    let res = worker
        .get_symbols(root, &source, &uri, position_encoding)
        .ok();
    res.map(|_| worker.symbols)
}

View file

@ -0,0 +1,28 @@
use crate::prelude::*;
/// A request for hover information at a cursor position in a file.
#[derive(Debug, Clone)]
pub struct HoverRequest {
    /// Path of the file the cursor is in.
    pub path: PathBuf,
    /// Cursor position, interpreted using `position_encoding`.
    pub position: LspPosition,
    /// The position encoding negotiated with the LSP client.
    pub position_encoding: PositionEncoding,
}
/// Computes hover contents for `req`, using `doc` (the last compiled
/// document, if any) for value-aware tooltips. The reported range is the
/// extent of the syntax leaf under the cursor.
pub fn hover(
    world: &TypstSystemWorld,
    doc: Option<Arc<TypstDocument>>,
    req: HoverRequest,
) -> Option<Hover> {
    let source = get_suitable_source_in_workspace(world, &req.path).ok()?;
    let cursor = lsp_to_typst::position_to_offset(req.position, req.position_encoding, &source);
    let tooltip = typst_ide::tooltip(world, doc.as_deref(), &source, cursor)?;
    let leaf = LinkedNode::new(source.root()).leaf_at(cursor)?;
    let hover_range = typst_to_lsp::range(leaf.range(), &source, req.position_encoding);
    Some(Hover {
        contents: typst_to_lsp::tooltip(&tooltip),
        range: Some(hover_range.raw_range),
    })
}

View file

@ -0,0 +1,27 @@
pub mod analysis;
pub(crate) mod diagnostics;
pub use diagnostics::*;
pub(crate) mod signature_help;
pub use signature_help::*;
pub(crate) mod document_symbol;
pub use document_symbol::*;
pub(crate) mod symbol;
pub use symbol::*;
pub(crate) mod semantic_tokens;
pub use semantic_tokens::*;
pub(crate) mod semantic_tokens_full;
pub use semantic_tokens_full::*;
pub(crate) mod semantic_tokens_delta;
pub use semantic_tokens_delta::*;
pub(crate) mod hover;
pub use hover::*;
pub(crate) mod completion;
pub use completion::*;
pub(crate) mod selection_range;
pub use selection_range::*;
pub mod lsp_typst_boundary;
pub use lsp_typst_boundary::*;
mod prelude;

View file

@ -7,7 +7,7 @@ pub type LspPosition = lsp_types::Position;
/// The interpretation of an `LspCharacterOffset` depends on the
/// `LspPositionEncoding`
pub type LspCharacterOffset = u32;
pub type LspPositionEncoding = crate::config::PositionEncoding;
pub type LspPositionEncoding = PositionEncoding;
/// Byte offset (i.e. UTF-8 bytes) in Typst files, either from the start of the
/// line or the file
pub type TypstOffset = usize;
@ -30,6 +30,33 @@ pub type TypstSeverity = typst::diag::Severity;
pub type LspParamInfo = lsp_types::ParameterInformation;
pub type TypstParamInfo = typst::foundations::ParamInfo;
/// What counts as "1 character" for string indexing. We should always prefer
/// UTF-8, but support UTF-16 as long as it is standard. For more background on
/// encodings and LSP, try ["The bottom emoji breaks rust-analyzer"](https://fasterthanli.me/articles/the-bottom-emoji-breaks-rust-analyzer),
/// a well-written article on the topic.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, Default)]
pub enum PositionEncoding {
    /// "1 character" means "1 UTF-16 code unit"
    ///
    /// This is the only required encoding for LSPs to support, but it's not a
    /// natural one (unless you're working in JS). Prefer UTF-8, and refer
    /// to the article linked in the `PositionEncoding` docs for more
    /// background.
    #[default]
    Utf16,
    /// "1 character" means "1 byte"
    ///
    /// Offsets in this encoding coincide with Rust's native `str` byte
    /// offsets, so no conversion work is needed.
    Utf8,
}
/// Converts our internal encoding enum into the LSP wire representation used
/// during capability negotiation.
impl From<PositionEncoding> for lsp_types::PositionEncodingKind {
    fn from(position_encoding: PositionEncoding) -> Self {
        match position_encoding {
            PositionEncoding::Utf16 => Self::UTF16,
            PositionEncoding::Utf8 => Self::UTF8,
        }
    }
}
/// An LSP range with its associated encoding.
pub struct LspRange {
pub raw_range: LspRawRange,
@ -289,8 +316,7 @@ pub mod typst_to_lsp {
mod test {
use typst::syntax::Source;
use crate::config::PositionEncoding;
use crate::lsp_typst_boundary::lsp_to_typst;
use crate::{lsp_to_typst, PositionEncoding};
use super::*;

View file

@ -0,0 +1,40 @@
pub use std::{
collections::HashMap,
iter,
path::{Path, PathBuf},
sync::Arc,
};
pub use anyhow::anyhow;
pub use itertools::{Format, Itertools};
pub use log::{error, trace};
pub use tower_lsp::lsp_types::{
CompletionResponse, DiagnosticRelatedInformation, DocumentSymbolResponse, Documentation, Hover,
Location as LspLocation, MarkupContent, MarkupKind, Position as LspPosition, SelectionRange,
SemanticTokens, SemanticTokensDelta, SemanticTokensFullDeltaResult, SemanticTokensResult,
SignatureHelp, SignatureInformation, SymbolInformation, SymbolKind, Url,
};
pub use typst::diag::{EcoString, FileError, FileResult, Tracepoint};
pub use typst::foundations::{Func, ParamInfo, Value};
pub use typst::syntax::{
ast::{self, AstNode},
FileId, LinkedNode, Source, Spanned, SyntaxKind, VirtualPath,
};
pub use typst::World;
use typst_ts_compiler::service::WorkspaceProvider;
pub use typst_ts_compiler::TypstSystemWorld;
pub use typst_ts_core::{TypstDocument, TypstFileId};
pub use crate::analysis::analyze::analyze_expr;
pub use crate::lsp_typst_boundary::{
lsp_to_typst, typst_to_lsp, LspDiagnostic, LspRange, LspRawRange, LspSeverity,
PositionEncoding, TypstDiagnostic, TypstSeverity, TypstSpan,
};
/// Loads the source file at `p`, which must live inside the workspace root;
/// paths outside the workspace yield `FileError::NotFound`.
pub fn get_suitable_source_in_workspace(w: &TypstSystemWorld, p: &Path) -> FileResult<Source> {
    // todo: source in packages
    let relative_path = match p.strip_prefix(&w.workspace_root()) {
        Ok(rel) => rel,
        Err(_) => return Err(FileError::NotFound(p.to_owned())),
    };
    w.source(TypstFileId::new(None, VirtualPath::new(relative_path)))
}

View file

@ -0,0 +1,40 @@
use crate::prelude::*;
/// A request for expanding selection ranges at one or more cursor positions.
#[derive(Debug, Clone)]
pub struct SelectionRangeRequest {
    /// Path of the file the cursors are in.
    pub path: PathBuf,
    /// All cursor positions to compute selection ranges for.
    pub positions: Vec<LspPosition>,
    /// The position encoding negotiated with the LSP client.
    pub position_encoding: PositionEncoding,
}
/// Computes a selection-range chain for each requested position. Returns
/// `None` if the source cannot be loaded or any position has no syntax leaf.
pub fn selection_range(
    world: &TypstSystemWorld,
    req: SelectionRangeRequest,
) -> Option<Vec<SelectionRange>> {
    let source = get_suitable_source_in_workspace(world, &req.path).ok()?;
    req.positions
        .into_iter()
        .map(|position| {
            let offset =
                lsp_to_typst::position_to_offset(position, req.position_encoding, &source);
            let leaf = LinkedNode::new(source.root()).leaf_at(offset)?;
            Some(range_for_node(&source, req.position_encoding, &leaf))
        })
        // Option<Vec<_>>: short-circuits to None on the first missing leaf,
        // matching the original `?`-in-loop behavior.
        .collect()
}
/// Builds the `SelectionRange` for `node`, recursing up through its parents
/// so each range's `parent` is the selection range of the enclosing node.
fn range_for_node(
    source: &Source,
    position_encoding: PositionEncoding,
    node: &LinkedNode,
) -> SelectionRange {
    let range = typst_to_lsp::range(node.range(), source, position_encoding);
    SelectionRange {
        range: range.raw_range,
        // Recursion depth is bounded by the syntax tree height.
        parent: node
            .parent()
            .map(|node| Box::new(range_for_node(source, position_encoding, node))),
    }
}

View file

@ -7,12 +7,12 @@ struct CachedTokens {
}
#[derive(Default, Debug)]
pub struct Cache {
pub struct CacheInner {
last_sent: Option<CachedTokens>,
next_id: u64,
}
impl Cache {
impl CacheInner {
pub fn try_take_result(&mut self, id: &str) -> Option<Vec<SemanticToken>> {
let id = id.parse::<u64>().ok()?;
match self.last_sent.take() {

View file

@ -1,4 +1,5 @@
use itertools::Itertools;
use parking_lot::RwLock;
use strum::IntoEnumIterator;
use tower_lsp::lsp_types::{
Registration, SemanticToken, SemanticTokensEdit, SemanticTokensFullOptions,
@ -7,15 +8,14 @@ use tower_lsp::lsp_types::{
use typst::diag::EcoString;
use typst::syntax::{ast, LinkedNode, Source, SyntaxKind};
use crate::actor::typst::CompileCluster;
use crate::config::PositionEncoding;
use crate::PositionEncoding;
use self::delta::token_delta;
use self::modifier_set::ModifierSet;
use self::token_encode::encode_tokens;
use self::typst_tokens::{Modifier, TokenType};
pub use self::delta::Cache as SemanticTokenCache;
pub use self::delta::CacheInner as TokenCacheInner;
mod delta;
mod modifier_set;
@ -58,7 +58,10 @@ pub fn get_semantic_tokens_options() -> SemanticTokensOptions {
}
}
impl CompileCluster {
#[derive(Default)]
pub struct SemanticTokenCache(RwLock<TokenCacheInner>);
impl SemanticTokenCache {
pub fn get_semantic_tokens_full(
&self,
source: &Source,
@ -70,10 +73,7 @@ impl CompileCluster {
let encoded_tokens = encode_tokens(tokens, source, encoding);
let output_tokens = encoded_tokens.map(|(token, _)| token).collect_vec();
let result_id = self
.semantic_tokens_delta_cache
.write()
.cache_result(output_tokens.clone());
let result_id = self.0.write().cache_result(output_tokens.clone());
(output_tokens, result_id)
}
@ -84,10 +84,7 @@ impl CompileCluster {
result_id: &str,
encoding: PositionEncoding,
) -> (Result<Vec<SemanticTokensEdit>, Vec<SemanticToken>>, String) {
let cached = self
.semantic_tokens_delta_cache
.write()
.try_take_result(result_id);
let cached = self.0.write().try_take_result(result_id);
// this call will overwrite the cache, so need to read from cache first
let (tokens, result_id) = self.get_semantic_tokens_full(source, encoding);

View file

@ -0,0 +1,86 @@
use tower_lsp::lsp_types::{Position, SemanticToken};
use typst::diag::EcoString;
use typst::syntax::Source;
use crate::typst_to_lsp;
use crate::PositionEncoding;
use super::Token;
/// Delta-encodes a stream of semantic tokens for the LSP wire format.
///
/// LSP semantic tokens are encoded relative to the previous token's start, so
/// `scan` threads the last emitted position (starting at 0:0) through the
/// iteration. Each item pairs the encoded token with its source text.
pub(super) fn encode_tokens<'a>(
    tokens: impl Iterator<Item = Token> + 'a,
    source: &'a Source,
    encoding: PositionEncoding,
) -> impl Iterator<Item = (SemanticToken, EcoString)> + 'a {
    tokens.scan(Position::new(0, 0), move |last_position, token| {
        let (encoded_token, source_code, position) =
            encode_token(token, last_position, source, encoding);
        *last_position = position;
        Some((encoded_token, source_code))
    })
}
/// Encodes a single token relative to `last_position`, returning the LSP
/// token, the token's source text, and the token's absolute start position
/// (to become the next `last_position`).
fn encode_token(
    token: Token,
    last_position: &Position,
    source: &Source,
    encoding: PositionEncoding,
) -> (SemanticToken, EcoString, Position) {
    let start = typst_to_lsp::offset_to_position(token.offset, encoding, source);
    let delta = last_position.delta(&start);
    let encoded = SemanticToken {
        delta_line: delta.delta_line,
        delta_start: delta.delta_start,
        // Length is measured in the client's negotiated encoding.
        length: token.source.as_str().encoded_len(encoding) as u32,
        token_type: token.token_type as u32,
        token_modifiers_bitset: token.modifiers.bitset(),
    };
    (encoded, token.source, start)
}
/// Extension trait for measuring string length in a given position encoding.
pub trait StrExt {
    /// Length of `self` in code units of `encoding` (bytes for UTF-8,
    /// 16-bit units for UTF-16).
    fn encoded_len(&self, encoding: PositionEncoding) -> usize;
}
impl StrExt for str {
    fn encoded_len(&self, encoding: PositionEncoding) -> usize {
        match encoding {
            PositionEncoding::Utf8 => self.len(),
            // Counting `encode_utf16` units equals summing each char's
            // `len_utf16`.
            PositionEncoding::Utf16 => self.encode_utf16().count(),
        }
    }
}
/// Extension trait for computing LSP semantic-token deltas between positions.
pub trait PositionExt {
    fn delta(&self, to: &Self) -> PositionDelta;
}
impl PositionExt for Position {
    /// Calculates the delta from `self` to `to`. This is in the `SemanticToken`
    /// sense, so the delta's `character` is relative to `self`'s
    /// `character` iff `self` and `to` are on the same line. Otherwise,
    /// it's relative to the start of the line `to` is on.
    ///
    /// NOTE(review): the unchecked subtractions assume `to` is at or after
    /// `self` (tokens visited in document order); a backwards `to` would
    /// underflow — confirm callers uphold this.
    fn delta(&self, to: &Self) -> PositionDelta {
        let line_delta = to.line - self.line;
        let char_delta = if line_delta == 0 {
            to.character - self.character
        } else {
            to.character
        };
        PositionDelta {
            delta_line: line_delta,
            delta_start: char_delta,
        }
    }
}
/// The line/character delta between two positions, in the LSP
/// `SemanticToken` sense (see `PositionExt::delta`).
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Default)]
pub struct PositionDelta {
    /// Lines between the two positions (0 when on the same line).
    pub delta_line: u32,
    /// Characters relative to the previous token's start (same line) or to
    /// the start of the new line.
    pub delta_start: u32,
}

View file

@ -0,0 +1,37 @@
use crate::{prelude::*, SemanticTokenCache};
/// A request for semantic tokens expressed as a delta against a previously
/// delivered result.
#[derive(Debug, Clone)]
pub struct SemanticTokensDeltaRequest {
    /// Path of the file to tokenize.
    pub path: PathBuf,
    /// The `result_id` of the previous response the client still holds.
    pub previous_result_id: String,
    /// The position encoding negotiated with the LSP client.
    pub position_encoding: PositionEncoding,
}
/// Computes semantic tokens as a delta when the cache still holds the
/// previous result; otherwise falls back to a full token set.
pub fn semantic_tokens_delta(
    cache: &SemanticTokenCache,
    source: Source,
    req: SemanticTokensDeltaRequest,
) -> Option<SemanticTokensFullDeltaResult> {
    let (tokens, result_id) = cache.try_semantic_tokens_delta_from_result_id(
        &source,
        &req.previous_result_id,
        req.position_encoding,
    );
    // Ok => the cache produced edits; Err carries the full token set.
    let result: SemanticTokensFullDeltaResult = match tokens {
        Ok(edits) => SemanticTokensDelta {
            result_id: Some(result_id),
            edits,
        }
        .into(),
        Err(data) => SemanticTokens {
            result_id: Some(result_id),
            data,
        }
        .into(),
    };
    Some(result)
}

View file

@ -0,0 +1,23 @@
use crate::{prelude::*, SemanticTokenCache};
/// A request for the full set of semantic tokens of a file.
#[derive(Debug, Clone)]
pub struct SemanticTokensFullRequest {
    /// Path of the file to tokenize.
    pub path: PathBuf,
    /// The position encoding negotiated with the LSP client.
    pub position_encoding: PositionEncoding,
}
/// Computes the full semantic-token set for `source`, caching the result so a
/// later delta request can diff against it.
pub fn semantic_tokens_full(
    cache: &SemanticTokenCache,
    source: Source,
    req: SemanticTokensFullRequest,
) -> Option<SemanticTokensResult> {
    let (data, result_id) = cache.get_semantic_tokens_full(&source, req.position_encoding);
    let tokens = SemanticTokens {
        result_id: Some(result_id),
        data,
    };
    Some(tokens.into())
}

View file

@ -0,0 +1,165 @@
use crate::prelude::*;
/// A request for signature help at a cursor position inside a call's
/// argument list.
#[derive(Debug, Clone)]
pub struct SignatureHelpRequest {
    /// Path of the file the cursor is in.
    pub path: PathBuf,
    /// Cursor position, interpreted using `position_encoding`.
    pub position: LspPosition,
    /// The position encoding negotiated with the LSP client.
    pub position_encoding: PositionEncoding,
}
/// Computes signature help for the function call surrounding the cursor:
/// the signature label with parameter list and return type, the function's
/// docs, and which parameter the cursor is currently on.
pub fn signature_help(
    world: &TypstSystemWorld,
    SignatureHelpRequest {
        path,
        position,
        position_encoding,
    }: SignatureHelpRequest,
) -> Option<SignatureHelp> {
    let source = get_suitable_source_in_workspace(world, &path).ok()?;
    let typst_offset = lsp_to_typst::position_to_offset(position, position_encoding, &source);
    let ast_node = LinkedNode::new(source.root()).leaf_at(typst_offset)?;
    let (callee, callee_node, args) = surrounding_function_syntax(&ast_node)?;
    // `ancestor` is never read afterwards: this loop acts purely as a guard,
    // returning None (via `?`) when the leaf has no enclosing expression.
    let mut ancestor = &ast_node;
    while !ancestor.is::<ast::Expr>() {
        ancestor = ancestor.parent()?;
    }
    // Only offer help for code-mode (hash-prefixed) calls and math idents.
    if !callee.hash() && !matches!(callee, ast::Expr::MathIdent(_)) {
        return None;
    }
    // Evaluate the callee expression and pick the first function value.
    let values = analyze_expr(world, &callee_node);
    let function = values.into_iter().find_map(|v| match v {
        Value::Func(f) => Some(f),
        _ => None,
    })?;
    trace!("got function {function:?}");
    let param_index = param_index_at_leaf(&ast_node, &function, args);
    // Label looks like: `name(p1, p2, ...) -> ret`.
    let label = format!(
        "{}({}){}",
        function.name().unwrap_or("<anonymous closure>"),
        match function.params() {
            Some(params) => params
                .iter()
                .map(typst_to_lsp::param_info_to_label)
                .join(", "),
            None => "".to_owned(),
        },
        match function.returns() {
            Some(returns) => format!("-> {}", typst_to_lsp::cast_info_to_label(returns)),
            None => "".to_owned(),
        }
    );
    let params = function
        .params()
        .unwrap_or_default()
        .iter()
        .map(typst_to_lsp::param_info)
        .collect();
    trace!("got signature info {label} {params:?}");
    let documentation = function.docs().map(markdown_docs);
    let active_parameter = param_index.map(|i| i as u32);
    Some(SignatureHelp {
        signatures: vec![SignatureInformation {
            label,
            documentation,
            parameters: Some(params),
            active_parameter,
        }],
        active_signature: Some(0),
        // Deprecated top-level field; the per-signature field above is set.
        active_parameter: None,
    })
}
/// Finds the function call (or set rule) whose argument list contains `leaf`,
/// returning the callee expression, the callee's syntax node, and the args.
fn surrounding_function_syntax<'b>(
    leaf: &'b LinkedNode,
) -> Option<(ast::Expr<'b>, LinkedNode<'b>, ast::Args<'b>)> {
    // A leaf inside a named argument sits one level deeper; step over it.
    let mut enclosing = leaf.parent()?;
    if enclosing.kind() == SyntaxKind::Named {
        enclosing = enclosing.parent()?;
    }
    let args = enclosing.cast::<ast::Args>()?;
    let call_node = enclosing.parent()?;
    let callee = match call_node.cast::<ast::Expr>()? {
        ast::Expr::FuncCall(call) => call.callee(),
        ast::Expr::Set(set) => set.target(),
        _ => return None,
    };
    Some((callee, call_node.find(callee.span())?, args))
}
/// Determines which of `function`'s parameters the cursor at `leaf` is on,
/// or `None` when it cannot be decided.
fn param_index_at_leaf(leaf: &LinkedNode, function: &Func, args: ast::Args) -> Option<usize> {
    let params = function.params()?;
    let deciding = deciding_syntax(leaf);
    let index = find_param_index(&deciding, params, args)?;
    trace!("got param index {index}");
    Some(index)
}
/// Find the piece of syntax that decides what we're completing: walk left
/// through the leaves until hitting a `(`, `,`, or `:` (or the start).
fn deciding_syntax<'b>(leaf: &'b LinkedNode) -> LinkedNode<'b> {
    let mut node = leaf.clone();
    loop {
        if matches!(
            node.kind(),
            SyntaxKind::LeftParen | SyntaxKind::Comma | SyntaxKind::Colon
        ) {
            return node;
        }
        match node.prev_leaf() {
            Some(prev) => node = prev,
            None => return node,
        }
    }
}
/// Maps the deciding syntax node to a parameter index in `params`, or `None`
/// when the cursor is not at a decidable spot.
fn find_param_index(deciding: &LinkedNode, params: &[ParamInfo], args: ast::Args) -> Option<usize> {
    match deciding.kind() {
        // After colon: "func(param:|)", "func(param: |)".
        SyntaxKind::Colon => {
            // The named parameter is the identifier just before the colon.
            let prev = deciding.prev_leaf()?;
            let param_ident = prev.cast::<ast::Ident>()?;
            params
                .iter()
                .position(|param| param.name == param_ident.as_str())
        }
        // Before: "func(|)", "func(hi|)", "func(12,|)".
        SyntaxKind::Comma | SyntaxKind::LeftParen => {
            let next = deciding.next_leaf();
            let following_param = next.as_ref().and_then(|next| next.cast::<ast::Ident>());
            match following_param {
                // An identifier follows the cursor: treat it as the prefix of
                // a named parameter being typed.
                Some(next) => params
                    .iter()
                    .position(|param| param.named && param.name.starts_with(next.as_str())),
                // Otherwise the cursor is on the next positional parameter:
                // count the positional args already written and pick the
                // matching positional slot.
                None => {
                    let positional_args_so_far = args
                        .items()
                        .filter(|arg| matches!(arg, ast::Arg::Pos(_)))
                        .count();
                    params
                        .iter()
                        .enumerate()
                        .filter(|(_, param)| param.positional)
                        .map(|(i, _)| i)
                        .nth(positional_args_so_far)
                }
            }
        }
        _ => None,
    }
}
/// Wraps a docs string as LSP Markdown documentation.
fn markdown_docs(docs: &str) -> Documentation {
    let content = MarkupContent {
        kind: MarkupKind::Markdown,
        value: docs.to_owned(),
    };
    Documentation::MarkupContent(content)
}

View file

@ -0,0 +1,50 @@
use typst_ts_compiler::NotifyApi;
use crate::document_symbol::get_document_symbols;
use crate::prelude::*;
/// A workspace-symbol request over all files the world depends on.
#[derive(Debug, Clone)]
pub struct SymbolRequest {
    /// Optional substring filter on symbol names; `None` means no filter.
    pub pattern: Option<String>,
    /// The position encoding negotiated with the LSP client.
    pub position_encoding: PositionEncoding,
}
/// Collects workspace symbols from every file the world depends on,
/// optionally filtered by `pattern` (substring match on symbol names).
///
/// Fix: the previous `and_then(... pattern.as_ref().map(...))` produced
/// `None` whenever `pattern` was `None`, so a workspace-symbol query with no
/// filter returned no symbols at all. An absent pattern now matches
/// everything, per the LSP workspace/symbol contract.
pub fn symbol(
    world: &TypstSystemWorld,
    SymbolRequest {
        pattern,
        position_encoding,
    }: SymbolRequest,
) -> Option<Vec<SymbolInformation>> {
    // todo: expose source
    let mut symbols = vec![];
    world.iter_dependencies(&mut |path, _| {
        let Ok(source) = get_suitable_source_in_workspace(world, path) else {
            return;
        };
        let uri = Url::from_file_path(path).unwrap();
        let res = get_document_symbols(source, uri, position_encoding).map(|symbols| {
            match pattern.as_deref() {
                Some(pattern) => filter_document_symbols(symbols, pattern),
                // No pattern: keep all of this file's symbols.
                None => symbols,
            }
        });
        if let Some(mut res) = res {
            symbols.append(&mut res)
        }
    });
    Some(symbols)
}
/// Keeps only symbols whose name contains `query_string`, preserving order.
fn filter_document_symbols(
    mut symbols: Vec<SymbolInformation>,
    query_string: &str,
) -> Vec<SymbolInformation> {
    symbols.retain(|symbol| symbol.name.contains(query_string));
    symbols
}

View file

@ -20,26 +20,22 @@ doc = false
[dependencies]
tinymist-query = { path = "../tinymist-query" }
once_cell.workspace = true
anyhow.workspace = true
comemo.workspace = true
thiserror.workspace = true
tokio.workspace = true
futures.workspace = true
regex.workspace = true
itertools.workspace = true
lazy_static.workspace = true
strum.workspace = true
async-trait.workspace = true
env_logger.workspace = true
log.workspace = true
percent-encoding.workspace = true
serde.workspace = true
serde_json.workspace = true
parking_lot.workspace = true
typst.workspace = true
typst-ide.workspace = true
typst-pdf.workspace = true
typst-assets = { workspace = true, features = ["fonts"] }

View file

@ -11,7 +11,7 @@ use tokio::sync::{
};
use typst_ts_core::TypstDocument;
use crate::config::ExportPdfMode;
use crate::lsp::ExportPdfMode;
#[derive(Debug, Clone)]
pub enum RenderActorRequest {

File diff suppressed because it is too large Load diff

View file

@ -1,221 +0,0 @@
use std::{fmt, path::PathBuf};
use anyhow::bail;
use futures::future::BoxFuture;
use itertools::Itertools;
use serde::Deserialize;
use serde_json::{Map, Value};
use tower_lsp::lsp_types::{self, ConfigurationItem, InitializeParams, PositionEncodingKind};
use crate::ext::InitializeParamsExt;
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum ExperimentalFormatterMode {
#[default]
Off,
On,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum ExportPdfMode {
Never,
#[default]
OnSave,
OnType,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum SemanticTokensMode {
Disable,
#[default]
Enable,
}
pub type Listener<T> = Box<dyn FnMut(&T) -> BoxFuture<anyhow::Result<()>> + Send + Sync>;
const CONFIG_ITEMS: &[&str] = &[
"exportPdf",
"rootPath",
"semanticTokens",
"experimentalFormatterMode",
];
#[derive(Default)]
pub struct Config {
pub export_pdf: ExportPdfMode,
pub root_path: Option<PathBuf>,
pub semantic_tokens: SemanticTokensMode,
pub formatter: ExperimentalFormatterMode,
semantic_tokens_listeners: Vec<Listener<SemanticTokensMode>>,
formatter_listeners: Vec<Listener<ExperimentalFormatterMode>>,
}
impl Config {
pub fn get_items() -> Vec<ConfigurationItem> {
let sections = CONFIG_ITEMS
.iter()
.flat_map(|item| [format!("tinymist.{item}"), item.to_string()]);
sections
.map(|section| ConfigurationItem {
section: Some(section),
..Default::default()
})
.collect()
}
pub fn values_to_map(values: Vec<Value>) -> Map<String, Value> {
let unpaired_values = values
.into_iter()
.tuples()
.map(|(a, b)| if !a.is_null() { a } else { b });
CONFIG_ITEMS
.iter()
.map(|item| item.to_string())
.zip(unpaired_values)
.collect()
}
pub fn listen_semantic_tokens(&mut self, listener: Listener<SemanticTokensMode>) {
self.semantic_tokens_listeners.push(listener);
}
// pub fn listen_formatting(&mut self, listener:
// Listener<ExperimentalFormatterMode>) { self.formatter_listeners.
// push(listener); }
pub async fn update(&mut self, update: &Value) -> anyhow::Result<()> {
if let Value::Object(update) = update {
self.update_by_map(update).await
} else {
bail!("got invalid configuration object {update}")
}
}
pub async fn update_by_map(&mut self, update: &Map<String, Value>) -> anyhow::Result<()> {
let export_pdf = update
.get("exportPdf")
.map(ExportPdfMode::deserialize)
.and_then(Result::ok);
if let Some(export_pdf) = export_pdf {
self.export_pdf = export_pdf;
}
let root_path = update.get("rootPath");
if let Some(root_path) = root_path {
if root_path.is_null() {
self.root_path = None;
}
if let Some(root_path) = root_path.as_str().map(PathBuf::from) {
self.root_path = Some(root_path);
}
}
let semantic_tokens = update
.get("semanticTokens")
.map(SemanticTokensMode::deserialize)
.and_then(Result::ok);
if let Some(semantic_tokens) = semantic_tokens {
for listener in &mut self.semantic_tokens_listeners {
listener(&semantic_tokens).await?;
}
self.semantic_tokens = semantic_tokens;
}
let formatter = update
.get("experimentalFormatterMode")
.map(ExperimentalFormatterMode::deserialize)
.and_then(Result::ok);
if let Some(formatter) = formatter {
for listener in &mut self.formatter_listeners {
listener(&formatter).await?;
}
self.formatter = formatter;
}
Ok(())
}
}
impl fmt::Debug for Config {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("Config")
.field("export_pdf", &self.export_pdf)
.field("formatter", &self.formatter)
.field("semantic_tokens", &self.semantic_tokens)
.field(
"semantic_tokens_listeners",
&format_args!("Vec[len = {}]", self.semantic_tokens_listeners.len()),
)
.field(
"formatter_listeners",
&format_args!("Vec[len = {}]", self.formatter_listeners.len()),
)
.finish()
}
}
/// What counts as "1 character" for string indexing. We should always prefer
/// UTF-8, but support UTF-16 as long as it is standard. For more background on
/// encodings and LSP, try ["The bottom emoji breaks rust-analyzer"](https://fasterthanli.me/articles/the-bottom-emoji-breaks-rust-analyzer),
/// a well-written article on the topic.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, Default)]
pub enum PositionEncoding {
    /// "1 character" means "1 UTF-16 code unit"
    ///
    /// This is the only required encoding for LSPs to support, but it's not a
    /// natural one (unless you're working in JS). Prefer UTF-8, and refer
    /// to the article linked in the `PositionEncoding` docs for more
    /// background.
    #[default]
    Utf16,
    /// "1 character" means "1 byte"
    ///
    /// Preferred: offsets coincide with Rust `str` byte indices.
    Utf8,
}
impl From<PositionEncoding> for lsp_types::PositionEncodingKind {
    /// Maps the internal encoding choice onto the LSP wire constant.
    fn from(position_encoding: PositionEncoding) -> Self {
        if position_encoding == PositionEncoding::Utf8 {
            Self::UTF8
        } else {
            Self::UTF16
        }
    }
}
/// Configuration set at initialization that won't change within a single
/// session
#[derive(Debug)]
pub struct ConstConfig {
    /// Negotiated position encoding used for all LSP positions/ranges.
    pub position_encoding: PositionEncoding,
    /// Whether the client allows dynamic (un)registration of semantic tokens.
    pub supports_semantic_tokens_dynamic_registration: bool,
    /// Whether the client allows dynamic (un)registration of formatting.
    pub supports_document_formatting_dynamic_registration: bool,
    /// Whether the client supports `workspace/configuration` requests.
    pub supports_config_change_registration: bool,
}
impl ConstConfig {
    /// Picks the position encoding for the session: UTF-8 when the client
    /// advertises support for it, otherwise the mandatory UTF-16.
    fn choose_encoding(params: &InitializeParams) -> PositionEncoding {
        let supports_utf8 = params
            .position_encodings()
            .contains(&PositionEncodingKind::UTF8);
        if supports_utf8 {
            PositionEncoding::Utf8
        } else {
            PositionEncoding::Utf16
        }
    }
}
impl From<&InitializeParams> for ConstConfig {
    /// Captures the client capabilities relevant to this server at
    /// initialization time.
    fn from(params: &InitializeParams) -> Self {
        Self {
            position_encoding: Self::choose_encoding(params),
            supports_semantic_tokens_dynamic_registration: params
                .supports_semantic_tokens_dynamic_registration(),
            supports_document_formatting_dynamic_registration: params
                .supports_document_formatting_dynamic_registration(),
            supports_config_change_registration: params.supports_config_change_registration(),
        }
    }
}

View file

@ -1,141 +0,0 @@
use std::ffi::OsStr;
use std::path::PathBuf;
use tower_lsp::lsp_types::{DocumentFormattingClientCapabilities, Url};
use tower_lsp::lsp_types::{
InitializeParams, Position, PositionEncodingKind, SemanticTokensClientCapabilities,
};
use typst::syntax::VirtualPath;
use crate::config::PositionEncoding;
/// Convenience accessors over [`InitializeParams`] client capabilities.
pub trait InitializeParamsExt {
    /// Position encodings the client supports (UTF-16 if unspecified).
    fn position_encodings(&self) -> &[PositionEncodingKind];
    /// Whether `workspace/configuration` is supported.
    fn supports_config_change_registration(&self) -> bool;
    /// The client's semantic-token capabilities, if declared.
    fn semantic_tokens_capabilities(&self) -> Option<&SemanticTokensClientCapabilities>;
    /// The client's document-formatting capabilities, if declared.
    fn document_formatting_capabilities(&self) -> Option<&DocumentFormattingClientCapabilities>;
    /// Whether semantic tokens may be (un)registered dynamically.
    fn supports_semantic_tokens_dynamic_registration(&self) -> bool;
    /// Whether formatting may be (un)registered dynamically.
    fn supports_document_formatting_dynamic_registration(&self) -> bool;
    /// Workspace root directories derived from the initialize request.
    fn root_paths(&self) -> Vec<PathBuf>;
}
static DEFAULT_ENCODING: [PositionEncodingKind; 1] = [PositionEncodingKind::UTF16];
impl InitializeParamsExt for InitializeParams {
    /// Position encodings the client supports; defaults to UTF-16 when the
    /// capability is absent.
    fn position_encodings(&self) -> &[PositionEncodingKind] {
        self.capabilities
            .general
            .as_ref()
            .and_then(|general| general.position_encodings.as_ref())
            .map(|encodings| encodings.as_slice())
            .unwrap_or(&DEFAULT_ENCODING)
    }
    /// Whether the client supports `workspace/configuration` requests.
    fn supports_config_change_registration(&self) -> bool {
        self.capabilities
            .workspace
            .as_ref()
            .and_then(|workspace| workspace.configuration)
            .unwrap_or(false)
    }
    /// The client's semantic-token capabilities, if declared.
    fn semantic_tokens_capabilities(&self) -> Option<&SemanticTokensClientCapabilities> {
        self.capabilities
            .text_document
            .as_ref()?
            .semantic_tokens
            .as_ref()
    }
    /// The client's document-formatting capabilities, if declared.
    fn document_formatting_capabilities(&self) -> Option<&DocumentFormattingClientCapabilities> {
        self.capabilities
            .text_document
            .as_ref()?
            .formatting
            .as_ref()
    }
    /// Whether semantic-token support may be (un)registered dynamically.
    fn supports_semantic_tokens_dynamic_registration(&self) -> bool {
        self.semantic_tokens_capabilities()
            .and_then(|semantic_tokens| semantic_tokens.dynamic_registration)
            .unwrap_or(false)
    }
    /// Whether formatting support may be (un)registered dynamically.
    fn supports_document_formatting_dynamic_registration(&self) -> bool {
        self.document_formatting_capabilities()
            .and_then(|document_format| document_format.dynamic_registration)
            .unwrap_or(false)
    }
    /// Workspace roots as local paths, preferring `workspace_folders`, then
    /// `root_uri`, then the deprecated `root_path`.
    ///
    /// NOTE(review): the `unwrap()`s panic if a client sends a workspace URI
    /// that is not a mappable `file://` URL — consider skipping such entries.
    #[allow(deprecated)] // `self.root_path` is marked as deprecated
    fn root_paths(&self) -> Vec<PathBuf> {
        match self.workspace_folders.as_ref() {
            Some(roots) => roots
                .iter()
                .map(|root| &root.uri)
                .map(Url::to_file_path)
                .collect::<Result<Vec<_>, _>>()
                .unwrap(),
            None => self
                .root_uri
                .as_ref()
                .map(|uri| uri.to_file_path().unwrap())
                .or_else(|| self.root_path.clone().map(PathBuf::from))
                .into_iter()
                .collect(),
        }
    }
}
/// Extension methods for measuring strings under an LSP position encoding.
pub trait StrExt {
    /// Length of the string in units of `encoding` (bytes for UTF-8,
    /// code units for UTF-16).
    fn encoded_len(&self, encoding: PositionEncoding) -> usize;
}
impl StrExt for str {
    /// Measures this string in units of `encoding`.
    fn encoded_len(&self, encoding: PositionEncoding) -> usize {
        match encoding {
            // UTF-8 length is simply the byte length.
            PositionEncoding::Utf8 => self.len(),
            // Count UTF-16 code units without allocating.
            PositionEncoding::Utf16 => self.encode_utf16().count(),
        }
    }
}
/// Extension methods for Typst's [`VirtualPath`].
pub trait VirtualPathExt {
    /// Returns a copy with the file extension replaced.
    fn with_extension(&self, extension: impl AsRef<OsStr>) -> Self;
}
impl VirtualPathExt for VirtualPath {
    fn with_extension(&self, extension: impl AsRef<OsStr>) -> Self {
        // Delegates to `Path::with_extension` on the rooted representation.
        Self::new(self.as_rooted_path().with_extension(extension))
    }
}
/// Extension methods for LSP [`Position`].
pub trait PositionExt {
    /// Delta from `self` to `to` in the `SemanticToken` sense.
    fn delta(&self, to: &Self) -> PositionDelta;
}
impl PositionExt for Position {
    /// Computes the `SemanticToken`-style delta from `self` to `to`: the
    /// `character` delta is relative to `self`'s column only when both
    /// positions share a line; otherwise it is the absolute column of `to`
    /// on its own line.
    fn delta(&self, to: &Self) -> PositionDelta {
        let delta_line = to.line - self.line;
        let delta_start = match delta_line {
            0 => to.character - self.character,
            _ => to.character,
        };
        PositionDelta {
            delta_line,
            delta_start,
        }
    }
}
/// A line/column delta between two positions, as used by delta-encoded
/// semantic tokens.
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Default)]
pub struct PositionDelta {
    /// Lines advanced from the previous position.
    pub delta_line: u32,
    /// Column delta: relative to the previous start on the same line,
    /// absolute otherwise.
    pub delta_start: u32,
}

View file

@ -1,6 +1,8 @@
pub use tower_lsp::Client as LspHost;
use std::borrow::Cow;
use std::fmt;
use std::path::PathBuf;
use std::sync::Arc;
use anyhow::Context;
@ -9,28 +11,187 @@ use futures::FutureExt;
use log::{error, info, trace};
use once_cell::sync::OnceCell;
use serde_json::Value as JsonValue;
use tinymist_query::{
get_semantic_tokens_options, get_semantic_tokens_registration,
get_semantic_tokens_unregistration, CompletionRequest, DocumentSymbolRequest, HoverRequest,
PositionEncoding, SelectionRangeRequest, SemanticTokensDeltaRequest, SemanticTokensFullRequest,
SignatureHelpRequest, SymbolRequest,
};
use anyhow::bail;
use futures::future::BoxFuture;
use itertools::Itertools;
use serde::Deserialize;
use serde_json::{Map, Value};
use tokio::sync::{Mutex, RwLock};
use tower_lsp::lsp_types::*;
use tower_lsp::{jsonrpc, LanguageServer};
use tower_lsp::lsp_types::ConfigurationItem;
use tower_lsp::{jsonrpc, lsp_types::*, LanguageServer};
use typst::model::Document;
use typst_ts_core::config::CompileOpts;
use crate::actor;
use crate::actor::typst::CompileCluster;
use crate::actor::typst::{
CompilerQueryResponse, CompletionRequest, DocumentSymbolRequest, HoverRequest,
OnSaveExportRequest, SelectionRangeRequest, SemanticTokensDeltaRequest,
SemanticTokensFullRequest, SignatureHelpRequest, SymbolRequest,
};
use crate::config::{
Config, ConstConfig, ExperimentalFormatterMode, ExportPdfMode, SemanticTokensMode,
};
use crate::ext::InitializeParamsExt;
use crate::actor::typst::{CompilerQueryResponse, OnSaveExportRequest};
use super::semantic_tokens::{
get_semantic_tokens_options, get_semantic_tokens_registration,
get_semantic_tokens_unregistration,
};
/// Whether the experimental formatter is enabled.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum ExperimentalFormatterMode {
    /// Formatting is disabled (the default).
    #[default]
    Off,
    /// Formatting is enabled.
    On,
}
/// When to export the compiled document as a PDF.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum ExportPdfMode {
    /// Never export automatically.
    Never,
    /// Export when the document is saved (the default).
    #[default]
    OnSave,
    /// Export on every edit.
    OnType,
}
/// Whether semantic tokens are served to the client.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum SemanticTokensMode {
    /// Do not serve semantic tokens.
    Disable,
    /// Serve semantic tokens (the default).
    #[default]
    Enable,
}
pub type Listener<T> = Box<dyn FnMut(&T) -> BoxFuture<anyhow::Result<()>> + Send + Sync>;
/// Configuration keys fetched from the client; each is requested both with and
/// without the `tinymist.` prefix (see `Config::get_items`).
const CONFIG_ITEMS: &[&str] = &[
    "exportPdf",
    "rootPath",
    "semanticTokens",
    "experimentalFormatterMode",
];
/// User-facing configuration, updatable at runtime via configuration-change
/// notifications.
#[derive(Default)]
pub struct Config {
    /// When the compiled document is exported as a PDF.
    pub export_pdf: ExportPdfMode,
    /// Workspace root override; `None` when unset or cleared by a `null` value.
    pub root_path: Option<PathBuf>,
    /// Whether semantic tokens are served to the client.
    pub semantic_tokens: SemanticTokensMode,
    /// Experimental formatter toggle.
    pub formatter: ExperimentalFormatterMode,
    // Callbacks fired when the corresponding setting changes.
    semantic_tokens_listeners: Vec<Listener<SemanticTokensMode>>,
    formatter_listeners: Vec<Listener<ExperimentalFormatterMode>>,
}
impl Config {
    /// Builds the `workspace/configuration` request items: each key in
    /// [`CONFIG_ITEMS`] is requested both as `tinymist.<key>` and bare.
    pub fn get_items() -> Vec<ConfigurationItem> {
        let sections = CONFIG_ITEMS
            .iter()
            .flat_map(|item| [format!("tinymist.{item}"), item.to_string()]);
        sections
            .map(|section| ConfigurationItem {
                section: Some(section),
                ..Default::default()
            })
            .collect()
    }
    /// Pairs the client's answers to [`Config::get_items`] back with their
    /// keys.
    ///
    /// Answers arrive two per key (prefixed, then bare); the prefixed answer
    /// wins unless it is `null`.
    pub fn values_to_map(values: Vec<Value>) -> Map<String, Value> {
        let unpaired_values = values
            .into_iter()
            .tuples()
            // Prefer the `tinymist.`-prefixed answer; fall back to the bare one.
            .map(|(a, b)| if !a.is_null() { a } else { b });
        CONFIG_ITEMS
            .iter()
            .map(|item| item.to_string())
            .zip(unpaired_values)
            .collect()
    }
    /// Registers a callback fired when the `semanticTokens` setting changes.
    pub fn listen_semantic_tokens(&mut self, listener: Listener<SemanticTokensMode>) {
        self.semantic_tokens_listeners.push(listener);
    }
    // pub fn listen_formatting(&mut self, listener:
    // Listener<ExperimentalFormatterMode>) { self.formatter_listeners.
    // push(listener); }
    /// Applies a configuration update given as a JSON value; only objects are
    /// accepted.
    pub async fn update(&mut self, update: &Value) -> anyhow::Result<()> {
        if let Value::Object(update) = update {
            self.update_by_map(update).await
        } else {
            bail!("got invalid configuration object {update}")
        }
    }
    /// Applies a configuration update from a key/value map, notifying
    /// registered listeners of changed values.
    ///
    /// Values that fail to deserialize are silently ignored. Listeners run
    /// before the new value is stored, so a listener error aborts the update
    /// without committing that value.
    pub async fn update_by_map(&mut self, update: &Map<String, Value>) -> anyhow::Result<()> {
        let export_pdf = update
            .get("exportPdf")
            .map(ExportPdfMode::deserialize)
            .and_then(Result::ok);
        if let Some(export_pdf) = export_pdf {
            self.export_pdf = export_pdf;
        }
        let root_path = update.get("rootPath");
        if let Some(root_path) = root_path {
            // Explicit `null` clears the root path...
            if root_path.is_null() {
                self.root_path = None;
            }
            // ...while a string value replaces it; other types are ignored.
            if let Some(root_path) = root_path.as_str().map(PathBuf::from) {
                self.root_path = Some(root_path);
            }
        }
        let semantic_tokens = update
            .get("semanticTokens")
            .map(SemanticTokensMode::deserialize)
            .and_then(Result::ok);
        if let Some(semantic_tokens) = semantic_tokens {
            for listener in &mut self.semantic_tokens_listeners {
                listener(&semantic_tokens).await?;
            }
            self.semantic_tokens = semantic_tokens;
        }
        let formatter = update
            .get("experimentalFormatterMode")
            .map(ExperimentalFormatterMode::deserialize)
            .and_then(Result::ok);
        if let Some(formatter) = formatter {
            for listener in &mut self.formatter_listeners {
                listener(&formatter).await?;
            }
            self.formatter = formatter;
        }
        Ok(())
    }
}
// Manual `Debug` impl: listener callbacks are not `Debug`, so only their
// counts are reported.
impl fmt::Debug for Config {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let semantic_tokens_count = self.semantic_tokens_listeners.len();
        let formatter_count = self.formatter_listeners.len();
        f.debug_struct("Config")
            .field("export_pdf", &self.export_pdf)
            .field("formatter", &self.formatter)
            .field("semantic_tokens", &self.semantic_tokens)
            .field(
                "semantic_tokens_listeners",
                &format_args!("Vec[len = {}]", semantic_tokens_count),
            )
            .field(
                "formatter_listeners",
                &format_args!("Vec[len = {}]", formatter_count),
            )
            .finish()
    }
}
/// Configuration set at initialization that won't change within a single
/// session
#[derive(Debug)]
pub struct ConstConfig {
    /// Negotiated position encoding used for all LSP positions/ranges.
    pub position_encoding: PositionEncoding,
    /// Whether the client allows dynamic (un)registration of semantic tokens.
    pub supports_semantic_tokens_dynamic_registration: bool,
    /// Whether the client allows dynamic (un)registration of formatting.
    pub supports_document_formatting_dynamic_registration: bool,
    /// Whether the client supports `workspace/configuration` requests.
    pub supports_config_change_registration: bool,
}
pub struct TypstServer {
pub client: LspHost,
@ -583,3 +744,105 @@ impl TypstServer {
todo!()
}
}
impl ConstConfig {
    /// Chooses UTF-8 when the client declares support for it; otherwise falls
    /// back to UTF-16, the only encoding every LSP client must support.
    fn choose_encoding(params: &InitializeParams) -> PositionEncoding {
        params
            .position_encodings()
            .contains(&PositionEncodingKind::UTF8)
            .then_some(PositionEncoding::Utf8)
            .unwrap_or(PositionEncoding::Utf16)
    }
}
impl From<&InitializeParams> for ConstConfig {
    /// Captures the client capabilities relevant to this server at
    /// initialization time.
    fn from(params: &InitializeParams) -> Self {
        Self {
            position_encoding: Self::choose_encoding(params),
            supports_semantic_tokens_dynamic_registration: params
                .supports_semantic_tokens_dynamic_registration(),
            supports_document_formatting_dynamic_registration: params
                .supports_document_formatting_dynamic_registration(),
            supports_config_change_registration: params.supports_config_change_registration(),
        }
    }
}
/// Convenience accessors over [`InitializeParams`] client capabilities.
pub trait InitializeParamsExt {
    /// Position encodings the client supports (UTF-16 if unspecified).
    fn position_encodings(&self) -> &[PositionEncodingKind];
    /// Whether `workspace/configuration` is supported.
    fn supports_config_change_registration(&self) -> bool;
    /// The client's semantic-token capabilities, if declared.
    fn semantic_tokens_capabilities(&self) -> Option<&SemanticTokensClientCapabilities>;
    /// The client's document-formatting capabilities, if declared.
    fn document_formatting_capabilities(&self) -> Option<&DocumentFormattingClientCapabilities>;
    /// Whether semantic tokens may be (un)registered dynamically.
    fn supports_semantic_tokens_dynamic_registration(&self) -> bool;
    /// Whether formatting may be (un)registered dynamically.
    fn supports_document_formatting_dynamic_registration(&self) -> bool;
    /// Workspace root directories derived from the initialize request.
    fn root_paths(&self) -> Vec<PathBuf>;
}
static DEFAULT_ENCODING: [PositionEncodingKind; 1] = [PositionEncodingKind::UTF16];
impl InitializeParamsExt for InitializeParams {
    /// Position encodings the client supports; defaults to UTF-16 when the
    /// capability is absent.
    fn position_encodings(&self) -> &[PositionEncodingKind] {
        self.capabilities
            .general
            .as_ref()
            .and_then(|general| general.position_encodings.as_ref())
            .map(|encodings| encodings.as_slice())
            .unwrap_or(&DEFAULT_ENCODING)
    }
    /// Whether the client supports `workspace/configuration` requests.
    fn supports_config_change_registration(&self) -> bool {
        self.capabilities
            .workspace
            .as_ref()
            .and_then(|workspace| workspace.configuration)
            .unwrap_or(false)
    }
    /// The client's semantic-token capabilities, if declared.
    fn semantic_tokens_capabilities(&self) -> Option<&SemanticTokensClientCapabilities> {
        self.capabilities
            .text_document
            .as_ref()?
            .semantic_tokens
            .as_ref()
    }
    /// The client's document-formatting capabilities, if declared.
    fn document_formatting_capabilities(&self) -> Option<&DocumentFormattingClientCapabilities> {
        self.capabilities
            .text_document
            .as_ref()?
            .formatting
            .as_ref()
    }
    /// Whether semantic-token support may be (un)registered dynamically.
    fn supports_semantic_tokens_dynamic_registration(&self) -> bool {
        self.semantic_tokens_capabilities()
            .and_then(|semantic_tokens| semantic_tokens.dynamic_registration)
            .unwrap_or(false)
    }
    /// Whether formatting support may be (un)registered dynamically.
    fn supports_document_formatting_dynamic_registration(&self) -> bool {
        self.document_formatting_capabilities()
            .and_then(|document_format| document_format.dynamic_registration)
            .unwrap_or(false)
    }
    /// Workspace roots as local paths, preferring `workspace_folders`, then
    /// `root_uri`, then the deprecated `root_path`.
    ///
    /// URIs that are not mappable to a local path (e.g. non-`file://`
    /// schemes) are skipped rather than panicking, since these URIs come from
    /// the client and are not under our control.
    #[allow(deprecated)] // `self.root_path` is marked as deprecated
    fn root_paths(&self) -> Vec<PathBuf> {
        match self.workspace_folders.as_ref() {
            Some(roots) => roots
                .iter()
                .filter_map(|root| root.uri.to_file_path().ok())
                .collect(),
            None => self
                .root_uri
                .as_ref()
                .and_then(|uri| uri.to_file_path().ok())
                .or_else(|| self.root_path.clone().map(PathBuf::from))
                .into_iter()
                .collect(),
        }
    }
}

View file

@ -1,19 +1,11 @@
//! # tinymist LSP Server
mod config;
mod ext;
mod lsp_typst_boundary;
// pub mod formatting;
pub mod actor;
pub mod analysis;
pub mod lsp;
pub mod semantic_tokens;
use tower_lsp::{LspService, Server};
use lsp::TypstServer;
// #[derive(Debug, Clone)]
// struct Args {}
@ -49,7 +41,7 @@ async fn main() {
let stdin = tokio::io::stdin();
let stdout = tokio::io::stdout();
let (service, socket) = LspService::new(TypstServer::new);
let (service, socket) = LspService::new(lsp::TypstServer::new);
Server::new(stdin, stdout, socket).serve(service).await;
}

View file

@ -1,44 +0,0 @@
use tower_lsp::lsp_types::{Position, SemanticToken};
use typst::diag::EcoString;
use typst::syntax::Source;
use crate::config::PositionEncoding;
use crate::ext::{PositionExt, StrExt};
use crate::lsp_typst_boundary::typst_to_lsp;
use super::Token;
/// Encodes `tokens` as LSP `SemanticToken`s, delta-encoding each token's
/// position against the previous token, and pairs each with its source text.
pub(super) fn encode_tokens<'a>(
    tokens: impl Iterator<Item = Token> + 'a,
    source: &'a Source,
    encoding: PositionEncoding,
) -> impl Iterator<Item = (SemanticToken, EcoString)> + 'a {
    // `scan` threads the previous token's absolute position through the
    // iteration, starting from the beginning of the document.
    tokens.scan(Position::new(0, 0), move |last_position, token| {
        let (encoded_token, source_code, position) =
            encode_token(token, last_position, source, encoding);
        *last_position = position;
        Some((encoded_token, source_code))
    })
}
/// Converts one `Token` into a delta-encoded LSP `SemanticToken`.
///
/// Returns the encoded token, the token's source text, and the token's
/// absolute start position (used as the baseline for the next delta).
fn encode_token(
    token: Token,
    last_position: &Position,
    source: &Source,
    encoding: PositionEncoding,
) -> (SemanticToken, EcoString, Position) {
    let position = typst_to_lsp::offset_to_position(token.offset, encoding, source);
    let delta = last_position.delta(&position);
    // Token length must be measured in the negotiated encoding's units.
    let length = token.source.as_str().encoded_len(encoding);
    let lsp_token = SemanticToken {
        delta_line: delta.delta_line,
        delta_start: delta.delta_start,
        length: length as u32,
        token_type: token.token_type as u32,
        token_modifiers_bitset: token.modifiers.bitset(),
    };
    (lsp_token, token.source, position)
}