feat: create query crate

Myriad-Dreamin 2024-03-07 13:35:45 +08:00
parent 7e9bddb763
commit 9af8eb4b52
29 changed files with 1312 additions and 1341 deletions

@@ -0,0 +1,38 @@
[package]
name = "tinymist-query"
description = "Language queries for tinymist."
categories = ["compilers", "command-line-utilities"]
keywords = ["api", "language", "typst"]
authors.workspace = true
version.workspace = true
license.workspace = true
edition.workspace = true
homepage.workspace = true
repository.workspace = true

[dependencies]
anyhow.workspace = true
comemo.workspace = true
regex.workspace = true
itertools.workspace = true
lazy_static.workspace = true
strum.workspace = true
log.workspace = true
serde.workspace = true
serde_json.workspace = true
parking_lot.workspace = true
typst.workspace = true
typst-ide.workspace = true
typst-ts-core = { version = "0.4.2-rc6", default-features = false, features = [
"flat-vector",
"vector-bbox",
] }
typst-ts-compiler.workspace = true
tower-lsp.workspace = true
# [lints]
# workspace = true

@@ -0,0 +1,107 @@
use comemo::Track;
use typst::diag::EcoString;
use typst::engine::{Engine, Route};
use typst::eval::{Tracer, Vm};
use typst::foundations::{Label, Scopes, Value};
use typst::introspection::{Introspector, Locator};
use typst::model::{BibliographyElem, Document};
use typst::syntax::{ast, LinkedNode, Span, SyntaxKind};
use typst::World;
use typst_ts_core::typst::prelude::{eco_vec, EcoVec};
/// Try to determine a set of possible values for an expression.
pub fn analyze_expr(world: &dyn World, node: &LinkedNode) -> EcoVec<Value> {
match node.cast::<ast::Expr>() {
Some(ast::Expr::None(_)) => eco_vec![Value::None],
Some(ast::Expr::Auto(_)) => eco_vec![Value::Auto],
Some(ast::Expr::Bool(v)) => eco_vec![Value::Bool(v.get())],
Some(ast::Expr::Int(v)) => eco_vec![Value::Int(v.get())],
Some(ast::Expr::Float(v)) => eco_vec![Value::Float(v.get())],
Some(ast::Expr::Numeric(v)) => eco_vec![Value::numeric(v.get())],
Some(ast::Expr::Str(v)) => eco_vec![Value::Str(v.get().into())],
Some(ast::Expr::FieldAccess(access)) => {
let Some(child) = node.children().next() else {
return eco_vec![];
};
analyze_expr(world, &child)
.into_iter()
.filter_map(|target| target.field(&access.field()).ok())
.collect()
}
Some(_) => {
if let Some(parent) = node.parent() {
if parent.kind() == SyntaxKind::FieldAccess && node.index() > 0 {
return analyze_expr(world, parent);
}
}
let mut tracer = Tracer::new();
tracer.inspect(node.span());
typst::compile(world, &mut tracer).ok();
tracer.values()
}
_ => eco_vec![],
}
}
/// Try to load a module from the current source file.
pub fn analyze_import(world: &dyn World, source: &LinkedNode) -> Option<Value> {
let source = analyze_expr(world, source).into_iter().next()?;
if source.scope().is_some() {
return Some(source);
}
let mut locator = Locator::default();
let introspector = Introspector::default();
let mut tracer = Tracer::new();
let engine = Engine {
world: world.track(),
route: Route::default(),
introspector: introspector.track(),
locator: &mut locator,
tracer: tracer.track_mut(),
};
let mut vm = Vm::new(engine, Scopes::new(Some(world.library())), Span::detached());
typst::eval::import(&mut vm, source, Span::detached(), true)
.ok()
.map(Value::Module)
}
/// Find all labels and details for them.
///
/// Returns:
/// - All labels and descriptions for them, if available
/// - A split offset: All labels before this offset belong to nodes, all after
/// belong to a bibliography.
pub fn analyze_labels(document: &Document) -> (Vec<(Label, Option<EcoString>)>, usize) {
let mut output = vec![];
// Labels in the document.
for elem in document.introspector.all() {
let Some(label) = elem.label() else { continue };
let details = elem
.get_by_name("caption")
.or_else(|| elem.get_by_name("body"))
.and_then(|field| match field {
Value::Content(content) => Some(content),
_ => None,
})
.as_ref()
.unwrap_or(elem)
.plain_text();
output.push((label, Some(details)));
}
let split = output.len();
// Bibliography keys.
for (key, detail) in BibliographyElem::keys(document.introspector.track()) {
output.push((Label::new(&key), detail));
}
(output, split)
}
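A minimal sketch of how a caller might consume the split offset returned by `analyze_labels`, with plain `String` standing in for `Label`/`EcoString` (constructing a real `Document` would require a full compilation):

fn split_labels(
    mut labels: Vec<(String, Option<String>)>,
    split: usize,
) -> (Vec<(String, Option<String>)>, Vec<(String, Option<String>)>) {
    // Entries before `split` come from labelled document nodes; the rest are
    // the bibliography keys appended after them.
    let bibliography = labels.split_off(split);
    (labels, bibliography)
}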

@@ -0,0 +1 @@
pub mod analyze;

@@ -0,0 +1,27 @@
use crate::prelude::*;
#[derive(Debug, Clone)]
pub struct CompletionRequest {
pub path: PathBuf,
pub position: LspPosition,
pub position_encoding: PositionEncoding,
pub explicit: bool,
}
pub fn completion(
world: &TypstSystemWorld,
doc: Option<Arc<TypstDocument>>,
req: CompletionRequest,
) -> Option<CompletionResponse> {
let source = get_suitable_source_in_workspace(world, &req.path).ok()?;
let typst_offset =
lsp_to_typst::position_to_offset(req.position, req.position_encoding, &source);
let (typst_start_offset, completions) =
typst_ide::autocomplete(world, doc.as_deref(), &source, typst_offset, req.explicit)?;
let lsp_start_position =
typst_to_lsp::offset_to_position(typst_start_offset, req.position_encoding, &source);
let replace_range = LspRawRange::new(lsp_start_position, req.position);
Some(typst_to_lsp::completions(&completions, replace_range).into())
}

@@ -0,0 +1,143 @@
use crate::prelude::*;
pub type DiagnosticsMap = HashMap<Url, Vec<LspDiagnostic>>;
pub fn convert_diagnostics<'a>(
project: &TypstSystemWorld,
errors: impl IntoIterator<Item = &'a TypstDiagnostic>,
position_encoding: PositionEncoding,
) -> DiagnosticsMap {
errors
.into_iter()
.flat_map(|error| {
convert_diagnostic(project, error, position_encoding)
.map_err(move |conversion_err| {
error!("could not convert Typst error to diagnostic: {conversion_err:?} error to convert: {error:?}");
})
})
.collect::<Vec<_>>()
.into_iter()
.into_group_map()
}
fn convert_diagnostic(
project: &TypstSystemWorld,
typst_diagnostic: &TypstDiagnostic,
position_encoding: PositionEncoding,
) -> anyhow::Result<(Url, LspDiagnostic)> {
let uri;
let lsp_range;
if let Some((id, span)) = diagnostic_span_id(typst_diagnostic) {
uri = Url::from_file_path(project.path_for_id(id)?).unwrap();
let source = project.source(id)?;
lsp_range = diagnostic_range(&source, span, position_encoding).raw_range;
} else {
uri = Url::from_file_path(project.root.clone()).unwrap();
lsp_range = LspRawRange::default();
};
let lsp_severity = diagnostic_severity(typst_diagnostic.severity);
let typst_message = &typst_diagnostic.message;
let typst_hints = &typst_diagnostic.hints;
let lsp_message = format!("{typst_message}{}", diagnostic_hints(typst_hints));
let tracepoints = diagnostic_related_information(project, typst_diagnostic, position_encoding)?;
let diagnostic = LspDiagnostic {
range: lsp_range,
severity: Some(lsp_severity),
message: lsp_message,
source: Some("typst".to_owned()),
related_information: Some(tracepoints),
..Default::default()
};
Ok((uri, diagnostic))
}
fn tracepoint_to_relatedinformation(
project: &TypstSystemWorld,
tracepoint: &Spanned<Tracepoint>,
position_encoding: PositionEncoding,
) -> anyhow::Result<Option<DiagnosticRelatedInformation>> {
if let Some(id) = tracepoint.span.id() {
let uri = Url::from_file_path(project.path_for_id(id)?).unwrap();
let source = project.source(id)?;
if let Some(typst_range) = source.range(tracepoint.span) {
let lsp_range = typst_to_lsp::range(typst_range, &source, position_encoding);
return Ok(Some(DiagnosticRelatedInformation {
location: LspLocation {
uri,
range: lsp_range.raw_range,
},
message: tracepoint.v.to_string(),
}));
}
}
Ok(None)
}
fn diagnostic_related_information(
project: &TypstSystemWorld,
typst_diagnostic: &TypstDiagnostic,
position_encoding: PositionEncoding,
) -> anyhow::Result<Vec<DiagnosticRelatedInformation>> {
let mut tracepoints = vec![];
for tracepoint in &typst_diagnostic.trace {
if let Some(info) =
tracepoint_to_relatedinformation(project, tracepoint, position_encoding)?
{
tracepoints.push(info);
}
}
Ok(tracepoints)
}
fn diagnostic_span_id(typst_diagnostic: &TypstDiagnostic) -> Option<(FileId, TypstSpan)> {
iter::once(typst_diagnostic.span)
.chain(typst_diagnostic.trace.iter().map(|trace| trace.span))
.find_map(|span| Some((span.id()?, span)))
}
fn diagnostic_range(
source: &Source,
typst_span: TypstSpan,
position_encoding: PositionEncoding,
) -> LspRange {
// Due to #241 and maybe typst/typst#2035, we sometimes fail to find the span.
// In that case, we use a default span as a better alternative to
// panicking.
//
// This may have been fixed after Typst 0.7.0, but it's still nice to avoid
// panics in case something similar reappears.
match source.find(typst_span) {
Some(node) => {
let typst_range = node.range();
typst_to_lsp::range(typst_range, source, position_encoding)
}
None => LspRange::new(
LspRawRange::new(LspPosition::new(0, 0), LspPosition::new(0, 0)),
position_encoding,
),
}
}
fn diagnostic_severity(typst_severity: TypstSeverity) -> LspSeverity {
match typst_severity {
TypstSeverity::Error => LspSeverity::ERROR,
TypstSeverity::Warning => LspSeverity::WARNING,
}
}
fn diagnostic_hints(typst_hints: &[EcoString]) -> Format<impl Iterator<Item = EcoString> + '_> {
iter::repeat(EcoString::from("\n\nHint: "))
.take(typst_hints.len())
.interleave(typst_hints.iter().cloned())
.format("")
}
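The `diagnostic_hints` helper above interleaves a "\n\nHint: " prefix with each hint. A dependency-free sketch of the same formatting (the function name is illustrative):

fn hints_suffix(hints: &[&str]) -> String {
    // For ["add a semicolon", "check the import"], this yields
    // "\n\nHint: add a semicolon\n\nHint: check the import"; an empty slice
    // yields an empty string, leaving the diagnostic message unchanged.
    hints.iter().map(|hint| format!("\n\nHint: {hint}")).collect()
}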

@@ -0,0 +1,160 @@
use crate::prelude::*;
#[derive(Debug, Clone)]
pub struct DocumentSymbolRequest {
pub path: PathBuf,
pub position_encoding: PositionEncoding,
}
pub fn document_symbol(
world: &TypstSystemWorld,
req: DocumentSymbolRequest,
) -> Option<DocumentSymbolResponse> {
let source = get_suitable_source_in_workspace(world, &req.path).ok()?;
let uri = Url::from_file_path(req.path).unwrap();
let symbols = get_document_symbols(source, uri, req.position_encoding);
symbols.map(DocumentSymbolResponse::Flat)
}
#[comemo::memoize]
pub(crate) fn get_document_symbols(
source: Source,
uri: Url,
position_encoding: PositionEncoding,
) -> Option<Vec<SymbolInformation>> {
struct DocumentSymbolWorker {
symbols: Vec<SymbolInformation>,
}
impl DocumentSymbolWorker {
/// Get all symbols for a node recursively.
pub fn get_symbols<'a>(
&mut self,
node: LinkedNode<'a>,
source: &'a Source,
uri: &'a Url,
position_encoding: PositionEncoding,
) -> anyhow::Result<()> {
let own_symbol = get_ident(&node, source, uri, position_encoding)?;
for child in node.children() {
self.get_symbols(child, source, uri, position_encoding)?;
}
if let Some(symbol) = own_symbol {
self.symbols.push(symbol);
}
Ok(())
}
}
    /// Get a symbol for a leaf node of a valid type, or `None` if the node is
    /// of an invalid type.
#[allow(deprecated)]
fn get_ident(
node: &LinkedNode,
source: &Source,
uri: &Url,
position_encoding: PositionEncoding,
) -> anyhow::Result<Option<SymbolInformation>> {
match node.kind() {
SyntaxKind::Label => {
let ast_node = node
.cast::<ast::Label>()
.ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?;
let name = ast_node.get().to_string();
let symbol = SymbolInformation {
name,
kind: SymbolKind::CONSTANT,
tags: None,
deprecated: None, // do not use, deprecated, use `tags` instead
location: LspLocation {
uri: uri.clone(),
range: typst_to_lsp::range(node.range(), source, position_encoding)
.raw_range,
},
container_name: None,
};
Ok(Some(symbol))
}
SyntaxKind::Ident => {
let ast_node = node
.cast::<ast::Ident>()
.ok_or_else(|| anyhow!("cast to ast node failed: {:?}", node))?;
let name = ast_node.get().to_string();
let Some(parent) = node.parent() else {
return Ok(None);
};
let kind = match parent.kind() {
// for variable definitions, the Let binding holds an Ident
SyntaxKind::LetBinding => SymbolKind::VARIABLE,
// for function definitions, the Let binding holds a Closure which holds the
// Ident
SyntaxKind::Closure => {
let Some(grand_parent) = parent.parent() else {
return Ok(None);
};
match grand_parent.kind() {
SyntaxKind::LetBinding => SymbolKind::FUNCTION,
_ => return Ok(None),
}
}
_ => return Ok(None),
};
let symbol = SymbolInformation {
name,
kind,
tags: None,
deprecated: None, // do not use, deprecated, use `tags` instead
location: LspLocation {
uri: uri.clone(),
range: typst_to_lsp::range(node.range(), source, position_encoding)
.raw_range,
},
container_name: None,
};
Ok(Some(symbol))
}
SyntaxKind::Markup => {
let name = node.get().to_owned().into_text().to_string();
if name.is_empty() {
return Ok(None);
}
let Some(parent) = node.parent() else {
return Ok(None);
};
let kind = match parent.kind() {
SyntaxKind::Heading => SymbolKind::NAMESPACE,
_ => return Ok(None),
};
let symbol = SymbolInformation {
name,
kind,
tags: None,
deprecated: None, // do not use, deprecated, use `tags` instead
location: LspLocation {
uri: uri.clone(),
range: typst_to_lsp::range(node.range(), source, position_encoding)
.raw_range,
},
container_name: None,
};
Ok(Some(symbol))
}
_ => Ok(None),
}
}
let root = LinkedNode::new(source.root());
let mut worker = DocumentSymbolWorker { symbols: vec![] };
let res = worker
.get_symbols(root, &source, &uri, position_encoding)
.ok();
res.map(|_| worker.symbols)
}

@@ -0,0 +1,28 @@
use crate::prelude::*;
#[derive(Debug, Clone)]
pub struct HoverRequest {
pub path: PathBuf,
pub position: LspPosition,
pub position_encoding: PositionEncoding,
}
pub fn hover(
world: &TypstSystemWorld,
doc: Option<Arc<TypstDocument>>,
req: HoverRequest,
) -> Option<Hover> {
let source = get_suitable_source_in_workspace(world, &req.path).ok()?;
let typst_offset =
lsp_to_typst::position_to_offset(req.position, req.position_encoding, &source);
let typst_tooltip = typst_ide::tooltip(world, doc.as_deref(), &source, typst_offset)?;
let ast_node = LinkedNode::new(source.root()).leaf_at(typst_offset)?;
let range = typst_to_lsp::range(ast_node.range(), &source, req.position_encoding);
Some(Hover {
contents: typst_to_lsp::tooltip(&typst_tooltip),
range: Some(range.raw_range),
})
}

@@ -0,0 +1,27 @@
pub mod analysis;
pub(crate) mod diagnostics;
pub use diagnostics::*;
pub(crate) mod signature_help;
pub use signature_help::*;
pub(crate) mod document_symbol;
pub use document_symbol::*;
pub(crate) mod symbol;
pub use symbol::*;
pub(crate) mod semantic_tokens;
pub use semantic_tokens::*;
pub(crate) mod semantic_tokens_full;
pub use semantic_tokens_full::*;
pub(crate) mod semantic_tokens_delta;
pub use semantic_tokens_delta::*;
pub(crate) mod hover;
pub use hover::*;
pub(crate) mod completion;
pub use completion::*;
pub(crate) mod selection_range;
pub use selection_range::*;
pub mod lsp_typst_boundary;
pub use lsp_typst_boundary::*;
mod prelude;

@@ -0,0 +1,408 @@
//! Conversions between Typst and LSP types and representations
use tower_lsp::lsp_types;
use typst::syntax::Source;
pub type LspPosition = lsp_types::Position;
/// The interpretation of an `LspCharacterOffset` depends on the
/// `LspPositionEncoding`
pub type LspCharacterOffset = u32;
pub type LspPositionEncoding = PositionEncoding;
/// Byte offset (i.e. UTF-8 bytes) in Typst files, either from the start of the
/// line or the file
pub type TypstOffset = usize;
pub type TypstSpan = typst::syntax::Span;
/// An LSP range. It needs its associated `LspPositionEncoding` to be used. The
/// `LspRange` struct provides this range with that encoding.
pub type LspRawRange = lsp_types::Range;
pub type TypstRange = std::ops::Range<usize>;
pub type TypstTooltip = typst_ide::Tooltip;
pub type LspHoverContents = lsp_types::HoverContents;
pub type LspDiagnostic = lsp_types::Diagnostic;
pub type TypstDiagnostic = typst::diag::SourceDiagnostic;
pub type LspSeverity = lsp_types::DiagnosticSeverity;
pub type TypstSeverity = typst::diag::Severity;
pub type LspParamInfo = lsp_types::ParameterInformation;
pub type TypstParamInfo = typst::foundations::ParamInfo;
/// What counts as "1 character" for string indexing. We should always prefer
/// UTF-8, but support UTF-16 as long as it is standard. For more background on
/// encodings and LSP, try ["The bottom emoji breaks rust-analyzer"](https://fasterthanli.me/articles/the-bottom-emoji-breaks-rust-analyzer),
/// a well-written article on the topic.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, Default)]
pub enum PositionEncoding {
/// "1 character" means "1 UTF-16 code unit"
///
/// This is the only required encoding for LSPs to support, but it's not a
/// natural one (unless you're working in JS). Prefer UTF-8, and refer
/// to the article linked in the `PositionEncoding` docs for more
/// background.
#[default]
Utf16,
/// "1 character" means "1 byte"
Utf8,
}
impl From<PositionEncoding> for lsp_types::PositionEncodingKind {
fn from(position_encoding: PositionEncoding) -> Self {
match position_encoding {
PositionEncoding::Utf16 => Self::UTF16,
PositionEncoding::Utf8 => Self::UTF8,
}
}
}
/// An LSP range with its associated encoding.
pub struct LspRange {
pub raw_range: LspRawRange,
pub encoding: LspPositionEncoding,
}
impl LspRange {
pub fn new(raw_range: LspRawRange, encoding: LspPositionEncoding) -> Self {
Self {
raw_range,
encoding,
}
}
pub fn into_range_on(self, source: &Source) -> TypstRange {
lsp_to_typst::range(&self, source)
}
}
pub type LspCompletion = lsp_types::CompletionItem;
pub type LspCompletionKind = lsp_types::CompletionItemKind;
pub type TypstCompletion = typst_ide::Completion;
pub type TypstCompletionKind = typst_ide::CompletionKind;
pub mod lsp_to_typst {
use typst::syntax::Source;
use super::*;
pub fn position_to_offset(
lsp_position: LspPosition,
lsp_position_encoding: LspPositionEncoding,
typst_source: &Source,
) -> TypstOffset {
match lsp_position_encoding {
LspPositionEncoding::Utf8 => {
let line_index = lsp_position.line as usize;
let column_index = lsp_position.character as usize;
typst_source
.line_column_to_byte(line_index, column_index)
.unwrap()
}
LspPositionEncoding::Utf16 => {
// We have a line number and a UTF-16 offset into that line. We want a byte
// offset into the file.
//
// Typst's `Source` provides several UTF-16 methods:
// - `len_utf16` for the length of the file
// - `byte_to_utf16` to convert a byte offset from the start of the file to a
// UTF-16 offset from the start of the file
// - `utf16_to_byte` to do the opposite of `byte_to_utf16`
//
// Unfortunately, none of these address our needs well, so we do some math
// instead. This is not the fastest possible implementation, but
// it's the most reasonable without access to the internal state
// of `Source`.
// TODO: Typst's `Source` could easily provide an implementation of the method
// we need here. Submit a PR against `typst` to add it, then
// update this if/when merged.
let line_index = lsp_position.line as usize;
let utf16_offset_in_line = lsp_position.character as usize;
let byte_line_offset = typst_source.line_to_byte(line_index).unwrap();
let utf16_line_offset = typst_source.byte_to_utf16(byte_line_offset).unwrap();
let utf16_offset = utf16_line_offset + utf16_offset_in_line;
typst_source.utf16_to_byte(utf16_offset).unwrap()
}
}
}
pub fn range(lsp_range: &LspRange, source: &Source) -> TypstRange {
let lsp_start = lsp_range.raw_range.start;
let typst_start = position_to_offset(lsp_start, lsp_range.encoding, source);
let lsp_end = lsp_range.raw_range.end;
let typst_end = position_to_offset(lsp_end, lsp_range.encoding, source);
TypstRange {
start: typst_start,
end: typst_end,
}
}
}
pub mod typst_to_lsp {
use itertools::Itertools;
use lazy_static::lazy_static;
use regex::{Captures, Regex};
use tower_lsp::lsp_types::{
CompletionTextEdit, Documentation, InsertTextFormat, LanguageString, MarkedString,
MarkupContent, MarkupKind, TextEdit,
};
use typst::diag::EcoString;
use typst::foundations::{CastInfo, Repr};
use typst::syntax::Source;
use super::*;
pub fn offset_to_position(
typst_offset: TypstOffset,
lsp_position_encoding: LspPositionEncoding,
typst_source: &Source,
) -> LspPosition {
let line_index = typst_source.byte_to_line(typst_offset).unwrap();
let column_index = typst_source.byte_to_column(typst_offset).unwrap();
let lsp_line = line_index as u32;
let lsp_column = match lsp_position_encoding {
LspPositionEncoding::Utf8 => column_index as LspCharacterOffset,
LspPositionEncoding::Utf16 => {
// See the implementation of `lsp_to_typst::position_to_offset` for discussion
// relevant to this function.
// TODO: Typst's `Source` could easily provide an implementation of the method
// we need here. Submit a PR to `typst` to add it, then update
// this if/when merged.
let utf16_offset = typst_source.byte_to_utf16(typst_offset).unwrap();
let byte_line_offset = typst_source.line_to_byte(line_index).unwrap();
let utf16_line_offset = typst_source.byte_to_utf16(byte_line_offset).unwrap();
let utf16_column_offset = utf16_offset - utf16_line_offset;
utf16_column_offset as LspCharacterOffset
}
};
LspPosition::new(lsp_line, lsp_column)
}
pub fn range(
typst_range: TypstRange,
typst_source: &Source,
lsp_position_encoding: LspPositionEncoding,
) -> LspRange {
let typst_start = typst_range.start;
let lsp_start = offset_to_position(typst_start, lsp_position_encoding, typst_source);
let typst_end = typst_range.end;
let lsp_end = offset_to_position(typst_end, lsp_position_encoding, typst_source);
let raw_range = LspRawRange::new(lsp_start, lsp_end);
LspRange::new(raw_range, lsp_position_encoding)
}
fn completion_kind(typst_completion_kind: TypstCompletionKind) -> LspCompletionKind {
match typst_completion_kind {
TypstCompletionKind::Syntax => LspCompletionKind::SNIPPET,
TypstCompletionKind::Func => LspCompletionKind::FUNCTION,
TypstCompletionKind::Param => LspCompletionKind::VARIABLE,
TypstCompletionKind::Constant => LspCompletionKind::CONSTANT,
TypstCompletionKind::Symbol(_) => LspCompletionKind::TEXT,
TypstCompletionKind::Type => LspCompletionKind::CLASS,
}
}
lazy_static! {
static ref TYPST_SNIPPET_PLACEHOLDER_RE: Regex = Regex::new(r"\$\{(.*?)\}").unwrap();
}
/// Adds numbering to placeholders in snippets
fn snippet(typst_snippet: &EcoString) -> String {
let mut counter = 1;
let result =
TYPST_SNIPPET_PLACEHOLDER_RE.replace_all(typst_snippet.as_str(), |cap: &Captures| {
let substitution = format!("${{{}:{}}}", counter, &cap[1]);
counter += 1;
substitution
});
result.to_string()
}
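    // For example, a hypothetical apply string "text(${fill}, ${body})" becomes
    // "text(${1:fill}, ${2:body})": each `${...}` placeholder gains a sequential
    // index so LSP clients can tab through the snippet's fields in order.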
pub fn completion(
typst_completion: &TypstCompletion,
lsp_replace: LspRawRange,
) -> LspCompletion {
let typst_snippet = typst_completion
.apply
.as_ref()
.unwrap_or(&typst_completion.label);
let lsp_snippet = snippet(typst_snippet);
let text_edit = CompletionTextEdit::Edit(TextEdit::new(lsp_replace, lsp_snippet));
LspCompletion {
label: typst_completion.label.to_string(),
kind: Some(completion_kind(typst_completion.kind.clone())),
detail: typst_completion.detail.as_ref().map(String::from),
text_edit: Some(text_edit),
insert_text_format: Some(InsertTextFormat::SNIPPET),
..Default::default()
}
}
pub fn completions(
typst_completions: &[TypstCompletion],
lsp_replace: LspRawRange,
) -> Vec<LspCompletion> {
typst_completions
.iter()
.map(|typst_completion| completion(typst_completion, lsp_replace))
.collect_vec()
}
pub fn tooltip(typst_tooltip: &TypstTooltip) -> LspHoverContents {
let lsp_marked_string = match typst_tooltip {
TypstTooltip::Text(text) => MarkedString::String(text.to_string()),
TypstTooltip::Code(code) => MarkedString::LanguageString(LanguageString {
language: "typst".to_owned(),
value: code.to_string(),
}),
};
LspHoverContents::Scalar(lsp_marked_string)
}
pub fn param_info(typst_param_info: &TypstParamInfo) -> LspParamInfo {
LspParamInfo {
label: lsp_types::ParameterLabel::Simple(typst_param_info.name.to_owned()),
documentation: param_info_to_docs(typst_param_info),
}
}
pub fn param_info_to_label(typst_param_info: &TypstParamInfo) -> String {
format!(
"{}: {}",
typst_param_info.name,
cast_info_to_label(&typst_param_info.input)
)
}
fn param_info_to_docs(typst_param_info: &TypstParamInfo) -> Option<Documentation> {
if !typst_param_info.docs.is_empty() {
Some(Documentation::MarkupContent(MarkupContent {
value: typst_param_info.docs.to_owned(),
kind: MarkupKind::Markdown,
}))
} else {
None
}
}
pub fn cast_info_to_label(cast_info: &CastInfo) -> String {
match cast_info {
CastInfo::Any => "any".to_owned(),
CastInfo::Value(value, _) => value.repr().to_string(),
CastInfo::Type(ty) => ty.to_string(),
CastInfo::Union(options) => options.iter().map(cast_info_to_label).join(" "),
}
}
}
#[cfg(test)]
mod test {
use typst::syntax::Source;
use crate::{lsp_to_typst, PositionEncoding};
use super::*;
const ENCODING_TEST_STRING: &str = "test 🥺 test";
#[test]
fn utf16_position_to_utf8_offset() {
let source = Source::detached(ENCODING_TEST_STRING);
let start = LspPosition {
line: 0,
character: 0,
};
let emoji = LspPosition {
line: 0,
character: 5,
};
let post_emoji = LspPosition {
line: 0,
character: 7,
};
let end = LspPosition {
line: 0,
character: 12,
};
let start_offset =
lsp_to_typst::position_to_offset(start, PositionEncoding::Utf16, &source);
let start_actual = 0;
let emoji_offset =
lsp_to_typst::position_to_offset(emoji, PositionEncoding::Utf16, &source);
let emoji_actual = 5;
let post_emoji_offset =
lsp_to_typst::position_to_offset(post_emoji, PositionEncoding::Utf16, &source);
let post_emoji_actual = 9;
let end_offset = lsp_to_typst::position_to_offset(end, PositionEncoding::Utf16, &source);
let end_actual = 14;
assert_eq!(start_offset, start_actual);
assert_eq!(emoji_offset, emoji_actual);
assert_eq!(post_emoji_offset, post_emoji_actual);
assert_eq!(end_offset, end_actual);
}
#[test]
fn utf8_offset_to_utf16_position() {
let source = Source::detached(ENCODING_TEST_STRING);
let start = 0;
let emoji = 5;
let post_emoji = 9;
let end = 14;
let start_position = LspPosition {
line: 0,
character: 0,
};
let start_actual =
typst_to_lsp::offset_to_position(start, PositionEncoding::Utf16, &source);
let emoji_position = LspPosition {
line: 0,
character: 5,
};
let emoji_actual =
typst_to_lsp::offset_to_position(emoji, PositionEncoding::Utf16, &source);
let post_emoji_position = LspPosition {
line: 0,
character: 7,
};
let post_emoji_actual =
typst_to_lsp::offset_to_position(post_emoji, PositionEncoding::Utf16, &source);
let end_position = LspPosition {
line: 0,
character: 12,
};
let end_actual = typst_to_lsp::offset_to_position(end, PositionEncoding::Utf16, &source);
assert_eq!(start_position, start_actual);
assert_eq!(emoji_position, emoji_actual);
assert_eq!(post_emoji_position, post_emoji_actual);
assert_eq!(end_position, end_actual);
}
}
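The UTF-16 branch of `position_to_offset` is plain offset arithmetic over the line table and UTF-16 table of `Source`. A standalone sketch of the same math on a raw `&str`, using only the standard library (the function name is illustrative):

fn utf16_position_to_byte(text: &str, line: usize, utf16_column: usize) -> Option<usize> {
    // Byte offset at which the requested line starts.
    let line_start: usize = text.split_inclusive('\n').take(line).map(str::len).sum();
    // Walk the line, spending UTF-16 code units until the column is reached.
    let mut remaining = utf16_column;
    for (offset, ch) in text[line_start..].char_indices() {
        if remaining == 0 {
            return Some(line_start + offset);
        }
        if ch == '\n' {
            return None; // the column lies past the end of the line
        }
        remaining = remaining.checked_sub(ch.len_utf16())?;
    }
    (remaining == 0).then_some(text.len())
}

On the test string above, `utf16_position_to_byte("test 🥺 test", 0, 7)` yields `Some(9)`, matching the `post_emoji` case: the emoji occupies two UTF-16 code units but four UTF-8 bytes.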

@@ -0,0 +1,40 @@
pub use std::{
collections::HashMap,
iter,
path::{Path, PathBuf},
sync::Arc,
};
pub use anyhow::anyhow;
pub use itertools::{Format, Itertools};
pub use log::{error, trace};
pub use tower_lsp::lsp_types::{
CompletionResponse, DiagnosticRelatedInformation, DocumentSymbolResponse, Documentation, Hover,
Location as LspLocation, MarkupContent, MarkupKind, Position as LspPosition, SelectionRange,
SemanticTokens, SemanticTokensDelta, SemanticTokensFullDeltaResult, SemanticTokensResult,
SignatureHelp, SignatureInformation, SymbolInformation, SymbolKind, Url,
};
pub use typst::diag::{EcoString, FileError, FileResult, Tracepoint};
pub use typst::foundations::{Func, ParamInfo, Value};
pub use typst::syntax::{
ast::{self, AstNode},
FileId, LinkedNode, Source, Spanned, SyntaxKind, VirtualPath,
};
pub use typst::World;
use typst_ts_compiler::service::WorkspaceProvider;
pub use typst_ts_compiler::TypstSystemWorld;
pub use typst_ts_core::{TypstDocument, TypstFileId};
pub use crate::analysis::analyze::analyze_expr;
pub use crate::lsp_typst_boundary::{
lsp_to_typst, typst_to_lsp, LspDiagnostic, LspRange, LspRawRange, LspSeverity,
PositionEncoding, TypstDiagnostic, TypstSeverity, TypstSpan,
};
pub fn get_suitable_source_in_workspace(w: &TypstSystemWorld, p: &Path) -> FileResult<Source> {
// todo: source in packages
let relative_path = p
.strip_prefix(&w.workspace_root())
.map_err(|_| FileError::NotFound(p.to_owned()))?;
w.source(TypstFileId::new(None, VirtualPath::new(relative_path)))
}

@@ -0,0 +1,40 @@
use crate::prelude::*;
#[derive(Debug, Clone)]
pub struct SelectionRangeRequest {
pub path: PathBuf,
pub positions: Vec<LspPosition>,
pub position_encoding: PositionEncoding,
}
pub fn selection_range(
world: &TypstSystemWorld,
req: SelectionRangeRequest,
) -> Option<Vec<SelectionRange>> {
let source = get_suitable_source_in_workspace(world, &req.path).ok()?;
let mut ranges = Vec::new();
for position in req.positions {
let typst_offset =
lsp_to_typst::position_to_offset(position, req.position_encoding, &source);
let tree = LinkedNode::new(source.root());
let leaf = tree.leaf_at(typst_offset)?;
ranges.push(range_for_node(&source, req.position_encoding, &leaf));
}
Some(ranges)
}
fn range_for_node(
source: &Source,
position_encoding: PositionEncoding,
node: &LinkedNode,
) -> SelectionRange {
let range = typst_to_lsp::range(node.range(), source, position_encoding);
SelectionRange {
range: range.raw_range,
parent: node
.parent()
.map(|node| Box::new(range_for_node(source, position_encoding, node))),
}
}

@@ -0,0 +1,75 @@
use tower_lsp::lsp_types::{SemanticToken, SemanticTokensEdit};
#[derive(Debug)]
struct CachedTokens {
tokens: Vec<SemanticToken>,
id: u64,
}
#[derive(Default, Debug)]
pub struct CacheInner {
last_sent: Option<CachedTokens>,
next_id: u64,
}
impl CacheInner {
pub fn try_take_result(&mut self, id: &str) -> Option<Vec<SemanticToken>> {
let id = id.parse::<u64>().ok()?;
match self.last_sent.take() {
Some(cached) if cached.id == id => Some(cached.tokens),
Some(cached) => {
// replace after taking
self.last_sent = Some(cached);
None
}
None => None,
}
}
pub fn cache_result(&mut self, tokens: Vec<SemanticToken>) -> String {
let id = self.get_next_id();
let cached = CachedTokens { tokens, id };
self.last_sent = Some(cached);
id.to_string()
}
fn get_next_id(&mut self) -> u64 {
let id = self.next_id;
self.next_id += 1;
id
}
}
pub fn token_delta(from: &[SemanticToken], to: &[SemanticToken]) -> Vec<SemanticTokensEdit> {
// Taken from `rust-analyzer`'s algorithm
// https://github.com/rust-lang/rust-analyzer/blob/master/crates/rust-analyzer/src/semantic_tokens.rs#L219
let start = from
.iter()
.zip(to.iter())
.take_while(|(x, y)| x == y)
.count();
let (_, from) = from.split_at(start);
let (_, to) = to.split_at(start);
let dist_from_end = from
.iter()
.rev()
.zip(to.iter().rev())
.take_while(|(x, y)| x == y)
.count();
let (from, _) = from.split_at(from.len() - dist_from_end);
let (to, _) = to.split_at(to.len() - dist_from_end);
if from.is_empty() && to.is_empty() {
vec![]
} else {
vec![SemanticTokensEdit {
start: 5 * start as u32,
delete_count: 5 * from.len() as u32,
data: Some(to.into()),
}]
}
}
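`token_delta` trims the common prefix and suffix and replaces only the window in between; the factor of 5 appears because every `SemanticToken` is flattened to five `u32`s in the encoded LSP array. A self-contained sketch of the trimming on plain integers (names are illustrative):

fn edit_window(from: &[u32], to: &[u32]) -> (usize, usize, Vec<u32>) {
    // Length of the shared prefix.
    let prefix = from.iter().zip(to).take_while(|(x, y)| x == y).count();
    // Length of the shared suffix; starting past the prefix prevents overlap.
    let suffix = from[prefix..]
        .iter()
        .rev()
        .zip(to[prefix..].iter().rev())
        .take_while(|(x, y)| x == y)
        .count();
    let replacement = to[prefix..to.len() - suffix].to_vec();
    (prefix, from.len() - prefix - suffix, replacement)
}

// edit_window(&[1, 2, 3, 4], &[1, 9, 4]) == (1, 2, vec![9]):
// keep `1`, delete `2, 3`, insert `9`, keep `4`.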

@@ -0,0 +1,235 @@
use itertools::Itertools;
use parking_lot::RwLock;
use strum::IntoEnumIterator;
use tower_lsp::lsp_types::{
Registration, SemanticToken, SemanticTokensEdit, SemanticTokensFullOptions,
SemanticTokensLegend, SemanticTokensOptions, Unregistration,
};
use typst::diag::EcoString;
use typst::syntax::{ast, LinkedNode, Source, SyntaxKind};
use crate::PositionEncoding;
use self::delta::token_delta;
use self::modifier_set::ModifierSet;
use self::token_encode::encode_tokens;
use self::typst_tokens::{Modifier, TokenType};
pub use self::delta::CacheInner as TokenCacheInner;
mod delta;
mod modifier_set;
mod token_encode;
mod typst_tokens;
pub fn get_legend() -> SemanticTokensLegend {
SemanticTokensLegend {
token_types: TokenType::iter().map(Into::into).collect(),
token_modifiers: Modifier::iter().map(Into::into).collect(),
}
}
const SEMANTIC_TOKENS_REGISTRATION_ID: &str = "semantic_tokens";
const SEMANTIC_TOKENS_METHOD_ID: &str = "textDocument/semanticTokens";
pub fn get_semantic_tokens_registration(options: SemanticTokensOptions) -> Registration {
Registration {
id: SEMANTIC_TOKENS_REGISTRATION_ID.to_owned(),
method: SEMANTIC_TOKENS_METHOD_ID.to_owned(),
register_options: Some(
serde_json::to_value(options)
.expect("semantic tokens options should be representable as JSON value"),
),
}
}
pub fn get_semantic_tokens_unregistration() -> Unregistration {
Unregistration {
id: SEMANTIC_TOKENS_REGISTRATION_ID.to_owned(),
method: SEMANTIC_TOKENS_METHOD_ID.to_owned(),
}
}
pub fn get_semantic_tokens_options() -> SemanticTokensOptions {
SemanticTokensOptions {
legend: get_legend(),
full: Some(SemanticTokensFullOptions::Delta { delta: Some(true) }),
..Default::default()
}
}
#[derive(Default)]
pub struct SemanticTokenCache(RwLock<TokenCacheInner>);
impl SemanticTokenCache {
pub fn get_semantic_tokens_full(
&self,
source: &Source,
encoding: PositionEncoding,
) -> (Vec<SemanticToken>, String) {
let root = LinkedNode::new(source.root());
let tokens = tokenize_tree(&root, ModifierSet::empty());
let encoded_tokens = encode_tokens(tokens, source, encoding);
let output_tokens = encoded_tokens.map(|(token, _)| token).collect_vec();
let result_id = self.0.write().cache_result(output_tokens.clone());
(output_tokens, result_id)
}
pub fn try_semantic_tokens_delta_from_result_id(
&self,
source: &Source,
result_id: &str,
encoding: PositionEncoding,
) -> (Result<Vec<SemanticTokensEdit>, Vec<SemanticToken>>, String) {
let cached = self.0.write().try_take_result(result_id);
        // this call will overwrite the cache, so we need to read from the cache first
let (tokens, result_id) = self.get_semantic_tokens_full(source, encoding);
match cached {
Some(cached) => (Ok(token_delta(&cached, &tokens)), result_id),
None => (Err(tokens), result_id),
}
}
}
fn tokenize_single_node(node: &LinkedNode, modifiers: ModifierSet) -> Option<Token> {
let is_leaf = node.children().next().is_none();
token_from_node(node)
.or_else(|| is_leaf.then_some(TokenType::Text))
.map(|token_type| Token::new(token_type, modifiers, node))
}
/// Tokenize a node and its children
fn tokenize_tree<'a>(
root: &LinkedNode<'a>,
parent_modifiers: ModifierSet,
) -> Box<dyn Iterator<Item = Token> + 'a> {
let modifiers = parent_modifiers | modifiers_from_node(root);
let token = tokenize_single_node(root, modifiers).into_iter();
let children = root
.children()
.flat_map(move |child| tokenize_tree(&child, modifiers));
Box::new(token.chain(children))
}
pub struct Token {
pub token_type: TokenType,
pub modifiers: ModifierSet,
pub offset: usize,
pub source: EcoString,
}
impl Token {
pub fn new(token_type: TokenType, modifiers: ModifierSet, node: &LinkedNode) -> Self {
let source = node.get().clone().into_text();
Self {
token_type,
modifiers,
offset: node.offset(),
source,
}
}
}
/// Determines the [`Modifier`]s to be applied to a node and all its children.
///
/// Note that this does not recurse up, so calling it on a child node will not
/// return modifiers that the node inherits from its ancestors.
fn modifiers_from_node(node: &LinkedNode) -> ModifierSet {
match node.kind() {
SyntaxKind::Emph => ModifierSet::new(&[Modifier::Emph]),
SyntaxKind::Strong => ModifierSet::new(&[Modifier::Strong]),
SyntaxKind::Math | SyntaxKind::Equation => ModifierSet::new(&[Modifier::Math]),
_ => ModifierSet::empty(),
}
}
/// Determines the best [`TokenType`] for an entire node and its children, if
/// any. If there is no single `TokenType`, or none better than `Text`, returns
/// `None`.
///
/// In tokenization, returning `Some` stops recursion, while returning `None`
/// continues and attempts to tokenize each of `node`'s children. If there are
/// no children, `Text` is taken as the default.
fn token_from_node(node: &LinkedNode) -> Option<TokenType> {
use SyntaxKind::*;
match node.kind() {
Star if node.parent_kind() == Some(Strong) => Some(TokenType::Punctuation),
Star if node.parent_kind() == Some(ModuleImport) => Some(TokenType::Operator),
Underscore if node.parent_kind() == Some(Emph) => Some(TokenType::Punctuation),
Underscore if node.parent_kind() == Some(MathAttach) => Some(TokenType::Operator),
MathIdent | Ident => Some(token_from_ident(node)),
Hash => token_from_hashtag(node),
LeftBrace | RightBrace | LeftBracket | RightBracket | LeftParen | RightParen | Comma
| Semicolon | Colon => Some(TokenType::Punctuation),
Linebreak | Escape | Shorthand => Some(TokenType::Escape),
Link => Some(TokenType::Link),
Raw => Some(TokenType::Raw),
Label => Some(TokenType::Label),
RefMarker => Some(TokenType::Ref),
Heading | HeadingMarker => Some(TokenType::Heading),
ListMarker | EnumMarker | TermMarker => Some(TokenType::ListMarker),
MathAlignPoint | Plus | Minus | Slash | Hat | Dot | Eq | EqEq | ExclEq | Lt | LtEq | Gt
| GtEq | PlusEq | HyphEq | StarEq | SlashEq | Dots | Arrow | Not | And | Or => {
Some(TokenType::Operator)
}
Dollar => Some(TokenType::Delimiter),
None | Auto | Let | Show | If | Else | For | In | While | Break | Continue | Return
| Import | Include | As | Set => Some(TokenType::Keyword),
Bool => Some(TokenType::Bool),
Int | Float | Numeric => Some(TokenType::Number),
Str => Some(TokenType::String),
LineComment | BlockComment => Some(TokenType::Comment),
Error => Some(TokenType::Error),
// Disambiguate from `SyntaxKind::None`
_ => Option::None,
}
}
// TODO: differentiate also using tokens in scope, not just context
fn is_function_ident(ident: &LinkedNode) -> bool {
let Some(next) = ident.next_leaf() else {
return false;
};
let function_call = matches!(next.kind(), SyntaxKind::LeftParen)
&& matches!(
next.parent_kind(),
Some(SyntaxKind::Args | SyntaxKind::Params)
);
let function_content = matches!(next.kind(), SyntaxKind::LeftBracket)
&& matches!(next.parent_kind(), Some(SyntaxKind::ContentBlock));
function_call || function_content
}
fn token_from_ident(ident: &LinkedNode) -> TokenType {
if is_function_ident(ident) {
TokenType::Function
} else {
TokenType::Interpolated
}
}
fn get_expr_following_hashtag<'a>(hashtag: &LinkedNode<'a>) -> Option<LinkedNode<'a>> {
hashtag
.next_sibling()
.filter(|next| next.cast::<ast::Expr>().map_or(false, |expr| expr.hash()))
.and_then(|node| node.leftmost_leaf())
}
fn token_from_hashtag(hashtag: &LinkedNode) -> Option<TokenType> {
get_expr_following_hashtag(hashtag)
.as_ref()
.and_then(token_from_node)
}
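The shape of `tokenize_tree` (yield the node's own token, then lazily chain the tokens of its children, boxing the recursive iterator) is a generic pattern. A minimal standalone sketch with a toy tree type:

struct Node {
    value: Option<u32>,
    children: Vec<Node>,
}

fn flatten(node: &Node) -> Box<dyn Iterator<Item = u32> + '_> {
    // The node's own value (if any) comes first, then all descendants in
    // depth-first order, without materializing an intermediate Vec.
    let own = node.value.into_iter();
    let children = node.children.iter().flat_map(flatten);
    Box::new(own.chain(children))
}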

@@ -0,0 +1,33 @@
use std::ops;
use super::typst_tokens::Modifier;
#[derive(Default, Clone, Copy)]
pub struct ModifierSet(u32);
impl ModifierSet {
pub fn empty() -> Self {
Self::default()
}
pub fn new(modifiers: &[Modifier]) -> Self {
let bits = modifiers
.iter()
.copied()
.map(Modifier::bitmask)
.fold(0, |bits, mask| bits | mask);
Self(bits)
}
pub fn bitset(self) -> u32 {
self.0
}
}
impl ops::BitOr for ModifierSet {
type Output = Self;
fn bitor(self, rhs: Self) -> Self::Output {
Self(self.0 | rhs.0)
}
}
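A test-style sketch of how the masks compose, assuming the `Modifier` declaration order shown later in this commit (`Strong` = 0, `Emph` = 1, `Math` = 2):

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn bitmasks_compose_with_bitor() {
        // Strong has discriminant 0 (mask 0b001); Math has 2 (mask 0b100).
        let set = ModifierSet::new(&[Modifier::Strong]) | ModifierSet::new(&[Modifier::Math]);
        assert_eq!(set.bitset(), 0b101);
    }
}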

@@ -0,0 +1,86 @@
use tower_lsp::lsp_types::{Position, SemanticToken};
use typst::diag::EcoString;
use typst::syntax::Source;
use crate::typst_to_lsp;
use crate::PositionEncoding;
use super::Token;
pub(super) fn encode_tokens<'a>(
tokens: impl Iterator<Item = Token> + 'a,
source: &'a Source,
encoding: PositionEncoding,
) -> impl Iterator<Item = (SemanticToken, EcoString)> + 'a {
tokens.scan(Position::new(0, 0), move |last_position, token| {
let (encoded_token, source_code, position) =
encode_token(token, last_position, source, encoding);
*last_position = position;
Some((encoded_token, source_code))
})
}
fn encode_token(
token: Token,
last_position: &Position,
source: &Source,
encoding: PositionEncoding,
) -> (SemanticToken, EcoString, Position) {
let position = typst_to_lsp::offset_to_position(token.offset, encoding, source);
let delta = last_position.delta(&position);
let length = token.source.as_str().encoded_len(encoding);
let lsp_token = SemanticToken {
delta_line: delta.delta_line,
delta_start: delta.delta_start,
length: length as u32,
token_type: token.token_type as u32,
token_modifiers_bitset: token.modifiers.bitset(),
};
(lsp_token, token.source, position)
}
pub trait StrExt {
fn encoded_len(&self, encoding: PositionEncoding) -> usize;
}
impl StrExt for str {
fn encoded_len(&self, encoding: PositionEncoding) -> usize {
match encoding {
PositionEncoding::Utf8 => self.len(),
PositionEncoding::Utf16 => self.chars().map(char::len_utf16).sum(),
}
}
}
pub trait PositionExt {
fn delta(&self, to: &Self) -> PositionDelta;
}
impl PositionExt for Position {
/// Calculates the delta from `self` to `to`. This is in the `SemanticToken`
/// sense, so the delta's `character` is relative to `self`'s
/// `character` iff `self` and `to` are on the same line. Otherwise,
/// it's relative to the start of the line `to` is on.
fn delta(&self, to: &Self) -> PositionDelta {
let line_delta = to.line - self.line;
let char_delta = if line_delta == 0 {
to.character - self.character
} else {
to.character
};
PositionDelta {
delta_line: line_delta,
delta_start: char_delta,
}
}
}
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Default)]
pub struct PositionDelta {
pub delta_line: u32,
pub delta_start: u32,
}
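A test-style sketch of the two delta cases, assuming the `PositionExt` trait above is in scope:

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn delta_is_relative_within_a_line() {
        // Same line: delta_start counts from the previous token's start.
        let same_line = Position::new(2, 10).delta(&Position::new(2, 14));
        assert_eq!(same_line, PositionDelta { delta_line: 0, delta_start: 4 });
        // Later line: delta_start restarts from column 0 of the new line.
        let next_line = Position::new(2, 10).delta(&Position::new(4, 3));
        assert_eq!(next_line, PositionDelta { delta_line: 2, delta_start: 3 });
    }
}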

@@ -0,0 +1,133 @@
//! Types for tokens used for Typst syntax
use strum::EnumIter;
use tower_lsp::lsp_types::{SemanticTokenModifier, SemanticTokenType};
const BOOL: SemanticTokenType = SemanticTokenType::new("bool");
const PUNCTUATION: SemanticTokenType = SemanticTokenType::new("punct");
const ESCAPE: SemanticTokenType = SemanticTokenType::new("escape");
const LINK: SemanticTokenType = SemanticTokenType::new("link");
const RAW: SemanticTokenType = SemanticTokenType::new("raw");
const LABEL: SemanticTokenType = SemanticTokenType::new("label");
const REF: SemanticTokenType = SemanticTokenType::new("ref");
const HEADING: SemanticTokenType = SemanticTokenType::new("heading");
const LIST_MARKER: SemanticTokenType = SemanticTokenType::new("marker");
const LIST_TERM: SemanticTokenType = SemanticTokenType::new("term");
const DELIMITER: SemanticTokenType = SemanticTokenType::new("delim");
const INTERPOLATED: SemanticTokenType = SemanticTokenType::new("pol");
const ERROR: SemanticTokenType = SemanticTokenType::new("error");
const TEXT: SemanticTokenType = SemanticTokenType::new("text");
/// Very similar to [`typst_ide::Tag`], but with convenience traits, and
/// extensible because we want to further customize highlighting
#[derive(Clone, Copy, EnumIter)]
#[repr(u32)]
pub enum TokenType {
// Standard LSP types
Comment,
String,
Keyword,
Operator,
Number,
Function,
Decorator,
// Custom types
Bool,
Punctuation,
Escape,
Link,
Raw,
Label,
Ref,
Heading,
ListMarker,
ListTerm,
Delimiter,
Interpolated,
Error,
    /// Any text in markup without a more specific token type, possibly styled.
///
/// We perform styling (like bold and italics) via modifiers. That means
/// everything that should receive styling needs to be a token so we can
/// apply a modifier to it. This token type is mostly for that, since
/// text should usually not be specially styled.
Text,
}
impl From<TokenType> for SemanticTokenType {
fn from(token_type: TokenType) -> Self {
use TokenType::*;
match token_type {
Comment => Self::COMMENT,
String => Self::STRING,
Keyword => Self::KEYWORD,
Operator => Self::OPERATOR,
Number => Self::NUMBER,
Function => Self::FUNCTION,
Decorator => Self::DECORATOR,
Bool => BOOL,
Punctuation => PUNCTUATION,
Escape => ESCAPE,
Link => LINK,
Raw => RAW,
Label => LABEL,
Ref => REF,
Heading => HEADING,
ListMarker => LIST_MARKER,
ListTerm => LIST_TERM,
Delimiter => DELIMITER,
Interpolated => INTERPOLATED,
Error => ERROR,
Text => TEXT,
}
}
}
const STRONG: SemanticTokenModifier = SemanticTokenModifier::new("strong");
const EMPH: SemanticTokenModifier = SemanticTokenModifier::new("emph");
const MATH: SemanticTokenModifier = SemanticTokenModifier::new("math");
#[derive(Clone, Copy, EnumIter)]
#[repr(u8)]
pub enum Modifier {
Strong,
Emph,
Math,
}
impl Modifier {
pub fn index(self) -> u8 {
self as u8
}
pub fn bitmask(self) -> u32 {
0b1 << self.index()
}
}
impl From<Modifier> for SemanticTokenModifier {
fn from(modifier: Modifier) -> Self {
use Modifier::*;
match modifier {
Strong => STRONG,
Emph => EMPH,
Math => MATH,
}
}
}
#[cfg(test)]
mod test {
use strum::IntoEnumIterator;
use super::*;
#[test]
fn ensure_not_too_many_modifiers() {
// Because modifiers are encoded in a 32 bit bitmask, we can't have more than 32
// modifiers
assert!(Modifier::iter().len() <= 32);
}
}

@@ -0,0 +1,37 @@
use crate::{prelude::*, SemanticTokenCache};
#[derive(Debug, Clone)]
pub struct SemanticTokensDeltaRequest {
pub path: PathBuf,
pub previous_result_id: String,
pub position_encoding: PositionEncoding,
}
pub fn semantic_tokens_delta(
cache: &SemanticTokenCache,
source: Source,
req: SemanticTokensDeltaRequest,
) -> Option<SemanticTokensFullDeltaResult> {
let (tokens, result_id) = cache.try_semantic_tokens_delta_from_result_id(
&source,
&req.previous_result_id,
req.position_encoding,
);
match tokens {
Ok(edits) => Some(
SemanticTokensDelta {
result_id: Some(result_id),
edits,
}
.into(),
),
Err(tokens) => Some(
SemanticTokens {
result_id: Some(result_id),
data: tokens,
}
.into(),
),
}
}

@@ -0,0 +1,23 @@
use crate::{prelude::*, SemanticTokenCache};
#[derive(Debug, Clone)]
pub struct SemanticTokensFullRequest {
pub path: PathBuf,
pub position_encoding: PositionEncoding,
}
pub fn semantic_tokens_full(
cache: &SemanticTokenCache,
source: Source,
req: SemanticTokensFullRequest,
) -> Option<SemanticTokensResult> {
let (tokens, result_id) = cache.get_semantic_tokens_full(&source, req.position_encoding);
Some(
SemanticTokens {
result_id: Some(result_id),
data: tokens,
}
.into(),
)
}

@@ -0,0 +1,165 @@
use crate::prelude::*;
#[derive(Debug, Clone)]
pub struct SignatureHelpRequest {
pub path: PathBuf,
pub position: LspPosition,
pub position_encoding: PositionEncoding,
}
pub fn signature_help(
world: &TypstSystemWorld,
SignatureHelpRequest {
path,
position,
position_encoding,
}: SignatureHelpRequest,
) -> Option<SignatureHelp> {
let source = get_suitable_source_in_workspace(world, &path).ok()?;
let typst_offset = lsp_to_typst::position_to_offset(position, position_encoding, &source);
let ast_node = LinkedNode::new(source.root()).leaf_at(typst_offset)?;
let (callee, callee_node, args) = surrounding_function_syntax(&ast_node)?;
let mut ancestor = &ast_node;
while !ancestor.is::<ast::Expr>() {
ancestor = ancestor.parent()?;
}
if !callee.hash() && !matches!(callee, ast::Expr::MathIdent(_)) {
return None;
}
let values = analyze_expr(world, &callee_node);
let function = values.into_iter().find_map(|v| match v {
Value::Func(f) => Some(f),
_ => None,
})?;
trace!("got function {function:?}");
let param_index = param_index_at_leaf(&ast_node, &function, args);
let label = format!(
"{}({}){}",
function.name().unwrap_or("<anonymous closure>"),
match function.params() {
Some(params) => params
.iter()
.map(typst_to_lsp::param_info_to_label)
.join(", "),
None => "".to_owned(),
},
match function.returns() {
Some(returns) => format!("-> {}", typst_to_lsp::cast_info_to_label(returns)),
None => "".to_owned(),
}
);
let params = function
.params()
.unwrap_or_default()
.iter()
.map(typst_to_lsp::param_info)
.collect();
trace!("got signature info {label} {params:?}");
let documentation = function.docs().map(markdown_docs);
let active_parameter = param_index.map(|i| i as u32);
Some(SignatureHelp {
signatures: vec![SignatureInformation {
label,
documentation,
parameters: Some(params),
active_parameter,
}],
active_signature: Some(0),
active_parameter: None,
})
}
fn surrounding_function_syntax<'b>(
leaf: &'b LinkedNode,
) -> Option<(ast::Expr<'b>, LinkedNode<'b>, ast::Args<'b>)> {
let parent = leaf.parent()?;
let parent = match parent.kind() {
SyntaxKind::Named => parent.parent()?,
_ => parent,
};
let args = parent.cast::<ast::Args>()?;
let grand = parent.parent()?;
let expr = grand.cast::<ast::Expr>()?;
let callee = match expr {
ast::Expr::FuncCall(call) => call.callee(),
ast::Expr::Set(set) => set.target(),
_ => return None,
};
Some((callee, grand.find(callee.span())?, args))
}
fn param_index_at_leaf(leaf: &LinkedNode, function: &Func, args: ast::Args) -> Option<usize> {
let deciding = deciding_syntax(leaf);
let params = function.params()?;
let param_index = find_param_index(&deciding, params, args)?;
trace!("got param index {param_index}");
Some(param_index)
}
/// Find the piece of syntax that decides what we're completing.
fn deciding_syntax<'b>(leaf: &'b LinkedNode) -> LinkedNode<'b> {
let mut deciding = leaf.clone();
while !matches!(
deciding.kind(),
SyntaxKind::LeftParen | SyntaxKind::Comma | SyntaxKind::Colon
) {
let Some(prev) = deciding.prev_leaf() else {
break;
};
deciding = prev;
}
deciding
}
fn find_param_index(deciding: &LinkedNode, params: &[ParamInfo], args: ast::Args) -> Option<usize> {
match deciding.kind() {
// After colon: "func(param:|)", "func(param: |)".
SyntaxKind::Colon => {
let prev = deciding.prev_leaf()?;
let param_ident = prev.cast::<ast::Ident>()?;
params
.iter()
.position(|param| param.name == param_ident.as_str())
}
// Before: "func(|)", "func(hi|)", "func(12,|)".
SyntaxKind::Comma | SyntaxKind::LeftParen => {
let next = deciding.next_leaf();
let following_param = next.as_ref().and_then(|next| next.cast::<ast::Ident>());
match following_param {
Some(next) => params
.iter()
.position(|param| param.named && param.name.starts_with(next.as_str())),
None => {
let positional_args_so_far = args
.items()
.filter(|arg| matches!(arg, ast::Arg::Pos(_)))
.count();
params
.iter()
.enumerate()
.filter(|(_, param)| param.positional)
.map(|(i, _)| i)
.nth(positional_args_so_far)
}
}
}
_ => None,
}
}
fn markdown_docs(docs: &str) -> Documentation {
Documentation::MarkupContent(MarkupContent {
kind: MarkupKind::Markdown,
value: docs.to_owned(),
})
}
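For reference, a dependency-free sketch of the `label` string built above, with plain strings in place of `ParamInfo`/`CastInfo` (the `clamp` signature is a made-up example; note that the original `format!` glues the arrow directly to the closing parenthesis):

fn signature_label(name: &str, params: &[(&str, &str)], returns: Option<&str>) -> String {
    let params = params
        .iter()
        .map(|(name, ty)| format!("{name}: {ty}"))
        .collect::<Vec<_>>()
        .join(", ");
    match returns {
        Some(ret) => format!("{name}({params})-> {ret}"),
        None => format!("{name}({params})"),
    }
}

// signature_label("clamp", &[("value", "int"), ("min", "int")], Some("int"))
//   == "clamp(value: int, min: int)-> int"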

@@ -0,0 +1,50 @@
use typst_ts_compiler::NotifyApi;
use crate::document_symbol::get_document_symbols;
use crate::prelude::*;
#[derive(Debug, Clone)]
pub struct SymbolRequest {
pub pattern: Option<String>,
pub position_encoding: PositionEncoding,
}
pub fn symbol(
world: &TypstSystemWorld,
SymbolRequest {
pattern,
position_encoding,
}: SymbolRequest,
) -> Option<Vec<SymbolInformation>> {
// todo: expose source
let mut symbols = vec![];
world.iter_dependencies(&mut |path, _| {
let Ok(source) = get_suitable_source_in_workspace(world, path) else {
return;
};
let uri = Url::from_file_path(path).unwrap();
let res = get_document_symbols(source, uri, position_encoding).and_then(|symbols| {
pattern
.as_ref()
.map(|pattern| filter_document_symbols(symbols, pattern))
});
if let Some(mut res) = res {
symbols.append(&mut res)
}
});
Some(symbols)
}
fn filter_document_symbols(
symbols: Vec<SymbolInformation>,
query_string: &str,
) -> Vec<SymbolInformation> {
symbols
.into_iter()
.filter(|e| e.name.contains(query_string))
.collect()
}