feat: support offline definition queries (LSIF) (#2032)

The purpose of the [Language Server Index Format (LSIF)][LSIF] is to
define a standard format for language servers or other programming tools
to dump their knowledge about a workspace.

Use cases:
- providing a stable JSON format for package docs:
  - the unstable export was supported in #1809
- caching analysis results in this format
- if we export Typst docs to LSIF, existing tools such as
  [lsif-node](https://github.com/Microsoft/lsif-node) can help people read
  Typst code
- if we have a Typst package that renders docs from LSIF, it can also be
  used to render docs for other programming languages, for example
  [OCaml](https://github.com/rvantonder/lsif-ocaml) and
  [TypeScript](https://github.com/sourcegraph/lsif-node)

LSIF is not the only index format, so we may reconsider and adopt another
one in the future, such as [SCIP](https://sourcegraph.com/blog/announcing-scip).
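For illustration, a dump is a stream of JSON objects, one per line. With
the vertex and edge types added in this commit, the first few lines of a
dump look roughly like this (ids and paths are invented):

```json
{"id":1,"type":"vertex","label":"metaData","version":"0.6.0","projectRoot":"file:///path/to/package","positionEncoding":"utf-16","toolInfo":{"name":"tinymist"}}
{"id":2,"type":"vertex","label":"document","uri":"file:///path/to/package/lib.typ","languageId":"typst"}
{"id":9,"type":"edge","label":"contains","inVs":[3,5,7],"outV":2}
```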


[LSIF]:
https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/
Myriad-Dreamin 2025-09-29 23:19:49 +08:00 committed by GitHub
parent a4d256d75b
commit d617b145e2
8 changed files with 874 additions and 111 deletions


@@ -35,7 +35,7 @@ where
/// Registers a raw request handler that handles a kind of untyped lsp
/// request.
pub fn with_raw_request<R: dapts::IRequest>(
pub fn with_raw_request<R: IRequest>(
mut self,
handler: RawHandler<Args::S, JsonValue>,
) -> Self {
@@ -46,7 +46,7 @@ where
// todo: unsafe typed
/// Registers a raw request handler that handles a kind of typed lsp
/// request.
pub fn with_request_<R: dapts::IRequest>(
pub fn with_request_<R: IRequest>(
mut self,
handler: fn(&mut Args::S, R::Arguments) -> ScheduleResult,
) -> Self {
@@ -58,7 +58,7 @@ where
}
/// Registers a typed request handler.
pub fn with_request<R: dapts::IRequest>(
pub fn with_request<R: IRequest>(
mut self,
handler: AsyncHandler<Args::S, R::Arguments, R::Response>,
) -> Self {
@@ -70,6 +70,7 @@ where
}
}
#[cfg(feature = "system")]
impl<Args: Initializer> LsDriver<DapMessage, Args>
where
Args::S: 'static,
@@ -81,7 +82,6 @@ where
///
/// See [`transport::MirrorArgs`] for information about the record-replay
/// feature.
#[cfg(feature = "system")]
pub fn start(
&mut self,
inbox: TConnectionRx<DapMessage>,
@@ -115,7 +115,6 @@
}
/// Starts the debug adaptor on the given connection.
#[cfg(feature = "system")]
pub fn start_(&mut self, inbox: TConnectionRx<DapMessage>) -> anyhow::Result<()> {
use EventOrMessage::*;


@@ -1,27 +1,53 @@
use std::path::{Path, PathBuf};
use std::str::FromStr;
use std::sync::Arc;
use futures::future::MaybeDone;
use reflexo_typst::package::PackageSpec;
use sync_ls::transport::{MirrorArgs, with_stdio_transport};
use sync_ls::{LspBuilder, LspMessage, LspResult, internal_error};
use tinymist::{Config, ServerState, SuperInit};
use tinymist::Config;
use tinymist_project::WorldProvider;
use tinymist_query::analysis::Analysis;
use tinymist_query::package::PackageInfo;
use tinymist_std::error::prelude::*;
use typlite::CompileOnceArgs;
use crate::*;
/// The commands for language server queries.
#[derive(Debug, Clone, clap::Subcommand)]
#[clap(rename_all = "camelCase")]
pub enum QueryCommands {
/// Get the LSIF dump for a specific package.
Lsif(QueryLsifArgs),
/// Get the documentation for a specific package.
PackageDocs(PackageDocsArgs),
/// Check a specific package.
CheckPackage(PackageDocsArgs),
}
#[derive(Debug, Clone, clap::Parser)]
pub struct QueryLsifArgs {
/// Compile a document once before querying.
#[clap(flatten)]
pub compile: CompileOnceArgs,
/// The path of the package to request LSIF for.
#[clap(long)]
pub path: Option<String>,
/// The id of the package to request LSIF for.
#[clap(long)]
pub id: String,
/// The output path for the requested LSIF.
#[clap(short, long)]
pub output: String,
// /// The format of requested lsif.
// #[clap(long)]
// pub format: Option<QueryDocsFormat>,
}
#[derive(Debug, Clone, clap::Parser)]
pub struct PackageDocsArgs {
/// Compile a document once before querying.
#[clap(flatten)]
pub compile: CompileOnceArgs,
/// The path of the package to request docs for.
#[clap(long)]
pub path: Option<String>,
@@ -37,78 +63,87 @@ pub struct PackageDocsArgs {
}
/// The main entry point for language server queries.
pub fn query_main(cmds: QueryCommands) -> Result<()> {
pub fn query_main(mut cmds: QueryCommands) -> Result<()> {
use tinymist_project::package::PackageRegistry;
let (config, _) = Config::extract_lsp_params(Default::default(), Default::default());
let const_config = &config.const_config;
let analysis = Arc::new(Analysis {
position_encoding: const_config.position_encoding,
allow_overlapping_token: const_config.tokens_overlapping_token_support,
allow_multiline_token: const_config.tokens_multiline_token_support,
remove_html: !config.support_html_in_markdown,
extended_code_action: config.extended_code_action,
completion_feat: config.completion.clone(),
color_theme: match config.color_theme.as_deref() {
Some("dark") => tinymist_query::ColorTheme::Dark,
_ => tinymist_query::ColorTheme::Light,
},
lint: config.lint.when().clone(),
periscope: None,
tokens_caches: Arc::default(),
workers: Default::default(),
caches: Default::default(),
analysis_rev_cache: Arc::default(),
stats: Arc::default(),
});
with_stdio_transport::<LspMessage>(MirrorArgs::default(), |conn| {
let client_root = client_root(conn.sender);
let client = client_root.weak();
let compile = match &mut cmds {
QueryCommands::Lsif(args) => &mut args.compile,
QueryCommands::PackageDocs(args) => &mut args.compile,
QueryCommands::CheckPackage(args) => &mut args.compile,
};
if compile.input.is_none() {
compile.input = Some("main.typ".to_string());
}
let verse = compile.resolve()?;
let snap = verse.computation();
let snap = analysis.query_snapshot(snap, None);
// todo: roots, inputs, font_opts
let config = Config::default();
let (id, path) = match &cmds {
QueryCommands::Lsif(args) => (&args.id, &args.path),
QueryCommands::PackageDocs(args) => (&args.id, &args.path),
QueryCommands::CheckPackage(args) => (&args.id, &args.path),
};
let pkg = PackageSpec::from_str(id).unwrap();
let path = path.as_ref().map(PathBuf::from);
let path = path.unwrap_or_else(|| snap.registry().resolve(&pkg).unwrap().as_ref().into());
let mut service = ServerState::install_lsp(LspBuilder::new(
SuperInit {
client: client.to_typed(),
exec_cmds: Vec::new(),
config,
err: None,
},
client.clone(),
))
.build();
let info = PackageInfo {
path,
namespace: pkg.namespace,
name: pkg.name,
version: pkg.version.to_string(),
};
let resp = service.ready(()).unwrap();
let MaybeDone::Done(resp) = resp else {
anyhow::bail!("internal error: not sync init")
};
resp.unwrap();
match cmds {
QueryCommands::Lsif(args) => {
let res = snap.run_within_package(&info, move |a| {
let knowledge = tinymist_query::index::knowledge(a)
.map_err(map_string_err("failed to generate index"))?;
Ok(knowledge.bind(a.shared()).to_string())
})?;
let state = service.state_mut().unwrap();
let output_path = Path::new(&args.output);
std::fs::write(output_path, res).context_ut("failed to write lsif output")?;
}
QueryCommands::PackageDocs(args) => {
let res = snap.run_within_package(&info, |a| {
let doc = tinymist_query::docs::package_docs(a, &info)
.map_err(map_string_err("failed to generate docs"))?;
tinymist_query::docs::package_docs_md(&doc)
.map_err(map_string_err("failed to generate docs"))
})?;
let snap = state.snapshot().unwrap();
let res = RUNTIMES.tokio_runtime.block_on(async move {
match cmds {
QueryCommands::PackageDocs(args) => {
let pkg = PackageSpec::from_str(&args.id).unwrap();
let path = args.path.map(PathBuf::from);
let path = path
.unwrap_or_else(|| snap.registry().resolve(&pkg).unwrap().as_ref().into());
let res = state
.resource_package_docs_(PackageInfo {
path,
namespace: pkg.namespace,
name: pkg.name,
version: pkg.version.to_string(),
})?
.await?;
let output_path = Path::new(&args.output);
std::fs::write(output_path, res).map_err(internal_error)?;
}
QueryCommands::CheckPackage(args) => {
let pkg = PackageSpec::from_str(&args.id).unwrap();
let path = args.path.map(PathBuf::from);
let path = path
.unwrap_or_else(|| snap.registry().resolve(&pkg).unwrap().as_ref().into());
state
.check_package(PackageInfo {
path,
namespace: pkg.namespace,
name: pkg.name,
version: pkg.version.to_string(),
})?
.await?;
}
};
LspResult::Ok(())
});
res.map_err(|e| anyhow::anyhow!("{e:?}"))
})?;
let output_path = Path::new(&args.output);
std::fs::write(output_path, res).context_ut("failed to write package docs")?;
}
QueryCommands::CheckPackage(_args) => {
snap.run_within_package(&info, |a| {
tinymist_query::package::check_package(a, &info)
.map_err(map_string_err("failed to check package"))
})?;
}
};
Ok(())
}
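Assuming the `query` subcommand is wired into the main tinymist CLI as the
clap definitions above suggest, an invocation would look roughly like this
(the package id is a placeholder; `--input` defaults to `main.typ` as set
in `query_main`):

```sh
tinymist query lsif --id "@preview/example:0.1.0" --output index.lsif
```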


@@ -23,6 +23,7 @@ pub mod signature;
pub use signature::*;
pub mod semantic_tokens;
pub use semantic_tokens::*;
use tinymist_std::error::WithContextUntyped;
mod post_tyck;
mod tyck;
pub(crate) use crate::ty::*;
@@ -39,8 +40,8 @@ use ecow::eco_format;
use lsp_types::Url;
use tinymist_project::LspComputeGraph;
use tinymist_std::{Result, bail};
use tinymist_world::{EntryReader, TaskInputs};
use typst::diag::{FileError, FileResult};
use tinymist_world::{EntryReader, EntryState, TaskInputs};
use typst::diag::{FileError, FileResult, StrResult};
use typst::foundations::{Func, Value};
use typst::syntax::FileId;
@@ -131,6 +132,36 @@ impl LspQuerySnapshot {
let mut ctx = self.analysis.enter_(world, self.rev_lock);
Ok(f(&mut ctx))
}
/// Runs an analysis function with the given package's entrypoint as the entry.
pub fn run_within_package<T>(
self,
info: &crate::package::PackageInfo,
f: impl FnOnce(&mut LocalContextGuard) -> Result<T> + Send + Sync,
) -> Result<T> {
let world = self.world();
let entry: StrResult<EntryState> = Ok(()).and_then(|_| {
let toml_id = crate::package::get_manifest_id(info)?;
let toml_path = world.path_for_id(toml_id)?.as_path().to_owned();
let pkg_root = toml_path
.parent()
.ok_or_else(|| eco_format!("cannot get package root (parent of {toml_path:?})"))?;
let manifest = crate::package::get_manifest(world, toml_id)?;
let entry_point = toml_id.join(&manifest.package.entrypoint);
Ok(EntryState::new_rooted_by_id(pkg_root.into(), entry_point))
});
let entry = entry.context_ut("resolve package entry")?;
let snap = self.task(TaskInputs {
entry: Some(entry),
inputs: None,
});
snap.run_analysis(f)?
}
}
#[cfg(test)]
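For orientation, here is a hedged sketch of driving this helper outside the
LSP loop, mirroring the CLI path in `query_main` (the package spec is a
placeholder):

```rust
// Sketch only: assumes `snap: LspQuerySnapshot` was obtained via
// `analysis.query_snapshot(...)` and that the package manifest is readable.
let pkg = PackageSpec::from_str("@preview/example:0.1.0").unwrap();
let path: PathBuf = snap.registry().resolve(&pkg).unwrap().as_ref().into();
let info = PackageInfo {
    path,
    namespace: pkg.namespace,
    name: pkg.name,
    version: pkg.version.to_string(),
};
// Runs the closure with the package's entrypoint as the compilation entry.
let lsif = snap.run_within_package(&info, |a| {
    let knowledge = tinymist_query::index::knowledge(a)?;
    Ok(knowledge.bind(a.shared()).to_string())
})?;
```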


@@ -0,0 +1,358 @@
//! Dumps typst knowledge from workspace.
//!
//! Reference Impls:
//! - <https://github.com/sourcegraph/lsif-jsonnet/blob/e186f9fde623efa8735261e9cb059ad3a58b535f/dumper/dumper.go>
//! - <https://github.com/rust-lang/rust-analyzer/blob/5c0b555a65cadc14a6a16865c3e065c9d30b0bef/crates/ide/src/static_index.rs>
//! - <https://github.com/rust-lang/rust-analyzer/blob/5c0b555a65cadc14a6a16865c3e065c9d30b0bef/crates/rust-analyzer/src/cli/lsif.rs>
use core::fmt;
use std::sync::Arc;
use crate::analysis::{SemanticTokens, SharedContext};
use crate::index::protocol::ResultSet;
use crate::prelude::Definition;
use crate::{LocalContext, path_to_url};
use ecow::EcoString;
use lsp_types::Url;
use tinymist_analysis::syntax::classify_syntax;
use tinymist_std::error::WithContextUntyped;
use tinymist_std::hash::FxHashMap;
use tinymist_std::typst::TypstDocument;
use tinymist_world::EntryReader;
use typst::syntax::{FileId, LinkedNode, Source, Span};
pub mod protocol;
use protocol as p;
/// The dumped knowledge.
pub struct Knowledge {
/// The meta data.
pub meta: p::MetaData,
/// The files.
pub files: Vec<FileIndex>,
}
impl Knowledge {
/// Binds the knowledge to a shared context for dumping.
pub fn bind<'a>(&'a self, ctx: &'a Arc<SharedContext>) -> KnowledgeWithContext<'a> {
KnowledgeWithContext {
knowledge: self,
ctx,
}
}
}
/// A view of knowledge with context for dumping.
pub struct KnowledgeWithContext<'a> {
knowledge: &'a Knowledge,
ctx: &'a Arc<SharedContext>,
}
impl fmt::Display for KnowledgeWithContext<'_> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut files = FxHashMap::default();
let mut encoder = LsifEncoder {
ctx: self.ctx,
writer: f,
id: IdCounter::new(),
files: &mut files,
results: FxHashMap::default(),
};
encoder.emit_meta(&self.knowledge.meta).map_err(|err| {
log::error!("cannot write meta data: {err}");
fmt::Error
})?;
encoder.emit_files(&self.knowledge.files).map_err(|err| {
log::error!("cannot write files: {err}");
fmt::Error
})
}
}
struct IdCounter {
next: usize,
}
impl IdCounter {
fn new() -> Self {
Self { next: 0 }
}
fn next(&mut self) -> usize {
let id = self.next;
self.next += 1;
id
}
}
trait LsifWrite {
fn write_element(&mut self, id: i32, element: p::Element) -> fmt::Result;
}
impl<T: fmt::Write> LsifWrite for T {
fn write_element(&mut self, id: i32, element: p::Element) -> fmt::Result {
let entry = p::Entry { id, data: element };
self.write_str(&serde_json::to_string(&entry).unwrap())?;
self.write_char('\n')
}
}
struct LsifEncoder<'a, W: fmt::Write> {
ctx: &'a Arc<SharedContext>,
writer: &'a mut W,
id: IdCounter,
files: &'a mut FxHashMap<FileId, i32>,
results: FxHashMap<Span, i32>,
}
impl<'a, W: fmt::Write> LsifEncoder<'a, W> {
fn alloc_file_id(&mut self, fid: FileId) -> i32 {
*self.files.entry(fid).or_insert_with(|| {
let id = self.id.next() as i32;
self.writer
.write_element(
id,
p::Element::Vertex(p::Vertex::Document(&p::Document {
uri: self.ctx.uri_for_id(fid).unwrap_or_else(|err| {
log::error!("cannot get uri for {fid:?}: {err}");
Url::parse("file:///unknown").unwrap()
}),
language_id: EcoString::inline("typst"),
})),
)
.unwrap();
id
})
}
fn alloc_result_id(&mut self, span: Span) -> tinymist_std::Result<i32> {
if let Some(id) = self.results.get(&span) {
return Ok(*id);
}
let id = self.emit_element(p::Element::Vertex(p::Vertex::ResultSet(ResultSet {
key: None,
})))?;
self.results.insert(span, id);
Ok(id)
}
fn emit_element(&mut self, element: p::Element) -> tinymist_std::Result<i32> {
let id = self.id.next() as i32;
self.writer
.write_element(id, element)
.context_ut("cannot write element")?;
Ok(id)
}
fn emit_meta(&mut self, meta: &p::MetaData) -> tinymist_std::Result<()> {
let obj = p::Element::Vertex(p::Vertex::MetaData(meta));
self.emit_element(obj).map(|_| ())
}
fn emit_files(&mut self, files: &[FileIndex]) -> tinymist_std::Result<()> {
for (idx, file) in files.iter().enumerate() {
eprintln!("emit file: {:?}, {idx} of {}", file.fid, files.len());
let source = self
.ctx
.source_by_id(file.fid)
.context_ut("cannot get source")?;
let fid = self.alloc_file_id(file.fid);
let semantic_tokens_id =
self.emit_element(p::Element::Vertex(p::Vertex::SemanticTokensResult {
result: lsp_types::SemanticTokens {
result_id: None,
data: file.semantic_tokens.as_ref().clone(),
},
}))?;
self.emit_element(p::Element::Edge(p::Edge::SemanticTokens(p::EdgeData {
out_v: fid,
in_v: semantic_tokens_id,
})))?;
let tokens_id = file
.references
.iter()
.flat_map(|(k, v)| {
let rng = self.emit_span(*k, &source);
let def_rng = self.emit_def_span(v, &source, false);
rng.into_iter().chain(def_rng.into_iter())
})
.collect();
self.emit_element(p::Element::Edge(p::Edge::Contains(p::EdgeDataMultiIn {
out_v: fid,
in_vs: tokens_id,
})))?;
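// For each reference below, the encoder emits: a resultSet vertex for the
// reference's span (allocated once per span), a next edge, a
// definitionResult vertex, an item edge attaching the definition's range in
// its defining document, and a textDocument/definition edge from the
// resultSet to the definitionResult.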
for (s, def) in &file.references {
let res_id = self.alloc_result_id(*s)?;
self.emit_element(p::Element::Edge(p::Edge::Next(p::EdgeData {
out_v: res_id,
in_v: fid,
})))?;
let def_id = self.emit_element(p::Element::Vertex(p::Vertex::DefinitionResult))?;
let Some(def_range) = self.emit_def_span(def, &source, true) else {
continue;
};
let Some(file_id) = def.file_id() else {
continue;
};
let file_vertex_id = self.alloc_file_id(file_id);
self.emit_element(p::Element::Edge(p::Edge::Item(p::Item {
document: file_vertex_id,
property: None,
edge_data: p::EdgeDataMultiIn {
in_vs: vec![def_range],
out_v: def_id,
},
})))?;
self.emit_element(p::Element::Edge(p::Edge::Definition(p::EdgeData {
in_v: def_id,
out_v: res_id,
})))?;
}
}
Ok(())
}
fn emit_span(&mut self, span: Span, source: &Source) -> Option<i32> {
let range = source.range(span)?;
self.emit_element(p::Element::Vertex(p::Vertex::Range {
range: self.ctx.to_lsp_range(range, source),
tag: None,
}))
.ok()
}
fn emit_def_span(&mut self, def: &Definition, source: &Source, external: bool) -> Option<i32> {
let s = def.decl.span();
if !s.is_detached() && s.id() == Some(source.id()) {
self.emit_span(s, source)
} else if let Some(fid) = def.file_id()
&& fid == source.id()
{
// todo: module itself
None
} else if external && !s.is_detached() {
let external_src = self.ctx.source_by_id(def.file_id()?).ok()?;
self.emit_span(s, &external_src)
} else {
None
}
}
}
/// The index of a file.
pub struct FileIndex {
/// The file id.
pub fid: FileId,
/// The semantic tokens of the file.
pub semantic_tokens: SemanticTokens,
/// The documentation of the file.
pub documentation: Option<EcoString>,
/// The references in the file.
pub references: FxHashMap<Span, Definition>,
}
/// Dumps typst knowledge in [LSIF] format from workspace.
///
/// [LSIF]: https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/
pub fn knowledge(ctx: &mut LocalContext) -> tinymist_std::Result<Knowledge> {
let root = ctx
.world()
.entry_state()
.workspace_root()
.ok_or_else(|| tinymist_std::error_once!("workspace root is not set"))?;
let files = ctx.source_files().clone();
let mut worker = DumpWorker {
ctx,
strings: FxHashMap::default(),
references: FxHashMap::default(),
doc: None,
};
let files = files
.iter()
.map(move |fid| worker.file(fid))
.collect::<tinymist_std::Result<Vec<FileIndex>>>()?;
Ok(Knowledge {
meta: p::MetaData {
version: "0.6.0".to_string(),
project_root: path_to_url(&root)?,
position_encoding: p::Encoding::Utf16,
tool_info: Some(p::ToolInfo {
name: "tinymist".to_string(),
args: vec![],
version: Some(env!("CARGO_PKG_VERSION").to_string()),
}),
},
files,
})
}
struct DumpWorker<'a> {
/// The context.
ctx: &'a mut LocalContext,
/// The document.
doc: Option<TypstDocument>,
/// A string interner.
strings: FxHashMap<EcoString, EcoString>,
/// The references collected so far.
references: FxHashMap<Span, Definition>,
}
impl DumpWorker<'_> {
fn file(&mut self, fid: &FileId) -> tinymist_std::Result<FileIndex> {
let source = self.ctx.source_by_id(*fid).context_ut("cannot parse")?;
let semantic_tokens = crate::SemanticTokensFullRequest::compute(self.ctx, &source);
let root = LinkedNode::new(source.root());
self.walk(&source, &root);
let references = std::mem::take(&mut self.references);
Ok(FileIndex {
fid: *fid,
semantic_tokens,
documentation: Some(self.intern("File documentation.")), // todo
references,
})
}
fn intern(&mut self, s: &str) -> EcoString {
if let Some(v) = self.strings.get(s) {
return v.clone();
}
let v = EcoString::from(s);
self.strings.insert(v.clone(), v.clone());
v
}
fn walk(&mut self, source: &Source, node: &LinkedNode) {
if node.get().children().len() == 0 {
let Some(syntax) = classify_syntax(node.clone(), node.offset()) else {
return;
};
let span = syntax.node().span();
if self.references.contains_key(&span) {
return;
}
let Some(def) = self.ctx.def_of_syntax(source, self.doc.as_ref(), syntax) else {
return;
};
self.references.insert(span, def);
return;
}
for child in node.children() {
self.walk(source, &child);
}
}
}


@@ -0,0 +1,349 @@
//! Types of the Language Server Index Format (LSIF), borrowed from the
//! `lsp-types` crate and modified to fit our needs. LSIF is a standard
//! format for language servers or other programming tools to dump their
//! knowledge about a workspace.
//!
//! Based on <https://microsoft.github.io/language-server-protocol/specifications/lsif/0.6.0/specification/>
#![allow(missing_docs)]
// todo: large_enum_variant
use ecow::EcoString;
use lsp_types::{Range, SemanticTokens, Url};
use serde::{Deserialize, Serialize};
pub type Id = i32;
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum LocationOrRangeId {
Location(lsp_types::Location),
RangeId(Id),
}
#[derive(Debug, PartialEq, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct Entry<'a> {
pub id: Id,
#[serde(flatten)]
pub data: Element<'a>,
}
#[derive(Debug, PartialEq, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(tag = "type")]
#[allow(clippy::large_enum_variant)]
pub enum Element<'a> {
Vertex(Vertex<'a>),
Edge(Edge),
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct ToolInfo {
pub name: String,
#[serde(default = "Default::default")]
#[serde(skip_serializing_if = "Vec::is_empty")]
pub args: Vec<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub version: Option<String>,
}
#[derive(Debug, PartialEq, Serialize, Deserialize, Clone, Copy)]
pub enum Encoding {
/// Currently only 'utf-16' is supported due to the limitations in LSP.
#[serde(rename = "utf-16")]
Utf16,
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct RangeBasedDocumentSymbol {
pub id: Id,
#[serde(default = "Default::default")]
#[serde(skip_serializing_if = "Vec::is_empty")]
pub children: Vec<RangeBasedDocumentSymbol>,
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[serde(untagged)]
pub enum DocumentSymbolOrRangeBasedVec {
DocumentSymbol(Vec<lsp_types::DocumentSymbol>),
RangeBased(Vec<RangeBasedDocumentSymbol>),
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DefinitionTag {
/// The text covered by the range
text: String,
/// The symbol kind.
kind: lsp_types::SymbolKind,
/// Indicates if this symbol is deprecated.
#[serde(default)]
#[serde(skip_serializing_if = "std::ops::Not::not")]
deprecated: bool,
/// The full range of the definition not including leading/trailing
/// whitespace but everything else, e.g. comments and code.
/// The range must be included in fullRange.
full_range: Range,
/// Optional detail information for the definition.
#[serde(skip_serializing_if = "Option::is_none")]
detail: Option<String>,
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DeclarationTag {
/// The text covered by the range
text: String,
/// The symbol kind.
kind: lsp_types::SymbolKind,
/// Indicates if this symbol is deprecated.
#[serde(default)]
deprecated: bool,
/// The full range of the definition not including leading/trailing
/// whitespace but everything else, e.g. comments and code.
/// The range must be included in fullRange.
full_range: Range,
/// Optional detail information for the definition.
#[serde(skip_serializing_if = "Option::is_none")]
detail: Option<String>,
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ReferenceTag {
text: String,
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UnknownTag {
text: String,
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[serde(tag = "type")]
pub enum RangeTag {
Definition(DefinitionTag),
Declaration(DeclarationTag),
Reference(ReferenceTag),
Unknown(UnknownTag),
}
#[derive(Debug, PartialEq, Serialize)]
#[serde(rename_all = "camelCase")]
#[serde(tag = "label")]
pub enum Vertex<'a> {
MetaData(&'a MetaData),
/// <https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#the-project-vertex>
Project(Project),
Document(&'a Document),
/// <https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#ranges>
Range {
#[serde(flatten)]
range: Range,
#[serde(skip_serializing_if = "Option::is_none")]
tag: Option<RangeTag>,
},
/// <https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#result-set>
ResultSet(ResultSet),
Moniker(lsp_types::Moniker),
PackageInformation(PackageInformation),
#[serde(rename = "$event")]
Event(Event),
DefinitionResult,
DeclarationResult,
TypeDefinitionResult,
ReferenceResult,
ImplementationResult,
FoldingRangeResult {
result: Vec<lsp_types::FoldingRange>,
},
SemanticTokensResult {
result: SemanticTokens,
},
HoverResult {
result: lsp_types::Hover,
},
DocumentSymbolResult {
result: DocumentSymbolOrRangeBasedVec,
},
DocumentLinkResult {
result: Vec<lsp_types::DocumentLink>,
},
DiagnosticResult {
result: Vec<lsp_types::Diagnostic>,
},
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum EventKind {
Begin,
End,
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum EventScope {
Document,
Project,
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct Event {
pub kind: EventKind,
pub scope: EventScope,
pub data: Id,
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[serde(tag = "label")]
pub enum Edge {
Contains(EdgeDataMultiIn),
Moniker(EdgeData),
NextMoniker(EdgeData),
Next(EdgeData),
PackageInformation(EdgeData),
Item(Item),
// Methods
#[serde(rename = "textDocument/definition")]
Definition(EdgeData),
#[serde(rename = "textDocument/declaration")]
Declaration(EdgeData),
#[serde(rename = "textDocument/hover")]
Hover(EdgeData),
#[serde(rename = "textDocument/references")]
References(EdgeData),
#[serde(rename = "textDocument/implementation")]
Implementation(EdgeData),
#[serde(rename = "textDocument/typeDefinition")]
TypeDefinition(EdgeData),
#[serde(rename = "textDocument/foldingRange")]
FoldingRange(EdgeData),
#[serde(rename = "textDocument/documentLink")]
DocumentLink(EdgeData),
#[serde(rename = "textDocument/documentSymbol")]
DocumentSymbol(EdgeData),
#[serde(rename = "textDocument/diagnostic")]
Diagnostic(EdgeData),
#[serde(rename = "textDocument/semanticTokens")]
SemanticTokens(EdgeData),
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct EdgeData {
pub in_v: Id,
pub out_v: Id,
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct EdgeDataMultiIn {
pub in_vs: Vec<Id>,
pub out_v: Id,
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum DefinitionResultType {
Scalar(LocationOrRangeId),
Array(LocationOrRangeId),
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum ItemKind {
Declarations,
Definitions,
References,
ReferenceResults,
ImplementationResults,
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Item {
pub document: Id,
#[serde(skip_serializing_if = "Option::is_none")]
pub property: Option<ItemKind>,
#[serde(flatten)]
pub edge_data: EdgeDataMultiIn,
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Document {
pub uri: Url,
pub language_id: EcoString,
}
/// <https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#result-set>
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ResultSet {
#[serde(skip_serializing_if = "Option::is_none")]
pub key: Option<String>,
}
/// <https://github.com/Microsoft/language-server-protocol/blob/master/indexFormat/specification.md#the-project-vertex>
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Project {
#[serde(skip_serializing_if = "Option::is_none")]
pub resource: Option<Url>,
#[serde(skip_serializing_if = "Option::is_none")]
pub content: Option<String>,
pub kind: String,
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct MetaData {
/// The version of the LSIF format using semver notation. See <https://semver.org/>. Please note
/// the version numbers starting with 0 don't adhere to semver and adopters
/// have to assume that each new version is breaking.
pub version: String,
/// The project root (in the form of a URI) used to compute this dump.
pub project_root: Url,
/// The string encoding used to compute line and character values in
/// positions and ranges.
pub position_encoding: Encoding,
/// Information about the tool that created the dump
#[serde(skip_serializing_if = "Option::is_none")]
pub tool_info: Option<ToolInfo>,
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Repository {
pub r#type: String,
pub url: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub commit_id: Option<String>,
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PackageInformation {
pub name: String,
pub manager: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub uri: Option<Url>,
#[serde(skip_serializing_if = "Option::is_none")]
pub content: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub repository: Option<Repository>,
#[serde(skip_serializing_if = "Option::is_none")]
pub version: Option<String>,
}
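As a sanity check of the serde attributes above, here is a minimal sketch
(assuming `serde_json`, which the dumper already uses) of emitting a single
metaData line:

```rust
use lsp_types::Url;

use crate::index::protocol as p;

fn emit_meta_line() -> serde_json::Result<String> {
    let meta = p::MetaData {
        version: "0.6.0".to_string(),
        project_root: Url::parse("file:///workspace").unwrap(),
        position_encoding: p::Encoding::Utf16,
        tool_info: Some(p::ToolInfo {
            name: "tinymist".to_string(),
            args: vec![],
            version: None,
        }),
    };
    let entry = p::Entry {
        id: 1,
        data: p::Element::Vertex(p::Vertex::MetaData(&meta)),
    };
    // Expected shape, one object per line:
    // {"id":1,"type":"vertex","label":"metaData","version":"0.6.0",
    //  "projectRoot":"file:///workspace","positionEncoding":"utf-16",
    //  "toolInfo":{"name":"tinymist"}}
    serde_json::to_string(&entry)
}
```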


@@ -44,6 +44,7 @@ pub use workspace_label::*;
pub mod analysis;
pub mod docs;
pub mod index;
pub mod package;
pub mod syntax;
pub mod testing;


@@ -37,6 +37,13 @@ impl SemanticRequest for SemanticTokensFullRequest {
}
}
impl SemanticTokensFullRequest {
/// Computes the semantic tokens for a given source code.
pub fn compute(ctx: &mut LocalContext, source: &Source) -> crate::analysis::SemanticTokens {
crate::analysis::semantic_tokens::get_semantic_tokens(ctx, source)
}
}
#[cfg(test)]
mod tests {
use super::*;


@@ -17,16 +17,14 @@ use tinymist_query::package::PackageInfo;
use tinymist_query::{LocalContextGuard, LspRange};
use tinymist_std::error::prelude::*;
use tinymist_task::ExportMarkdownTask;
use typst::diag::{eco_format, StrResult};
use typst::syntax::{LinkedNode, Source};
use world::TaskInputs;
use super::*;
use crate::lsp::query::run_query;
use crate::tool::ast::AstRepr;
#[cfg(feature = "system")]
use typst::diag::EcoString;
use typst::diag::{EcoString, StrResult};
#[cfg(feature = "system")]
use typst::syntax::package::{PackageSpec, VersionlessPackageSpec};
@@ -729,6 +727,18 @@ impl ServerState {
just_future(async move { serde_json::to_value(fut.await?).map_err(internal_error) })
}
/// Get the LSIF dump for a package
pub fn resource_lsif_(
&mut self,
info: PackageInfo,
) -> LspResult<impl Future<Output = LspResult<String>>> {
self.within_package(info.clone(), move |a| {
let knowledge = tinymist_query::index::knowledge(a)
.map_err(map_string_err("failed to generate docs"))?;
Ok(knowledge.bind(a.shared()).to_string())
})
}
/// Get all the symbol docs
pub fn resource_package_docs_(
&mut self,
@@ -736,11 +746,9 @@ impl ServerState {
) -> LspResult<impl Future<Output = LspResult<String>>> {
self.within_package(info.clone(), move |a| {
let doc = tinymist_query::docs::package_docs(a, &info)
.map_err(map_string_err("failed to generate docs"))
.map_err(internal_error)?;
.map_err(map_string_err("failed to generate docs"))?;
tinymist_query::docs::package_docs_md(&doc)
.map_err(map_string_err("failed to generate docs"))
.map_err(internal_error)
})
}
@@ -752,7 +760,6 @@ impl ServerState {
self.within_package(info.clone(), move |a| {
tinymist_query::package::check_package(a, &info)
.map_err(map_string_err("failed to check package"))
.map_err(internal_error)
})
}
@@ -760,34 +767,10 @@ impl ServerState {
pub fn within_package<T>(
&mut self,
info: PackageInfo,
f: impl FnOnce(&mut LocalContextGuard) -> LspResult<T> + Send + Sync,
f: impl FnOnce(&mut LocalContextGuard) -> Result<T> + Send + Sync,
) -> LspResult<impl Future<Output = LspResult<T>>> {
let snap = self.query_snapshot().map_err(internal_error)?;
Ok(async move {
let world = snap.world();
let entry: StrResult<EntryState> = Ok(()).and_then(|_| {
let toml_id = tinymist_query::package::get_manifest_id(&info)?;
let toml_path = world.path_for_id(toml_id)?.as_path().to_owned();
let pkg_root = toml_path.parent().ok_or_else(|| {
eco_format!("cannot get package root (parent of {toml_path:?})")
})?;
let manifest = tinymist_query::package::get_manifest(world, toml_id)?;
let entry_point = toml_id.join(&manifest.package.entrypoint);
Ok(EntryState::new_rooted_by_id(pkg_root.into(), entry_point))
});
let entry = entry.map_err(|e| internal_error(e.to_string()))?;
let snap = snap.task(TaskInputs {
entry: Some(entry),
inputs: None,
});
snap.run_analysis(f).map_err(internal_error)?
})
Ok(async move { snap.run_within_package(&info, f).map_err(internal_error) })
}
}