feat: prepare for parallelizing lsp requests (#342)

* feat: migrate steal_sync to snapshot_sync

* feat: correctly make world/analysis snapshot

* very rayon

* dev: recover async lsp request

* gg

* dev: pin

* fix: make server stable

* dev: disable concurrent server by default

* dev: very sync
Myriad-Dreamin authored 2024-06-26 10:37:57 +08:00, committed by GitHub
parent 5e4e1e9877
commit 6fcad1c1c7
36 changed files with 1736 additions and 1318 deletions
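
At a glance, the commit replaces the old "steal the compiler thread and run a closure on it" pattern with "ask the compiler actor for an immutable snapshot and run the analysis elsewhere". A minimal, self-contained sketch of that flow, with simplified hypothetical types (the real crate uses CompileSnapshot<F>, Interrupt<F>, and QuerySnap):

use std::sync::Arc;
use tokio::sync::{mpsc, oneshot};

// A stripped-down stand-in for CompileSnapshot: an immutable view of the
// compiled world plus the last successfully compiled document.
#[derive(Clone)]
struct Snapshot {
    world_revision: u64,
    success_doc: Option<Arc<String>>,
}

// The only interrupt shown here: the compiler actor answers with its snapshot.
enum Interrupt {
    Snapshot(oneshot::Sender<Snapshot>),
}

// Client side: request a snapshot, then analyze it off the compiler thread.
async fn handle_request(intr_tx: &mpsc::UnboundedSender<Interrupt>) -> Option<u64> {
    let (tx, rx) = oneshot::channel();
    intr_tx.send(Interrupt::Snapshot(tx)).ok()?;
    let snap = rx.await.ok()?;
    // Many such requests can hold snapshots concurrently, since they only read.
    Some(snap.world_revision)
}

Because the snapshot is read-only and cheap to clone, any number of requests can hold one at the same time, which is what makes the rayon offloading later in the diff safe.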

View file

@ -147,7 +147,7 @@ jobs:
cp "target/${{ matrix.rust-target }}/gh-release/tinymist$(If ('${{ matrix.platform }}' -eq 'win32') { '.exe' } else { '' } )" "tinymist-${{ env.target }}$(If ('${{ matrix.platform }}' -eq 'win32') { '.exe' } else { '' } )"
- name: Test tinymist
run: |
cargo test --profile=gh-release --workspace --target ${{ matrix.rust-target }}
cargo test --profile=gh-release --workspace --target ${{ matrix.rust-target }} --features stable-server
if: (startsWith(github.ref, 'refs/tags/') || matrix.regular_build == 'true') && (matrix.platform == 'linux') && (matrix.arch == 'x64')
- name: Upload split debug symbols for windows
if: matrix.platform == 'win32' && (startsWith(github.ref, 'refs/tags/') || matrix.regular_build == 'true')

Cargo.lock generated (27 changes)
View file

@ -2919,7 +2919,7 @@ dependencies = [
[[package]]
name = "reflexo"
version = "0.5.0-rc4"
source = "git+https://github.com/Myriad-Dreamin/typst.ts/?rev=36b969e2d5743544dca8679c53c89129b989ec80#36b969e2d5743544dca8679c53c89129b989ec80"
source = "git+https://github.com/Myriad-Dreamin/typst.ts/?rev=11b1ef0909ee6ded49eb84db999af14276125a62#11b1ef0909ee6ded49eb84db999af14276125a62"
dependencies = [
"base64 0.22.1",
"bitvec",
@ -2944,7 +2944,7 @@ dependencies = [
[[package]]
name = "reflexo-vfs"
version = "0.5.0-rc4"
source = "git+https://github.com/Myriad-Dreamin/typst.ts/?rev=36b969e2d5743544dca8679c53c89129b989ec80#36b969e2d5743544dca8679c53c89129b989ec80"
source = "git+https://github.com/Myriad-Dreamin/typst.ts/?rev=11b1ef0909ee6ded49eb84db999af14276125a62#11b1ef0909ee6ded49eb84db999af14276125a62"
dependencies = [
"append-only-vec",
"indexmap 2.2.6",
@ -2961,7 +2961,7 @@ dependencies = [
[[package]]
name = "reflexo-world"
version = "0.5.0-rc4"
source = "git+https://github.com/Myriad-Dreamin/typst.ts/?rev=36b969e2d5743544dca8679c53c89129b989ec80#36b969e2d5743544dca8679c53c89129b989ec80"
source = "git+https://github.com/Myriad-Dreamin/typst.ts/?rev=11b1ef0909ee6ded49eb84db999af14276125a62#11b1ef0909ee6ded49eb84db999af14276125a62"
dependencies = [
"append-only-vec",
"chrono",
@ -2975,6 +2975,7 @@ dependencies = [
"indexmap 2.2.6",
"log",
"nohash-hasher",
"notify",
"once_cell",
"parking_lot",
"reflexo",
@ -2988,6 +2989,7 @@ dependencies = [
"tar",
"typst",
"typst-ts-core",
"walkdir",
]
[[package]]
@ -3959,6 +3961,8 @@ dependencies = [
"parking_lot",
"paste",
"pin-project-lite",
"rayon",
"reflexo",
"serde",
"serde_json",
"tinymist-assets 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
@ -4023,6 +4027,7 @@ dependencies = [
"regex",
"rust_iso3166",
"rust_iso639",
"rustc-hash",
"serde",
"serde_json",
"serde_yaml",
@ -4525,44 +4530,38 @@ dependencies = [
[[package]]
name = "typst-ts-compiler"
version = "0.5.0-rc4"
source = "git+https://github.com/Myriad-Dreamin/typst.ts/?rev=36b969e2d5743544dca8679c53c89129b989ec80#36b969e2d5743544dca8679c53c89129b989ec80"
source = "git+https://github.com/Myriad-Dreamin/typst.ts/?rev=11b1ef0909ee6ded49eb84db999af14276125a62#11b1ef0909ee6ded49eb84db999af14276125a62"
dependencies = [
"append-only-vec",
"base64 0.22.1",
"codespan-reporting",
"comemo 0.4.0",
"dirs",
"flate2",
"fontdb",
"fst",
"hex",
"indexmap 2.2.6",
"instant",
"log",
"nohash-hasher",
"notify",
"once_cell",
"parking_lot",
"pathdiff",
"rayon",
"reflexo",
"reflexo-vfs",
"reflexo-world",
"rustc-hash",
"serde",
"serde_json",
"sha2",
"strum 0.25.0",
"tar",
"tokio",
"typst",
"typst-ts-core",
"typst-ts-svg-exporter",
"walkdir",
]
[[package]]
name = "typst-ts-core"
version = "0.5.0-rc4"
source = "git+https://github.com/Myriad-Dreamin/typst.ts/?rev=36b969e2d5743544dca8679c53c89129b989ec80#36b969e2d5743544dca8679c53c89129b989ec80"
source = "git+https://github.com/Myriad-Dreamin/typst.ts/?rev=11b1ef0909ee6ded49eb84db999af14276125a62#11b1ef0909ee6ded49eb84db999af14276125a62"
dependencies = [
"base64 0.22.1",
"base64-serde",
@ -4600,7 +4599,7 @@ dependencies = [
[[package]]
name = "typst-ts-svg-exporter"
version = "0.5.0-rc4"
source = "git+https://github.com/Myriad-Dreamin/typst.ts/?rev=36b969e2d5743544dca8679c53c89129b989ec80#36b969e2d5743544dca8679c53c89129b989ec80"
source = "git+https://github.com/Myriad-Dreamin/typst.ts/?rev=11b1ef0909ee6ded49eb84db999af14276125a62#11b1ef0909ee6ded49eb84db999af14276125a62"
dependencies = [
"base64 0.22.1",
"comemo 0.4.0",

View file

@ -80,6 +80,7 @@ tokio = { version = "1.36.0", features = [
"rt-multi-thread",
"io-std",
] }
rayon = "1.10.0"
tokio-util = { version = "0.7.10", features = ["compat"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
@ -88,6 +89,7 @@ yaml-rust2 = "0.8"
biblatex = "0.9"
# We need to freeze the version of the crate, as the raw-api feature is considered unstable
dashmap = { version = "=5.5.3", features = ["raw-api"] }
rustc-hash = "1"
triomphe = { version = "0.1.10", default-features = false, features = ["std"] }
hashbrown = { version = "0.14", features = [
"inline-more",
@ -145,11 +147,11 @@ typst-syntax = { git = "https://github.com/Myriad-Dreamin/typst.git", branch = "
# typst-render = { path = "../typst/crates/typst-render" }
# typst-syntax = { path = "../typst/crates/typst-syntax" }
typst-ts-svg-exporter = { git = "https://github.com/Myriad-Dreamin/typst.ts/", rev = "36b969e2d5743544dca8679c53c89129b989ec80" }
reflexo = { git = "https://github.com/Myriad-Dreamin/typst.ts/", rev = "36b969e2d5743544dca8679c53c89129b989ec80" }
reflexo-world = { git = "https://github.com/Myriad-Dreamin/typst.ts/", rev = "36b969e2d5743544dca8679c53c89129b989ec80" }
typst-ts-core = { git = "https://github.com/Myriad-Dreamin/typst.ts/", rev = "36b969e2d5743544dca8679c53c89129b989ec80" }
typst-ts-compiler = { git = "https://github.com/Myriad-Dreamin/typst.ts/", rev = "36b969e2d5743544dca8679c53c89129b989ec80" }
typst-ts-svg-exporter = { git = "https://github.com/Myriad-Dreamin/typst.ts/", rev = "11b1ef0909ee6ded49eb84db999af14276125a62" }
reflexo = { git = "https://github.com/Myriad-Dreamin/typst.ts/", rev = "11b1ef0909ee6ded49eb84db999af14276125a62" }
reflexo-world = { git = "https://github.com/Myriad-Dreamin/typst.ts/", rev = "11b1ef0909ee6ded49eb84db999af14276125a62" }
typst-ts-core = { git = "https://github.com/Myriad-Dreamin/typst.ts/", rev = "11b1ef0909ee6ded49eb84db999af14276125a62" }
typst-ts-compiler = { git = "https://github.com/Myriad-Dreamin/typst.ts/", rev = "11b1ef0909ee6ded49eb84db999af14276125a62" }
# typst-ts-svg-exporter = { path = "../typst.ts/exporter/svg" }
# reflexo = { path = "../typst.ts/crates/reflexo/" }

View file

@ -48,6 +48,7 @@ rust_iso3166 = "0.1.4"
# We need to freeze the version of the crate, as the raw-api feature is considered unstable
dashmap.workspace = true
rustc-hash.workspace = true
hashbrown.workspace = true
triomphe.workspace = true

View file

@ -403,8 +403,7 @@ mod signature_tests {
let result = analyze_signature(
ctx,
source.clone(),
SignatureTarget::Syntax(callee_node.clone()),
SignatureTarget::Syntax(source.clone(), callee_node.clone()),
);
assert_snapshot!(SignatureSnapshot(result.as_ref()));

View file

@ -74,7 +74,7 @@ pub fn analyze_call_no_cache(
callee_node: LinkedNode,
args: ast::Args<'_>,
) -> Option<CallInfo> {
let signature = analyze_signature(ctx, source, SignatureTarget::Syntax(callee_node))?;
let signature = analyze_signature(ctx, SignatureTarget::Syntax(source, callee_node))?;
trace!("got signature {signature:?}");
let mut info = CallInfo {

View file

@ -1,4 +1,4 @@
use std::sync::atomic::AtomicBool;
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use std::{
collections::{HashMap, HashSet},
hash::Hash,
@ -10,8 +10,8 @@ use ecow::{EcoString, EcoVec};
use lsp_types::Url;
use once_cell::sync::OnceCell;
use parking_lot::RwLock;
use reflexo::hash::hash128;
use reflexo::{cow_mut::CowMut, debug_loc::DataSource, ImmutPath};
use reflexo::hash::{hash128, FxDashMap};
use reflexo::{debug_loc::DataSource, ImmutPath};
use typst::eval::Eval;
use typst::foundations::{self, Func};
use typst::syntax::{LinkedNode, SyntaxNode};
@ -121,10 +121,6 @@ impl ModuleAnalysisCache {
/// The analysis data holds globally.
pub struct Analysis {
/// The root of the workspace.
/// This means that the analysis result won't be valid if the root directory
/// changes.
pub root: ImmutPath,
/// The position encoding for the workspace.
pub position_encoding: PositionEncoding,
/// The position encoding for the workspace.
@ -136,25 +132,28 @@ pub struct Analysis {
impl Analysis {
/// Get estimated memory usage of the analysis data.
pub fn estimated_memory(&self) -> usize {
self.caches.modules.capacity() * 32
+ self
.caches
.modules
.values()
.map(|v| {
v.def_use_lexical_hierarchy
.output
.read()
.as_ref()
.map_or(0, |e| e.iter().map(|e| e.estimated_memory()).sum())
})
.sum::<usize>()
let _ = LexicalHierarchy::estimated_memory;
// todo: implement
// self.caches.modules.capacity() * 32
// + self .caches .modules .values() .map(|v| { v.def_use_lexical_hierarchy
// .output .read() .as_ref() .map_or(0, |e| e.iter().map(|e|
// e.estimated_memory()).sum()) }) .sum::<usize>()
0
}
fn gc(&mut self) {
self.caches
.signatures
.retain(|_, (l, _, _)| (self.caches.lifetime - *l) < 30);
/// Get a snapshot of the analysis data.
pub fn snapshot<'a>(
&'a self,
root: ImmutPath,
resources: &'a dyn AnalysisResources,
) -> AnalysisContext<'a> {
AnalysisContext::new(root, resources, self)
}
/// Clear all cached resources.
pub fn clear_cache(&self) {
self.caches.signatures.clear();
self.caches.modules.clear();
}
}
@ -248,10 +247,7 @@ impl<Inputs, Output> ComputingNode<Inputs, Output> {
Inputs: ComputeDebug + Hash + Clone,
Output: Clone,
{
if self
.computing
.swap(true, std::sync::atomic::Ordering::SeqCst)
{
if self.computing.swap(true, Ordering::SeqCst) {
return Err(());
}
let input_cmp = self.inputs.read();
@ -279,8 +275,7 @@ impl<Inputs, Output> ComputingNode<Inputs, Output> {
}
});
self.computing
.store(false, std::sync::atomic::Ordering::SeqCst);
self.computing.store(false, Ordering::SeqCst);
res
}
@ -313,8 +308,6 @@ pub struct ModuleAnalysisGlobalCache {
bibliography: Arc<ComputingNode<EcoVec<(TypstFileId, Bytes)>, Arc<BibInfo>>>,
import: Arc<ComputingNode<EcoVec<LexicalHierarchy>, Arc<ImportInfo>>>,
signature_source: Option<Source>,
signatures: HashMap<usize, Signature>,
}
impl Default for ModuleAnalysisGlobalCache {
@ -325,9 +318,6 @@ impl Default for ModuleAnalysisGlobalCache {
import: Arc::new(ComputingNode::new("import")),
def_use: Arc::new(ComputingNode::new("def_use")),
bibliography: Arc::new(ComputingNode::new("bibliography")),
signature_source: None,
signatures: Default::default(),
}
}
}
@ -336,70 +326,11 @@ impl Default for ModuleAnalysisGlobalCache {
/// of a module.
#[derive(Default)]
pub struct AnalysisGlobalCaches {
lifetime: u64,
modules: HashMap<TypstFileId, ModuleAnalysisGlobalCache>,
signatures: HashMap<u128, (u64, foundations::Func, Signature)>,
}
impl AnalysisGlobalCaches {
/// Get the signature of a function.
pub fn signature(&self, source: Option<Source>, func: &SignatureTarget) -> Option<Signature> {
match func {
SignatureTarget::Syntax(node) => {
// todo: check performance on peeking signature source frequently
let cache = self.modules.get(&node.span().id()?)?;
if cache
.signature_source
.as_ref()
.zip(source)
.map_or(true, |(s, t)| hash128(s) != hash128(&t))
{
return None;
}
cache.signatures.get(&node.offset()).cloned()
}
SignatureTarget::Runtime(rt) => self
.signatures
.get(&hash128(rt))
.and_then(|(_, cached_func, s)| (rt == cached_func).then_some(s.clone())),
}
}
/// Compute the signature of a function.
pub fn compute_signature(
&mut self,
source: Option<Source>,
func: SignatureTarget,
compute: impl FnOnce() -> Signature,
) -> Signature {
match func {
SignatureTarget::Syntax(node) => {
let cache = self.modules.entry(node.span().id().unwrap()).or_default();
// todo: check performance on peeking signature source frequently
if cache
.signature_source
.as_ref()
.zip(source.as_ref())
.map_or(true, |(s, t)| hash128(s) != hash128(t))
{
cache.signature_source = source;
cache.signatures.clear();
}
let key = node.offset();
cache.signatures.entry(key).or_insert_with(compute).clone()
}
SignatureTarget::Runtime(rt) => {
let key = hash128(&rt);
self.signatures
.entry(key)
.or_insert_with(|| (self.lifetime, rt, compute()))
.2
.clone()
}
}
}
lifetime: AtomicU64,
clear_lifetime: AtomicU64,
modules: FxDashMap<TypstFileId, Arc<ModuleAnalysisGlobalCache>>,
static_signatures: FxDashMap<u128, (u64, Source, usize, Signature)>,
signatures: FxDashMap<u128, (u64, foundations::Func, Signature)>,
}
/// A cache for all level of analysis results of a module.
@ -440,31 +371,37 @@ pub trait AnalysisResources {
/// The context for analyzers.
pub struct AnalysisContext<'a> {
/// The root of the workspace.
/// This means that the analysis result won't be valid if the root directory
/// changes.
pub root: ImmutPath,
/// The world surface for Typst compiler
pub resources: &'a dyn AnalysisResources,
/// The analysis data
pub analysis: CowMut<'a, Analysis>,
pub analysis: &'a Analysis,
/// The caches for analysis.
lifetime: u64,
/// Local caches for analysis.
caches: AnalysisCaches,
}
// todo: gc in new thread
impl<'w> Drop for AnalysisContext<'w> {
fn drop(&mut self) {
self.gc();
}
}
impl<'w> AnalysisContext<'w> {
/// Create a new analysis context.
pub fn new(resources: &'w dyn AnalysisResources, a: Analysis) -> Self {
pub fn new(root: ImmutPath, resources: &'w dyn AnalysisResources, a: &'w Analysis) -> Self {
// self.caches.lifetime += 1;
let lifetime = a.caches.lifetime.fetch_add(1, Ordering::SeqCst);
Self {
root,
resources,
analysis: CowMut::Owned(a),
caches: AnalysisCaches::default(),
}
}
/// Create a new analysis context with borrowing the analysis data.
pub fn new_borrow(resources: &'w dyn AnalysisResources, a: &'w mut Analysis) -> Self {
a.caches.lifetime += 1;
a.gc();
Self {
resources,
analysis: CowMut::Borrowed(a),
analysis: a,
lifetime,
caches: AnalysisCaches::default(),
}
}
@ -491,7 +428,7 @@ impl<'w> AnalysisContext<'w> {
.completion_files
.get_or_init(|| {
scan_workspace_files(
&self.analysis.root,
&self.root,
PathPreference::Special.ext_matcher(),
|relative_path| relative_path.to_owned(),
)
@ -535,7 +472,7 @@ impl<'w> AnalysisContext<'w> {
// will be resolved.
let root = match id.package() {
Some(spec) => self.resources.resolve(spec)?,
None => self.analysis.root.clone(),
None => self.root.clone(),
};
// Join the path to the root. If it tries to escape, deny
@ -566,10 +503,10 @@ impl<'w> AnalysisContext<'w> {
/// Get the source of a file by file path.
pub fn source_by_path(&mut self, p: &Path) -> FileResult<Source> {
// todo: source in packages
let relative_path = p.strip_prefix(&self.analysis.root).map_err(|_| {
let relative_path = p.strip_prefix(&self.root).map_err(|_| {
FileError::Other(Some(eco_format!(
"not in root, path is {p:?}, root is {:?}",
self.analysis.root
self.root
)))
})?;
@ -668,6 +605,56 @@ impl<'w> AnalysisContext<'w> {
Some(self.to_lsp_range(position, &source))
}
/// Get the signature of a function.
pub fn signature(&self, func: &SignatureTarget) -> Option<Signature> {
match func {
SignatureTarget::Syntax(source, node) => {
// todo: check performance on peeking signature source frequently
let cache_key = (source, node.offset());
self.analysis
.caches
.static_signatures
.get(&hash128(&cache_key))
.and_then(|slot| (cache_key.1 == slot.2).then_some(slot.3.clone()))
}
SignatureTarget::Runtime(rt) => self
.analysis
.caches
.signatures
.get(&hash128(rt))
.and_then(|slot| (rt == &slot.1).then_some(slot.2.clone())),
}
}
/// Compute the signature of a function.
pub fn compute_signature(
&self,
func: SignatureTarget,
compute: impl FnOnce() -> Signature,
) -> Signature {
match func {
SignatureTarget::Syntax(source, node) => {
let cache_key = (source, node.offset());
self.analysis
.caches
.static_signatures
.entry(hash128(&cache_key))
.or_insert_with(|| (self.lifetime, cache_key.0, cache_key.1, compute()))
.3
.clone()
}
SignatureTarget::Runtime(rt) => {
let key = hash128(&rt);
self.analysis
.caches
.signatures
.entry(key)
.or_insert_with(|| (self.lifetime, rt, compute()))
.2
.clone()
}
}
}
/// Get the type check information of a source file.
pub(crate) fn type_check(&mut self, source: Source) -> Option<Arc<TypeScheme>> {
@ -714,7 +701,6 @@ impl<'w> AnalysisContext<'w> {
let l = cache
.def_use_lexical_hierarchy
.compute(source.clone(), |_before, after| {
cache.signatures.clear();
crate::syntax::get_lexical_hierarchy(after, crate::syntax::LexicalScopeKind::DefUse)
})
.ok()
@ -749,7 +735,6 @@ impl<'w> AnalysisContext<'w> {
let l = cache
.def_use_lexical_hierarchy
.compute(source.clone(), |_before, after| {
cache.signatures.clear();
crate::syntax::get_lexical_hierarchy(after, crate::syntax::LexicalScopeKind::DefUse)
})
.ok()
@ -810,8 +795,8 @@ impl<'w> AnalysisContext<'w> {
res
}
fn at_module(&mut self, fid: TypstFileId) -> &mut ModuleAnalysisGlobalCache {
self.analysis.caches.modules.entry(fid).or_default()
fn at_module(&self, fid: TypstFileId) -> Arc<ModuleAnalysisGlobalCache> {
self.analysis.caches.modules.entry(fid).or_default().clone()
}
pub(crate) fn with_vm<T>(&self, f: impl FnOnce(&mut typst::eval::Vm) -> T) -> T {
@ -897,6 +882,37 @@ impl<'w> AnalysisContext<'w> {
post_type_check(self, &ty_chk, k.clone()).or_else(|| ty_chk.type_of_span(k.span()))
}
fn gc(&self) {
let lifetime = self.lifetime;
loop {
let latest_clear_lifetime = self.analysis.caches.clear_lifetime.load(Ordering::Relaxed);
if latest_clear_lifetime >= lifetime {
return;
}
if self.analysis.caches.clear_lifetime.compare_exchange(
latest_clear_lifetime,
lifetime,
Ordering::SeqCst,
Ordering::SeqCst,
) != Ok(latest_clear_lifetime)
{
continue;
}
break;
}
self.analysis
.caches
.static_signatures
.retain(|_, (l, _, _, _)| lifetime - *l < 60);
self.analysis
.caches
.signatures
.retain(|_, (l, _, _)| lifetime - *l < 60);
}
}
fn ceil_char_boundary(text: &str, mut cursor: usize) -> usize {

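The eviction scheme above can be condensed into a small self-contained model (hypothetical names and value types; the real code keeps static_signatures and signatures in FxDashMaps and uses the same window of 60 lifetimes):

use std::sync::atomic::{AtomicU64, Ordering};
use dashmap::DashMap;

#[derive(Default)]
struct Caches {
    lifetime: AtomicU64,
    clear_lifetime: AtomicU64,
    // key -> (lifetime the entry was inserted at, cached value)
    entries: DashMap<u128, (u64, String)>,
}

impl Caches {
    // Taken when an analysis context is created (mirrors AnalysisContext::new).
    fn begin(&self) -> u64 {
        self.lifetime.fetch_add(1, Ordering::SeqCst)
    }

    // Memoize a value under the lifetime it was computed at.
    fn put(&self, key: u128, lifetime: u64, value: String) {
        self.entries.entry(key).or_insert_with(|| (lifetime, value));
    }

    // Run when the context is dropped (mirrors AnalysisContext::gc): only the
    // context that wins the compare_exchange performs the sweep, so concurrent
    // drops do not repeat the same eviction pass.
    fn gc(&self, lifetime: u64) {
        loop {
            let last = self.clear_lifetime.load(Ordering::Relaxed);
            if last >= lifetime {
                return;
            }
            if self
                .clear_lifetime
                .compare_exchange(last, lifetime, Ordering::SeqCst, Ordering::SeqCst)
                .is_ok()
            {
                break;
            }
        }
        // Drop entries older than a fixed window of 60 lifetimes.
        self.entries.retain(|_, (l, _)| lifetime - *l < 60);
    }
}
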
View file

@ -170,35 +170,29 @@ pub struct PartialSignature {
/// The language object that the signature is being analyzed for.
pub enum SignatureTarget<'a> {
/// A static node without knowing the function at runtime.
Syntax(LinkedNode<'a>),
Syntax(Source, LinkedNode<'a>),
/// A function that is known at runtime.
Runtime(Func),
}
pub(crate) fn analyze_dyn_signature(ctx: &mut AnalysisContext, func: Func) -> Signature {
ctx.analysis
.caches
.compute_signature(None, SignatureTarget::Runtime(func.clone()), || {
ctx.compute_signature(SignatureTarget::Runtime(func.clone()), || {
Signature::Primary(analyze_dyn_signature_inner(func))
})
}
pub(crate) fn analyze_signature(
ctx: &mut AnalysisContext,
source: Source,
callee_node: SignatureTarget,
) -> Option<Signature> {
if let Some(sig) = ctx
.analysis
.caches
.signature(Some(source.clone()), &callee_node)
{
if let Some(sig) = ctx.signature(&callee_node) {
return Some(sig);
}
let func = match callee_node {
SignatureTarget::Syntax(node) => {
SignatureTarget::Syntax(source, node) => {
let _ = resolve_callee_v2;
let _ = source;
// let res = resolve_callee_v2(ctx, node)?;
@ -239,9 +233,7 @@ pub(crate) fn analyze_signature(
}
let signature = ctx
.analysis
.caches
.compute_signature(None, SignatureTarget::Runtime(func.clone()), || {
.compute_signature(SignatureTarget::Runtime(func.clone()), || {
Signature::Primary(analyze_dyn_signature_inner(func))
})
.primary()

View file

@ -32,7 +32,7 @@ fn convert_diagnostic(
let source = ctx.world().source(id)?;
lsp_range = diagnostic_range(&source, span, ctx.position_encoding());
} else {
uri = path_to_url(&ctx.analysis.root)?;
uri = path_to_url(&ctx.root)?;
lsp_range = LspRange::default();
};

View file

@ -333,6 +333,40 @@ mod polymorphic {
DocumentMetrics(Option<DocumentMetricsResponse>),
ServerInfo(Option<HashMap<String, ServerInfoResponse>>),
}
impl CompilerQueryResponse {
pub fn to_untyped(self) -> serde_json::Result<JsonValue> {
match self {
Self::OnExport(res) => serde_json::to_value(res),
Self::OnSaveExport(res) => serde_json::to_value(res),
Self::Hover(res) => serde_json::to_value(res),
Self::GotoDefinition(res) => serde_json::to_value(res),
Self::GotoDeclaration(res) => serde_json::to_value(res),
Self::References(res) => serde_json::to_value(res),
Self::InlayHint(res) => serde_json::to_value(res),
Self::DocumentColor(res) => serde_json::to_value(res),
Self::DocumentHighlight(res) => serde_json::to_value(res),
Self::ColorPresentation(res) => serde_json::to_value(res),
Self::CodeAction(res) => serde_json::to_value(res),
Self::CodeLens(res) => serde_json::to_value(res),
Self::Completion(res) => serde_json::to_value(res),
Self::SignatureHelp(res) => serde_json::to_value(res),
Self::PrepareRename(res) => serde_json::to_value(res),
Self::Rename(res) => serde_json::to_value(res),
Self::DocumentSymbol(res) => serde_json::to_value(res),
Self::Symbol(res) => serde_json::to_value(res),
Self::SemanticTokensFull(res) => serde_json::to_value(res),
Self::SemanticTokensDelta(res) => serde_json::to_value(res),
Self::Formatting(res) => serde_json::to_value(res),
Self::FoldingRange(res) => serde_json::to_value(res),
Self::SelectionRange(res) => serde_json::to_value(res),
Self::InteractCodeContext(res) => serde_json::to_value(res),
Self::OnEnter(res) => serde_json::to_value(res),
Self::DocumentMetrics(res) => serde_json::to_value(res),
Self::ServerInfo(res) => serde_json::to_value(res),
}
}
}
}
pub use polymorphic::*;
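
A hedged usage sketch for to_untyped: it lets a single generic code path erase the concrete response type before handing the result back over the LSP transport. The import assumes the enum is re-exported at the crate root, as the pub use polymorphic::*; above suggests:

use tinymist_query::CompilerQueryResponse;

// Hypothetical dispatcher glue: lower any typed query response to a
// serde_json::Value without knowing which variant it is.
fn respond(resp: CompilerQueryResponse) -> serde_json::Result<serde_json::Value> {
    resp.to_untyped()
}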

View file

@ -36,6 +36,8 @@ impl SemanticRequest for SymbolRequest {
let mut symbols = vec![];
// todo! need compilation for iter_dependencies
ctx.resources.iter_dependencies(&mut |path| {
let Ok(source) = ctx.source_by_path(&path) else {
return;

View file

@ -14,7 +14,7 @@ use typst::syntax::{
};
use typst::{diag::PackageError, foundations::Bytes};
use typst_ts_compiler::{
service::CompileDriver, EntryManager, EntryReader, ShadowApi, TypstSystemUniverse, WorldDeps,
CompileDriver, EntryManager, EntryReader, ShadowApi, TypstSystemUniverse, WorldDeps,
};
use typst_ts_core::{
config::compiler::{EntryOpts, EntryState},
@ -67,17 +67,14 @@ pub fn snapshot_testing(name: &str, f: &impl Fn(&mut AnalysisContext, PathBuf))
TypstFileId::new(None, VirtualPath::new(p.strip_prefix(&root).unwrap()))
})
.collect::<Vec<_>>();
let mut w = w.spawn();
let mut w = w.snapshot();
let w = WrapWorld(&mut w);
let mut ctx = AnalysisContext::new(
&w,
Analysis {
root,
let a = Analysis {
position_encoding: PositionEncoding::Utf16,
enable_periscope: false,
caches: Default::default(),
},
);
};
let mut ctx = AnalysisContext::new(root, &w, &a);
ctx.test_completion_files(Vec::new);
ctx.test_files(|| paths);
f(&mut ctx, p);

View file

@ -1118,7 +1118,7 @@ pub fn complete_path(
let has_root = path.has_root();
let src_path = id.vpath();
let base = src_path.resolve(&ctx.analysis.root)?;
let base = src_path.resolve(&ctx.root)?;
let dst_path = src_path.join(path);
let mut compl_path = dst_path.as_rootless_path();
if !compl_path.is_dir() {
@ -1131,7 +1131,7 @@ pub fn complete_path(
return None;
}
let dirs = ctx.analysis.root.clone();
let dirs = ctx.root.clone();
log::debug!("compl_dirs: {dirs:?}");
// find directory or files in the path
let mut folder_completions = vec![];
@ -1150,7 +1150,7 @@ pub fn complete_path(
let label = if has_root {
// diff with root
let w = path.strip_prefix(&ctx.analysis.root).ok()?;
let w = path.strip_prefix(&ctx.root).ok()?;
eco_format!("/{}", unix_slash(w))
} else {
let base = base.parent()?;

View file

@ -47,6 +47,7 @@ typst-assets.workspace = true
typstyle.workspace = true
typstfmt_lib.workspace = true
reflexo.workspace = true
typst-ts-core = { workspace = true, default-features = false, features = [
"flat-vector",
"vector-bbox",
@ -71,9 +72,12 @@ tower-layer = "0.3.2"
tower-service = "0.3.2"
pin-project-lite = "0.2.13"
base64.workspace = true
rayon.workspace = true
[features]
default = ["cli", "embed-fonts", "no-content-hint", "preview"]
default = ["cli", "embed-fonts", "no-content-hint", "preview", "stable-server"]
stable-server = []
cli = ["clap"]
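
For context, a simplified sketch of how the new stable-server feature gates concurrency; the constants mirror the ones added later in this diff (typ_server.rs), while run_compile is a hypothetical wrapper:

// With the feature the actor processes compilations inline (concurrency 0);
// without it, work is offloaded to the rayon thread pool.
#[cfg(feature = "stable-server")]
const COMPILE_CONCURRENCY: usize = 0;
#[cfg(not(feature = "stable-server"))]
const COMPILE_CONCURRENCY: usize = 1;

fn run_compile(job: impl FnOnce() + Send + 'static) {
    if COMPILE_CONCURRENCY == 0 {
        job(); // stable: run on the current (compiler) thread
    } else {
        rayon::spawn(job); // concurrent: run on the rayon pool
    }
}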

View file

@ -7,7 +7,7 @@ pub mod typ_client;
pub mod typ_server;
pub mod user_action;
use std::path::Path;
use std::sync::Arc;
use tinymist_query::analysis::Analysis;
use tinymist_query::ExportKind;
@ -19,7 +19,7 @@ use typst_ts_core::config::compiler::EntryState;
use self::{
export::{ExportActor, ExportConfig},
format::run_format_thread,
typ_client::{CompileClientActor, CompileDriver, CompileHandler},
typ_client::{CompileClientActor, CompileHandler},
typ_server::CompileServerActor,
user_action::run_user_action_thread,
};
@ -73,26 +73,32 @@ impl CompileState {
.run(),
);
// Create the server
let handler = CompileHandler {
log::info!(
"TypstActor: creating server for {editor_group}, entry: {entry:?}, inputs: {inputs:?}"
);
// Create the compile handler for client consuming results.
let position_encoding = self.const_config().position_encoding;
let enable_periscope = self.config.periscope_args.is_some();
let periscope_args = self.config.periscope_args.clone();
let handle = Arc::new(CompileHandler {
#[cfg(feature = "preview")]
inner: std::sync::Arc::new(parking_lot::Mutex::new(None)),
inner: std::sync::Arc::new(None),
diag_group: editor_group.clone(),
doc_tx,
export_tx: export_tx.clone(),
editor_tx: self.editor_tx.clone(),
};
analysis: Analysis {
position_encoding,
enable_periscope,
caches: Default::default(),
},
periscope: PeriscopeRenderer::new(periscope_args.unwrap_or_default()),
});
let position_encoding = self.const_config().position_encoding;
let enable_periscope = self.config.periscope_args.is_some();
let periscope_args = self.config.periscope_args.clone();
let diag_group = editor_group.clone();
let font_resolver = self.config.determine_fonts();
let entry_ = entry.clone();
log::info!(
"TypstActor: creating server for {diag_group}, entry: {entry:?}, inputs: {inputs:?}"
);
let handle_ = handle.clone();
self.handle.spawn_blocking(move || {
// Create the world
@ -100,30 +106,14 @@ impl CompileState {
let verse = LspWorldBuilder::build(entry_.clone(), font_resolver, inputs)
.expect("incorrect options");
// Create the compiler
let driver = CompileDriver {
inner: std::marker::PhantomData,
handler,
analysis: Analysis {
position_encoding,
root: Path::new("").into(),
enable_periscope,
caches: Default::default(),
},
periscope: PeriscopeRenderer::new(periscope_args.unwrap_or_default()),
};
// Create the actor
tokio::spawn(
CompileServerActor::new(driver, verse, entry_, intr_tx, intr_rx)
.with_watch(true)
.spawn(),
);
let server = CompileServerActor::new(verse, intr_tx, intr_rx).with_watch(Some(handle_));
tokio::spawn(server.spawn());
});
// Create the client
let config = self.config.clone();
let client = CompileClientActor::new(editor_group, config, entry, intr_tx_, export_tx);
let client = CompileClientActor::new(handle, config, entry, intr_tx_);
// We do send memory changes instead of initializing the compiler with them.
// This is because there is state recorded inside the compiler actor, and we
// must update it.

View file

@ -42,17 +42,13 @@ use tinymist_query::{
use tinymist_render::PeriscopeRenderer;
use tokio::sync::{mpsc, oneshot, watch};
use typst::{
diag::{PackageError, SourceDiagnostic, SourceResult},
diag::{PackageError, SourceDiagnostic},
layout::Position,
model::Document as TypstDocument,
syntax::package::PackageSpec,
World as TypstWorld,
};
use typst_ts_compiler::{
service::{CompileEnv, CompileMiddleware, Compiler, PureCompiler},
vfs::notify::MemoryEvent,
EntryManager, EntryReader,
};
use typst_ts_compiler::{vfs::notify::MemoryEvent, CompileReport, EntryReader, TaskInputs};
use typst_ts_core::{
config::compiler::EntryState, debug_loc::DataSource, error::prelude::*, typst::prelude::EcoVec,
Error, ImmutPath, TypstFont,
@ -61,36 +57,44 @@ use typst_ts_core::{
use super::{
editor::{EditorRequest, TinymistCompileStatusEnum},
export::ExportConfig,
typ_server::{CompileServerActor, Interrupt},
typ_server::{CompilationHandle, CompileSnapshot, CompiledArtifact, Interrupt},
};
use crate::{
actor::export::ExportRequest,
compile_init::CompileConfig,
tools::preview::{CompilationHandle, CompileStatus},
utils,
tools::preview::CompileStatus,
utils::{self, threaded_receive},
world::{LspCompilerFeat, LspWorld},
};
type CompileService<C> = CompileServerActor<C, LspCompilerFeat>;
pub type CompileClientActor = CompileClientActorImpl<CompileDriver>;
type EditorSender = mpsc::UnboundedSender<EditorRequest>;
use crate::tools::preview::CompilationHandle as PreviewCompilationHandle;
pub struct CompileHandler {
pub(super) diag_group: String,
pub(crate) diag_group: String,
pub(crate) analysis: Analysis,
pub(crate) periscope: PeriscopeRenderer,
#[cfg(feature = "preview")]
pub(super) inner: Arc<Mutex<Option<typst_preview::CompilationHandleImpl>>>,
pub(crate) inner: Arc<Option<typst_preview::CompilationHandleImpl>>,
pub(super) doc_tx: watch::Sender<Option<Arc<TypstDocument>>>,
pub(super) export_tx: mpsc::UnboundedSender<ExportRequest>,
pub(super) editor_tx: EditorSender,
pub(crate) doc_tx: watch::Sender<Option<Arc<TypstDocument>>>,
pub(crate) export_tx: mpsc::UnboundedSender<ExportRequest>,
pub(crate) editor_tx: EditorSender,
}
impl CompilationHandle for CompileHandler {
impl PreviewCompilationHandle for CompileHandler {
fn status(&self, _status: CompileStatus) {
self.editor_tx
.send(EditorRequest::Status(
self.diag_group.clone(),
TinymistCompileStatusEnum::Compiling,
))
.unwrap();
#[cfg(feature = "preview")]
if let Some(inner) = self.inner.lock().as_ref() {
if let Some(inner) = self.inner.as_ref() {
inner.status(_status);
}
}
@ -113,14 +117,48 @@ impl CompilationHandle for CompileHandler {
.unwrap();
#[cfg(feature = "preview")]
if let Some(inner) = self.inner.lock().as_ref() {
if let Some(inner) = self.inner.as_ref() {
inner.notify_compile(res);
}
}
}
impl CompilationHandle<LspCompilerFeat> for CompileHandler {
fn status(&self, rep: CompileReport) {
let status = match rep {
CompileReport::Suspend => {
self.push_diagnostics(None);
CompileStatus::CompileError
}
CompileReport::Stage(_, _, _) => CompileStatus::Compiling,
CompileReport::CompileSuccess(_, _, _) | CompileReport::CompileWarning(_, _, _) => {
CompileStatus::CompileSuccess
}
CompileReport::CompileError(_, _, _) | CompileReport::ExportError(_, _, _) => {
CompileStatus::CompileError
}
};
<Self as PreviewCompilationHandle>::status(self, status);
}
fn notify_compile(&self, snap: &CompiledArtifact<LspCompilerFeat>, _rep: CompileReport) {
let (res, err) = match snap.doc.clone() {
Ok(doc) => (Ok(doc), EcoVec::new()),
Err(err) => (Err(CompileStatus::CompileError), err),
};
self.notify_diagnostics(
&snap.world,
err,
snap.env.tracer.as_ref().map(|e| e.clone().warnings()),
);
<Self as PreviewCompilationHandle>::notify_compile(self, res);
}
}
impl CompileHandler {
fn push_diagnostics(&mut self, diagnostics: Option<DiagnosticsMap>) {
fn push_diagnostics(&self, diagnostics: Option<DiagnosticsMap>) {
let res = self
.editor_tx
.send(EditorRequest::Diag(self.diag_group.clone(), diagnostics));
@ -128,70 +166,9 @@ impl CompileHandler {
error!("failed to send diagnostics: {err:#}");
}
}
}
pub struct CompileDriver {
pub(super) inner: PureCompiler<LspWorld>,
#[allow(unused)]
pub(super) handler: CompileHandler,
pub(super) analysis: Analysis,
pub(super) periscope: PeriscopeRenderer,
}
impl CompileMiddleware for CompileDriver {
type Compiler = PureCompiler<LspWorld>;
fn inner(&self) -> &Self::Compiler {
&self.inner
}
fn inner_mut(&mut self) -> &mut Self::Compiler {
&mut self.inner
}
fn wrap_compile(
&mut self,
world: &LspWorld,
env: &mut CompileEnv,
) -> SourceResult<Arc<typst::model::Document>> {
self.handler
.editor_tx
.send(EditorRequest::Status(
self.handler.diag_group.clone(),
TinymistCompileStatusEnum::Compiling,
))
.unwrap();
self.handler.status(CompileStatus::Compiling);
match self
.ensure_main(world)
.and_then(|_| self.inner_mut().compile(world, env))
{
Ok(doc) => {
self.handler.notify_compile(Ok(doc.clone()));
self.notify_diagnostics(
world,
EcoVec::new(),
env.tracer.as_ref().map(|e| e.clone().warnings()),
);
Ok(doc)
}
Err(err) => {
self.handler
.notify_compile(Err(CompileStatus::CompileError));
self.notify_diagnostics(
world,
err,
env.tracer.as_ref().map(|e| e.clone().warnings()),
);
Err(EcoVec::new())
}
}
}
}
impl CompileDriver {
fn notify_diagnostics(
&mut self,
&self,
world: &LspWorld,
errors: EcoVec<SourceDiagnostic>,
warnings: Option<EcoVec<SourceDiagnostic>>,
@ -209,17 +186,17 @@ impl CompileDriver {
// todo: check all errors in this file
let detached = entry.is_inactive();
let valid = !detached;
self.handler.push_diagnostics(valid.then_some(diagnostics));
self.push_diagnostics(valid.then_some(diagnostics));
}
Err(err) => {
error!("TypstActor: failed to convert diagnostics: {:#}", err);
self.handler.push_diagnostics(None);
self.push_diagnostics(None);
}
}
}
pub fn run_analysis<T>(
&mut self,
&self,
w: &LspWorld,
f: impl FnOnce(&mut AnalysisContext<'_>) -> T,
) -> anyhow::Result<T> {
@ -271,73 +248,63 @@ impl CompileDriver {
let w = WrapWorld(w, &self.periscope);
self.analysis.root = root;
Ok(f(&mut AnalysisContext::new_borrow(&w, &mut self.analysis)))
let mut analysis = self.analysis.snapshot(root, &w);
Ok(f(&mut analysis))
}
}
pub struct CompileClientActorImpl<C: Compiler> {
pub diag_group: String,
pub struct CompileClientActor {
pub handle: Arc<CompileHandler>,
pub config: CompileConfig,
entry: EntryState,
intr_tx: mpsc::UnboundedSender<Interrupt<CompileService<C>>>,
export_tx: mpsc::UnboundedSender<ExportRequest>,
intr_tx: mpsc::UnboundedSender<Interrupt<LspCompilerFeat>>,
}
impl<C: Compiler<W = LspWorld> + Send> CompileClientActorImpl<C> {
impl CompileClientActor {
pub(crate) fn new(
diag_group: String,
handle: Arc<CompileHandler>,
config: CompileConfig,
entry: EntryState,
intr_tx: mpsc::UnboundedSender<Interrupt<CompileService<C>>>,
export_tx: mpsc::UnboundedSender<ExportRequest>,
intr_tx: mpsc::UnboundedSender<Interrupt<LspCompilerFeat>>,
) -> Self {
Self {
diag_group,
handle,
config,
entry,
intr_tx,
export_tx,
}
}
fn steal_inner<Ret: Send + 'static>(
&self,
f: impl FnOnce(&mut CompileService<C>) -> Ret + Send + 'static,
) -> ZResult<oneshot::Receiver<Ret>> {
/// Snapshot the compiler thread for tasks
pub fn snapshot(&self) -> ZResult<QuerySnap> {
let (tx, rx) = oneshot::channel();
self.intr_tx
.send(Interrupt::Snapshot(tx))
.map_err(map_string_err("failed to send snapshot request"))?;
Ok(QuerySnap {
#[cfg(feature = "stable-server")]
rx: Arc::new(Mutex::new(None)),
#[cfg(not(feature = "stable-server"))]
rx: Arc::new(Mutex::new(Some(rx))),
#[cfg(feature = "stable-server")]
snap: tokio::sync::OnceCell::new_with(Some(threaded_receive(rx))),
#[cfg(not(feature = "stable-server"))]
snap: tokio::sync::OnceCell::new(),
handle: self.handle.clone(),
})
}
/// Snapshot the compiler thread for tasks
pub fn sync_snapshot(&self) -> ZResult<CompileSnapshot<LspCompilerFeat>> {
let (tx, rx) = oneshot::channel();
let task = Box::new(move |this: &mut CompileService<C>| {
if tx.send(f(this)).is_err() {
// Receiver was dropped. The main thread may have exited, or the request may
// have been cancelled.
log::warn!("could not send back return value from Typst thread");
}
});
self.intr_tx
.send(Interrupt::Task(task))
.map_err(map_string_err("failed to send steal request"))?;
.send(Interrupt::Snapshot(tx))
.map_err(map_string_err("failed to send snapshot request"))?;
Ok(rx)
}
/// Steal the compiler thread and run the given function.
pub fn steal<Ret: Send + 'static>(
&self,
f: impl FnOnce(&mut CompileService<C>) -> Ret + Send + 'static,
) -> ZResult<Ret> {
utils::threaded_receive(self.steal_inner(f)?)
}
/// Steal the compiler thread and run the given function.
pub async fn steal_async<Ret: Send + 'static>(
&self,
f: impl FnOnce(&mut CompileService<C>) -> Ret + Send + 'static,
) -> ZResult<Ret> {
self.steal_inner(f)?
.await
.map_err(map_string_err("failed to call steal_async"))
threaded_receive(rx).map_err(map_string_err("failed to get snapshot"))
}
pub fn sync_config(&mut self, config: CompileConfig) {
@ -348,8 +315,15 @@ impl<C: Compiler<W = LspWorld> + Send> CompileClientActorImpl<C> {
let _ = self.intr_tx.send(Interrupt::Memory(event));
}
pub fn change_task(&self, task_inputs: TaskInputs) {
let _ = self.intr_tx.send(Interrupt::ChangeTask(task_inputs));
}
pub(crate) fn change_export_pdf(&mut self, config: ExportConfig) {
let _ = self.export_tx.send(ExportRequest::ChangeConfig(config));
let _ = self
.handle
.export_tx
.send(ExportRequest::ChangeConfig(config));
}
pub fn on_export(&self, kind: ExportKind, path: PathBuf) -> anyhow::Result<Option<PathBuf>> {
@ -357,7 +331,10 @@ impl<C: Compiler<W = LspWorld> + Send> CompileClientActorImpl<C> {
info!("CompileActor: on export: {}", path.display());
let (tx, rx) = oneshot::channel();
let _ = self.export_tx.send(ExportRequest::Oneshot(Some(kind), tx));
let _ = self
.handle
.export_tx
.send(ExportRequest::Oneshot(Some(kind), tx));
let res: Option<PathBuf> = utils::threaded_receive(rx)?;
info!("CompileActor: on export end: {path:?} as {res:?}");
@ -366,21 +343,24 @@ impl<C: Compiler<W = LspWorld> + Send> CompileClientActorImpl<C> {
pub fn on_save_export(&self, path: PathBuf) -> anyhow::Result<()> {
info!("CompileActor: on save export: {}", path.display());
let _ = self.export_tx.send(ExportRequest::OnSaved);
let _ = self.handle.export_tx.send(ExportRequest::OnSaved);
Ok(())
}
}
impl CompileClientActorImpl<CompileDriver> {
impl CompileClientActor {
pub fn settle(&mut self) {
let _ = self.change_entry(None);
info!("TypstActor({}): settle requested", self.diag_group);
info!("TypstActor({}): settle requested", self.handle.diag_group);
let (tx, rx) = oneshot::channel();
let _ = self.intr_tx.send(Interrupt::Settle(tx));
match utils::threaded_receive(rx) {
Ok(()) => info!("TypstActor({}): settled", self.diag_group),
Err(err) => error!("TypstActor({}): failed to settle: {err:#}", self.diag_group),
Ok(()) => info!("TypstActor({}): settled", self.handle.diag_group),
Err(err) => error!(
"TypstActor({}): failed to settle: {err:#}",
self.handle.diag_group
),
}
}
@ -397,81 +377,130 @@ impl CompileClientActorImpl<CompileDriver> {
return Ok(false);
}
let diag_group = &self.diag_group;
let diag_group = &self.handle.diag_group;
info!("the entry file of TypstActor({diag_group}) is changing to {next_entry:?}");
// todo
let next = next_entry.clone();
self.steal(move |compiler| {
compiler.change_entry(next.clone());
let next_is_inactive = next.is_inactive();
let res = compiler.verse.mutate_entry(next);
if next_is_inactive {
info!("TypstActor: removing diag");
compiler.compiler.compiler.handler.push_diagnostics(None);
}
res.map(|_| ())
.map_err(|err| error_once!("failed to change entry", err: format!("{err:?}")))
})??;
let next = next_entry.clone();
let _ = self.export_tx.send(ExportRequest::ChangeExportPath(next));
self.change_task(TaskInputs {
entry: Some(next_entry.clone()),
..Default::default()
});
// todo: let export request accept compiled artifact
let _ = self
.handle
.export_tx
.send(ExportRequest::ChangeExportPath(next_entry.clone()));
self.entry = next_entry;
Ok(true)
}
pub fn steal_state<T: Send + Sync + 'static>(
&self,
f: impl FnOnce(&mut AnalysisContext, Option<VersionedDocument>) -> T + Send + Sync + 'static,
) -> anyhow::Result<T> {
self.steal(move |compiler| {
let doc = compiler.success_doc();
let w = compiler.verse.spawn();
let c = &mut compiler.compiler.compiler;
c.run_analysis(&w, move |ctx| f(ctx, doc))
})?
}
pub fn steal_world<T: Send + Sync + 'static>(
&self,
f: impl FnOnce(&mut AnalysisContext) -> T + Send + Sync + 'static,
) -> anyhow::Result<T> {
self.steal(move |compiler| {
let w = compiler.verse.spawn();
compiler.compiler.compiler.run_analysis(&w, f)
})?
}
pub fn clear_cache(&self) {
let _ = self.steal(|c| {
c.compiler.compiler.analysis.caches = Default::default();
});
self.handle.analysis.clear_cache();
}
pub fn collect_server_info(&self) -> anyhow::Result<HashMap<String, ServerInfoResponse>> {
let dg = self.diag_group.clone();
self.steal(move |c| {
let cc = &c.compiler.compiler;
let w = c.verse.spawn();
let dg = self.handle.diag_group.clone();
let snap = self.sync_snapshot()?;
let w = &snap.world;
let info = ServerInfoResponse {
root: w.entry_state().root().map(|e| e.as_ref().to_owned()),
font_paths: w.font_resolver.font_paths().to_owned(),
inputs: c.verse.inputs().as_ref().deref().clone(),
inputs: w.inputs().as_ref().deref().clone(),
estimated_memory_usage: HashMap::from_iter([
// todo: vfs memory usage
// ("vfs".to_owned(), w.vfs.read().memory_usage()),
("analysis".to_owned(), cc.analysis.estimated_memory()),
// todo: analysis memory usage
// ("analysis".to_owned(), cc.analysis.estimated_memory()),
]),
};
HashMap::from_iter([(dg, info)])
})
.map_err(|e| e.into())
Ok(HashMap::from_iter([(dg, info)]))
}
}
pub struct QuerySnap {
rx: Arc<Mutex<Option<oneshot::Receiver<CompileSnapshot<LspCompilerFeat>>>>>,
snap: tokio::sync::OnceCell<ZResult<CompileSnapshot<LspCompilerFeat>>>,
handle: Arc<CompileHandler>,
}
impl QuerySnap {
/// Snapshot the compiler thread for tasks
pub async fn snapshot(&self) -> ZResult<CompileSnapshot<LspCompilerFeat>> {
self.snap
.get_or_init(|| async move {
let rx = self.rx.lock().take().unwrap();
rx.await.map_err(map_string_err("failed to get snapshot"))
})
.await
.clone()
}
/// Snapshot the compiler thread for tasks
pub fn snapshot_sync(&self) -> ZResult<CompileSnapshot<LspCompilerFeat>> {
if let Some(snap) = self.snap.get() {
return snap.clone();
}
let rx = self.rx.lock().take().unwrap();
threaded_receive(rx).map_err(map_string_err("failed to get snapshot"))
}
pub fn stateful_sync<T: tinymist_query::StatefulRequest>(
&self,
req: T,
) -> anyhow::Result<Option<T::Response>> {
let snap = self.snapshot_sync()?;
let w = &snap.world;
self.handle.run_analysis(w, |ctx| {
req.request(
ctx,
snap.success_doc.map(|doc| VersionedDocument {
version: w.revision().get(),
document: doc,
}),
)
})
}
pub async fn stateful<T: tinymist_query::StatefulRequest>(
&self,
req: T,
) -> anyhow::Result<Option<T::Response>> {
let snap = self.snapshot().await?;
let w = &snap.world;
self.handle.run_analysis(w, |ctx| {
req.request(
ctx,
snap.success_doc.map(|doc| VersionedDocument {
version: w.revision().get(),
document: doc,
}),
)
})
}
pub fn semantic_sync<T: tinymist_query::SemanticRequest>(
&self,
req: T,
) -> anyhow::Result<Option<T::Response>> {
let snap = self.snapshot_sync()?;
let w = &snap.world;
self.handle.run_analysis(w, |ctx| req.request(ctx))
}
pub async fn semantic<T: tinymist_query::SemanticRequest>(
&self,
req: T,
) -> anyhow::Result<Option<T::Response>> {
let snap = self.snapshot().await?;
let w = &snap.world;
self.handle.run_analysis(w, |ctx| req.request(ctx))
}
}
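
A hypothetical call site showing how the new QuerySnap is consumed; it only wraps the stateful helper above, so the wrapper name is invented but the calls are the ones defined in this file:

// One snapshot per request; the analysis then runs against that snapshot
// without blocking the compiler thread.
async fn run_stateful<T: tinymist_query::StatefulRequest>(
    client: &CompileClientActor,
    req: T,
) -> anyhow::Result<Option<T::Response>> {
    let snap = client.snapshot()?;
    snap.stateful(req).await
}

The same shape works synchronously via stateful_sync (which uses snapshot_sync internally) when a request must be answered on the current thread.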

View file

@ -2,34 +2,123 @@
//!
//! Please check `tinymist::actor::typ_client` for architecture details.
use std::{collections::HashSet, path::Path, sync::Arc, thread::JoinHandle};
use std::{
collections::HashSet,
ops::Deref,
path::Path,
sync::{Arc, OnceLock},
thread::JoinHandle,
};
use tinymist_query::VersionedDocument;
use tokio::sync::{mpsc, oneshot};
use typst::{diag::SourceResult, util::Deferred};
use typst_ts_compiler::{
service::{
features::{FeatureSet, WITH_COMPILING_STATUS_FEATURE},
watch_deps, CompileEnv, CompileReporter, Compiler, ConsoleDiagReporter,
},
vfs::notify::{FilesystemEvent, MemoryEvent, NotifyMessage},
vfs::notify::{FilesystemEvent, MemoryEvent, NotifyMessage, UpstreamUpdateEvent},
watch_deps,
world::{CompilerFeat, CompilerUniverse, CompilerWorld},
Revising, WorldDeps,
CompileEnv, CompileReport, CompileReporter, Compiler, ConsoleDiagReporter, EntryReader,
PureCompiler, Revising, TaskInputs, WorldDeps,
};
use typst_ts_core::{
config::compiler::EntryState, exporter_builtins::GroupExporter, Exporter, QueryRef,
TypstDocument,
};
use typst_ts_core::{config::compiler::EntryState, TypstDocument};
/// A task that can be sent to the context (compiler thread)
///
/// The internal function will be dereferenced and called on the context.
type BorrowTask<Ctx> = Box<dyn FnOnce(&mut Ctx) + Send + 'static>;
type UsingCompiler<F> = CompileReporter<PureCompiler<CompilerWorld<F>>, CompilerWorld<F>>;
type CompileRawResult = Deferred<(SourceResult<Arc<TypstDocument>>, CompileEnv)>;
type DocState<F> = QueryRef<CompileRawResult, (), (UsingCompiler<F>, CompileEnv)>;
pub enum Interrupt<Ctx> {
/// Compile anyway.
Compile,
/// Borrow the compiler thread and run the task.
///
/// See [`CompileClient<Ctx>::steal_async`] for more information.
Task(BorrowTask<Ctx>),
pub struct CompileSnapshot<F: CompilerFeat> {
/// The compiler-thread local logical tick when the snapshot is taken.
pub compile_tick: usize,
/// Using env
pub env: CompileEnv,
/// Using world
pub world: Arc<CompilerWorld<F>>,
/// Compiling the document.
doc_state: Arc<DocState<F>>,
/// The last successfully compiled document.
pub success_doc: Option<Arc<TypstDocument>>,
}
impl<F: CompilerFeat + 'static> CompileSnapshot<F> {
pub fn start(&self) -> &CompileRawResult {
let res = self.doc_state.compute_with_context(|(mut c, mut env)| {
let w = self.world.clone();
Ok(Deferred::new(move || {
let res = c.compile(&w, &mut env);
(res, env)
}))
});
res.ok().unwrap()
}
pub fn doc(&self) -> SourceResult<Arc<TypstDocument>> {
self.start().wait().0.clone()
}
pub fn compile(&self) -> CompiledArtifact<F> {
let (doc, env) = self.start().wait().clone();
CompiledArtifact {
world: self.world.clone(),
compile_tick: self.compile_tick,
doc,
env,
success_doc: self.success_doc.clone(),
}
}
}
impl<F: CompilerFeat> Clone for CompileSnapshot<F> {
fn clone(&self) -> Self {
Self {
compile_tick: self.compile_tick,
env: self.env.clone(),
world: self.world.clone(),
doc_state: self.doc_state.clone(),
success_doc: self.success_doc.clone(),
}
}
}
#[derive(Clone)]
pub struct CompiledArtifact<F: CompilerFeat> {
pub world: Arc<CompilerWorld<F>>,
pub compile_tick: usize,
pub doc: SourceResult<Arc<TypstDocument>>,
/// Used env
pub env: CompileEnv,
pub success_doc: Option<Arc<TypstDocument>>,
}
// pub type NopCompilationHandle<T> = std::marker::PhantomData<fn(T)>;
#[cfg(feature = "stable-server")]
const COMPILE_CONCURRENCY: usize = 0;
#[cfg(not(feature = "stable-server"))]
const COMPILE_CONCURRENCY: usize = 1;
pub trait CompilationHandle<F: CompilerFeat>: Send + Sync + 'static {
fn status(&self, rep: CompileReport);
fn notify_compile(&self, res: &CompiledArtifact<F>, rep: CompileReport);
}
impl<F: CompilerFeat + Send + Sync + 'static> CompilationHandle<F>
for std::marker::PhantomData<fn(F)>
{
fn status(&self, _: CompileReport) {}
fn notify_compile(&self, _: &CompiledArtifact<F>, _: CompileReport) {}
}
pub enum Interrupt<F: CompilerFeat> {
/// Compiled from computing thread.
Compiled(CompiledArtifact<F>),
/// Change the watching entry.
ChangeTask(TaskInputs),
/// Request compiler to snapshot the current state.
Snapshot(oneshot::Sender<CompileSnapshot<F>>),
/// Memory file changes.
Memory(MemoryEvent),
/// File system event.
@ -52,17 +141,32 @@ struct TaggedMemoryEvent {
event: MemoryEvent,
}
struct SuspendState {
suspended: bool,
dirty: bool,
pub struct CompileServerOpts<F: CompilerFeat> {
pub exporter: GroupExporter<CompileSnapshot<F>>,
pub feature_set: FeatureSet,
pub compile_concurrency: usize,
}
/// The compiler thread.
pub struct CompileServerActor<C: Compiler, F: CompilerFeat> {
impl<F: CompilerFeat + Send + Sync + 'static> Default for CompileServerOpts<F> {
fn default() -> Self {
Self {
exporter: GroupExporter::new(vec![]),
feature_set: FeatureSet::default(),
compile_concurrency: COMPILE_CONCURRENCY,
}
}
}
/// The compiler actor.
pub struct CompileServerActor<F: CompilerFeat> {
/// The underlying universe.
pub verse: CompilerUniverse<F>,
/// The underlying compiler.
pub compiler: CompileReporter<C, CompilerWorld<F>>,
pub compiler: CompileReporter<PureCompiler<CompilerWorld<F>>, CompilerWorld<F>>,
/// The exporter for the compiled document.
pub exporter: GroupExporter<CompileSnapshot<F>>,
/// The compilation handle.
pub watch_handle: Arc<dyn CompilationHandle<F>>,
/// Whether to enable file system watching.
pub enable_watch: bool,
@ -75,38 +179,47 @@ pub struct CompileServerActor<C: Compiler, F: CompilerFeat> {
estimated_shadow_files: HashSet<Arc<Path>>,
/// The latest compiled document.
pub(crate) latest_doc: Option<Arc<TypstDocument>>,
/// The latest successfully compiled document.
latest_success_doc: Option<Arc<TypstDocument>>,
/// feature set for compile_once mode.
once_feature_set: Arc<FeatureSet>,
/// Shared feature set for watch mode.
watch_feature_set: Arc<FeatureSet>,
// todo: private me
/// Channel for sending interrupts to the compiler thread.
intr_tx: mpsc::UnboundedSender<Interrupt<Self>>,
pub intr_tx: mpsc::UnboundedSender<Interrupt<F>>,
/// Channel for receiving interrupts from the compiler thread.
intr_rx: mpsc::UnboundedReceiver<Interrupt<Self>>,
intr_rx: mpsc::UnboundedReceiver<Interrupt<F>>,
suspend_state: SuspendState,
watch_snap: OnceLock<CompileSnapshot<F>>,
suspended: bool,
committed_revision: usize,
compile_concurrency: usize,
}
impl<F: CompilerFeat + Send + 'static, C: Compiler<W = CompilerWorld<F>> + Send + 'static>
CompileServerActor<C, F>
{
pub fn new_with_features(
compiler: C,
impl<F: CompilerFeat + Send + Sync + 'static> CompileServerActor<F> {
/// Create a new compiler actor with options
pub fn new_with(
verse: CompilerUniverse<F>,
entry: EntryState,
feature_set: FeatureSet,
intr_tx: mpsc::UnboundedSender<Interrupt<Self>>,
intr_rx: mpsc::UnboundedReceiver<Interrupt<Self>>,
intr_tx: mpsc::UnboundedSender<Interrupt<F>>,
intr_rx: mpsc::UnboundedReceiver<Interrupt<F>>,
CompileServerOpts {
exporter,
feature_set,
compile_concurrency,
}: CompileServerOpts<F>,
) -> Self {
let entry = verse.entry_state();
Self {
compiler: CompileReporter::new(compiler)
compiler: CompileReporter::new(std::marker::PhantomData)
.with_generic_reporter(ConsoleDiagReporter::default()),
exporter,
verse,
logical_tick: 1,
watch_handle: Arc::new(std::marker::PhantomData),
enable_watch: false,
dirty_shadow_logical_tick: 0,
@ -121,79 +234,41 @@ impl<F: CompilerFeat + Send + 'static, C: Compiler<W = CompilerWorld<F>> + Send
intr_tx,
intr_rx,
suspend_state: SuspendState {
watch_snap: OnceLock::new(),
suspended: entry.is_inactive(),
dirty: false,
},
committed_revision: 0,
compile_concurrency,
}
}
/// Create a new compiler thread.
/// Create a new compiler actor.
pub fn new(
compiler: C,
world: CompilerUniverse<F>,
entry: EntryState,
intr_tx: mpsc::UnboundedSender<Interrupt<Self>>,
intr_rx: mpsc::UnboundedReceiver<Interrupt<Self>>,
verse: CompilerUniverse<F>,
intr_tx: mpsc::UnboundedSender<Interrupt<F>>,
intr_rx: mpsc::UnboundedReceiver<Interrupt<F>>,
) -> Self {
Self::new_with_features(
compiler,
world,
entry,
FeatureSet::default(),
intr_tx,
intr_rx,
)
Self::new_with(verse, intr_tx, intr_rx, CompileServerOpts::default())
}
pub fn with_watch(mut self, watch: Option<Arc<dyn CompilationHandle<F>>>) -> Self {
self.enable_watch = watch.is_some();
match watch {
Some(watch) => self.watch_handle = watch,
None => self.watch_handle = Arc::new(std::marker::PhantomData),
}
pub fn with_watch(mut self, enable_watch: bool) -> Self {
self.enable_watch = enable_watch;
self
}
pub fn intr_tx(&self) -> mpsc::UnboundedSender<Interrupt<Self>> {
self.intr_tx.clone()
}
pub fn success_doc(&self) -> Option<VersionedDocument> {
self.latest_success_doc
.clone()
.map(|doc| VersionedDocument {
version: self.logical_tick,
document: doc,
})
}
pub fn doc(&self) -> Option<VersionedDocument> {
self.latest_doc.clone().map(|doc| VersionedDocument {
version: self.logical_tick,
document: doc,
})
}
fn make_env(&self, feature_set: Arc<FeatureSet>) -> CompileEnv {
CompileEnv::default().configure_shared(feature_set)
}
/// Run the compiler thread synchronously.
pub fn run(self) -> bool {
use tokio::runtime::Handle;
if Handle::try_current().is_err() && self.enable_watch {
log::error!("Typst compiler thread with watch enabled must be run in a tokio runtime");
return false;
}
tokio::task::block_in_place(move || Handle::current().block_on(self.block_run_inner()))
}
/// Inner function for `run`, it launches the compiler thread and blocks
/// until it exits.
async fn block_run_inner(mut self) -> bool {
/// Launches the compiler thread and blocks until it exits.
#[allow(unused)]
pub async fn run_and_wait(mut self) -> bool {
if !self.enable_watch {
let mut env = self.make_env(self.once_feature_set.clone());
let w = self.verse.spawn();
let compiled = self.compiler.compile(&w, &mut env);
return compiled.is_ok();
let artifact = self.compile_once();
return artifact.doc.is_ok();
}
if let Some(h) = self.spawn().await {
@ -208,9 +283,7 @@ impl<F: CompilerFeat + Send + 'static, C: Compiler<W = CompilerWorld<F>> + Send
/// Spawn the compiler thread.
pub async fn spawn(mut self) -> Option<JoinHandle<()>> {
if !self.enable_watch {
let mut env = self.make_env(self.once_feature_set.clone());
let w = self.verse.spawn();
self.compiler.compile(&w, &mut env).ok();
self.compile_once();
return None;
}
@ -232,8 +305,10 @@ impl<F: CompilerFeat + Send + 'static, C: Compiler<W = CompilerWorld<F>> + Send
}
};
// Trigger the first compilation (if active)
self.watch_compile(&compiler_ack);
// Spawn file system watcher.
// todo: don't compile if no entry
let fs_tx = self.intr_tx.clone();
tokio::spawn(watch_deps(dep_rx, move |event| {
log_send_error("fs_event", fs_tx.send(Interrupt::Fs(event)));
@ -260,9 +335,8 @@ impl<F: CompilerFeat + Send + 'static, C: Compiler<W = CompilerWorld<F>> + Send
}
// Ensure the document is compiled before executing tasks.
if matches!(event, Interrupt::Task(_)) && need_compile {
self.compile(&compiler_ack);
need_compile = false;
if matches!(event, Interrupt::Snapshot(_)) && need_compile {
need_compile = self.watch_compile(&compiler_ack);
}
need_compile |= self.process(event, &compiler_ack);
@ -275,7 +349,13 @@ impl<F: CompilerFeat + Send + 'static, C: Compiler<W = CompilerWorld<F>> + Send
}
if need_compile {
self.compile(&compiler_ack);
need_compile = self.watch_compile(&compiler_ack);
}
if need_compile {
need_compile = self.watch_compile(&compiler_ack);
if need_compile {
log::warn!("CompileServerActor: watch_compile infinite loop?");
}
}
}
@ -287,58 +367,169 @@ impl<F: CompilerFeat + Send + 'static, C: Compiler<W = CompilerWorld<F>> + Send
Some(compile_thread.unwrap())
}
pub fn change_entry(&mut self, entry: EntryState) {
self.suspend_state.suspended = entry.is_inactive();
if !self.suspend_state.suspended && self.suspend_state.dirty {
self.intr_tx.send(Interrupt::Compile).ok();
fn snapshot(&self, is_once: bool) -> CompileSnapshot<F> {
let world = self.verse.snapshot();
let c = self.compiler.clone();
let mut env = self.make_env(if is_once {
self.once_feature_set.clone()
} else {
self.watch_feature_set.clone()
});
if env.tracer.is_none() {
env.tracer = Some(Default::default());
}
CompileSnapshot {
world: Arc::new(world.clone()),
env: env.clone(),
compile_tick: self.logical_tick,
doc_state: Arc::new(QueryRef::with_context((c, env))),
success_doc: self.latest_success_doc.clone(),
}
}
// Reset the document state.
self.latest_doc = None;
self.latest_success_doc = None;
/// Compile the document once.
pub fn compile_once(&mut self) -> CompiledArtifact<F> {
let e = Arc::new(self.snapshot(true));
let err = self.exporter.export(e.world.deref(), e.clone());
if let Err(err) = err {
// todo: ExportError
log::error!("CompileServerActor: export error: {err:?}");
}
/// Compile the document.
fn compile(&mut self, send: impl Fn(CompilerResponse)) {
use CompilerResponse::*;
if self.suspend_state.suspended {
self.suspend_state.dirty = true;
return;
e.compile()
}
let w = self.verse.spawn();
/// Watch and compile the document once.
fn watch_compile(&mut self, send: impl Fn(CompilerResponse)) -> bool {
if self.suspended {
return false;
}
// Compile the document.
let mut env = self.make_env(self.watch_feature_set.clone());
self.latest_doc = self.compiler.compile(&w, &mut env).ok();
if self.latest_doc.is_some() {
let start = reflexo::time::now();
let compiling = self.snapshot(false);
self.watch_snap = OnceLock::new();
self.watch_snap.get_or_init(|| compiling.clone());
let h = self.watch_handle.clone();
let intr_tx = self.intr_tx.clone();
// todo unwrap main id
let id = compiling.world.main_id().unwrap();
self.watch_handle
.status(CompileReport::Stage(id, "compiling", start));
let compile = move || {
let compiled = compiling.compile();
let elapsed = start.elapsed().unwrap_or_default();
let rep;
match &compiled.doc {
Ok(..) => {
let warnings = compiled.env.tracer.as_ref().unwrap().clone().warnings();
if warnings.is_empty() {
rep = CompileReport::CompileSuccess(id, warnings, elapsed);
} else {
rep = CompileReport::CompileWarning(id, warnings, elapsed);
}
}
Err(err) => {
rep = CompileReport::CompileError(id, err.clone(), elapsed);
}
};
h.notify_compile(&compiled, rep);
compiled
};
if self.compile_concurrency == 0 {
self.processs_compile(compile(), send)
} else {
rayon::spawn(move || {
log_send_error("compiled", intr_tx.send(Interrupt::Compiled(compile())));
});
false
}
}
fn processs_compile(
&mut self,
artifact: CompiledArtifact<F>,
send: impl Fn(CompilerResponse),
) -> bool {
let w = &artifact.world;
let compiled_revision = w.revision().get();
if self.committed_revision >= compiled_revision {
return false;
}
let doc = artifact.doc.ok();
// Update state.
self.committed_revision = compiled_revision;
self.latest_doc.clone_from(&doc);
if doc.is_some() {
self.latest_success_doc.clone_from(&self.latest_doc);
}
// Notify the new file dependencies.
let mut deps = vec![];
artifact
.world
.iter_dependencies(&mut |dep| deps.push(dep.clone()));
send(CompilerResponse::Notify(NotifyMessage::SyncDependency(
deps,
)));
// Trigger an evict task.
rayon::spawn(move || {
// Evict compilation cache.
let evict_start = std::time::Instant::now();
comemo::evict(30);
let elapsed = evict_start.elapsed();
log::info!("CompileServerActor: evict compilation cache in {elapsed:?}",);
log::info!("CompileServerActor: evict compilation cache in {elapsed:?}");
});
// Notify the new file dependencies.
let mut deps = vec![];
w.iter_dependencies(&mut |dep| deps.push(dep.clone()));
send(Notify(NotifyMessage::SyncDependency(deps)));
self.process_may_laggy_compile()
}
fn process_may_laggy_compile(&mut self) -> bool {
// todo: rate limit
false
}
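For orientation, here is a minimal sketch (not from this patch) of the inline-versus-spawned dispatch that watch_compile uses above: with compile_concurrency == 0 the artifact is handled on the actor thread, otherwise the closure runs on rayon and the finished artifact is reported back over a channel. Stand-in Artifact type and std mpsc channel are assumed; the real actor sends Interrupt::Compiled over its own intr channel and depends on the rayon crate.

use std::sync::mpsc;

/// Stand-in for the compiled artifact produced by the closure.
struct Artifact(u64);

fn dispatch_compile(
    concurrency: usize,
    intr_tx: mpsc::Sender<Artifact>,
    compile: impl FnOnce() -> Artifact + Send + 'static,
) -> bool {
    if concurrency == 0 {
        // Synchronous path: compile on the actor thread and handle it in place.
        let _artifact = compile();
        // ... commit the artifact here; return whether more compilation is needed.
        false
    } else {
        // Concurrent path: offload to rayon; the finished artifact comes back
        // to the actor loop through the channel as an interrupt.
        rayon::spawn(move || {
            let _ = intr_tx.send(compile());
        });
        false
    }
}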
/// Process an interrupt. Returns whether a compilation is needed.
fn process(&mut self, event: Interrupt<Self>, send: impl Fn(CompilerResponse)) -> bool {
fn process(&mut self, event: Interrupt<F>, send: impl Fn(CompilerResponse)) -> bool {
use CompilerResponse::*;
match event {
Interrupt::Compile => true,
Interrupt::Task(task) => {
log::debug!("CompileServerActor: execute task");
task(self);
Interrupt::Snapshot(task) => {
log::debug!("CompileServerActor: take snapshot");
let _ = task.send(self.watch_snap.get_or_init(|| self.snapshot(false)).clone());
false
}
Interrupt::ChangeTask(change) => {
if let Some(entry) = change.entry.clone() {
self.change_entry(entry.clone());
}
self.verse.increment_revision(|verse| {
if let Some(inputs) = change.inputs {
verse.set_inputs(inputs);
}
if let Some(entry) = change.entry {
let res = verse.mutate_entry(entry);
if let Err(err) = res {
log::error!("CompileServerActor: change entry error: {err:?}");
}
}
});
true
}
Interrupt::Compiled(artifact) => self.processs_compile(artifact, send),
Interrupt::Memory(event) => {
log::debug!("CompileServerActor: memory event incoming");
@ -368,15 +559,13 @@ impl<F: CompilerFeat + Send + 'static, C: Compiler<W = CompilerWorld<F>> + Send
// Otherwise, send upstream update event.
// Also, record the logical tick when shadow is dirty.
self.dirty_shadow_logical_tick = self.logical_tick;
send(Notify(NotifyMessage::UpstreamUpdate(
typst_ts_compiler::vfs::notify::UpstreamUpdateEvent {
send(Notify(NotifyMessage::UpstreamUpdate(UpstreamUpdateEvent {
invalidates: files.into_iter().collect(),
opaque: Box::new(TaggedMemoryEvent {
logical_tick: self.logical_tick,
event,
}),
},
)));
})));
false
}
@ -399,6 +588,20 @@ impl<F: CompilerFeat + Send + 'static, C: Compiler<W = CompilerWorld<F>> + Send
}
}
fn change_entry(&mut self, entry: EntryState) -> bool {
self.suspended = entry.is_inactive();
if self.suspended {
log::info!("CompileServerActor: removing diag");
self.watch_handle.status(CompileReport::Suspend);
}
// Reset the document state.
self.latest_doc = None;
self.latest_success_doc = None;
!self.suspended
}
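A rough sketch (simplified types, not from this patch) of the watch_snap caching seen above: the OnceLock hands out one shared snapshot to Interrupt::Snapshot requests and is reset whenever a new watch compile starts, so each revision is snapshotted at most once.

use std::sync::OnceLock;

#[derive(Clone)]
struct Snapshot { revision: u64 }

struct Actor { revision: u64, watch_snap: OnceLock<Snapshot> }

impl Actor {
    /// Return the cached snapshot, creating it on first use.
    fn take_snapshot(&self) -> Snapshot {
        self.watch_snap
            .get_or_init(|| Snapshot { revision: self.revision })
            .clone()
    }

    /// A fresh compilation invalidates the cache so later requests re-snapshot.
    fn start_compile(&mut self) {
        self.revision += 1;
        self.watch_snap = OnceLock::new();
    }
}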
/// Apply delayed memory changes to underlying compiler.
fn apply_delayed_memory_changes(
verse: &mut Revising<CompilerUniverse<F>>,

View file

@ -1,5 +1,4 @@
use once_cell::sync::Lazy;
use tinymist::preview::PreviewCliArgs;
use tinymist::transport::MirrorArgs;
use tinymist::compile_init::{CompileOnceArgs, FontArgs};
@ -21,7 +20,8 @@ pub enum Commands {
/// Run Compile Server
Compile(CompileArgs),
/// Run Preview Server
Preview(PreviewCliArgs),
#[cfg(feature = "preview")]
Preview(tinymist::preview::PreviewCliArgs),
/// Probe
Probe,
}

View file

@ -1,3 +1,4 @@
use core::fmt;
use std::sync::Arc;
use std::time::Instant;
@ -19,6 +20,31 @@ pub struct Handle<H, C> {
pub type ReqHandler<S> = for<'a> fn(&'a mut S, lsp_server::Response);
type ReqQueue<S> = lsp_server::ReqQueue<(String, Instant), ReqHandler<S>>;
#[derive(Clone)]
pub struct AnyLspHost {
respond: Arc<dyn Fn(lsp_server::Response) + Send + Sync>,
}
impl fmt::Debug for AnyLspHost {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("AnyLspHost").finish()
}
}
impl Default for AnyLspHost {
fn default() -> Self {
Self {
respond: Arc::new(|_| {}),
}
}
}
impl AnyLspHost {
pub fn respond(&self, response: lsp_server::Response) {
(self.respond)(response)
}
}
/// The host for the language server, or known as the LSP client.
#[derive(Debug)]
pub struct LspHost<S> {
@ -35,6 +61,15 @@ impl<S> Clone for LspHost<S> {
}
}
impl<S: 'static> LspHost<S> {
pub fn to_untyped(self) -> AnyLspHost {
let this = self.clone();
AnyLspHost {
respond: Arc::new(move |msg| this.respond(msg)),
}
}
}
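Roughly, to_untyped erases the state type by capturing the typed host's respond inside an Arc<dyn Fn>, so other actors can answer requests without knowing S. A self-contained sketch of the same shape, with stand-in types and a Vec<String> outbox instead of the real lsp_server sender:

use std::sync::{Arc, Mutex};

#[derive(Clone)]
struct TypedHost { outbox: Arc<Mutex<Vec<String>>> }

#[derive(Clone)]
struct UntypedHost { respond: Arc<dyn Fn(String) + Send + Sync> }

impl TypedHost {
    fn respond(&self, msg: String) {
        self.outbox.lock().unwrap().push(msg);
    }

    /// Erase the concrete host behind a callback that any actor can hold.
    fn to_untyped(&self) -> UntypedHost {
        let this = self.clone();
        UntypedHost { respond: Arc::new(move |msg| this.respond(msg)) }
    }
}

impl UntypedHost {
    fn respond(&self, msg: String) {
        (self.respond)(msg)
    }
}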
impl<S> LspHost<S> {
/// Creates a new language server host.
pub fn new(sender: Arc<RwLock<Option<crossbeam_channel::Sender<Message>>>>) -> Self {

View file

@ -36,12 +36,18 @@ pub mod tools;
pub mod transport;
mod utils;
mod world;
use std::pin::Pin;
pub use crate::harness::LspHost;
use futures::future::MaybeDone;
use serde_json::Value as JsonValue;
pub use server::compile;
pub use server::compile_init;
pub use server::lsp::*;
pub use server::lsp_init::*;
#[cfg(feature = "preview")]
pub use server::preview;
use tinymist_query::CompilerQueryResponse;
pub use world::{
CompileFontOpts, CompileOnceOpts, CompileOpts, LspUniverse, LspWorld, LspWorldBuilder,
};
@ -50,3 +56,33 @@ pub use world::{
use lsp_server::ResponseError;
type LspResult<Res> = Result<Res, ResponseError>;
type ScheduledResult = LspResult<Option<()>>;
type ResponseFuture<T> = MaybeDone<Pin<Box<dyn std::future::Future<Output = T> + Send>>>;
type LspResponseFuture<T> = LspResult<ResponseFuture<T>>;
type QueryFuture = anyhow::Result<ResponseFuture<anyhow::Result<CompilerQueryResponse>>>;
type SchedulableResponse<T> = LspResponseFuture<LspResult<T>>;
type AnySchedulableResponse = SchedulableResponse<JsonValue>;
macro_rules! just_ok {
($expr:expr) => {
Ok(futures::future::MaybeDone::Done(Ok($expr)))
};
}
use just_ok;
macro_rules! just_result {
($expr:expr) => {
Ok(futures::future::MaybeDone::Done($expr))
};
}
use just_result;
#[allow(unused)]
macro_rules! just_future {
($expr:expr) => {
Ok(futures::future::MaybeDone::Future(Box::pin($expr)))
};
}
#[allow(unused_imports)]
use just_future;
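The MaybeDone-based response type above lets a handler answer either immediately or with a boxed future that gets scheduled later. A hedged, self-contained sketch of the idea using the futures crate (plain i32 values stand in for LspResult/JsonValue):

use std::pin::Pin;
use futures::future::MaybeDone;

type ResponseFuture<T> = MaybeDone<Pin<Box<dyn std::future::Future<Output = T> + Send>>>;

fn immediate(value: i32) -> ResponseFuture<i32> {
    // Corresponds to the just_ok!/just_result! paths: no scheduling needed.
    MaybeDone::Done(value)
}

fn deferred() -> ResponseFuture<i32> {
    // Corresponds to just_future!: the work is awaited on a runtime later.
    MaybeDone::Future(Box::pin(async { 21 * 2 }))
}

async fn resolve(resp: ResponseFuture<i32>) -> Option<i32> {
    match resp {
        MaybeDone::Done(value) => Some(value),     // answered synchronously
        MaybeDone::Future(fut) => Some(fut.await), // awaited when scheduled
        MaybeDone::Gone => None,                   // output already taken
    }
}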

View file

@ -13,14 +13,13 @@ use parking_lot::RwLock;
use tinymist::{
compile_init::{CompileInit, CompileInitializeParams},
harness::{lsp_harness, InitializedLspDriver, LspDriver, LspHost},
preview::preview_main,
transport::with_stdio_transport,
CompileFontOpts, Init, LspWorld, LanguageState,
CompileFontOpts, Init, LanguageState, LspWorld,
};
use tokio::sync::mpsc;
use typst::World;
use typst::{eval::Tracer, foundations::IntoValue, syntax::Span};
use typst_ts_compiler::service::{CompileEnv, Compiler};
use typst_ts_compiler::{CompileEnv, Compiler, TaskInputs};
use typst_ts_core::{typst::prelude::EcoVec, TypstDict};
use crate::args::{CliArguments, Commands, CompileArgs, LspArgs};
@ -69,7 +68,13 @@ fn main() -> anyhow::Result<()> {
match args.command.unwrap_or_default() {
Commands::Lsp(args) => lsp_main(args),
Commands::Compile(args) => compiler_main(args),
Commands::Preview(args) => RUNTIMES.tokio_runtime.block_on(preview_main(args)),
#[cfg(feature = "preview")]
Commands::Preview(args) => {
#[cfg(feature = "preview")]
use tinymist::preview::preview_main;
RUNTIMES.tokio_runtime.block_on(preview_main(args))
}
Commands::Probe => Ok(()),
}
}
@ -181,15 +186,12 @@ pub fn compiler_main(args: CompileArgs) -> anyhow::Result<()> {
service.initialized(InitializedParams {});
let entry = service.config.determine_entry(Some(input.as_path().into()));
let (timings, _doc, diagnostics) = service
.compiler()
.steal(|c| {
c.verse.increment_revision(|verse| {
verse.mutate_entry(entry).unwrap();
verse.set_inputs(inputs);
});
let w = c.verse.spawn();
let snap = service.compiler().sync_snapshot().unwrap();
let w = snap.world.task(TaskInputs {
entry: Some(entry),
inputs: Some(inputs),
});
let mut env = CompileEnv {
tracer: Some(Tracer::default()),
@ -197,23 +199,19 @@ pub fn compiler_main(args: CompileArgs) -> anyhow::Result<()> {
};
typst_timing::enable();
let mut errors = EcoVec::new();
let res = match c.compiler.pure_compile(&w, &mut env) {
Ok(doc) => Some(doc),
Err(e) => {
if let Err(e) = std::marker::PhantomData.compile(&w, &mut env) {
errors = e;
None
}
};
let mut writer = std::io::BufWriter::new(Vec::new());
let _ = typst_timing::export_json(&mut writer, |span| {
resolve_span(&w, span).unwrap_or_else(|| ("unknown".to_string(), 0))
});
let s = String::from_utf8(writer.into_inner().unwrap()).unwrap();
let timings = String::from_utf8(writer.into_inner().unwrap()).unwrap();
let warnings = env.tracer.map(|e| e.warnings());
let diagnostics = c.compiler.compiler.run_analysis(&w, |ctx| {
let diagnostics = service.compiler().handle.run_analysis(&w, |ctx| {
tinymist_query::convert_diagnostics(
ctx,
warnings.iter().flatten().chain(errors.iter()),
@ -222,10 +220,6 @@ pub fn compiler_main(args: CompileArgs) -> anyhow::Result<()> {
let diagnostics = diagnostics.unwrap_or_default();
(s, res, diagnostics)
})
.unwrap();
lsp_server::Message::Notification(lsp_server::Notification {
method: "tinymistExt/diagnostics".to_owned(),
params: serde_json::json!(diagnostics),

View file

@ -9,7 +9,7 @@ mod prelude {
pub use serde_json::Value as JsonValue;
pub use typst::foundations::{Scope, Value};
pub use typst::symbols::Symbol;
pub use typst_ts_compiler::service::Compiler;
pub use typst_ts_compiler::Compiler;
pub use typst_ts_core::error::prelude::*;
pub use typst_ts_svg_exporter::ir::{GlyphItem, GlyphRef};
pub use typst_ts_svg_exporter::{DefaultExportFeature, SvgTask, SvgText};

View file

@ -1,5 +1,6 @@
use std::{collections::BTreeMap, path::Path, sync::Arc};
use typst_ts_compiler::{ShadowApi, TaskInputs};
use typst_ts_core::{config::compiler::EntryState, font::GlyphId, TypstDocument, TypstFont};
pub use super::prelude::*;
@ -184,36 +185,29 @@ impl LanguageState {
log::debug!("math shaping text: {text}", text = math_shaping_text);
let symbols_ref = symbols.keys().cloned().collect::<Vec<_>>();
let font = self
.primary()
.steal(move |e| {
let verse = &mut e.verse;
let snapshot = self.primary().sync_snapshot()?;
let font = {
let entry_path: Arc<Path> = Path::new("/._sym_.typ").into();
let new_entry = EntryState::new_rootless(entry_path.clone())?;
let (old_entry, prepared) = verse.increment_revision(|verse| {
let old_entry = verse.mutate_entry(new_entry).ok()?;
let prepared = verse
let new_entry = EntryState::new_rootless(entry_path.clone())
.ok_or_else(|| error_once!("cannot change entry"))?;
let mut forked = snapshot.world.task(TaskInputs {
entry: Some(new_entry),
..Default::default()
});
forked
.map_shadow(&entry_path, math_shaping_text.into_bytes().into())
.is_ok();
.map_err(|e| error_once!("cannot map shadow", err: e))?;
Some((old_entry, prepared))
})?;
let sym_doc = std::marker::PhantomData
.compile(&forked, &mut Default::default())
.map_err(|e| error_once!("cannot compile symbols", err: format!("{e:?}")))?;
let w = verse.spawn();
let sym_doc =
prepared.then(|| e.compiler.pure_compile(&w, &mut Default::default()));
verse.increment_revision(|verse| verse.mutate_entry(old_entry).ok())?;
log::debug!(
"sym doc: {doc:?}",
doc = sym_doc.as_ref().map(|e| e.as_ref().map(|_| ()))
);
let doc = sym_doc.transpose().ok()??;
Some(trait_symbol_fonts(&doc, &symbols_ref))
})
.ok()
.flatten();
log::debug!("sym doc: {sym_doc:?}");
Some(trait_symbol_fonts(&sym_doc, &symbols_ref))
};
let mut glyph_def = String::new();
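The symbol-resource path above no longer mutates the live universe; it forks the snapshot's world with TaskInputs (a different entry plus a shadowed in-memory file) and compiles the fork. A generic sketch of that fork idea with made-up stand-in types (the real TaskInputs/task come from typst-ts-compiler):

#[derive(Clone, Debug)]
struct WorldSnapshot {
    entry: String,
    inputs: Vec<(String, String)>,
}

#[derive(Default)]
struct TaskOverrides {
    entry: Option<String>,
    inputs: Option<Vec<(String, String)>>,
}

impl WorldSnapshot {
    /// Produce a per-request fork; the base snapshot stays untouched and can
    /// keep serving other queries concurrently.
    fn task(&self, overrides: TaskOverrides) -> WorldSnapshot {
        WorldSnapshot {
            entry: overrides.entry.unwrap_or_else(|| self.entry.clone()),
            inputs: overrides.inputs.unwrap_or_else(|| self.inputs.clone()),
        }
    }
}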

View file

@ -3,9 +3,10 @@ use std::{collections::HashMap, path::Path, sync::Arc, time::Instant};
use crossbeam_channel::{select, Receiver};
use log::{error, info, warn};
use lsp_server::{ErrorCode, Message, Notification, Request, RequestId, Response, ResponseError};
use lsp_server::{ErrorCode, Message, Notification, Request, RequestId, Response};
use lsp_types::notification::Notification as _;
use once_cell::sync::OnceCell;
use serde::Serialize;
use serde_json::{Map, Value as JsonValue};
use tokio::sync::mpsc;
use typst::{diag::FileResult, syntax::Source};
@ -16,50 +17,19 @@ use crate::{
actor::{editor::EditorRequest, export::ExportConfig, typ_client::CompileClientActor},
compile_init::{CompileConfig, ConstCompileConfig},
harness::InitializedLspDriver,
invalid_params,
invalid_params, result_to_response,
state::MemoryFileMeta,
LspHost, LspResult,
};
type LspMethod<Res> = fn(srv: &mut CompileState, args: JsonValue) -> LspResult<Res>;
pub(crate) type LspHandler<Req, Res> = fn(srv: &mut CompileState, args: Req) -> LspResult<Res>;
type ExecuteCmdMap = HashMap<&'static str, LspHandler<Vec<JsonValue>, JsonValue>>;
type NotifyCmdMap = HashMap<&'static str, LspMethod<()>>;
type RegularCmdMap = HashMap<&'static str, LspMethod<JsonValue>>;
#[macro_export]
macro_rules! request_fn {
($desc: ty, Self::$method: ident) => {
(<$desc>::METHOD, {
const E: LspMethod<JsonValue> = |this, req| {
let req: <$desc as lsp_types::request::Request>::Params =
serde_json::from_value(req).unwrap(); // todo: soft unwrap
this.$method(req)
};
E
})
};
}
#[macro_export]
macro_rules! notify_fn {
($desc: ty, Self::$method: ident) => {
(<$desc>::METHOD, {
const E: LspMethod<()> = |this, input| {
let input: <$desc as lsp_types::notification::Notification>::Params =
serde_json::from_value(input).unwrap(); // todo: soft unwrap
this.$method(input)
};
E
})
};
}
use super::*;
/// The object providing the language server functionality.
pub struct CompileState {
/// The language server client.
pub client: LspHost<CompileState>,
/// The runtime handle to spawn tasks.
pub handle: tokio::runtime::Handle,
// State to synchronize with the client.
/// Whether the server is shutting down.
@ -74,15 +44,13 @@ pub struct CompileState {
// Command maps
/// Extra commands provided with `textDocument/executeCommand`.
pub exec_cmds: ExecuteCmdMap,
pub exec_cmds: ExecuteCmdMap<Self>,
/// Regular notifications for dispatching.
pub notify_cmds: NotifyCmdMap,
pub notify_cmds: NotifyCmdMap<Self>,
/// Regular commands for dispatching.
pub regular_cmds: RegularCmdMap,
pub regular_cmds: RegularCmdMap<Self>,
// Resources
/// The runtime handle to spawn tasks.
pub handle: tokio::runtime::Handle,
/// Source synchronized with client
pub memory_changes: HashMap<Arc<Path>, MemoryFileMeta>,
/// The diagnostics sender to send diagnostics to `crate::actor::cluster`.
@ -161,14 +129,15 @@ impl CompileState {
}
#[rustfmt::skip]
fn get_regular_cmds() -> RegularCmdMap {
fn get_regular_cmds() -> RegularCmdMap<Self> {
type State = CompileState;
use lsp_types::request::*;
RegularCmdMap::from_iter([
request_fn!(ExecuteCommand, Self::execute_command),
request_fn_!(ExecuteCommand, State::execute_command),
])
}
fn get_notify_cmds() -> NotifyCmdMap {
fn get_notify_cmds() -> NotifyCmdMap<Self> {
// use lsp_types::notification::*;
NotifyCmdMap::from_iter([
// notify_fn!(DidOpenTextDocument, Self::did_open),
@ -178,6 +147,33 @@ impl CompileState {
// notify_fn!(DidChangeConfiguration, Self::did_change_configuration),
])
}
pub fn schedule<T: Serialize + 'static>(
&mut self,
req_id: RequestId,
resp: SchedulableResponse<T>,
) -> ScheduledResult {
let resp = resp?;
use futures::future::MaybeDone::*;
match resp {
Done(output) => {
self.client.respond(result_to_response(req_id, output));
}
Future(fut) => {
let client = self.client.clone();
let req_id = req_id.clone();
self.handle.spawn(async move {
client.respond(result_to_response(req_id, fut.await));
});
}
Gone => {
log::warn!("response for request({req_id:?}) already taken");
}
};
Ok(Some(()))
}
}
#[derive(Debug)]
@ -254,21 +250,10 @@ impl CompileState {
return;
};
let result = handler(self, req.params);
if let Ok(response) = result_to_response(req.id, result) {
self.client.respond(response);
}
fn result_to_response(
id: RequestId,
result: Result<JsonValue, ResponseError>,
) -> Result<Response, Cancelled> {
let res = match result {
Ok(resp) => Response::new_ok(id, resp),
Err(e) => Response::new_err(id, e.code, e.message),
};
Ok(res)
let result = handler(self, req.id.clone(), req.params);
match result {
Ok(Some(())) => {}
_ => self.client.respond(result_to_response(req.id, result)),
}
}
@ -345,5 +330,3 @@ impl CompileState {
Ok(())
}
}
struct Cancelled;

View file

@ -1,4 +1,4 @@
use std::{collections::HashMap, path::PathBuf};
use std::path::PathBuf;
use log::{error, info};
use lsp_types::ExecuteCommandParams;
@ -6,47 +6,35 @@ use serde::Deserialize;
use serde_json::Value as JsonValue;
use tinymist_query::{ExportKind, PageSelection};
use crate::{internal_error, invalid_params, method_not_found, run_query, LspResult};
use crate::{internal_error, invalid_params, method_not_found, run_query};
use super::compile::*;
use super::*;
macro_rules! exec_fn {
($ty: ty, Self::$method: ident, $($arg_key:ident),+ $(,)?) => {{
const E: $ty = |this, $($arg_key),+| this.$method($($arg_key),+);
E
}};
}
#[derive(Debug, Clone, Default, Deserialize)]
struct ExportOpts {
page: PageSelection,
}
type ExecuteCmdMap = HashMap<&'static str, LspHandler<Vec<JsonValue>, JsonValue>>;
impl CompileState {
pub fn get_exec_commands() -> ExecuteCmdMap {
macro_rules! redirected_command {
($key: expr, Self::$method: ident) => {
(
$key,
exec_fn!(LspHandler<Vec<JsonValue>, JsonValue>, Self::$method, inputs),
)
};
}
pub fn get_exec_commands() -> ExecuteCmdMap<Self> {
type State = CompileState;
ExecuteCmdMap::from_iter([
redirected_command!("tinymist.exportPdf", Self::export_pdf),
redirected_command!("tinymist.exportSvg", Self::export_svg),
redirected_command!("tinymist.exportPng", Self::export_png),
redirected_command!("tinymist.doClearCache", Self::clear_cache),
redirected_command!("tinymist.changeEntry", Self::change_entry),
exec_fn_!("tinymist.exportPdf", State::export_pdf),
exec_fn_!("tinymist.exportSvg", State::export_svg),
exec_fn_!("tinymist.exportPng", State::export_png),
exec_fn!("tinymist.doClearCache", State::clear_cache),
exec_fn!("tinymist.changeEntry", State::change_entry),
])
}
/// The entry point for the `workspace/executeCommand` request.
pub fn execute_command(&mut self, params: ExecuteCommandParams) -> LspResult<JsonValue> {
pub fn execute_command(
&mut self,
req_id: RequestId,
params: ExecuteCommandParams,
) -> ScheduledResult {
let ExecuteCommandParams {
command,
arguments: args,
@ -56,55 +44,57 @@ impl CompileState {
error!("asked to execute unknown command");
return Err(method_not_found());
};
handler(self, args)
handler(self, req_id, args)
}
/// Export the current document as a PDF file.
pub fn export_pdf(&self, args: Vec<JsonValue>) -> LspResult<JsonValue> {
self.export(ExportKind::Pdf, args)
pub fn export_pdf(&mut self, req_id: RequestId, args: Vec<JsonValue>) -> ScheduledResult {
self.export(req_id, ExportKind::Pdf, args)
}
/// Export the current document as an SVG file.
pub fn export_svg(&self, mut args: Vec<JsonValue>) -> LspResult<JsonValue> {
pub fn export_svg(&mut self, req_id: RequestId, mut args: Vec<JsonValue>) -> ScheduledResult {
let opts = get_arg_or_default!(args[1] as ExportOpts);
self.export(ExportKind::Svg { page: opts.page }, args)
self.export(req_id, ExportKind::Svg { page: opts.page }, args)
}
/// Export the current document as a PNG file.
pub fn export_png(&self, mut args: Vec<JsonValue>) -> LspResult<JsonValue> {
pub fn export_png(&mut self, req_id: RequestId, mut args: Vec<JsonValue>) -> ScheduledResult {
let opts = get_arg_or_default!(args[1] as ExportOpts);
self.export(ExportKind::Png { page: opts.page }, args)
self.export(req_id, ExportKind::Png { page: opts.page }, args)
}
/// Export the current document as some format. The client is responsible
/// for passing the correct absolute path of the typst document.
pub fn export(&self, kind: ExportKind, mut args: Vec<JsonValue>) -> LspResult<JsonValue> {
pub fn export(
&mut self,
req_id: RequestId,
kind: ExportKind,
mut args: Vec<JsonValue>,
) -> ScheduledResult {
let path = get_arg!(args[0] as PathBuf);
let res = run_query!(self.OnExport(path, kind))?;
let res = serde_json::to_value(res).map_err(|_| internal_error("Cannot serialize path"))?;
Ok(res)
run_query!(req_id, self.OnExport(path, kind))
}
/// Clear all cached resources.
///
/// # Errors
/// Errors if the cache could not be cleared.
pub fn clear_cache(&self, _arguments: Vec<JsonValue>) -> LspResult<JsonValue> {
pub fn clear_cache(&self, _arguments: Vec<JsonValue>) -> AnySchedulableResponse {
comemo::evict(0);
self.compiler().clear_cache();
Ok(JsonValue::Null)
just_ok!(JsonValue::Null)
}
/// Focus main file to some path.
pub fn change_entry(&mut self, mut args: Vec<JsonValue>) -> LspResult<JsonValue> {
pub fn change_entry(&mut self, mut args: Vec<JsonValue>) -> AnySchedulableResponse {
let entry = get_arg!(args[0] as Option<PathBuf>).map(From::from);
let update_result = self.do_change_entry(entry.clone());
update_result.map_err(|err| internal_error(format!("could not focus file: {err}")))?;
info!("entry changed: {entry:?}");
Ok(JsonValue::Null)
just_ok!(JsonValue::Null)
}
}

View file

@ -1,15 +1,15 @@
//! tinymist LSP mode
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Instant;
use std::{collections::HashMap, path::PathBuf};
use anyhow::{bail, Context};
use futures::future::BoxFuture;
use log::{error, info, trace, warn};
use lsp_server::{ErrorCode, Message, Notification, Request, RequestId, Response, ResponseError};
use lsp_types::notification::Notification as NotificationTrait;
use lsp_types::request::{GotoDeclarationParams, GotoDeclarationResponse, WorkspaceConfiguration};
use lsp_types::request::{GotoDeclarationParams, WorkspaceConfiguration};
use lsp_types::*;
use parking_lot::RwLock;
use serde::{Deserialize, Serialize};
@ -21,7 +21,7 @@ use tinymist_query::{
use tokio::sync::mpsc;
use typst_ts_core::ImmutPath;
use super::lsp_init::*;
use super::{lsp_init::*, *};
use crate::actor::editor::EditorRequest;
use crate::actor::format::{FormatConfig, FormatRequest};
use crate::actor::typ_client::CompileClientActor;
@ -29,67 +29,10 @@ use crate::actor::user_action::UserActionRequest;
use crate::compile::CompileState;
use crate::compile_init::ConstCompileConfig;
use crate::harness::{InitializedLspDriver, LspHost};
use crate::{run_query, LspResult};
use crate::{run_query, run_query_tail, LspResult};
pub type MaySyncResult<'a> = Result<JsonValue, BoxFuture<'a, JsonValue>>;
type LspMethod<Res> = fn(srv: &mut LanguageState, args: JsonValue) -> LspResult<Res>;
type LspHandler<Req, Res> = fn(srv: &mut LanguageState, args: Req) -> LspResult<Res>;
/// Returns Ok(Some(())) -> Already responded
/// Returns Ok(None) -> Need to respond none
/// Returns Err(..) -> Need to respond error
type LspRawHandler<T> =
fn(srv: &mut LanguageState, req_id: RequestId, args: T) -> LspResult<Option<()>>;
type ExecuteCmdMap = HashMap<&'static str, LspRawHandler<Vec<JsonValue>>>;
type NotifyCmdMap = HashMap<&'static str, LspMethod<()>>;
type RegularCmdMap = HashMap<&'static str, LspRawHandler<JsonValue>>;
type ResourceMap = HashMap<ImmutPath, LspHandler<Vec<JsonValue>, JsonValue>>;
macro_rules! request_fn_ {
($desc: ty, Self::$method: ident) => {
(<$desc>::METHOD, {
const E: LspRawHandler<JsonValue> = |this, req_id, req| {
let req: <$desc as lsp_types::request::Request>::Params =
serde_json::from_value(req).unwrap(); // todo: soft unwrap
this.$method(req_id, req)
};
E
})
};
}
macro_rules! request_fn {
($desc: ty, Self::$method: ident) => {
(<$desc>::METHOD, {
const E: LspRawHandler<JsonValue> = |this, req_id, req| {
let req: <$desc as lsp_types::request::Request>::Params =
serde_json::from_value(req).unwrap(); // todo: soft unwrap
let res = this.$method(req);
this.client.respond(result_to_response(req_id, res));
Ok(Some(()))
};
E
})
};
}
macro_rules! notify_fn {
($desc: ty, Self::$method: ident) => {
(<$desc>::METHOD, {
const E: LspMethod<()> = |this, input| {
let input: <$desc as lsp_types::notification::Notification>::Params =
serde_json::from_value(input).unwrap(); // todo: soft unwrap
this.$method(input)
};
E
})
};
}
pub(super) fn as_path(inp: TextDocumentIdentifier) -> PathBuf {
as_path_(inp.uri)
}
@ -132,15 +75,17 @@ pub struct LanguageState {
// Command maps
/// Extra commands provided with `textDocument/executeCommand`.
pub exec_cmds: ExecuteCmdMap,
pub exec_cmds: ExecuteCmdMap<Self>,
/// Regular notifications for dispatching.
pub notify_cmds: NotifyCmdMap,
pub notify_cmds: NotifyCmdMap<Self>,
/// Regular commands for dispatching.
pub regular_cmds: RegularCmdMap,
pub regular_cmds: RegularCmdMap<Self>,
/// Regular commands for dispatching.
pub resource_routes: ResourceMap,
pub resource_routes: ResourceMap<Self>,
// Resources
/// The tokio handle.
pub handle: tokio::runtime::Handle,
/// The semantic token context.
pub tokens_ctx: SemanticTokenContext,
/// The compiler for general purpose.
@ -178,8 +123,9 @@ impl LanguageState {
position_encoding: const_config.position_encoding,
},
editor_tx,
handle,
handle.clone(),
),
handle,
dedicates: Vec::new(),
shutdown_requested: false,
ever_focusing_by_activities: false,
@ -213,50 +159,80 @@ impl LanguageState {
}
#[rustfmt::skip]
fn get_regular_cmds() -> RegularCmdMap {
fn get_regular_cmds() -> RegularCmdMap<Self> {
use lsp_types::request::*;
RegularCmdMap::from_iter([
request_fn!(Shutdown, Self::shutdown),
request_fn!(Shutdown, LanguageState::shutdown),
// latency sensitive
request_fn!(Completion, Self::completion),
request_fn!(SemanticTokensFullRequest, Self::semantic_tokens_full),
request_fn!(SemanticTokensFullDeltaRequest, Self::semantic_tokens_full_delta),
request_fn!(DocumentHighlightRequest, Self::document_highlight),
request_fn!(DocumentSymbolRequest, Self::document_symbol),
request_fn_!(Completion, LanguageState::completion),
request_fn_!(SemanticTokensFullRequest, LanguageState::semantic_tokens_full),
request_fn_!(SemanticTokensFullDeltaRequest, LanguageState::semantic_tokens_full_delta),
request_fn_!(DocumentHighlightRequest, LanguageState::document_highlight),
request_fn_!(DocumentSymbolRequest, LanguageState::document_symbol),
// Sync for low latency
request_fn_!(Formatting, Self::formatting),
request_fn!(SelectionRangeRequest, Self::selection_range),
request_fn_!(Formatting, LanguageState::formatting),
request_fn_!(SelectionRangeRequest, LanguageState::selection_range),
// latency insensitive
request_fn!(InlayHintRequest, Self::inlay_hint),
request_fn!(DocumentColor, Self::document_color),
request_fn!(ColorPresentationRequest, Self::color_presentation),
request_fn!(HoverRequest, Self::hover),
request_fn!(CodeActionRequest, Self::code_action),
request_fn!(CodeLensRequest, Self::code_lens),
request_fn!(FoldingRangeRequest, Self::folding_range),
request_fn!(SignatureHelpRequest, Self::signature_help),
request_fn!(PrepareRenameRequest, Self::prepare_rename),
request_fn!(Rename, Self::rename),
request_fn!(GotoDefinition, Self::goto_definition),
request_fn!(GotoDeclaration, Self::goto_declaration),
request_fn!(References, Self::references),
request_fn!(WorkspaceSymbolRequest, Self::symbol),
request_fn!(OnEnter, Self::on_enter),
request_fn_!(ExecuteCommand, Self::on_execute_command),
request_fn_!(InlayHintRequest, LanguageState::inlay_hint),
request_fn_!(DocumentColor, LanguageState::document_color),
request_fn_!(ColorPresentationRequest, LanguageState::color_presentation),
request_fn_!(HoverRequest, LanguageState::hover),
request_fn_!(CodeActionRequest, LanguageState::code_action),
request_fn_!(CodeLensRequest, LanguageState::code_lens),
request_fn_!(FoldingRangeRequest, LanguageState::folding_range),
request_fn_!(SignatureHelpRequest, LanguageState::signature_help),
request_fn_!(PrepareRenameRequest, LanguageState::prepare_rename),
request_fn_!(Rename, LanguageState::rename),
request_fn_!(GotoDefinition, LanguageState::goto_definition),
request_fn_!(GotoDeclaration, LanguageState::goto_declaration),
request_fn_!(References, LanguageState::references),
request_fn_!(WorkspaceSymbolRequest, LanguageState::symbol),
request_fn_!(OnEnter, LanguageState::on_enter),
request_fn_!(ExecuteCommand, LanguageState::on_execute_command),
])
}
fn get_notify_cmds() -> NotifyCmdMap {
fn get_notify_cmds() -> NotifyCmdMap<Self> {
// todo: .on_sync_mut::<notifs::Cancel>(handlers::handle_cancel)?
use lsp_types::notification::*;
NotifyCmdMap::from_iter([
notify_fn!(DidOpenTextDocument, Self::did_open),
notify_fn!(DidCloseTextDocument, Self::did_close),
notify_fn!(DidChangeTextDocument, Self::did_change),
notify_fn!(DidSaveTextDocument, Self::did_save),
notify_fn!(DidChangeConfiguration, Self::did_change_configuration),
notify_fn!(DidOpenTextDocument, LanguageState::did_open),
notify_fn!(DidCloseTextDocument, LanguageState::did_close),
notify_fn!(DidChangeTextDocument, LanguageState::did_change),
notify_fn!(DidSaveTextDocument, LanguageState::did_save),
notify_fn!(
DidChangeConfiguration,
LanguageState::did_change_configuration
),
])
}
pub fn schedule<T: Serialize + 'static>(
&mut self,
req_id: RequestId,
resp: SchedulableResponse<T>,
) -> ScheduledResult {
let resp = resp?;
use futures::future::MaybeDone::*;
match resp {
Done(output) => {
self.client.respond(result_to_response(req_id, output));
}
Future(fut) => {
let client = self.client.clone();
let req_id = req_id.clone();
self.handle.spawn(async move {
client.respond(result_to_response(req_id, fut.await));
});
}
Gone => {
log::warn!("response for request({req_id:?}) already taken");
}
};
Ok(Some(()))
}
}
impl InitializedLspDriver for LanguageState {
@ -368,7 +344,11 @@ impl LanguageState {
return;
};
let _ = handler(self, req.id.clone(), req.params);
let result = handler(self, req.id.clone(), req.params);
match result {
Ok(Some(())) => {}
_ => self.client.respond(result_to_response(req.id, result)),
}
}
/// The entry point for the `workspace/executeCommand` request.
@ -512,9 +492,9 @@ impl LanguageState {
/// This method is guaranteed to only execute once. If the client sends this
/// request to the server again, the server will respond with JSON-RPC
/// error code `-32600` (invalid request).
fn shutdown(&mut self, _params: ()) -> LspResult<()> {
fn shutdown(&mut self, _params: ()) -> SchedulableResponse<()> {
self.shutdown_requested = true;
Ok(())
just_ok!(())
}
}
@ -551,7 +531,7 @@ impl LanguageState {
fn did_save(&mut self, params: DidSaveTextDocumentParams) -> LspResult<()> {
let path = as_path(params.text_document);
let _ = run_query!(self.OnSaveExport(path));
run_query_tail!(self.OnSaveExport(path));
Ok(())
}
@ -635,96 +615,96 @@ impl LanguageState {
impl LanguageState {
fn goto_definition(
&mut self,
req_id: RequestId,
params: GotoDefinitionParams,
) -> LspResult<Option<GotoDefinitionResponse>> {
) -> ScheduledResult {
let (path, position) = as_path_pos(params.text_document_position_params);
run_query!(self.GotoDefinition(path, position))
run_query!(req_id, self.GotoDefinition(path, position))
}
fn goto_declaration(
&mut self,
req_id: RequestId,
params: GotoDeclarationParams,
) -> LspResult<Option<GotoDeclarationResponse>> {
) -> ScheduledResult {
let (path, position) = as_path_pos(params.text_document_position_params);
run_query!(self.GotoDeclaration(path, position))
run_query!(req_id, self.GotoDeclaration(path, position))
}
fn references(&mut self, params: ReferenceParams) -> LspResult<Option<Vec<Location>>> {
fn references(&mut self, req_id: RequestId, params: ReferenceParams) -> ScheduledResult {
let (path, position) = as_path_pos(params.text_document_position);
run_query!(self.References(path, position))
run_query!(req_id, self.References(path, position))
}
fn hover(&mut self, params: HoverParams) -> LspResult<Option<Hover>> {
fn hover(&mut self, req_id: RequestId, params: HoverParams) -> ScheduledResult {
let (path, position) = as_path_pos(params.text_document_position_params);
self.implicit_focus_entry(|| Some(path.as_path().into()), 'h');
run_query!(self.Hover(path, position))
run_query!(req_id, self.Hover(path, position))
}
fn folding_range(
&mut self,
params: FoldingRangeParams,
) -> LspResult<Option<Vec<FoldingRange>>> {
fn folding_range(&mut self, req_id: RequestId, params: FoldingRangeParams) -> ScheduledResult {
let path = as_path(params.text_document);
let line_folding_only = self.const_config().doc_line_folding_only;
self.implicit_focus_entry(|| Some(path.as_path().into()), 'f');
run_query!(self.FoldingRange(path, line_folding_only))
run_query!(req_id, self.FoldingRange(path, line_folding_only))
}
fn selection_range(
&mut self,
req_id: RequestId,
params: SelectionRangeParams,
) -> LspResult<Option<Vec<SelectionRange>>> {
) -> ScheduledResult {
let path = as_path(params.text_document);
let positions = params.positions;
run_query!(self.SelectionRange(path, positions))
run_query!(req_id, self.SelectionRange(path, positions))
}
fn document_highlight(
&mut self,
req_id: RequestId,
params: DocumentHighlightParams,
) -> LspResult<Option<Vec<DocumentHighlight>>> {
) -> ScheduledResult {
let (path, position) = as_path_pos(params.text_document_position_params);
run_query!(self.DocumentHighlight(path, position))
run_query!(req_id, self.DocumentHighlight(path, position))
}
fn document_symbol(
&mut self,
req_id: RequestId,
params: DocumentSymbolParams,
) -> LspResult<Option<DocumentSymbolResponse>> {
) -> ScheduledResult {
let path = as_path(params.text_document);
run_query!(self.DocumentSymbol(path))
run_query!(req_id, self.DocumentSymbol(path))
}
fn semantic_tokens_full(
&mut self,
req_id: RequestId,
params: SemanticTokensParams,
) -> LspResult<Option<SemanticTokensResult>> {
) -> ScheduledResult {
let path = as_path(params.text_document);
self.implicit_focus_entry(|| Some(path.as_path().into()), 't');
run_query!(self.SemanticTokensFull(path))
run_query!(req_id, self.SemanticTokensFull(path))
}
fn semantic_tokens_full_delta(
&mut self,
req_id: RequestId,
params: SemanticTokensDeltaParams,
) -> LspResult<Option<SemanticTokensFullDeltaResult>> {
) -> ScheduledResult {
let path = as_path(params.text_document);
let previous_result_id = params.previous_result_id;
self.implicit_focus_entry(|| Some(path.as_path().into()), 't');
run_query!(self.SemanticTokensDelta(path, previous_result_id))
run_query!(req_id, self.SemanticTokensDelta(path, previous_result_id))
}
fn formatting(
&self,
req_id: RequestId,
params: DocumentFormattingParams,
) -> LspResult<Option<()>> {
fn formatting(&self, req_id: RequestId, params: DocumentFormattingParams) -> ScheduledResult {
if matches!(self.config.formatter, FormatterMode::Disable) {
return Ok(None);
}
let path = as_path(params.text_document).as_path().into();
self.query_source(path, |source| {
let path: ImmutPath = as_path(params.text_document).as_path().into();
self.query_source(path, |source: typst::syntax::Source| {
if let Some(f) = &self.format_thread {
f.send(FormatRequest::Format(req_id, source.clone()))?;
} else {
@ -736,84 +716,89 @@ impl LanguageState {
.map_err(|e| internal_error(format!("could not format document: {e}")))
}
fn inlay_hint(&mut self, params: InlayHintParams) -> LspResult<Option<Vec<InlayHint>>> {
fn inlay_hint(&mut self, req_id: RequestId, params: InlayHintParams) -> ScheduledResult {
let path = as_path(params.text_document);
let range = params.range;
run_query!(self.InlayHint(path, range))
run_query!(req_id, self.InlayHint(path, range))
}
fn document_color(
&mut self,
req_id: RequestId,
params: DocumentColorParams,
) -> LspResult<Option<Vec<ColorInformation>>> {
) -> ScheduledResult {
let path = as_path(params.text_document);
run_query!(self.DocumentColor(path))
run_query!(req_id, self.DocumentColor(path))
}
fn color_presentation(
&mut self,
req_id: RequestId,
params: ColorPresentationParams,
) -> LspResult<Option<Vec<ColorPresentation>>> {
) -> ScheduledResult {
let path = as_path(params.text_document);
let color = params.color;
let range = params.range;
run_query!(self.ColorPresentation(path, color, range))
run_query!(req_id, self.ColorPresentation(path, color, range))
}
fn code_action(
&mut self,
params: CodeActionParams,
) -> LspResult<Option<Vec<CodeActionOrCommand>>> {
fn code_action(&mut self, req_id: RequestId, params: CodeActionParams) -> ScheduledResult {
let path = as_path(params.text_document);
let range = params.range;
run_query!(self.CodeAction(path, range))
run_query!(req_id, self.CodeAction(path, range))
}
fn code_lens(&mut self, params: CodeLensParams) -> LspResult<Option<Vec<CodeLens>>> {
fn code_lens(&mut self, req_id: RequestId, params: CodeLensParams) -> ScheduledResult {
let path = as_path(params.text_document);
run_query!(self.CodeLens(path))
run_query!(req_id, self.CodeLens(path))
}
fn completion(&mut self, params: CompletionParams) -> LspResult<Option<CompletionResponse>> {
fn completion(&mut self, req_id: RequestId, params: CompletionParams) -> ScheduledResult {
let (path, position) = as_path_pos(params.text_document_position);
let explicit = params
.context
.map(|context| context.trigger_kind == CompletionTriggerKind::INVOKED)
.unwrap_or(false);
run_query!(self.Completion(path, position, explicit))
run_query!(req_id, self.Completion(path, position, explicit))
}
fn signature_help(&mut self, params: SignatureHelpParams) -> LspResult<Option<SignatureHelp>> {
fn signature_help(
&mut self,
req_id: RequestId,
params: SignatureHelpParams,
) -> ScheduledResult {
let (path, position) = as_path_pos(params.text_document_position_params);
run_query!(self.SignatureHelp(path, position))
run_query!(req_id, self.SignatureHelp(path, position))
}
fn rename(&mut self, params: RenameParams) -> LspResult<Option<WorkspaceEdit>> {
fn rename(&mut self, req_id: RequestId, params: RenameParams) -> ScheduledResult {
let (path, position) = as_path_pos(params.text_document_position);
let new_name = params.new_name;
run_query!(self.Rename(path, position, new_name))
run_query!(req_id, self.Rename(path, position, new_name))
}
fn prepare_rename(
&mut self,
req_id: RequestId,
params: TextDocumentPositionParams,
) -> LspResult<Option<PrepareRenameResponse>> {
) -> ScheduledResult {
let (path, position) = as_path_pos(params);
run_query!(self.PrepareRename(path, position))
run_query!(req_id, self.PrepareRename(path, position))
}
fn symbol(
&mut self,
params: WorkspaceSymbolParams,
) -> LspResult<Option<Vec<SymbolInformation>>> {
fn symbol(&mut self, req_id: RequestId, params: WorkspaceSymbolParams) -> ScheduledResult {
let pattern = (!params.query.is_empty()).then_some(params.query);
run_query!(self.Symbol(pattern))
run_query!(req_id, self.Symbol(pattern))
}
fn on_enter(&mut self, params: TextDocumentPositionParams) -> LspResult<Option<Vec<TextEdit>>> {
fn on_enter(
&mut self,
req_id: RequestId,
params: TextDocumentPositionParams,
) -> ScheduledResult {
let (path, position) = as_path_pos(params);
run_query!(self.OnEnter(path, position))
run_query!(req_id, self.OnEnter(path, position))
}
}
@ -838,6 +823,14 @@ pub fn internal_error(msg: impl Into<String>) -> ResponseError {
}
}
pub fn z_internal_error(msg: typst_ts_core::Error) -> ResponseError {
ResponseError {
code: ErrorCode::InternalError as i32,
message: format!("internal: {msg:?}"),
data: None,
}
}
pub fn method_not_found() -> ResponseError {
ResponseError {
code: ErrorCode::MethodNotFound as i32,

View file

@ -2,9 +2,8 @@
use std::ops::Deref;
use std::path::Path;
use std::{collections::HashMap, path::PathBuf};
use std::path::PathBuf;
use anyhow::{bail, Context};
use log::{error, info};
use lsp_server::RequestId;
use lsp_types::*;
@ -13,8 +12,8 @@ use serde_json::Value as JsonValue;
use tinymist_query::ExportKind;
use typst::diag::StrResult;
use typst::syntax::package::{PackageSpec, VersionlessPackageSpec};
use typst_ts_core::error::prelude::*;
use typst_ts_core::path::PathClean;
use typst_ts_core::{error::prelude::*, ImmutPath};
use crate::actor::user_action::{TraceParams, UserActionRequest};
use crate::tools::package::InitTask;
@ -23,93 +22,59 @@ use crate::{run_query, LspResult};
use super::lsp::*;
use super::*;
macro_rules! exec_fn_ {
($key: expr, Self::$method: ident) => {
($key, {
const E: LspRawHandler<Vec<JsonValue>> = |this, req_id, req| this.$method(req_id, req);
E
})
};
}
macro_rules! exec_fn {
($key: expr, Self::$method: ident) => {
($key, {
const E: LspRawHandler<Vec<JsonValue>> = |this, req_id, args| {
let res = this.$method(args);
this.client.respond(result_to_response(req_id, res));
Ok(Some(()))
};
E
})
};
}
macro_rules! resource_fn {
($ty: ty, Self::$method: ident, $($arg_key:ident),+ $(,)?) => {{
const E: $ty = |this, $($arg_key),+| this.$method($($arg_key),+);
E
}};
}
type LspHandler<Req, Res> = fn(srv: &mut LanguageState, args: Req) -> LspResult<Res>;
/// Returns Ok(Some(())) -> Already responded
/// Returns Ok(None) -> Need to respond none
/// Returns Err(..) -> Need to respond error
type LspRawHandler<T> =
fn(srv: &mut LanguageState, req_id: RequestId, args: T) -> LspResult<Option<()>>;
type ExecuteCmdMap = HashMap<&'static str, LspRawHandler<Vec<JsonValue>>>;
type ResourceMap = HashMap<ImmutPath, LspHandler<Vec<JsonValue>, JsonValue>>;
/// Here are implemented the handlers for each command.
impl LanguageState {
pub fn get_exec_commands() -> ExecuteCmdMap {
pub fn get_exec_commands() -> ExecuteCmdMap<Self> {
type State = LanguageState;
ExecuteCmdMap::from_iter([
exec_fn!("tinymist.exportPdf", Self::export_pdf),
exec_fn!("tinymist.exportSvg", Self::export_svg),
exec_fn!("tinymist.exportPng", Self::export_png),
exec_fn!("tinymist.doClearCache", Self::clear_cache),
exec_fn!("tinymist.pinMain", Self::pin_document),
exec_fn!("tinymist.focusMain", Self::focus_document),
exec_fn!("tinymist.doInitTemplate", Self::init_template),
exec_fn!("tinymist.doGetTemplateEntry", Self::do_get_template_entry),
exec_fn!("tinymist.interactCodeContext", Self::interact_code_context),
exec_fn_!("tinymist.getDocumentTrace", Self::get_document_trace),
exec_fn!("tinymist.getDocumentMetrics", Self::get_document_metrics),
exec_fn!("tinymist.getServerInfo", Self::get_server_info),
exec_fn_!("tinymist.exportPdf", State::export_pdf),
exec_fn_!("tinymist.exportSvg", State::export_svg),
exec_fn_!("tinymist.exportPng", State::export_png),
exec_fn!("tinymist.doClearCache", State::clear_cache),
exec_fn!("tinymist.pinMain", State::pin_document),
exec_fn!("tinymist.focusMain", State::focus_document),
exec_fn!("tinymist.doInitTemplate", State::init_template),
exec_fn!("tinymist.doGetTemplateEntry", State::do_get_template_entry),
exec_fn_!("tinymist.interactCodeContext", State::interact_code_context),
exec_fn_!("tinymist.getDocumentTrace", State::get_document_trace),
exec_fn_!("tinymist.getDocumentMetrics", State::get_document_metrics),
exec_fn_!("tinymist.getServerInfo", State::get_server_info),
// For documentation
exec_fn!("tinymist.getResources", Self::get_resources),
exec_fn_!("tinymist.getResources", State::get_resources),
])
}
/// Export the current document as a PDF file.
pub fn export_pdf(&mut self, args: Vec<JsonValue>) -> LspResult<JsonValue> {
self.primary.export_pdf(args)
pub fn export_pdf(&mut self, req_id: RequestId, args: Vec<JsonValue>) -> ScheduledResult {
self.primary.export_pdf(req_id, args)
}
/// Export the current document as an SVG file.
pub fn export_svg(&mut self, args: Vec<JsonValue>) -> LspResult<JsonValue> {
self.primary.export_svg(args)
pub fn export_svg(&mut self, req_id: RequestId, args: Vec<JsonValue>) -> ScheduledResult {
self.primary.export_svg(req_id, args)
}
/// Export the current document as a PNG file.
pub fn export_png(&mut self, args: Vec<JsonValue>) -> LspResult<JsonValue> {
self.primary.export_png(args)
pub fn export_png(&mut self, req_id: RequestId, args: Vec<JsonValue>) -> ScheduledResult {
self.primary.export_png(req_id, args)
}
/// Export the current document as some format. The client is responsible
/// for passing the correct absolute path of the typst document.
pub fn export(&mut self, kind: ExportKind, args: Vec<JsonValue>) -> LspResult<JsonValue> {
self.primary.export(kind, args)
pub fn export(
&mut self,
req_id: RequestId,
kind: ExportKind,
args: Vec<JsonValue>,
) -> ScheduledResult {
self.primary.export(req_id, kind, args)
}
/// Clear all cached resources.
///
/// # Errors
/// Errors if the cache could not be cleared.
pub fn clear_cache(&self, _arguments: Vec<JsonValue>) -> LspResult<JsonValue> {
pub fn clear_cache(&self, _arguments: Vec<JsonValue>) -> AnySchedulableResponse {
comemo::evict(0);
for v in Some(self.primary())
.into_iter()
@ -117,22 +82,22 @@ impl LanguageState {
{
v.clear_cache();
}
Ok(JsonValue::Null)
just_ok!(JsonValue::Null)
}
/// Pin main file to some path.
pub fn pin_document(&mut self, mut args: Vec<JsonValue>) -> LspResult<JsonValue> {
pub fn pin_document(&mut self, mut args: Vec<JsonValue>) -> AnySchedulableResponse {
let entry = get_arg!(args[0] as Option<PathBuf>).map(From::from);
let update_result = self.pin_entry(entry.clone());
update_result.map_err(|err| internal_error(format!("could not pin file: {err}")))?;
info!("file pinned: {entry:?}");
Ok(JsonValue::Null)
just_ok!(JsonValue::Null)
}
/// Focus main file to some path.
pub fn focus_document(&mut self, mut args: Vec<JsonValue>) -> LspResult<JsonValue> {
pub fn focus_document(&mut self, mut args: Vec<JsonValue>) -> AnySchedulableResponse {
let entry = get_arg!(args[0] as Option<PathBuf>).map(From::from);
if !self.ever_manual_focusing {
@ -146,11 +111,11 @@ impl LanguageState {
if ok {
info!("file focused: {entry:?}");
}
Ok(JsonValue::Null)
just_ok!(JsonValue::Null)
}
/// Initialize a new template.
pub fn init_template(&self, mut args: Vec<JsonValue>) -> LspResult<JsonValue> {
pub fn init_template(&self, mut args: Vec<JsonValue>) -> AnySchedulableResponse {
use crate::tools::package::{self, determine_latest_version, TemplateSource};
#[derive(Debug, Serialize)]
@ -161,10 +126,9 @@ impl LanguageState {
let from_source = get_arg!(args[0] as String);
let to_path = get_arg!(args[1] as Option<PathBuf>).map(From::from);
let res = self
.primary()
.steal(move |c| {
let world = c.verse.spawn();
let snap = self.primary().sync_snapshot().map_err(z_internal_error)?;
// Parse the package specification. If the user didn't specify the version,
// we try to figure it out automatically by downloading the package index
// or searching the disk.
@ -174,40 +138,39 @@ impl LanguageState {
// Try to parse without version, but prefer the error message of the
// normal package spec parsing if it fails.
let spec: VersionlessPackageSpec = from_source.parse().map_err(|_| err)?;
let version = determine_latest_version(&c.verse, &spec)?;
let version = determine_latest_version(&snap.world, &spec)?;
StrResult::Ok(spec.at(version))
})
.map_err(map_string_err("failed to parse package spec"))?;
.map_err(map_string_err("failed to parse package spec"))
.map_err(z_internal_error)?;
let from_source = TemplateSource::Package(spec);
let entry_path = package::init(
&world,
&snap.world,
InitTask {
tmpl: from_source.clone(),
dir: to_path.clone(),
},
)
.map_err(map_string_err("failed to initialize template"))?;
.map_err(map_string_err("failed to initialize template"))
.map_err(z_internal_error)?;
info!("template initialized: {from_source:?} to {to_path:?}");
ZResult::Ok(InitResult { entry_path })
})
.and_then(|e| e)
.map_err(|e| invalid_params(format!("failed to determine template source: {e}")))?;
serde_json::to_value(res).map_err(|_| internal_error("Cannot serialize path"))
let res = serde_json::to_value(InitResult { entry_path })
.map_err(|_| internal_error("Cannot serialize path"));
just_result!(res)
}
/// Get the entry of a template.
pub fn do_get_template_entry(&self, mut args: Vec<JsonValue>) -> LspResult<JsonValue> {
pub fn do_get_template_entry(&self, mut args: Vec<JsonValue>) -> AnySchedulableResponse {
use crate::tools::package::{self, determine_latest_version, TemplateSource};
let from_source = get_arg!(args[0] as String);
let entry = self
.primary()
.steal(move |c| {
let snap = self.primary().sync_snapshot().map_err(z_internal_error)?;
// Parse the package specification. If the user didn't specify the version,
// we try to figure it out automatically by downloading the package index
// or searching the disk.
@ -217,29 +180,30 @@ impl LanguageState {
// Try to parse without version, but prefer the error message of the
// normal package spec parsing if it fails.
let spec: VersionlessPackageSpec = from_source.parse().map_err(|_| err)?;
let version = determine_latest_version(&c.verse, &spec)?;
let version = determine_latest_version(&snap.world, &spec)?;
StrResult::Ok(spec.at(version))
})
.map_err(map_string_err("failed to parse package spec"))?;
.map_err(map_string_err("failed to parse package spec"))
.map_err(z_internal_error)?;
let from_source = TemplateSource::Package(spec);
let entry = package::get_entry(&c.verse, from_source)
.map_err(map_string_err("failed to get template entry"))?;
ZResult::Ok(entry)
})
.and_then(|e| e)
.map_err(|e| invalid_params(format!("failed to determine template entry: {e}")))?;
let entry = package::get_entry(&snap.world, from_source)
.map_err(map_string_err("failed to get template entry"))
.map_err(z_internal_error)?;
let entry = String::from_utf8(entry.to_vec())
.map_err(|_| invalid_params("template entry is not a valid UTF-8 string"))?;
Ok(JsonValue::String(entry))
just_ok!(JsonValue::String(entry))
}
/// Interact with the code context at the source file.
pub fn interact_code_context(&mut self, _arguments: Vec<JsonValue>) -> LspResult<JsonValue> {
pub fn interact_code_context(
&mut self,
req_id: RequestId,
_arguments: Vec<JsonValue>,
) -> ScheduledResult {
let queries = _arguments.into_iter().next().ok_or_else(|| {
invalid_params("The first parameter is not a valid code context query array")
})?;
@ -256,11 +220,7 @@ impl LanguageState {
let path = as_path(params.text_document);
let query = params.query;
let res = run_query!(self.InteractCodeContext(path, query))?;
let res =
serde_json::to_value(res).map_err(|_| internal_error("Cannot serialize responses"))?;
Ok(res)
run_query!(req_id, self.InteractCodeContext(path, query))
}
/// Get the trace data of the document.
@ -278,73 +238,65 @@ impl LanguageState {
let thread = self.user_action_thread.clone();
let entry = self.config.compile.determine_entry(Some(path));
let res = self
.primary()
.steal(move |c| {
let verse = &c.verse;
let snap = self.primary().sync_snapshot().map_err(z_internal_error)?;
// todo: rootless file
// todo: memory dirty file
let root = entry.root().ok_or_else(|| {
anyhow::anyhow!("root must be determined for trace, got {entry:?}")
})?;
let root = entry.root().ok_or_else(
|| error_once!("root must be determined for trace, got", entry: format!("{entry:?}")),
).map_err(z_internal_error)?;
let main = entry
.main()
.and_then(|e| e.vpath().resolve(&root))
.ok_or_else(|| anyhow::anyhow!("main file must be resolved, got {entry:?}"))?;
.ok_or_else(
|| error_once!("main file must be resolved, got", entry: format!("{entry:?}")),
)
.map_err(z_internal_error)?;
let Some(f) = thread else {
return Err(internal_error("user action thread is not available"))?;
};
if let Some(f) = thread {
f.send(UserActionRequest::Trace(
req_id,
TraceParams {
compiler_program: self_path,
root: root.as_ref().to_owned(),
main,
inputs: verse.inputs().as_ref().deref().clone(),
font_paths: verse.font_resolver.font_paths().to_owned(),
inputs: snap.world.inputs().as_ref().deref().clone(),
font_paths: snap.world.font_resolver.font_paths().to_owned(),
},
))
.context("cannot send trace request")?;
} else {
bail!("user action thread is not available");
}
Ok(Some(()))
})
.context("cannot steal primary compiler");
let res = match res {
Ok(res) => res,
Err(res) => Err(res),
};
res.map_err(|e| internal_error(format!("could not get document trace: {e}")))
.map_err(|_| internal_error("cannot send trace request"))
.map(Some)
}
/// Get the metrics of the document.
pub fn get_document_metrics(&mut self, mut args: Vec<JsonValue>) -> LspResult<JsonValue> {
pub fn get_document_metrics(
&mut self,
req_id: RequestId,
mut args: Vec<JsonValue>,
) -> ScheduledResult {
let path = get_arg!(args[0] as PathBuf);
let res = run_query!(self.DocumentMetrics(path))?;
let res = serde_json::to_value(res)
.map_err(|e| internal_error(format!("Cannot serialize response {e}")))?;
Ok(res)
run_query!(req_id, self.DocumentMetrics(path))
}
/// Get the server info.
pub fn get_server_info(&mut self, _arguments: Vec<JsonValue>) -> LspResult<JsonValue> {
let res = run_query!(self.ServerInfo())?;
let res = serde_json::to_value(res)
.map_err(|e| internal_error(format!("Cannot serialize response {e}")))?;
Ok(res)
pub fn get_server_info(
&mut self,
req_id: RequestId,
_arguments: Vec<JsonValue>,
) -> ScheduledResult {
run_query!(req_id, self.ServerInfo())
}
/// Get static resources with the help of the tinymist service, for example,
/// static help pages for some typst function.
pub fn get_resources(&mut self, mut args: Vec<JsonValue>) -> LspResult<JsonValue> {
pub fn get_resources(
&mut self,
req_id: RequestId,
mut args: Vec<JsonValue>,
) -> ScheduledResult {
let path = get_arg!(args[0] as PathBuf);
let Some(handler) = self.resource_routes.get(path.as_path()) else {
@ -353,35 +305,36 @@ impl LanguageState {
};
// Note: our redirection keeps the first path argument in the args vec.
handler(self, args)
handler(self, req_id, args)
}
}
impl LanguageState {
pub fn get_resource_routes() -> ResourceMap {
pub fn get_resource_routes() -> ResourceMap<Self> {
// LspHandler<Vec<JsonValue>, JsonValue>,
macro_rules! resources_at {
($key: expr, Self::$method: ident) => {
($key: expr, LanguageState::$method: ident) => {
(
Path::new($key).clean().as_path().into(),
resource_fn!(LspHandler<Vec<JsonValue>, JsonValue>, Self::$method, inputs),
resource_fn!(LanguageState::$method),
)
};
}
ResourceMap::from_iter([
resources_at!("/symbols", Self::resource_symbols),
resources_at!("/tutorial", Self::resource_tutoral),
resources_at!("/symbols", LanguageState::resource_symbols),
resources_at!("/tutorial", LanguageState::resource_tutoral),
])
}
/// Get all valid symbols
pub fn resource_symbols(&self, _arguments: Vec<JsonValue>) -> LspResult<JsonValue> {
pub fn resource_symbols(&self, _arguments: Vec<JsonValue>) -> AnySchedulableResponse {
let resp = self.get_symbol_resources();
resp.map_err(|e| internal_error(e.to_string()))
just_ok!(resp.map_err(|e| internal_error(e.to_string()))?)
}
/// Get tutorial web page
pub fn resource_tutoral(&self, _arguments: Vec<JsonValue>) -> LspResult<JsonValue> {
pub fn resource_tutoral(&self, _arguments: Vec<JsonValue>) -> AnySchedulableResponse {
Err(method_not_found())
}
}

View file

@ -6,9 +6,148 @@ pub mod compile;
pub mod compile_cmd;
pub mod compile_init;
#[cfg(feature = "preview")]
pub mod preview;
use serde_json::from_value;
use std::collections::HashMap;
use lsp_server::RequestId;
use reflexo::ImmutPath;
use serde_json::{from_value, Value as JsonValue};
/// Returns Ok(Some(())) -> Already responded
/// Returns Ok(None) -> Need to respond none
/// Returns Err(..) -> Need to respond error
type LspRawPureHandler<S, T> = fn(srv: &mut S, args: T) -> LspResult<()>;
type LspRawHandler<S, T> = fn(srv: &mut S, req_id: RequestId, args: T) -> LspResult<Option<()>>;
type ExecuteCmdMap<S> = HashMap<&'static str, LspRawHandler<S, Vec<JsonValue>>>;
type RegularCmdMap<S> = HashMap<&'static str, LspRawHandler<S, JsonValue>>;
// type LspMethod<Res> = fn(srv: &mut LanguageState, args: JsonValue) ->
// LspResult<Res>;
// type LspHandler<Req, Res> = fn(srv: &mut LanguageState, args:
// Req) -> LspResult<Res>;
type NotifyCmdMap<S> = HashMap<&'static str, LspRawPureHandler<S, JsonValue>>;
type ResourceMap<S> = HashMap<ImmutPath, LspRawHandler<S, Vec<JsonValue>>>;
use crate::ScheduledResult;
type SchedulableResponse<T> = LspResponseFuture<LspResult<T>>;
type AnySchedulableResponse = SchedulableResponse<JsonValue>;
// type AnySchedulableResponse = LspResult<JsonValue>;
macro_rules! request_fn_ {
($desc: ty, $s: ident::$method: ident) => {
(<$desc>::METHOD, {
const E: LspRawHandler<$s, JsonValue> = |this, req_id, req| {
let req: <$desc as lsp_types::request::Request>::Params =
serde_json::from_value(req).unwrap(); // todo: soft unwrap
this.$method(req_id, req)
};
E
})
};
}
use request_fn_;
macro_rules! request_fn {
($desc: ty, $s: ident::$method: ident) => {
(<$desc>::METHOD, {
const E: LspRawHandler<$s, JsonValue> = |this, req_id, req| {
let req: <$desc as lsp_types::request::Request>::Params =
serde_json::from_value(req).unwrap(); // todo: soft unwrap
let res = this.$method(req);
this.schedule(req_id, res)
};
E
})
};
}
use request_fn;
macro_rules! exec_fn_ {
($key: expr, $s: ident::$method: ident) => {
($key, {
const E: LspRawHandler<$s, Vec<JsonValue>> =
|this, req_id, req| this.$method(req_id, req);
E
})
};
}
use exec_fn_;
// let result = handler(self, req.id.clone(), req.params);
// match result {
// Ok(Some(())) => {}
// _ => self.client.respond(result_to_response(req.id, result)),
// }
macro_rules! exec_fn {
($key: expr, $s: ident::$method: ident) => {
($key, {
const E: LspRawHandler<$s, Vec<JsonValue>> = |this, req_id, req| {
let res = this.$method(req);
this.schedule(req_id, res)
};
E
})
};
}
use exec_fn;
macro_rules! resource_fn {
($s: ident::$method: ident) => {{
const E: LspRawHandler<$s, Vec<JsonValue>> = |this, req_id, req| {
let res = this.$method(req);
this.schedule(req_id, res)
};
E
}};
}
use resource_fn;
macro_rules! notify_fn {
($desc: ty, $s: ident::$method: ident) => {
(<$desc>::METHOD, {
const E: LspRawPureHandler<$s, JsonValue> = |this, input| {
let input: <$desc as lsp_types::notification::Notification>::Params =
serde_json::from_value(input).unwrap(); // todo: soft unwrap
this.$method(input)
};
E
})
};
}
use notify_fn;
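// Each macro above expands to a `(key, handler)` pair; the `_`-suffixed variants hand the
// request id to the method so it can respond on its own, while the others schedule the
// response returned by the method.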
// #[macro_export]
// macro_rules! request_fn {
// ($desc: ty, Self::$method: ident) => {
// (<$desc>::METHOD, {
// const E: LspMethod<JsonValue> = |this, req| {
// let req: <$desc as lsp_types::request::Request>::Params =
// serde_json::from_value(req).unwrap(); // todo: soft
// unwrap this.$method(req)
// };
// E
// })
// };
// }
// #[macro_export]
// macro_rules! notify_fn {
// ($desc: ty, Self::$method: ident) => {
// (<$desc>::METHOD, {
// const E: LspMethod<()> = |this, input| {
// let input: <$desc as
// lsp_types::notification::Notification>::Params =
// serde_json::from_value(input).unwrap(); // todo: soft unwrap
// this.$method(input) };
// E
// })
// };
// }
use crate::{just_ok, just_result};
/// Get a parsed command argument.
/// Returns `INVALID_PARAMS` when the argument is missing or fails to parse.
@ -39,3 +178,5 @@ macro_rules! get_arg_or_default {
}};
}
use get_arg_or_default;
use crate::{LspResponseFuture, LspResult};

View file

@ -5,7 +5,6 @@ use await_tree::InstrumentAwait;
use log::{error, info};
use typst::foundations::{Str, Value};
use typst_ts_compiler::{service::CompileDriver, TypstSystemUniverse};
use typst_ts_core::config::{compiler::EntryOpts, CompileOpts};
use hyper::{
@ -56,7 +55,7 @@ pub struct PreviewCliArgs {
mod compiler;
use compiler::CompileServer;
use crate::compile_init::CompileOnceArgs;
use crate::{compile_init::CompileOnceArgs, LspUniverse};
pub fn make_static_host(
previewer: &Previewer,
@ -137,8 +136,8 @@ pub async fn preview_main(args: PreviewCliArgs) -> anyhow::Result<()> {
std::process::exit(1);
}
let compiler_driver = {
let world = TypstSystemUniverse::new(CompileOpts {
let world = {
let world = LspUniverse::new(CompileOpts {
entry: EntryOpts::new_rooted(root.clone(), Some(entry.clone())),
inputs,
no_system_fonts: args.compile.font.ignore_system_fonts,
@ -148,7 +147,7 @@ pub async fn preview_main(args: PreviewCliArgs) -> anyhow::Result<()> {
})
.expect("incorrect options");
CompileDriver::new(std::marker::PhantomData, world.with_entry_file(entry))
world.with_entry_file(entry)
};
tokio::spawn(async move {
@ -160,7 +159,7 @@ pub async fn preview_main(args: PreviewCliArgs) -> anyhow::Result<()> {
let previewer = preview(
args.preview,
move |handle| {
let compile_server = CompileServer::new(compiler_driver, handle);
let compile_server = CompileServer::new(world, handle);
compile_server.spawn().unwrap()
},

View file

@ -2,106 +2,69 @@ use std::sync::Arc;
use await_tree::InstrumentAwait;
use tokio::sync::mpsc;
use typst::diag::SourceResult;
use typst::model::Document;
use typst::World;
use typst_ts_compiler::service::{CompileDriver, CompileMiddleware};
use typst_ts_compiler::service::{CompileExporter, Compiler, PureCompiler, WorldExporter};
use typst_ts_compiler::{EntryReader, TypstSystemWorld};
use tinymist_query::analysis::Analysis;
use tinymist_query::PositionEncoding;
use tokio::sync::{mpsc, watch};
use typst_preview::CompilationHandleImpl;
use typst_ts_compiler::EntryReader;
use typst_ts_core::Error;
use typst_preview::{CompilationHandle, CompileStatus};
use crate::actor::typ_client::CompileClientActorImpl;
use crate::actor::typ_client::{CompileClientActor, CompileHandler};
use crate::actor::typ_server::CompileServerActor;
use crate::compile_init::CompileConfig;
use crate::world::{LspCompilerFeat, LspWorld};
use crate::world::LspCompilerFeat;
use crate::LspUniverse;
pub type CompileService<H> =
CompileServerActor<Reporter<CompileExporter<PureCompiler<LspWorld>>, H>, LspCompilerFeat>;
pub type CompileClient<H> =
CompileClientActorImpl<Reporter<CompileExporter<PureCompiler<LspWorld>>, H>>;
pub type CompileService = CompileServerActor<LspCompilerFeat>;
pub type CompileClient = CompileClientActor;
pub struct CompileServer<H: CompilationHandle> {
inner: CompileService<H>,
pub struct CompileServer {
inner: CompileService,
handle: Arc<CompileHandler>,
}
pub struct Reporter<C, H> {
inner: C,
cb: H,
}
impl<C: Compiler, H: CompilationHandle> CompileMiddleware for Reporter<C, H> {
type Compiler = C;
fn inner(&self) -> &Self::Compiler {
&self.inner
}
fn inner_mut(&mut self) -> &mut Self::Compiler {
&mut self.inner
}
fn wrap_compile(
&mut self,
world: &<C as typst_ts_compiler::service::Compiler>::W,
env: &mut typst_ts_compiler::service::CompileEnv,
) -> SourceResult<Arc<Document>> {
self.cb.status(CompileStatus::Compiling);
match self.inner_mut().compile(world, env) {
Ok(doc) => {
self.cb.notify_compile(Ok(doc.clone()));
Ok(doc)
}
Err(err) => {
self.cb.notify_compile(Err(CompileStatus::CompileError));
Err(err)
}
}
}
}
impl<W: World, C: Compiler<W = W> + WorldExporter<W>, H> WorldExporter<W> for Reporter<C, H> {
fn export(&mut self, world: &W, output: Arc<typst::model::Document>) -> SourceResult<()> {
self.inner.export(world, output)
}
}
impl<H: CompilationHandle> CompileServer<H> {
pub fn new(
compiler_driver: CompileDriver<PureCompiler<TypstSystemWorld>>,
cb: H,
// renderer_sender: broadcast::Sender<RenderActorRequest>,
// editor_conn_sender: mpsc::UnboundedSender<EditorActorRequest>,
) -> Self {
let (intr_tx, intr_rx) = mpsc::unbounded_channel();
let CompileDriver { compiler, universe } = compiler_driver;
let entry = universe.entry_state();
// CompileExporter + DynamicLayoutCompiler + WatchDriver
let driver = CompileExporter::new(compiler);
let driver = Reporter { inner: driver, cb };
let inner =
CompileServerActor::new(driver, universe, entry, intr_tx, intr_rx).with_watch(true);
Self { inner }
}
pub fn spawn(self) -> Result<CompileClient<H>, Error> {
impl CompileServer {
pub fn new(verse: LspUniverse, cb: CompilationHandleImpl) -> Self {
// type EditorSender = mpsc::UnboundedSender<EditorRequest>;
let (doc_tx, _) = watch::channel(None);
let (export_tx, mut export_rx) = mpsc::unbounded_channel();
let intr_tx = self.inner.intr_tx();
let (editor_tx, mut editor_rx) = mpsc::unbounded_channel();
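        // The preview server does not consume export or editor events, so these channels exist
        // only to satisfy `CompileHandler` and are drained below.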
let handle = Arc::new(CompileHandler {
inner: std::sync::Arc::new(Some(cb)),
diag_group: "main".to_owned(),
doc_tx,
export_tx,
editor_tx,
analysis: Analysis {
position_encoding: PositionEncoding::Utf16,
enable_periscope: false,
caches: Default::default(),
},
periscope: tinymist_render::PeriscopeRenderer::default(),
});
        // Consume export_rx and editor_rx
tokio::spawn(async move { while export_rx.recv().await.is_some() {} });
tokio::spawn(async move { while editor_rx.recv().await.is_some() {} });
let (intr_tx, intr_rx) = mpsc::unbounded_channel();
let inner =
CompileServerActor::new(verse, intr_tx, intr_rx).with_watch(Some(handle.clone()));
Self { inner, handle }
}
pub fn spawn(self) -> Result<CompileClient, Error> {
let intr_tx = self.inner.intr_tx.clone();
let entry = self.inner.verse.entry_state();
tokio::spawn(self.inner.spawn().instrument_await("spawn typst server"));
// drop all export events
tokio::spawn(async move { while export_rx.recv().await.is_some() {} });
Ok(CompileClient::new(
"main".to_owned(),
self.handle,
CompileConfig::default(),
entry,
intr_tx,
export_tx,
))
}
}

View file

@ -3,10 +3,12 @@
use std::path::PathBuf;
use anyhow::anyhow;
use futures::future::MaybeDone;
use lsp_server::RequestId;
use lsp_types::TextDocumentContentChangeEvent;
use tinymist_query::{
lsp_to_typst, CompilerQueryRequest, CompilerQueryResponse, FoldRequestFeature, OnExportRequest,
OnSaveExportRequest, PositionEncoding, SemanticRequest, StatefulRequest, SyntaxRequest,
OnSaveExportRequest, PositionEncoding, SyntaxRequest,
};
use typst::{diag::FileResult, syntax::Source};
use typst_ts_compiler::{
@ -15,7 +17,7 @@ use typst_ts_compiler::{
};
use typst_ts_core::{error::prelude::*, Bytes, Error, ImmutPath};
use crate::{actor::typ_client::CompileClientActor, compile::CompileState, LanguageState};
use crate::{actor::typ_client::CompileClientActor, compile::CompileState, *};
impl CompileState {
/// Focus main file to some path.
@ -181,25 +183,59 @@ impl LanguageState {
self.update_source(files)
}
pub fn query_source<T>(
&self,
path: ImmutPath,
f: impl FnOnce(Source) -> anyhow::Result<T>,
) -> anyhow::Result<T> {
let snapshot = self.primary.memory_changes.get(&path);
let snapshot = snapshot.ok_or_else(|| anyhow!("file missing {path:?}"))?;
let source = snapshot.content.clone();
f(source)
}
pub fn snapshot(&self) -> LanguageStateSnapshot {
LanguageStateSnapshot {}
}
pub fn schedule_query(&mut self, req_id: RequestId, query_fut: QueryFuture) -> ScheduledResult {
let fut = query_fut.map_err(|e| internal_error(e.to_string()))?;
let fut: AnySchedulableResponse = Ok(match fut {
MaybeDone::Done(res) => MaybeDone::Done(
res.and_then(|res| Ok(res.to_untyped()?))
.map_err(|err| internal_error(err.to_string())),
),
MaybeDone::Future(fut) => MaybeDone::Future(Box::pin(async move {
let res = fut.await;
res.and_then(|res| Ok(res.to_untyped()?))
.map_err(|err| internal_error(err.to_string()))
})),
MaybeDone::Gone => MaybeDone::Gone,
});
self.schedule(req_id, fut)
}
}
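As an aside, here is a minimal, self-contained sketch of the `MaybeDone` mapping pattern used by `schedule_query` above, assuming only the `futures` crate; `BoxedQuery`, `adapt`, and the string payload are local to the sketch, not part of this change.

use std::future::Future;
use std::pin::Pin;

use futures::future::MaybeDone; // requires the `futures` crate

/// A boxed query future, standing in for the project's query internals.
type BoxedQuery = Pin<Box<dyn Future<Output = Result<String, String>> + Send>>;

/// Map a query result in place, whether it is already available or still pending.
fn adapt(fut: MaybeDone<BoxedQuery>) -> MaybeDone<BoxedQuery> {
    match fut {
        // Already computed: transform the result immediately.
        MaybeDone::Done(res) => MaybeDone::Done(res.map(|v| format!("ok: {v}"))),
        // Still pending: box a new future that transforms the result on completion.
        MaybeDone::Future(inner) => {
            let mapped: BoxedQuery =
                Box::pin(async move { inner.await.map(|v| format!("ok: {v}")) });
            MaybeDone::Future(mapped)
        }
        // The output was already taken elsewhere.
        MaybeDone::Gone => MaybeDone::Gone,
    }
}

fn main() {
    // A query whose result is already available is mapped without awaiting anything.
    let done = adapt(MaybeDone::Done(Ok("hover".to_string())));
    assert!(matches!(done, MaybeDone::Done(Ok(_))));
}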
pub struct LanguageStateSnapshot {}
#[macro_export]
macro_rules! run_query_tail {
($self: ident.$query: ident ($($arg_key:ident),* $(,)?)) => {{
use tinymist_query::*;
let req = paste::paste! { [<$query Request>] { $($arg_key),* } };
let query_fut = $self.query(CompilerQueryRequest::$query(req.clone()));
$self.handle.spawn(query_fut.map_err(|e| internal_error(e.to_string()))?)
}};
}
#[macro_export]
macro_rules! run_query {
($self: ident.$query: ident ($($arg_key:ident),* $(,)?)) => {{
($req_id: ident, $self: ident.$query: ident ($($arg_key:ident),* $(,)?)) => {{
use tinymist_query::*;
let req = paste::paste! { [<$query Request>] { $($arg_key),* } };
$self
.query(CompilerQueryRequest::$query(req.clone()))
.map_err(|err| {
error!("error getting $query: {err} with request {req:?}");
internal_error("Internal error")
})
.map(|resp| {
let CompilerQueryResponse::$query(resp) = resp else {
unreachable!()
};
resp
})
let query_fut = $self.query(CompilerQueryRequest::$query(req.clone()));
$self.schedule_query($req_id, query_fut)
}};
}
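// `run_query!` builds the typed request from the macro arguments, issues it through `query`,
// and schedules the resulting future under the given request id.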
@ -228,69 +264,81 @@ macro_rules! query_tokens_cache {
macro_rules! query_state {
($self:ident, $method:ident, $req:expr) => {{
let res = $self.steal_state(move |w, doc| $req.request(w, doc));
res.map(CompilerQueryResponse::$method)
let snap = $self.snapshot()?;
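        // With the `stable-server` feature the query is answered synchronously on the snapshot;
        // otherwise it is deferred to an async future.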
#[cfg(feature = "stable-server")]
{
just_result!(snap.stateful_sync($req).map(CompilerQueryResponse::$method))
}
#[cfg(not(feature = "stable-server"))]
{
just_future!(async move {
snap.stateful($req)
.await
.map(CompilerQueryResponse::$method)
})
}
}};
}
macro_rules! query_world {
($self:ident, $method:ident, $req:expr) => {{
let res = $self.steal_world(move |w| $req.request(w));
res.map(CompilerQueryResponse::$method)
let snap = $self.snapshot()?;
#[cfg(feature = "stable-server")]
{
just_result!(snap.semantic_sync($req).map(CompilerQueryResponse::$method))
}
#[cfg(not(feature = "stable-server"))]
{
just_future!(async move {
snap.semantic($req)
.await
.map(CompilerQueryResponse::$method)
})
}
}};
}
impl LanguageState {
pub fn query_source<T>(
&self,
path: ImmutPath,
f: impl FnOnce(Source) -> anyhow::Result<T>,
) -> anyhow::Result<T> {
let snapshot = self.primary.memory_changes.get(&path);
let snapshot = snapshot.ok_or_else(|| anyhow!("file missing {path:?}"))?;
let source = snapshot.content.clone();
f(source)
}
pub fn query(&mut self, query: CompilerQueryRequest) -> anyhow::Result<CompilerQueryResponse> {
pub fn query(&mut self, query: CompilerQueryRequest) -> QueryFuture {
use CompilerQueryRequest::*;
match query {
InteractCodeContext(req) => query_source!(self, InteractCodeContext, req),
SemanticTokensFull(req) => query_tokens_cache!(self, SemanticTokensFull, req),
SemanticTokensDelta(req) => query_tokens_cache!(self, SemanticTokensDelta, req),
FoldingRange(req) => query_source!(self, FoldingRange, req),
SelectionRange(req) => query_source!(self, SelectionRange, req),
DocumentSymbol(req) => query_source!(self, DocumentSymbol, req),
OnEnter(req) => query_source!(self, OnEnter, req),
ColorPresentation(req) => Ok(CompilerQueryResponse::ColorPresentation(req.request())),
let query = match query {
InteractCodeContext(req) => query_source!(self, InteractCodeContext, req)?,
SemanticTokensFull(req) => query_tokens_cache!(self, SemanticTokensFull, req)?,
SemanticTokensDelta(req) => query_tokens_cache!(self, SemanticTokensDelta, req)?,
FoldingRange(req) => query_source!(self, FoldingRange, req)?,
SelectionRange(req) => query_source!(self, SelectionRange, req)?,
DocumentSymbol(req) => query_source!(self, DocumentSymbol, req)?,
OnEnter(req) => query_source!(self, OnEnter, req)?,
ColorPresentation(req) => CompilerQueryResponse::ColorPresentation(req.request()),
_ => {
let client = &mut self.primary;
if !self.pinning && !self.config.compile.has_default_entry_path {
                    // todo: race condition; we need an atomic primary query
if let Some(path) = query.associated_path() {
// todo!!!!!!!!!!!!!!
client.do_change_entry(Some(path.into()))?;
}
}
Self::query_on(client.compiler(), query)
}
return Self::query_on(client.compiler(), query);
}
};
just_ok!(query)
}
fn query_on(
client: &CompileClientActor,
query: CompilerQueryRequest,
) -> anyhow::Result<CompilerQueryResponse> {
fn query_on(client: &CompileClientActor, query: CompilerQueryRequest) -> QueryFuture {
use CompilerQueryRequest::*;
assert!(query.fold_feature() != FoldRequestFeature::ContextFreeUnique);
match query {
OnExport(OnExportRequest { kind, path }) => Ok(CompilerQueryResponse::OnExport(
OnExport(OnExportRequest { kind, path }) => just_ok!(CompilerQueryResponse::OnExport(
client.on_export(kind, path)?,
)),
OnSaveExport(OnSaveExportRequest { path }) => {
client.on_save_export(path)?;
Ok(CompilerQueryResponse::OnSaveExport(()))
just_ok!(CompilerQueryResponse::OnSaveExport(()))
}
Hover(req) => query_state!(client, Hover, req),
GotoDefinition(req) => query_state!(client, GotoDefinition, req),
@ -309,7 +357,7 @@ impl LanguageState {
DocumentMetrics(req) => query_state!(client, DocumentMetrics, req),
ServerInfo(_) => {
let res = client.collect_server_info()?;
Ok(CompilerQueryResponse::ServerInfo(Some(res)))
just_ok!(CompilerQueryResponse::ServerInfo(Some(res)))
}
_ => unreachable!(),
}
@ -317,8 +365,25 @@ impl LanguageState {
}
impl CompileState {
pub fn query(&self, query: CompilerQueryRequest) -> anyhow::Result<CompilerQueryResponse> {
pub fn query(&self, query: CompilerQueryRequest) -> QueryFuture {
let client = self.compiler.as_ref().unwrap();
LanguageState::query_on(client, query)
}
pub fn schedule_query(&mut self, req_id: RequestId, query_fut: QueryFuture) -> ScheduledResult {
let fut = query_fut.map_err(|e| internal_error(e.to_string()))?;
let fut: AnySchedulableResponse = Ok(match fut {
MaybeDone::Done(res) => MaybeDone::Done(
res.and_then(|res| Ok(res.to_untyped()?))
.map_err(|err| internal_error(err.to_string())),
),
MaybeDone::Future(fut) => MaybeDone::Future(Box::pin(async move {
let res = fut.await;
res.and_then(|res| Ok(res.to_untyped()?))
.map_err(|err| internal_error(err.to_string()))
})),
MaybeDone::Gone => MaybeDone::Gone,
});
self.schedule(req_id, fut)
}
}

View file

@ -7,7 +7,7 @@ use typst::syntax::VirtualPath;
use typst::World;
use typst_ts_core::{Bytes, ImmutPath, TypstFileId};
use crate::world::{LspUniverse, LspWorld};
use crate::world::LspWorld;
#[derive(Debug, Clone)]
pub enum TemplateSource {
@ -20,15 +20,13 @@ pub struct InitTask {
}
/// Execute an initialization command.
pub fn get_entry(verse: &LspUniverse, tmpl: TemplateSource) -> StrResult<Bytes> {
pub fn get_entry(world: &LspWorld, tmpl: TemplateSource) -> StrResult<Bytes> {
let TemplateSource::Package(spec) = tmpl;
let toml_id = TypstFileId::new(Some(spec.clone()), VirtualPath::new("typst.toml"));
let world = verse.spawn();
// Parse the manifest.
let manifest = parse_manifest(&world, toml_id)?;
let manifest = parse_manifest(world, toml_id)?;
manifest.validate(&spec)?;
// Ensure that it is indeed a template.

View file

@ -2,14 +2,14 @@ use typst::diag::{eco_format, StrResult};
use typst::syntax::package::{PackageVersion, VersionlessPackageSpec};
use typst_ts_compiler::package::Registry;
use crate::world::LspUniverse;
use crate::LspWorld;
mod init;
pub use init::*;
/// Try to determine the latest version of a package.
pub fn determine_latest_version(
world: &LspUniverse,
world: &LspWorld,
spec: &VersionlessPackageSpec,
) -> StrResult<PackageVersion> {
if spec.namespace == "preview" {

View file

@ -10,23 +10,19 @@ use typst_preview::{
SourceFileServer,
};
use typst_ts_compiler::vfs::notify::{FileChangeSet, MemoryEvent};
use typst_ts_compiler::{service::Compiler, EntryReader};
use typst_ts_compiler::EntryReader;
use typst_ts_core::debug_loc::SourceSpanOffset;
use typst_ts_core::{Error, TypstDocument, TypstFileId};
use crate::actor::typ_client::CompileClientActorImpl;
use crate::world::LspWorld;
use crate::actor::typ_client::CompileClientActor;
use crate::actor::typ_server::CompileSnapshot;
use crate::world::{LspCompilerFeat, LspWorld};
impl<C: Compiler<W = LspWorld> + Send> SourceFileServer for CompileClientActorImpl<C> {
impl CompileClientActor {
    /// fixme: the character offset is 0-based and counted in UTF-16 code units;
    /// we treat it as UTF-8 for now.
async fn resolve_source_span(
&mut self,
loc: Location,
) -> Result<Option<SourceSpanOffset>, Error> {
fn resolve_source_span(world: &LspWorld, loc: Location) -> Option<SourceSpanOffset> {
let Location::Src(loc) = loc;
self.steal_async(move |this| {
let world = this.verse.spawn();
let filepath = Path::new(&loc.filepath);
let relative_path = filepath.strip_prefix(&world.workspace_root()?).ok()?;
@ -44,26 +40,22 @@ impl<C: Compiler<W = LspWorld> + Send> SourceFileServer for CompileClientActorIm
let offset = cursor.saturating_sub(node.offset());
Some(SourceSpanOffset { span, offset })
})
.await
}
    /// fixme: the character offset is 0-based and counted in UTF-16 code units;
    /// we treat it as UTF-8 for now.
async fn resolve_document_position(
&mut self,
// resolve_document_position
fn resolve_document_position(
snap: &CompileSnapshot<LspCompilerFeat>,
loc: Location,
) -> Result<Option<Position>, Error> {
) -> Option<Position> {
let Location::Src(src_loc) = loc;
let path = Path::new(&src_loc.filepath).to_owned();
let line = src_loc.pos.line;
let column = src_loc.pos.column;
self.steal_async(move |this| {
let doc = this.latest_doc.as_deref()?;
let world = this.verse.spawn();
let doc = snap.doc().ok();
let doc = doc.as_deref()?;
let world = &snap.world;
let relative_path = path.strip_prefix(&world.workspace_root()?).ok()?;
@ -72,8 +64,51 @@ impl<C: Compiler<W = LspWorld> + Send> SourceFileServer for CompileClientActorIm
let cursor = source.line_column_to_byte(line, column)?;
jump_from_cursor(doc, &source, cursor)
}
fn resolve_source_location(
world: &LspWorld,
span: Span,
offset: Option<usize>,
) -> Option<DocToSrcJumpInfo> {
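        // Maps a byte offset within `src` to a (line, column) pair.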
let resolve_off =
|src: &Source, off: usize| src.byte_to_line(off).zip(src.byte_to_column(off));
let source = world.source(span.id()?).ok()?;
let mut range = source.find(span)?.range();
if let Some(off) = offset {
if off < range.len() {
range.start += off;
}
}
let filepath = world.path_for_id(span.id()?).ok()?;
Some(DocToSrcJumpInfo {
filepath: filepath.to_string_lossy().to_string(),
start: resolve_off(&source, range.start),
end: resolve_off(&source, range.end),
})
.await
}
}
impl SourceFileServer for CompileClientActor {
    /// fixme: the character offset is 0-based and counted in UTF-16 code units;
    /// we treat it as UTF-8 for now.
async fn resolve_source_span(
&mut self,
loc: Location,
) -> Result<Option<SourceSpanOffset>, Error> {
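        // Resolve against an owned snapshot of the world instead of stealing the compiler thread.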
let snap = self.snapshot()?.snapshot().await?;
Ok(Self::resolve_source_span(&snap.world, loc))
}
    /// fixme: the character offset is 0-based and counted in UTF-16 code units;
    /// we treat it as UTF-8 for now.
async fn resolve_document_position(
&mut self,
loc: Location,
) -> Result<Option<Position>, Error> {
let snap = self.snapshot()?.snapshot().await?;
Ok(Self::resolve_document_position(&snap, loc))
}
async fn resolve_source_location(
@ -81,35 +116,8 @@ impl<C: Compiler<W = LspWorld> + Send> SourceFileServer for CompileClientActorIm
span: Span,
offset: Option<usize>,
) -> Result<Option<DocToSrcJumpInfo>, Error> {
let resolve_off =
|src: &Source, off: usize| src.byte_to_line(off).zip(src.byte_to_column(off));
let ret = self
.steal_async(move |this| {
let world = this.verse.spawn();
let src_id = span.id()?;
let source = world.source(src_id).ok()?;
let mut range = source.find(span)?.range();
if let Some(off) = offset {
if off < range.len() {
range.start += off;
}
}
let filepath = world.path_for_id(src_id).ok()?;
Some(DocToSrcJumpInfo {
filepath: filepath.to_string_lossy().to_string(),
start: resolve_off(&source, range.start),
end: resolve_off(&source, range.end),
})
})
.await
.map_err(|err| {
log::error!("TypstActor: failed to resolve span and offset: {:#}", err);
})
.ok()
.flatten();
Ok(ret)
let snap = self.snapshot()?.snapshot().await?;
Ok(Self::resolve_source_location(&snap.world, span, offset))
}
}
@ -178,7 +186,7 @@ fn find_in_frame(frame: &Frame, span: Span, min_dis: &mut u64, p: &mut Point) ->
None
}
impl<C: Compiler<W = LspWorld> + Send> EditorServer for CompileClientActorImpl<C> {
impl EditorServer for CompileClientActor {
async fn update_memory_files(
&mut self,
files: MemoryFiles,
@ -215,4 +223,4 @@ impl<C: Compiler<W = LspWorld> + Send> EditorServer for CompileClientActorImpl<C
}
}
impl<C: Compiler<W = LspWorld> + Send> CompileHost for CompileClientActorImpl<C> {}
impl CompileHost for CompileClientActor {}

View file

@ -172,7 +172,6 @@ impl RenderActor {
} else {
self.renderer.pack_delta(document)
};
comemo::evict(30);
let Ok(_) = self.svg_sender.send(data) else {
info!("RenderActor: svg_sender is dropped");
break;
@ -248,7 +247,6 @@ impl OutlineRenderActor {
continue;
};
let data = self.outline(&document).instrument_await("outline").await;
comemo::evict(30);
debug!("OutlineRenderActor: sending outline");
let Ok(_) = self.editor_tx.send(EditorActorRequest::Outline(data)) else {
info!("OutlineRenderActor: outline_sender is dropped");

View file

@ -3,6 +3,6 @@ if (-Not (Test-Path $InstallPath)) {
New-Item -ItemType Directory $InstallPath
}
cargo build --release --bin tinymist
cargo build --release --bin tinymist --features stable-server
Copy-Item -Path ".\target\release\tinymist.exe" -Destination "$InstallPath\tinymist.exe" -Force
cargo insta test -p tests --accept