dev: refactor cluster struct into server

Myriad-Dreamin 2024-03-10 19:04:15 +08:00
parent 9c9fcd91ba
commit 199f826a7b
8 changed files with 987 additions and 1031 deletions

@@ -0,0 +1,46 @@
//! Bootstrap actors for Tinymist.
pub mod cluster;
pub mod render;
pub mod typst;
use std::{borrow::Cow, path::PathBuf};
use ::typst::util::Deferred;
use tokio::sync::{broadcast, watch};
use typst_ts_core::config::CompileOpts;
use self::{
render::PdfExportActor,
typst::{create_server, CompileActor},
};
use crate::TypstLanguageServer;
impl TypstLanguageServer {
pub fn server(&self, name: String, entry: Option<PathBuf>) -> Deferred<CompileActor> {
let (doc_tx, doc_rx) = watch::channel(None);
let (render_tx, _) = broadcast::channel(10);
// Run the PDF export actor before preparing the cluster to avoid losing events
tokio::spawn(PdfExportActor::new(doc_rx.clone(), render_tx.subscribe()).run());
let roots = self.roots.clone();
let opts = CompileOpts {
root_dir: roots.first().cloned().unwrap_or_default(),
// todo: font paths
// font_paths: arguments.font_paths.clone(),
with_embedded_fonts: typst_assets::fonts().map(Cow::Borrowed).collect(),
..CompileOpts::default()
};
create_server(
name,
self.const_config(),
roots,
opts,
entry,
self.diag_tx.clone(),
doc_tx,
render_tx,
)
}
}
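
A note on the ordering enforced by the comment above: tokio's broadcast channel only delivers values sent after a receiver subscribes, so the PDF export actor must be spawned (and subscribed) before anything can publish render requests. A minimal sketch of that behavior, separate from the commit:

use tokio::sync::broadcast;

#[tokio::main]
async fn main() {
    let (tx, _) = broadcast::channel::<u32>(10);

    let mut early = tx.subscribe(); // subscribed before the send: sees the value
    tx.send(1).unwrap();
    let mut late = tx.subscribe(); // subscribed after the send: misses it

    assert_eq!(early.recv().await.unwrap(), 1);
    assert!(late.try_recv().is_err());
}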

@@ -0,0 +1,174 @@
//! The cluster actor running in background
use std::collections::HashMap;
use futures::future::join_all;
use log::info;
use lsp_types::{Diagnostic, Url};
use tinymist_query::{DiagnosticsMap, LspDiagnostic};
use tokio::sync::mpsc;
use crate::LspHost;
pub struct CompileClusterActor {
pub host: LspHost,
pub diag_rx: mpsc::UnboundedReceiver<(String, Option<DiagnosticsMap>)>,
pub diagnostics: HashMap<Url, HashMap<String, Vec<LspDiagnostic>>>,
pub affect_map: HashMap<String, Vec<Url>>,
pub published_primary: bool,
}
impl CompileClusterActor {
pub async fn run(mut self) {
loop {
tokio::select! {
e = self.diag_rx.recv() => {
let Some((group, diagnostics)) = e else {
break;
};
info!("received diagnostics from {}: diag({:?})", group, diagnostics.as_ref().map(|e| e.len()));
let with_primary = (self.affect_map.len() <= 1 && self.affect_map.contains_key("primary")) && group == "primary";
self.publish(group, diagnostics, with_primary).await;
if !with_primary {
let again_with_primary = self.affect_map.len() == 1 && self.affect_map.contains_key("primary");
if self.published_primary != again_with_primary {
self.flush_primary_diagnostics(again_with_primary).await;
self.published_primary = again_with_primary;
}
}
}
}
info!("compile cluster actor is stopped");
}
}
pub async fn do_publish_diagnostics(
host: &LspHost,
uri: Url,
diags: Vec<Diagnostic>,
version: Option<i32>,
ignored: bool,
) {
if ignored {
return;
}
host.publish_diagnostics(uri, diags, version)
}
async fn flush_primary_diagnostics(&mut self, enable: bool) {
let affected = self.affect_map.get("primary");
let tasks = affected.into_iter().flatten().map(|url| {
let path_diags = self.diagnostics.get(url);
let diags = path_diags.into_iter().flatten().filter_map(|(g, diags)| {
if g == "primary" {
return enable.then_some(diags);
}
Some(diags)
});
// todo: .flatten() removed
// let to_publish = diags.flatten().cloned().collect();
let to_publish = diags.flatten().cloned().collect();
Self::do_publish_diagnostics(&self.host, url.clone(), to_publish, None, false)
});
join_all(tasks).await;
}
pub async fn publish(
&mut self,
group: String,
next_diagnostics: Option<DiagnosticsMap>,
with_primary: bool,
) {
let is_primary = group == "primary";
let affected = self.affect_map.get_mut(&group);
let affected = affected.map(std::mem::take);
// Gets sources which had some diagnostic published last time, but not this
// time. The LSP specifies that files will not have diagnostics
// updated, including removed, without an explicit update, so we need
// to send an empty `Vec` of diagnostics to these sources.
// todo: merge
let clear_list = if let Some(n) = next_diagnostics.as_ref() {
affected
.into_iter()
.flatten()
.filter(|e| !n.contains_key(e))
.map(|e| (e, None))
.collect::<Vec<_>>()
} else {
affected
.into_iter()
.flatten()
.map(|e| (e, None))
.collect::<Vec<_>>()
};
let next_affected = if let Some(n) = next_diagnostics.as_ref() {
n.keys().cloned().collect()
} else {
Vec::new()
};
let clear_all = next_diagnostics.is_none();
// Gets touched updates
let update_list = next_diagnostics
.into_iter()
.flatten()
.map(|(x, y)| (x, Some(y)));
let tasks = clear_list.into_iter().chain(update_list);
let tasks = tasks.map(|(url, next)| {
let path_diags = self.diagnostics.entry(url.clone()).or_default();
let rest_all = path_diags
.iter()
.filter_map(|(g, diags)| {
if !with_primary && g == "primary" {
return None;
}
if g != &group {
Some(diags)
} else {
None
}
})
.flatten()
.cloned();
let next_all = next.clone().into_iter().flatten();
let to_publish = rest_all.chain(next_all).collect();
match next {
Some(next) => {
path_diags.insert(group.clone(), next);
}
None => {
path_diags.remove(&group);
}
}
Self::do_publish_diagnostics(
&self.host,
url,
to_publish,
None,
is_primary && !with_primary,
)
});
join_all(tasks).await;
if clear_all {
// We just used the cache, and won't need it again, so we can update it now
self.affect_map.remove(&group);
} else {
// We just used the cache, and won't need it again, so we can update it now
self.affect_map.insert(group, next_affected);
}
}
}
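
The clear_list computation above leans on an LSP rule: a client keeps showing a file's last-published diagnostics until the server explicitly publishes a replacement, so retracting diagnostics means publishing an empty Vec for that URI. A hypothetical helper (not in this commit) spelling the rule out, reusing the host.publish_diagnostics call seen above:

use std::collections::HashMap;

use lsp_types::{Diagnostic, Url};

use crate::LspHost;

/// Publish an empty set for every URI that had diagnostics last round but
/// has none in `next`, so the client drops the stale ones.
fn clear_stale(
    host: &LspHost,
    previously_affected: &[Url],
    next: &HashMap<Url, Vec<Diagnostic>>,
) {
    for uri in previously_affected {
        if !next.contains_key(uri) {
            // An empty Vec retracts the diagnostics previously shown for `uri`.
            host.publish_diagnostics(uri.clone(), Vec::new(), None);
        }
    }
}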

@@ -1,98 +0,0 @@
//! Bootstrap actors for Tinymist.
use std::{borrow::Cow, path::PathBuf, sync::Arc};
use ::typst::util::Deferred;
use parking_lot::Mutex;
use tinymist_query::DiagnosticsMap;
use tokio::sync::{broadcast, mpsc, watch};
use typst_ts_core::config::CompileOpts;
use self::{
render::PdfExportActor,
typst::{create_server, CompileCluster, CompileHandler, CompileNode},
};
use crate::{ConstConfig, LspHost};
pub mod render;
pub mod typst;
struct Repr {
config: ConstConfig,
diag_tx: mpsc::UnboundedSender<(String, Option<DiagnosticsMap>)>,
diag_rx: Option<mpsc::UnboundedReceiver<(String, Option<DiagnosticsMap>)>>,
}
impl Repr {
fn server(
&mut self,
name: String,
roots: Vec<PathBuf>,
entry: Option<PathBuf>,
) -> Deferred<CompileNode<CompileHandler>> {
let (doc_tx, doc_rx) = watch::channel(None);
let (render_tx, _) = broadcast::channel(10);
// Run the PDF export actor before preparing the cluster to avoid losing events
tokio::spawn(PdfExportActor::new(doc_rx.clone(), render_tx.subscribe()).run());
let opts = CompileOpts {
root_dir: roots.first().cloned().unwrap_or_default(),
// todo: font paths
// font_paths: arguments.font_paths.clone(),
with_embedded_fonts: typst_assets::fonts().map(Cow::Borrowed).collect(),
..CompileOpts::default()
};
create_server(
name,
&self.config,
roots.clone(),
opts,
entry,
self.diag_tx.clone(),
doc_tx,
render_tx,
)
}
pub fn prepare_cluster(
&mut self,
fac: ActorFactory,
host: LspHost,
roots: Vec<PathBuf>,
) -> CompileCluster {
let diag_rx = self.diag_rx.take().expect("diag_rx is poisoned");
let primary = self.server("primary".to_owned(), roots.clone(), None);
CompileCluster::new(fac, host, roots, &self.config, primary, diag_rx)
}
}
#[derive(Clone)]
pub struct ActorFactory(Arc<Mutex<Repr>>);
impl ActorFactory {
pub fn new(config: ConstConfig) -> Self {
let (diag_tx, diag_rx) = mpsc::unbounded_channel();
Self(Arc::new(Mutex::new(Repr {
config,
diag_tx,
diag_rx: Some(diag_rx),
})))
}
fn server(
&self,
name: String,
roots: Vec<PathBuf>,
entry: Option<PathBuf>,
) -> Deferred<CompileNode<CompileHandler>> {
self.0.lock().server(name, roots, entry)
}
pub fn prepare_cluster(&self, host: LspHost, roots: Vec<PathBuf>) -> CompileCluster {
self.0.lock().prepare_cluster(self.clone(), host, roots)
}
}

@@ -1,25 +1,21 @@
//! The typst actors running compilations.
use std::{
collections::HashMap,
path::{Path, PathBuf},
sync::{Arc, Mutex as SyncMutex},
};
use anyhow::anyhow;
use futures::future::join_all;
use log::{debug, error, info, trace, warn};
use lsp_types::{Diagnostic, TextDocumentContentChangeEvent, Url};
use parking_lot::{Mutex, RwLock};
use log::{debug, error, trace, warn};
use parking_lot::Mutex;
use tinymist_query::{
lsp_to_typst, CompilerQueryRequest, CompilerQueryResponse, DiagnosticsMap, FoldRequestFeature,
LspDiagnostic, OnSaveExportRequest, PositionEncoding, SemanticTokenCache,
CompilerQueryRequest, CompilerQueryResponse, DiagnosticsMap, FoldRequestFeature,
OnSaveExportRequest, PositionEncoding,
};
use tokio::sync::{broadcast, mpsc, watch};
use typst::{
diag::{FileResult, SourceDiagnostic, SourceResult},
diag::{SourceDiagnostic, SourceResult},
layout::Position,
syntax::{Source, Span, VirtualPath},
syntax::{Span, VirtualPath},
util::Deferred,
};
use typst_preview::{
@@ -28,8 +24,9 @@ use typst_preview::{
};
use typst_ts_compiler::{
service::{
CompileActor, CompileClient as TsCompileClient, CompileDriver as CompileDriverInner,
CompileExporter, CompileMiddleware, Compiler, WorkspaceProvider, WorldExporter,
CompileActor as CompileActorInner, CompileClient as TsCompileClient,
CompileDriver as CompileDriverInner, CompileExporter, CompileMiddleware, Compiler,
WorkspaceProvider, WorldExporter,
},
vfs::notify::{FileChangeSet, MemoryEvent},
Time, TypstSystemWorld,
@@ -41,105 +38,12 @@ use typst_ts_core::{
use crate::actor::render::RenderActorRequest;
use crate::ConstConfig;
use crate::LspHost;
use super::ActorFactory;
type CompileService<H> = CompileActor<Reporter<CompileExporter<CompileDriver>, H>>;
type CompileService<H> = CompileActorInner<Reporter<CompileExporter<CompileDriver>, H>>;
type CompileClient<H> = TsCompileClient<CompileService<H>>;
type Node = CompileNode<CompileHandler>;
type DiagnosticsSender = mpsc::UnboundedSender<(String, Option<DiagnosticsMap>)>;
pub struct CompileCluster {
roots: Vec<PathBuf>,
actor_factory: ActorFactory,
position_encoding: PositionEncoding,
memory_changes: RwLock<HashMap<Arc<Path>, MemoryFileMeta>>,
primary: Deferred<Node>,
main: Arc<Mutex<Option<Deferred<Node>>>>,
pub tokens_cache: SemanticTokenCache,
actor: Option<CompileClusterActor>,
}
impl CompileCluster {
pub fn new(
actor_factory: ActorFactory,
host: LspHost,
roots: Vec<PathBuf>,
cfg: &ConstConfig,
primary: Deferred<Node>,
diag_rx: mpsc::UnboundedReceiver<(String, Option<DiagnosticsMap>)>,
) -> Self {
Self {
roots,
actor_factory,
position_encoding: cfg.position_encoding,
memory_changes: RwLock::new(HashMap::new()),
primary,
main: Arc::new(Mutex::new(None)),
tokens_cache: Default::default(),
actor: Some(CompileClusterActor {
host,
diag_rx,
diagnostics: HashMap::new(),
affect_map: HashMap::new(),
published_primary: false,
}),
}
}
pub fn split(mut self) -> (Self, CompileClusterActor) {
let actor = self.actor.take().expect("actor is poisoned");
(self, actor)
}
pub fn activate_doc(&self, new_entry: Option<ImmutPath>) -> Result<(), Error> {
match new_entry {
Some(new_entry) => self.primary.wait().change_entry(new_entry)?,
None => {
self.primary.wait().disable();
}
}
Ok(())
}
pub fn pin_main(&self, new_entry: Option<Url>) -> Result<(), Error> {
let mut m = self.main.lock();
match (new_entry, m.is_some()) {
(Some(new_entry), true) => {
let path = new_entry
.to_file_path()
.map_err(|_| error_once!("invalid url"))?;
let path = path.as_path().into();
m.as_mut().unwrap().wait().change_entry(path)
}
(Some(new_entry), false) => {
let path = new_entry
.to_file_path()
.map_err(|_| error_once!("invalid url"))?;
let path = path.as_path().into();
let main_node =
self.actor_factory
.server("main".to_owned(), self.roots.clone(), Some(path));
*m = Some(main_node);
Ok(())
}
(None, true) => {
// todo: unpin main
m.as_mut().unwrap().wait().disable();
Ok(())
}
(None, false) => Ok(()),
}
}
}
#[allow(clippy::too_many_arguments)]
pub fn create_server(
diag_group: String,
@@ -150,7 +54,7 @@ pub fn create_server(
diag_tx: DiagnosticsSender,
doc_sender: watch::Sender<Option<Arc<TypstDocument>>>,
render_tx: broadcast::Sender<RenderActorRequest>,
) -> Deferred<Node> {
) -> Deferred<CompileActor> {
let cfg = cfg.clone();
let current_runtime = tokio::runtime::Handle::current();
Deferred::new(move || {
@@ -174,13 +78,13 @@ pub fn create_server(
inner: driver,
cb: handler.clone(),
};
let driver = CompileActor::new(driver, root).with_watch(true);
let driver = CompileActorInner::new(driver, root).with_watch(true);
let (server, client) = driver.split();
current_runtime.spawn(server.spawn());
let this = CompileNode::new(diag_group, cfg.position_encoding, handler, client);
let this = CompileActor::new(diag_group, cfg.position_encoding, handler, client);
// todo: less bug-prone code
if let Some(entry) = entry {
@@ -191,266 +95,6 @@ pub fn create_server(
})
}
pub struct CompileClusterActor {
host: LspHost,
diag_rx: mpsc::UnboundedReceiver<(String, Option<DiagnosticsMap>)>,
diagnostics: HashMap<Url, HashMap<String, Vec<LspDiagnostic>>>,
affect_map: HashMap<String, Vec<Url>>,
published_primary: bool,
}
impl CompileClusterActor {
pub async fn run(mut self) {
loop {
tokio::select! {
e = self.diag_rx.recv() => {
let Some((group, diagnostics)) = e else {
break;
};
info!("received diagnostics from {}: diag({:?})", group, diagnostics.as_ref().map(|e| e.len()));
let with_primary = (self.affect_map.len() <= 1 && self.affect_map.contains_key("primary")) && group == "primary";
self.publish(group, diagnostics, with_primary).await;
if !with_primary {
let again_with_primary = self.affect_map.len() == 1 && self.affect_map.contains_key("primary");
if self.published_primary != again_with_primary {
self.flush_primary_diagnostics(again_with_primary).await;
self.published_primary = again_with_primary;
}
}
}
}
info!("compile cluster actor is stopped");
}
}
pub async fn do_publish_diagnostics(
host: &LspHost,
uri: Url,
diags: Vec<Diagnostic>,
version: Option<i32>,
ignored: bool,
) {
if ignored {
return;
}
host.publish_diagnostics(uri, diags, version)
}
async fn flush_primary_diagnostics(&mut self, enable: bool) {
let affected = self.affect_map.get("primary");
let tasks = affected.into_iter().flatten().map(|url| {
let path_diags = self.diagnostics.get(url);
let diags = path_diags.into_iter().flatten().filter_map(|(g, diags)| {
if g == "primary" {
return enable.then_some(diags);
}
Some(diags)
});
// todo: .flatten() removed
// let to_publish = diags.flatten().cloned().collect();
let to_publish = diags.flatten().cloned().collect();
Self::do_publish_diagnostics(&self.host, url.clone(), to_publish, None, false)
});
join_all(tasks).await;
}
pub async fn publish(
&mut self,
group: String,
next_diagnostics: Option<DiagnosticsMap>,
with_primary: bool,
) {
let is_primary = group == "primary";
let affected = self.affect_map.get_mut(&group);
let affected = affected.map(std::mem::take);
// Gets sources which had some diagnostic published last time, but not this
// time. The LSP specifies that files will not have diagnostics
// updated, including removed, without an explicit update, so we need
// to send an empty `Vec` of diagnostics to these sources.
// todo: merge
let clear_list = if let Some(n) = next_diagnostics.as_ref() {
affected
.into_iter()
.flatten()
.filter(|e| !n.contains_key(e))
.map(|e| (e, None))
.collect::<Vec<_>>()
} else {
affected
.into_iter()
.flatten()
.map(|e| (e, None))
.collect::<Vec<_>>()
};
let next_affected = if let Some(n) = next_diagnostics.as_ref() {
n.keys().cloned().collect()
} else {
Vec::new()
};
let clear_all = next_diagnostics.is_none();
// Gets touched updates
let update_list = next_diagnostics
.into_iter()
.flatten()
.map(|(x, y)| (x, Some(y)));
let tasks = clear_list.into_iter().chain(update_list);
let tasks = tasks.map(|(url, next)| {
let path_diags = self.diagnostics.entry(url.clone()).or_default();
let rest_all = path_diags
.iter()
.filter_map(|(g, diags)| {
if !with_primary && g == "primary" {
return None;
}
if g != &group {
Some(diags)
} else {
None
}
})
.flatten()
.cloned();
let next_all = next.clone().into_iter().flatten();
let to_publish = rest_all.chain(next_all).collect();
match next {
Some(next) => {
path_diags.insert(group.clone(), next);
}
None => {
path_diags.remove(&group);
}
}
Self::do_publish_diagnostics(
&self.host,
url,
to_publish,
None,
is_primary && !with_primary,
)
});
join_all(tasks).await;
if clear_all {
// We just used the cache, and won't need it again, so we can update it now
self.affect_map.remove(&group);
} else {
// We just used the cache, and won't need it again, so we can update it now
self.affect_map.insert(group, next_affected);
}
}
}
#[derive(Debug, Clone)]
struct MemoryFileMeta {
mt: Time,
content: Source,
}
impl CompileCluster {
fn update_source(&self, files: FileChangeSet) -> Result<(), Error> {
let primary = self.primary.clone();
let main = self.main.clone();
let primary = Some(&primary);
let main = main.lock();
let main = main.as_ref();
let clients_to_notify = (primary.iter()).chain(main.iter());
for client in clients_to_notify {
let iw = client.wait().inner.lock();
iw.add_memory_changes(MemoryEvent::Update(files.clone()));
}
Ok(())
}
pub fn create_source(&self, path: PathBuf, content: String) -> Result<(), Error> {
let now = Time::now();
let path: ImmutPath = path.into();
self.memory_changes.write().insert(
path.clone(),
MemoryFileMeta {
mt: now,
content: Source::detached(content.clone()),
},
);
let content: Bytes = content.as_bytes().into();
log::info!("create source: {:?}", path);
// todo: is it safe to believe that the path is normalized?
let files = FileChangeSet::new_inserts(vec![(path, FileResult::Ok((now, content)).into())]);
self.update_source(files)
}
pub fn remove_source(&self, path: PathBuf) -> Result<(), Error> {
let path: ImmutPath = path.into();
self.memory_changes.write().remove(&path);
log::info!("remove source: {:?}", path);
// todo: is it safe to believe that the path is normalized?
let files = FileChangeSet::new_removes(vec![path]);
self.update_source(files)
}
pub fn edit_source(
&self,
path: PathBuf,
content: Vec<TextDocumentContentChangeEvent>,
position_encoding: PositionEncoding,
) -> Result<(), Error> {
let now = Time::now();
let path: ImmutPath = path.into();
let mut memory_changes = self.memory_changes.write();
let meta = memory_changes
.get_mut(&path)
.ok_or_else(|| error_once!("file missing", path: path.display()))?;
for change in content {
let replacement = change.text;
match change.range {
Some(lsp_range) => {
let range = lsp_to_typst::range(lsp_range, position_encoding, &meta.content)
.expect("invalid range");
meta.content.edit(range, &replacement);
}
None => {
meta.content.replace(&replacement);
}
}
}
meta.mt = now;
let snapshot = FileResult::Ok((now, meta.content.text().as_bytes().into())).into();
drop(memory_changes);
let files = FileChangeSet::new_inserts(vec![(path.clone(), snapshot)]);
self.update_source(files)
}
}
macro_rules! query_state {
($self:ident, $method:ident, $req:expr) => {{
let doc = $self.handler.result.lock().unwrap().clone().ok();
@@ -468,64 +112,6 @@ macro_rules! query_world {
}};
}
macro_rules! query_source {
($self:ident, $method:ident, $req:expr) => {{
let path: ImmutPath = $req.path.clone().into();
let vfs = $self.memory_changes.read();
let snapshot = vfs
.get(&path)
.ok_or_else(|| anyhow!("file missing {:?}", $self.memory_changes))?;
let source = snapshot.content.clone();
let enc = $self.position_encoding;
let res = $req.request(source, enc);
Ok(CompilerQueryResponse::$method(res))
}};
}
macro_rules! query_tokens_cache {
($self:ident, $method:ident, $req:expr) => {{
let path: ImmutPath = $req.path.clone().into();
let vfs = $self.memory_changes.read();
let snapshot = vfs.get(&path).ok_or_else(|| anyhow!("file missing"))?;
let source = snapshot.content.clone();
let enc = $self.position_encoding;
let res = $req.request(&$self.tokens_cache, source, enc);
Ok(CompilerQueryResponse::$method(res))
}};
}
impl CompileCluster {
pub fn query(&self, query: CompilerQueryRequest) -> anyhow::Result<CompilerQueryResponse> {
use CompilerQueryRequest::*;
match query {
SemanticTokensFull(req) => query_tokens_cache!(self, SemanticTokensFull, req),
SemanticTokensDelta(req) => query_tokens_cache!(self, SemanticTokensDelta, req),
FoldingRange(req) => query_source!(self, FoldingRange, req),
SelectionRange(req) => query_source!(self, SelectionRange, req),
DocumentSymbol(req) => query_source!(self, DocumentSymbol, req),
_ => {
let main = self.main.lock();
let query_target = match main.as_ref() {
Some(main) => main,
None => {
// todo: race condition, we need atomic primary query
if let Some(path) = query.associated_path() {
self.primary.wait().change_entry(path.into())?;
}
&self.primary
}
};
query_target.wait().query(query)
}
}
}
}
#[derive(Clone)]
pub struct CompileHandler {
result: Arc<SyncMutex<Result<Arc<TypstDocument>, CompileStatus>>>,
@@ -699,39 +285,39 @@ impl<C: Compiler<World = TypstSystemWorld>, H> Reporter<C, H> {
}
}
pub struct CompileNode<H: CompilationHandle> {
pub struct CompileActor {
diag_group: String,
position_encoding: PositionEncoding,
handler: CompileHandler,
entry: Arc<SyncMutex<Option<ImmutPath>>>,
inner: Mutex<CompileClient<H>>,
pub inner: Mutex<CompileClient<CompileHandler>>,
}
// todo: remove unsafe impl send
/// SAFETY:
/// This is safe because the non-Send types are only used in compile-time
/// hints.
unsafe impl<H: CompilationHandle> Send for CompileNode<H> {}
unsafe impl Send for CompileActor {}
/// SAFETY:
/// This is safe because the non-Sync types are only used in compile-time
/// hints.
unsafe impl<H: CompilationHandle> Sync for CompileNode<H> {}
unsafe impl Sync for CompileActor {}
impl<H: CompilationHandle> CompileNode<H> {
fn inner(&mut self) -> &mut CompileClient<H> {
impl CompileActor {
fn inner(&mut self) -> &mut CompileClient<CompileHandler> {
self.inner.get_mut()
}
/// Steal the compiler thread and run the given function.
pub fn steal<Ret: Send + 'static>(
&self,
f: impl FnOnce(&mut CompileService<H>) -> Ret + Send + 'static,
f: impl FnOnce(&mut CompileService<CompileHandler>) -> Ret + Send + 'static,
) -> ZResult<Ret> {
self.inner.lock().steal(f)
}
// todo: stop main
fn disable(&self) {
pub fn disable(&self) {
let res = self.steal(move |compiler| {
let path = Path::new("detached.typ");
let root = compiler.compiler.world().workspace_root();
@@ -752,7 +338,7 @@ impl<H: CompilationHandle> CompileNode<H> {
}
}
fn change_entry(&self, path: ImmutPath) -> Result<(), Error> {
pub fn change_entry(&self, path: ImmutPath) -> Result<(), Error> {
if !path.is_absolute() {
return Err(error_once!("entry file must be absolute", path: path.display()));
}
@@ -807,7 +393,7 @@ impl<H: CompilationHandle> CompileNode<H> {
}
}
impl<H: CompilationHandle> SourceFileServer for CompileNode<H> {
impl SourceFileServer for CompileActor {
async fn resolve_source_span(
&mut self,
loc: Location,
@@ -853,7 +439,7 @@ impl<H: CompilationHandle> SourceFileServer for CompileNode<H> {
}
}
impl<H: CompilationHandle> EditorServer for CompileNode<H> {
impl EditorServer for CompileActor {
async fn update_memory_files(
&mut self,
files: MemoryFiles,
@@ -890,14 +476,14 @@ impl<H: CompilationHandle> EditorServer for CompileNode<H> {
}
}
impl<H: CompilationHandle> CompileHost for CompileNode<H> {}
impl CompileHost for CompileActor {}
impl<H: CompilationHandle> CompileNode<H> {
impl CompileActor {
fn new(
diag_group: String,
position_encoding: PositionEncoding,
handler: CompileHandler,
inner: CompileClient<H>,
inner: CompileClient<CompileHandler>,
) -> Self {
Self {
diag_group,
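
Throughout this file, create_server hands back a Deferred<CompileActor>: the closure boots the compiler world on a background worker, and callers pay for it only when they first call .wait(). A minimal sketch of the pattern, assuming just the Deferred::new/.wait() API used in this diff:

use typst::util::Deferred;

fn main() {
    // The closure starts running on a worker as soon as `new` returns...
    let actor: Deferred<String> = Deferred::new(|| "compiler ready".to_owned());
    // ...and `.wait()` blocks only if the value is not finished yet.
    println!("{}", actor.wait());
}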

crates/tinymist/src/init.rs (new file, 461 lines)

@@ -0,0 +1,461 @@
use core::fmt;
use std::{collections::HashMap, path::PathBuf};
use anyhow::bail;
use itertools::Itertools;
use lsp_types::*;
use serde::Deserialize;
use serde_json::{Map, Value as JsonValue};
use tinymist_query::{get_semantic_tokens_options, PositionEncoding};
use tokio::sync::mpsc;
use crate::actor::cluster::CompileClusterActor;
use crate::{invalid_params, LspHost, LspResult, TypstLanguageServer};
trait InitializeParamsExt {
fn position_encodings(&self) -> &[PositionEncodingKind];
fn supports_config_change_registration(&self) -> bool;
fn semantic_tokens_capabilities(&self) -> Option<&SemanticTokensClientCapabilities>;
fn document_formatting_capabilities(&self) -> Option<&DocumentFormattingClientCapabilities>;
fn supports_semantic_tokens_dynamic_registration(&self) -> bool;
fn supports_document_formatting_dynamic_registration(&self) -> bool;
fn line_folding_only(&self) -> bool;
fn root_paths(&self) -> Vec<PathBuf>;
}
impl InitializeParamsExt for InitializeParams {
fn position_encodings(&self) -> &[PositionEncodingKind] {
const DEFAULT_ENCODING: &[PositionEncodingKind; 1] = &[PositionEncodingKind::UTF16];
self.capabilities
.general
.as_ref()
.and_then(|general| general.position_encodings.as_ref())
.map(|encodings| encodings.as_slice())
.unwrap_or(DEFAULT_ENCODING)
}
fn supports_config_change_registration(&self) -> bool {
self.capabilities
.workspace
.as_ref()
.and_then(|workspace| workspace.configuration)
.unwrap_or(false)
}
fn line_folding_only(&self) -> bool {
self.capabilities
.text_document
.as_ref()
.and_then(|workspace| workspace.folding_range.as_ref())
.and_then(|folding| folding.line_folding_only)
.unwrap_or(false)
}
fn semantic_tokens_capabilities(&self) -> Option<&SemanticTokensClientCapabilities> {
self.capabilities
.text_document
.as_ref()?
.semantic_tokens
.as_ref()
}
fn document_formatting_capabilities(&self) -> Option<&DocumentFormattingClientCapabilities> {
self.capabilities
.text_document
.as_ref()?
.formatting
.as_ref()
}
fn supports_semantic_tokens_dynamic_registration(&self) -> bool {
self.semantic_tokens_capabilities()
.and_then(|semantic_tokens| semantic_tokens.dynamic_registration)
.unwrap_or(false)
}
fn supports_document_formatting_dynamic_registration(&self) -> bool {
self.document_formatting_capabilities()
.and_then(|document_format| document_format.dynamic_registration)
.unwrap_or(false)
}
#[allow(deprecated)] // `self.root_path` is marked as deprecated
fn root_paths(&self) -> Vec<PathBuf> {
match self.workspace_folders.as_ref() {
Some(roots) => roots
.iter()
.map(|root| &root.uri)
.map(Url::to_file_path)
.collect::<Result<Vec<_>, _>>()
.unwrap(),
None => self
.root_uri
.as_ref()
.map(|uri| uri.to_file_path().unwrap())
.or_else(|| self.root_path.clone().map(PathBuf::from))
.into_iter()
.collect(),
}
}
}
/// The mode of the experimental formatter.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum ExperimentalFormatterMode {
/// Disable the experimental formatter.
#[default]
Disable,
/// Enable the experimental formatter.
Enable,
}
/// The mode of PDF export.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum ExportPdfMode {
/// Don't export PDF automatically.
Never,
/// Export PDF on saving the document, i.e. on `textDocument/didSave`
/// events.
#[default]
OnSave,
/// Export PDF on typing, i.e. on `textDocument/didChange` events.
OnType,
}
/// The mode of semantic tokens.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum SemanticTokensMode {
/// Disable the semantic tokens.
Disable,
/// Enable the semantic tokens.
#[default]
Enable,
}
type Listener<T> = Box<dyn FnMut(&T) -> anyhow::Result<()>>;
const CONFIG_ITEMS: &[&str] = &[
"exportPdf",
"rootPath",
"semanticTokens",
"experimentalFormatterMode",
];
/// The user configuration read from the editor.
#[derive(Default)]
pub struct Config {
/// The mode of PDF export.
pub export_pdf: ExportPdfMode,
/// Specifies the root path of the project manually.
pub root_path: Option<PathBuf>,
/// Dynamic configuration for semantic tokens.
pub semantic_tokens: SemanticTokensMode,
/// Dynamic configuration for the experimental formatter.
pub formatter: ExperimentalFormatterMode,
semantic_tokens_listeners: Vec<Listener<SemanticTokensMode>>,
formatter_listeners: Vec<Listener<ExperimentalFormatterMode>>,
}
impl Config {
/// Gets items for serialization.
pub fn get_items() -> Vec<ConfigurationItem> {
let sections = CONFIG_ITEMS
.iter()
.flat_map(|item| [format!("tinymist.{item}"), item.to_string()]);
sections
.map(|section| ConfigurationItem {
section: Some(section),
..Default::default()
})
.collect()
}
/// Converts values to a map.
pub fn values_to_map(values: Vec<JsonValue>) -> Map<String, JsonValue> {
let unpaired_values = values
.into_iter()
.tuples()
.map(|(a, b)| if !a.is_null() { a } else { b });
CONFIG_ITEMS
.iter()
.map(|item| item.to_string())
.zip(unpaired_values)
.collect()
}
/// Updates the configuration with a JSON object.
///
/// # Errors
/// Errors if the update is invalid.
pub fn update(&mut self, update: &JsonValue) -> anyhow::Result<()> {
if let JsonValue::Object(update) = update {
self.update_by_map(update)
} else {
bail!("got invalid configuration object {update}")
}
}
/// Updates the configuration with a map.
///
/// # Errors
/// Errors if the update is invalid.
pub fn update_by_map(&mut self, update: &Map<String, JsonValue>) -> anyhow::Result<()> {
let export_pdf = update
.get("exportPdf")
.map(ExportPdfMode::deserialize)
.and_then(Result::ok);
if let Some(export_pdf) = export_pdf {
self.export_pdf = export_pdf;
}
let root_path = update.get("rootPath");
if let Some(root_path) = root_path {
if root_path.is_null() {
self.root_path = None;
}
if let Some(root_path) = root_path.as_str().map(PathBuf::from) {
self.root_path = Some(root_path);
}
}
let semantic_tokens = update
.get("semanticTokens")
.map(SemanticTokensMode::deserialize)
.and_then(Result::ok);
if let Some(semantic_tokens) = semantic_tokens {
for listener in &mut self.semantic_tokens_listeners {
listener(&semantic_tokens)?;
}
self.semantic_tokens = semantic_tokens;
}
let formatter = update
.get("experimentalFormatterMode")
.map(ExperimentalFormatterMode::deserialize)
.and_then(Result::ok);
if let Some(formatter) = formatter {
for listener in &mut self.formatter_listeners {
listener(&formatter)?;
}
self.formatter = formatter;
}
Ok(())
}
pub(crate) fn listen_semantic_tokens(&mut self, listener: Listener<SemanticTokensMode>) {
self.semantic_tokens_listeners.push(listener);
}
// pub fn listen_formatting(&mut self, listener:
// Listener<ExperimentalFormatterMode>) { self.formatter_listeners.
// push(listener); }
}
impl fmt::Debug for Config {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("Config")
.field("export_pdf", &self.export_pdf)
.field("formatter", &self.formatter)
.field("semantic_tokens", &self.semantic_tokens)
.field(
"semantic_tokens_listeners",
&format_args!("Vec[len = {}]", self.semantic_tokens_listeners.len()),
)
.field(
"formatter_listeners",
&format_args!("Vec[len = {}]", self.formatter_listeners.len()),
)
.finish()
}
}
/// Configuration set at initialization that won't change within a single
/// session
#[derive(Debug, Clone)]
pub struct ConstConfig {
/// The position encoding, either UTF-8 or UTF-16.
/// Defaults to UTF-16 if not specified.
pub position_encoding: PositionEncoding,
/// Whether the client supports dynamic registration of semantic tokens.
pub supports_semantic_tokens_dynamic_registration: bool,
/// Whether the client supports dynamic registration of document formatting.
pub supports_document_formatting_dynamic_registration: bool,
/// Whether the client supports dynamic registration of configuration
/// changes.
pub supports_config_change_registration: bool,
/// Whether the client only supports line folding.
pub line_folding_only: bool,
}
impl ConstConfig {
fn choose_encoding(params: &InitializeParams) -> PositionEncoding {
let encodings = params.position_encodings();
if encodings.contains(&PositionEncodingKind::UTF8) {
PositionEncoding::Utf8
} else {
PositionEncoding::Utf16
}
}
}
impl From<&InitializeParams> for ConstConfig {
fn from(params: &InitializeParams) -> Self {
Self {
position_encoding: Self::choose_encoding(params),
supports_semantic_tokens_dynamic_registration: params
.supports_semantic_tokens_dynamic_registration(),
supports_document_formatting_dynamic_registration: params
.supports_document_formatting_dynamic_registration(),
supports_config_change_registration: params.supports_config_change_registration(),
line_folding_only: params.line_folding_only(),
}
}
}
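
choose_encoding prefers UTF-8 whenever the client offers it because LSP positions count code units of the negotiated encoding, and the two encodings disagree as soon as a line contains non-ASCII text. A standalone sketch (the sample string is illustrative):

fn main() {
    let line = "a€b";
    // Column of 'b' in UTF-8 code units (bytes): 'a' is 1, '€' is 3.
    let utf8_col = line.char_indices().find(|&(_, c)| c == 'b').unwrap().0;
    // Column of 'b' in UTF-16 code units: 'a' is 1, '€' is 1.
    let utf16_col: usize = line
        .chars()
        .take_while(|&c| c != 'b')
        .map(char::len_utf16)
        .sum();
    assert_eq!((utf8_col, utf16_col), (4, 2));
}
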
pub struct Init {
pub host: LspHost,
}
impl Init {
/// The [`initialize`] request is the first request sent from the client to
/// the server.
///
/// [`initialize`]: https://microsoft.github.io/language-server-protocol/specification#initialize
///
/// This method is guaranteed to only execute once. If the client sends this
/// request to the server again, the server will respond with JSON-RPC
/// error code `-32600` (invalid request).
///
/// # Panics
/// Panics if the const configuration is already initialized.
/// Panics if the cluster is already initialized.
///
/// # Errors
/// Errors if the configuration could not be updated.
pub fn initialize(
self,
params: InitializeParams,
) -> (TypstLanguageServer, LspResult<InitializeResult>) {
// self.tracing_init();
// Initialize configurations
let cc = ConstConfig::from(&params);
let mut config = Config::default();
// Bootstrap server
let (diag_tx, diag_rx) = mpsc::unbounded_channel();
let service =
TypstLanguageServer::new(self.host.clone(), params.root_paths(), &cc, diag_tx);
if let Some(init) = &params.initialization_options {
if let Err(err) = config
.update(init)
.as_ref()
.map_err(ToString::to_string)
.map_err(invalid_params)
{
return (service, Err(err));
}
}
let cluster_actor = CompileClusterActor {
host: self.host.clone(),
diag_rx,
diagnostics: HashMap::new(),
affect_map: HashMap::new(),
published_primary: false,
};
let primary = service.server("primary".to_owned(), None);
service.primary.get_or_init(|| primary);
// Run the cluster in the background after we reference it
tokio::spawn(cluster_actor.run());
// Respond to the host (LSP client)
let semantic_tokens_provider = match config.semantic_tokens {
SemanticTokensMode::Enable
if !params.supports_semantic_tokens_dynamic_registration() =>
{
Some(get_semantic_tokens_options().into())
}
_ => None,
};
let document_formatting_provider = match config.formatter {
ExperimentalFormatterMode::Enable
if !params.supports_document_formatting_dynamic_registration() =>
{
Some(OneOf::Left(true))
}
_ => None,
};
let res = InitializeResult {
capabilities: ServerCapabilities {
hover_provider: Some(HoverProviderCapability::Simple(true)),
signature_help_provider: Some(SignatureHelpOptions {
trigger_characters: Some(vec!["(".to_string(), ",".to_string()]),
retrigger_characters: None,
work_done_progress_options: WorkDoneProgressOptions {
work_done_progress: None,
},
}),
definition_provider: Some(OneOf::Left(true)),
completion_provider: Some(CompletionOptions {
trigger_characters: Some(vec![
String::from("#"),
String::from("."),
String::from("@"),
]),
..Default::default()
}),
text_document_sync: Some(TextDocumentSyncCapability::Options(
TextDocumentSyncOptions {
open_close: Some(true),
change: Some(TextDocumentSyncKind::INCREMENTAL),
save: Some(TextDocumentSyncSaveOptions::Supported(true)),
..Default::default()
},
)),
semantic_tokens_provider,
execute_command_provider: Some(ExecuteCommandOptions {
commands: service.exec_cmds.keys().map(ToString::to_string).collect(),
work_done_progress_options: WorkDoneProgressOptions {
work_done_progress: None,
},
}),
document_symbol_provider: Some(OneOf::Left(true)),
workspace_symbol_provider: Some(OneOf::Left(true)),
selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)),
rename_provider: Some(OneOf::Right(RenameOptions {
prepare_provider: Some(true),
work_done_progress_options: WorkDoneProgressOptions {
work_done_progress: None,
},
})),
folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)),
workspace: Some(WorkspaceServerCapabilities {
workspace_folders: Some(WorkspaceFoldersServerCapabilities {
supported: Some(true),
change_notifications: Some(OneOf::Left(true)),
}),
..Default::default()
}),
document_formatting_provider,
inlay_hint_provider: Some(OneOf::Left(true)),
..Default::default()
},
..Default::default()
};
(service, Ok(res))
}
}
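
Worth noting how the configuration round-trip above fits together: get_items requests each setting twice, once as tinymist.<item> and once bare, and values_to_map pairs the answers back up, keeping the first non-null value of each pair. A hypothetical check of that pairing (assuming Config stays reachable as tinymist::init::Config):

use serde_json::json;
use tinymist::init::Config;

fn main() {
    // Two values per CONFIG_ITEMS entry: ("tinymist.<item>", then "<item>").
    let values = vec![
        json!(null), json!("onSave"),      // exportPdf
        json!("/ws"), json!(null),         // rootPath
        json!(null), json!(null),          // semanticTokens
        json!("enable"), json!("disable"), // experimentalFormatterMode
    ];
    let map = Config::values_to_map(values);
    assert_eq!(map["exportPdf"], json!("onSave"));
    assert_eq!(map["rootPath"], json!("/ws"));
    assert_eq!(map["experimentalFormatterMode"], json!("enable"));
}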

@@ -28,6 +28,8 @@
// pub mod formatting;
mod actor;
pub mod init;
mod query;
pub mod transport;
use core::fmt;
@@ -36,10 +38,10 @@ use std::sync::Arc;
use std::time::Instant;
use std::{collections::HashMap, path::PathBuf};
use anyhow::{bail, Context};
use actor::typst::CompileActor;
use anyhow::Context;
use crossbeam_channel::select;
use crossbeam_channel::Receiver;
use itertools::Itertools;
use log::{debug, error, info, trace, warn};
use lsp_server::{ErrorCode, Message, Notification, Request, ResponseError};
use lsp_types::notification::{Notification as NotificationTrait, PublishDiagnostics};
@@ -48,14 +50,16 @@ use lsp_types::*;
use once_cell::sync::OnceCell;
use parking_lot::{Mutex, RwLock};
use paste::paste;
use serde::Deserialize;
use query::MemoryFileMeta;
use serde_json::{Map, Value as JsonValue};
use tinymist_query::{
get_semantic_tokens_options, get_semantic_tokens_registration,
get_semantic_tokens_unregistration, PositionEncoding,
get_semantic_tokens_unregistration, DiagnosticsMap, PositionEncoding, SemanticTokenCache,
};
use tokio::sync::mpsc;
use typst::util::Deferred;
use crate::actor::typst::CompileCluster;
use crate::init::*;
// Enforces drop order
pub struct Handle<H, C> {
@@ -245,11 +249,10 @@ macro_rules! run_query {
use tinymist_query::*;
let req = paste! { [<$query Request>] { $($arg_key),+ } };
$self
.universe()
.query(CompilerQueryRequest::$query(req.clone()))
.map_err(|err| {
error!("error getting $query: {err} with request {req:?}");
internal_error()
internal_error("Internal error")
})
.map(|resp| {
let CompilerQueryResponse::$query(resp) = resp else {
@@ -281,27 +284,45 @@ pub struct TypstLanguageServer {
/// Regular commands for dispatching.
pub regular_cmds: RegularCmdMap,
/// User configuration from the editor.
pub config: Arc<RwLock<Config>>,
pub config: Config,
/// Const configuration initialized at the start of the session.
/// For example, the position encoding.
pub const_config: OnceCell<ConstConfig>,
/// The compiler cluster.
pub universe: OnceCell<CompileCluster>,
diag_tx: mpsc::UnboundedSender<(String, Option<DiagnosticsMap>)>,
roots: Vec<PathBuf>,
position_encoding: PositionEncoding,
memory_changes: RwLock<HashMap<Arc<Path>, MemoryFileMeta>>,
primary: OnceCell<Deferred<CompileActor>>,
main: Arc<Mutex<Option<Deferred<CompileActor>>>>,
tokens_cache: SemanticTokenCache,
}
/// Getters and the main loop.
impl TypstLanguageServer {
/// Create a new language server.
pub fn new(client: LspHost) -> Self {
pub fn new(
client: LspHost,
roots: Vec<PathBuf>,
cfg: &ConstConfig,
diag_tx: mpsc::UnboundedSender<(String, Option<DiagnosticsMap>)>,
) -> Self {
Self {
client,
client: client.clone(),
shutdown_requested: false,
config: Default::default(),
const_config: Default::default(),
universe: Default::default(),
exec_cmds: Self::get_exec_commands(),
regular_cmds: Self::get_regular_cmds(),
notify_cmds: Self::get_notify_cmds(),
diag_tx,
roots,
position_encoding: cfg.position_encoding,
memory_changes: RwLock::new(HashMap::new()),
primary: OnceCell::new(),
main: Arc::new(Mutex::new(None)),
tokens_cache: Default::default(),
}
}
@@ -315,12 +336,12 @@ impl TypstLanguageServer {
.expect("const config should be initialized")
}
/// Get the compiler cluster.
///
/// # Panics
/// Panics if the universe is not initialized.
pub fn universe(&self) -> &CompileCluster {
self.universe.get().expect("universe should be initialized")
fn primary_deferred(&self) -> &Deferred<CompileActor> {
self.primary.get().expect("primary")
}
fn primary(&self) -> &CompileActor {
self.primary_deferred().wait()
}
#[rustfmt::skip]
@@ -509,133 +530,6 @@ impl TypstLanguageServer {
///
/// [Language Server Protocol]: https://microsoft.github.io/language-server-protocol/
impl TypstLanguageServer {
/// The [`initialize`] request is the first request sent from the client to
/// the server.
///
/// [`initialize`]: https://microsoft.github.io/language-server-protocol/specification#initialize
///
/// This method is guaranteed to only execute once. If the client sends this
/// request to the server again, the server will respond with JSON-RPC
/// error code `-32600` (invalid request).
///
/// # Panics
/// Panics if the const configuration is already initialized.
/// Panics if the cluster is already initialized.
///
/// # Errors
/// Errors if the configuration could not be updated.
pub fn initialize(&self, params: InitializeParams) -> LspResult<InitializeResult> {
// self.tracing_init();
// Initialize configurations
let cc = &self.const_config;
cc.set(ConstConfig::from(&params))
.expect("const config is initialized");
let cc = cc.get().expect("const config is not initialized").clone();
if let Some(init) = &params.initialization_options {
let mut config = self.config.write();
config
.update(init)
.as_ref()
.map_err(ToString::to_string)
.map_err(invalid_params)?;
}
// Bootstrap actors
let actor_factory = actor::ActorFactory::new(cc);
// Bootstrap the cluster
let cluster = actor_factory.prepare_cluster(self.client.clone(), params.root_paths());
let (cluster, cluster_bg) = cluster.split();
self.universe
.set(cluster)
.map_err(|_| ())
.expect("the cluster is already initialized");
// Run the cluster in the background after we reference it
tokio::spawn(cluster_bg.run());
// Respond to the host (LSP client)
let config = self.config.read();
let semantic_tokens_provider = match config.semantic_tokens {
SemanticTokensMode::Enable
if !params.supports_semantic_tokens_dynamic_registration() =>
{
Some(get_semantic_tokens_options().into())
}
_ => None,
};
let document_formatting_provider = match config.formatter {
ExperimentalFormatterMode::Enable
if !params.supports_document_formatting_dynamic_registration() =>
{
Some(OneOf::Left(true))
}
_ => None,
};
Ok(InitializeResult {
capabilities: ServerCapabilities {
hover_provider: Some(HoverProviderCapability::Simple(true)),
signature_help_provider: Some(SignatureHelpOptions {
trigger_characters: Some(vec!["(".to_string(), ",".to_string()]),
retrigger_characters: None,
work_done_progress_options: WorkDoneProgressOptions {
work_done_progress: None,
},
}),
definition_provider: Some(OneOf::Left(true)),
completion_provider: Some(CompletionOptions {
trigger_characters: Some(vec![
String::from("#"),
String::from("."),
String::from("@"),
]),
..Default::default()
}),
text_document_sync: Some(TextDocumentSyncCapability::Options(
TextDocumentSyncOptions {
open_close: Some(true),
change: Some(TextDocumentSyncKind::INCREMENTAL),
save: Some(TextDocumentSyncSaveOptions::Supported(true)),
..Default::default()
},
)),
semantic_tokens_provider,
execute_command_provider: Some(ExecuteCommandOptions {
commands: self.exec_cmds.keys().map(ToString::to_string).collect(),
work_done_progress_options: WorkDoneProgressOptions {
work_done_progress: None,
},
}),
document_symbol_provider: Some(OneOf::Left(true)),
workspace_symbol_provider: Some(OneOf::Left(true)),
selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)),
rename_provider: Some(OneOf::Right(RenameOptions {
prepare_provider: Some(true),
work_done_progress_options: WorkDoneProgressOptions {
work_done_progress: None,
},
})),
folding_range_provider: Some(FoldingRangeProviderCapability::Simple(true)),
workspace: Some(WorkspaceServerCapabilities {
workspace_folders: Some(WorkspaceFoldersServerCapabilities {
supported: Some(true),
change_notifications: Some(OneOf::Left(true)),
}),
..Default::default()
}),
document_formatting_provider,
inlay_hint_provider: Some(OneOf::Left(true)),
..Default::default()
},
..Default::default()
})
}
/// The [`initialized`] notification is sent from the client to the server
/// after the client received the result of the initialize request but
/// before the client sends anything else.
@@ -644,11 +538,11 @@ impl TypstLanguageServer {
///
/// The server can use the `initialized` notification, for example, to
/// dynamically register capabilities with the client.
pub fn initialized(&self, _: InitializedParams) {
let const_config = self.const_config();
let mut config = self.config.write();
if const_config.supports_semantic_tokens_dynamic_registration {
pub fn initialized(&mut self, _: InitializedParams) {
if self
.const_config()
.supports_semantic_tokens_dynamic_registration
{
trace!("setting up to dynamically register semantic token support");
let client = self.client.clone();
@@ -670,19 +564,20 @@ impl TypstLanguageServer {
.context("could not unregister semantic tokens")
};
if config.semantic_tokens == SemanticTokensMode::Enable {
if self.config.semantic_tokens == SemanticTokensMode::Enable {
if let Some(err) = register().err() {
error!("could not dynamically register semantic tokens: {err}");
}
}
config.listen_semantic_tokens(Box::new(move |mode| match mode {
SemanticTokensMode::Enable => register(),
SemanticTokensMode::Disable => unregister(),
}));
self.config
.listen_semantic_tokens(Box::new(move |mode| match mode {
SemanticTokensMode::Enable => register(),
SemanticTokensMode::Disable => unregister(),
}));
}
if const_config.supports_config_change_registration {
if self.const_config().supports_config_change_registration {
trace!("setting up to request config change notifications");
const CONFIG_REGISTRATION_ID: &str = "config";
@@ -795,7 +690,7 @@ impl TypstLanguageServer {
///
/// # Errors
/// Errors if a provided file URI is not a valid file URI.
pub fn pin_main(&self, arguments: Vec<JsonValue>) -> LspResult<()> {
pub fn pin_main(&mut self, arguments: Vec<JsonValue>) -> LspResult<()> {
let Some(file_uri) = arguments.first().and_then(|v| v.as_str()) else {
return Err(invalid_params("Missing file path as the first argument"));
};
@@ -806,11 +701,41 @@ impl TypstLanguageServer {
Some(Url::parse(file_uri).map_err(|_| invalid_params("Parameter is not a valid URI"))?)
};
let update_result = self.universe().pin_main(file_uri.clone());
let new_entry = file_uri.clone();
let mut m = self.main.lock();
let update_result = match (new_entry, m.is_some()) {
(Some(new_entry), true) => {
let path = new_entry
.to_file_path()
.map_err(|_| invalid_params("invalid url"))?;
let path = path.as_path().into();
m.as_mut().unwrap().wait().change_entry(path)
}
(Some(new_entry), false) => {
let path = new_entry
.to_file_path()
.map_err(|_| invalid_params("invalid url"))?;
let path = path.as_path().into();
let main_node = self.server("main".to_owned(), Some(path));
*m = Some(main_node);
Ok(())
}
(None, true) => {
// todo: unpin main
m.as_mut().unwrap().wait().disable();
Ok(())
}
(None, false) => Ok(()),
};
drop(m);
update_result.map_err(|err| {
error!("could not set main file: {err}");
internal_error()
internal_error("Internal error")
})?;
info!("main file pinned: {main_url:?}", main_url = file_uri);
@@ -832,12 +757,20 @@ impl TypstLanguageServer {
_ => return Err(invalid_params("Path Parameter is not a string or null")),
};
let update_result = self.universe().activate_doc(path.clone());
match path.clone() {
Some(new_entry) => self
.primary()
.change_entry(new_entry)
.map_err(|e| internal_error(e.to_string()))?,
None => {
self.primary().disable();
}
};
update_result.map_err(|err| {
error!("could not set active document: {err}");
internal_error()
})?;
// update_result.map_err(|err| {
// error!("could not set active document: {err}");
// internal_error("Internal error")
// })?;
info!("active document set: {path:?}", path = path);
Ok(())
@@ -850,16 +783,14 @@ impl TypstLanguageServer {
let path = params.text_document.uri.to_file_path().unwrap();
let text = params.text_document.text;
let universe = self.universe();
universe.create_source(path.clone(), text).unwrap();
self.create_source(path.clone(), text).unwrap();
Ok(())
}
fn did_close(&self, params: DidCloseTextDocumentParams) -> LspResult<()> {
let path = params.text_document.uri.to_file_path().unwrap();
let universe = self.universe();
universe.remove_source(path.clone()).unwrap();
self.remove_source(path.clone()).unwrap();
// self.client.publish_diagnostics(uri, Vec::new(), None);
Ok(())
}
@@ -868,9 +799,7 @@ impl TypstLanguageServer {
let path = params.text_document.uri.to_file_path().unwrap();
let changes = params.content_changes;
let universe = self.universe();
universe
.edit_source(path.clone(), changes, self.const_config().position_encoding)
self.edit_source(path.clone(), changes, self.const_config().position_encoding)
.unwrap();
Ok(())
}
@@ -879,18 +808,14 @@ impl TypstLanguageServer {
let uri = params.text_document.uri;
let path = uri.to_file_path().unwrap();
let config = self.config.read();
if config.export_pdf == ExportPdfMode::OnSave {
if self.config.export_pdf == ExportPdfMode::OnSave {
let _ = run_query!(self.OnSaveExport(path));
}
Ok(())
}
fn on_changed_configuration(&self, values: Map<String, JsonValue>) -> LspResult<()> {
let mut config = self.config.write();
match config.update_by_map(&values) {
fn on_changed_configuration(&mut self, values: Map<String, JsonValue>) -> LspResult<()> {
match self.config.update_by_map(&values) {
Ok(()) => {
info!("new settings applied");
}
@@ -902,7 +827,7 @@ impl TypstLanguageServer {
Ok(())
}
fn did_change_configuration(&self, params: DidChangeConfigurationParams) -> LspResult<()> {
fn did_change_configuration(&mut self, params: DidChangeConfigurationParams) -> LspResult<()> {
// For some clients, we don't get the actual changed configuration and need to
// poll for it https://github.com/microsoft/language-server-protocol/issues/676
match params.settings {
@@ -1031,312 +956,6 @@ impl TypstLanguageServer {
}
}
/// The mode of the experimental formatter.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum ExperimentalFormatterMode {
/// Disable the experimental formatter.
#[default]
Disable,
/// Enable the experimental formatter.
Enable,
}
/// The mode of PDF export.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum ExportPdfMode {
/// Don't export PDF automatically.
Never,
/// Export PDF on saving the document, i.e. on `textDocument/didSave`
/// events.
#[default]
OnSave,
/// Export PDF on typing, i.e. on `textDocument/didChange` events.
OnType,
}
/// The mode of semantic tokens.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum SemanticTokensMode {
/// Disable the semantic tokens.
Disable,
/// Enable the semantic tokens.
#[default]
Enable,
}
type Listener<T> = Box<dyn FnMut(&T) -> anyhow::Result<()>>;
const CONFIG_ITEMS: &[&str] = &[
"exportPdf",
"rootPath",
"semanticTokens",
"experimentalFormatterMode",
];
/// The user configuration read from the editor.
#[derive(Default)]
pub struct Config {
/// The mode of PDF export.
pub export_pdf: ExportPdfMode,
/// Specifies the root path of the project manually.
pub root_path: Option<PathBuf>,
/// Dynamic configuration for semantic tokens.
pub semantic_tokens: SemanticTokensMode,
/// Dynamic configuration for the experimental formatter.
pub formatter: ExperimentalFormatterMode,
semantic_tokens_listeners: Vec<Listener<SemanticTokensMode>>,
formatter_listeners: Vec<Listener<ExperimentalFormatterMode>>,
}
impl Config {
/// Gets items for serialization.
pub fn get_items() -> Vec<ConfigurationItem> {
let sections = CONFIG_ITEMS
.iter()
.flat_map(|item| [format!("tinymist.{item}"), item.to_string()]);
sections
.map(|section| ConfigurationItem {
section: Some(section),
..Default::default()
})
.collect()
}
/// Converts values to a map.
pub fn values_to_map(values: Vec<JsonValue>) -> Map<String, JsonValue> {
let unpaired_values = values
.into_iter()
.tuples()
.map(|(a, b)| if !a.is_null() { a } else { b });
CONFIG_ITEMS
.iter()
.map(|item| item.to_string())
.zip(unpaired_values)
.collect()
}
/// Updates the configuration with a JSON object.
///
/// # Errors
/// Errors if the update is invalid.
pub fn update(&mut self, update: &JsonValue) -> anyhow::Result<()> {
if let JsonValue::Object(update) = update {
self.update_by_map(update)
} else {
bail!("got invalid configuration object {update}")
}
}
/// Updates the configuration with a map.
///
/// # Errors
/// Errors if the update is invalid.
pub fn update_by_map(&mut self, update: &Map<String, JsonValue>) -> anyhow::Result<()> {
let export_pdf = update
.get("exportPdf")
.map(ExportPdfMode::deserialize)
.and_then(Result::ok);
if let Some(export_pdf) = export_pdf {
self.export_pdf = export_pdf;
}
let root_path = update.get("rootPath");
if let Some(root_path) = root_path {
if root_path.is_null() {
self.root_path = None;
}
if let Some(root_path) = root_path.as_str().map(PathBuf::from) {
self.root_path = Some(root_path);
}
}
let semantic_tokens = update
.get("semanticTokens")
.map(SemanticTokensMode::deserialize)
.and_then(Result::ok);
if let Some(semantic_tokens) = semantic_tokens {
for listener in &mut self.semantic_tokens_listeners {
listener(&semantic_tokens)?;
}
self.semantic_tokens = semantic_tokens;
}
let formatter = update
.get("experimentalFormatterMode")
.map(ExperimentalFormatterMode::deserialize)
.and_then(Result::ok);
if let Some(formatter) = formatter {
for listener in &mut self.formatter_listeners {
listener(&formatter)?;
}
self.formatter = formatter;
}
Ok(())
}
fn listen_semantic_tokens(&mut self, listener: Listener<SemanticTokensMode>) {
self.semantic_tokens_listeners.push(listener);
}
// pub fn listen_formatting(&mut self, listener:
// Listener<ExperimentalFormatterMode>) { self.formatter_listeners.
// push(listener); }
}
impl fmt::Debug for Config {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.debug_struct("Config")
.field("export_pdf", &self.export_pdf)
.field("formatter", &self.formatter)
.field("semantic_tokens", &self.semantic_tokens)
.field(
"semantic_tokens_listeners",
&format_args!("Vec[len = {}]", self.semantic_tokens_listeners.len()),
)
.field(
"formatter_listeners",
&format_args!("Vec[len = {}]", self.formatter_listeners.len()),
)
.finish()
}
}
/// Configuration set at initialization that won't change within a single
/// session
#[derive(Debug, Clone)]
pub struct ConstConfig {
/// The position encoding, either UTF-8 or UTF-16.
/// Defaults to UTF-16 if not specified.
pub position_encoding: PositionEncoding,
/// Whether the client supports dynamic registration of semantic tokens.
pub supports_semantic_tokens_dynamic_registration: bool,
/// Whether the client supports dynamic registration of document formatting.
pub supports_document_formatting_dynamic_registration: bool,
/// Whether the client supports dynamic registration of configuration
/// changes.
pub supports_config_change_registration: bool,
/// Whether the client only supports line folding.
pub line_folding_only: bool,
}
impl ConstConfig {
fn choose_encoding(params: &InitializeParams) -> PositionEncoding {
let encodings = params.position_encodings();
if encodings.contains(&PositionEncodingKind::UTF8) {
PositionEncoding::Utf8
} else {
PositionEncoding::Utf16
}
}
}
impl From<&InitializeParams> for ConstConfig {
fn from(params: &InitializeParams) -> Self {
Self {
position_encoding: Self::choose_encoding(params),
supports_semantic_tokens_dynamic_registration: params
.supports_semantic_tokens_dynamic_registration(),
supports_document_formatting_dynamic_registration: params
.supports_document_formatting_dynamic_registration(),
supports_config_change_registration: params.supports_config_change_registration(),
line_folding_only: params.line_folding_only(),
}
}
}
trait InitializeParamsExt {
fn position_encodings(&self) -> &[PositionEncodingKind];
fn supports_config_change_registration(&self) -> bool;
fn semantic_tokens_capabilities(&self) -> Option<&SemanticTokensClientCapabilities>;
fn document_formatting_capabilities(&self) -> Option<&DocumentFormattingClientCapabilities>;
fn supports_semantic_tokens_dynamic_registration(&self) -> bool;
fn supports_document_formatting_dynamic_registration(&self) -> bool;
fn line_folding_only(&self) -> bool;
fn root_paths(&self) -> Vec<PathBuf>;
}
impl InitializeParamsExt for InitializeParams {
fn position_encodings(&self) -> &[PositionEncodingKind] {
const DEFAULT_ENCODING: &[PositionEncodingKind; 1] = &[PositionEncodingKind::UTF16];
self.capabilities
.general
.as_ref()
.and_then(|general| general.position_encodings.as_ref())
.map(|encodings| encodings.as_slice())
.unwrap_or(DEFAULT_ENCODING)
}
fn supports_config_change_registration(&self) -> bool {
self.capabilities
.workspace
.as_ref()
.and_then(|workspace| workspace.configuration)
.unwrap_or(false)
}
fn line_folding_only(&self) -> bool {
self.capabilities
.text_document
.as_ref()
.and_then(|workspace| workspace.folding_range.as_ref())
.and_then(|folding| folding.line_folding_only)
.unwrap_or(false)
}
fn semantic_tokens_capabilities(&self) -> Option<&SemanticTokensClientCapabilities> {
self.capabilities
.text_document
.as_ref()?
.semantic_tokens
.as_ref()
}
fn document_formatting_capabilities(&self) -> Option<&DocumentFormattingClientCapabilities> {
self.capabilities
.text_document
.as_ref()?
.formatting
.as_ref()
}
fn supports_semantic_tokens_dynamic_registration(&self) -> bool {
self.semantic_tokens_capabilities()
.and_then(|semantic_tokens| semantic_tokens.dynamic_registration)
.unwrap_or(false)
}
fn supports_document_formatting_dynamic_registration(&self) -> bool {
self.document_formatting_capabilities()
.and_then(|document_format| document_format.dynamic_registration)
.unwrap_or(false)
}
#[allow(deprecated)] // `self.root_path` is marked as deprecated
fn root_paths(&self) -> Vec<PathBuf> {
match self.workspace_folders.as_ref() {
Some(roots) => roots
.iter()
.map(|root| &root.uri)
.map(Url::to_file_path)
.collect::<Result<Vec<_>, _>>()
.unwrap(),
None => self
.root_uri
.as_ref()
.map(|uri| uri.to_file_path().unwrap())
.or_else(|| self.root_path.clone().map(PathBuf::from))
.into_iter()
.collect(),
}
}
}
fn invalid_params(msg: impl Into<String>) -> ResponseError {
ResponseError {
code: ErrorCode::InvalidParams as i32,
@@ -1345,10 +964,10 @@ fn invalid_params(msg: impl Into<String>) -> ResponseError {
}
}
fn internal_error() -> ResponseError {
fn internal_error(msg: impl Into<String>) -> ResponseError {
ResponseError {
code: ErrorCode::InternalError as i32,
message: "Internal error".to_string(),
message: msg.into(),
data: None,
}
}
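
The // Enforces drop order comment on Handle earlier in this file works because Rust drops struct fields in declaration order, so whatever is declared first is torn down first. A toy demonstration with hypothetical field names:

struct Noisy(&'static str);

impl Drop for Noisy {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

struct Demo {
    handle: Noisy,     // declared first, dropped first
    connection: Noisy, // dropped second
}

fn main() {
    let _demo = Demo {
        handle: Noisy("handle"),
        connection: Noisy("connection"),
    };
    // Prints "dropping handle", then "dropping connection".
}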

@@ -8,7 +8,7 @@ use clap::Parser;
use log::{info, trace, warn};
use lsp_types::{InitializeParams, InitializedParams};
use serde::de::DeserializeOwned;
use tinymist::{transport::io_transport, LspHost, TypstLanguageServer};
use tinymist::{init::Init, transport::io_transport, LspHost};
use crate::args::CliArguments;
@@ -88,9 +88,8 @@ async fn main() -> anyhow::Result<()> {
let initialize_params = from_json::<InitializeParams>("InitializeParams", &initialize_params)?;
let host = LspHost::new(connection.sender);
let mut service = TypstLanguageServer::new(host.clone());
let initialize_result = service.initialize(initialize_params.clone());
let (mut service, initialize_result) =
Init { host: host.clone() }.initialize(initialize_params.clone());
// todo: better send
host.complete_request(

@@ -0,0 +1,169 @@
//! Bootstrap actors for Tinymist.
use std::path::PathBuf;
use ::typst::{diag::FileResult, syntax::Source};
use anyhow::anyhow;
use lsp_types::TextDocumentContentChangeEvent;
use tinymist_query::{lsp_to_typst, CompilerQueryRequest, CompilerQueryResponse, PositionEncoding};
use typst_ts_compiler::{
vfs::notify::{FileChangeSet, MemoryEvent},
Time,
};
use typst_ts_core::{error::prelude::*, Bytes, Error, ImmutPath};
use crate::TypstLanguageServer;
#[derive(Debug, Clone)]
pub struct MemoryFileMeta {
mt: Time,
content: Source,
}
impl TypstLanguageServer {
fn update_source(&self, files: FileChangeSet) -> Result<(), Error> {
let main = self.main.clone();
let primary = Some(self.primary_deferred());
let main = main.lock();
let main = main.as_ref();
let clients_to_notify = (primary.iter()).chain(main.iter());
for client in clients_to_notify {
let iw = client.wait().inner.lock();
iw.add_memory_changes(MemoryEvent::Update(files.clone()));
}
Ok(())
}
pub fn create_source(&self, path: PathBuf, content: String) -> Result<(), Error> {
let now = Time::now();
let path: ImmutPath = path.into();
self.memory_changes.write().insert(
path.clone(),
MemoryFileMeta {
mt: now,
content: Source::detached(content.clone()),
},
);
let content: Bytes = content.as_bytes().into();
log::info!("create source: {:?}", path);
// todo: is it safe to believe that the path is normalized?
let files = FileChangeSet::new_inserts(vec![(path, FileResult::Ok((now, content)).into())]);
self.update_source(files)
}
pub fn remove_source(&self, path: PathBuf) -> Result<(), Error> {
let path: ImmutPath = path.into();
self.memory_changes.write().remove(&path);
log::info!("remove source: {:?}", path);
// todo: is it safe to believe that the path is normalized?
let files = FileChangeSet::new_removes(vec![path]);
self.update_source(files)
}
pub fn edit_source(
&self,
path: PathBuf,
content: Vec<TextDocumentContentChangeEvent>,
position_encoding: PositionEncoding,
) -> Result<(), Error> {
let now = Time::now();
let path: ImmutPath = path.into();
let mut memory_changes = self.memory_changes.write();
let meta = memory_changes
.get_mut(&path)
.ok_or_else(|| error_once!("file missing", path: path.display()))?;
for change in content {
let replacement = change.text;
match change.range {
Some(lsp_range) => {
let range = lsp_to_typst::range(lsp_range, position_encoding, &meta.content)
.expect("invalid range");
meta.content.edit(range, &replacement);
}
None => {
meta.content.replace(&replacement);
}
}
}
meta.mt = now;
let snapshot = FileResult::Ok((now, meta.content.text().as_bytes().into())).into();
drop(memory_changes);
let files = FileChangeSet::new_inserts(vec![(path.clone(), snapshot)]);
self.update_source(files)
}
}
macro_rules! query_source {
($self:ident, $method:ident, $req:expr) => {{
let path: ImmutPath = $req.path.clone().into();
let vfs = $self.memory_changes.read();
let snapshot = vfs
.get(&path)
.ok_or_else(|| anyhow!("file missing {:?}", $self.memory_changes))?;
let source = snapshot.content.clone();
let enc = $self.position_encoding;
let res = $req.request(source, enc);
Ok(CompilerQueryResponse::$method(res))
}};
}
macro_rules! query_tokens_cache {
($self:ident, $method:ident, $req:expr) => {{
let path: ImmutPath = $req.path.clone().into();
let vfs = $self.memory_changes.read();
let snapshot = vfs.get(&path).ok_or_else(|| anyhow!("file missing"))?;
let source = snapshot.content.clone();
let enc = $self.position_encoding;
let res = $req.request(&$self.tokens_cache, source, enc);
Ok(CompilerQueryResponse::$method(res))
}};
}
impl TypstLanguageServer {
pub fn query(&self, query: CompilerQueryRequest) -> anyhow::Result<CompilerQueryResponse> {
use CompilerQueryRequest::*;
match query {
SemanticTokensFull(req) => query_tokens_cache!(self, SemanticTokensFull, req),
SemanticTokensDelta(req) => query_tokens_cache!(self, SemanticTokensDelta, req),
FoldingRange(req) => query_source!(self, FoldingRange, req),
SelectionRange(req) => query_source!(self, SelectionRange, req),
DocumentSymbol(req) => query_source!(self, DocumentSymbol, req),
_ => {
let main = self.main.lock();
let query_target = match main.as_ref() {
Some(main) => main.wait(),
None => {
// todo: race condition, we need atomic primary query
if let Some(path) = query.associated_path() {
self.primary().change_entry(path.into())?;
}
self.primary()
}
};
query_target.query(query)
}
}
}
}
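
edit_source above applies each TextDocumentContentChangeEvent to the cached Source: a change with a range patches just that span, while a change without one replaces the whole text. A minimal sketch of those two paths, assuming only the Source::detached/edit/replace API used in this file:

use typst::syntax::Source;

fn main() {
    let mut src = Source::detached("hello world");
    // Ranged change: replace bytes 0..5 ("hello") only.
    src.edit(0..5, "goodbye");
    assert_eq!(src.text(), "goodbye world");
    // Unranged change: replace the entire document.
    src.replace("fresh");
    assert_eq!(src.text(), "fresh");
}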