Repository: https://github.com/Myriad-Dreamin/tinymist.git
feat: allow tracing typst programs in subprocess (#112)
* feat: add compiler mode
* feat: implement compiler mode
* dev: introduce typst timings
Parent: b84aaec294
Commit: 0114bf4a3b
21 changed files with 2596 additions and 1644 deletions
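For orientation: when CompileArgs::persist is false, the new compile mode performs a single compilation in a subprocess and writes two LSP-style messages to stdout, a tinymistExt/diagnostics notification followed by a response whose result carries the typst_timing export. Below is a minimal sketch of the result shape a caller might expect; the field names come from compiler_main in this diff, while the exact JSON-RPC envelope produced by lsp_server is an assumption.

    // Hedged sketch, not part of this commit: the approximate response that the
    // one-shot compile mode writes to stdout, per `compiler_main` below.
    fn expected_trace_response() -> serde_json::Value {
        serde_json::json!({
            "jsonrpc": "2.0",
            "id": 0,
            // `tracing_data` holds the JSON string produced by `typst_timing::export_json`.
            "result": { "tracing_data": "<timing events as JSON>" }
        })
    }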
@@ -39,6 +39,7 @@ typst.workspace = true
typst-svg.workspace = true
typst-pdf.workspace = true
typst-render.workspace = true
typst-timing.workspace = true
typst-assets = { workspace = true, features = ["fonts"] }

typst-ts-core = { workspace = true, default-features = false, features = [
@@ -6,7 +6,7 @@ pub mod typ_client;
pub mod typ_server;

use tokio::sync::{broadcast, watch};
use typst::{diag::FileResult, util::Deferred};
use typst::util::Deferred;
use typst_ts_compiler::{
    service::CompileDriverImpl,
    vfs::notify::{FileChangeSet, MemoryEvent},
@@ -19,13 +19,14 @@ use self::{
    typ_server::CompileServerActor,
};
use crate::{
    compiler::CompileServer,
    world::{ImmutDict, LspWorld, LspWorldBuilder},
    TypstLanguageServer,
};

type CompileDriverInner = CompileDriverImpl<LspWorld>;

impl TypstLanguageServer {
impl CompileServer {
    pub fn server(
        &self,
        diag_group: String,
@@ -36,7 +37,7 @@ impl TypstLanguageServer {
        let (render_tx, _) = broadcast::channel(10);

        // Run the Export actor before preparing cluster to avoid loss of events
        tokio::spawn(
        self.handle.spawn(
            ExportActor::new(
                doc_rx.clone(),
                render_tx.subscribe(),
@@ -50,19 +51,11 @@ impl TypstLanguageServer {
        );

        // Take all dirty files in memory as the initial snapshot
        let snapshot = FileChangeSet::new_inserts(
            self.memory_changes
                .iter()
                .map(|(path, meta)| {
                    let content = meta.content.clone().text().as_bytes().into();
                    (path.clone(), FileResult::Ok((meta.mt, content)).into())
                })
                .collect(),
        );
        let snapshot = FileChangeSet::default();

        // Create the server
        let inner = Deferred::new({
            let current_runtime = tokio::runtime::Handle::current();
            let current_runtime = self.handle.clone();
            let handler = CompileHandler {
                #[cfg(feature = "preview")]
                inner: std::sync::Arc::new(parking_lot::Mutex::new(None)),
@@ -110,3 +103,14 @@ impl TypstLanguageServer {
        CompileClientActor::new(diag_group, self.config.clone(), entry, inner, render_tx)
    }
}

impl TypstLanguageServer {
    pub fn server(
        &self,
        diag_group: String,
        entry: EntryState,
        inputs: ImmutDict,
    ) -> CompileClientActor {
        self.primary.server(diag_group, entry, inputs)
    }
}
@@ -2,16 +2,15 @@

use std::collections::HashMap;

use futures::future::join_all;
use log::info;
use lsp_types::{Diagnostic, Url};
use lsp_types::Url;
use tinymist_query::{DiagnosticsMap, LspDiagnostic};
use tokio::sync::mpsc;

use crate::LspHost;
use crate::{LspHost, TypstLanguageServer};

pub struct CompileClusterActor {
    pub host: LspHost,
    pub host: LspHost<TypstLanguageServer>,
    pub diag_rx: mpsc::UnboundedReceiver<(String, Option<DiagnosticsMap>)>,

    pub diagnostics: HashMap<Url, HashMap<String, Vec<LspDiagnostic>>>,
@@ -47,24 +46,10 @@ impl CompileClusterActor {
        info!("compile cluster actor is stopped");
    }

    pub async fn do_publish_diagnostics(
        host: &LspHost,
        uri: Url,
        diags: Vec<Diagnostic>,
        version: Option<i32>,
        ignored: bool,
    ) {
        if ignored {
            return;
        }

        host.publish_diagnostics(uri, diags, version)
    }

    async fn flush_primary_diagnostics(&mut self, enable: bool) {
        let affected = self.affect_map.get("primary");

        let tasks = affected.into_iter().flatten().map(|url| {
        for url in affected.into_iter().flatten() {
            let path_diags = self.diagnostics.get(url);

            let diags = path_diags.into_iter().flatten().filter_map(|(g, diags)| {
@@ -75,10 +60,8 @@ impl CompileClusterActor {
            });
            let to_publish = diags.flatten().cloned().collect();

            Self::do_publish_diagnostics(&self.host, url.clone(), to_publish, None, false)
        });

        join_all(tasks).await;
            self.host.publish_diagnostics(url.clone(), to_publish, None);
        }
    }

    pub async fn publish(
@@ -111,7 +94,7 @@ impl CompileClusterActor {
        let next_aff = next_diag.into_iter().flatten().map(|(x, y)| (x, Some(y)));

        let tasks = prev_aff.into_iter().chain(next_aff);
        let tasks = tasks.map(|(url, next)| {
        for (url, next) in tasks {
            // Get the diagnostics from other groups
            let path_diags = self.diagnostics.entry(url.clone()).or_default();
            let rest_all = path_diags
@@ -139,16 +122,10 @@ impl CompileClusterActor {
            }
            }

            Self::do_publish_diagnostics(
                &self.host,
                url,
                to_publish,
                None,
                is_primary && !with_primary,
            )
        });

        join_all(tasks).await;
            if !is_primary || with_primary {
                self.host.publish_diagnostics(url, to_publish, None)
            }
        }

        if clear_all {
            // We just used the cache, and won't need it again, so we can update it now
@@ -56,15 +56,16 @@ use typst_ts_core::{

use super::typ_server::CompileClient as TsCompileClient;
use super::{render::ExportConfig, typ_server::CompileServerActor};
use crate::world::LspWorld;
use crate::{
    actor::render::{OneshotRendering, PathVars, RenderActorRequest},
    compiler_init::CompileConfig,
    utils,
};
use crate::{
    actor::typ_server::EntryStateExt,
    tools::preview::{CompilationHandle, CompileStatus},
};
use crate::{world::LspWorld, Config};

type CompileDriverInner = CompileDriverImpl<LspWorld>;
type CompileService = CompileServerActor<CompileDriver>;
@@ -178,7 +179,7 @@ impl CompileDriver {
        }
    }

    fn run_analysis<T>(
    pub fn run_analysis<T>(
        &mut self,
        f: impl FnOnce(&mut AnalysisContext<'_>) -> T,
    ) -> anyhow::Result<T> {
@@ -233,7 +234,7 @@ impl CompileDriver {

pub struct CompileClientActor {
    diag_group: String,
    config: Config,
    config: CompileConfig,
    entry: Arc<Mutex<EntryState>>,
    inner: Deferred<CompileClient>,
    render_tx: broadcast::Sender<RenderActorRequest>,
@@ -242,7 +243,7 @@ pub struct CompileClientActor {
impl CompileClientActor {
    pub(crate) fn new(
        diag_group: String,
        config: Config,
        config: CompileConfig,
        entry: EntryState,
        inner: Deferred<CompileClient>,
        render_tx: broadcast::Sender<RenderActorRequest>,
@@ -1,38 +1,52 @@
use std::path::PathBuf;

use once_cell::sync::Lazy;
use tinymist::transport::MirrorArgs;

#[cfg(feature = "clap")]
const ENV_PATH_SEP: char = if cfg!(windows) { ';' } else { ':' };
use tinymist::compiler_init::{CompileOnceArgs, FontArgs};

#[derive(Debug, Clone)]
#[cfg_attr(feature = "clap", derive(clap::Parser))]
#[cfg_attr(feature = "clap", clap(name = "tinymist", author, version, about, long_version(LONG_VERSION.as_str())))]
pub struct CliArguments {
    /// Mode of the binary
    #[cfg_attr(
        feature = "clap",
        clap(long, default_value = "server", value_name = "FILE")
    )]
    pub mode: String,
    /// Mirror the stdin to the file
    #[cfg_attr(feature = "clap", clap(long, default_value = "", value_name = "FILE"))]
    pub mirror: String,
    /// Replay input from the file
    #[cfg_attr(feature = "clap", clap(long, default_value = "", value_name = "FILE"))]
    pub replay: String,
    /// Font paths, which doesn't allow for dynamic configuration
    #[cfg_attr(feature = "clap", clap(
        long = "font-path",
        value_name = "DIR",
        action = clap::ArgAction::Append,
        env = "TYPST_FONT_PATHS",
        value_delimiter = ENV_PATH_SEP
    ))]
    pub font_paths: Vec<PathBuf>,
    /// Exclude system fonts
    #[cfg_attr(feature = "clap", clap(subcommand))]
    pub command: Option<Commands>,
}

#[derive(Debug, Clone)]
#[cfg_attr(feature = "clap", derive(clap::Subcommand))]
pub enum Commands {
    /// Run Language Server
    Lsp(LspArgs),
    /// Run Compile Server
    Compile(CompileArgs),
    /// Probe
    Probe,
}

impl Default for Commands {
    fn default() -> Self {
        Self::Lsp(Default::default())
    }
}

#[derive(Debug, Clone, Default)]
#[cfg_attr(feature = "clap", derive(clap::Parser))]
pub struct CompileArgs {
    #[cfg_attr(feature = "clap", clap(long, default_value = "false"))]
    pub no_system_fonts: bool,
    pub persist: bool,
    #[cfg_attr(feature = "clap", clap(flatten))]
    pub mirror: MirrorArgs,
    #[cfg_attr(feature = "clap", clap(flatten))]
    pub compile: CompileOnceArgs,
}

#[derive(Debug, Clone, Default)]
#[cfg_attr(feature = "clap", derive(clap::Parser))]
pub struct LspArgs {
    #[cfg_attr(feature = "clap", clap(flatten))]
    pub mirror: MirrorArgs,
    #[cfg_attr(feature = "clap", clap(flatten))]
    pub font: FontArgs,
}

pub static LONG_VERSION: Lazy<String> = Lazy::new(|| {
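In main.rs below, dispatch moves from the string-valued --mode flag to the Commands subcommands defined above. A hedged sketch of how these argument types are expected to parse, assuming the "clap" feature is enabled; the binary name and file name here are illustrative only:

    // Hedged sketch, not part of this commit: exercising the CLI types above with
    // clap's `parse_from`. Flag and subcommand spellings are taken from the derives above.
    use clap::Parser;

    fn demo_cli_parsing() {
        // `tinymist lsp` runs the language server over stdio.
        let lsp = CliArguments::parse_from(["tinymist", "lsp"]);
        assert!(matches!(lsp.command, Some(Commands::Lsp(_))));

        // `tinymist compile main.typ` runs a one-shot traced compilation.
        let compile = CliArguments::parse_from(["tinymist", "compile", "main.typ"]);
        assert!(matches!(compile.command, Some(Commands::Compile(_))));
    }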
crates/tinymist/src/harness.rs (new file, 283 lines)
@@ -0,0 +1,283 @@
use std::sync::Arc;
use std::time::Instant;

use log::{info, trace, warn};
use lsp_types::InitializedParams;
use parking_lot::RwLock;
use serde::{de::DeserializeOwned, Serialize};

use lsp_server::{Connection, Message, Response};

use lsp_types::notification::PublishDiagnostics;
use lsp_types::request::{RegisterCapability, UnregisterCapability};
use lsp_types::*;
use parking_lot::Mutex;

// Enforces drop order
pub struct Handle<H, C> {
    pub handle: H,
    pub receiver: C,
}

pub type ReqHandler<S> = for<'a> fn(&'a mut S, lsp_server::Response);
type ReqQueue<S> = lsp_server::ReqQueue<(String, Instant), ReqHandler<S>>;

/// The host for the language server, or known as the LSP client.
#[derive(Debug)]
pub struct LspHost<S> {
    sender: Arc<RwLock<Option<crossbeam_channel::Sender<Message>>>>,
    req_queue: Arc<Mutex<ReqQueue<S>>>,
}

impl<S> Clone for LspHost<S> {
    fn clone(&self) -> Self {
        Self {
            sender: self.sender.clone(),
            req_queue: self.req_queue.clone(),
        }
    }
}

impl<S> LspHost<S> {
    /// Creates a new language server host.
    pub fn new(sender: Arc<RwLock<Option<crossbeam_channel::Sender<Message>>>>) -> Self {
        Self {
            sender,
            req_queue: Arc::new(Mutex::new(ReqQueue::default())),
        }
    }

    pub fn send_request<R: lsp_types::request::Request>(
        &self,
        params: R::Params,
        handler: ReqHandler<S>,
    ) {
        let mut req_queue = self.req_queue.lock();
        let sender = self.sender.read();
        let Some(sender) = sender.as_ref() else {
            warn!("closed connection, failed to send request");
            return;
        };
        let request = req_queue
            .outgoing
            .register(R::METHOD.to_owned(), params, handler);
        let Err(res) = sender.send(request.into()) else {
            return;
        };
        warn!("failed to send request: {res:?}");
    }

    pub fn complete_request(&self, service: &mut S, response: lsp_server::Response) {
        let mut req_queue = self.req_queue.lock();
        let Some(handler) = req_queue.outgoing.complete(response.id.clone()) else {
            warn!("received response for unknown request");
            return;
        };
        drop(req_queue);
        handler(service, response)
    }

    pub fn send_notification<N: lsp_types::notification::Notification>(&self, params: N::Params) {
        let not = lsp_server::Notification::new(N::METHOD.to_owned(), params);

        let sender = self.sender.read();
        let Some(sender) = sender.as_ref() else {
            warn!("closed connection, failed to send request");
            return;
        };
        let Err(res) = sender.send(not.into()) else {
            return;
        };
        warn!("failed to send notification: {res:?}");
    }

    pub fn register_request(&self, request: &lsp_server::Request, request_received: Instant) {
        let mut req_queue = self.req_queue.lock();
        info!(
            "handling {} - ({}) at {:0.2?}",
            request.method, request.id, request_received
        );
        req_queue.incoming.register(
            request.id.clone(),
            (request.method.clone(), request_received),
        );
    }
    pub fn respond(&self, response: lsp_server::Response) {
        let mut req_queue = self.req_queue.lock();
        if let Some((method, start)) = req_queue.incoming.complete(response.id.clone()) {
            let sender = self.sender.read();
            let Some(sender) = sender.as_ref() else {
                warn!("closed connection, failed to send request");
                return;
            };

            // if let Some(err) = &response.error {
            //     if err.message.starts_with("server panicked") {
            //         self.poke_rust_analyzer_developer(format!("{}, check the log",
            //         err.message)) }
            // }

            let duration = start.elapsed();
            info!(
                "handled {} - ({}) in {:0.2?}",
                method, response.id, duration
            );
            let Err(res) = sender.send(response.into()) else {
                return;
            };
            warn!("failed to send response: {res:?}");
        }
    }

    pub fn publish_diagnostics(
        &self,
        uri: Url,
        diagnostics: Vec<Diagnostic>,
        version: Option<i32>,
    ) {
        self.send_notification::<PublishDiagnostics>(PublishDiagnosticsParams {
            uri,
            diagnostics,
            version,
        });
    }

    // todo: handle error
    pub fn register_capability(&self, registrations: Vec<Registration>) -> anyhow::Result<()> {
        self.send_request::<RegisterCapability>(RegistrationParams { registrations }, |_, resp| {
            if let Some(err) = resp.error {
                log::error!("failed to register capability: {err:?}");
            }
        });
        Ok(())
    }

    pub fn unregister_capability(
        &self,
        unregisterations: Vec<Unregistration>,
    ) -> anyhow::Result<()> {
        self.send_request::<UnregisterCapability>(
            UnregistrationParams { unregisterations },
            |_, resp| {
                if let Some(err) = resp.error {
                    log::error!("failed to unregister capability: {err:?}");
                }
            },
        );
        Ok(())
    }
}

pub trait LspDriver {
    type InitParams: DeserializeOwned;
    type InitResult: Serialize;
    type InitializedSelf: InitializedLspDriver;

    fn initialize(
        self,
        host: LspHost<Self::InitializedSelf>,
        params: Self::InitParams,
    ) -> (
        Self::InitializedSelf,
        Result<Self::InitResult, lsp_server::ResponseError>,
    );
}

pub trait InitializedLspDriver {
    fn initialized(&mut self, params: InitializedParams);
    fn main_loop(&mut self, receiver: crossbeam_channel::Receiver<Message>) -> anyhow::Result<()>;
}

pub fn lsp_harness<D: LspDriver>(
    driver: D,
    connection: Connection,
    force_exit: &mut bool,
) -> anyhow::Result<()> {
    *force_exit = false;
    // todo: ugly code
    let (initialize_id, initialize_params) = match connection.initialize_start() {
        Ok(it) => it,
        Err(e) => {
            log::error!("failed to initialize: {e}");
            *force_exit = !e.channel_is_disconnected();
            return Err(e.into());
        }
    };
    let request_received = std::time::Instant::now();
    trace!("InitializeParams: {initialize_params}");
    let sender = Arc::new(RwLock::new(Some(connection.sender)));
    let host = LspHost::new(sender.clone());

    let _drop_connection = ForceDrop(sender);

    let req = lsp_server::Request::new(initialize_id, "initialize".to_owned(), initialize_params);
    host.register_request(&req, request_received);
    let lsp_server::Request {
        id: initialize_id,
        params: initialize_params,
        ..
    } = req;

    let initialize_params = from_json::<D::InitParams>("InitializeParams", &initialize_params)?;

    let (mut service, initialize_result) = driver.initialize(host.clone(), initialize_params);

    host.respond(match initialize_result {
        Ok(cap) => Response::new_ok(initialize_id, Some(cap)),
        Err(err) => Response::new_err(initialize_id, err.code, err.message),
    });

    info!("waiting for initialized notification");
    let initialized_ack = match &connection.receiver.recv() {
        Ok(Message::Notification(n)) if n.method == "initialized" => Ok(()),
        Ok(msg) => Err(ProtocolError::new(format!(
            r#"expected initialized notification, got: {msg:?}"#
        ))),
        Err(e) => {
            log::error!("failed to receive initialized notification: {e}");
            Err(ProtocolError::disconnected())
        }
    };
    if let Err(e) = initialized_ack {
        *force_exit = !e.channel_is_disconnected();
        return Err(anyhow::anyhow!(
            "failed to receive initialized notification: {e:?}"
        ));
    }

    service.initialized(InitializedParams {});
    service.main_loop(connection.receiver)
}

#[derive(Debug, Clone, PartialEq)]
pub struct ProtocolError(String, bool);

impl ProtocolError {
    pub(crate) fn new(msg: impl Into<String>) -> Self {
        ProtocolError(msg.into(), false)
    }

    pub(crate) fn disconnected() -> ProtocolError {
        ProtocolError("disconnected channel".into(), true)
    }

    /// Whether this error occured due to a disconnected channel.
    pub fn channel_is_disconnected(&self) -> bool {
        self.1
    }
}

struct ForceDrop<T>(Arc<RwLock<Option<T>>>);
impl<T> Drop for ForceDrop<T> {
    fn drop(&mut self) {
        self.0.write().take();
    }
}

pub fn from_json<T: DeserializeOwned>(
    what: &'static str,
    json: &serde_json::Value,
) -> anyhow::Result<T> {
    serde_json::from_value(json.clone())
        .map_err(|e| anyhow::format_err!("Failed to deserialize {what}: {e}; {json}"))
}
File diff suppressed because it is too large.
@@ -1,11 +1,26 @@
#![doc = include_str!("../README.md")]

mod args;
mod modes;

use std::{path::PathBuf, sync::Arc};

use args::CompileArgs;
use clap::Parser;
use comemo::Prehashed;
use lsp_types::{InitializeParams, InitializedParams};
use parking_lot::RwLock;
use tinymist::{
    compiler_init::{CompileInit, CompileInitializeParams},
    harness::{lsp_harness, InitializedLspDriver, LspDriver, LspHost},
    transport::with_stdio_transport,
    CompileFontOpts, CompileOpts, Init, LspWorld, TypstLanguageServer,
};
use tokio::sync::mpsc;
use typst::{foundations::IntoValue, syntax::Span};
use typst_ts_compiler::service::{Compiler, EntryManager};
use typst_ts_core::TypstDict;

use crate::args::CliArguments;
use crate::args::{CliArguments, Commands, LspArgs};

#[cfg(feature = "dhat-heap")]
#[global_allocator]
@@ -31,14 +46,203 @@ async fn main() -> anyhow::Result<()> {

    // Parse command line arguments
    let args = CliArguments::parse();
    log::info!("Arguments: {:#?}", args);

    match args.mode.as_str() {
        "server" => modes::lsp_main(args),
        "probe" => Ok(()),
        _ => Err(anyhow::anyhow!(
            "unknown mode: {mode}, expected one of: server or probe",
            mode = args.mode,
        )),
    match args.command.unwrap_or_default() {
        Commands::Lsp(args) => lsp_main(args),
        Commands::Compile(args) => compiler_main(args).await,
        Commands::Probe => Ok(()),
    }
}

pub fn lsp_main(args: LspArgs) -> anyhow::Result<()> {
    log::info!("starting generic LSP server: {:#?}", args);

    with_stdio_transport(args.mirror.clone(), |conn, force_exit| {
        lsp_harness(Lsp { args }, conn, force_exit)
    })?;

    log::info!("LSP server did shut down");

    struct Lsp {
        args: LspArgs,
    }

    impl LspDriver for Lsp {
        type InitParams = InitializeParams;
        type InitResult = lsp_types::InitializeResult;
        type InitializedSelf = TypstLanguageServer;

        fn initialize(
            self,
            host: LspHost<Self::InitializedSelf>,
            params: Self::InitParams,
        ) -> (
            Self::InitializedSelf,
            Result<Self::InitResult, lsp_server::ResponseError>,
        ) {
            Init {
                host,
                compile_opts: CompileOpts {
                    font: CompileFontOpts {
                        font_paths: self.args.font.font_paths.clone(),
                        no_system_fonts: self.args.font.no_system_fonts,
                        ..Default::default()
                    },
                    ..Default::default()
                },
            }
            .initialize(params)
        }
    }

    Ok(())
}

pub async fn compiler_main(args: CompileArgs) -> anyhow::Result<()> {
    let (diag_tx, _diag_rx) = mpsc::unbounded_channel();

    let mut input = PathBuf::from(args.compile.input.unwrap());

    let mut root_path = args.compile.root.unwrap_or(PathBuf::from("."));

    if root_path.is_relative() {
        root_path = std::env::current_dir()?.join(root_path);
    }
    if input.is_relative() {
        input = std::env::current_dir()?.join(input);
    }
    if !input.starts_with(&root_path) {
        return Err(anyhow::anyhow!(
            "input file is not within the root path: {input:?} not in {root_path:?}"
        ));
    }

    let inputs = Arc::new(Prehashed::new(if args.compile.inputs.is_empty() {
        TypstDict::default()
    } else {
        let pairs = args.compile.inputs.iter();
        let pairs = pairs.map(|(k, v)| (k.as_str().into(), v.as_str().into_value()));
        pairs.collect()
    }));

    let init = CompileInit {
        handle: tokio::runtime::Handle::current(),
        font: CompileFontOpts {
            font_paths: args.compile.font.font_paths.clone(),
            no_system_fonts: args.compile.font.no_system_fonts,
            ..Default::default()
        },
        diag_tx,
    };
    if args.persist {
        log::info!("starting compile server");

        with_stdio_transport(args.mirror.clone(), |conn, force_exit| {
            lsp_harness(init, conn, force_exit)
        })?;

        log::info!("compile server did shut down");
    } else {
        {
            let (s, _) = crossbeam_channel::unbounded();
            let sender = Arc::new(RwLock::new(Some(s)));
            let host = LspHost::new(sender.clone());

            let _drop_connection = ForceDrop(sender);

            let (mut service, res) = init.initialize(
                host,
                CompileInitializeParams {
                    config: serde_json::json!({
                        "rootPath": root_path,
                    }),
                    position_encoding: None,
                },
            );

            res.unwrap();

            service.initialized(InitializedParams {});

            let entry = service.config.determine_entry(Some(input.as_path().into()));
            let (timings, doc) = service
                .compiler()
                .steal_async(|w, _| {
                    w.compiler.world_mut().mutate_entry(entry).unwrap();
                    w.compiler.world_mut().inputs = inputs;

                    let mut env = Default::default();
                    typst_timing::enable();
                    let res = match w.compiler.pure_compile(&mut env) {
                        Ok(doc) => Ok(doc),
                        Err(errors) => {
                            let diagnostics = w.compiler.compiler.run_analysis(|ctx| {
                                tinymist_query::convert_diagnostics(ctx, errors.iter())
                            });

                            Err(diagnostics.unwrap_or_default())
                        }
                    };

                    let w = w.compiler.world();
                    let mut writer = std::io::BufWriter::new(Vec::new());
                    let _ = typst_timing::export_json(&mut writer, |span| {
                        resolve_span(w, span).unwrap_or_else(|| ("unknown".to_string(), 0))
                    });

                    let s = String::from_utf8(writer.into_inner().unwrap()).unwrap();

                    (s, res)
                })
                .await
                .unwrap();

            let msg = match doc {
                Ok(_doc) => {
                    // let p = typst_pdf::pdf(&_doc, typst::foundations::Smart::Auto, None);
                    // let output: PathBuf = input.with_extension("pdf");
                    // tokio::fs::write(output, p).await.unwrap();

                    lsp_server::Message::Notification(lsp_server::Notification {
                        method: "tinymistExt/diagnostics".to_owned(),
                        params: serde_json::json!([]),
                    })
                    .write(&mut std::io::stdout().lock())
                    .unwrap();
                    lsp_server::Message::Response(lsp_server::Response {
                        id: 0.into(),
                        result: Some(serde_json::json!({
                            "tracing_data": timings,
                        })),
                        error: None,
                    })
                }
                Err(diags) => lsp_server::Message::Notification(lsp_server::Notification {
                    method: "tinymistExt/diagnostics".to_owned(),
                    params: serde_json::json!(diags),
                }),
            };

            msg.write(&mut std::io::stdout().lock()).unwrap();
        }
    }

    Ok(())
}

struct ForceDrop<T>(Arc<RwLock<Option<T>>>);
impl<T> Drop for ForceDrop<T> {
    fn drop(&mut self) {
        self.0.write().take();
    }
}

/// Turns a span into a (file, line) pair.
fn resolve_span(world: &LspWorld, span: Span) -> Option<(String, u32)> {
    use typst::World;
    let id = span.id()?;
    let source = world.source(id).ok()?;
    let range = source.range(span)?;
    let line = source.byte_to_line(range.start)?;
    Some((format!("{id:?}"), line as u32 + 1))
}
@@ -1,191 +0,0 @@
use std::{
    io::{self, BufRead, Read, Write},
    sync::Arc,
};

use log::{info, trace, warn};
use lsp_types::{InitializeParams, InitializedParams};
use parking_lot::RwLock;
use serde::de::DeserializeOwned;
use tinymist::{init::Init, transport::io_transport, CompileFontOpts, CompileOpts, LspHost};

use crate::args::CliArguments;

use lsp_server::{Connection, Message, Response};

pub fn lsp_main(args: CliArguments) -> anyhow::Result<()> {
    // Note that we must have our logging only write out to stderr.
    info!("starting generic LSP server");

    // Set up input and output
    let replay = args.replay.clone();
    let mirror = args.mirror.clone();
    let i = move || -> Box<dyn BufRead> {
        if !replay.is_empty() {
            // Get input from file
            let file = std::fs::File::open(&replay).unwrap();
            let file = std::io::BufReader::new(file);
            Box::new(file)
        } else if mirror.is_empty() {
            // Get input from stdin
            let stdin = std::io::stdin().lock();
            Box::new(stdin)
        } else {
            let file = std::fs::File::create(&mirror).unwrap();
            let stdin = std::io::stdin().lock();
            Box::new(MirrorWriter(stdin, file, std::sync::Once::new()))
        }
    };
    let o = || std::io::stdout().lock();

    // Create the transport. Includes the stdio (stdin and stdout) versions but this
    // could also be implemented to use sockets or HTTP.
    let (sender, receiver, io_threads) = io_transport(i, o);
    let connection = Connection { sender, receiver };

    // Start the LSP server
    let mut force_exit = false;
    lsp(args, connection, &mut force_exit)?;

    if !force_exit {
        io_threads.join()?;
    }
    info!("server did shut down");
    Ok(())
}

fn lsp(args: CliArguments, connection: Connection, force_exit: &mut bool) -> anyhow::Result<()> {
    *force_exit = false;
    // todo: ugly code
    let (initialize_id, initialize_params) = match connection.initialize_start() {
        Ok(it) => it,
        Err(e) => {
            log::error!("failed to initialize: {e}");
            *force_exit = !e.channel_is_disconnected();
            return Err(e.into());
        }
    };
    let request_received = std::time::Instant::now();
    trace!("InitializeParams: {initialize_params}");
    let sender = Arc::new(RwLock::new(Some(connection.sender)));
    let host = LspHost::new(sender.clone());

    let _drop_connection = ForceDrop(sender);

    let req = lsp_server::Request::new(initialize_id, "initialize".to_owned(), initialize_params);
    host.register_request(&req, request_received);
    let lsp_server::Request {
        id: initialize_id,
        params: initialize_params,
        ..
    } = req;

    let initialize_params = from_json::<InitializeParams>("InitializeParams", &initialize_params)?;

    let (mut service, initialize_result) = Init {
        host: host.clone(),
        compile_opts: CompileOpts {
            font: CompileFontOpts {
                font_paths: args.font_paths,
                no_system_fonts: args.no_system_fonts,
                ..Default::default()
            },
            ..Default::default()
        },
    }
    .initialize(initialize_params.clone());

    host.respond(match initialize_result {
        Ok(cap) => Response::new_ok(initialize_id, Some(cap)),
        Err(err) => Response::new_err(initialize_id, err.code, err.message),
    });

    #[derive(Debug, Clone, PartialEq)]
    pub struct ProtocolError(String, bool);

    impl ProtocolError {
        pub(crate) fn new(msg: impl Into<String>) -> Self {
            ProtocolError(msg.into(), false)
        }

        pub(crate) fn disconnected() -> ProtocolError {
            ProtocolError("disconnected channel".into(), true)
        }

        /// Whether this error occured due to a disconnected channel.
        pub fn channel_is_disconnected(&self) -> bool {
            self.1
        }
    }

    info!("waiting for initialized notification");
    let initialized_ack = match &connection.receiver.recv() {
        Ok(Message::Notification(n)) if n.method == "initialized" => Ok(()),
        Ok(msg) => Err(ProtocolError::new(format!(
            r#"expected initialized notification, got: {msg:?}"#
        ))),
        Err(e) => {
            log::error!("failed to receive initialized notification: {e}");
            Err(ProtocolError::disconnected())
        }
    };
    if let Err(e) = initialized_ack {
        *force_exit = !e.channel_is_disconnected();
        return Err(anyhow::anyhow!(
            "failed to receive initialized notification: {e:?}"
        ));
    }

    service.initialized(InitializedParams {});

    service.main_loop(connection.receiver)
}

struct ForceDrop<T>(Arc<RwLock<Option<T>>>);
impl<T> Drop for ForceDrop<T> {
    fn drop(&mut self) {
        self.0.write().take();
    }
}

struct MirrorWriter<R: Read, W: Write>(R, W, std::sync::Once);

impl<R: Read, W: Write> Read for MirrorWriter<R, W> {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        let res = self.0.read(buf)?;

        if let Err(err) = self.1.write_all(&buf[..res]) {
            self.2.call_once(|| {
                warn!("failed to write to mirror: {err}");
            });
        }

        Ok(res)
    }
}

impl<R: Read + BufRead, W: Write> BufRead for MirrorWriter<R, W> {
    fn fill_buf(&mut self) -> io::Result<&[u8]> {
        self.0.fill_buf()
    }

    fn consume(&mut self, amt: usize) {
        let buf = self.0.fill_buf().unwrap();

        if let Err(err) = self.1.write_all(&buf[..amt]) {
            self.2.call_once(|| {
                warn!("failed to write to mirror: {err}");
            });
        }

        self.0.consume(amt);
    }
}

pub fn from_json<T: DeserializeOwned>(
    what: &'static str,
    json: &serde_json::Value,
) -> anyhow::Result<T> {
    serde_json::from_value(json.clone())
        .map_err(|e| anyhow::format_err!("Failed to deserialize {what}: {e}; {json}"))
}
@@ -1,2 +0,0 @@
mod lsp;
pub use lsp::lsp_main;
crates/tinymist/src/server/compiler.rs (new file, 465 lines)
@@ -0,0 +1,465 @@
use core::fmt;
use std::{collections::HashMap, path::Path, time::Instant};

use crossbeam_channel::{select, Receiver};
use log::{error, info, warn};
use lsp_server::{Notification, Request, ResponseError};
use lsp_types::{notification::Notification as _, ExecuteCommandParams};
use paste::paste;
use serde::{Deserialize, Serialize};
use serde_json::{Map, Value as JsonValue};
use tinymist_query::{DiagnosticsMap, ExportKind, PageSelection};
use tokio::sync::mpsc;
use typst::util::Deferred;
use typst_ts_core::ImmutPath;

use crate::{
    actor::{render::ExportConfig, typ_client::CompileClientActor},
    compiler_init::{CompileConfig, CompilerConstConfig},
    harness::InitializedLspDriver,
    internal_error, invalid_params, method_not_found, run_query,
    world::SharedFontResolver,
    LspHost, LspResult,
};

type LspMethod<Res> = fn(srv: &mut CompileServer, args: JsonValue) -> LspResult<Res>;
type LspHandler<Req, Res> = fn(srv: &mut CompileServer, args: Req) -> LspResult<Res>;

type ExecuteCmdMap = HashMap<&'static str, LspHandler<Vec<JsonValue>, JsonValue>>;
type NotifyCmdMap = HashMap<&'static str, LspMethod<()>>;
type RegularCmdMap = HashMap<&'static str, LspMethod<JsonValue>>;

#[macro_export]
macro_rules! exec_fn {
    ($ty: ty, Self::$method: ident, $($arg_key:ident),+ $(,)?) => {{
        const E: $ty = |this, $($arg_key),+| this.$method($($arg_key),+);
        E
    }};
}

#[macro_export]
macro_rules! request_fn {
    ($desc: ty, Self::$method: ident) => {
        (<$desc>::METHOD, {
            const E: LspMethod<JsonValue> = |this, req| {
                let req: <$desc as lsp_types::request::Request>::Params =
                    serde_json::from_value(req).unwrap(); // todo: soft unwrap
                let res = this.$method(req)?;
                let res = serde_json::to_value(res).unwrap(); // todo: soft unwrap
                Ok(res)
            };
            E
        })
    };
}

#[macro_export]
macro_rules! notify_fn {
    ($desc: ty, Self::$method: ident) => {
        (<$desc>::METHOD, {
            const E: LspMethod<()> = |this, input| {
                let input: <$desc as lsp_types::notification::Notification>::Params =
                    serde_json::from_value(input).unwrap(); // todo: soft unwrap
                this.$method(input)
            };
            E
        })
    };
}

pub struct CompileServerArgs {
    pub client: LspHost<CompileServer>,
    pub compile_config: CompileConfig,
    pub const_config: CompilerConstConfig,
    pub diag_tx: mpsc::UnboundedSender<(String, Option<DiagnosticsMap>)>,
    pub font: Deferred<SharedFontResolver>,
    pub handle: tokio::runtime::Handle,
}

/// The object providing the language server functionality.
pub struct CompileServer {
    /// The language server client.
    pub client: LspHost<CompileServer>,

    // Command maps
    /// Extra commands provided with `textDocument/executeCommand`.
    pub exec_cmds: ExecuteCmdMap,
    /// Regular notifications for dispatching.
    pub notify_cmds: NotifyCmdMap,
    /// Regular commands for dispatching.
    pub regular_cmds: RegularCmdMap,

    // State to synchronize with the client.
    /// Whether the server is shutting down.
    pub shutdown_requested: bool,
    // Configurations
    /// User configuration from the editor.
    pub config: CompileConfig,
    /// Const configuration initialized at the start of the session.
    /// For example, the position encoding.
    pub const_config: CompilerConstConfig,
    // /// The default opts for the compiler.
    // pub compile_opts: CompileOnceOpts,
    pub diag_tx: mpsc::UnboundedSender<(String, Option<DiagnosticsMap>)>,
    // pub memory_changes: HashMap<Arc<Path>, MemoryFileMeta>,
    pub font: Deferred<SharedFontResolver>,
    pub compiler: Option<CompileClientActor>,
    pub handle: tokio::runtime::Handle,
}

impl CompileServer {
    pub fn new(args: CompileServerArgs) -> Self {
        let CompileServerArgs {
            client,
            compile_config,
            const_config,
            diag_tx,
            font,
            handle,
        } = args;

        CompileServer {
            client,
            diag_tx,
            shutdown_requested: false,
            config: compile_config,
            const_config,
            font,
            compiler: None,
            handle,

            exec_cmds: Self::get_exec_commands(),
            regular_cmds: Self::get_regular_cmds(),
            notify_cmds: Self::get_notify_cmds(),
        }
    }

    pub fn const_config(&self) -> &CompilerConstConfig {
        &self.const_config
    }

    pub fn compiler(&self) -> &CompileClientActor {
        self.compiler.as_ref().unwrap()
    }

    #[rustfmt::skip]
    fn get_regular_cmds() -> RegularCmdMap {
        use lsp_types::request::*;
        RegularCmdMap::from_iter([
            request_fn!(ExecuteCommand, Self::execute_command),
        ])
    }

    fn get_notify_cmds() -> NotifyCmdMap {
        // todo: .on_sync_mut::<notifs::Cancel>(handlers::handle_cancel)?
        // use lsp_types::notification::*;
        NotifyCmdMap::from_iter([
            // notify_fn!(DidOpenTextDocument, Self::did_open),
            // notify_fn!(DidCloseTextDocument, Self::did_close),
            // notify_fn!(DidChangeTextDocument, Self::did_change),
            // notify_fn!(DidSaveTextDocument, Self::did_save),
            // notify_fn!(DidChangeConfiguration, Self::did_change_configuration),
        ])
    }
}

#[derive(Debug)]
enum Event {
    Lsp(lsp_server::Message),
}

impl fmt::Display for Event {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Event::Lsp(_) => write!(f, "Event::Lsp"),
        }
    }
}

impl InitializedLspDriver for CompileServer {
    fn initialized(&mut self, _params: lsp_types::InitializedParams) {}

    fn main_loop(
        &mut self,
        inbox: crossbeam_channel::Receiver<lsp_server::Message>,
    ) -> anyhow::Result<()> {
        while let Some(event) = self.next_event(&inbox) {
            if matches!(
                &event,
                Event::Lsp(lsp_server::Message::Notification(Notification { method, .. }))
                    if method == lsp_types::notification::Exit::METHOD
            ) {
                return Ok(());
            }
            self.handle_event(event)?;
        }

        warn!("client exited without proper shutdown sequence");
        Ok(())
    }
}

impl CompileServer {
    fn next_event(&self, inbox: &Receiver<lsp_server::Message>) -> Option<Event> {
        select! {
            recv(inbox) -> msg =>
                msg.ok().map(Event::Lsp),
        }
    }

    fn handle_event(&mut self, event: Event) -> anyhow::Result<()> {
        let loop_start = Instant::now();

        // let was_quiescent = self.is_quiescent();
        match event {
            Event::Lsp(msg) => match msg {
                lsp_server::Message::Request(req) => self.on_new_request(loop_start, req),
                lsp_server::Message::Notification(not) => self.on_notification(loop_start, not)?,
                lsp_server::Message::Response(resp) => {
                    self.client.clone().complete_request(self, resp)
                }
            },
        }
        Ok(())
    }

    /// Registers and handles a request. This should only be called once per
    /// incoming request.
    fn on_new_request(&mut self, request_received: Instant, req: Request) {
        self.client.register_request(&req, request_received);
        self.on_request(req);
    }

    /// Handles a request.
    fn on_request(&mut self, req: Request) {
        if self.shutdown_requested {
            self.client.respond(lsp_server::Response::new_err(
                req.id.clone(),
                lsp_server::ErrorCode::InvalidRequest as i32,
                "Shutdown already requested.".to_owned(),
            ));
            return;
        }

        let Some(handler) = self.regular_cmds.get(req.method.as_str()) else {
            warn!("unhandled request: {}", req.method);
            return;
        };

        let result = handler(self, req.params);

        if let Ok(response) = result_to_response(req.id, result) {
            self.client.respond(response);
        }

        fn result_to_response(
            id: lsp_server::RequestId,
            result: Result<JsonValue, ResponseError>,
        ) -> Result<lsp_server::Response, Cancelled> {
            let res = match result {
                Ok(resp) => lsp_server::Response::new_ok(id, resp),
                Err(e) => lsp_server::Response::new_err(id, e.code, e.message),
            };
            Ok(res)
        }
    }

    /// Handles an incoming notification.
    fn on_notification(
        &mut self,
        request_received: Instant,
        not: Notification,
    ) -> anyhow::Result<()> {
        info!("notifying {} - at {:0.2?}", not.method, request_received);

        let Some(handler) = self.notify_cmds.get(not.method.as_str()) else {
            warn!("unhandled notification: {}", not.method);
            return Ok(());
        };

        let result = handler(self, not.params);

        let request_duration = request_received.elapsed();
        if let Err(err) = result {
            error!(
                "notifing {} failed in {:0.2?}: {:?}",
                not.method, request_duration, err
            );
        } else {
            info!(
                "notifing {} succeeded in {:0.2?}",
                not.method, request_duration
            );
        }

        Ok(())
    }
}

impl CompileServer {
    pub fn on_changed_configuration(&mut self, values: Map<String, JsonValue>) -> LspResult<()> {
        let config = self.config.clone();
        match self.config.update_by_map(&values) {
            Ok(()) => {}
            Err(err) => {
                self.config = config;
                error!("error applying new settings: {err}");
                return Err(invalid_params(format!(
                    "error applying new settings: {err}"
                )));
            }
        }

        info!("new settings applied");
        if config.output_path != self.config.output_path
            || config.export_pdf != self.config.export_pdf
        {
            let config = ExportConfig {
                substitute_pattern: self.config.output_path.clone(),
                mode: self.config.export_pdf,
                ..ExportConfig::default()
            };

            self.compiler
                .as_ref()
                .unwrap()
                .change_export_pdf(config.clone());
        }

        // todo: watch changes of the root path

        Ok(())
    }
}

struct Cancelled;

impl CompileServer {
    fn get_exec_commands() -> ExecuteCmdMap {
        macro_rules! redirected_command {
            ($key: expr, Self::$method: ident) => {
                (
                    $key,
                    exec_fn!(LspHandler<Vec<JsonValue>, JsonValue>, Self::$method, inputs),
                )
            };
        }

        ExecuteCmdMap::from_iter([
            redirected_command!("tinymist.exportPdf", Self::export_pdf),
            redirected_command!("tinymist.exportSvg", Self::export_svg),
            redirected_command!("tinymist.exportPng", Self::export_png),
            redirected_command!("tinymist.doClearCache", Self::clear_cache),
            redirected_command!("tinymist.focusMain", Self::focus_document),
        ])
    }

    /// The entry point for the `workspace/executeCommand` request.
    fn execute_command(&mut self, params: ExecuteCommandParams) -> LspResult<Option<JsonValue>> {
        let ExecuteCommandParams {
            command,
            arguments,
            work_done_progress_params: _,
        } = params;
        let Some(handler) = self.exec_cmds.get(command.as_str()) else {
            error!("asked to execute unknown command");
            return Err(method_not_found());
        };

        Ok(Some(handler(self, arguments)?))
    }

    /// Export the current document as a PDF file.
    pub fn export_pdf(&self, arguments: Vec<JsonValue>) -> LspResult<JsonValue> {
        self.export(ExportKind::Pdf, arguments)
    }

    /// Export the current document as a Svg file.
    pub fn export_svg(&self, arguments: Vec<JsonValue>) -> LspResult<JsonValue> {
        let opts = parse_opts(arguments.get(1))?;
        self.export(ExportKind::Svg { page: opts.page }, arguments)
    }

    /// Export the current document as a Png file.
    pub fn export_png(&self, arguments: Vec<JsonValue>) -> LspResult<JsonValue> {
        let opts = parse_opts(arguments.get(1))?;
        self.export(ExportKind::Png { page: opts.page }, arguments)
    }

    /// Export the current document as some format. The client is responsible
    /// for passing the correct absolute path of typst document.
    pub fn export(&self, kind: ExportKind, arguments: Vec<JsonValue>) -> LspResult<JsonValue> {
        let path = parse_path(arguments.first())?.as_ref().to_owned();

        let res = run_query!(self.OnExport(path, kind))?;
        let res = serde_json::to_value(res).map_err(|_| internal_error("Cannot serialize path"))?;

        Ok(res)
    }

    /// Clear all cached resources.
    ///
    /// # Errors
    /// Errors if the cache could not be cleared.
    pub fn clear_cache(&self, _arguments: Vec<JsonValue>) -> LspResult<JsonValue> {
        comemo::evict(0);
        Ok(JsonValue::Null)
    }

    /// Focus main file to some path.
    pub fn focus_document(&self, arguments: Vec<JsonValue>) -> LspResult<JsonValue> {
        let new_entry = parse_path_or_null(arguments.first())?;

        let update_result = self
            .compiler
            .as_ref()
            .unwrap()
            .change_entry(new_entry.clone());
        update_result.map_err(|err| internal_error(format!("could not focus file: {err}")))?;

        info!("file focused: {entry:?}", entry = new_entry);
        Ok(JsonValue::Null)
    }
}

#[derive(Debug, Clone, Serialize, Deserialize)]
struct ExportOpts {
    page: PageSelection,
}

fn parse_opts(v: Option<&JsonValue>) -> LspResult<ExportOpts> {
    Ok(match v {
        Some(opts) => serde_json::from_value::<ExportOpts>(opts.clone())
            .map_err(|_| invalid_params("The third argument is not a valid object"))?,
        _ => ExportOpts {
            page: PageSelection::First,
        },
    })
}

fn parse_path(v: Option<&JsonValue>) -> LspResult<ImmutPath> {
    let new_entry = match v {
        Some(JsonValue::String(s)) => {
            let s = Path::new(s);
            if !s.is_absolute() {
                return Err(invalid_params("entry should be absolute"));
            }

            s.into()
        }
        _ => {
            return Err(invalid_params(
                "The first parameter is not a valid path or null",
            ))
        }
    };

    Ok(new_entry)
}

fn parse_path_or_null(v: Option<&JsonValue>) -> LspResult<Option<ImmutPath>> {
    match v {
        Some(JsonValue::Null) => Ok(None),
        v => Ok(Some(parse_path(v)?)),
    }
}
381
crates/tinymist/src/server/compiler_init.rs
Normal file
381
crates/tinymist/src/server/compiler_init.rs
Normal file
|
@ -0,0 +1,381 @@
|
|||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::bail;
|
||||
use clap::builder::ValueParser;
|
||||
use clap::{ArgAction, Parser};
|
||||
use comemo::Prehashed;
|
||||
use once_cell::sync::Lazy;
|
||||
use serde::Deserialize;
|
||||
use serde_json::{Map, Value as JsonValue};
|
||||
use tinymist_query::{DiagnosticsMap, PositionEncoding};
|
||||
use tokio::sync::mpsc;
|
||||
use typst::foundations::IntoValue;
|
||||
use typst::syntax::FileId;
|
||||
use typst::syntax::VirtualPath;
|
||||
use typst::util::Deferred;
|
||||
use typst_ts_core::config::compiler::EntryState;
|
||||
use typst_ts_core::{ImmutPath, TypstDict};
|
||||
|
||||
use crate::compiler::{CompileServer, CompileServerArgs};
|
||||
use crate::harness::LspDriver;
|
||||
use crate::world::{ImmutDict, SharedFontResolver};
|
||||
use crate::{CompileExtraOpts, CompileFontOpts, ExportMode, LspHost};
|
||||
|
||||
#[cfg(feature = "clap")]
|
||||
const ENV_PATH_SEP: char = if cfg!(windows) { ';' } else { ':' };
|
||||
|
||||
#[derive(Debug, Clone, Default)]
|
||||
#[cfg_attr(feature = "clap", derive(clap::Parser))]
|
||||
pub struct FontArgs {
|
||||
/// Font paths, which doesn't allow for dynamic configuration
|
||||
#[cfg_attr(feature = "clap", clap(
|
||||
long = "font-path",
|
||||
value_name = "DIR",
|
||||
action = clap::ArgAction::Append,
|
||||
env = "TYPST_FONT_PATHS",
|
||||
value_delimiter = ENV_PATH_SEP
|
||||
))]
|
||||
pub font_paths: Vec<PathBuf>,
|
||||
/// Exclude system fonts
|
||||
#[cfg_attr(feature = "clap", clap(long, default_value = "false"))]
|
||||
pub no_system_fonts: bool,
|
||||
}
|
||||
|
||||
/// Common arguments of compile, watch, and query.
|
||||
#[derive(Debug, Clone, Parser, Default)]
|
||||
pub struct CompileOnceArgs {
|
||||
/// Path to input Typst file, use `-` to read input from stdin
|
||||
#[clap(value_name = "INPUT")]
|
||||
pub input: Option<String>,
|
||||
|
||||
/// Configures the project root (for absolute paths)
|
||||
#[clap(long = "root", value_name = "DIR")]
|
||||
pub root: Option<PathBuf>,
|
||||
|
||||
/// Add a string key-value pair visible through `sys.inputs`
|
||||
#[clap(
|
||||
long = "input",
|
||||
value_name = "key=value",
|
||||
action = ArgAction::Append,
|
||||
value_parser = ValueParser::new(parse_input_pair),
|
||||
)]
|
||||
pub inputs: Vec<(String, String)>,
|
||||
|
||||
#[cfg_attr(feature = "clap", clap(flatten))]
|
||||
pub font: FontArgs,
|
||||
}
|
||||
|
||||
/// Parses key/value pairs split by the first equal sign.
|
||||
///
|
||||
/// This function will return an error if the argument contains no equals sign
|
||||
/// or contains the key (before the equals sign) is empty.
|
||||
fn parse_input_pair(raw: &str) -> Result<(String, String), String> {
|
||||
let (key, val) = raw
|
||||
.split_once('=')
|
||||
.ok_or("input must be a key and a value separated by an equal sign")?;
|
||||
let key = key.trim().to_owned();
|
||||
if key.is_empty() {
|
||||
return Err("the key was missing or empty".to_owned());
|
||||
}
|
||||
let val = val.trim().to_owned();
|
||||
Ok((key, val))
|
||||
}
|
||||
|
||||
/// The user configuration read from the editor.
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct CompileConfig {
|
||||
/// The workspace roots from initialization.
|
||||
pub roots: Vec<PathBuf>,
|
||||
/// The output directory for PDF export.
|
||||
pub output_path: String,
|
||||
/// The mode of PDF export.
|
||||
pub export_pdf: ExportMode,
|
||||
/// Specifies the root path of the project manually.
|
||||
pub root_path: Option<PathBuf>,
|
||||
/// Typst extra arguments.
|
||||
pub typst_extra_args: Option<CompileExtraOpts>,
|
||||
}
|
||||
|
||||
impl CompileConfig {
|
||||
/// Updates the configuration with a JSON object.
|
||||
///
|
||||
/// # Errors
|
||||
/// Errors if the update is invalid.
|
||||
pub fn update(&mut self, update: &JsonValue) -> anyhow::Result<()> {
|
||||
if let JsonValue::Object(update) = update {
|
||||
self.update_by_map(update)
|
||||
} else {
|
||||
bail!("got invalid configuration object {update}")
|
||||
}
|
||||
}
|
||||
|
||||
/// Updates the configuration with a map.
|
||||
///
|
||||
/// # Errors
|
||||
/// Errors if the update is invalid.
|
||||
pub fn update_by_map(&mut self, update: &Map<String, JsonValue>) -> anyhow::Result<()> {
|
||||
if let Some(JsonValue::String(output_path)) = update.get("outputPath") {
|
||||
self.output_path = output_path.to_owned();
|
||||
} else {
|
||||
self.output_path = String::new();
|
||||
}
|
||||
|
||||
let export_pdf = update
|
||||
.get("exportPdf")
|
||||
.map(ExportMode::deserialize)
|
||||
.and_then(Result::ok);
|
||||
if let Some(export_pdf) = export_pdf {
|
||||
self.export_pdf = export_pdf;
|
||||
} else {
|
||||
self.export_pdf = ExportMode::default();
|
||||
}
|
||||
|
||||
let root_path = update.get("rootPath");
|
||||
if let Some(root_path) = root_path {
|
||||
if root_path.is_null() {
|
||||
self.root_path = None;
|
||||
}
|
||||
if let Some(root_path) = root_path.as_str().map(PathBuf::from) {
|
||||
self.root_path = Some(root_path);
|
||||
}
|
||||
} else {
|
||||
self.root_path = None;
|
||||
}
|
||||
|
||||
'parse_extra_args: {
|
||||
if let Some(typst_extra_args) = update.get("typstExtraArgs") {
|
||||
let typst_args: Vec<String> = match serde_json::from_value(typst_extra_args.clone())
|
||||
{
|
||||
Ok(e) => e,
|
||||
Err(e) => {
|
||||
log::error!("failed to parse typstExtraArgs: {e}");
|
||||
return Ok(());
|
||||
}
|
||||
};
|
||||
|
||||
let command = match CompileOnceArgs::try_parse_from(
|
||||
Some("typst-cli".to_owned()).into_iter().chain(typst_args),
|
||||
) {
|
||||
Ok(e) => e,
|
||||
Err(e) => {
|
||||
log::error!("failed to parse typstExtraArgs: {e}");
|
||||
break 'parse_extra_args;
|
||||
}
|
||||
};
|
||||
|
||||
// Convert the input pairs to a dictionary.
|
||||
let inputs: TypstDict = if command.inputs.is_empty() {
|
||||
TypstDict::default()
|
||||
} else {
|
||||
let pairs = command.inputs.iter();
|
||||
let pairs = pairs.map(|(k, v)| (k.as_str().into(), v.as_str().into_value()));
|
||||
pairs.collect()
|
||||
};
|
||||
|
||||
// todo: the command.root may be not absolute
|
||||
self.typst_extra_args = Some(CompileExtraOpts {
|
||||
entry: command.input.map(PathBuf::from),
|
||||
root_dir: command.root,
|
||||
inputs: Arc::new(Prehashed::new(inputs)),
|
||||
font_paths: command.font.font_paths,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
self.validate()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
    pub fn determine_root(&self, entry: Option<&ImmutPath>) -> Option<ImmutPath> {
        if let Some(path) = &self.root_path {
            return Some(path.as_path().into());
        }

        if let Some(extras) = &self.typst_extra_args {
            if let Some(root) = &extras.root_dir {
                return Some(root.as_path().into());
            }
        }

        if let Some(path) = &self
            .typst_extra_args
            .as_ref()
            .and_then(|x| x.root_dir.clone())
        {
            return Some(path.as_path().into());
        }

        if let Some(entry) = entry {
            for root in self.roots.iter() {
                if entry.starts_with(root) {
                    return Some(root.as_path().into());
                }
            }

            if !self.roots.is_empty() {
                log::warn!("entry is not in any set root directory");
            }

            if let Some(parent) = entry.parent() {
                return Some(parent.into());
            }
        }

        if !self.roots.is_empty() {
            return Some(self.roots[0].as_path().into());
        }

        None
    }

    pub fn determine_entry(&self, entry: Option<ImmutPath>) -> EntryState {
        // todo: don't ignore entry from typst_extra_args
        // entry: command.input,

        let root_dir = self.determine_root(entry.as_ref());

        let entry = match (entry, root_dir) {
            (Some(entry), Some(root)) => match entry.strip_prefix(&root) {
                Ok(stripped) => Some(EntryState::new_rooted(
                    root,
                    Some(FileId::new(None, VirtualPath::new(stripped))),
                )),
                Err(err) => {
                    log::info!("Entry is not in root directory: err {err:?}: entry: {entry:?}, root: {root:?}");
                    EntryState::new_rootless(entry)
                }
            },
            (Some(entry), None) => EntryState::new_rootless(entry),
            (None, Some(root)) => Some(EntryState::new_workspace(root)),
            (None, None) => None,
        };

        entry.unwrap_or_else(|| match self.determine_root(None) {
            Some(root) => EntryState::new_workspace(root),
            // todo
            None => EntryState::new_detached(),
        })
    }

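    // A minimal sketch, not part of the diff, with assumed paths: an absolute
    // entry under the determined root becomes a rooted `EntryState`, mirroring
    // the `Ok(stripped)` arm of `determine_entry` above.
    #[allow(dead_code)]
    fn example_rooted_entry() -> EntryState {
        let root: ImmutPath = std::path::Path::new("/ws/proj").into();
        let entry = std::path::Path::new("/ws/proj/main.typ");
        let stripped = entry.strip_prefix(&root).expect("entry lies inside the root");
        EntryState::new_rooted(root, Some(FileId::new(None, VirtualPath::new(stripped))))
    }
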
    pub fn determine_inputs(&self) -> ImmutDict {
        static EMPTY: Lazy<ImmutDict> = Lazy::new(ImmutDict::default);

        if let Some(extras) = &self.typst_extra_args {
            return extras.inputs.clone();
        }

        EMPTY.clone()
    }

    pub fn validate(&self) -> anyhow::Result<()> {
        if let Some(root) = &self.root_path {
            if !root.is_absolute() {
                bail!("rootPath must be an absolute path: {root:?}");
            }
        }

        if let Some(extra_args) = &self.typst_extra_args {
            if let Some(root) = &extra_args.root_dir {
                if !root.is_absolute() {
                    bail!("typstExtraArgs.root must be an absolute path: {root:?}");
                }
            }
        }

        Ok(())
    }
}

/// Configuration set at initialization that won't change within a single
/// session.
#[derive(Debug, Clone)]
pub struct CompilerConstConfig {
    /// Determined position encoding, either UTF-8 or UTF-16.
    /// Defaults to UTF-16 if not specified.
    pub position_encoding: PositionEncoding,
}

impl Default for CompilerConstConfig {
    fn default() -> Self {
        Self {
            position_encoding: PositionEncoding::Utf16,
        }
    }
}

pub struct CompileInit {
    pub handle: tokio::runtime::Handle,
    pub font: CompileFontOpts,
    pub diag_tx: mpsc::UnboundedSender<(String, Option<DiagnosticsMap>)>,
}

#[derive(Debug, Deserialize)]
pub struct CompileInitializeParams {
    pub config: serde_json::Value,
    pub position_encoding: Option<lsp_types::PositionEncodingKind>,
}

impl LspDriver for CompileInit {
    type InitParams = CompileInitializeParams;
    type InitResult = ();
    type InitializedSelf = CompileServer;

    fn initialize(
        self,
        client: LspHost<Self::InitializedSelf>,
        params: Self::InitParams,
    ) -> (
        Self::InitializedSelf,
        Result<Self::InitResult, lsp_server::ResponseError>,
    ) {
        let mut compile_config = CompileConfig::default();
        compile_config.update(&params.config).unwrap();

        // prepare fonts
        // todo: on font resolving failure, downgrade to a fake font book
        let font = {
            let mut opts = self.font;
            if let Some(font_paths) = compile_config
                .typst_extra_args
                .as_ref()
                .map(|x| &x.font_paths)
            {
                opts.font_paths = font_paths.clone();
            }

            Deferred::new(|| SharedFontResolver::new(opts).expect("failed to create font book"))
        };

        let args = CompileServerArgs {
            client,
            compile_config,
            const_config: CompilerConstConfig {
                position_encoding: params
                    .position_encoding
                    .map(|x| match x.as_str() {
                        "utf-16" => PositionEncoding::Utf16,
                        _ => PositionEncoding::Utf8,
                    })
                    .unwrap_or_default(),
            },
            diag_tx: self.diag_tx,
            handle: self.handle,
            font,
        };

        let mut service = CompileServer::new(args);

        let primary = service.server(
            "primary".to_owned(),
            service.config.determine_entry(None),
            service.config.determine_inputs(),
        );
        if service.compiler.is_some() {
            panic!("primary already initialized");
        }
        service.compiler = Some(primary);

        (service, Ok(()))
    }
}

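A minimal sketch, not part of the diff, of an initialization payload that
deserializes into `CompileInitializeParams` above (field values are assumed):

    fn example_params() -> serde_json::Result<CompileInitializeParams> {
        serde_json::from_value(serde_json::json!({
            "config": {},
            "position_encoding": "utf-16",
        }))
    }
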
1000  crates/tinymist/src/server/lsp.rs  Normal file
File diff suppressed because it is too large
@@ -2,29 +2,22 @@ use std::sync::Arc;
use std::{collections::HashMap, path::PathBuf};

use anyhow::bail;
use clap::builder::ValueParser;
use clap::{ArgAction, Parser};
use comemo::Prehashed;
use itertools::Itertools;
use log::{error, info, warn};
use log::info;
use lsp_types::*;
use once_cell::sync::Lazy;
use serde::Deserialize;
use serde_json::{Map, Value as JsonValue};
use tinymist_query::{get_semantic_tokens_options, PositionEncoding};
use tokio::sync::mpsc;
use typst::foundations::IntoValue;
use typst::syntax::VirtualPath;
use typst::util::Deferred;
use typst_ts_core::config::compiler::EntryState;
use typst_ts_core::error::prelude::*;
use typst_ts_core::{ImmutPath, TypstDict, TypstFileId as FileId};

use crate::actor::cluster::CompileClusterActor;
use crate::compiler_init::CompileConfig;
use crate::harness::LspHost;
use crate::world::{CompileOpts, ImmutDict, SharedFontResolver};
use crate::{
    invalid_params, CompileFontOpts, LspHost, LspResult, TypstLanguageServer,
    TypstLanguageServerArgs,
    invalid_params, CompileFontOpts, LspResult, TypstLanguageServer, TypstLanguageServerArgs,
};

// todo: svelte-language-server responds to a Goto Definition request with

@@ -97,61 +90,12 @@ const CONFIG_ITEMS: &[&str] = &[
/// The user configuration read from the editor.
#[derive(Debug, Default, Clone)]
pub struct Config {
    /// The workspace roots from initialization.
    pub roots: Vec<PathBuf>,
    /// The output directory for PDF export.
    pub output_path: String,
    /// The mode of PDF export.
    pub export_pdf: ExportMode,
    /// Specifies the root path of the project manually.
    pub root_path: Option<PathBuf>,
    /// The compile configurations
    pub compile: CompileConfig,
    /// Dynamic configuration for semantic tokens.
    pub semantic_tokens: SemanticTokensMode,
    /// Dynamic configuration for the experimental formatter.
    pub formatter: ExperimentalFormatterMode,
    /// Typst extra arguments.
    pub typst_extra_args: Option<CompileExtraOpts>,
}

/// Common arguments of compile, watch, and query.
#[derive(Debug, Clone, Parser)]
pub struct TypstArgs {
    /// Path to input Typst file, use `-` to read input from stdin
    #[clap(value_name = "INPUT")]
    pub input: Option<PathBuf>,

    /// Configures the project root (for absolute paths)
    #[clap(long = "root", value_name = "DIR")]
    pub root: Option<PathBuf>,

    /// Add a string key-value pair visible through `sys.inputs`
    #[clap(
        long = "input",
        value_name = "key=value",
        action = ArgAction::Append,
        value_parser = ValueParser::new(parse_input_pair),
    )]
    pub inputs: Vec<(String, String)>,

    /// Adds additional directories to search for fonts
    #[clap(long = "font-path", value_name = "DIR")]
    pub font_paths: Vec<PathBuf>,
}

/// Parses key/value pairs split by the first equal sign.
///
/// This function will return an error if the argument contains no equals sign
/// or if the key (before the equals sign) is empty.
fn parse_input_pair(raw: &str) -> Result<(String, String), String> {
    let (key, val) = raw
        .split_once('=')
        .ok_or("input must be a key and a value separated by an equal sign")?;
    let key = key.trim().to_owned();
    if key.is_empty() {
        return Err("the key was missing or empty".to_owned());
    }
    let val = val.trim().to_owned();
    Ok((key, val))
}

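// A quick sketch, not part of the diff, of how `parse_input_pair` behaves:
// whitespace around the key and value is trimmed, and a missing equals sign or
// an empty key is rejected.
#[allow(dead_code)]
fn example_parse_input_pair() {
    assert_eq!(
        parse_input_pair("key = value"),
        Ok(("key".to_owned(), "value".to_owned()))
    );
    assert!(parse_input_pair("no-equals-sign").is_err());
    assert!(parse_input_pair("=value").is_err());
}
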
impl Config {

@@ -200,34 +144,6 @@ impl Config {
    /// # Errors
    /// Errors if the update is invalid.
    pub fn update_by_map(&mut self, update: &Map<String, JsonValue>) -> anyhow::Result<()> {
        if let Some(JsonValue::String(output_path)) = update.get("outputPath") {
            self.output_path = output_path.to_owned();
        } else {
            self.output_path = String::new();
        }

        let export_pdf = update
            .get("exportPdf")
            .map(ExportMode::deserialize)
            .and_then(Result::ok);
        if let Some(export_pdf) = export_pdf {
            self.export_pdf = export_pdf;
        } else {
            self.export_pdf = ExportMode::default();
        }

        let root_path = update.get("rootPath");
        if let Some(root_path) = root_path {
            if root_path.is_null() {
                self.root_path = None;
            }
            if let Some(root_path) = root_path.as_str().map(PathBuf::from) {
                self.root_path = Some(root_path);
            }
        } else {
            self.root_path = None;
        }

        let semantic_tokens = update
            .get("semanticTokens")
            .map(SemanticTokensMode::deserialize)

@@ -244,145 +160,14 @@ impl Config {
            self.formatter = formatter;
        }

        'parse_extra_args: {
            if let Some(typst_extra_args) = update.get("typstExtraArgs") {
                let typst_args: Vec<String> = match serde_json::from_value(typst_extra_args.clone())
                {
                    Ok(e) => e,
                    Err(e) => {
                        error!("failed to parse typstExtraArgs: {e}");
                        return Ok(());
                    }
                };

                let command = match TypstArgs::try_parse_from(
                    Some("typst-cli".to_owned()).into_iter().chain(typst_args),
                ) {
                    Ok(e) => e,
                    Err(e) => {
                        error!("failed to parse typstExtraArgs: {e}");
                        break 'parse_extra_args;
                    }
                };

                // Convert the input pairs to a dictionary.
                let inputs: TypstDict = if command.inputs.is_empty() {
                    TypstDict::default()
                } else {
                    let pairs = command.inputs.iter();
                    let pairs = pairs.map(|(k, v)| (k.as_str().into(), v.as_str().into_value()));
                    pairs.collect()
                };

                // todo: the command.root may not be absolute
                self.typst_extra_args = Some(CompileExtraOpts {
                    entry: command.input,
                    root_dir: command.root,
                    inputs: Arc::new(Prehashed::new(inputs)),
                    font_paths: command.font_paths,
                });
            }
        }
        self.compile.update_by_map(update)?;

        self.validate()?;
        Ok(())
    }

    pub fn determine_root(&self, entry: Option<&ImmutPath>) -> Option<ImmutPath> {
        if let Some(path) = &self.root_path {
            return Some(path.as_path().into());
        }

        if let Some(extras) = &self.typst_extra_args {
            if let Some(root) = &extras.root_dir {
                return Some(root.as_path().into());
            }
        }

        if let Some(path) = &self
            .typst_extra_args
            .as_ref()
            .and_then(|x| x.root_dir.clone())
        {
            return Some(path.as_path().into());
        }

        if let Some(entry) = entry {
            for root in self.roots.iter() {
                if entry.starts_with(root) {
                    return Some(root.as_path().into());
                }
            }

            if !self.roots.is_empty() {
                warn!("entry is not in any set root directory");
            }

            if let Some(parent) = entry.parent() {
                return Some(parent.into());
            }
        }

        if !self.roots.is_empty() {
            return Some(self.roots[0].as_path().into());
        }

        None
    }

    pub fn determine_entry(&self, entry: Option<ImmutPath>) -> EntryState {
        // todo: don't ignore entry from typst_extra_args
        // entry: command.input,

        let root_dir = self.determine_root(entry.as_ref());

        let entry = match (entry, root_dir) {
            (Some(entry), Some(root)) => match entry.strip_prefix(&root) {
                Ok(stripped) => Some(EntryState::new_rooted(
                    root,
                    Some(FileId::new(None, VirtualPath::new(stripped))),
                )),
                Err(err) => {
                    log::info!("Entry is not in root directory: err {err:?}: entry: {entry:?}, root: {root:?}");
                    EntryState::new_rootless(entry)
                }
            },
            (Some(entry), None) => EntryState::new_rootless(entry),
            (None, Some(root)) => Some(EntryState::new_workspace(root)),
            (None, None) => None,
        };

        entry.unwrap_or_else(|| match self.determine_root(None) {
            Some(root) => EntryState::new_workspace(root),
            // todo
            None => EntryState::new_detached(),
        })
    }

    pub fn determine_inputs(&self) -> ImmutDict {
        static EMPTY: Lazy<ImmutDict> = Lazy::new(ImmutDict::default);

        if let Some(extras) = &self.typst_extra_args {
            return extras.inputs.clone();
        }

        EMPTY.clone()
    }

    fn validate(&self) -> anyhow::Result<()> {
        if let Some(root) = &self.root_path {
            if !root.is_absolute() {
                bail!("rootPath must be an absolute path: {root:?}");
            }
        }

        if let Some(extra_args) = &self.typst_extra_args {
            if let Some(root) = &extra_args.root_dir {
                if !root.is_absolute() {
                    bail!("typstExtraArgs.root must be an absolute path: {root:?}");
                }
            }
        }
        self.compile.validate()?;

        Ok(())
    }

@@ -470,7 +255,7 @@ impl From<&InitializeParams> for ConstConfig {
}

pub struct Init {
    pub host: LspHost,
    pub host: LspHost<TypstLanguageServer>,
    pub compile_opts: CompileOpts,
}

@@ -503,21 +288,24 @@ impl Init {
            const_config = cc
        );
        let mut config = Config {
            roots: match params.workspace_folders.as_ref() {
                Some(roots) => roots
                    .iter()
                    .map(|root| &root.uri)
                    .map(Url::to_file_path)
                    .collect::<Result<Vec<_>, _>>()
                    .unwrap(),
                #[allow(deprecated)] // `params.root_path` is marked as deprecated
                None => params
                    .root_uri
                    .as_ref()
                    .map(|uri| uri.to_file_path().unwrap())
                    .or_else(|| params.root_path.clone().map(PathBuf::from))
                    .into_iter()
                    .collect(),
            compile: CompileConfig {
                roots: match params.workspace_folders.as_ref() {
                    Some(roots) => roots
                        .iter()
                        .map(|root| &root.uri)
                        .map(Url::to_file_path)
                        .collect::<Result<Vec<_>, _>>()
                        .unwrap(),
                    #[allow(deprecated)] // `params.root_path` is marked as deprecated
                    None => params
                        .root_uri
                        .as_ref()
                        .map(|uri| uri.to_file_path().unwrap())
                        .or_else(|| params.root_path.clone().map(PathBuf::from))
                        .into_iter()
                        .collect(),
                },
                ..CompileConfig::default()
            },
            ..Config::default()
        };

@@ -532,10 +320,15 @@ impl Init {
        // prepare fonts
        // todo: on font resolving failure, downgrade to a fake font book
        let font = {
            let opts = std::mem::take(&mut self.compile_opts.font);
            let mut opts = std::mem::take(&mut self.compile_opts.font);
            if opts.font_paths.is_empty() {
                if let Some(font_paths) = config.typst_extra_args.as_ref().map(|x| &x.font_paths) {
                    self.compile_opts.font.font_paths = font_paths.clone();
                if let Some(font_paths) = config
                    .compile
                    .typst_extra_args
                    .as_ref()
                    .map(|x| &x.font_paths)
                {
                    opts.font_paths = font_paths.clone();
                }
            }

@@ -558,6 +351,7 @@ impl Init {
        }

        info!("initialized with config {config:?}", config = config);
        service.primary.config = config.compile.clone();
        service.config = config;

        let cluster_actor = CompileClusterActor {

@@ -570,13 +364,13 @@ impl Init {

        let primary = service.server(
            "primary".to_owned(),
            service.config.determine_entry(None),
            service.config.determine_inputs(),
            service.config.compile.determine_entry(None),
            service.config.compile.determine_inputs(),
        );
        if service.primary.is_some() {
        if service.primary.compiler.is_some() {
            panic!("primary already initialized");
        }
        service.primary = Some(primary);
        service.primary.compiler = Some(primary);

        // Run the cluster in the background after referencing it
        tokio::spawn(cluster_actor.run());

@@ -692,13 +486,13 @@ mod tests {

        config.update(&update).unwrap();

        assert_eq!(config.output_path, "out");
        assert_eq!(config.export_pdf, ExportMode::OnSave);
        assert_eq!(config.root_path, Some(PathBuf::from(root_path)));
        assert_eq!(config.compile.output_path, "out");
        assert_eq!(config.compile.export_pdf, ExportMode::OnSave);
        assert_eq!(config.compile.root_path, Some(PathBuf::from(root_path)));
        assert_eq!(config.semantic_tokens, SemanticTokensMode::Enable);
        assert_eq!(config.formatter, ExperimentalFormatterMode::Enable);
        assert_eq!(
            config.typst_extra_args,
            config.compile.typst_extra_args,
            Some(CompileExtraOpts {
                root_dir: Some(PathBuf::from(root_path)),
                ..Default::default()
5  crates/tinymist/src/server/mod.rs  Normal file
@@ -0,0 +1,5 @@
pub mod lsp;
pub mod lsp_init;

pub mod compiler;
pub mod compiler_init;

@@ -15,7 +15,7 @@ use typst_ts_compiler::{
};
use typst_ts_core::{error::prelude::*, Bytes, Error, ImmutPath};

use crate::{actor::typ_client::CompileClientActor, TypstLanguageServer};
use crate::{actor::typ_client::CompileClientActor, compiler::CompileServer, TypstLanguageServer};

#[derive(Debug, Clone)]
pub struct MemoryFileMeta {

@@ -35,8 +35,8 @@ impl TypstLanguageServer {
            (Some(new_entry), false) => {
                let main_node = self.server(
                    "main".to_owned(),
                    self.config.determine_entry(Some(new_entry)),
                    self.config.determine_inputs(),
                    self.config.compile.determine_entry(Some(new_entry)),
                    self.config.compile.determine_inputs(),
                );

                self.main = Some(main_node);

@@ -259,3 +259,10 @@ impl TypstLanguageServer {
        }
    }
}

impl CompileServer {
    pub fn query(&self, query: CompilerQueryRequest) -> anyhow::Result<CompilerQueryResponse> {
        let client = self.compiler.as_ref().unwrap();
        TypstLanguageServer::query_on(client, query)
    }
}

@@ -3,12 +3,68 @@ use std::{
    thread,
};

use log::{info, trace};

use crossbeam_channel::{bounded, Receiver, Sender};

use crate::Message;

use std::io::Read;

use lsp_server::Connection;

#[derive(Debug, Clone, Default)]
#[cfg_attr(feature = "clap", derive(clap::Parser))]
pub struct MirrorArgs {
    /// Mirror the stdin to the file
    #[cfg_attr(feature = "clap", clap(long, default_value = "", value_name = "FILE"))]
    pub mirror: String,
    /// Replay input from the file
    #[cfg_attr(feature = "clap", clap(long, default_value = "", value_name = "FILE"))]
    pub replay: String,
}

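// A minimal sketch, not part of the diff (program name and path are assumed):
// parsing the mirror flags with clap when the `clap` feature is enabled.
#[cfg(feature = "clap")]
#[allow(dead_code)]
fn example_mirror_args() {
    use clap::Parser;
    let args = MirrorArgs::parse_from(["tinymist", "--mirror", "/tmp/lsp-session.log"]);
    assert_eq!(args.mirror, "/tmp/lsp-session.log");
    assert!(args.replay.is_empty());
}
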
/// Note that we must have our logging only write out to stderr.
pub fn with_stdio_transport(
    args: MirrorArgs,
    f: impl FnOnce(Connection, &mut bool) -> anyhow::Result<()>,
) -> anyhow::Result<()> {
    // Set up input and output
    let replay = args.replay.clone();
    let mirror = args.mirror.clone();
    let i = move || -> Box<dyn BufRead> {
        if !replay.is_empty() {
            // Get input from file
            let file = std::fs::File::open(&replay).unwrap();
            let file = std::io::BufReader::new(file);
            Box::new(file)
        } else if mirror.is_empty() {
            // Get input from stdin
            let stdin = std::io::stdin().lock();
            Box::new(stdin)
        } else {
            let file = std::fs::File::create(&mirror).unwrap();
            let stdin = std::io::stdin().lock();
            Box::new(MirrorWriter(stdin, file, std::sync::Once::new()))
        }
    };
    let o = || std::io::stdout().lock();

    // Create the transport. Includes the stdio (stdin and stdout) versions but this
    // could also be implemented to use sockets or HTTP.
    let (sender, receiver, io_threads) = io_transport(i, o);
    let connection = Connection { sender, receiver };

    // Start the LSP server
    let mut force_exit = false;

    f(connection, &mut force_exit)?;

    if !force_exit {
        io_threads.join()?;
    }

    Ok(())
}

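// A minimal sketch, not part of the diff: driving a handler through
// `with_stdio_transport` as defined above; the closure body is a placeholder
// for an actual LSP main loop.
#[allow(dead_code)]
fn example_with_stdio_transport() -> anyhow::Result<()> {
    with_stdio_transport(MirrorArgs::default(), |connection, _force_exit| {
        // hand `connection.sender` / `connection.receiver` to the LSP loop here
        drop(connection);
        Ok(())
    })
}
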
/// Creates an LSP connection via io.
///
/// # Example

@@ -33,7 +89,7 @@ pub fn io_transport<I: BufRead, O: Write>(
            .into_iter()
            .try_for_each(|it| it.write(&mut out));

        info!("writer thread finished");
        log::info!("writer thread finished");
        res
    });
    let (reader_sender, reader_receiver) = bounded::<Message>(0);

@@ -42,7 +98,7 @@ pub fn io_transport<I: BufRead, O: Write>(
        while let Some(msg) = Message::read(&mut inp)? {
            let is_exit = matches!(&msg, Message::Notification(n) if n.method == "exit");

            trace!("sending message {:#?}", msg);
            log::trace!("sending message {:#?}", msg);
            reader_sender
                .send(msg)
                .expect("receiver was dropped, failed to send a message");

@@ -52,7 +108,7 @@ pub fn io_transport<I: BufRead, O: Write>(
            }
        }

        info!("reader thread finished");
        log::info!("reader thread finished");
        Ok(())
    });
    let threads = IoThreads { reader, writer };

@@ -84,3 +140,37 @@ impl IoThreads {
        }
    }
}

struct MirrorWriter<R: Read, W: Write>(R, W, std::sync::Once);

impl<R: Read, W: Write> Read for MirrorWriter<R, W> {
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        let res = self.0.read(buf)?;

        if let Err(err) = self.1.write_all(&buf[..res]) {
            self.2.call_once(|| {
                log::warn!("failed to write to mirror: {err}");
            });
        }

        Ok(res)
    }
}

impl<R: Read + BufRead, W: Write> BufRead for MirrorWriter<R, W> {
    fn fill_buf(&mut self) -> io::Result<&[u8]> {
        self.0.fill_buf()
    }

    fn consume(&mut self, amt: usize) {
        let buf = self.0.fill_buf().unwrap();

        if let Err(err) = self.1.write_all(&buf[..amt]) {
            self.2.call_once(|| {
                log::warn!("failed to write to mirror: {err}");
            });
        }

        self.0.consume(amt);
    }
}

@@ -67,6 +67,15 @@ impl FontResolver for SharedFontResolver {
    }
}

impl SharedFontResolver {
    pub fn new(opts: CompileFontOpts) -> ZResult<Self> {
        let res = crate::world::LspWorldBuilder::resolve_fonts(opts)?;
        Ok(Self {
            inner: Arc::new(res),
        })
    }
}

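// A minimal sketch, not part of the diff; it assumes `CompileFontOpts`
// implements `Default` and that the extra font directory exists.
#[allow(dead_code)]
fn example_font_resolver() -> ZResult<SharedFontResolver> {
    let mut opts = CompileFontOpts::default();
    opts.font_paths.push(std::path::PathBuf::from("assets/fonts"));
    SharedFontResolver::new(opts)
}
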
/// type trait of [`LspWorld`].
#[derive(Debug, Clone, Copy)]
pub struct SystemCompilerFeat;