mirror of
https://github.com/Myriad-Dreamin/tinymist.git
synced 2025-07-24 13:13:43 +00:00
refactor: clean code & fix suffix computation & combine export actors (#263)
* dev: use bail! macro
* dev: remove redundant arg structures as we have inlay hints
* dev: unify actor namings
* dev: unify actor namings (cont)
* fix: suffix computation
* dev: add EditorActor::new
* dev: unify index file flavors
* dev: combine export actors
* dev: small tweaks
* dev: mimic try blocks
* dev: accumulate export requests
* dev: remove unnecessary Option
* dev: unify serde renames
* dev: remove unnecessary Option
* dev: small tweaks
This commit is contained in:
parent d05c5012ff
commit 94a0a1b23a

21 changed files with 663 additions and 1036 deletions
@@ -132,17 +132,15 @@ mod polymorphic {
     use super::*;
 
     #[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+    #[serde(rename_all = "camelCase")]
     pub enum PageSelection {
-        #[serde(rename = "first")]
         First,
-        #[serde(rename = "merged")]
         Merged,
     }
 
     #[derive(Debug, Clone)]
     pub enum ExportKind {
         Pdf,
-        WordCount,
         Svg { page: PageSelection },
         Png { page: PageSelection },
     }
@@ -151,7 +149,6 @@ mod polymorphic {
         pub fn extension(&self) -> &str {
             match self {
                 Self::Pdf => "pdf",
-                Self::WordCount => "txt",
                 Self::Svg { .. } => "svg",
                 Self::Png { .. } => "png",
             }
@@ -179,12 +176,11 @@ mod polymorphic {
     pub struct ServerInfoRequest {}
 
     #[derive(Debug, Clone, Serialize, Deserialize)]
+    #[serde(rename_all = "camelCase")]
     pub struct ServerInfoResponse {
         pub root: Option<PathBuf>,
-        #[serde(rename = "fontPaths")]
         pub font_paths: Vec<PathBuf>,
         pub inputs: Dict,
-        #[serde(rename = "estimatedMemoryUsage")]
         pub estimated_memory_usage: HashMap<String, usize>,
     }
 
@@ -27,17 +27,15 @@ impl ExportFeature for PeriscopeExportFeature {
 
 /// The arguments for periscope renderer.
 #[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
+#[serde(rename_all = "camelCase")]
 pub struct PeriscopeArgs {
     /// The distance above the center line.
-    #[serde(rename = "yAbove")]
     pub y_above: f32,
     /// The distance below the center line.
-    #[serde(rename = "yBelow")]
     pub y_below: f32,
     /// The scale of the image.
     pub scale: f32,
     /// Whether to invert the color. (will become smarter in the future)
-    #[serde(rename = "invertColor")]
     pub invert_color: String,
 }
 
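The hunks above implement the "unify serde renames" item from the commit message: the per-field `#[serde(rename = "...")]` attributes are replaced by a single container-level `#[serde(rename_all = "camelCase")]`. A minimal standalone check of that equivalence (assuming `serde` with the derive feature and `serde_json` are available; `PeriscopeArgsLike` is a stand-in struct, not the crate's actual type):

```rust
use serde::Serialize;

// Container-level rename_all produces the same wire names as the removed
// per-field renames: y_above -> "yAbove", invert_color -> "invertColor".
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct PeriscopeArgsLike {
    y_above: f32,
    y_below: f32,
    scale: f32,
    invert_color: String,
}

fn main() {
    let json = serde_json::to_string(&PeriscopeArgsLike {
        y_above: 55.0,
        y_below: 55.0,
        scale: 1.5,
        invert_color: "never".into(),
    })
    .unwrap();
    assert!(json.contains("\"yAbove\"") && json.contains("\"invertColor\""));
    println!("{json}");
}
```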
@@ -1,4 +1,4 @@
-//! The cluster actor running in background
+//! The actor that send notifications to the client.
 
 use std::collections::HashMap;
 
@@ -9,29 +9,44 @@ use tokio::sync::mpsc;
 
 use crate::{tools::word_count::WordsCount, LspHost, TypstLanguageServer};
 
-pub enum CompileClusterRequest {
+pub enum EditorRequest {
     Diag(String, Option<DiagnosticsMap>),
     Status(String, TinymistCompileStatusEnum),
-    WordCount(String, Option<WordsCount>),
+    WordCount(String, WordsCount),
 }
 
 pub struct EditorActor {
-    pub host: LspHost<TypstLanguageServer>,
-    pub diag_rx: mpsc::UnboundedReceiver<CompileClusterRequest>,
+    host: LspHost<TypstLanguageServer>,
+    editor_rx: mpsc::UnboundedReceiver<EditorRequest>,
 
-    pub diagnostics: HashMap<Url, HashMap<String, Vec<LspDiagnostic>>>,
-    pub affect_map: HashMap<String, Vec<Url>>,
-    pub published_primary: bool,
-    pub notify_compile_status: bool,
+    diagnostics: HashMap<Url, HashMap<String, Vec<LspDiagnostic>>>,
+    affect_map: HashMap<String, Vec<Url>>,
+    published_primary: bool,
+    notify_compile_status: bool,
 }
 
 impl EditorActor {
+    pub fn new(
+        host: LspHost<TypstLanguageServer>,
+        editor_rx: mpsc::UnboundedReceiver<EditorRequest>,
+        notify_compile_status: bool,
+    ) -> Self {
+        Self {
+            host,
+            editor_rx,
+            diagnostics: HashMap::new(),
+            affect_map: HashMap::new(),
+            published_primary: false,
+            notify_compile_status,
+        }
+    }
+
     pub async fn run(mut self) {
         let mut compile_status = TinymistCompileStatusEnum::Compiling;
         let mut words_count = None;
-        while let Some(req) = self.diag_rx.recv().await {
+        while let Some(req) = self.editor_rx.recv().await {
             match req {
-                CompileClusterRequest::Diag(group, diagnostics) => {
+                EditorRequest::Diag(group, diagnostics) => {
                     info!(
                         "received diagnostics from {group}: diag({:?})",
                         diagnostics.as_ref().map(|e| e.len())
@@ -52,7 +67,7 @@ impl EditorActor {
                         self.published_primary = again_with_primary;
                     }
                 }
-                CompileClusterRequest::Status(group, status) => {
+                EditorRequest::Status(group, status) => {
                     log::debug!("received status request");
                     if self.notify_compile_status && group == "primary" {
                         compile_status = status;
@@ -64,10 +79,10 @@ impl EditorActor {
                         );
                     }
                 }
-                CompileClusterRequest::WordCount(group, wc) => {
+                EditorRequest::WordCount(group, wc) => {
                     log::debug!("received word count request");
                     if self.notify_compile_status && group == "primary" {
-                        words_count = wc;
+                        words_count = Some(wc);
                         self.host.send_notification::<TinymistCompileStatus>(
                             TinymistCompileStatus {
                                 status: compile_status.clone(),
@@ -170,9 +185,9 @@ pub enum TinymistCompileStatusEnum {
 }
 
 #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
+#[serde(rename_all = "camelCase")]
 pub struct TinymistCompileStatus {
     pub status: TinymistCompileStatusEnum,
-    #[serde(rename = "wordsCount")]
     pub words_count: Option<WordsCount>,
 }
 
crates/tinymist/src/actor/export.rs (new file, 269 lines)
@@ -0,0 +1,269 @@
//! The actor that handles PDF export.

use std::{
    path::{Path, PathBuf},
    sync::Arc,
};

use anyhow::bail;
use anyhow::Context;
use log::{error, info};
use once_cell::sync::Lazy;
use tinymist_query::{ExportKind, PageSelection};
use tokio::sync::{mpsc, oneshot, watch};
use typst::{foundations::Smart, layout::Abs, layout::Frame, visualize::Color};
use typst_ts_core::{config::compiler::EntryState, path::PathClean, ImmutPath, TypstDocument};

use crate::{tools::word_count, ExportMode};

use super::editor::EditorRequest;

#[derive(Debug, Clone, Default)]
pub struct ExportConfig {
    pub substitute_pattern: String,
    pub entry: EntryState,
    pub mode: ExportMode,
}

#[derive(Debug)]
pub enum ExportRequest {
    OnTyped,
    OnSaved(PathBuf),
    Oneshot(Option<ExportKind>, oneshot::Sender<Option<PathBuf>>),
    ChangeConfig(ExportConfig),
    ChangeExportPath(EntryState),
}

pub struct ExportActor {
    group: String,
    editor_tx: mpsc::UnboundedSender<EditorRequest>,
    export_rx: mpsc::UnboundedReceiver<ExportRequest>,
    document: watch::Receiver<Option<Arc<TypstDocument>>>,

    config: ExportConfig,
    kind: ExportKind,
    count_words: bool,
}

impl ExportActor {
    pub fn new(
        group: String,
        document: watch::Receiver<Option<Arc<TypstDocument>>>,
        editor_tx: mpsc::UnboundedSender<EditorRequest>,
        export_rx: mpsc::UnboundedReceiver<ExportRequest>,
        config: ExportConfig,
        kind: ExportKind,
        count_words: bool,
    ) -> Self {
        Self {
            group,
            editor_tx,
            export_rx,
            document,
            config,
            kind,
            count_words,
        }
    }

    pub async fn run(mut self) {
        while let Some(mut req) = self.export_rx.recv().await {
            let Some(doc) = self.document.borrow().clone() else {
                info!("RenderActor: document is not ready");
                continue;
            };

            let mut need_export = false;

            'accumulate: loop {
                log::debug!("RenderActor: received request: {req:?}");
                match req {
                    ExportRequest::ChangeConfig(cfg) => self.config = cfg,
                    ExportRequest::ChangeExportPath(entry) => self.config.entry = entry,
                    ExportRequest::OnTyped => need_export |= self.config.mode == ExportMode::OnType,
                    ExportRequest::OnSaved(..) => match self.config.mode {
                        ExportMode::OnSave => need_export = true,
                        ExportMode::OnDocumentHasTitle => need_export |= doc.title.is_some(),
                        _ => {}
                    },
                    ExportRequest::Oneshot(kind, callback) => {
                        // Do oneshot export instantly without accumulation.
                        let kind = kind.as_ref().unwrap_or(&self.kind);
                        let resp = self.check_mode_and_export(kind, &doc).await;
                        if let Err(err) = callback.send(resp) {
                            error!("RenderActor(@{kind:?}): failed to send response: {err:?}");
                        }
                    }
                }

                // Try to accumulate more requests.
                match self.export_rx.try_recv() {
                    Ok(new_req) => req = new_req,
                    _ => break 'accumulate,
                }
            }

            if need_export {
                self.check_mode_and_export(&self.kind, &doc).await;
            }

            if self.count_words {
                let wc = word_count::word_count(&doc);
                log::debug!("word count: {wc:?}");
                let _ = self
                    .editor_tx
                    .send(EditorRequest::WordCount(self.group.clone(), wc));
            }
        }
        info!("RenderActor(@{:?}): stopped", &self.kind);
    }

    async fn check_mode_and_export(
        &self,
        kind: &ExportKind,
        doc: &TypstDocument,
    ) -> Option<PathBuf> {
        // pub entry: EntryState,
        let root = self.config.entry.root();
        let main = self.config.entry.main();

        info!(
            "RenderActor: check path {:?} and root {:?} with output directory {}",
            main, root, self.config.substitute_pattern
        );

        let root = root?;
        let main = main?;

        // todo: package??
        if main.package().is_some() {
            return None;
        }

        let path = main.vpath().resolve(&root)?;

        match self.export(kind, doc, &root, &path).await {
            Ok(pdf) => Some(pdf),
            Err(err) => {
                error!("RenderActor({kind:?}): failed to export {err}");
                None
            }
        }
    }

    async fn export(
        &self,
        kind: &ExportKind,
        doc: &TypstDocument,
        root: &Path,
        path: &Path,
    ) -> anyhow::Result<PathBuf> {
        use ExportKind::*;
        use PageSelection::*;

        let Some(to) = substitute_path(&self.config.substitute_pattern, root, path) else {
            bail!("RenderActor({kind:?}): failed to substitute path");
        };
        if to.is_relative() {
            bail!("RenderActor({kind:?}): path is relative: {to:?}");
        }
        if to.is_dir() {
            bail!("RenderActor({kind:?}): path is a directory: {to:?}");
        }

        let to = to.with_extension(kind.extension());
        info!("RenderActor({kind:?}): exporting {path:?} to {to:?}");

        if let Some(e) = to.parent() {
            if !e.exists() {
                std::fs::create_dir_all(e).with_context(|| {
                    format!("RenderActor({kind:?}): failed to create directory")
                })?;
            }
        }

        static BLANK: Lazy<Frame> = Lazy::new(Frame::default);
        let first_frame = || doc.pages.first().map(|f| &f.frame).unwrap_or(&*BLANK);
        let data = match kind {
            Pdf => {
                // todo: Some(pdf_uri.as_str())
                // todo: timestamp world.now()
                typst_pdf::pdf(doc, Smart::Auto, None)
            }
            Svg { page: First } => typst_svg::svg(first_frame()).into_bytes(),
            Svg { page: Merged } => typst_svg::svg_merged(doc, Abs::zero()).into_bytes(),
            Png { page: First } => typst_render::render(first_frame(), 3., Color::WHITE)
                .encode_png()
                .map_err(|err| anyhow::anyhow!("failed to encode PNG ({err})"))?,
            Png { page: Merged } => {
                typst_render::render_merged(doc, 3., Color::WHITE, Abs::zero(), Color::WHITE)
                    .encode_png()
                    .map_err(|err| anyhow::anyhow!("failed to encode PNG ({err})"))?
            }
        };

        std::fs::write(&to, data)
            .with_context(|| format!("RenderActor({kind:?}): failed to export"))?;

        info!("RenderActor({kind:?}): export complete");
        Ok(to)
    }
}

#[comemo::memoize]
fn substitute_path(substitute_pattern: &str, root: &Path, path: &Path) -> Option<ImmutPath> {
    if let Ok(path) = path.strip_prefix("/untitled") {
        let tmp = std::env::temp_dir();
        let path = tmp.join("typst").join(path);
        return Some(path.as_path().into());
    }

    if substitute_pattern.is_empty() {
        return Some(path.to_path_buf().clean().into());
    }

    let path = path.strip_prefix(root).ok()?;
    let dir = path.parent();
    let file_name = path.file_name().unwrap_or_default();

    let w = root.to_string_lossy();
    let f = file_name.to_string_lossy();

    // replace all $root
    let mut path = substitute_pattern.replace("$root", &w);
    if let Some(dir) = dir {
        let d = dir.to_string_lossy();
        path = path.replace("$dir", &d);
    }
    path = path.replace("$name", &f);

    Some(PathBuf::from(path).clean().into())
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_substitute_path() {
        let root = Path::new("/root");
        let path = Path::new("/root/dir1/dir2/file.txt");

        assert_eq!(
            substitute_path("/substitute/$dir/$name", root, path),
            Some(PathBuf::from("/substitute/dir1/dir2/file.txt").into())
        );
        assert_eq!(
            substitute_path("/substitute/$dir/../$name", root, path),
            Some(PathBuf::from("/substitute/dir1/file.txt").into())
        );
        assert_eq!(
            substitute_path("/substitute/$name", root, path),
            Some(PathBuf::from("/substitute/file.txt").into())
        );
        assert_eq!(
            substitute_path("/substitute/target/$dir/$name", root, path),
            Some(PathBuf::from("/substitute/target/dir1/dir2/file.txt").into())
        );
    }
}
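The `'accumulate` loop in `ExportActor::run` above implements the "accumulate export requests" item from the commit message: block on the first request, then drain whatever is already queued with `try_recv()` before doing the expensive export once. A minimal standalone sketch of just that pattern (toy `u32` messages instead of `ExportRequest`, assuming the tokio crate with the `rt` and `macros` features):

```rust
use tokio::sync::mpsc;

#[tokio::main]
async fn main() {
    let (tx, mut rx) = mpsc::unbounded_channel::<u32>();
    for i in 0..5 {
        let _ = tx.send(i);
    }
    drop(tx);

    // Block for the first message, then drain the backlog without awaiting.
    while let Some(mut msg) = rx.recv().await {
        let mut batch = vec![];
        loop {
            batch.push(msg);
            match rx.try_recv() {
                Ok(next) => msg = next,
                _ => break,
            }
        }
        // One expensive action per batch instead of one per message.
        println!("processing batch: {batch:?}");
    }
}
```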
@@ -1,66 +1,63 @@
+//! The actor that handles formatting.
+
+use std::iter::zip;
+
 use lsp_server::RequestId;
 use lsp_types::TextEdit;
 use tinymist_query::{typst_to_lsp, PositionEncoding};
 use typst::syntax::Source;
 
-use crate::{result_to_response_, FormatterMode, LspHost, LspResult, TypstLanguageServer};
+use crate::{result_to_response, FormatterMode, LspHost, LspResult, TypstLanguageServer};
 
 #[derive(Debug, Clone)]
-pub struct FormattingConfig {
+pub struct FormatConfig {
     pub mode: FormatterMode,
     pub width: u32,
 }
 
-pub enum FormattingRequest {
+pub enum FormatRequest {
-    ChangeConfig(FormattingConfig),
+    ChangeConfig(FormatConfig),
-    Formatting((RequestId, Source)),
+    Format(RequestId, Source),
 }
 
 pub fn run_format_thread(
-    init_c: FormattingConfig,
+    config: FormatConfig,
-    rx_req: crossbeam_channel::Receiver<FormattingRequest>,
+    format_rx: crossbeam_channel::Receiver<FormatRequest>,
     client: LspHost<TypstLanguageServer>,
     position_encoding: PositionEncoding,
 ) {
     type FmtFn = Box<dyn Fn(Source) -> LspResult<Option<Vec<TextEdit>>>>;
-    let compile = |c: FormattingConfig| -> FmtFn {
+    let compile = |c: FormatConfig| -> FmtFn {
         log::info!("formatting thread with config: {c:#?}");
         match c.mode {
             FormatterMode::Typstyle => {
                 let cw = c.width as usize;
-                let f: FmtFn = Box::new(move |e: Source| {
+                Box::new(move |e: Source| {
                     let res = typstyle_core::Typstyle::new_with_src(e.clone(), cw).pretty_print();
                     Ok(calc_diff(e, res, position_encoding))
-                });
+                })
-                f
             }
             FormatterMode::Typstfmt => {
                 let config = typstfmt_lib::Config {
                     max_line_length: c.width as usize,
                     ..typstfmt_lib::Config::default()
                 };
-                let f: FmtFn = Box::new(move |e: Source| {
+                Box::new(move |e: Source| {
                     let res = typstfmt_lib::format(e.text(), config);
                     Ok(calc_diff(e, res, position_encoding))
-                });
+                })
-                f
-            }
-            FormatterMode::Disable => {
-                let f: FmtFn = Box::new(|_| Ok(None));
-                f
             }
+            FormatterMode::Disable => Box::new(|_| Ok(None)),
         }
     };
 
-    let mut f: FmtFn = compile(init_c);
+    let mut f: FmtFn = compile(config);
-    while let Ok(req) = rx_req.recv() {
+    while let Ok(req) = format_rx.recv() {
         match req {
-            FormattingRequest::ChangeConfig(c) => f = compile(c),
+            FormatRequest::ChangeConfig(c) => f = compile(c),
-            FormattingRequest::Formatting((id, source)) => {
+            FormatRequest::Format(id, source) => {
                 let res = f(source);
-                if let Ok(response) = result_to_response_(id, res) {
+                client.respond(result_to_response(id, res));
-                    client.respond(response);
-                }
             }
         }
     }
 }
@@ -74,10 +71,7 @@ fn calc_diff(prev: Source, next: String, encoding: PositionEncoding) -> Option<V
     let old = prev.text();
     let new = &next;
 
-    let mut prefix = old
-        .as_bytes()
-        .iter()
-        .zip(new.as_bytes())
+    let mut prefix = zip(old.bytes(), new.bytes())
         .take_while(|(x, y)| x == y)
         .count();
 
@@ -89,11 +83,7 @@ fn calc_diff(prev: Source, next: String, encoding: PositionEncoding) -> Option<V
         prefix -= 1;
     }
 
-    let mut suffix = old[prefix..]
-        .as_bytes()
-        .iter()
-        .zip(new[prefix..].as_bytes())
-        .rev()
+    let mut suffix = zip(old[prefix..].bytes().rev(), new[prefix..].bytes().rev())
         .take_while(|(x, y)| x == y)
         .count();
 
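The last hunk is the "fix: suffix computation" item from the commit title. The old code zipped the two byte slices first and then reversed the zip: `zip` pairs bytes by index from the front and truncates to the shorter input, so `rev()` only reverses those front-aligned pairs and the comparison never starts at the true last byte of the longer string. Reversing each iterator before zipping walks both strings backwards from their own ends. A minimal standalone sketch (hypothetical helper names, not the repository's code) showing the difference on inputs of unequal length:

```rust
fn common_suffix_wrong(old: &str, new: &str) -> usize {
    // Old approach: zip first, then reverse. The pairs stay aligned to the
    // front of both strings, so the scan does not begin at each string's end.
    old.as_bytes()
        .iter()
        .zip(new.as_bytes())
        .rev()
        .take_while(|(x, y)| x == y)
        .count()
}

fn common_suffix_fixed(old: &str, new: &str) -> usize {
    // Fixed approach: reverse each byte iterator first, then zip, so both
    // sides walk backwards from their own last byte.
    std::iter::zip(old.bytes().rev(), new.bytes().rev())
        .take_while(|(x, y)| x == y)
        .count()
}

fn main() {
    // Strings of different length sharing the 3-byte suffix "xyz".
    let (old, new) = ("abcxyz", "abxyz");
    assert_eq!(common_suffix_wrong(old, new), 0); // misaligned comparison
    assert_eq!(common_suffix_fixed(old, new), 3); // expected answer
}
```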
@@ -1,18 +1,18 @@
 //! Bootstrap actors for Tinymist.
 
-pub mod cluster;
+pub mod editor;
-mod formatting;
+pub mod export;
-pub mod render;
+pub mod format;
 pub mod typ_client;
 pub mod typ_server;
-mod user_action;
+pub mod user_action;
 
 use std::path::Path;
 
 use tinymist_query::analysis::Analysis;
 use tinymist_query::ExportKind;
 use tinymist_render::PeriscopeRenderer;
-use tokio::sync::{broadcast, watch};
+use tokio::sync::{mpsc, watch};
 use typst::util::Deferred;
 use typst_ts_compiler::{
     service::CompileDriverImpl,
@@ -21,8 +21,8 @@ use typst_ts_compiler::{
 use typst_ts_core::config::compiler::EntryState;
 
 use self::{
-    formatting::run_format_thread,
+    export::{ExportActor, ExportConfig},
-    render::{ExportActor, ExportConfig},
+    format::run_format_thread,
     typ_client::{CompileClientActor, CompileDriver, CompileHandler},
     typ_server::CompileServerActor,
     user_action::run_user_action_thread,
@@ -30,12 +30,9 @@ use self::{
 use crate::{
     compiler::CompileServer,
     world::{ImmutDict, LspWorld, LspWorldBuilder},
-    ExportMode, TypstLanguageServer,
+    TypstLanguageServer,
 };
 
-pub use formatting::{FormattingConfig, FormattingRequest};
-pub use user_action::{UserActionRequest, UserActionTraceRequest};
-
 type CompileDriverInner = CompileDriverImpl<LspWorld>;
 
 impl CompileServer {
@@ -47,41 +44,25 @@ impl CompileServer {
         snapshot: FileChangeSet,
     ) -> CompileClientActor {
         let (doc_tx, doc_rx) = watch::channel(None);
-        let (render_tx, _) = broadcast::channel(10);
+        let (export_tx, export_rx) = mpsc::unbounded_channel();
 
-        let config = ExportConfig {
-            substitute_pattern: self.config.output_path.clone(),
-            entry: entry.clone(),
-            mode: self.config.export_pdf,
-        };
-
         // Run Export actors before preparing cluster to avoid loss of events
         self.handle.spawn(
             ExportActor::new(
                 editor_group.clone(),
-                doc_rx.clone(),
+                doc_rx,
-                self.diag_tx.clone(),
+                self.editor_tx.clone(),
-                render_tx.subscribe(),
+                export_rx,
-                config.clone(),
+                ExportConfig {
+                    substitute_pattern: self.config.output_path.clone(),
+                    entry: entry.clone(),
+                    mode: self.config.export_pdf,
+                },
                 ExportKind::Pdf,
+                self.config.notify_compile_status,
             )
             .run(),
         );
-        if self.config.notify_compile_status {
-            let mut config = config;
-            config.mode = ExportMode::OnType;
-            self.handle.spawn(
-                ExportActor::new(
-                    editor_group.clone(),
-                    doc_rx.clone(),
-                    self.diag_tx.clone(),
-                    render_tx.subscribe(),
-                    config,
-                    ExportKind::WordCount,
-                )
-                .run(),
-            );
-        }
 
         // Create the server
         let inner = Deferred::new({
@@ -91,8 +72,8 @@ impl CompileServer {
             inner: std::sync::Arc::new(parking_lot::Mutex::new(None)),
             diag_group: editor_group.clone(),
             doc_tx,
-            render_tx: render_tx.clone(),
+            export_tx: export_tx.clone(),
-            editor_tx: self.diag_tx.clone(),
+            editor_tx: self.editor_tx.clone(),
         };
 
         let position_encoding = self.const_config().position_encoding;
@@ -138,7 +119,7 @@ impl CompileServer {
             }
         });
 
-        CompileClientActor::new(editor_group, self.config.clone(), entry, inner, render_tx)
+        CompileClientActor::new(editor_group, self.config.clone(), entry, inner, export_tx)
     }
 }
 
@@ -161,24 +142,23 @@ impl TypstLanguageServer {
         }
 
         let (tx_req, rx_req) = crossbeam_channel::unbounded();
-        self.format_thread = Some(tx_req.clone());
+        self.format_thread = Some(tx_req);
 
         let client = self.client.clone();
         let mode = self.config.formatter;
         let enc = self.const_config.position_encoding;
-        std::thread::spawn(move || {
-            run_format_thread(FormattingConfig { mode, width: 120 }, rx_req, client, enc)
-        });
+        let config = format::FormatConfig { mode, width: 120 };
+        std::thread::spawn(move || run_format_thread(config, rx_req, client, enc));
     }
 
     pub fn run_user_action_thread(&mut self) {
-        if self.user_action_threads.is_some() {
+        if self.user_action_thread.is_some() {
             log::error!("user action threads are already started");
             return;
         }
 
         let (tx_req, rx_req) = crossbeam_channel::unbounded();
-        self.user_action_threads = Some(tx_req.clone());
+        self.user_action_thread = Some(tx_req);
 
         let client = self.client.clone();
         std::thread::spawn(move || run_user_action_thread(rx_req, client));
Deleted file (349 lines):
@@ -1,349 +0,0 @@
//! The (PDF) render actor

use std::{
    path::{Path, PathBuf},
    sync::Arc,
};

use anyhow::Context;
use log::{error, info};
use once_cell::sync::Lazy;
use parking_lot::Mutex;
use tinymist_query::{ExportKind, PageSelection};
use tokio::sync::{
    broadcast::{self, error::RecvError},
    mpsc, oneshot, watch,
};
use typst::{foundations::Smart, layout::Frame};
use typst_ts_core::{config::compiler::EntryState, path::PathClean, ImmutPath, TypstDocument};

use crate::{tools::word_count, ExportMode};

use super::cluster::CompileClusterRequest;

#[derive(Debug, Clone)]
pub struct OneshotRendering {
    pub kind: Option<ExportKind>,
    // todo: bad arch...
    pub callback: Arc<Mutex<Option<oneshot::Sender<Option<PathBuf>>>>>,
}

#[derive(Debug, Clone)]
pub enum RenderActorRequest {
    OnTyped,
    Oneshot(OneshotRendering),
    OnSaved(PathBuf),
    ChangeExportPath(PathVars),
    ChangeConfig(ExportConfig),
}

#[derive(Debug, Clone)]
pub struct PathVars {
    pub entry: EntryState,
}

#[derive(Debug, Clone, Default)]
pub struct ExportConfig {
    pub substitute_pattern: String,
    pub entry: EntryState,
    pub mode: ExportMode,
}

pub struct ExportActor {
    group: String,
    editor_tx: mpsc::UnboundedSender<CompileClusterRequest>,
    render_rx: broadcast::Receiver<RenderActorRequest>,
    document: watch::Receiver<Option<Arc<TypstDocument>>>,

    pub substitute_pattern: String,
    pub entry: EntryState,
    pub mode: ExportMode,
    pub kind: ExportKind,
}

impl ExportActor {
    pub fn new(
        group: String,
        document: watch::Receiver<Option<Arc<TypstDocument>>>,
        editor_tx: mpsc::UnboundedSender<CompileClusterRequest>,
        render_rx: broadcast::Receiver<RenderActorRequest>,
        config: ExportConfig,
        kind: ExportKind,
    ) -> Self {
        Self {
            group,
            editor_tx,
            render_rx,
            document,
            substitute_pattern: config.substitute_pattern,
            entry: config.entry,
            mode: config.mode,
            kind,
        }
    }

    pub async fn run(mut self) {
        let kind = &self.kind;
        loop {
            let req = match self.render_rx.recv().await {
                Ok(req) => req,
                Err(RecvError::Closed) => {
                    info!("RenderActor(@{kind:?}): channel closed");
                    break;
                }
                Err(RecvError::Lagged(_)) => {
                    info!("RenderActor(@{kind:?}): channel lagged");
                    continue;
                }
            };

            log::debug!("RenderActor: received request: {req:?}");
            match req {
                RenderActorRequest::ChangeConfig(cfg) => {
                    self.substitute_pattern = cfg.substitute_pattern;
                    self.entry = cfg.entry;
                    self.mode = cfg.mode;
                }
                RenderActorRequest::ChangeExportPath(cfg) => {
                    self.entry = cfg.entry;
                }
                _ => {
                    let cb = match &req {
                        RenderActorRequest::Oneshot(oneshot) => Some(oneshot.callback.clone()),
                        _ => None,
                    };
                    let resp = self.check_mode_and_export(req).await;
                    if let Some(cb) = cb {
                        let Some(cb) = cb.lock().take() else {
                            error!("RenderActor(@{kind:?}): oneshot.callback is None");
                            continue;
                        };
                        if let Err(e) = cb.send(resp) {
                            error!("RenderActor(@{kind:?}): failed to send response: {e:?}");
                        }
                    }
                }
            }
        }
        info!("RenderActor(@{kind:?}): stopped");
    }

    async fn check_mode_and_export(&self, req: RenderActorRequest) -> Option<PathBuf> {
        let Some(document) = self.document.borrow().clone() else {
            info!("RenderActor: document is not ready");
            return None;
        };

        let eq_mode = match req {
            RenderActorRequest::OnTyped => ExportMode::OnType,
            RenderActorRequest::Oneshot(..) => ExportMode::OnSave,
            RenderActorRequest::OnSaved(..) => ExportMode::OnSave,
            _ => unreachable!(),
        };

        let kind = match &req {
            RenderActorRequest::Oneshot(oneshot) => oneshot.kind.as_ref(),
            _ => None,
        };
        let kind = kind.unwrap_or(&self.kind);

        // pub entry: EntryState,
        let root = self.entry.root();
        let main = self.entry.main();

        info!(
            "RenderActor: check path {:?} and root {:?} with output directory {}",
            main, root, self.substitute_pattern
        );

        let root = root?;
        let main = main?;

        // todo: package??
        if main.package().is_some() {
            return None;
        }

        let path = main.vpath().resolve(&root)?;

        let should_do = matches!(req, RenderActorRequest::Oneshot(..)) || eq_mode == self.mode || {
            let mode = self.mode;
            info!(
                "RenderActor: validating document for export mode {mode:?} title is {title}",
                title = document.title.is_some()
            );
            mode == ExportMode::OnDocumentHasTitle
                && document.title.is_some()
                && matches!(req, RenderActorRequest::OnSaved(..))
        };
        if should_do {
            return match self.export(kind, &document, &root, &path).await {
                Ok(pdf) => Some(pdf),
                Err(err) => {
                    error!("RenderActor({kind:?}): failed to export {err}");
                    None
                }
            };
        }

        None
    }

    async fn export(
        &self,
        kind: &ExportKind,
        doc: &TypstDocument,
        root: &Path,
        path: &Path,
    ) -> anyhow::Result<PathBuf> {
        let Some(to) = substitute_path(&self.substitute_pattern, root, path) else {
            return Err(anyhow::anyhow!(
                "RenderActor({kind:?}): failed to substitute path"
            ));
        };
        if to.is_relative() {
            return Err(anyhow::anyhow!(
                "RenderActor({kind:?}): path is relative: {to:?}"
            ));
        }
        if to.is_dir() {
            return Err(anyhow::anyhow!(
                "RenderActor({kind:?}): path is a directory: {to:?}"
            ));
        }

        let to = to.with_extension(kind.extension());
        info!("RenderActor({kind:?}): exporting {path:?} to {to:?}");

        if let Some(e) = to.parent() {
            if !e.exists() {
                std::fs::create_dir_all(e).with_context(|| {
                    format!("RenderActor({kind:?}): failed to create directory")
                })?;
            }
        }

        static DEFAULT_FRAME: Lazy<Frame> = Lazy::new(Frame::default);
        let data = match kind {
            ExportKind::Pdf => {
                // todo: Some(pdf_uri.as_str())
                // todo: timestamp world.now()
                typst_pdf::pdf(doc, Smart::Auto, None)
            }
            ExportKind::Svg {
                page: PageSelection::First,
            } => typst_svg::svg(
                doc.pages
                    .first()
                    .map(|f| &f.frame)
                    .unwrap_or(&*DEFAULT_FRAME),
            )
            .into_bytes(),
            ExportKind::Svg {
                page: PageSelection::Merged,
            } => typst_svg::svg_merged(doc, typst::layout::Abs::zero()).into_bytes(),
            ExportKind::Png {
                page: PageSelection::First,
            } => {
                let pixmap = typst_render::render(
                    doc.pages
                        .first()
                        .map(|f| &f.frame)
                        .unwrap_or(&*DEFAULT_FRAME),
                    3.,
                    typst::visualize::Color::WHITE,
                );
                pixmap
                    .encode_png()
                    .map_err(|err| anyhow::anyhow!("failed to encode PNG ({err})"))?
            }
            ExportKind::Png {
                page: PageSelection::Merged,
            } => {
                let pixmap = typst_render::render_merged(
                    doc,
                    3.,
                    typst::visualize::Color::WHITE,
                    typst::layout::Abs::zero(),
                    typst::visualize::Color::WHITE,
                );
                pixmap
                    .encode_png()
                    .map_err(|err| anyhow::anyhow!("failed to encode PNG ({err})"))?
            }
            ExportKind::WordCount => {
                let wc = word_count::word_count(doc);
                log::debug!("word count: {wc:?}");
                let _ = self.editor_tx.send(CompileClusterRequest::WordCount(
                    self.group.clone(),
                    Some(wc),
                ));
                return Ok(PathBuf::new());
            }
        };

        std::fs::write(&to, data)
            .with_context(|| format!("RenderActor({kind:?}): failed to export"))?;

        info!("RenderActor({kind:?}): export complete");
        Ok(to)
    }
}

[The remainder of the deleted file, the `#[comemo::memoize] fn substitute_path` helper and the `#[cfg(test)] mod tests` block, is identical to the version shown in crates/tinymist/src/actor/export.rs above.]
@@ -1,4 +1,4 @@
-//! The typst actors running compilations.
+//! The actor that runs compilations.
 //!
 //! ```ascii
 //! ┌────────────────────────────────┐
@@ -32,7 +32,7 @@ use std::{
     sync::Arc,
 };
 
-use anyhow::anyhow;
+use anyhow::{anyhow, bail};
 use log::{error, info, trace};
 use parking_lot::Mutex;
 use tinymist_query::{
@@ -40,7 +40,7 @@ use tinymist_query::{
     DiagnosticsMap, ExportKind, ServerInfoResponse, VersionedDocument,
 };
 use tinymist_render::PeriscopeRenderer;
-use tokio::sync::{broadcast, mpsc, oneshot, watch};
+use tokio::sync::{mpsc, oneshot, watch};
 use typst::{
     diag::{PackageError, SourceDiagnostic, SourceResult},
     layout::Position,
@@ -60,12 +60,12 @@ use typst_ts_core::{
 };
 
 use super::{
-    cluster::{CompileClusterRequest, TinymistCompileStatusEnum},
+    editor::{EditorRequest, TinymistCompileStatusEnum},
-    render::ExportConfig,
+    export::ExportConfig,
     typ_server::{CompileClient as TsCompileClient, CompileServerActor},
 };
 use crate::{
-    actor::render::{OneshotRendering, PathVars, RenderActorRequest},
+    actor::export::ExportRequest,
     actor::typ_server::EntryStateExt,
     compiler_init::CompileConfig,
     tools::preview::{CompilationHandle, CompileStatus},
@@ -77,7 +77,7 @@ type CompileDriverInner = CompileDriverImpl<LspWorld>;
 type CompileService = CompileServerActor<CompileDriver>;
 type CompileClient = TsCompileClient<CompileService>;
 
-type EditorSender = mpsc::UnboundedSender<CompileClusterRequest>;
+type EditorSender = mpsc::UnboundedSender<EditorRequest>;
 
 pub struct CompileHandler {
     pub(super) diag_group: String,
@@ -86,7 +86,7 @@ pub struct CompileHandler {
     pub(super) inner: Arc<Mutex<Option<typst_preview::CompilationHandleImpl>>>,
 
     pub(super) doc_tx: watch::Sender<Option<Arc<TypstDocument>>>,
-    pub(super) render_tx: broadcast::Sender<RenderActorRequest>,
+    pub(super) export_tx: mpsc::UnboundedSender<ExportRequest>,
     pub(super) editor_tx: EditorSender,
 }
 
@@ -101,12 +101,11 @@ impl CompilationHandle for CompileHandler {
     fn notify_compile(&self, res: Result<Arc<TypstDocument>, CompileStatus>) {
         if let Ok(doc) = res.clone() {
             let _ = self.doc_tx.send(Some(doc.clone()));
-            // todo: is it right that ignore zero broadcast receiver?
-            let _ = self.render_tx.send(RenderActorRequest::OnTyped);
+            let _ = self.export_tx.send(ExportRequest::OnTyped);
         }
 
         self.editor_tx
-            .send(CompileClusterRequest::Status(
+            .send(EditorRequest::Status(
                 self.diag_group.clone(),
                 if res.is_ok() {
                     TinymistCompileStatusEnum::CompileSuccess
@@ -125,10 +124,9 @@ impl CompilationHandle for CompileHandler {
 
 impl CompileHandler {
     fn push_diagnostics(&mut self, diagnostics: Option<DiagnosticsMap>) {
-        let res = self.editor_tx.send(CompileClusterRequest::Diag(
-            self.diag_group.clone(),
-            diagnostics,
-        ));
+        let res = self
+            .editor_tx
+            .send(EditorRequest::Diag(self.diag_group.clone(), diagnostics));
         if let Err(err) = res {
             error!("failed to send diagnostics: {err:#}");
         }
@@ -157,7 +155,7 @@ impl CompileMiddleware for CompileDriver {
     fn wrap_compile(&mut self, env: &mut CompileEnv) -> SourceResult<Arc<typst::model::Document>> {
         self.handler
             .editor_tx
-            .send(CompileClusterRequest::Status(
+            .send(EditorRequest::Status(
                 self.handler.diag_group.clone(),
                 TinymistCompileStatusEnum::Compiling,
             ))
@@ -217,11 +215,11 @@ impl CompileDriver {
 
         let Some(main) = w.main_id() else {
             error!("TypstActor: main file is not set");
-            return Err(anyhow!("main file is not set"));
+            bail!("main file is not set");
         };
         let Some(root) = w.entry.root() else {
             error!("TypstActor: root is not set");
-            return Err(anyhow!("root is not set"));
+            bail!("root is not set");
         };
         w.source(main).map_err(|err| {
             info!("TypstActor: failed to prepare main file: {err:?}");
@@ -277,7 +275,7 @@ pub struct CompileClientActor {
     pub config: CompileConfig,
     entry: EntryState,
     inner: Deferred<CompileClient>,
-    render_tx: broadcast::Sender<RenderActorRequest>,
+    export_tx: mpsc::UnboundedSender<ExportRequest>,
 }
 
 impl CompileClientActor {
@@ -286,14 +284,14 @@ impl CompileClientActor {
         config: CompileConfig,
         entry: EntryState,
         inner: Deferred<CompileClient>,
-        render_tx: broadcast::Sender<RenderActorRequest>,
+        export_tx: mpsc::UnboundedSender<ExportRequest>,
     ) -> Self {
         Self {
             diag_group,
             config,
             entry,
             inner,
-            render_tx,
+            export_tx,
         }
     }
 
@@ -364,8 +362,8 @@ impl CompileClientActor {
         })??;
 
         let entry = next_entry.clone();
-        let req = RenderActorRequest::ChangeExportPath(PathVars { entry });
-        self.render_tx.send(req).unwrap();
+        let req = ExportRequest::ChangeExportPath(entry);
+        let _ = self.export_tx.send(req);
 
         // todo: better way to trigger recompile
         let files = FileChangeSet::new_inserts(vec![]);
@@ -381,14 +379,13 @@ impl CompileClientActor {
     }
 
     pub(crate) fn change_export_pdf(&mut self, config: ExportConfig) {
+        let entry = self.entry.clone();
         let _ = self
-            .render_tx
+            .export_tx
-            .send(RenderActorRequest::ChangeConfig(ExportConfig {
+            .send(ExportRequest::ChangeConfig(ExportConfig {
-                substitute_pattern: config.substitute_pattern,
+                entry,
-                entry: self.entry.clone(),
+                ..config
-                mode: config.mode,
+            }));
-            }))
-            .unwrap();
     }
 
     pub fn clear_cache(&self) {
@@ -422,25 +419,16 @@ impl CompileClientActor {
         info!("CompileActor: on export: {}", path.display());
 
         let (tx, rx) = oneshot::channel();
+        let _ = self.export_tx.send(ExportRequest::Oneshot(Some(kind), tx));
-        let callback = Arc::new(Mutex::new(Some(tx)));
-        self.render_tx
-            .send(RenderActorRequest::Oneshot(OneshotRendering {
-                kind: Some(kind),
-                callback,
-            }))
-            .map_err(map_string_err("failed to send to sync_render"))?;
 
         let res: Option<PathBuf> = utils::threaded_receive(rx)?;
 
         info!("CompileActor: on export end: {path:?} as {res:?}");
 
         Ok(res)
     }
 
     pub fn on_save_export(&self, path: PathBuf) -> anyhow::Result<()> {
         info!("CompileActor: on save export: {}", path.display());
-        let _ = self.render_tx.send(RenderActorRequest::OnSaved(path));
+        let _ = self.export_tx.send(ExportRequest::OnSaved(path));
 
         Ok(())
     }
 
@@ -242,24 +242,28 @@ where
 
         // Wait for first events.
         'event_loop: while let Some(mut event) = self.steal_rx.blocking_recv() {
-            // Accumulate events, the order of processing which is critical.
             let mut need_compile = false;
 
             'accumulate: loop {
                 // Warp the logical clock by one.
                 self.logical_tick += 1;
 
+                // If settle, stop the actor.
                 if let Interrupt::Settle(e) = event {
                     log::info!("CompileServerActor: requested stop");
                     e.send(()).ok();
                     break 'event_loop;
                 }
 
+                // Ensure complied before executing tasks.
                 if matches!(event, Interrupt::Task(_)) && need_compile {
                     self.compile(&compiler_ack);
                     need_compile = false;
                 }
 
                 need_compile |= self.process(event, &compiler_ack);
 
+                // Try to accumulate more events.
                 match self.steal_rx.try_recv() {
                     Ok(new_event) => event = new_event,
                     _ => break 'accumulate,
@ -1,3 +1,5 @@
|
||||||
|
//! The actor that runs user actions.
|
||||||
|
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
use anyhow::bail;
|
use anyhow::bail;
|
||||||
|
@ -6,36 +8,33 @@ use lsp_server::RequestId;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use typst_ts_core::TypstDict;
|
use typst_ts_core::TypstDict;
|
||||||
|
|
||||||
use crate::{internal_error, result_to_response_, LspHost, TypstLanguageServer};
|
use crate::{internal_error, result_to_response, LspHost, TypstLanguageServer};
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
pub struct UserActionTraceRequest {
|
#[serde(rename_all = "camelCase")]
|
||||||
#[serde(rename = "compilerProgram")]
|
pub struct TraceParams {
|
||||||
pub compiler_program: PathBuf,
|
pub compiler_program: PathBuf,
|
||||||
pub root: PathBuf,
|
pub root: PathBuf,
|
||||||
pub main: PathBuf,
|
pub main: PathBuf,
|
||||||
pub inputs: TypstDict,
|
pub inputs: TypstDict,
|
||||||
#[serde(rename = "fontPaths")]
|
|
||||||
pub font_paths: Vec<PathBuf>,
|
pub font_paths: Vec<PathBuf>,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub enum UserActionRequest {
|
pub enum UserActionRequest {
|
||||||
Trace((RequestId, UserActionTraceRequest)),
|
Trace(RequestId, TraceParams),
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn run_user_action_thread(
|
pub fn run_user_action_thread(
|
||||||
rx_req: crossbeam_channel::Receiver<UserActionRequest>,
|
user_action_rx: crossbeam_channel::Receiver<UserActionRequest>,
|
||||||
client: LspHost<TypstLanguageServer>,
|
client: LspHost<TypstLanguageServer>,
|
||||||
) {
|
) {
|
||||||
while let Ok(req) = rx_req.recv() {
|
while let Ok(req) = user_action_rx.recv() {
|
||||||
match req {
|
match req {
|
||||||
UserActionRequest::Trace((id, req)) => {
|
UserActionRequest::Trace(id, params) => {
|
||||||
let res = run_trace_program(req)
|
let res = run_trace_program(params)
|
||||||
.map_err(|e| internal_error(format!("failed to run trace program: {:?}", e)));
|
.map_err(|e| internal_error(format!("failed to run trace program: {:?}", e)));
|
||||||
|
|
||||||
if let Ok(response) = result_to_response_(id, res) {
|
client.respond(result_to_response(id, res));
|
||||||
client.respond(response);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -44,26 +43,26 @@ pub fn run_user_action_thread(
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Run a perf trace to some typst program
|
/// Run a perf trace to some typst program
|
||||||
fn run_trace_program(req: UserActionTraceRequest) -> anyhow::Result<TraceReport> {
|
fn run_trace_program(params: TraceParams) -> anyhow::Result<TraceReport> {
|
||||||
// Typst compile root, input, font paths, inputs
|
// Typst compile root, input, font paths, inputs
|
||||||
let mut cmd = std::process::Command::new(&req.compiler_program);
|
let mut cmd = std::process::Command::new(¶ms.compiler_program);
|
||||||
let mut cmd = &mut cmd;
|
let mut cmd = &mut cmd;
|
||||||
|
|
||||||
cmd = cmd.arg("compile");
|
cmd = cmd.arg("compile");
|
||||||
|
|
||||||
cmd = cmd
|
cmd = cmd
|
||||||
.arg("--root")
|
.arg("--root")
|
||||||
.arg(req.root.as_path())
|
.arg(params.root.as_path())
|
||||||
.arg(req.main.as_path());
|
.arg(params.main.as_path());
|
||||||
|
|
||||||
// todo: test space in input?
|
// todo: test space in input?
|
||||||
for (k, v) in req.inputs.iter() {
|
for (k, v) in params.inputs.iter() {
|
||||||
let typst::foundations::Value::Str(s) = v else {
|
let typst::foundations::Value::Str(s) = v else {
|
||||||
bail!("input value must be string, got {:?} for {:?}", v, k);
|
bail!("input value must be string, got {:?} for {:?}", v, k);
|
||||||
};
|
};
|
||||||
cmd = cmd.arg(format!("--input={k}={}", s.as_str()));
|
cmd = cmd.arg(format!("--input={k}={}", s.as_str()));
|
||||||
}
|
}
|
||||||
for p in &req.font_paths {
|
for p in &params.font_paths {
|
||||||
cmd = cmd.arg(format!("--font-path={}", p.as_path().display()));
|
cmd = cmd.arg(format!("--font-path={}", p.as_path().display()));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -89,7 +88,7 @@ fn run_trace_program(req: UserActionTraceRequest) -> anyhow::Result<TraceReport>
|
||||||
let stderr = base64::engine::general_purpose::STANDARD.encode(stderr);
|
let stderr = base64::engine::general_purpose::STANDARD.encode(stderr);
|
||||||
|
|
||||||
Ok(TraceReport {
|
Ok(TraceReport {
|
||||||
request: req,
|
request: params,
|
||||||
messages,
|
messages,
|
||||||
stderr,
|
stderr,
|
||||||
})
|
})
|
||||||
|
@ -97,7 +96,7 @@ fn run_trace_program(req: UserActionTraceRequest) -> anyhow::Result<TraceReport>
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
struct TraceReport {
|
struct TraceReport {
|
||||||
request: UserActionTraceRequest,
|
request: TraceParams,
|
||||||
messages: Vec<lsp_server::Message>,
|
messages: Vec<lsp_server::Message>,
|
||||||
stderr: String,
|
stderr: String,
|
||||||
}
|
}
|
||||||
|
|
|
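The TraceParams change relies on serde's rename_all = "camelCase" container attribute instead of per-field renames; a minimal sketch of what that does to the wire format, using a made-up struct rather than the actual type:

use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct Demo {
    compiler_program: String,
    font_paths: Vec<String>,
}

fn main() {
    let demo = Demo {
        compiler_program: "typst".into(),
        font_paths: vec!["fonts".into()],
    };
    // Serializes as {"compilerProgram":"typst","fontPaths":["fonts"]}
    println!("{}", serde_json::to_string(&demo).unwrap());
}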
@ -1,17 +1,14 @@
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
use std::time::Instant;
|
use std::time::Instant;
|
||||||
|
|
||||||
use log::{info, trace, warn};
|
use anyhow::bail;
|
||||||
use lsp_types::InitializedParams;
|
use log::{error, info, trace, warn};
|
||||||
use parking_lot::RwLock;
|
|
||||||
use serde::{de::DeserializeOwned, Serialize};
|
|
||||||
|
|
||||||
use lsp_server::{Connection, Message, Response};
|
use lsp_server::{Connection, Message, Response};
|
||||||
|
|
||||||
use lsp_types::notification::PublishDiagnostics;
|
use lsp_types::notification::PublishDiagnostics;
|
||||||
use lsp_types::request::{RegisterCapability, UnregisterCapability};
|
use lsp_types::request::{RegisterCapability, UnregisterCapability};
|
||||||
use lsp_types::*;
|
use lsp_types::*;
|
||||||
use parking_lot::Mutex;
|
use parking_lot::{Mutex, RwLock};
|
||||||
|
use serde::{de::DeserializeOwned, Serialize};
|
||||||
|
|
||||||
// Enforces drop order
|
// Enforces drop order
|
||||||
pub struct Handle<H, C> {
|
pub struct Handle<H, C> {
|
||||||
|
@ -55,7 +52,7 @@ impl<S> LspHost<S> {
|
||||||
let mut req_queue = self.req_queue.lock();
|
let mut req_queue = self.req_queue.lock();
|
||||||
let sender = self.sender.read();
|
let sender = self.sender.read();
|
||||||
let Some(sender) = sender.as_ref() else {
|
let Some(sender) = sender.as_ref() else {
|
||||||
warn!("closed connection, failed to send request");
|
warn!("failed to send request: connection closed");
|
||||||
return;
|
return;
|
||||||
};
|
};
|
||||||
let request = req_queue
|
let request = req_queue
|
||||||
|
@ -82,7 +79,7 @@ impl<S> LspHost<S> {
|
||||||
|
|
||||||
let sender = self.sender.read();
|
let sender = self.sender.read();
|
||||||
let Some(sender) = sender.as_ref() else {
|
let Some(sender) = sender.as_ref() else {
|
||||||
warn!("closed connection, failed to send request");
|
warn!("failed to send notification: connection closed");
|
||||||
return;
|
return;
|
||||||
};
|
};
|
||||||
let Err(res) = sender.send(not.into()) else {
|
let Err(res) = sender.send(not.into()) else {
|
||||||
|
@ -102,12 +99,13 @@ impl<S> LspHost<S> {
|
||||||
(request.method.clone(), request_received),
|
(request.method.clone(), request_received),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn respond(&self, response: lsp_server::Response) {
|
pub fn respond(&self, response: lsp_server::Response) {
|
||||||
let mut req_queue = self.req_queue.lock();
|
let mut req_queue = self.req_queue.lock();
|
||||||
if let Some((method, start)) = req_queue.incoming.complete(response.id.clone()) {
|
if let Some((method, start)) = req_queue.incoming.complete(response.id.clone()) {
|
||||||
let sender = self.sender.read();
|
let sender = self.sender.read();
|
||||||
let Some(sender) = sender.as_ref() else {
|
let Some(sender) = sender.as_ref() else {
|
||||||
warn!("closed connection, failed to send request");
|
warn!("failed to send response: connection closed");
|
||||||
return;
|
return;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -146,7 +144,7 @@ impl<S> LspHost<S> {
|
||||||
pub fn register_capability(&self, registrations: Vec<Registration>) -> anyhow::Result<()> {
|
pub fn register_capability(&self, registrations: Vec<Registration>) -> anyhow::Result<()> {
|
||||||
self.send_request::<RegisterCapability>(RegistrationParams { registrations }, |_, resp| {
|
self.send_request::<RegisterCapability>(RegistrationParams { registrations }, |_, resp| {
|
||||||
if let Some(err) = resp.error {
|
if let Some(err) = resp.error {
|
||||||
log::error!("failed to register capability: {err:?}");
|
error!("failed to register capability: {err:?}");
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -160,7 +158,7 @@ impl<S> LspHost<S> {
|
||||||
UnregistrationParams { unregisterations },
|
UnregistrationParams { unregisterations },
|
||||||
|_, resp| {
|
|_, resp| {
|
||||||
if let Some(err) = resp.error {
|
if let Some(err) = resp.error {
|
||||||
log::error!("failed to unregister capability: {err:?}");
|
error!("failed to unregister capability: {err:?}");
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
@ -198,7 +196,7 @@ pub fn lsp_harness<D: LspDriver>(
|
||||||
let (initialize_id, initialize_params) = match connection.initialize_start() {
|
let (initialize_id, initialize_params) = match connection.initialize_start() {
|
||||||
Ok(it) => it,
|
Ok(it) => it,
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
log::error!("failed to initialize: {e}");
|
error!("failed to initialize: {e}");
|
||||||
*force_exit = !e.channel_is_disconnected();
|
*force_exit = !e.channel_is_disconnected();
|
||||||
return Err(e.into());
|
return Err(e.into());
|
||||||
}
|
}
|
||||||
|
@ -208,7 +206,7 @@ pub fn lsp_harness<D: LspDriver>(
|
||||||
let sender = Arc::new(RwLock::new(Some(connection.sender)));
|
let sender = Arc::new(RwLock::new(Some(connection.sender)));
|
||||||
let host = LspHost::new(sender.clone());
|
let host = LspHost::new(sender.clone());
|
||||||
|
|
||||||
let _drop_connection = ForceDrop(sender);
|
let _drop_guard = ForceDrop(sender);
|
||||||
|
|
||||||
let req = lsp_server::Request::new(initialize_id, "initialize".to_owned(), initialize_params);
|
let req = lsp_server::Request::new(initialize_id, "initialize".to_owned(), initialize_params);
|
||||||
host.register_request(&req, request_received);
|
host.register_request(&req, request_received);
|
||||||
|
@ -234,15 +232,13 @@ pub fn lsp_harness<D: LspDriver>(
|
||||||
r#"expected initialized notification, got: {msg:?}"#
|
r#"expected initialized notification, got: {msg:?}"#
|
||||||
))),
|
))),
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
log::error!("failed to receive initialized notification: {e}");
|
error!("failed to receive initialized notification: {e}");
|
||||||
Err(ProtocolError::disconnected())
|
Err(ProtocolError::disconnected())
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
if let Err(e) = initialized_ack {
|
if let Err(e) = initialized_ack {
|
||||||
*force_exit = !e.channel_is_disconnected();
|
*force_exit = !e.channel_is_disconnected();
|
||||||
return Err(anyhow::anyhow!(
|
bail!("failed to receive initialized notification: {e:?}");
|
||||||
"failed to receive initialized notification: {e:?}"
|
|
||||||
));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
service.initialized(InitializedParams {});
|
service.initialized(InitializedParams {});
|
||||||
|
@ -270,7 +266,7 @@ impl ProtocolError {
|
||||||
struct ForceDrop<T>(Arc<RwLock<Option<T>>>);
|
struct ForceDrop<T>(Arc<RwLock<Option<T>>>);
|
||||||
impl<T> Drop for ForceDrop<T> {
|
impl<T> Drop for ForceDrop<T> {
|
||||||
fn drop(&mut self) {
|
fn drop(&mut self) {
|
||||||
self.0.write().take();
|
*self.0.write() = None;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -279,5 +275,5 @@ pub fn from_json<T: DeserializeOwned>(
|
||||||
json: &serde_json::Value,
|
json: &serde_json::Value,
|
||||||
) -> anyhow::Result<T> {
|
) -> anyhow::Result<T> {
|
||||||
serde_json::from_value(json.clone())
|
serde_json::from_value(json.clone())
|
||||||
.map_err(|e| anyhow::format_err!("Failed to deserialize {what}: {e}; {json}"))
|
.map_err(|e| anyhow::anyhow!("Failed to deserialize {what}: {e}; {json}"))
|
||||||
}
|
}
|
||||||
|
|
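The ForceDrop guard above clears the shared sender slot when it leaves scope, which is what enforces the drop order of the connection. A small standalone sketch of the same drop-guard idea, using std::sync::RwLock here instead of parking_lot to stay dependency-free:

use std::sync::{Arc, RwLock};

/// Clears the shared slot when dropped, so anything still holding the Arc
/// observes the connection as closed.
struct ForceDrop<T>(Arc<RwLock<Option<T>>>);

impl<T> Drop for ForceDrop<T> {
    fn drop(&mut self) {
        *self.0.write().unwrap() = None;
    }
}

fn main() {
    let slot = Arc::new(RwLock::new(Some("sender")));
    {
        let _guard = ForceDrop(slot.clone());
        assert!(slot.read().unwrap().is_some());
    } // guard dropped here
    assert!(slot.read().unwrap().is_none());
    println!("slot cleared on drop");
}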
|
@ -4,24 +4,24 @@ mod args;
|
||||||
|
|
||||||
use std::{path::PathBuf, sync::Arc};
|
use std::{path::PathBuf, sync::Arc};
|
||||||
|
|
||||||
use args::CompileArgs;
|
use anyhow::bail;
|
||||||
use clap::Parser;
|
use clap::Parser;
|
||||||
use comemo::Prehashed;
|
use comemo::Prehashed;
|
||||||
use lsp_types::{InitializeParams, InitializedParams};
|
use lsp_types::{InitializeParams, InitializedParams};
|
||||||
use once_cell::sync::Lazy;
|
use once_cell::sync::Lazy;
|
||||||
use parking_lot::RwLock;
|
use parking_lot::RwLock;
|
||||||
|
use tokio::sync::mpsc;
|
||||||
|
use typst::{eval::Tracer, foundations::IntoValue, syntax::Span};
|
||||||
|
use typst_ts_compiler::service::{CompileEnv, Compiler, EntryManager};
|
||||||
|
use typst_ts_core::{typst::prelude::EcoVec, TypstDict};
|
||||||
|
|
||||||
|
use crate::args::{CliArguments, Commands, CompileArgs, LspArgs};
|
||||||
use tinymist::{
|
use tinymist::{
|
||||||
compiler_init::{CompileInit, CompileInitializeParams},
|
compiler_init::{CompileInit, CompileInitializeParams},
|
||||||
harness::{lsp_harness, InitializedLspDriver, LspDriver, LspHost},
|
harness::{lsp_harness, InitializedLspDriver, LspDriver, LspHost},
|
||||||
transport::with_stdio_transport,
|
transport::with_stdio_transport,
|
||||||
CompileFontOpts, Init, LspWorld, TypstLanguageServer,
|
CompileFontOpts, Init, LspWorld, TypstLanguageServer,
|
||||||
};
|
};
|
||||||
use tokio::sync::mpsc;
|
|
||||||
use typst::{eval::Tracer, foundations::IntoValue, syntax::Span};
|
|
||||||
use typst_ts_compiler::service::{CompileEnv, Compiler, EntryManager};
|
|
||||||
use typst_ts_core::{typst::prelude::EcoVec, TypstDict};
|
|
||||||
|
|
||||||
use crate::args::{CliArguments, Commands, LspArgs};
|
|
||||||
|
|
||||||
#[cfg(feature = "dhat-heap")]
|
#[cfg(feature = "dhat-heap")]
|
||||||
#[global_allocator]
|
#[global_allocator]
|
||||||
|
@ -114,7 +114,7 @@ pub fn lsp_main(args: LspArgs) -> anyhow::Result<()> {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn compiler_main(args: CompileArgs) -> anyhow::Result<()> {
|
pub fn compiler_main(args: CompileArgs) -> anyhow::Result<()> {
|
||||||
let (diag_tx, _diag_rx) = mpsc::unbounded_channel();
|
let (editor_tx, _editor_rx) = mpsc::unbounded_channel();
|
||||||
|
|
||||||
let mut input = PathBuf::from(args.compile.input.unwrap());
|
let mut input = PathBuf::from(args.compile.input.unwrap());
|
||||||
|
|
||||||
|
@ -127,9 +127,7 @@ pub fn compiler_main(args: CompileArgs) -> anyhow::Result<()> {
|
||||||
input = std::env::current_dir()?.join(input);
|
input = std::env::current_dir()?.join(input);
|
||||||
}
|
}
|
||||||
if !input.starts_with(&root_path) {
|
if !input.starts_with(&root_path) {
|
||||||
return Err(anyhow::anyhow!(
|
bail!("input file is not within the root path: {input:?} not in {root_path:?}");
|
||||||
"input file is not within the root path: {input:?} not in {root_path:?}"
|
|
||||||
));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let inputs = Arc::new(Prehashed::new(if args.compile.inputs.is_empty() {
|
let inputs = Arc::new(Prehashed::new(if args.compile.inputs.is_empty() {
|
||||||
|
@ -147,7 +145,7 @@ pub fn compiler_main(args: CompileArgs) -> anyhow::Result<()> {
|
||||||
no_system_fonts: args.compile.font.no_system_fonts,
|
no_system_fonts: args.compile.font.no_system_fonts,
|
||||||
..Default::default()
|
..Default::default()
|
||||||
},
|
},
|
||||||
diag_tx,
|
editor_tx,
|
||||||
};
|
};
|
||||||
if args.persist {
|
if args.persist {
|
||||||
log::info!("starting compile server");
|
log::info!("starting compile server");
|
||||||
|
@ -163,7 +161,7 @@ pub fn compiler_main(args: CompileArgs) -> anyhow::Result<()> {
|
||||||
let sender = Arc::new(RwLock::new(Some(s)));
|
let sender = Arc::new(RwLock::new(Some(s)));
|
||||||
let host = LspHost::new(sender.clone());
|
let host = LspHost::new(sender.clone());
|
||||||
|
|
||||||
let _drop_connection = ForceDrop(sender);
|
let _drop_guard = ForceDrop(sender);
|
||||||
|
|
||||||
let (mut service, res) = init.initialize(
|
let (mut service, res) = init.initialize(
|
||||||
host,
|
host,
|
||||||
|
@ -252,9 +250,10 @@ pub fn compiler_main(args: CompileArgs) -> anyhow::Result<()> {
|
||||||
}
|
}
|
||||||
|
|
||||||
struct ForceDrop<T>(Arc<RwLock<Option<T>>>);
|
struct ForceDrop<T>(Arc<RwLock<Option<T>>>);
|
||||||
|
|
||||||
impl<T> Drop for ForceDrop<T> {
|
impl<T> Drop for ForceDrop<T> {
|
||||||
fn drop(&mut self) {
|
fn drop(&mut self) {
|
||||||
self.0.write().take();
|
*self.0.write() = None;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
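The compiler_main change above is one instance of the commit-wide switch from return Err(anyhow::anyhow!(..)) to the bail! macro; a minimal sketch of the two equivalent forms, assuming only the anyhow crate:

use anyhow::{anyhow, bail, Result};

fn check_verbose(input: &str) -> Result<()> {
    if input.is_empty() {
        return Err(anyhow!("input must not be empty"));
    }
    Ok(())
}

// bail!(..) expands to the same early return with less noise.
fn check_concise(input: &str) -> Result<()> {
    if input.is_empty() {
        bail!("input must not be empty");
    }
    Ok(())
}

fn main() {
    assert!(check_verbose("").is_err());
    assert!(check_concise("").is_err());
    println!("both forms reject empty input");
}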
|
@ -1,11 +1,10 @@
|
||||||
pub use super::prelude::*;
|
pub use super::prelude::*;
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize)]
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
struct ResourceSymbolResponse {
|
struct ResourceSymbolResponse {
|
||||||
symbols: HashMap<String, ResourceSymbolItem>,
|
symbols: HashMap<String, ResourceSymbolItem>,
|
||||||
#[serde(rename = "fontSelects")]
|
|
||||||
font_selects: Vec<FontItem>,
|
font_selects: Vec<FontItem>,
|
||||||
#[serde(rename = "glyphDefs")]
|
|
||||||
glyph_defs: String,
|
glyph_defs: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -17,43 +16,34 @@ struct ResourceSymbolItem {
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
enum SymCategory {
|
enum SymCategory {
|
||||||
#[serde(rename = "accent")]
|
|
||||||
Accent,
|
Accent,
|
||||||
#[serde(rename = "greek")]
|
|
||||||
Greek,
|
Greek,
|
||||||
#[serde(rename = "misc")]
|
|
||||||
Misc,
|
Misc,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize)]
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
struct ResourceGlyphDesc {
|
struct ResourceGlyphDesc {
|
||||||
#[serde(rename = "fontIndex")]
|
|
||||||
font_index: u32,
|
font_index: u32,
|
||||||
#[serde(rename = "xAdvance")]
|
|
||||||
x_advance: Option<u16>,
|
x_advance: Option<u16>,
|
||||||
#[serde(rename = "yAdvance")]
|
|
||||||
y_advance: Option<u16>,
|
y_advance: Option<u16>,
|
||||||
#[serde(rename = "xMin")]
|
|
||||||
x_min: Option<i16>,
|
x_min: Option<i16>,
|
||||||
#[serde(rename = "xMax")]
|
|
||||||
x_max: Option<i16>,
|
x_max: Option<i16>,
|
||||||
#[serde(rename = "yMin")]
|
|
||||||
y_min: Option<i16>,
|
y_min: Option<i16>,
|
||||||
#[serde(rename = "yMax")]
|
|
||||||
y_max: Option<i16>,
|
y_max: Option<i16>,
|
||||||
name: Option<String>,
|
name: Option<String>,
|
||||||
shape: Option<String>,
|
shape: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Serialize, Deserialize)]
|
#[derive(Debug, Serialize, Deserialize)]
|
||||||
|
#[serde(rename_all = "camelCase")]
|
||||||
struct FontItem {
|
struct FontItem {
|
||||||
family: String,
|
family: String,
|
||||||
#[serde(rename = "capHeight")]
|
|
||||||
cap_height: f32,
|
cap_height: f32,
|
||||||
ascender: f32,
|
ascender: f32,
|
||||||
descender: f32,
|
descender: f32,
|
||||||
#[serde(rename = "unitsPerEm")]
|
|
||||||
units_per_em: f32,
|
units_per_em: f32,
|
||||||
// vertical: bool,
|
// vertical: bool,
|
||||||
}
|
}
|
||||||
|
|
|
@ -3,9 +3,8 @@ use std::{collections::HashMap, path::Path, sync::Arc, time::Instant};
|
||||||
|
|
||||||
use crossbeam_channel::{select, Receiver};
|
use crossbeam_channel::{select, Receiver};
|
||||||
use log::{error, info, warn};
|
use log::{error, info, warn};
|
||||||
use lsp_server::{Notification, Request, ResponseError};
|
use lsp_server::{ErrorCode, Message, Notification, Request, RequestId, Response, ResponseError};
|
||||||
use lsp_types::{notification::Notification as _, ExecuteCommandParams};
|
use lsp_types::{notification::Notification as _, ExecuteCommandParams};
|
||||||
use paste::paste;
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use serde_json::{Map, Value as JsonValue};
|
use serde_json::{Map, Value as JsonValue};
|
||||||
use tinymist_query::{ExportKind, PageSelection};
|
use tinymist_query::{ExportKind, PageSelection};
|
||||||
|
@ -15,7 +14,7 @@ use typst_ts_compiler::vfs::notify::FileChangeSet;
|
||||||
use typst_ts_core::{config::compiler::DETACHED_ENTRY, ImmutPath};
|
use typst_ts_core::{config::compiler::DETACHED_ENTRY, ImmutPath};
|
||||||
|
|
||||||
use crate::{
|
use crate::{
|
||||||
actor::{cluster::CompileClusterRequest, render::ExportConfig, typ_client::CompileClientActor},
|
actor::{editor::EditorRequest, export::ExportConfig, typ_client::CompileClientActor},
|
||||||
compiler_init::{CompileConfig, CompilerConstConfig},
|
compiler_init::{CompileConfig, CompilerConstConfig},
|
||||||
harness::InitializedLspDriver,
|
harness::InitializedLspDriver,
|
||||||
internal_error, invalid_params, method_not_found, run_query,
|
internal_error, invalid_params, method_not_found, run_query,
|
||||||
|
@ -46,9 +45,7 @@ macro_rules! request_fn {
|
||||||
const E: LspMethod<JsonValue> = |this, req| {
|
const E: LspMethod<JsonValue> = |this, req| {
|
||||||
let req: <$desc as lsp_types::request::Request>::Params =
|
let req: <$desc as lsp_types::request::Request>::Params =
|
||||||
serde_json::from_value(req).unwrap(); // todo: soft unwrap
|
serde_json::from_value(req).unwrap(); // todo: soft unwrap
|
||||||
let res = this.$method(req)?;
|
this.$method(req)
|
||||||
let res = serde_json::to_value(res).unwrap(); // todo: soft unwrap
|
|
||||||
Ok(res)
|
|
||||||
};
|
};
|
||||||
E
|
E
|
||||||
})
|
})
|
||||||
|
@ -69,15 +66,6 @@ macro_rules! notify_fn {
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct CompileServerArgs {
|
|
||||||
pub client: LspHost<CompileServer>,
|
|
||||||
pub compile_config: CompileConfig,
|
|
||||||
pub const_config: CompilerConstConfig,
|
|
||||||
pub diag_tx: mpsc::UnboundedSender<CompileClusterRequest>,
|
|
||||||
pub font: Deferred<SharedFontResolver>,
|
|
||||||
pub handle: tokio::runtime::Handle,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The object providing the language server functionality.
|
/// The object providing the language server functionality.
|
||||||
pub struct CompileServer {
|
pub struct CompileServer {
|
||||||
/// The language server client.
|
/// The language server client.
|
||||||
|
@ -110,25 +98,23 @@ pub struct CompileServer {
|
||||||
/// Source synchronized with client
|
/// Source synchronized with client
|
||||||
pub memory_changes: HashMap<Arc<Path>, MemoryFileMeta>,
|
pub memory_changes: HashMap<Arc<Path>, MemoryFileMeta>,
|
||||||
/// The diagnostics sender to send diagnostics to `crate::actor::cluster`.
|
/// The diagnostics sender to send diagnostics to `crate::actor::cluster`.
|
||||||
pub diag_tx: mpsc::UnboundedSender<CompileClusterRequest>,
|
pub editor_tx: mpsc::UnboundedSender<EditorRequest>,
|
||||||
/// The compiler actor.
|
/// The compiler actor.
|
||||||
pub compiler: Option<CompileClientActor>,
|
pub compiler: Option<CompileClientActor>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl CompileServer {
|
impl CompileServer {
|
||||||
pub fn new(args: CompileServerArgs) -> Self {
|
pub fn new(
|
||||||
let CompileServerArgs {
|
client: LspHost<CompileServer>,
|
||||||
client,
|
compile_config: CompileConfig,
|
||||||
compile_config,
|
const_config: CompilerConstConfig,
|
||||||
const_config,
|
editor_tx: mpsc::UnboundedSender<EditorRequest>,
|
||||||
diag_tx,
|
font: Deferred<SharedFontResolver>,
|
||||||
font,
|
handle: tokio::runtime::Handle,
|
||||||
handle,
|
) -> Self {
|
||||||
} = args;
|
|
||||||
|
|
||||||
CompileServer {
|
CompileServer {
|
||||||
client,
|
client,
|
||||||
diag_tx,
|
editor_tx,
|
||||||
shutdown_requested: false,
|
shutdown_requested: false,
|
||||||
config: compile_config,
|
config: compile_config,
|
||||||
const_config,
|
const_config,
|
||||||
|
@ -210,7 +196,7 @@ impl CompileServer {
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
enum Event {
|
enum Event {
|
||||||
Lsp(lsp_server::Message),
|
Lsp(Message),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl fmt::Display for Event {
|
impl fmt::Display for Event {
|
||||||
|
@ -224,14 +210,11 @@ impl fmt::Display for Event {
|
||||||
impl InitializedLspDriver for CompileServer {
|
impl InitializedLspDriver for CompileServer {
|
||||||
fn initialized(&mut self, _params: lsp_types::InitializedParams) {}
|
fn initialized(&mut self, _params: lsp_types::InitializedParams) {}
|
||||||
|
|
||||||
fn main_loop(
|
fn main_loop(&mut self, inbox: crossbeam_channel::Receiver<Message>) -> anyhow::Result<()> {
|
||||||
&mut self,
|
|
||||||
inbox: crossbeam_channel::Receiver<lsp_server::Message>,
|
|
||||||
) -> anyhow::Result<()> {
|
|
||||||
while let Some(event) = self.next_event(&inbox) {
|
while let Some(event) = self.next_event(&inbox) {
|
||||||
if matches!(
|
if matches!(
|
||||||
&event,
|
&event,
|
||||||
Event::Lsp(lsp_server::Message::Notification(Notification { method, .. }))
|
Event::Lsp(Message::Notification(Notification { method, .. }))
|
||||||
if method == lsp_types::notification::Exit::METHOD
|
if method == lsp_types::notification::Exit::METHOD
|
||||||
) {
|
) {
|
||||||
return Ok(());
|
return Ok(());
|
||||||
|
@ -245,7 +228,7 @@ impl InitializedLspDriver for CompileServer {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl CompileServer {
|
impl CompileServer {
|
||||||
fn next_event(&self, inbox: &Receiver<lsp_server::Message>) -> Option<Event> {
|
fn next_event(&self, inbox: &Receiver<Message>) -> Option<Event> {
|
||||||
select! {
|
select! {
|
||||||
recv(inbox) -> msg =>
|
recv(inbox) -> msg =>
|
||||||
msg.ok().map(Event::Lsp),
|
msg.ok().map(Event::Lsp),
|
||||||
|
@ -258,11 +241,9 @@ impl CompileServer {
|
||||||
// let was_quiescent = self.is_quiescent();
|
// let was_quiescent = self.is_quiescent();
|
||||||
match event {
|
match event {
|
||||||
Event::Lsp(msg) => match msg {
|
Event::Lsp(msg) => match msg {
|
||||||
lsp_server::Message::Request(req) => self.on_new_request(loop_start, req),
|
Message::Request(req) => self.on_request(loop_start, req),
|
||||||
lsp_server::Message::Notification(not) => self.on_notification(loop_start, not)?,
|
Message::Notification(not) => self.on_notification(loop_start, not)?,
|
||||||
lsp_server::Message::Response(resp) => {
|
Message::Response(resp) => self.client.clone().complete_request(self, resp),
|
||||||
self.client.clone().complete_request(self, resp)
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
}
|
}
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -270,17 +251,13 @@ impl CompileServer {
|
||||||
|
|
||||||
/// Registers and handles a request. This should only be called once per
|
/// Registers and handles a request. This should only be called once per
|
||||||
/// incoming request.
|
/// incoming request.
|
||||||
fn on_new_request(&mut self, request_received: Instant, req: Request) {
|
fn on_request(&mut self, request_received: Instant, req: Request) {
|
||||||
self.client.register_request(&req, request_received);
|
self.client.register_request(&req, request_received);
|
||||||
self.on_request(req);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Handles a request.
|
|
||||||
fn on_request(&mut self, req: Request) {
|
|
||||||
if self.shutdown_requested {
|
if self.shutdown_requested {
|
||||||
self.client.respond(lsp_server::Response::new_err(
|
self.client.respond(Response::new_err(
|
||||||
req.id.clone(),
|
req.id.clone(),
|
||||||
lsp_server::ErrorCode::InvalidRequest as i32,
|
ErrorCode::InvalidRequest as i32,
|
||||||
"Shutdown already requested.".to_owned(),
|
"Shutdown already requested.".to_owned(),
|
||||||
));
|
));
|
||||||
return;
|
return;
|
||||||
|
@ -298,12 +275,12 @@ impl CompileServer {
|
||||||
}
|
}
|
||||||
|
|
||||||
fn result_to_response(
|
fn result_to_response(
|
||||||
id: lsp_server::RequestId,
|
id: RequestId,
|
||||||
result: Result<JsonValue, ResponseError>,
|
result: Result<JsonValue, ResponseError>,
|
||||||
) -> Result<lsp_server::Response, Cancelled> {
|
) -> Result<Response, Cancelled> {
|
||||||
let res = match result {
|
let res = match result {
|
||||||
Ok(resp) => lsp_server::Response::new_ok(id, resp),
|
Ok(resp) => Response::new_ok(id, resp),
|
||||||
Err(e) => lsp_server::Response::new_err(id, e.code, e.message),
|
Err(e) => Response::new_err(id, e.code, e.message),
|
||||||
};
|
};
|
||||||
Ok(res)
|
Ok(res)
|
||||||
}
|
}
|
||||||
|
@ -404,7 +381,7 @@ impl CompileServer {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The entry point for the `workspace/executeCommand` request.
|
/// The entry point for the `workspace/executeCommand` request.
|
||||||
fn execute_command(&mut self, params: ExecuteCommandParams) -> LspResult<Option<JsonValue>> {
|
fn execute_command(&mut self, params: ExecuteCommandParams) -> LspResult<JsonValue> {
|
||||||
let ExecuteCommandParams {
|
let ExecuteCommandParams {
|
||||||
command,
|
command,
|
||||||
arguments,
|
arguments,
|
||||||
|
@ -414,8 +391,7 @@ impl CompileServer {
|
||||||
error!("asked to execute unknown command");
|
error!("asked to execute unknown command");
|
||||||
return Err(method_not_found());
|
return Err(method_not_found());
|
||||||
};
|
};
|
||||||
|
handler(self, arguments)
|
||||||
Ok(Some(handler(self, arguments)?))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Export the current document as a PDF file.
|
/// Export the current document as a PDF file.
|
||||||
|
|
|
@ -12,15 +12,15 @@ use tinymist_query::PositionEncoding;
|
||||||
use tinymist_render::PeriscopeArgs;
|
use tinymist_render::PeriscopeArgs;
|
||||||
use tokio::sync::mpsc;
|
use tokio::sync::mpsc;
|
||||||
use typst::foundations::IntoValue;
|
use typst::foundations::IntoValue;
|
||||||
use typst::syntax::FileId;
|
use typst::syntax::{FileId, VirtualPath};
|
||||||
use typst::syntax::VirtualPath;
|
|
||||||
use typst::util::Deferred;
|
use typst::util::Deferred;
|
||||||
use typst_ts_core::config::compiler::EntryState;
|
use typst_ts_core::config::compiler::EntryState;
|
||||||
use typst_ts_core::{ImmutPath, TypstDict};
|
use typst_ts_core::{ImmutPath, TypstDict};
|
||||||
|
|
||||||
use crate::actor::cluster::CompileClusterRequest;
|
use crate::actor::editor::EditorRequest;
|
||||||
use crate::compiler::{CompileServer, CompileServerArgs};
|
use crate::compiler::CompileServer;
|
||||||
use crate::harness::LspDriver;
|
use crate::harness::LspDriver;
|
||||||
|
use crate::utils::{try_, try_or_default};
|
||||||
use crate::world::{ImmutDict, SharedFontResolver};
|
use crate::world::{ImmutDict, SharedFontResolver};
|
||||||
use crate::{CompileExtraOpts, CompileFontOpts, ExportMode, LspHost};
|
use crate::{CompileExtraOpts, CompileFontOpts, ExportMode, LspHost};
|
||||||
|
|
||||||
|
@ -124,68 +124,29 @@ impl CompileConfig {
|
||||||
/// # Errors
|
/// # Errors
|
||||||
/// Errors if the update is invalid.
|
/// Errors if the update is invalid.
|
||||||
pub fn update_by_map(&mut self, update: &Map<String, JsonValue>) -> anyhow::Result<()> {
|
pub fn update_by_map(&mut self, update: &Map<String, JsonValue>) -> anyhow::Result<()> {
|
||||||
if let Some(JsonValue::String(output_path)) = update.get("outputPath") {
|
self.output_path = try_or_default(|| Some(update.get("outputPath")?.as_str()?.to_owned()));
|
||||||
output_path.clone_into(&mut self.output_path);
|
self.export_pdf = try_or_default(|| ExportMode::deserialize(update.get("exportPdf")?).ok());
|
||||||
} else {
|
self.root_path = try_(|| Some(update.get("rootPath")?.as_str()?.into()));
|
||||||
self.output_path = String::new();
|
self.notify_compile_status = match try_(|| update.get("compileStatus")?.as_str()) {
|
||||||
}
|
Some("enable") => true,
|
||||||
|
Some("disable") | None => false,
|
||||||
let export_pdf = update
|
_ => bail!("compileStatus must be either 'enable' or 'disable'"),
|
||||||
.get("exportPdf")
|
};
|
||||||
.map(ExportMode::deserialize)
|
self.preferred_theme = try_(|| Some(update.get("preferredTheme")?.as_str()?.to_owned()));
|
||||||
.and_then(Result::ok);
|
|
||||||
if let Some(export_pdf) = export_pdf {
|
|
||||||
self.export_pdf = export_pdf;
|
|
||||||
} else {
|
|
||||||
self.export_pdf = ExportMode::default();
|
|
||||||
}
|
|
||||||
|
|
||||||
let root_path = update.get("rootPath");
|
|
||||||
if let Some(root_path) = root_path {
|
|
||||||
if root_path.is_null() {
|
|
||||||
self.root_path = None;
|
|
||||||
}
|
|
||||||
if let Some(root_path) = root_path.as_str().map(PathBuf::from) {
|
|
||||||
self.root_path = Some(root_path);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
self.root_path = None;
|
|
||||||
}
|
|
||||||
|
|
||||||
let compile_status = update.get("compileStatus").and_then(|x| x.as_str());
|
|
||||||
if let Some(word_count) = compile_status {
|
|
||||||
if !matches!(word_count, "enable" | "disable") {
|
|
||||||
bail!("compileStatus must be either 'enable' or 'disable'");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
self.notify_compile_status = compile_status.map_or(false, |e| e != "disable");
|
|
||||||
|
|
||||||
let preferred_theme = update.get("preferredTheme").and_then(|x| x.as_str());
|
|
||||||
self.preferred_theme = preferred_theme.map(str::to_owned);
|
|
||||||
|
|
||||||
// periscope_args
|
// periscope_args
|
||||||
let periscope_args = update.get("hoverPeriscope");
|
self.periscope_args = match update.get("hoverPeriscope") {
|
||||||
let periscope_args: Option<PeriscopeArgs> = match periscope_args {
|
|
||||||
Some(serde_json::Value::String(e)) if e == "enable" => Some(PeriscopeArgs::default()),
|
Some(serde_json::Value::String(e)) if e == "enable" => Some(PeriscopeArgs::default()),
|
||||||
Some(serde_json::Value::Null | serde_json::Value::String(..)) | None => None,
|
Some(serde_json::Value::Null | serde_json::Value::String(..)) | None => None,
|
||||||
Some(periscope_args) => match serde_json::from_value(periscope_args.clone()) {
|
Some(periscope_args) => match serde_json::from_value(periscope_args.clone()) {
|
||||||
Ok(e) => Some(e),
|
Ok(e) => Some(e),
|
||||||
Err(e) => {
|
Err(e) => bail!("failed to parse hoverPeriscope: {e}"),
|
||||||
log::error!("failed to parse hoverPeriscope: {e}");
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
if let Some(mut periscope_args) = periscope_args {
|
if let Some(args) = self.periscope_args.as_mut() {
|
||||||
if periscope_args.invert_color == "auto"
|
if args.invert_color == "auto" && self.preferred_theme.as_deref() == Some("dark") {
|
||||||
&& self.preferred_theme.as_ref().is_some_and(|t| t == "dark")
|
args.invert_color = "always".to_owned();
|
||||||
{
|
|
||||||
"always".clone_into(&mut periscope_args.invert_color);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
self.periscope_args = Some(periscope_args);
|
|
||||||
} else {
|
|
||||||
self.periscope_args = None;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
'parse_extra_args: {
|
'parse_extra_args: {
|
||||||
|
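The rewritten update_by_map leans on small try_ and try_or_default helpers to mimic try blocks over Option chains; the real helpers come from crate::utils, so the versions below are illustrative re-implementations that only show the shape:

use serde_json::{json, Value};

// Evaluate an Option-returning closure, mimicking an inline try block.
fn try_<T>(f: impl FnOnce() -> Option<T>) -> Option<T> {
    f()
}

// Same, but fall back to the default value when any step yields None.
fn try_or_default<T: Default>(f: impl FnOnce() -> Option<T>) -> T {
    f().unwrap_or_default()
}

fn main() {
    let update: Value = json!({ "outputPath": "out.pdf" });
    let output_path: String =
        try_or_default(|| Some(update.get("outputPath")?.as_str()?.to_owned()));
    let root_path: Option<String> =
        try_(|| Some(update.get("rootPath")?.as_str()?.to_owned()));
    println!("output_path = {output_path:?}, root_path = {root_path:?}");
}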
@ -193,10 +154,7 @@ impl CompileConfig {
|
||||||
let typst_args: Vec<String> = match serde_json::from_value(typst_extra_args.clone())
|
let typst_args: Vec<String> = match serde_json::from_value(typst_extra_args.clone())
|
||||||
{
|
{
|
||||||
Ok(e) => e,
|
Ok(e) => e,
|
||||||
Err(e) => {
|
Err(e) => bail!("failed to parse typstExtraArgs: {e}"),
|
||||||
log::error!("failed to parse typstExtraArgs: {e}");
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let command = match CompileOnceArgs::try_parse_from(
|
let command = match CompileOnceArgs::try_parse_from(
|
||||||
|
@ -229,9 +187,7 @@ impl CompileConfig {
|
||||||
}
|
}
|
||||||
|
|
||||||
self.has_default_entry_path = self.determine_default_entry_path().is_some();
|
self.has_default_entry_path = self.determine_default_entry_path().is_some();
|
||||||
self.validate()?;
|
self.validate()
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn determine_root(&self, entry: Option<&ImmutPath>) -> Option<ImmutPath> {
|
pub fn determine_root(&self, entry: Option<&ImmutPath>) -> Option<ImmutPath> {
|
||||||
|
@ -239,18 +195,8 @@ impl CompileConfig {
|
||||||
return Some(path.as_path().into());
|
return Some(path.as_path().into());
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(extras) = &self.typst_extra_args {
|
if let Some(root) = try_(|| self.typst_extra_args.as_ref()?.root_dir.as_ref()) {
|
||||||
if let Some(root) = &extras.root_dir {
|
return Some(root.as_path().into());
|
||||||
return Some(root.as_path().into());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(path) = &self
|
|
||||||
.typst_extra_args
|
|
||||||
.as_ref()
|
|
||||||
.and_then(|x| x.root_dir.clone())
|
|
||||||
{
|
|
||||||
return Some(path.as_path().into());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(entry) = entry {
|
if let Some(entry) = entry {
|
||||||
|
@ -277,15 +223,15 @@ impl CompileConfig {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn determine_default_entry_path(&self) -> Option<ImmutPath> {
|
pub fn determine_default_entry_path(&self) -> Option<ImmutPath> {
|
||||||
self.typst_extra_args.as_ref().and_then(|e| {
|
let extras = self.typst_extra_args.as_ref()?;
|
||||||
if let Some(e) = &e.entry {
|
// todo: pre-compute this when updating config
|
||||||
if e.is_relative() {
|
if let Some(entry) = &extras.entry {
|
||||||
let root = self.determine_root(None)?;
|
if entry.is_relative() {
|
||||||
return Some(root.join(e).as_path().into());
|
let root = self.determine_root(None)?;
|
||||||
}
|
return Some(root.join(entry).as_path().into());
|
||||||
}
|
}
|
||||||
e.entry.clone()
|
}
|
||||||
})
|
extras.entry.clone()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn determine_entry(&self, entry: Option<ImmutPath>) -> EntryState {
|
pub fn determine_entry(&self, entry: Option<ImmutPath>) -> EntryState {
|
||||||
|
@ -370,7 +316,7 @@ impl Default for CompilerConstConfig {
|
||||||
pub struct CompileInit {
|
pub struct CompileInit {
|
||||||
pub handle: tokio::runtime::Handle,
|
pub handle: tokio::runtime::Handle,
|
||||||
pub font: CompileFontOpts,
|
pub font: CompileFontOpts,
|
||||||
pub diag_tx: mpsc::UnboundedSender<CompileClusterRequest>,
|
pub editor_tx: mpsc::UnboundedSender<EditorRequest>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Deserialize)]
|
#[derive(Debug, Deserialize)]
|
||||||
|
@ -410,10 +356,10 @@ impl LspDriver for CompileInit {
|
||||||
Deferred::new(|| SharedFontResolver::new(opts).expect("failed to create font book"))
|
Deferred::new(|| SharedFontResolver::new(opts).expect("failed to create font book"))
|
||||||
};
|
};
|
||||||
|
|
||||||
let args = CompileServerArgs {
|
let mut service = CompileServer::new(
|
||||||
client,
|
client,
|
||||||
compile_config,
|
compile_config,
|
||||||
const_config: CompilerConstConfig {
|
CompilerConstConfig {
|
||||||
position_encoding: params
|
position_encoding: params
|
||||||
.position_encoding
|
.position_encoding
|
||||||
.map(|x| match x.as_str() {
|
.map(|x| match x.as_str() {
|
||||||
|
@ -422,12 +368,10 @@ impl LspDriver for CompileInit {
|
||||||
})
|
})
|
||||||
.unwrap_or_default(),
|
.unwrap_or_default(),
|
||||||
},
|
},
|
||||||
diag_tx: self.diag_tx,
|
self.editor_tx,
|
||||||
handle: self.handle,
|
|
||||||
font,
|
font,
|
||||||
};
|
self.handle,
|
||||||
|
);
|
||||||
let mut service = CompileServer::new(args);
|
|
||||||
|
|
||||||
let primary = service.server(
|
let primary = service.server(
|
||||||
"primary".to_owned(),
|
"primary".to_owned(),
|
||||||
|
@ -435,10 +379,9 @@ impl LspDriver for CompileInit {
|
||||||
service.config.determine_inputs(),
|
service.config.determine_inputs(),
|
||||||
service.vfs_snapshot(),
|
service.vfs_snapshot(),
|
||||||
);
|
);
|
||||||
if service.compiler.is_some() {
|
if service.compiler.replace(primary).is_some() {
|
||||||
panic!("primary already initialized");
|
panic!("primary already initialized");
|
||||||
}
|
}
|
||||||
service.compiler = Some(primary);
|
|
||||||
|
|
||||||
(service, Ok(()))
|
(service, Ok(()))
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
//! tinymist LSP mode
|
//! tinymist LSP mode
|
||||||
|
|
||||||
use core::fmt;
|
|
||||||
use std::ops::Deref;
|
use std::ops::Deref;
|
||||||
use std::path::Path;
|
use std::path::Path;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
@ -8,16 +7,13 @@ use std::time::Instant;
|
||||||
use std::{collections::HashMap, path::PathBuf};
|
use std::{collections::HashMap, path::PathBuf};
|
||||||
|
|
||||||
use anyhow::{bail, Context};
|
use anyhow::{bail, Context};
|
||||||
use crossbeam_channel::select;
|
|
||||||
use crossbeam_channel::Receiver;
|
|
||||||
use futures::future::BoxFuture;
|
use futures::future::BoxFuture;
|
||||||
use log::{error, info, trace, warn};
|
use log::{error, info, trace, warn};
|
||||||
use lsp_server::{ErrorCode, Message, Notification, Request, RequestId, ResponseError};
|
use lsp_server::{ErrorCode, Message, Notification, Request, RequestId, Response, ResponseError};
|
||||||
use lsp_types::notification::Notification as NotificationTrait;
|
use lsp_types::notification::Notification as NotificationTrait;
|
||||||
use lsp_types::request::{GotoDeclarationParams, GotoDeclarationResponse, WorkspaceConfiguration};
|
use lsp_types::request::{GotoDeclarationParams, GotoDeclarationResponse, WorkspaceConfiguration};
|
||||||
use lsp_types::*;
|
use lsp_types::*;
|
||||||
use parking_lot::lock_api::RwLock;
|
use parking_lot::RwLock;
|
||||||
use paste::paste;
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use serde_json::{Map, Value as JsonValue};
|
use serde_json::{Map, Value as JsonValue};
|
||||||
use tinymist_query::{
|
use tinymist_query::{
|
||||||
|
@ -33,12 +29,11 @@ use typst_ts_core::path::PathClean;
|
||||||
use typst_ts_core::{error::prelude::*, ImmutPath};
|
use typst_ts_core::{error::prelude::*, ImmutPath};
|
||||||
|
|
||||||
use super::lsp_init::*;
|
use super::lsp_init::*;
|
||||||
use crate::actor::cluster::CompileClusterRequest;
|
use crate::actor::editor::EditorRequest;
|
||||||
|
use crate::actor::format::{FormatConfig, FormatRequest};
|
||||||
use crate::actor::typ_client::CompileClientActor;
|
use crate::actor::typ_client::CompileClientActor;
|
||||||
use crate::actor::{
|
use crate::actor::user_action::{TraceParams, UserActionRequest};
|
||||||
FormattingConfig, FormattingRequest, UserActionRequest, UserActionTraceRequest,
|
use crate::compiler::CompileServer;
|
||||||
};
|
|
||||||
use crate::compiler::{CompileServer, CompileServerArgs};
|
|
||||||
use crate::compiler_init::CompilerConstConfig;
|
use crate::compiler_init::CompilerConstConfig;
|
||||||
use crate::harness::{InitializedLspDriver, LspHost};
|
use crate::harness::{InitializedLspDriver, LspHost};
|
||||||
use crate::tools::package::InitTask;
|
use crate::tools::package::InitTask;
|
||||||
|
@ -47,21 +42,6 @@ use crate::{run_query, LspResult};
|
||||||
|
|
||||||
pub type MaySyncResult<'a> = Result<JsonValue, BoxFuture<'a, JsonValue>>;
|
pub type MaySyncResult<'a> = Result<JsonValue, BoxFuture<'a, JsonValue>>;
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
enum Event {
|
|
||||||
Lsp(lsp_server::Message),
|
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Display for Event {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
|
||||||
match self {
|
|
||||||
Event::Lsp(_) => write!(f, "Event::Lsp"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) struct Cancelled;
|
|
||||||
|
|
||||||
type LspMethod<Res> = fn(srv: &mut TypstLanguageServer, args: JsonValue) -> LspResult<Res>;
|
type LspMethod<Res> = fn(srv: &mut TypstLanguageServer, args: JsonValue) -> LspResult<Res>;
|
||||||
type LspHandler<Req, Res> = fn(srv: &mut TypstLanguageServer, args: Req) -> LspResult<Res>;
|
type LspHandler<Req, Res> = fn(srv: &mut TypstLanguageServer, args: Req) -> LspResult<Res>;
|
||||||
|
|
||||||
|
@ -69,7 +49,7 @@ type LspHandler<Req, Res> = fn(srv: &mut TypstLanguageServer, args: Req) -> LspR
|
||||||
/// Returns Ok(None) -> Need to respond none
|
/// Returns Ok(None) -> Need to respond none
|
||||||
/// Returns Err(..) -> Need to respond error
|
/// Returns Err(..) -> Need to respond error
|
||||||
type LspRawHandler<T> =
|
type LspRawHandler<T> =
|
||||||
fn(srv: &mut TypstLanguageServer, args: (RequestId, T)) -> LspResult<Option<()>>;
|
fn(srv: &mut TypstLanguageServer, req_id: RequestId, args: T) -> LspResult<Option<()>>;
|
||||||
|
|
||||||
type ExecuteCmdMap = HashMap<&'static str, LspRawHandler<Vec<JsonValue>>>;
|
type ExecuteCmdMap = HashMap<&'static str, LspRawHandler<Vec<JsonValue>>>;
|
||||||
type NotifyCmdMap = HashMap<&'static str, LspMethod<()>>;
|
type NotifyCmdMap = HashMap<&'static str, LspMethod<()>>;
|
||||||
|
@ -86,7 +66,7 @@ macro_rules! resource_fn {
|
||||||
macro_rules! request_fn_ {
|
macro_rules! request_fn_ {
|
||||||
($desc: ty, Self::$method: ident) => {
|
($desc: ty, Self::$method: ident) => {
|
||||||
(<$desc>::METHOD, {
|
(<$desc>::METHOD, {
|
||||||
const E: LspRawHandler<JsonValue> = |this, (req_id, req)| {
|
const E: LspRawHandler<JsonValue> = |this, req_id, req| {
|
||||||
let req: <$desc as lsp_types::request::Request>::Params =
|
let req: <$desc as lsp_types::request::Request>::Params =
|
||||||
serde_json::from_value(req).unwrap(); // todo: soft unwrap
|
serde_json::from_value(req).unwrap(); // todo: soft unwrap
|
||||||
this.$method(req_id, req)
|
this.$method(req_id, req)
|
||||||
|
@ -99,26 +79,12 @@ macro_rules! request_fn_ {
|
||||||
macro_rules! request_fn {
|
macro_rules! request_fn {
|
||||||
($desc: ty, Self::$method: ident) => {
|
($desc: ty, Self::$method: ident) => {
|
||||||
(<$desc>::METHOD, {
|
(<$desc>::METHOD, {
|
||||||
const E: LspRawHandler<JsonValue> = |this, (req_id, req)| {
|
const E: LspRawHandler<JsonValue> = |this, req_id, req| {
|
||||||
let req: <$desc as lsp_types::request::Request>::Params =
|
let req: <$desc as lsp_types::request::Request>::Params =
|
||||||
serde_json::from_value(req).unwrap(); // todo: soft unwrap
|
serde_json::from_value(req).unwrap(); // todo: soft unwrap
|
||||||
let res = this
|
let res = this.$method(req);
|
||||||
.$method(req)
|
|
||||||
.map(|res| serde_json::to_value(res).unwrap()); // todo: soft unwrap
|
|
||||||
|
|
||||||
if let Ok(response) = result_to_response(req_id, res) {
|
this.client.respond(result_to_response(req_id, res));
|
||||||
this.client.respond(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
// todo: cancellation
|
|
||||||
// Err(e) => match e.downcast::<Cancelled>() {
|
|
||||||
// Ok(cancelled) => return Err(cancelled),
|
|
||||||
// Err(e) => lsp_server::Response::new_err(
|
|
||||||
// id,
|
|
||||||
// lsp_server::ErrorCode::InternalError as i32,
|
|
||||||
// e.to_string(),
|
|
||||||
// ),
|
|
||||||
// },
|
|
||||||
|
|
||||||
Ok(Some(()))
|
Ok(Some(()))
|
||||||
};
|
};
|
||||||
|
@ -130,11 +96,8 @@ macro_rules! request_fn {
|
||||||
macro_rules! exec_fn_ {
|
macro_rules! exec_fn_ {
|
||||||
($key: expr, Self::$method: ident) => {
|
($key: expr, Self::$method: ident) => {
|
||||||
($key, {
|
($key, {
|
||||||
{
|
const E: LspRawHandler<Vec<JsonValue>> = |this, req_id, req| this.$method(req_id, req);
|
||||||
const E: LspRawHandler<Vec<JsonValue>> =
|
E
|
||||||
|this, (req_id, req)| this.$method(req_id, req);
|
|
||||||
E
|
|
||||||
}
|
|
||||||
})
|
})
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
@ -142,13 +105,9 @@ macro_rules! exec_fn_ {
|
||||||
macro_rules! exec_fn {
|
macro_rules! exec_fn {
|
||||||
($key: expr, Self::$method: ident) => {
|
($key: expr, Self::$method: ident) => {
|
||||||
($key, {
|
($key, {
|
||||||
const E: LspRawHandler<Vec<JsonValue>> = |this, (req_id, args)| {
|
const E: LspRawHandler<Vec<JsonValue>> = |this, req_id, args| {
|
||||||
let res = this.$method(args);
|
let res = this.$method(args);
|
||||||
|
this.client.respond(result_to_response(req_id, res));
|
||||||
if let Ok(response) = result_to_response(req_id, res) {
|
|
||||||
this.client.respond(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(Some(()))
|
Ok(Some(()))
|
||||||
};
|
};
|
||||||
E
|
E
|
||||||
|
@ -181,14 +140,6 @@ fn as_path_pos(inp: TextDocumentPositionParams) -> (PathBuf, Position) {
|
||||||
(as_path(inp.text_document), inp.position)
|
(as_path(inp.text_document), inp.position)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct TypstLanguageServerArgs {
|
|
||||||
pub handle: tokio::runtime::Handle,
|
|
||||||
pub client: LspHost<TypstLanguageServer>,
|
|
||||||
pub const_config: ConstConfig,
|
|
||||||
pub diag_tx: mpsc::UnboundedSender<CompileClusterRequest>,
|
|
||||||
pub font: Deferred<SharedFontResolver>,
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The object providing the language server functionality.
|
/// The object providing the language server functionality.
|
||||||
pub struct TypstLanguageServer {
|
pub struct TypstLanguageServer {
|
||||||
/// The language server client.
|
/// The language server client.
|
||||||
|
@ -198,9 +149,9 @@ pub struct TypstLanguageServer {
|
||||||
/// Whether the server is shutting down.
|
/// Whether the server is shutting down.
|
||||||
pub shutdown_requested: bool,
|
pub shutdown_requested: bool,
|
||||||
/// Whether the server has registered semantic tokens capabilities.
|
/// Whether the server has registered semantic tokens capabilities.
|
||||||
pub sema_tokens_registered: Option<bool>,
|
pub sema_tokens_registered: bool,
|
||||||
/// Whether the server has registered document formatter capabilities.
|
/// Whether the server has registered document formatter capabilities.
|
||||||
pub formatter_registered: Option<bool>,
|
pub formatter_registered: bool,
|
||||||
/// Whether client is pinning a file.
|
/// Whether client is pinning a file.
|
||||||
pub pinning: bool,
|
pub pinning: bool,
|
||||||
/// The client focusing file.
|
/// The client focusing file.
|
||||||
|
@ -236,41 +187,47 @@ pub struct TypstLanguageServer {
|
||||||
pub dedicates: Vec<CompileServer>,
|
pub dedicates: Vec<CompileServer>,
|
||||||
/// The formatter thread running in backend.
|
/// The formatter thread running in backend.
|
||||||
/// Note: The thread will exit if you drop the sender.
|
/// Note: The thread will exit if you drop the sender.
|
||||||
pub format_thread: Option<crossbeam_channel::Sender<FormattingRequest>>,
|
pub format_thread: Option<crossbeam_channel::Sender<FormatRequest>>,
|
||||||
/// The user action thread running in backend.
|
/// The user action thread running in backend.
|
||||||
/// Note: The thread will exit if you drop the sender.
|
/// Note: The thread will exit if you drop the sender.
|
||||||
pub user_action_threads: Option<crossbeam_channel::Sender<UserActionRequest>>,
|
pub user_action_thread: Option<crossbeam_channel::Sender<UserActionRequest>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Getters and the main loop.
|
/// Getters and the main loop.
|
||||||
impl TypstLanguageServer {
|
impl TypstLanguageServer {
|
||||||
/// Create a new language server.
|
/// Create a new language server.
|
||||||
pub fn new(args: TypstLanguageServerArgs) -> Self {
|
pub fn new(
|
||||||
|
client: LspHost<TypstLanguageServer>,
|
||||||
|
const_config: ConstConfig,
|
||||||
|
editor_tx: mpsc::UnboundedSender<EditorRequest>,
|
||||||
|
font: Deferred<SharedFontResolver>,
|
||||||
|
handle: tokio::runtime::Handle,
|
||||||
|
) -> Self {
|
||||||
let tokens_ctx = SemanticTokenContext::new(
|
let tokens_ctx = SemanticTokenContext::new(
|
||||||
args.const_config.position_encoding,
|
const_config.position_encoding,
|
||||||
args.const_config.sema_tokens_overlapping_token_support,
|
const_config.tokens_overlapping_token_support,
|
||||||
args.const_config.sema_tokens_multiline_token_support,
|
const_config.tokens_multiline_token_support,
|
||||||
);
|
);
|
||||||
Self {
|
Self {
|
||||||
client: args.client.clone(),
|
client,
|
||||||
primary: CompileServer::new(CompileServerArgs {
|
primary: CompileServer::new(
|
||||||
client: LspHost::new(Arc::new(RwLock::new(None))),
|
LspHost::new(Arc::new(RwLock::new(None))),
|
||||||
compile_config: Default::default(),
|
Default::default(),
|
||||||
const_config: CompilerConstConfig {
|
CompilerConstConfig {
|
||||||
position_encoding: args.const_config.position_encoding,
|
position_encoding: const_config.position_encoding,
|
||||||
},
|
},
|
||||||
diag_tx: args.diag_tx,
|
editor_tx,
|
||||||
font: args.font,
|
font,
|
||||||
handle: args.handle,
|
handle,
|
||||||
}),
|
),
|
||||||
dedicates: Vec::new(),
|
dedicates: Vec::new(),
|
||||||
shutdown_requested: false,
|
shutdown_requested: false,
|
||||||
ever_focusing_by_activities: false,
|
ever_focusing_by_activities: false,
|
||||||
ever_manual_focusing: false,
|
ever_manual_focusing: false,
|
||||||
sema_tokens_registered: None,
|
sema_tokens_registered: false,
|
||||||
formatter_registered: None,
|
formatter_registered: false,
|
||||||
config: Default::default(),
|
config: Default::default(),
|
||||||
const_config: args.const_config,
|
const_config,
|
||||||
|
|
||||||
exec_cmds: Self::get_exec_commands(),
|
exec_cmds: Self::get_exec_commands(),
|
||||||
regular_cmds: Self::get_regular_cmds(),
|
regular_cmds: Self::get_regular_cmds(),
|
||||||
|
@ -281,7 +238,7 @@ impl TypstLanguageServer {
|
||||||
focusing: None,
|
focusing: None,
|
||||||
tokens_ctx,
|
tokens_ctx,
|
||||||
format_thread: None,
|
format_thread: None,
|
||||||
user_action_threads: None,
|
user_action_thread: None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -350,7 +307,7 @@ impl InitializedLspDriver for TypstLanguageServer {
|
||||||
/// The server can use the `initialized` notification, for example, to
|
/// The server can use the `initialized` notification, for example, to
|
||||||
/// dynamically register capabilities with the client.
|
/// dynamically register capabilities with the client.
|
||||||
fn initialized(&mut self, params: InitializedParams) {
|
fn initialized(&mut self, params: InitializedParams) {
|
||||||
if self.const_config().sema_tokens_dynamic_registration
|
if self.const_config().tokens_dynamic_registration
|
||||||
&& self.config.semantic_tokens == SemanticTokensMode::Enable
|
&& self.config.semantic_tokens == SemanticTokensMode::Enable
|
||||||
{
|
{
|
||||||
let err = self.enable_sema_token_caps(true);
|
let err = self.enable_sema_token_caps(true);
|
||||||
|
@ -413,15 +370,15 @@ impl InitializedLspDriver for TypstLanguageServer {
|
||||||
// SetThreadPriority(thread, thread_priority_above_normal);
|
// SetThreadPriority(thread, thread_priority_above_normal);
|
||||||
// }
|
// }
|
||||||
|
|
||||||
while let Some(event) = self.next_event(&inbox) {
|
while let Ok(msg) = inbox.recv() {
|
||||||
if matches!(
|
const EXIT_METHOD: &str = lsp_types::notification::Exit::METHOD;
|
||||||
&event,
|
let loop_start = Instant::now();
|
||||||
Event::Lsp(lsp_server::Message::Notification(Notification { method, .. }))
|
match msg {
|
||||||
if method == lsp_types::notification::Exit::METHOD
|
Message::Notification(not) if not.method == EXIT_METHOD => return Ok(()),
|
||||||
) {
|
Message::Notification(not) => self.on_notification(loop_start, not)?,
|
||||||
return Ok(());
|
Message::Request(req) => self.on_request(loop_start, req),
|
||||||
|
Message::Response(resp) => self.client.clone().complete_request(self, resp),
|
||||||
}
|
}
|
||||||
self.handle_event(event)?;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
warn!("client exited without proper shutdown sequence");
|
warn!("client exited without proper shutdown sequence");
|
||||||
|
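The reworked main_loop dispatches directly on message variants as they arrive from the receiver; a minimal sketch of that dispatch shape over a crossbeam channel, with a toy Message enum standing in for lsp_server::Message:

use crossbeam_channel::unbounded;

// Toy stand-in for lsp_server::Message, just to show the dispatch shape.
enum Message {
    Request(String),
    Notification(String),
    Response(String),
}

fn main_loop(inbox: crossbeam_channel::Receiver<Message>) {
    while let Ok(msg) = inbox.recv() {
        match msg {
            Message::Notification(method) if method == "exit" => return,
            Message::Notification(method) => println!("notification: {method}"),
            Message::Request(method) => println!("request: {method}"),
            Message::Response(id) => println!("response to: {id}"),
        }
    }
}

fn main() {
    let (tx, rx) = unbounded();
    tx.send(Message::Request("initialize".into())).unwrap();
    tx.send(Message::Notification("exit".into())).unwrap();
    main_loop(rx);
}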
@ -430,44 +387,15 @@ impl InitializedLspDriver for TypstLanguageServer {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TypstLanguageServer {
|
impl TypstLanguageServer {
|
||||||
/// Receives the next event from event sources.
|
|
||||||
fn next_event(&self, inbox: &Receiver<lsp_server::Message>) -> Option<Event> {
|
|
||||||
select! {
|
|
||||||
recv(inbox) -> msg =>
|
|
||||||
msg.ok().map(Event::Lsp),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Handles an incoming event.
|
|
||||||
fn handle_event(&mut self, event: Event) -> anyhow::Result<()> {
|
|
||||||
let loop_start = Instant::now();
|
|
||||||
|
|
||||||
// let was_quiescent = self.is_quiescent();
|
|
||||||
match event {
|
|
||||||
Event::Lsp(msg) => match msg {
|
|
||||||
lsp_server::Message::Request(req) => self.on_new_request(loop_start, req),
|
|
||||||
lsp_server::Message::Notification(not) => self.on_notification(loop_start, not)?,
|
|
||||||
lsp_server::Message::Response(resp) => {
|
|
||||||
self.client.clone().complete_request(self, resp)
|
|
||||||
}
|
|
||||||
},
|
|
||||||
}
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Registers and handles a request. This should only be called once per
|
/// Registers and handles a request. This should only be called once per
|
||||||
/// incoming request.
|
/// incoming request.
|
||||||
fn on_new_request(&mut self, request_received: Instant, req: Request) {
|
fn on_request(&mut self, request_received: Instant, req: Request) {
|
||||||
self.client.register_request(&req, request_received);
|
self.client.register_request(&req, request_received);
|
||||||
self.on_request(req);
|
|
||||||
}
|
|
||||||
|
|
||||||
/// Handles a request.
|
|
||||||
fn on_request(&mut self, req: Request) {
|
|
||||||
if self.shutdown_requested {
|
if self.shutdown_requested {
|
||||||
self.client.respond(lsp_server::Response::new_err(
|
self.client.respond(Response::new_err(
|
||||||
req.id.clone(),
|
req.id.clone(),
|
||||||
lsp_server::ErrorCode::InvalidRequest as i32,
|
ErrorCode::InvalidRequest as i32,
|
||||||
"Shutdown already requested.".to_owned(),
|
"Shutdown already requested.".to_owned(),
|
||||||
));
|
));
|
||||||
return;
|
return;
|
||||||
|
@ -478,14 +406,7 @@ impl TypstLanguageServer {
|
||||||
return;
|
return;
|
||||||
};
|
};
|
||||||
|
|
||||||
let res = handler(self, (req.id.clone(), req.params));
|
let _ = handler(self, req.id.clone(), req.params);
|
||||||
if matches!(res, Ok(Some(()))) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Ok(response) = result_to_response_(req.id, res) {
|
|
||||||
self.client.respond(response);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The entry point for the `workspace/executeCommand` request.
|
/// The entry point for the `workspace/executeCommand` request.
|
||||||
|
@ -495,16 +416,14 @@ impl TypstLanguageServer {
|
||||||
params: ExecuteCommandParams,
|
params: ExecuteCommandParams,
|
||||||
) -> LspResult<Option<()>> {
|
) -> LspResult<Option<()>> {
|
||||||
let ExecuteCommandParams {
|
let ExecuteCommandParams {
|
||||||
command,
|
command, arguments, ..
|
||||||
arguments,
|
|
||||||
work_done_progress_params: _,
|
|
||||||
} = params;
|
} = params;
|
||||||
let Some(handler) = self.exec_cmds.get(command.as_str()) else {
|
let Some(handler) = self.exec_cmds.get(command.as_str()) else {
|
||||||
error!("asked to execute unknown command");
|
error!("asked to execute unknown command");
|
||||||
return Err(method_not_found());
|
return Err(method_not_found());
|
||||||
};
|
};
|
||||||
|
|
||||||
handler(self, (req_id.clone(), arguments))
|
handler(self, req_id.clone(), arguments)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Handles an incoming notification.
|
/// Handles an incoming notification.
|
||||||
|
@@ -540,33 +459,29 @@ impl TypstLanguageServer {

 /// Registers or unregisters semantic tokens.
 fn enable_sema_token_caps(&mut self, enable: bool) -> anyhow::Result<()> {
-if !self.const_config().sema_tokens_dynamic_registration {
+if !self.const_config().tokens_dynamic_registration {
 trace!("skip register semantic by config");
 return Ok(());
 }

-let res = match (enable, self.sema_tokens_registered) {
+match (enable, self.sema_tokens_registered) {
-(true, None | Some(false)) => {
+(true, false) => {
 trace!("registering semantic tokens");
 let options = get_semantic_tokens_options();
 self.client
 .register_capability(vec![get_semantic_tokens_registration(options)])
+.inspect(|_| self.sema_tokens_registered = enable)
 .context("could not register semantic tokens")
 }
-(false, Some(true)) => {
+(false, true) => {
 trace!("unregistering semantic tokens");
 self.client
 .unregister_capability(vec![get_semantic_tokens_unregistration()])
+.inspect(|_| self.sema_tokens_registered = enable)
 .context("could not unregister semantic tokens")
 }
-(true, Some(true)) | (false, None | Some(false)) => Ok(()),
+_ => Ok(()),
-};

-if res.is_ok() {
-self.sema_tokens_registered = Some(enable);
 }

-res
 }

 /// Registers or unregisters document formatter.
@@ -594,27 +509,23 @@ impl TypstLanguageServer {
 }
 }

-let res = match (enable, self.formatter_registered) {
+match (enable, self.formatter_registered) {
-(true, None | Some(false)) => {
+(true, false) => {
 trace!("registering formatter");
 self.client
 .register_capability(vec![get_formatting_registration()])
+.inspect(|_| self.formatter_registered = enable)
 .context("could not register formatter")
 }
-(false, Some(true)) => {
+(false, true) => {
 trace!("unregistering formatter");
 self.client
 .unregister_capability(vec![get_formatting_unregistration()])
+.inspect(|_| self.formatter_registered = enable)
 .context("could not unregister formatter")
 }
-(true, Some(true)) | (false, None | Some(false)) => Ok(()),
+_ => Ok(()),
-};

-if res.is_ok() {
-self.formatter_registered = Some(enable);
 }

-res
 }
 }

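Both registration paths above now rely on `Result::inspect` to flip the bookkeeping flag only when the round trip to the client succeeds, instead of checking `res.is_ok()` afterwards. A minimal sketch of that pattern outside tinymist (hypothetical `Caps` type; `Result::inspect` is stable since Rust 1.76):

```rust
// Hypothetical stand-in for the server's capability bookkeeping (not tinymist code).
struct Caps {
    registered: bool,
}

impl Caps {
    // Pretend client round trip; always succeeds in this sketch.
    fn round_trip(&self, _enable: bool) -> Result<(), String> {
        Ok(())
    }

    fn set_registered(&mut self, enable: bool) -> Result<(), String> {
        match (enable, self.registered) {
            // The flag is written only when the round trip returns Ok.
            (true, false) | (false, true) => self
                .round_trip(enable)
                .inspect(|_| self.registered = enable),
            _ => Ok(()),
        }
    }
}

fn main() {
    let mut caps = Caps { registered: false };
    caps.set_registered(true).unwrap();
    assert!(caps.registered);
}
```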
@@ -731,7 +642,7 @@ impl TypstLanguageServer {
 let self_path = std::env::current_exe()
 .map_err(|e| internal_error(format!("Cannot get typst compiler {e}")))?;

-let thread = self.user_action_threads.clone();
+let thread = self.user_action_thread.clone();
 let entry = self.config.compile.determine_entry(Some(path));

 let res = self
@@ -750,16 +661,16 @@ impl TypstLanguageServer {
 .ok_or_else(|| anyhow::anyhow!("main file must be resolved, got {entry:?}"))?;

 if let Some(f) = thread {
-f.send(UserActionRequest::Trace((
+f.send(UserActionRequest::Trace(
 req_id,
-UserActionTraceRequest {
+TraceParams {
 compiler_program: self_path,
 root: root.as_ref().to_owned(),
 main,
 inputs: cc.world().inputs.as_ref().deref().clone(),
 font_paths: cc.world().font_resolver.font_paths().to_owned(),
 },
-)))
+))
 .context("cannot send trace request")?;
 } else {
 bail!("user action thread is not available");
@@ -847,8 +758,8 @@ impl TypstLanguageServer {
 use crate::tools::package::{self, determine_latest_version, TemplateSource};

 #[derive(Debug, Serialize)]
+#[serde(rename_all = "camelCase")]
 struct InitResult {
-#[serde(rename = "entryPath")]
 entry_path: PathBuf,
 }

@@ -1049,7 +960,7 @@ impl TypstLanguageServer {
 error!("could not change formatter config: {err}");
 }
 if let Some(f) = &self.format_thread {
-let err = f.send(FormattingRequest::ChangeConfig(FormattingConfig {
+let err = f.send(FormatRequest::ChangeConfig(FormatConfig {
 mode: self.config.formatter,
 width: self.config.formatter_print_width,
 }));
@@ -1182,7 +1093,7 @@ impl TypstLanguageServer {
 let path = as_path(params.text_document).as_path().into();
 self.query_source(path, |source| {
 if let Some(f) = &self.format_thread {
-f.send(FormattingRequest::Formatting((req_id, source.clone())))?;
+f.send(FormatRequest::Format(req_id, source.clone()))?;
 } else {
 bail!("formatter thread is not available");
 }
@@ -1323,32 +1234,20 @@ pub fn method_not_found() -> ResponseError {
 }
 }

-pub(crate) fn result_to_response_<T: Serialize>(
+pub(crate) fn result_to_response<T: Serialize>(
-id: lsp_server::RequestId,
+id: RequestId,
 result: Result<T, ResponseError>,
-) -> Result<lsp_server::Response, Cancelled> {
+) -> Response {
-let res = match result {
+match result {
-Ok(resp) => {
+Ok(resp) => match serde_json::to_value(resp) {
-let resp = serde_json::to_value(resp);
+Ok(resp) => Response::new_ok(id, resp),
-match resp {
+Err(e) => {
-Ok(resp) => lsp_server::Response::new_ok(id, resp),
+let e = internal_error(e.to_string());
-Err(e) => return result_to_response(id, Err(internal_error(e.to_string()))),
+Response::new_err(id, e.code, e.message)
 }
-}
+},
-Err(e) => lsp_server::Response::new_err(id, e.code, e.message),
+Err(e) => Response::new_err(id, e.code, e.message),
-};
+}
-Ok(res)
-}

-fn result_to_response(
-id: lsp_server::RequestId,
-result: Result<JsonValue, ResponseError>,
-) -> Result<lsp_server::Response, Cancelled> {
-let res = match result {
-Ok(resp) => lsp_server::Response::new_ok(id, resp),
-Err(e) => lsp_server::Response::new_err(id, e.code, e.message),
-};
-Ok(res)
 }

 #[test]
@@ -1,4 +1,4 @@
-use std::{collections::HashMap, path::PathBuf};
+use std::path::PathBuf;

 use anyhow::bail;
 use itertools::Itertools;
@@ -11,13 +11,12 @@ use tokio::sync::mpsc;
 use typst::util::Deferred;
 use typst_ts_core::ImmutPath;

-use crate::actor::cluster::EditorActor;
+use crate::actor::editor::EditorActor;
 use crate::compiler_init::CompileConfig;
 use crate::harness::LspHost;
+use crate::utils::{try_, try_or};
 use crate::world::{ImmutDict, SharedFontResolver};
-use crate::{
+use crate::{invalid_params, CompileFontOpts, LspResult, TypstLanguageServer};
-invalid_params, CompileFontOpts, LspResult, TypstLanguageServer, TypstLanguageServerArgs,
-};

 // todo: svelte-language-server responds to a Goto Definition request with
 // LocationLink[] even if the client does not report the
@@ -68,13 +67,10 @@ pub enum SemanticTokensMode {
 pub struct CompileExtraOpts {
 /// The root directory for compilation routine.
 pub root_dir: Option<PathBuf>,

 /// Path to entry
 pub entry: Option<ImmutPath>,

 /// Additional input arguments to compile the entry file.
 pub inputs: ImmutDict,

 /// will remove later
 pub font_paths: Vec<PathBuf>,
 }
@@ -153,38 +149,14 @@ impl Config {
 /// # Errors
 /// Errors if the update is invalid.
 pub fn update_by_map(&mut self, update: &Map<String, JsonValue>) -> anyhow::Result<()> {
-let semantic_tokens = update
-.get("semanticTokens")
-.map(SemanticTokensMode::deserialize)
-.and_then(Result::ok);
-if let Some(semantic_tokens) = semantic_tokens {
-self.semantic_tokens = semantic_tokens;
-}

-let formatter = update
-.get("formatterMode")
-.map(FormatterMode::deserialize)
-.and_then(Result::ok);
-if let Some(formatter) = formatter {
-self.formatter = formatter;
-}

-let print_width = update
-.get("formatterPrintWidth")
-.and_then(|e| serde_json::from_value::<u32>(e.clone()).ok());
-if let Some(formatter) = print_width {
-self.formatter_print_width = formatter;
-}
+try_(|| SemanticTokensMode::deserialize(update.get("semanticTokens")?).ok())
+.inspect(|v| self.semantic_tokens = *v);
+try_(|| FormatterMode::deserialize(update.get("formatterMode")?).ok())
+.inspect(|v| self.formatter = *v);
+try_(|| u32::deserialize(update.get("formatterPrintWidth")?).ok())
+.inspect(|v| self.formatter_print_width = *v);

 self.compile.update_by_map(update)?;
-self.validate()?;
-Ok(())
-}

-fn validate(&self) -> anyhow::Result<()> {
-self.compile.validate()?;

-Ok(())
+self.compile.validate()
 }
 }

@@ -198,11 +170,11 @@ pub struct ConstConfig {
 /// Allow dynamic registration of configuration changes.
 pub cfg_change_registration: bool,
 /// Allow dynamic registration of semantic tokens.
-pub sema_tokens_dynamic_registration: bool,
+pub tokens_dynamic_registration: bool,
 /// Allow overlapping tokens.
-pub sema_tokens_overlapping_token_support: bool,
+pub tokens_overlapping_token_support: bool,
 /// Allow multiline tokens.
-pub sema_tokens_multiline_token_support: bool,
+pub tokens_multiline_token_support: bool,
 /// Allow line folding on documents.
 pub doc_line_folding_only: bool,
 /// Allow dynamic registration of document formatting.
@@ -211,16 +183,12 @@ pub struct ConstConfig {

 impl From<&InitializeParams> for ConstConfig {
 fn from(params: &InitializeParams) -> Self {
-const DEFAULT_ENCODING: &[PositionEncodingKind; 1] = &[PositionEncodingKind::UTF16];
+const DEFAULT_ENCODING: &[PositionEncodingKind] = &[PositionEncodingKind::UTF16];

 let position_encoding = {
-let encodings = params
-.capabilities
-.general
-.as_ref()
-.and_then(|general| general.position_encodings.as_ref())
-.map(|encodings| encodings.as_slice())
-.unwrap_or(DEFAULT_ENCODING);
+let general = params.capabilities.general.as_ref();
+let encodings = try_(|| Some(general?.position_encodings.as_ref()?.as_slice()));
+let encodings = encodings.unwrap_or(DEFAULT_ENCODING);

 if encodings.contains(&PositionEncodingKind::UTF8) {
 PositionEncoding::Utf8
@@ -229,42 +197,20 @@ impl From<&InitializeParams> for ConstConfig {
 }
 };

-let workspace_caps = params.capabilities.workspace.as_ref();
-let supports_config_change_registration = workspace_caps
-.and_then(|workspace| workspace.configuration)
-.unwrap_or(false);

-let doc_caps = params.capabilities.text_document.as_ref();
-let folding_caps = doc_caps.and_then(|doc| doc.folding_range.as_ref());
-let line_folding_only = folding_caps
-.and_then(|folding| folding.line_folding_only)
-.unwrap_or(true);

-let semantic_tokens_caps = doc_caps.and_then(|doc| doc.semantic_tokens.as_ref());
-let supports_semantic_tokens_dynamic_registration = semantic_tokens_caps
-.and_then(|semantic_tokens| semantic_tokens.dynamic_registration)
-.unwrap_or(false);
-let supports_semantic_tokens_overlapping_token_support = semantic_tokens_caps
-.and_then(|semantic_tokens| semantic_tokens.overlapping_token_support)
-.unwrap_or(false);
-let supports_semantic_tokens_multiline_token_support = semantic_tokens_caps
-.and_then(|semantic_tokens| semantic_tokens.multiline_token_support)
-.unwrap_or(false);

-let formatter_caps = doc_caps.and_then(|doc| doc.formatting.as_ref());
-let supports_document_formatting_dynamic_registration = formatter_caps
-.and_then(|formatting| formatting.dynamic_registration)
-.unwrap_or(false);
+let workspace = params.capabilities.workspace.as_ref();
+let doc = params.capabilities.text_document.as_ref();
+let sema = try_(|| doc?.semantic_tokens.as_ref());
+let fold = try_(|| doc?.folding_range.as_ref());
+let format = try_(|| doc?.formatting.as_ref());

 Self {
 position_encoding,
-sema_tokens_dynamic_registration: supports_semantic_tokens_dynamic_registration,
-sema_tokens_overlapping_token_support:
-supports_semantic_tokens_overlapping_token_support,
-sema_tokens_multiline_token_support: supports_semantic_tokens_multiline_token_support,
-doc_fmt_dynamic_registration: supports_document_formatting_dynamic_registration,
-cfg_change_registration: supports_config_change_registration,
-doc_line_folding_only: line_folding_only,
+cfg_change_registration: try_or(|| workspace?.configuration, false),
+tokens_dynamic_registration: try_or(|| sema?.dynamic_registration, false),
+tokens_overlapping_token_support: try_or(|| sema?.overlapping_token_support, false),
+tokens_multiline_token_support: try_or(|| sema?.multiline_token_support, false),
+doc_line_folding_only: try_or(|| fold?.line_folding_only, true),
+doc_fmt_dynamic_registration: try_or(|| format?.dynamic_registration, false),
 }
 }
 }
@@ -299,19 +245,14 @@ impl Init {

 // Initialize configurations
 let cc = ConstConfig::from(&params);
-info!(
-"initialized with const_config {const_config:?}",
-const_config = cc
-);
+info!("initialized with const_config {cc:?}");
 let mut config = Config {
 compile: CompileConfig {
 roots: match params.workspace_folders.as_ref() {
 Some(roots) => roots
 .iter()
-.map(|root| &root.uri)
-.map(Url::to_file_path)
-.collect::<Result<Vec<_>, _>>()
-.unwrap(),
+.filter_map(|root| root.uri.to_file_path().ok())
+.collect::<Vec<_>>(),
 #[allow(deprecated)] // `params.root_path` is marked as deprecated
 None => params
 .root_uri
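The workspace-root collection above drops the `.map(Url::to_file_path)` / `.unwrap()` chain in favour of `filter_map`, so a root whose URI has no file path is skipped rather than panicking. A small sketch of the difference, assuming the `url` crate as a dependency and Unix-style paths:

```rust
use std::path::PathBuf;
use url::Url;

fn main() {
    let roots = [
        Url::parse("file:///tmp/project").unwrap(),
        // Not a file URL: `to_file_path` fails for it.
        Url::parse("untitled:Untitled-1").unwrap(),
    ];

    // New style: non-file roots are silently skipped.
    let dirs: Vec<PathBuf> = roots
        .iter()
        .filter_map(|uri| uri.to_file_path().ok())
        .collect();
    assert_eq!(dirs.len(), 1);

    // Old style for comparison: collecting into a `Result` and unwrapping
    // would panic on the second root.
    assert!(roots
        .iter()
        .map(|uri| uri.to_file_path())
        .collect::<Result<Vec<_>, _>>()
        .is_err());
}
```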
@@ -352,15 +293,15 @@ impl Init {
 };

 // Bootstrap server
-let (diag_tx, diag_rx) = mpsc::unbounded_channel();
+let (editor_tx, editor_rx) = mpsc::unbounded_channel();

-let mut service = TypstLanguageServer::new(TypstLanguageServerArgs {
+let mut service = TypstLanguageServer::new(
-client: self.host.clone(),
+self.host.clone(),
-const_config: cc.clone(),
+cc.clone(),
-diag_tx,
+editor_tx,
-handle: self.handle.clone(),
 font,
-});
+self.handle.clone(),
+);

 if let Err(err) = res {
 return (service, Err(err));
@@ -373,14 +314,11 @@ impl Init {
 service.run_format_thread();
 service.run_user_action_thread();

-let cluster_actor = EditorActor {
-host: self.host.clone(),
-diag_rx,
-diagnostics: HashMap::new(),
-affect_map: HashMap::new(),
-published_primary: false,
-notify_compile_status: service.config.compile.notify_compile_status,
-};
+let editor_actor = EditorActor::new(
+self.host.clone(),
+editor_rx,
+service.config.compile.notify_compile_status,
+);

 let fallback = service.config.compile.determine_default_entry_path();
 let primary = service.server(
@@ -394,14 +332,14 @@ impl Init {
 service.primary.compiler = Some(primary);

 // Run the cluster in the background after we referencing it
-self.handle.spawn(cluster_actor.run());
+self.handle.spawn(editor_actor.run());

 // Respond to the host (LSP client)

 // Register these capabilities statically if the client does not support dynamic
 // registration
 let semantic_tokens_provider = match service.config.semantic_tokens {
-SemanticTokensMode::Enable if !cc.sema_tokens_dynamic_registration => {
+SemanticTokensMode::Enable if !cc.tokens_dynamic_registration => {
 Some(get_semantic_tokens_options().into())
 }
 _ => None,
@@ -2,13 +2,13 @@

 use std::path::PathBuf;

-use ::typst::{diag::FileResult, syntax::Source};
 use anyhow::anyhow;
 use lsp_types::TextDocumentContentChangeEvent;
 use tinymist_query::{
 lsp_to_typst, CompilerQueryRequest, CompilerQueryResponse, FoldRequestFeature, OnExportRequest,
 OnSaveExportRequest, PositionEncoding, SemanticRequest, StatefulRequest, SyntaxRequest,
 };
+use typst::{diag::FileResult, syntax::Source};
 use typst_ts_compiler::{
 vfs::notify::{FileChangeSet, MemoryEvent},
 Time,
@@ -30,11 +30,10 @@ impl CompileServer {
 impl TypstLanguageServer {
 /// Pin the entry to the given path
 pub fn pin_entry(&mut self, new_entry: Option<ImmutPath>) -> Result<(), Error> {
-let pinning = new_entry.is_some();
+self.pinning = new_entry.is_some();
 self.primary.do_change_entry(new_entry)?;
-self.pinning = pinning;

-if !pinning {
+if !self.pinning {
 let fallback = self.config.compile.determine_default_entry_path();
 let fallback = fallback.or_else(|| self.focusing.clone());
 if let Some(e) = fallback {
@@ -195,7 +194,7 @@ impl TypstLanguageServer {
 macro_rules! run_query {
 ($self: ident.$query: ident ($($arg_key:ident),* $(,)?)) => {{
 use tinymist_query::*;
-let req = paste! { [<$query Request>] { $($arg_key),* } };
+let req = paste::paste! { [<$query Request>] { $($arg_key),* } };
 $self
 .query(CompilerQueryRequest::$query(req.clone()))
 .map_err(|err| {
@@ -255,7 +254,7 @@ impl TypstLanguageServer {
 f: impl FnOnce(Source) -> anyhow::Result<T>,
 ) -> anyhow::Result<T> {
 let snapshot = self.primary.memory_changes.get(&path);
-let snapshot = snapshot.ok_or_else(|| anyhow!("file missing {:?}", path))?;
+let snapshot = snapshot.ok_or_else(|| anyhow!("file missing {path:?}"))?;
 let source = snapshot.content.clone();
 f(source)
 }
@@ -292,10 +291,10 @@ impl TypstLanguageServer {
 assert!(query.fold_feature() != FoldRequestFeature::ContextFreeUnique);

 match query {
-CompilerQueryRequest::OnExport(OnExportRequest { kind, path }) => Ok(
+OnExport(OnExportRequest { kind, path }) => Ok(
 CompilerQueryResponse::OnExport(client.on_export(kind, path)?),
 ),
-CompilerQueryRequest::OnSaveExport(OnSaveExportRequest { path }) => {
+OnSaveExport(OnSaveExportRequest { path }) => {
 client.on_save_export(path)?;
 Ok(CompilerQueryResponse::OnSaveExport(()))
 }
@@ -312,21 +311,12 @@ impl TypstLanguageServer {
 Rename(req) => query_state!(client, Rename, req),
 PrepareRename(req) => query_state!(client, PrepareRename, req),
 Symbol(req) => query_world!(client, Symbol, req),

 DocumentMetrics(req) => query_state!(client, DocumentMetrics, req),
 ServerInfo(_) => {
 let res = client.collect_server_info()?;
 Ok(CompilerQueryResponse::ServerInfo(Some(res)))
 }
-InteractCodeContext(..)
-| FoldingRange(..)
-| SelectionRange(..)
-| SemanticTokensDelta(..)
-| Formatting(..)
-| DocumentSymbol(..)
-| ColorPresentation(..)
-| SemanticTokensFull(..) => unreachable!(),
+_ => unreachable!(),
 }
 }
 }
@@ -9,6 +9,7 @@ use unicode_script::{Script, UnicodeScript};

 /// Words count for a document.
 #[derive(Debug, Clone, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
 pub struct WordsCount {
 /// Number of words.
 pub words: usize,
@@ -18,7 +19,6 @@ pub struct WordsCount {
 /// Multiple consecutive spaces are counted as one.
 pub spaces: usize,
 /// Number of CJK characters.
-#[serde(rename = "cjkChars")]
 pub cjk_chars: usize,
 }

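The container-level attribute added above replaces the per-field renames. A short sketch (assuming serde and serde_json as dependencies, which are not introduced by this diff) showing that the two forms produce the same JSON keys:

```rust
use serde::Serialize;

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct WordsCountDemo {
    words: usize,
    cjk_chars: usize,
}

fn main() {
    let sample = WordsCountDemo { words: 2, cjk_chars: 1 };
    // `cjk_chars` is serialized as `cjkChars`, exactly as the removed
    // `#[serde(rename = "cjkChars")]` attribute did.
    assert_eq!(
        serde_json::to_string(&sample).unwrap(),
        r#"{"words":2,"cjkChars":1}"#
    );
}
```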
@@ -22,6 +22,18 @@ pub fn threaded_receive<T: Send>(f: oneshot::Receiver<T>) -> Result<T, Error> {
 .map_err(map_string_err("failed to recv from receive data"))
 }

+pub fn try_<T>(f: impl FnOnce() -> Option<T>) -> Option<T> {
+f()
+}
+
+pub fn try_or<T>(f: impl FnOnce() -> Option<T>, default: T) -> T {
+f().unwrap_or(default)
+}
+
+pub fn try_or_default<T: Default>(f: impl FnOnce() -> Option<T>) -> T {
+f().unwrap_or_default()
+}
+
 #[cfg(test)]
 mod tests {
 fn do_receive() {
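The helpers added above exist to mimic try blocks over `Option` chains, as the commit message puts it: an immediately invoked closure gives the `?` operator somewhere to short-circuit to. A minimal usage sketch with a hypothetical `update` map (the real call sites are `Config::update_by_map` and `ConstConfig::from` earlier in this diff):

```rust
use std::collections::HashMap;

pub fn try_<T>(f: impl FnOnce() -> Option<T>) -> Option<T> {
    f()
}

pub fn try_or<T>(f: impl FnOnce() -> Option<T>, default: T) -> T {
    f().unwrap_or(default)
}

fn main() {
    let update: HashMap<&str, &str> = HashMap::from([("formatterPrintWidth", "120")]);

    // `?` bails out of the closure as soon as the key is missing or parsing fails.
    let width = try_or(|| update.get("formatterPrintWidth")?.parse::<u32>().ok(), 80);
    assert_eq!(width, 120);

    // Without a default the caller keeps the Option and can `.inspect` it,
    // which is how the config update code applies only the fields present.
    let tokens = try_(|| update.get("semanticTokens")?.parse::<u32>().ok());
    assert!(tokens.is_none());
}
```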
@@ -26,30 +26,24 @@ pub struct CompileOpts {
 }

 #[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
 pub struct CompileOnceOpts {
 /// The root directory for compilation routine.
-#[serde(rename = "rootDir")]
 pub root_dir: PathBuf,

 /// Path to entry
 pub entry: PathBuf,

 /// Additional input arguments to compile the entry file.
 pub inputs: TypstDict,
 }

 #[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)]
+#[serde(rename_all = "camelCase")]
 pub struct CompileFontOpts {
 /// Path to font profile for cache
-#[serde(rename = "fontProfileCachePath")]
 pub font_profile_cache_path: PathBuf,

 /// will remove later
-#[serde(rename = "fontPaths")]
 pub font_paths: Vec<PathBuf>,

 /// Exclude system font paths
-#[serde(rename = "noSystemFonts")]
 pub no_system_fonts: bool,
 }