mirror of https://github.com/Myriad-Dreamin/tinymist.git (synced 2025-11-19 19:44:18 +00:00)
feat: multi-page export and no-write support (#2098)
Some checks are pending
tinymist::auto_tag / auto-tag (push) Waiting to run
tinymist::ci / announce (push) Blocked by required conditions
tinymist::ci / Duplicate Actions Detection (push) Waiting to run
tinymist::ci / Check Clippy, Formatting, Completion, Documentation, and Tests (Linux) (push) Waiting to run
tinymist::ci / Check Minimum Rust version and Tests (Windows) (push) Waiting to run
tinymist::ci / prepare-build (push) Waiting to run
tinymist::ci / build (push) Blocked by required conditions
tinymist::gh_pages / build-gh-pages (push) Waiting to run
## Features

- Support multi-page export in the server export command. An extra argument decides whether to write to the file or only return the data for client use (page preview in the exporter). VS Code users can use this feature via the quick-export command or a code lens.
- (refactor) Move most export logic from tinymist to tinymist-task, excluding the typlite-related parts, which already depend on tinymist-task.
- Add the relevant export tests. The export e2e test now includes hash checking for all targets.

## Not done

- Support the new args in `TypstExtraArgs` for CLI compilation.

---------

Co-authored-by: Myriad-Dreamin <camiyoru@gmail.com>
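For reviewers, here is a minimal sketch of how a client might build the arguments for one of the export commands with the new third argument. The command name and the exact string encodings (page ranges, gap lengths) are assumptions; only the argument layout (path, format options, action options) and the `write`/`open` options come from the handlers added in `crates/tinymist/src/cmd/export.rs` below.

```rust
// Sketch only: assembles the JSON arguments an editor client could pass to an
// export command such as PNG export. Value encodings for `pages` and
// `merge.gap` are assumptions made for illustration.
use serde_json::json;

fn main() {
    let args = vec![
        // args[0]: absolute path of the typst document to export
        json!("/abs/path/to/main.typ"),
        // args[1]: format-specific options (ExportPngOpts here)
        json!({ "ppi": 144.0, "pages": ["1-3"], "merge": { "gap": "4pt" } }),
        // args[2]: ExportActionOpts; `write: false` asks the server to skip
        // writing files and return base64-encoded data in OnExportResponse
        json!({ "write": false, "open": false }),
    ];
    println!("{}", serde_json::to_string_pretty(&args).unwrap());
}
```

When `write` is omitted or true (the default), the response carries the written path(s) instead of the encoded data.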
parent 1ca1d82901
commit bcd6af975e

26 changed files with 1662 additions and 889 deletions
@@ -226,16 +226,25 @@ impl TaskCompileArgs {
        let config = match output_format {
            OutputFormat::Pdf => ProjectTask::ExportPdf(ExportPdfTask {
                export,
                pages: self.pages.clone(),
                pdf_standards: self.pdf.pdf_standard.clone(),
                creation_timestamp: None,
            }),
            OutputFormat::Png => ProjectTask::ExportPng(ExportPngTask {
                export,
                pages: self.pages.clone(),
                page_number_template: None,
                merge: None,
                ppi: self.png.ppi.try_into().unwrap(),
                fill: None,
            }),
            OutputFormat::Svg => ProjectTask::ExportSvg(ExportSvgTask { export }),
            OutputFormat::Html => ProjectTask::ExportSvg(ExportSvgTask { export }),
            OutputFormat::Svg => ProjectTask::ExportSvg(ExportSvgTask {
                export,
                pages: self.pages.clone(),
                page_number_template: None,
                merge: None,
            }),
            OutputFormat::Html => ProjectTask::ExportHtml(ExportHtmlTask { export }),
        };

        Ok(ApplyProjectTask {
@@ -145,10 +145,44 @@ mod polymorphic {
        pub path: PathBuf,
        /// The export task to run.
        pub task: ProjectTask,
        /// Whether to write to file.
        pub write: bool,
        /// Whether to open the exported file(s) after the export is done.
        pub open: bool,
    }

    /// The response to an export request.
    #[derive(Debug, Clone, Serialize, Deserialize)]
    #[serde(untagged, rename_all = "camelCase")]
    pub enum OnExportResponse {
        /// Non-page or a single page exported.
        Single {
            /// The path of the exported file. None if not written to file.
            path: Option<PathBuf>,
            /// The data of the exported file. None if written to file.
            data: Option<String>,
        },
        /// Multiple pages exported.
        Paged {
            /// The total number of pages of the document.
            total_pages: usize,
            /// The exported pages.
            items: Vec<PagedExportResponse>,
        },
    }

    /// The response to a single page export.
    #[derive(Debug, Clone, Serialize, Deserialize)]
    #[serde(rename_all = "camelCase")]
    pub struct PagedExportResponse {
        /// The page number of the exported page (0-based).
        pub page: usize,
        /// The path of the exported file. None if not written to file.
        pub path: Option<PathBuf>,
        /// The data of the exported file. None if written to file.
        pub data: Option<String>,
    }

    /// A request to format the document.
    #[derive(Debug, Clone)]
    pub struct FormattingRequest {

@@ -334,7 +368,7 @@ mod polymorphic {
    #[serde(untagged)]
    pub enum CompilerQueryResponse {
        /// The response to the on export request.
        OnExport(Option<PathBuf>),
        OnExport(Option<OnExportResponse>),
        /// The response to the hover request.
        Hover(Option<Hover>),
        /// The response to the goto definition request.
@@ -3,24 +3,24 @@
use std::str::FromStr;
use std::sync::Arc;

use comemo::Track;
use ecow::EcoString;
use tinymist_std::error::prelude::*;
use tinymist_std::typst::{TypstDocument, TypstHtmlDocument, TypstPagedDocument};
use tinymist_std::typst::TypstPagedDocument;
use tinymist_world::{CompileSnapshot, CompilerFeat, ExportComputation, WorldComputeGraph};
use typst::World;
use typst::diag::{SourceResult, StrResult};
use typst::foundations::{Bytes, Content, IntoValue, LocatableSelector, Scope, Value};
use typst::layout::Abs;
use typst::routines::EvalMode;
use typst::syntax::{Span, SyntaxNode, ast};
use typst::foundations::Bytes;
use typst::layout::{Abs, Page};
use typst::syntax::{SyntaxNode, ast};
use typst::visualize::Color;
use typst_eval::eval_string;

use crate::model::{ExportHtmlTask, ExportPngTask, ExportSvgTask};
use crate::primitives::TaskWhen;
use crate::{ExportTransform, Pages, QueryTask};
use crate::{Pages, TaskWhen, exported_page_ranges};

mod html;
pub use html::*;
mod png;
pub use png::*;
mod query;
pub use query::*;
mod svg;
pub use svg::*;
#[cfg(feature = "pdf")]
pub mod pdf;
#[cfg(feature = "pdf")]
@@ -52,283 +52,37 @@ impl ExportTimings {
    }
}

/// The computation for svg export.
pub struct SvgExport;

impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for SvgExport {
    type Output = String;
    type Config = ExportSvgTask;

    fn run(
        _graph: &Arc<WorldComputeGraph<F>>,
        doc: &Arc<TypstPagedDocument>,
        config: &ExportSvgTask,
    ) -> Result<String> {
        let (is_first, merged_gap) = get_page_selection(&config.export)?;

        let first_page = doc.pages.first();

        Ok(if is_first {
            if let Some(first_page) = first_page {
                typst_svg::svg(first_page)
            } else {
                typst_svg::svg_merged(doc, merged_gap)
            }
        } else {
            typst_svg::svg_merged(doc, merged_gap)
        })
    }
/// The output of image exports, either paged or merged.
pub enum ImageOutput<T> {
    /// Each page exported separately.
    Paged(Vec<PagedOutput<T>>),
    /// All pages merged into one output.
    Merged(T),
}

// impl<F: CompilerFeat> WorldComputable<F> for SvgExport {
//     type Output = Option<String>;

//     fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self::Output> {
//         OptionDocumentTask::run_export::<F, Self>(graph)
//     }
// }

/// The computation for png export.
pub struct PngExport;

impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for PngExport {
    type Output = Bytes;
    type Config = ExportPngTask;

    fn run(
        _graph: &Arc<WorldComputeGraph<F>>,
        doc: &Arc<TypstPagedDocument>,
        config: &ExportPngTask,
    ) -> Result<Bytes> {
        let ppi = config.ppi.to_f32();
        if ppi <= 1e-6 {
            tinymist_std::bail!("invalid ppi: {ppi}");
        }

        let fill = if let Some(fill) = &config.fill {
            parse_color(fill.clone()).map_err(|err| anyhow::anyhow!("invalid fill ({err})"))?
        } else {
            Color::WHITE
        };

        let (is_first, merged_gap) = get_page_selection(&config.export)?;

        let ppp = ppi / 72.;
        let pixmap = if is_first {
            if let Some(first_page) = doc.pages.first() {
                typst_render::render(first_page, ppp)
            } else {
                typst_render::render_merged(doc, ppp, merged_gap, Some(fill))
            }
        } else {
            typst_render::render_merged(doc, ppp, merged_gap, Some(fill))
        };

        pixmap
            .encode_png()
            .map(Bytes::new)
            .context_ut("failed to encode PNG")
    }
/// The output of a single page.
pub struct PagedOutput<T> {
    /// The page number (0-based).
    pub page: usize,
    /// The value of the page.
    pub value: T,
}

// impl<F: CompilerFeat> WorldComputable<F> for PngExport {
//     type Output = Option<Bytes>;

//     fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self::Output> {
//         OptionDocumentTask::run_export::<F, Self>(graph)
//     }
// }

/// The computation for html export.
pub struct HtmlExport;

impl<F: CompilerFeat> ExportComputation<F, TypstHtmlDocument> for HtmlExport {
    type Output = String;
    type Config = ExportHtmlTask;

    fn run(
        _graph: &Arc<WorldComputeGraph<F>>,
        doc: &Arc<TypstHtmlDocument>,
        _config: &ExportHtmlTask,
    ) -> Result<String> {
        Ok(typst_html::html(doc)?)
    }
}

// impl<F: CompilerFeat> WorldComputable<F> for HtmlExport {
//     type Output = Option<String>;

//     fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self::Output> {
//         OptionDocumentTask::run_export::<F, Self>(graph)
//     }
// }

/// The computation for document query.
pub struct DocumentQuery;

impl DocumentQuery {
    // todo: query exporter
    /// Retrieve the matches for the selector.
    pub fn retrieve<D: typst::Document>(
        world: &dyn World,
        selector: &str,
        document: &D,
    ) -> StrResult<Vec<Content>> {
        let selector = eval_string(
            &typst::ROUTINES,
            world.track(),
            selector,
            Span::detached(),
            EvalMode::Code,
            Scope::default(),
        )
        .map_err(|errors| {
            let mut message = EcoString::from("failed to evaluate selector");
            for (i, error) in errors.into_iter().enumerate() {
                message.push_str(if i == 0 { ": " } else { ", " });
                message.push_str(&error.message);
            }
            message
        })?
        .cast::<LocatableSelector>()
        .map_err(|e| EcoString::from(format!("failed to cast: {}", e.message())))?;

        Ok(document
            .introspector()
            .query(&selector.0)
            .into_iter()
            .collect::<Vec<_>>())
    }

    fn run_inner<F: CompilerFeat, D: typst::Document>(
        g: &Arc<WorldComputeGraph<F>>,
        doc: &Arc<D>,
        config: &QueryTask,
    ) -> Result<Vec<Value>> {
        let selector = &config.selector;
        let elements = Self::retrieve(&g.snap.world, selector, doc.as_ref())
            .map_err(|e| anyhow::anyhow!("failed to retrieve: {e}"))?;
        if config.one && elements.len() != 1 {
            bail!("expected exactly one element, found {}", elements.len());
        }

        Ok(elements
            .into_iter()
            .filter_map(|c| match &config.field {
                Some(field) => c.get_by_name(field).ok(),
                _ => Some(c.into_value()),
            })
            .collect())
    }

    /// Queries the document and returns the result as a value.
    pub fn doc_get_as_value<F: CompilerFeat>(
        g: &Arc<WorldComputeGraph<F>>,
        doc: &TypstDocument,
        config: &QueryTask,
    ) -> Result<serde_json::Value> {
        match doc {
            TypstDocument::Paged(doc) => Self::get_as_value(g, doc, config),
            TypstDocument::Html(doc) => Self::get_as_value(g, doc, config),
        }
    }

    /// Queries the document and returns the result as a value.
    pub fn get_as_value<F: CompilerFeat, D: typst::Document>(
        g: &Arc<WorldComputeGraph<F>>,
        doc: &Arc<D>,
        config: &QueryTask,
    ) -> Result<serde_json::Value> {
        let mapped = Self::run_inner(g, doc, config)?;

        let res = if config.one {
            let Some(value) = mapped.first() else {
                bail!("no such field found for element");
            };
            serde_json::to_value(value)
        } else {
            serde_json::to_value(&mapped)
        };

        res.context("failed to serialize")
    }
}

impl<F: CompilerFeat, D: typst::Document> ExportComputation<F, D> for DocumentQuery {
    type Output = SourceResult<String>;
    type Config = QueryTask;

    fn run(
        g: &Arc<WorldComputeGraph<F>>,
        doc: &Arc<D>,
        config: &QueryTask,
    ) -> Result<SourceResult<String>> {
        let pretty = false;
        let mapped = Self::run_inner(g, doc, config)?;

        let res = if config.one {
            let Some(value) = mapped.first() else {
                bail!("no such field found for element");
            };
            serialize(value, &config.format, pretty)
        } else {
            serialize(&mapped, &config.format, pretty)
        };

        res.map(Ok)
    }
}

/// Serialize data to the output format.
fn serialize(data: &impl serde::Serialize, format: &str, pretty: bool) -> Result<String> {
    Ok(match format {
        "json" if pretty => serde_json::to_string_pretty(data).context("serialize query")?,
        "json" => serde_json::to_string(data).context("serialize query")?,
        "yaml" => serde_yaml::to_string(&data).context_ut("serialize query")?,
        "txt" => {
            use serde_json::Value::*;
            let value = serde_json::to_value(data).context("serialize query")?;
            match value {
                String(s) => s,
                _ => {
                    let kind = match value {
                        Null => "null",
                        Bool(_) => "boolean",
                        Number(_) => "number",
                        String(_) => "string",
                        Array(_) => "array",
                        Object(_) => "object",
                    };
                    bail!("expected a string value for format: {format}, got {kind}")
                }
            }
        }
        _ => bail!("unsupported format for query: {format}"),
    })
}

/// Gets legacy page selection
pub fn get_page_selection(task: &crate::ExportTask) -> Result<(bool, Abs)> {
    let is_first = task
        .transform
fn select_pages<'a>(
    document: &'a TypstPagedDocument,
    pages: &Option<Vec<Pages>>,
) -> Vec<(usize, &'a Page)> {
    let pages = pages.as_ref().map(|pages| exported_page_ranges(pages));
    document
        .pages
        .iter()
        .any(|t| matches!(t, ExportTransform::Pages { ranges, .. } if ranges == &[Pages::FIRST]));

    let mut gap_res = Abs::default();
    if !is_first {
        for trans in &task.transform {
            if let ExportTransform::Merge { gap } = trans {
                let gap = gap
                    .as_deref()
                    .map(parse_length)
                    .transpose()
                    .context_ut("failed to parse gap")?;
                gap_res = gap.unwrap_or_default();
            }
        }
    }

    Ok((is_first, gap_res))
        .enumerate()
        .filter(|(i, _)| {
            pages
                .as_ref()
                .is_none_or(|exported_page_ranges| exported_page_ranges.includes_page_index(*i))
        })
        .collect::<Vec<_>>()
}

fn parse_length(gap: &str) -> Result<Abs> {
@@ -364,8 +118,8 @@ fn descendants(node: &SyntaxNode) -> impl IntoIterator<Item = &SyntaxNode> + '_
    res
}

fn parse_color(fill: String) -> anyhow::Result<Color> {
    match fill.as_str() {
fn parse_color(fill: &str) -> anyhow::Result<Color> {
    match fill {
        "black" => Ok(Color::BLACK),
        "white" => Ok(Color::WHITE),
        "red" => Ok(Color::RED),

@@ -385,24 +139,15 @@ mod tests {

    #[test]
    fn test_parse_color() {
        assert_eq!(parse_color("black".to_owned()).unwrap(), Color::BLACK);
        assert_eq!(parse_color("white".to_owned()).unwrap(), Color::WHITE);
        assert_eq!(parse_color("red".to_owned()).unwrap(), Color::RED);
        assert_eq!(parse_color("green".to_owned()).unwrap(), Color::GREEN);
        assert_eq!(parse_color("blue".to_owned()).unwrap(), Color::BLUE);
        assert_eq!(
            parse_color("#000000".to_owned()).unwrap().to_hex(),
            "#000000"
        );
        assert_eq!(
            parse_color("#ffffff".to_owned()).unwrap().to_hex(),
            "#ffffff"
        );
        assert_eq!(
            parse_color("#000000cc".to_owned()).unwrap().to_hex(),
            "#000000cc"
        );
        assert!(parse_color("invalid".to_owned()).is_err());
        assert_eq!(parse_color("black").unwrap(), Color::BLACK);
        assert_eq!(parse_color("white").unwrap(), Color::WHITE);
        assert_eq!(parse_color("red").unwrap(), Color::RED);
        assert_eq!(parse_color("green").unwrap(), Color::GREEN);
        assert_eq!(parse_color("blue").unwrap(), Color::BLUE);
        assert_eq!(parse_color("#000000").unwrap().to_hex(), "#000000");
        assert_eq!(parse_color("#ffffff").unwrap().to_hex(), "#ffffff");
        assert_eq!(parse_color("#000000cc").unwrap().to_hex(), "#000000cc");
        assert!(parse_color("invalid").is_err());
    }

    #[test]
crates/tinymist-task/src/compute/html.rs (new file, 33 lines)

@@ -0,0 +1,33 @@
//! The computation for html export.

use std::sync::Arc;

use tinymist_std::error::prelude::*;
use tinymist_std::typst::TypstHtmlDocument;
use tinymist_world::{CompilerFeat, ExportComputation, WorldComputeGraph};

use crate::model::ExportHtmlTask;

/// The computation for html export.
pub struct HtmlExport;

impl<F: CompilerFeat> ExportComputation<F, TypstHtmlDocument> for HtmlExport {
    type Output = String;
    type Config = ExportHtmlTask;

    fn run(
        _graph: &Arc<WorldComputeGraph<F>>,
        doc: &Arc<TypstHtmlDocument>,
        _config: &ExportHtmlTask,
    ) -> Result<String> {
        Ok(typst_html::html(doc)?)
    }
}

// impl<F: CompilerFeat> WorldComputable<F> for HtmlExport {
//     type Output = Option<String>;

//     fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self::Output> {
//         OptionDocumentTask::run_export::<F, Self>(graph)
//     }
// }
@@ -48,6 +48,10 @@ impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for PdfExport {
        Ok(Bytes::new(typst_pdf::pdf(
            doc,
            &PdfOptions {
                page_ranges: config
                    .pages
                    .as_ref()
                    .map(|pages| exported_page_ranges(pages)),
                timestamp: Some(timestamp),
                standards,
                ..Default::default()
crates/tinymist-task/src/compute/png.rs (new file, 84 lines)

@@ -0,0 +1,84 @@
//! The computation for png export.

use std::sync::Arc;

use tinymist_std::error::prelude::*;
use tinymist_std::typst::TypstPagedDocument;
use tinymist_world::{CompilerFeat, ExportComputation, WorldComputeGraph};
use typst::foundations::Bytes;

use crate::compute::{parse_color, parse_length, select_pages};
use crate::model::ExportPngTask;
use crate::{ImageOutput, PageMerge, PagedOutput};

/// The computation for png export.
pub struct PngExport;

impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for PngExport {
    type Output = ImageOutput<Bytes>;
    type Config = ExportPngTask;

    fn run(
        _graph: &Arc<WorldComputeGraph<F>>,
        doc: &Arc<TypstPagedDocument>,
        config: &ExportPngTask,
    ) -> Result<Self::Output> {
        let ppi = config.ppi.to_f32();
        if ppi <= 1e-6 {
            bail!("invalid ppi: {ppi}");
        }

        let fill = if let Some(fill) = &config.fill {
            Some(parse_color(fill).map_err(|err| anyhow::anyhow!("invalid fill ({err})"))?)
        } else {
            None
        };

        let ppp = ppi / 72.;

        let exported_pages = select_pages(doc, &config.pages);
        if let Some(PageMerge { ref gap }) = config.merge {
            let dummy_doc = TypstPagedDocument {
                pages: exported_pages
                    .into_iter()
                    .map(|(_, page)| page.clone())
                    .collect(),
                ..Default::default()
            };
            let gap = gap
                .as_ref()
                .and_then(|gap| parse_length(gap).ok())
                .unwrap_or_default();
            let pixmap = typst_render::render_merged(&dummy_doc, ppp, gap, fill);
            let png = pixmap
                .encode_png()
                .map(Bytes::new)
                .context_ut("failed to encode PNG")?;
            Ok(ImageOutput::Merged(png))
        } else {
            let exported = exported_pages
                .into_iter()
                .map(|(i, page)| {
                    let pixmap = typst_render::render(page, ppp);
                    let png = pixmap
                        .encode_png()
                        .map(Bytes::new)
                        .context_ut("failed to encode PNG")?;
                    Ok(PagedOutput {
                        page: i,
                        value: png,
                    })
                })
                .collect::<Result<Vec<_>>>()?;
            Ok(ImageOutput::Paged(exported))
        }
    }
}

// impl<F: CompilerFeat> WorldComputable<F> for PngExport {
//     type Output = Option<Bytes>;

//     fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self::Output> {
//         OptionDocumentTask::run_export::<F, Self>(graph)
//     }
// }
crates/tinymist-task/src/compute/query.rs (new file, 161 lines)

@@ -0,0 +1,161 @@
//! The computation for document query.

use std::sync::Arc;

use comemo::Track;
use ecow::EcoString;
use tinymist_std::error::prelude::*;
use tinymist_std::typst::TypstDocument;
use tinymist_world::{CompilerFeat, ExportComputation, WorldComputeGraph};
use typst::World;
use typst::diag::{SourceResult, StrResult};
use typst::foundations::{Content, IntoValue, LocatableSelector, Scope, Value};
use typst::routines::EvalMode;
use typst::syntax::Span;
use typst_eval::eval_string;

use crate::QueryTask;

/// The computation for document query.
pub struct DocumentQuery;

impl DocumentQuery {
    // todo: query exporter
    /// Retrieve the matches for the selector.
    pub fn retrieve<D: typst::Document>(
        world: &dyn World,
        selector: &str,
        document: &D,
    ) -> StrResult<Vec<Content>> {
        let selector = eval_string(
            &typst::ROUTINES,
            world.track(),
            selector,
            Span::detached(),
            EvalMode::Code,
            Scope::default(),
        )
        .map_err(|errors| {
            let mut message = EcoString::from("failed to evaluate selector");
            for (i, error) in errors.into_iter().enumerate() {
                message.push_str(if i == 0 { ": " } else { ", " });
                message.push_str(&error.message);
            }
            message
        })?
        .cast::<LocatableSelector>()
        .map_err(|e| EcoString::from(format!("failed to cast: {}", e.message())))?;

        Ok(document
            .introspector()
            .query(&selector.0)
            .into_iter()
            .collect::<Vec<_>>())
    }

    fn run_inner<F: CompilerFeat, D: typst::Document>(
        g: &Arc<WorldComputeGraph<F>>,
        doc: &Arc<D>,
        config: &QueryTask,
    ) -> Result<Vec<Value>> {
        let selector = &config.selector;
        let elements = Self::retrieve(&g.snap.world, selector, doc.as_ref())
            .map_err(|e| anyhow::anyhow!("failed to retrieve: {e}"))?;
        if config.one && elements.len() != 1 {
            bail!("expected exactly one element, found {}", elements.len());
        }

        Ok(elements
            .into_iter()
            .filter_map(|c| match &config.field {
                Some(field) => c.get_by_name(field).ok(),
                _ => Some(c.into_value()),
            })
            .collect())
    }

    /// Queries the document and returns the result as a value.
    pub fn doc_get_as_value<F: CompilerFeat>(
        g: &Arc<WorldComputeGraph<F>>,
        doc: &TypstDocument,
        config: &QueryTask,
    ) -> Result<serde_json::Value> {
        match doc {
            TypstDocument::Paged(doc) => Self::get_as_value(g, doc, config),
            TypstDocument::Html(doc) => Self::get_as_value(g, doc, config),
        }
    }

    /// Queries the document and returns the result as a value.
    pub fn get_as_value<F: CompilerFeat, D: typst::Document>(
        g: &Arc<WorldComputeGraph<F>>,
        doc: &Arc<D>,
        config: &QueryTask,
    ) -> Result<serde_json::Value> {
        let mapped = Self::run_inner(g, doc, config)?;

        let res = if config.one {
            let Some(value) = mapped.first() else {
                bail!("no such field found for element");
            };
            serde_json::to_value(value)
        } else {
            serde_json::to_value(&mapped)
        };

        res.context("failed to serialize")
    }
}

impl<F: CompilerFeat, D: typst::Document> ExportComputation<F, D> for DocumentQuery {
    type Output = SourceResult<String>;
    type Config = QueryTask;

    fn run(
        g: &Arc<WorldComputeGraph<F>>,
        doc: &Arc<D>,
        config: &QueryTask,
    ) -> Result<SourceResult<String>> {
        let pretty = false;
        let mapped = Self::run_inner(g, doc, config)?;

        let res = if config.one {
            let Some(value) = mapped.first() else {
                bail!("no such field found for element");
            };
            serialize(value, &config.format, pretty)
        } else {
            serialize(&mapped, &config.format, pretty)
        };

        res.map(Ok)
    }
}

/// Serialize data to the output format.
fn serialize(data: &impl serde::Serialize, format: &str, pretty: bool) -> Result<String> {
    Ok(match format {
        "json" if pretty => serde_json::to_string_pretty(data).context("serialize query")?,
        "json" => serde_json::to_string(data).context("serialize query")?,
        "yaml" => serde_yaml::to_string(&data).context_ut("serialize query")?,
        "txt" => {
            use serde_json::Value::*;
            let value = serde_json::to_value(data).context("serialize query")?;
            match value {
                String(s) => s,
                _ => {
                    let kind = match value {
                        Null => "null",
                        Bool(_) => "boolean",
                        Number(_) => "number",
                        String(_) => "string",
                        Array(_) => "array",
                        Object(_) => "object",
                    };
                    bail!("expected a string value for format: {format}, got {kind}")
                }
            }
        }
        _ => bail!("unsupported format for query: {format}"),
    })
}
crates/tinymist-task/src/compute/svg.rs (new file, 64 lines)

@@ -0,0 +1,64 @@
//! The computation for svg export.

use std::sync::Arc;

use tinymist_std::error::prelude::*;
use tinymist_std::typst::TypstPagedDocument;
use tinymist_world::{CompilerFeat, ExportComputation, WorldComputeGraph};

use crate::compute::{parse_length, select_pages};
use crate::model::ExportSvgTask;
use crate::{ImageOutput, PageMerge, PagedOutput};

/// The computation for svg export.
pub struct SvgExport;

impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for SvgExport {
    type Output = ImageOutput<String>;
    type Config = ExportSvgTask;

    fn run(
        _graph: &Arc<WorldComputeGraph<F>>,
        doc: &Arc<TypstPagedDocument>,
        config: &ExportSvgTask,
    ) -> Result<Self::Output> {
        let exported_pages = select_pages(doc, &config.pages);
        if let Some(PageMerge { ref gap }) = config.merge {
            // Typst does not expose svg-merging API.
            // Therefore, we have to create a dummy document here.
            let dummy_doc = TypstPagedDocument {
                pages: exported_pages
                    .into_iter()
                    .map(|(_, page)| page.clone())
                    .collect(),
                ..Default::default()
            };
            let gap = gap
                .as_ref()
                .and_then(|gap| parse_length(gap).ok())
                .unwrap_or_default();
            let svg = typst_svg::svg_merged(&dummy_doc, gap);
            Ok(ImageOutput::Merged(svg))
        } else {
            let exported = exported_pages
                .into_iter()
                .map(|(i, page)| {
                    let svg = typst_svg::svg(page);
                    Ok(PagedOutput {
                        page: i,
                        value: svg,
                    })
                })
                .collect::<Result<Vec<_>>>()?;
            Ok(ImageOutput::Paged(exported))
        }
    }
}

// impl<F: CompilerFeat> WorldComputable<F> for SvgExport {
//     type Output = Option<String>;

//     fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self::Output> {
//         OptionDocumentTask::run_export::<F, Self>(graph)
//     }
// }
@@ -190,18 +190,12 @@ impl ExportTask {
    }
}

/// The legacy page selection specifier.
#[derive(Default, Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum PageSelection {
    /// Selects the first page.
    #[default]
    First,
    /// Merges all pages into a single page.
    Merged {
        /// The gap between pages (in pt).
        gap: Option<String>,
    },
/// A page merge specifier.
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(default)]
pub struct PageMerge {
    /// The gap between pages (in pt).
    pub gap: Option<String>,
}

/// A project export transform specifier.

@@ -242,6 +236,9 @@ pub struct ExportPdfTask {
    /// The shared export arguments.
    #[serde(flatten)]
    pub export: ExportTask,
    /// Which pages to export. When unspecified, all pages are exported.
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub pages: Option<Vec<Pages>>,
    /// One (or multiple comma-separated) PDF standards that Typst will enforce
    /// conformance with.
    #[serde(skip_serializing_if = "Vec::is_empty", default)]

@@ -260,6 +257,15 @@ pub struct ExportPngTask {
    /// The shared export arguments.
    #[serde(flatten)]
    pub export: ExportTask,
    /// Which pages to export. When unspecified, all pages are exported.
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub pages: Option<Vec<Pages>>,
    /// The page template to use for multiple pages.
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub page_number_template: Option<String>,
    /// The page merge specifier.
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub merge: Option<PageMerge>,
    /// The PPI (pixels per inch) to use for PNG export.
    pub ppi: Scalar,
    /// The expression constructing background fill color (in typst script).

@@ -278,6 +284,15 @@ pub struct ExportSvgTask {
    /// The shared export arguments.
    #[serde(flatten)]
    pub export: ExportTask,
    /// The page template to use for multiple pages.
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub page_number_template: Option<String>,
    /// Which pages to export. When unspecified, all pages are exported.
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub pages: Option<Vec<Pages>>,
    /// The page merge specifier.
    #[serde(skip_serializing_if = "Option::is_none", default)]
    pub merge: Option<PageMerge>,
}

/// An export html task specifier.
@@ -14,6 +14,7 @@ use tinymist_std::path::{PathClean, unix_slash};
use tinymist_world::vfs::WorkspaceResolver;
use tinymist_world::{CompilerFeat, CompilerWorld, EntryReader, EntryState};
use typst::diag::EcoString;
use typst::layout::PageRanges;
use typst::syntax::FileId;

/// A scalar that is not NaN.

@@ -199,7 +200,7 @@ pub struct Pages(pub RangeInclusive<Option<NonZeroUsize>>);

impl Pages {
    /// Selects the first page.
    pub const FIRST: Pages = Pages(NonZeroUsize::new(1)..=None);
    pub const FIRST: Pages = Pages(NonZeroUsize::new(1)..=NonZeroUsize::new(1));
}

impl FromStr for Pages {

@@ -234,6 +235,11 @@ impl FromStr for Pages {
    }
}

/// The ranges of the pages to be exported as specified by the user.
pub fn exported_page_ranges(pages: &[Pages]) -> PageRanges {
    PageRanges::new(pages.iter().map(|p| p.0.clone()).collect())
}

impl fmt::Display for Pages {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let start = match self.0.start() {

@@ -405,6 +411,40 @@ impl ResourcePath {
    }
}

/// Utilities for output template processing.
/// Copied from typst-cli.
pub mod output_template {
    const INDEXABLE: [&str; 3] = ["{p}", "{0p}", "{n}"];

    /// Checks if the output template has indexable templates.
    pub fn has_indexable_template(output: &str) -> bool {
        INDEXABLE.iter().any(|template| output.contains(template))
    }

    /// Formats the output template with the given page number and total pages.
    /// Note: `this_page` is 1-based.
    pub fn format(output: &str, this_page: usize, total_pages: usize) -> String {
        // Find the base 10 width of number `i`
        fn width(i: usize) -> usize {
            1 + i.checked_ilog10().unwrap_or(0) as usize
        }

        let other_templates = ["{t}"];
        INDEXABLE
            .iter()
            .chain(other_templates.iter())
            .fold(output.to_string(), |out, template| {
                let replacement = match *template {
                    "{p}" => format!("{this_page}"),
                    "{0p}" | "{n}" => format!("{:01$}", this_page, width(total_pages)),
                    "{t}" => format!("{total_pages}"),
                    _ => unreachable!("unhandled template placeholder {template}"),
                };
                out.replace(template, replacement.as_str())
            })
    }
}

#[cfg(test)]
mod tests {
    use super::*;
@@ -1,5 +1,7 @@
//! Tinymist LSP commands

mod export;

use std::ops::Range;
use std::path::PathBuf;

@@ -9,14 +11,9 @@ use serde_json::Value as JsonValue;
#[cfg(feature = "trace")]
use task::TraceParams;
use tinymist_assets::TYPST_PREVIEW_HTML;
use tinymist_project::{
    ExportHtmlTask, ExportPdfTask, ExportPngTask, ExportSvgTask, ExportTask, ExportTeXTask,
    ExportTextTask, ExportTransform, PageSelection, Pages, ProjectTask, QueryTask,
};
use tinymist_query::package::PackageInfo;
use tinymist_query::{LocalContextGuard, LspRange};
use tinymist_std::error::prelude::*;
use tinymist_task::ExportMarkdownTask;
use typst::syntax::{LinkedNode, Source};

use super::*;

@@ -31,49 +28,6 @@ use typst::syntax::package::{PackageSpec, VersionlessPackageSpec};
#[cfg(feature = "system")]
use crate::tool::package::InitTask;

/// See [`ProjectTask`].
#[derive(Debug, Clone, Default, Deserialize)]
struct ExportOpts {
    fill: Option<String>,
    ppi: Option<f32>,
    #[serde(default)]
    page: PageSelection,
    /// Whether to open the exported file(s) after the export is done.
    open: Option<bool>,
    // todo: we made a mistake that they will be snakecase, but they should be camelCase
    /// The creation timestamp for various outputs (in seconds).
    creation_timestamp: Option<String>,
    /// A PDF standard that Typst can enforce conformance with.
    pdf_standard: Option<Vec<PdfStandard>>,
}

/// See [`ProjectTask`].
#[derive(Debug, Clone, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
struct ExportTypliteOpts {
    /// Whether to open the exported file(s) after the export is done.
    open: Option<bool>,
    /// The processor to use for the typlite export.
    processor: Option<String>,
    /// The path of external assets directory.
    assets_path: Option<PathBuf>,
}

/// See [`ProjectTask`].
#[derive(Debug, Clone, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
struct QueryOpts {
    format: String,
    output_extension: Option<String>,
    strict: Option<bool>,
    pretty: Option<bool>,
    selector: String,
    field: Option<String>,
    one: Option<bool>,
    /// Whether to open the exported file(s) after the export is done.
    open: Option<bool>,
}

#[derive(Debug, Clone, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
struct ExportSyntaxRangeOpts {

@@ -82,161 +36,6 @@ struct ExportSyntaxRangeOpts {

/// Here are implemented the handlers for each command.
impl ServerState {
    /// Export the current document as PDF file(s).
    pub fn export_pdf(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
        let opts = get_arg_or_default!(args[1] as ExportOpts);

        let creation_timestamp = if let Some(value) = opts.creation_timestamp {
            Some(
                parse_source_date_epoch(&value)
                    .map_err(|e| invalid_params(format!("Cannot parse creation timestamp: {e}")))?,
            )
        } else {
            self.config.creation_timestamp()
        };
        let pdf_standards = opts.pdf_standard.or_else(|| self.config.pdf_standards());

        let export = self.config.export_task();
        self.export(
            ProjectTask::ExportPdf(ExportPdfTask {
                export,
                pdf_standards: pdf_standards.unwrap_or_default(),
                creation_timestamp,
            }),
            opts.open.unwrap_or_default(),
            args,
        )
    }

    /// Export the current document as HTML file(s).
    pub fn export_html(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
        let opts = get_arg_or_default!(args[1] as ExportOpts);
        let export = self.config.export_task();
        self.export(
            ProjectTask::ExportHtml(ExportHtmlTask { export }),
            opts.open.unwrap_or_default(),
            args,
        )
    }

    /// Export the current document as Markdown file(s).
    pub fn export_markdown(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
        let opts = get_arg_or_default!(args[1] as ExportTypliteOpts);
        let export = self.config.export_task();
        self.export(
            ProjectTask::ExportMd(ExportMarkdownTask {
                processor: opts.processor,
                assets_path: opts.assets_path,
                export,
            }),
            opts.open.unwrap_or_default(),
            args,
        )
    }

    /// Export the current document as Tex file(s).
    pub fn export_tex(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
        let opts = get_arg_or_default!(args[1] as ExportTypliteOpts);
        let export = self.config.export_task();
        self.export(
            ProjectTask::ExportTeX(ExportTeXTask {
                processor: opts.processor,
                assets_path: opts.assets_path,
                export,
            }),
            opts.open.unwrap_or_default(),
            args,
        )
    }

    /// Export the current document as Text file(s).
    pub fn export_text(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
        let opts = get_arg_or_default!(args[1] as ExportOpts);
        let export = self.config.export_task();
        self.export(
            ProjectTask::ExportText(ExportTextTask { export }),
            opts.open.unwrap_or_default(),
            args,
        )
    }

    /// Query the current document and export the result as JSON file(s).
    pub fn export_query(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
        let opts = get_arg_or_default!(args[1] as QueryOpts);
        // todo: deprecate it
        let _ = opts.strict;

        let mut export = self.config.export_task();
        if opts.pretty.unwrap_or(true) {
            export.apply_pretty();
        }

        self.export(
            ProjectTask::Query(QueryTask {
                format: opts.format,
                output_extension: opts.output_extension,
                selector: opts.selector,
                field: opts.field,
                one: opts.one.unwrap_or(false),
                export,
            }),
            opts.open.unwrap_or_default(),
            args,
        )
    }

    /// Export the current document as Svg file(s).
    pub fn export_svg(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
        let opts = get_arg_or_default!(args[1] as ExportOpts);

        let mut export = self.config.export_task();
        select_page(&mut export, opts.page).map_err(invalid_params)?;

        self.export(
            ProjectTask::ExportSvg(ExportSvgTask { export }),
            opts.open.unwrap_or_default(),
            args,
        )
    }

    /// Export the current document as Png file(s).
    pub fn export_png(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
        let opts = get_arg_or_default!(args[1] as ExportOpts);

        let ppi = opts.ppi.unwrap_or(144.);
        let ppi = ppi
            .try_into()
            .context("cannot convert ppi")
            .map_err(invalid_params)?;

        let mut export = self.config.export_task();
        select_page(&mut export, opts.page).map_err(invalid_params)?;

        self.export(
            ProjectTask::ExportPng(ExportPngTask {
                fill: opts.fill,
                ppi,
                export,
            }),
            opts.open.unwrap_or_default(),
            args,
        )
    }

    /// Export the current document as some format. The client is responsible
    /// for passing the correct absolute path of typst document.
    pub fn export(
        &mut self,

        task: ProjectTask,
        open: bool,
        mut args: Vec<JsonValue>,
    ) -> ScheduleResult {
        let path = get_arg!(args[0] as PathBuf);

        run_query!(self.OnExport(path, open, task))
    }

    /// Export a range of the current document as Ansi highlighted text.
    pub fn export_ansi_hl(&mut self, mut args: Vec<JsonValue>) -> AnySchedulableResponse {
        let path = get_arg!(args[0] as PathBuf);

@@ -773,17 +572,3 @@ impl ServerState {
        Ok(async move { snap.run_within_package(&info, f).map_err(internal_error) })
    }
}

/// Applies page selection to the export task.
fn select_page(task: &mut ExportTask, selection: PageSelection) -> Result<()> {
    match selection {
        PageSelection::First => task.transform.push(ExportTransform::Pages {
            ranges: vec![Pages::FIRST],
        }),
        PageSelection::Merged { gap } => {
            task.transform.push(ExportTransform::Merge { gap });
        }
    }

    Ok(())
}
crates/tinymist/src/cmd/export.rs (new file, 231 lines)

@@ -0,0 +1,231 @@
//! Tinymist LSP commands for export

use std::path::PathBuf;

use serde::Deserialize;
use serde_json::Value as JsonValue;
use tinymist_project::{
    ExportHtmlTask, ExportPdfTask, ExportPngTask, ExportSvgTask, ExportTeXTask, ExportTextTask,
    Pages, ProjectTask, QueryTask,
};
use tinymist_std::error::prelude::*;
use tinymist_task::{ExportMarkdownTask, PageMerge};

use super::*;
use crate::lsp::query::run_query;

/// Basic export options with no additional fields.
#[derive(Debug, Clone, Default, Deserialize)]
#[serde(default)]
#[serde(rename_all = "camelCase")]
struct ExportOpts {}

/// See [`ProjectTask`].
#[derive(Debug, Clone, Default, Deserialize)]
#[serde(default, rename_all = "camelCase")]
struct ExportPdfOpts {
    /// Which pages to export. When unspecified, all pages are exported.
    pages: Option<Vec<Pages>>,
    /// The creation timestamp for various outputs (in seconds).
    creation_timestamp: Option<String>,
    /// A PDF standard that Typst can enforce conformance with.
    pdf_standard: Option<Vec<PdfStandard>>,
}

#[derive(Debug, Clone, Default, Deserialize)]
#[serde(default, rename_all = "camelCase")]
struct ExportSvgOpts {
    /// Which pages to export. When unspecified, all pages are exported.
    pages: Option<Vec<Pages>>,
    page_number_template: Option<String>,
    merge: Option<PageMerge>,
}

#[derive(Debug, Clone, Default, Deserialize)]
#[serde(default, rename_all = "camelCase")]
struct ExportPngOpts {
    /// Which pages to export. When unspecified, all pages are exported.
    pages: Option<Vec<Pages>>,
    page_number_template: Option<String>,
    merge: Option<PageMerge>,
    fill: Option<String>,
    ppi: Option<f32>,
}

/// See [`ProjectTask`].
#[derive(Debug, Clone, Default, Deserialize)]
#[serde(default, rename_all = "camelCase")]
struct ExportTypliteOpts {
    /// The processor to use for the typlite export.
    processor: Option<String>,
    /// The path of external assets directory.
    assets_path: Option<PathBuf>,
}

/// See [`ProjectTask`].
#[derive(Debug, Clone, Default, Deserialize)]
#[serde(default, rename_all = "camelCase")]
struct ExportQueryOpts {
    format: String,
    output_extension: Option<String>,
    strict: Option<bool>,
    pretty: Option<bool>,
    selector: String,
    field: Option<String>,
    one: Option<bool>,
}

#[derive(Debug, Clone, Default, Deserialize)]
#[serde(default, rename_all = "camelCase")]
struct ExportActionOpts {
    /// Whether to write to file.
    write: Option<bool>,
    /// Whether to open the exported file(s) after the export is done.
    open: bool,
}

/// Here are implemented the handlers for each command.
impl ServerState {
    /// Export the current document as PDF file(s).
    pub fn export_pdf(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
        let opts = get_arg_or_default!(args[1] as ExportPdfOpts);

        let creation_timestamp = if let Some(value) = opts.creation_timestamp {
            Some(
                parse_source_date_epoch(&value)
                    .map_err(|e| invalid_params(format!("Cannot parse creation timestamp: {e}")))?,
            )
        } else {
            self.config.creation_timestamp()
        };
        let pdf_standards = opts.pdf_standard.or_else(|| self.config.pdf_standards());

        let export = self.config.export_task();
        self.export(
            ProjectTask::ExportPdf(ExportPdfTask {
                export,
                pages: opts.pages,
                pdf_standards: pdf_standards.unwrap_or_default(),
                creation_timestamp,
            }),
            args,
        )
    }

    /// Export the current document as HTML file(s).
    pub fn export_html(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
        let _opts = get_arg_or_default!(args[1] as ExportOpts);
        let export = self.config.export_task();
        self.export(ProjectTask::ExportHtml(ExportHtmlTask { export }), args)
    }

    /// Export the current document as Markdown file(s).
    pub fn export_markdown(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
        let opts = get_arg_or_default!(args[1] as ExportTypliteOpts);
        let export = self.config.export_task();
        self.export(
            ProjectTask::ExportMd(ExportMarkdownTask {
                processor: opts.processor,
                assets_path: opts.assets_path,
                export,
            }),
            args,
        )
    }

    /// Export the current document as Tex file(s).
    pub fn export_tex(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
        let opts = get_arg_or_default!(args[1] as ExportTypliteOpts);
        let export = self.config.export_task();
        self.export(
            ProjectTask::ExportTeX(ExportTeXTask {
                processor: opts.processor,
                assets_path: opts.assets_path,
                export,
            }),
            args,
        )
    }

    /// Export the current document as Text file(s).
    pub fn export_text(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
        let _opts = get_arg_or_default!(args[1] as ExportOpts);
        let export = self.config.export_task();
        self.export(ProjectTask::ExportText(ExportTextTask { export }), args)
    }

    /// Query the current document and export the result as JSON file(s).
    pub fn export_query(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
        let opts = get_arg_or_default!(args[1] as ExportQueryOpts);
        // todo: deprecate it
        let _ = opts.strict;

        let mut export = self.config.export_task();
        if opts.pretty.unwrap_or(true) {
            export.apply_pretty();
        }

        self.export(
            ProjectTask::Query(QueryTask {
                format: opts.format,
                output_extension: opts.output_extension,
                selector: opts.selector,
                field: opts.field,
                one: opts.one.unwrap_or(false),
                export,
            }),
            args,
        )
    }

    /// Export the current document as Svg file(s).
    pub fn export_svg(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
        let opts = get_arg_or_default!(args[1] as ExportSvgOpts);

        let export = self.config.export_task();
        self.export(
            ProjectTask::ExportSvg(ExportSvgTask {
                export,
                pages: opts.pages,
                page_number_template: opts.page_number_template,
                merge: opts.merge,
            }),
            args,
        )
    }

    /// Export the current document as Png file(s).
    pub fn export_png(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
        let opts = get_arg_or_default!(args[1] as ExportPngOpts);

        let ppi = opts.ppi.unwrap_or(144.);
        let ppi = ppi
            .try_into()
            .context("cannot convert ppi")
            .map_err(invalid_params)?;

        let export = self.config.export_task();
        self.export(
            ProjectTask::ExportPng(ExportPngTask {
                export,
                pages: opts.pages,
                page_number_template: opts.page_number_template,
                merge: opts.merge,
                fill: opts.fill,
                ppi,
            }),
            args,
        )
    }

    /// Export the current document as some format. The client is responsible
    /// for passing the correct absolute path of typst document.
    pub fn export(&mut self, task: ProjectTask, mut args: Vec<JsonValue>) -> ScheduleResult {
        let path = get_arg!(args[0] as PathBuf);
        let action_opts = get_arg_or_default!(args[2] as ExportActionOpts);
        let write = action_opts.write.unwrap_or(true);
        let open = action_opts.open;

        run_query!(self.OnExport(path, task, write, open))
    }
}
@@ -560,6 +560,7 @@ impl Config {
            // },
            task: ProjectTask::ExportPdf(ExportPdfTask {
                export,
                pages: None, // todo: set pages
                pdf_standards: self.pdf_standards().unwrap_or_default(),
                creation_timestamp: self.creation_timestamp(),
            }),
@ -1,7 +1,6 @@
|
|||
//! The actor that handles various document export, like PDF and SVG export.
|
||||
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::atomic::AtomicUsize;
|
||||
use std::sync::{Arc, OnceLock};
|
||||
use std::{ops::DerefMut, pin::Pin};
|
||||
|
|
@ -10,16 +9,18 @@ use reflexo::ImmutPath;
|
|||
use reflexo_typst::{Bytes, CompilationTask, ExportComputation};
|
||||
use sync_ls::{internal_error, just_future};
|
||||
use tinymist_project::LspWorld;
|
||||
use tinymist_query::OnExportRequest;
|
||||
use tinymist_query::{OnExportRequest, OnExportResponse, PagedExportResponse};
|
||||
use tinymist_std::error::prelude::*;
|
||||
use tinymist_std::fs::paths::write_atomic;
|
||||
use tinymist_std::path::PathClean;
|
||||
use tinymist_std::typst::TypstDocument;
|
||||
use tinymist_task::{get_page_selection, ExportMarkdownTask, ExportTarget, PdfExport, TextExport};
|
||||
use tinymist_task::{
|
||||
output_template, DocumentQuery, ExportMarkdownTask, ExportPngTask, ExportSvgTask, ExportTarget,
|
||||
ImageOutput, PdfExport, PngExport, SvgExport, TextExport,
|
||||
};
|
||||
use tokio::sync::mpsc;
|
||||
use typlite::{Format, Typlite};
|
||||
use typst::foundations::IntoValue;
|
||||
use typst::visualize::Color;
|
||||
use typst::ecow::EcoString;
|
||||
|
||||
use futures::Future;
|
||||
use parking_lot::Mutex;
|
||||
|
|
@ -29,9 +30,9 @@ use super::SyncTaskFactory;
|
|||
use crate::lsp::query::QueryFuture;
|
||||
use crate::project::{
|
||||
update_lock, ApplyProjectTask, CompiledArtifact, DevEvent, DevExportEvent, EntryReader,
|
||||
ExportHtmlTask, ExportPdfTask, ExportPngTask, ExportSvgTask, ExportTask as ProjectExportTask,
|
||||
ExportTeXTask, ExportTextTask, LspCompiledArtifact, LspComputeGraph, ProjectClient,
|
||||
ProjectTask, QueryTask, TaskWhen, PROJECT_ROUTE_USER_ACTION_PRIORITY,
|
||||
ExportHtmlTask, ExportPdfTask, ExportTask as ProjectExportTask, ExportTeXTask, ExportTextTask,
|
||||
LspCompiledArtifact, LspComputeGraph, ProjectClient, ProjectTask, TaskWhen,
|
||||
PROJECT_ROUTE_USER_ACTION_PRIORITY,
|
||||
};
|
||||
use crate::world::TaskInputs;
|
||||
use crate::ServerState;
|
||||
|
|
@ -40,7 +41,12 @@ use crate::{actor::editor::EditorRequest, tool::word_count};
impl ServerState {
/// Exports the current document.
pub fn on_export(&mut self, req: OnExportRequest) -> QueryFuture {
let OnExportRequest { path, task, open } = req;
let OnExportRequest {
path,
task,
open,
write,
} = req;
let entry = self.entry_resolver().resolve(Some(path.as_path().into()));
let lock_dir = self.entry_resolver().resolve_lock(&entry);

@ -71,31 +77,42 @@ impl ServerState {
let is_html = matches!(task, ProjectTask::ExportHtml { .. });
// todo: we may get some file missing errors here
let artifact = CompiledArtifact::from_graph(snap.clone(), is_html);
let res = ExportTask::do_export(task, artifact, lock_dir)
.await
.map_err(internal_error)?;
let res = if write {
// Export to file and return path
ExportTask::do_export(task, artifact, lock_dir)
.await
.map_err(internal_error)?
} else {
// Export to memory and return base64-encoded data
ExportTask::do_export_to_memory(task, artifact)
.await
.map_err(internal_error)?
};
if let Some(update_dep) = update_dep {
tokio::spawn(update_dep(snap));
}
#[cfg(not(feature = "open"))]
// Only open the first page if multiple pages are exported
if open {
log::warn!("open is not supported in this build, ignoring");
}
#[cfg(feature = "open")]
{
// See https://github.com/Myriad-Dreamin/tinymist/issues/837
// Also see https://github.com/Byron/open-rs/issues/105
#[cfg(not(target_os = "windows"))]
let do_open = ::open::that_detached;
#[cfg(target_os = "windows")]
fn do_open(path: impl AsRef<std::ffi::OsStr>) -> std::io::Result<()> {
::open::with_detached(path, "explorer")
}
if let Some(Some(path)) = open.then_some(res.as_ref()) {
log::trace!("open with system default apps: {path:?}");
do_open(path).log_error("failed to open with system default apps");
match &res {
Some(OnExportResponse::Single {
path: Some(path), ..
}) => {
open_external(path);
}
Some(OnExportResponse::Paged { items, .. }) => {
if let Some(first_page) = items.first() {
if let Some(path) = &first_page.path {
open_external(path);
}
}
}
None => {
log::warn!("CompileActor: on export end: no export result to open");
}
_ => {}
}
}

@ -247,18 +264,7 @@ impl ExportTask {
Some(())
}
/// Exports a document.
pub async fn do_export(
task: ProjectTask,
artifact: LspCompiledArtifact,
lock_dir: Option<ImmutPath>,
) -> Result<Option<PathBuf>> {
use reflexo_vec2svg::DefaultExportFeature;
use ProjectTask::*;
let CompiledArtifact { graph, doc, .. } = artifact;
// Prepare the output path.
fn prepare_output_path(task: &ProjectTask, graph: &LspComputeGraph) -> Result<Option<PathBuf>> {
let entry = graph.snap.world.entry_state();
let config = task.as_export().unwrap();
let output = config.output.clone().unwrap_or_default();
@ -277,13 +283,97 @@
if write_to.is_dir() {
bail!("ExportTask({task:?}): output path is a directory: {write_to:?}");
}
// Apply page template if any
let write_to = match task {
ProjectTask::ExportPng(ExportPngTask {
page_number_template: Some(page_number_template),
..
})
| ProjectTask::ExportSvg(ExportSvgTask {
page_number_template: Some(page_number_template),
..
}) => write_to.with_file_name(page_number_template),
_ => write_to,
};
let write_to = write_to.with_extension(task.extension());
Ok(Some(write_to))
}
/// Exports a document to memory, returning the binary data directly.
pub async fn do_export_to_memory(
task: ProjectTask,
artifact: LspCompiledArtifact,
) -> Result<Option<OnExportResponse>> {
use base64::prelude::*;
let CompiledArtifact { graph, .. } = &artifact;
let write_to = Self::prepare_output_path(&task, graph)?;
let artifact = Self::do_export_bytes(task, artifact, 0).await?;
let res = match artifact {
ExportArtifact::Single(data) => OnExportResponse::Single {
path: write_to.clone(),
data: Some(BASE64_STANDARD.encode(data.as_slice())),
},
ExportArtifact::Paged { total_pages, items } => {
let can_handle_multiple = write_to.as_ref().is_some_and(|write_to| {
output_template::has_indexable_template(write_to.to_str().unwrap_or_default())
});
OnExportResponse::Paged {
total_pages,
items: items
.into_iter()
.map(|(page_idx, bytes)| {
let to = write_to.as_ref().map(|write_to| {
if can_handle_multiple {
let storage = output_template::format(
write_to.to_str().unwrap_or_default(),
page_idx + 1,
total_pages,
);
PathBuf::from(storage)
} else {
write_to.clone()
}
});
PagedExportResponse {
page: page_idx,
path: to,
data: Some(BASE64_STANDARD.encode(bytes.as_slice())),
}
})
.collect(),
}
}
};
Ok(Some(res))
}
/// Exports a document.
pub async fn do_export(
task: ProjectTask,
artifact: LspCompiledArtifact,
lock_dir: Option<ImmutPath>,
) -> Result<Option<OnExportResponse>> {
let CompiledArtifact { graph, .. } = &artifact;
let Some(write_to) = Self::prepare_output_path(&task, graph)? else {
return Ok(None);
};
static EXPORT_ID: AtomicUsize = AtomicUsize::new(0);
let export_id = EXPORT_ID.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
log::debug!(
"ExportTask({export_id},lock={lock_dir:?}): exporting {entry:?} to {write_to:?}"
"ExportTask({export_id},lock={lock_dir:?}): exporting {entry:?} to {write_to:?}",
entry = graph.snap.world.entry_state()
);
if let Some(e) = write_to.parent() {
if !e.exists() {
@ -309,12 +399,87 @@
Some(())
});
// Generate the data using common logic
let artifact = Self::do_export_bytes(task.clone(), artifact, export_id).await?;
let res = match artifact {
ExportArtifact::Single(data) => {
let res = OnExportResponse::Single {
path: Some(write_to.clone()),
data: None,
};
let to = write_to.clone();
tokio::task::spawn_blocking(move || write_atomic(to, data))
.await
.context_ut("failed to export")??;
res
}
ExportArtifact::Paged { total_pages, items } => {
let can_handle_multiple =
output_template::has_indexable_template(write_to.to_str().unwrap_or_default());
if !can_handle_multiple && items.len() > 1 {
bail!("cannot export multiple images without a page number template ({{p}}, {{0p}}) in the output path");
}
let mut res_items = Vec::new();
let mut write_futures = Vec::new();
for (page_idx, bytes) in items {
let to = if can_handle_multiple {
let storage = output_template::format(
write_to.to_str().unwrap_or_default(),
page_idx + 1,
total_pages,
);
PathBuf::from(storage)
} else {
write_to.clone()
};
res_items.push(PagedExportResponse {
page: page_idx,
path: Some(to.clone()),
data: None,
});
let fut = tokio::task::spawn_blocking(move || write_atomic(to, bytes));
write_futures.push(fut);
}
// Await all writes in parallel
for result in futures::future::join_all(write_futures).await {
result.context_ut("failed to export")??;
}
OnExportResponse::Paged {
total_pages,
items: res_items,
}
}
};
log::debug!("ExportTask({export_id}): export complete");
Ok(Some(res))
}
/// Export a document into bytes.
async fn do_export_bytes(
task: ProjectTask,
artifact: LspCompiledArtifact,
export_id: usize,
) -> Result<ExportArtifact> {
use reflexo_vec2svg::DefaultExportFeature;
use ProjectTask::*;
let CompiledArtifact { graph, doc, .. } = artifact;
// Prepare the document.
let doc = doc.context("cannot export with compilation errors")?;
// Prepare data.
let kind2 = task.clone();
let data = FutureFolder::compute(move |_| -> Result<Bytes> {
let data = FutureFolder::compute(move |_| -> Result<ExportArtifact> {
let doc = &doc;
// static BLANK: Lazy<Page> = Lazy::new(Page::default);
@ -343,59 +508,22 @@
.as_ref()
.map_err(|e| e.clone())
};
let first_page = || {
paged_doc()?
.pages
.first()
.context("no first page to export")
};
Ok(match kind2 {
Preview(..) => Bytes::new([]),
let total_pages = || paged_doc().map(|d| d.pages.len()).unwrap_or_default();
Ok(match task {
Preview(..) => Bytes::new([]).into(),
// todo: more pdf flags
ExportPdf(config) => PdfExport::run(&graph, paged_doc()?, &config)?,
Query(QueryTask {
export: _,
output_extension: _,
format,
selector,
field,
one,
}) => {
let pretty = false;
let elements = reflexo_typst::query::retrieve(&graph.world(), &selector, doc)
.map_err(|e| anyhow::anyhow!("failed to retrieve: {e}"))?;
if one && elements.len() != 1 {
bail!("expected exactly one element, found {}", elements.len());
}
let mapped: Vec<_> = elements
.into_iter()
.filter_map(|c| match &field {
Some(field) => c.get_by_name(field).ok(),
_ => Some(c.into_value()),
})
.collect();
if one {
let Some(value) = mapped.first() else {
bail!("no such field found for element");
};
serialize(value, &format, pretty).map(Bytes::from_string)?
} else {
serialize(&mapped, &format, pretty).map(Bytes::from_string)?
}
}
ExportHtml(ExportHtmlTask { export: _ }) => Bytes::from_string(
ExportPdf(config) => PdfExport::run(&graph, paged_doc()?, &config)?.into(),
ExportSvg(config) => SvgExport::run(&graph, paged_doc()?, &config)?.with_pages(total_pages()),
ExportPng(config) => PngExport::run(&graph, paged_doc()?, &config)?.with_pages(total_pages()),
Query(config) => DocumentQuery::run(&graph, paged_doc()?, &config)??.into(),
ExportHtml(ExportHtmlTask { export: _ }) =>
typst_html::html(html_doc()?)
.map_err(|e| format!("export error: {e:?}"))
.context_ut("failed to export to html")?,
),
ExportSvgHtml(ExportHtmlTask { export: _ }) => Bytes::from_string(
reflexo_vec2svg::render_svg_html::<DefaultExportFeature>(paged_doc()?),
),
ExportText(ExportTextTask { export: _ }) => {
Bytes::from_string(TextExport::run_on_doc(doc)?)
}
.context_ut("failed to export to html")?.into(),
ExportSvgHtml(ExportHtmlTask { export: _ }) =>
reflexo_vec2svg::render_svg_html::<DefaultExportFeature>(paged_doc()?).into(),
ExportText(ExportTextTask { export: _ }) => TextExport::run_on_doc(doc)?.into(),
ExportMd(ExportMarkdownTask {
processor,
assets_path,

@ -410,8 +538,7 @@
})
.convert()
.map_err(|e| anyhow::anyhow!("failed to convert to markdown: {e}"))?;
Bytes::from_string(conv)
conv.into()
}
// todo: duplicated code with ExportMd
ExportTeX(ExportTeXTask {
@ -429,55 +556,69 @@
})
.convert()
.map_err(|e| anyhow::anyhow!("failed to convert to latex: {e}"))?;
Bytes::from_string(conv)
}
ExportSvg(ExportSvgTask { export }) => {
let (is_first, merged_gap) = get_page_selection(&export)?;
Bytes::from_string(if is_first {
typst_svg::svg(first_page()?)
} else {
typst_svg::svg_merged(paged_doc()?, merged_gap)
})
}
ExportPng(ExportPngTask { export, ppi, fill }) => {
let ppi = ppi.to_f32();
if ppi <= 1e-6 {
bail!("invalid ppi: {ppi}");
}
let fill = if let Some(fill) = fill {
parse_color(fill).map_err(|err| anyhow::anyhow!("invalid fill ({err})"))?
} else {
Color::WHITE
};
let (is_first, merged_gap) = get_page_selection(&export)?;
let pixmap = if is_first {
typst_render::render(first_page()?, ppi / 72.)
} else {
typst_render::render_merged(paged_doc()?, ppi / 72., merged_gap, Some(fill))
};
Bytes::new(
pixmap
.encode_png()
.map_err(|err| anyhow::anyhow!("failed to encode PNG ({err})"))?,
)
}
})
conv.into()
}
})
})
.await??;
let to = write_to.clone();
tokio::task::spawn_blocking(move || write_atomic(to, data))
.await
.context_ut("failed to export")??;
Ok(data)
}
}
log::debug!("ExportTask({export_id}): export complete");
Ok(Some(write_to))
enum ExportArtifact {
Single(Bytes),
Paged {
total_pages: usize,
items: Vec<(usize, Bytes)>,
},
}
impl From<Bytes> for ExportArtifact {
fn from(value: Bytes) -> Self {
ExportArtifact::Single(value)
}
}
impl From<String> for ExportArtifact {
fn from(value: String) -> Self {
ExportArtifact::Single(Bytes::from_string(value))
}
}
impl From<EcoString> for ExportArtifact {
fn from(value: EcoString) -> Self {
ExportArtifact::Single(Bytes::from_string(value))
}
}
trait WithPages {
fn with_pages(self, total_pages: usize) -> ExportArtifact;
}
impl WithPages for ImageOutput<Bytes> {
fn with_pages(self, total_pages: usize) -> ExportArtifact {
match self {
ImageOutput::Merged(b) => ExportArtifact::Single(b),
ImageOutput::Paged(v) => ExportArtifact::Paged {
total_pages,
items: v.into_iter().map(|item| (item.page, item.value)).collect(),
},
}
}
}
impl WithPages for ImageOutput<String> {
fn with_pages(self, total_pages: usize) -> ExportArtifact {
match self {
ImageOutput::Merged(b) => ExportArtifact::Single(Bytes::from_string(b)),
ImageOutput::Paged(v) => ExportArtifact::Paged {
total_pages,
items: v
.into_iter()
.map(|item| (item.page, Bytes::from_string(item.value)))
.collect(),
},
}
}
}
@ -502,6 +643,7 @@ impl Default for ExportUserConfig {
output: None,
transform: vec![],
},
pages: None,
pdf_standards: vec![],
creation_timestamp: None,
}),

@ -511,20 +653,6 @@ impl Default for ExportUserConfig {
}
}
fn parse_color(fill: String) -> Result<Color> {
match fill.as_str() {
"black" => Ok(Color::BLACK),
"white" => Ok(Color::WHITE),
"red" => Ok(Color::RED),
"green" => Ok(Color::GREEN),
"blue" => Ok(Color::BLUE),
hex if hex.starts_with('#') => {
Color::from_str(&hex[1..]).context_ut("failed to parse color")
}
_ => bail!("invalid color: {fill}"),
}
}
fn log_err<T>(artifact: Result<T>) -> Option<T> {
match artifact {
Ok(v) => Some(v),
@ -547,34 +675,6 @@ fn extra_compile_for_export<D: typst::Document + Send + Sync + 'static>(
}
}
/// Serialize data to the output format.
fn serialize(data: &impl serde::Serialize, format: &str, pretty: bool) -> Result<String> {
Ok(match format {
"json" if pretty => serde_json::to_string_pretty(data).context("serialize to json")?,
"json" => serde_json::to_string(data).context("serialize to json")?,
"yaml" => serde_yaml::to_string(&data).context_ut("serialize to yaml")?,
"txt" => {
use serde_json::Value::*;
let value = serde_json::to_value(data).context("serialize to json value")?;
match value {
String(s) => s,
_ => {
let kind = match value {
Null => "null",
Bool(_) => "boolean",
Number(_) => "number",
String(_) => "string",
Array(_) => "array",
Object(_) => "object",
};
bail!("expected a string value for format: {format}, got {kind}")
}
}
}
_ => bail!("unsupported format for query: {format}"),
})
}
type FoldFuture = Pin<Box<dyn Future<Output = Option<()>> + Send>>;
#[derive(Default)]
@ -644,6 +744,28 @@ impl FutureFolder {
}
}
fn open_external(path: &Path) {
#[cfg(not(feature = "open"))]
if open {
log::warn!("open is not supported in this build, ignoring");
}
#[cfg(feature = "open")]
{
// See https://github.com/Myriad-Dreamin/tinymist/issues/837
// Also see https://github.com/Byron/open-rs/issues/105
#[cfg(not(target_os = "windows"))]
let do_open = ::open::that_detached;
#[cfg(target_os = "windows")]
fn do_open(path: impl AsRef<std::ffi::OsStr>) -> std::io::Result<()> {
::open::with_detached(path, "explorer")
}
log::trace!("open with system default apps: {path:?}");
do_open(path).log_error("failed to open with system default apps");
}
}

#[cfg(test)]
mod tests {
use clap::Parser;

@ -660,28 +782,6 @@ mod tests {
assert_eq!(conf.task.when(), Some(&TaskWhen::Never));
}
#[test]
fn test_parse_color() {
assert_eq!(parse_color("black".to_owned()).unwrap(), Color::BLACK);
assert_eq!(parse_color("white".to_owned()).unwrap(), Color::WHITE);
assert_eq!(parse_color("red".to_owned()).unwrap(), Color::RED);
assert_eq!(parse_color("green".to_owned()).unwrap(), Color::GREEN);
assert_eq!(parse_color("blue".to_owned()).unwrap(), Color::BLUE);
assert_eq!(
parse_color("#000000".to_owned()).unwrap().to_hex(),
"#000000"
);
assert_eq!(
parse_color("#ffffff".to_owned()).unwrap().to_hex(),
"#ffffff"
);
assert_eq!(
parse_color("#000000cc".to_owned()).unwrap().to_hex(),
"#000000cc"
);
assert!(parse_color("invalid".to_owned()).is_err());
}
#[test]
fn compilation_default_never() {
let args = CompileOnceArgs::parse_from(["tinymist", "main.typ"]);
@ -146,8 +146,8 @@ impl WorldComputable<LspCompilerFeat> for ProjectExport {
match config.as_ref() {
Preview(..) => todo!(),
ExportPdf(config) => Self::export_bytes::<_, PdfExport>(graph, when, config),
ExportPng(config) => Self::export_bytes::<_, PngExport>(graph, when, config),
ExportSvg(config) => Self::export_string::<_, SvgExport>(graph, when, config),
ExportPng(_config) => todo!(),
ExportSvg(_config) => todo!(),
ExportHtml(config) => Self::export_string::<_, HtmlExport>(graph, when, config),
// todo: configuration
ExportSvgHtml(_config) => Self::export_string::<
@ -1,4 +1,4 @@
== A Hello World Example of Export Typst Document to Various Formats
= Hello *Typst* _Export_
Hello World.
- The `World`!

editors/vscode/e2e-workspaces/export/paged.typ
@ -0,0 +1,7 @@
#set page(paper: "a10")
#for i in range(3) {
page[
#text(size: 5em)[#(i+1)]
]
}

@ -1431,7 +1431,7 @@
"format-check": "prettier --check .",
"format": "prettier --write .",
"test:unit": "vitest",
"test": "vitest && rimraf test-dist/ && tsc -p tsconfig.test.json && node test-dist/test/runTests.js"
"test": "vitest run && rimraf test-dist/ && tsc -p tsconfig.test.json && node test-dist/test/runTests.js"
},
"dependencies": {
"cpr": "^3.0.1",

editors/vscode/src/cmd.export.ts
@ -0,0 +1,58 @@
export interface ExportPdfOpts {
pages?: string[];
creationTimestamp?: string | null;
// todo: pdf_standard
}
export interface PageMergeOpts {
gap?: string | null;
}
export interface ExportPngOpts {
pages?: string[];
pageNumberTemplate?: string;
merge?: PageMergeOpts;
fill?: string;
ppi?: number;
}
export interface ExportSvgOpts {
pages?: string[];
pageNumberTemplate?: string;
merge?: PageMergeOpts;
}
export interface ExportTypliteOpts {
processor?: string;
assetsPath?: string;
}
export interface ExportQueryOpts {
format: string;
outputExtension?: string;
strict?: boolean;
pretty?: boolean;
selector: string;
field?: string;
one?: boolean;
}
// biome-ignore lint/suspicious/noEmptyInterface: no fields yet
export interface ExportHtmlOpts {}
// biome-ignore lint/suspicious/noEmptyInterface: no fields yet
export interface ExportTextOpts {}
export type ExportOpts =
| ExportPdfOpts
| ExportPngOpts
| ExportSvgOpts
| ExportTypliteOpts
| ExportQueryOpts
| ExportHtmlOpts
| ExportTextOpts;
export interface ExportActionOpts {
write?: boolean;
open?: boolean;
}
@ -33,7 +33,7 @@ import { copyAndPasteActivate, dragAndDropActivate } from "./features/drop-paste
import { testingActivate } from "./features/testing";
import { testingDebugActivate } from "./features/testing/debug";
import { FeatureEntry, tinymistActivate, tinymistDeactivate } from "./extension.shared";
import { commandShow, exportActivate, quickExports } from "./features/export";
import { askPageSelection, commandShow, exportActivate, quickExports } from "./features/export";
import { resolveCodeAction } from "./lsp.code-action";
import { HoverTmpStorage } from "./features/hover-storage.tmp";

@ -611,19 +611,22 @@ async function commandRunCodeLens(...args: string[]): Promise<void> {
void vscode.commands.executeCommand(`typst-preview.${command}`);
return;
}
case kProfileServer: {
void vscode.commands.executeCommand(`tinymist.profileServer`);
return;
}
default: {
if (!moreAction || !("exportKind" in moreAction)) {
return;
}
// A quick export action
if (!(await askPageSelection(moreAction))) {
return; // cancelled
}
await commandShow(moreAction.exportKind, moreAction.extraOpts);
return;
}
case kProfileServer: {
void vscode.commands.executeCommand(`tinymist.profileServer`);
return;
}
}
}
}
@ -1,9 +1,18 @@
import * as vscode from "vscode";
import { l10nMsg } from "../l10n";
import { tinymist } from "../lsp";
import { IContext } from "../context";
import { commands } from "vscode";
import type {
ExportActionOpts,
ExportOpts,
ExportPdfOpts,
ExportPngOpts,
ExportSvgOpts,
ExportTypliteOpts,
} from "../cmd.export";
import type { IContext } from "../context";
import { l10nMsg } from "../l10n";
import { type ExportResponse, tinymist } from "../lsp";
/// These are names of the export functions in the LSP client, e.g. `exportPdf`, `exportHtml`.
export type ExportKind = "Pdf" | "Html" | "Svg" | "Png" | "Markdown" | "TeX" | "Text" | "Query";
export function exportActivate(context: IContext) {

@ -20,7 +29,8 @@ export interface QuickExportFormatMeta {
label: string;
description: string;
exportKind: ExportKind;
extraOpts?: any;
extraOpts?: ExportOpts;
selectPages?: true | "merged";
}
export const quickExports: QuickExportFormatMeta[] = [

@ -29,17 +39,35 @@ export const quickExports: QuickExportFormatMeta[] = [
description: l10nMsg("Export as PDF"),
exportKind: "Pdf",
},
{
label: l10nMsg("PDF (Specific Pages)"),
description: l10nMsg("Export as PDF with specified pages"),
exportKind: "Pdf",
selectPages: true,
},
{
label: l10nMsg("PNG (Merged)"),
description: l10nMsg("Export as a single PNG by merging pages"),
exportKind: "Png",
extraOpts: { page: { merged: { gap: "0pt" } } },
selectPages: "merged",
},
{
label: l10nMsg("PNG (Specific Pages)"),
description: l10nMsg("Export the specified pages as multiple PNGs"),
exportKind: "Png",
selectPages: true,
},
{
label: l10nMsg("SVG (Merged)"),
description: l10nMsg("Export as a single SVG by merging pages"),
exportKind: "Svg",
extraOpts: { page: { merged: { gap: "0pt" } } },
selectPages: "merged",
},
{
label: l10nMsg("SVG (Specific Pages)"),
description: l10nMsg("Export the specified pages as multiple SVGs"),
exportKind: "Svg",
selectPages: true,
},
{
label: "HTML",

@ -76,21 +104,11 @@ export const quickExports: QuickExportFormatMeta[] = [
// description: l10nMsg("Query current document and export the result as a file. We will ask a few questions and update the tasks.json file for you."),
// exportKind: "Query",
// },
{
label: l10nMsg("PNG (First Page)"),
description: l10nMsg("Export the first page as a single PNG"),
exportKind: "Png",
},
// {
// label: l10nMsg("PNG (Task)"),
// description: l10nMsg("Export as PNG (and update tasks.json)"),
// exportKind: "Png",
// },
{
label: l10nMsg("SVG (First Page)"),
description: l10nMsg("Export the first page as a single SVG"),
exportKind: "Svg",
},
// {
// label: l10nMsg("SVG (Task)"),
// description: l10nMsg("Export as SVG (and update tasks.json)"),

@ -108,8 +126,9 @@ async function askAndRun<T>(
return;
}
picked.extraOpts ??= {};
if (picked.exportKind === "TeX") {
picked.extraOpts = picked.extraOpts || {};
const processor = await vscode.window.showInputBox({
title: l10nMsg("TeX processor"),
placeHolder: l10nMsg(
@ -121,17 +140,76 @@ async function askAndRun<T>(
});
if (processor) {
picked.extraOpts.processor = processor;
(picked.extraOpts as ExportTypliteOpts).processor = processor;
}
}
if (!(await askPageSelection(picked))) {
return; // cancelled
}
return cb(picked);
}
export async function commandAskAndExport(): Promise<string | undefined> {
return await askAndRun(l10nMsg("Pick a method to export"), (picked) => {
return commandExport(picked.exportKind, picked.extraOpts);
/** returns false if export cancelled */
export async function askPageSelection(picked: QuickExportFormatMeta) {
const selectPages = picked.selectPages;
if (!selectPages) {
return true; // no need to select pages
}
picked.extraOpts ??= {};
if (selectPages === "merged") {
(picked.extraOpts as ExportPngOpts | ExportSvgOpts).merge = {};
return true;
}
const pages = await vscode.window.showInputBox({
title: l10nMsg("Pages to export"),
value: "1",
placeHolder: l10nMsg("e.g. `1-3,5,7-9`, leave empty for all pages"),
prompt: l10nMsg("Specify the pages you want to export"),
validateInput: validatePageRanges,
});
if (pages === undefined) {
return false; // cancelled
}
const pageRanges = pages.split(",");
(picked.extraOpts as ExportPdfOpts | ExportPngOpts | ExportSvgOpts).pages = pageRanges;
if (
(picked.exportKind === "Png" || picked.exportKind === "Svg") &&
!(pageRanges.length === 1 && !pageRanges[0].includes("-"))
) {
// multiple pages, ask for page number template
// if only one page without range, no need for page number template
const pageNumberTemplate = await vscode.window.showInputBox({
title: "Page Number Template",
placeHolder: l10nMsg("e.g., `page-{0p}-of-{t}.png`"),
prompt: l10nMsg(
"A page number template must be present if the source document renders to multiple pages. Use `{p}` for page numbers, `{0p}` for zero padded page numbers and `{t}` for page count.\nLeave empty for default naming scheme.",
),
});
if (pageNumberTemplate === undefined) {
return false; // cancelled
}
if (pageNumberTemplate.length > 0) {
// only set if not empty
(picked.extraOpts as ExportPngOpts | ExportSvgOpts).pageNumberTemplate = pageNumberTemplate;
}
}
return true;
}
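
For reference, a minimal sketch of how such a page number template could expand, assuming `{p}` is the 1-based page number, `{0p}` pads it to the width of the page count, and `{t}` is the page count. This only illustrates the prompt text above; it is not tinymist's actual `output_template` implementation.

// Editor's illustration, not part of the diff.
function expandPageTemplate(template: string, page: number, total: number): string {
  // Assumption: `{0p}` is padded to the number of digits in the page count.
  const width = String(total).length;
  return template
    .replace(/\{0p\}/g, String(page).padStart(width, "0"))
    .replace(/\{p\}/g, String(page))
    .replace(/\{t\}/g, String(total));
}
// expandPageTemplate("page-{0p}-of-{t}.png", 2, 12) === "page-02-of-12.png"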
export async function commandAskAndExport(): Promise<ExportResponse | null> {
return (
(await askAndRun(l10nMsg("Pick a method to export"), (picked) => {
return commandExport(picked.exportKind, picked.extraOpts);
})) ?? null
);
}
export async function commandAskAndShow(): Promise<void> {

@ -140,25 +218,31 @@ export async function commandAskAndShow(): Promise<void> {
});
}
export async function commandExport(kind: ExportKind, opts?: any): Promise<string | undefined> {
export async function commandExport(
kind: ExportKind,
opts?: ExportOpts,
actionOpts?: ExportActionOpts,
): Promise<ExportResponse | null> {
const uri = vscode.window.activeTextEditor?.document.uri.fsPath;
if (!uri) {
return;
return null;
}
return (await tinymist[`export${kind}`](uri, opts)) || undefined;
return await tinymist[`export${kind}`](uri, opts, actionOpts);
}
/**
* Implements the functionality for the 'Show PDF' button shown in the editor title
* if a `.typ` file is opened.
*/
export async function commandShow(kind: ExportKind, extraOpts?: any): Promise<void> {
export async function commandShow(kind: ExportKind, extraOpts?: ExportOpts): Promise<void> {
const activeEditor = vscode.window.activeTextEditor;
if (activeEditor === undefined) {
return;
}
const actionOpts: ExportActionOpts = {};
const conf = vscode.workspace.getConfiguration("tinymist");
const openIn: string = conf.get("showExportFileIn") || "editorTab";
@ -172,27 +256,35 @@ export async function commandShow(kind: ExportKind, extraOpts?: any): Promise<vo
// Also see https://github.com/microsoft/vscode/issues/85930
const openBySystemDefault = openIn === "systemDefault";
if (openBySystemDefault) {
extraOpts = extraOpts || {};
extraOpts.open = true;
actionOpts.open = true;
}
// only create pdf if it does not exist yet
const exportPath = await commandExport(kind, extraOpts);
if (exportPath === undefined) {
const exportResponse = await commandExport(kind, extraOpts, actionOpts);
if (!exportResponse || "message" in exportResponse) {
// show error message
await vscode.window.showErrorMessage(`Failed to export ${kind}`);
await vscode.window.showErrorMessage(`Failed to export ${kind}: ${exportResponse?.message}`);
return;
}
const showRes = await showExportFileIn(exportResponse, openIn);
if (!showRes) {
await vscode.window.showErrorMessage(`Failed to export ${kind}: no path in response`);
}
}
async function showExportFileIn(exportResponse: ExportResponse, openIn: string): Promise<boolean> {
// PDF export is not paged. The response should be a simple object.
// For other formats, we just open the first page.
const exportPath =
"items" in exportResponse ? exportResponse.items[0]?.path : exportResponse.path;
if (!exportPath) {
return false;
}
switch (openIn) {
case "systemDefault":
break;
default:
vscode.window.showWarningMessage(
`Unknown value of "tinymist.showExportFileIn", expected "systemDefault" or "editorTab", got "${openIn}"`,
);
// fall through
case "editorTab": {
// find and replace exportUri
const exportUri = vscode.Uri.file(exportPath);
@ -213,5 +305,59 @@ export async function commandShow(kind: ExportKind, extraOpts?: any): Promise<vo
} as vscode.TextDocumentShowOptions);
break;
}
default:
vscode.window.showWarningMessage(
`Unknown value of "tinymist.showExportFileIn", expected "systemDefault" or "editorTab", got "${openIn}"`,
);
}
return true;
}
function validatePageRanges(value: string): string | undefined {
if (!value.trim()) {
return; // Allow empty input
}
const parts = value
.split(",")
.map((p) => p.trim())
.filter((p) => p);
for (const part of parts) {
const rangeParts = part.split("-").map((s) => s.trim());
if (rangeParts.length > 2) {
return l10nMsg("Invalid page range format: {range}", { range: part });
}
if (rangeParts.length === 1) {
// Single page
const num = parseInt(rangeParts[0], 10);
if (Number.isNaN(num) || num <= 0) {
return l10nMsg("Invalid page number: {page}", { page: part });
}
} else {
// Range
const [startStr, endStr] = rangeParts;
let startNum: number | undefined;
let endNum: number | undefined;
if (startStr) {
startNum = parseInt(startStr, 10);
if (Number.isNaN(startNum) || startNum <= 0) {
return l10nMsg("Invalid page range: {range}", { range: part });
}
}
if (endStr) {
endNum = parseInt(endStr, 10);
if (Number.isNaN(endNum) || endNum <= 0) {
return l10nMsg("Invalid page range: {range}", { range: part });
}
}
if (startNum !== undefined && endNum !== undefined && startNum > endNum) {
return l10nMsg("Invalid page range: {range}", { range: part });
}
// If both start and end are empty, invalid
if (!startStr && !endStr) {
return l10nMsg("Invalid page range: {range}", { range: part });
}
}
}
return;
}
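
A few sample results of the validator above, assuming the English locale strings added to the l10n file later in this diff:

// Editor's illustration, not part of the diff.
// validatePageRanges("1-3,5,7-9") -> undefined (valid)
// validatePageRanges("")          -> undefined (empty means all pages)
// validatePageRanges("5-2")       -> "Invalid page range: 5-2"
// validatePageRanges("1-2-3")     -> "Invalid page range format: 1-2-3"
// validatePageRanges("0")         -> "Invalid page number: 0"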
@ -1,20 +1,33 @@
/** biome-ignore-all lint/complexity/useLiteralKeys: special keys */
import * as vscode from "vscode";
import type {
ExportHtmlOpts,
ExportPdfOpts,
ExportPngOpts,
ExportQueryOpts,
ExportSvgOpts,
ExportTextOpts,
ExportTypliteOpts,
} from "../cmd.export";
import { tinymist } from "../lsp";
import { VirtualConsole } from "../util";
import { extensionState } from "../state";
import { VirtualConsole } from "../util";
type ExportFormat = "pdf" | "png" | "svg" | "html" | "markdown" | "text" | "query" | "pdfpc";
export type ExportFormat = "pdf" | "png" | "svg" | "html" | "markdown" | "text" | "query" | "pdfpc";
interface ExportArgs {
export interface ExportArgs {
format: ExportFormat | ExportFormat[];
inputPath: string;
outputPath: string;
"pdf.creationTimestamp"?: string | null;
"png.ppi"?: number;
pages?: string | string[]; // Array of page ranges like ["1-3", "5", "7-9"], or comma separated ranges
"pdf.pages"?: string | string[];
"png.pages"?: string | string[];
"svg.pages"?: string | string[];
fill?: string;
"png.fill"?: string;
pageNumberTemplate?: string;
"png.pageNumberTemplate"?: string;
"svg.pageNumberTemplate"?: string;
merged?: boolean;
"svg.merged"?: boolean;

@ -24,11 +37,18 @@ interface ExportArgs {
"png.merged.gap"?: string;
"svg.merged.gap"?: string;
"query.format"?: string;
"pdf.creationTimestamp"?: string | null;
"png.ppi"?: number;
fill?: string;
"png.fill"?: string;
"query.format": string;
"query.outputExtension"?: string;
"query.strict"?: boolean;
"query.pretty"?: boolean;
"query.selector"?: string;
"query.selector": string;
"query.field"?: string;
"query.one"?: boolean;

@ -59,7 +79,7 @@ export const runExport = (def: vscode.TaskDefinition) => {
try {
await run();
} catch (e: any) {
} catch (e) {
vc.writeln(`Typst export task failed: ${err(e)}`);
} finally {
closeEmitter.fire(0);
@ -91,21 +111,21 @@ export const runExport = (def: vscode.TaskDefinition) => {
}
};
const exportOps = (exportArgs: ExportArgs) => ({
inheritedProp(prop: "merged" | "merged.gap", from: "svg" | "png"): any {
return exportArgs[`${from}.${prop}`] === undefined
? exportArgs[prop]
: exportArgs[`${from}.${prop}`];
export const exportOps = (exportArgs: ExportArgs) => ({
inheritedProp<P extends keyof ExportArgs>(prop: P, from: ExportFormat): ExportArgs[P] {
const key = `${from}.${prop}` as keyof ExportArgs;
return exportArgs[key] === undefined ? exportArgs[prop] : (exportArgs[key] as ExportArgs[P]);
},
resolvePageOpts(fmt: "svg" | "png"): any {
resolvePagesOpts(fmt: "pdf" | "png" | "svg") {
const pages = this.inheritedProp("pages", fmt);
return typeof pages === "string" ? pages.split(",") : pages;
},
resolveMergeOpts(fmt: "png" | "svg") {
if (this.inheritedProp("merged", fmt)) {
return {
merged: {
gap: this.inheritedProp("merged.gap", fmt),
},
gap: this.inheritedProp("merged.gap", fmt),
};
}
return "first";
},
resolveInputPath() {
const inputPath = exportArgs.inputPath;

@ -117,65 +137,72 @@ const exportOps = (exportArgs: ExportArgs) => ({
},
});
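
As a rough illustration of how these helpers resolve format-scoped options (an editor's sketch with hypothetical values, not part of the diff): the `<format>.<prop>` key wins over the shared one, and comma-separated page strings are split into ranges.

const exampleOps = exportOps({
  format: "png",
  inputPath: "main.typ",
  outputPath: "out/main",
  pages: "1-3,5",
  "png.pages": ["2"],
  merged: true,
  "png.merged.gap": "4pt",
  "query.format": "json",
  "query.selector": "heading",
} as ExportArgs);
// exampleOps.resolvePagesOpts("pdf") -> ["1-3", "5"]   (falls back to the shared `pages`)
// exampleOps.resolvePagesOpts("png") -> ["2"]          (`png.pages` takes precedence)
// exampleOps.resolveMergeOpts("png") -> { gap: "4pt" } (`merged` enables merging; gap from `png.merged.gap`)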
const provideFormats = (exportArgs: ExportArgs, ops = exportOps(exportArgs)) => ({
export const provideFormats = (exportArgs: ExportArgs, ops = exportOps(exportArgs)) => ({
pdf: {
opts() {
opts(): ExportPdfOpts {
return {
pages: ops.resolvePagesOpts("pdf"),
creationTimestamp: exportArgs["pdf.creationTimestamp"],
};
},
export: tinymist.exportPdf,
},
png: {
opts() {
opts(): ExportPngOpts {
return {
ppi: exportArgs["png.ppi"] || 96,
fill: exportArgs["png.fill"] || exportArgs["fill"],
page: ops.resolvePageOpts("png"),
pages: ops.resolvePagesOpts("png"),
pageNumberTemplate:
exportArgs["png.pageNumberTemplate"] ?? exportArgs["pageNumberTemplate"],
merge: ops.resolveMergeOpts("png"),
ppi: exportArgs["png.ppi"],
fill: exportArgs["png.fill"] ?? exportArgs["fill"],
};
},
export: tinymist.exportPng,
},
svg: {
opts() {
opts(): ExportSvgOpts {
return {
page: ops.resolvePageOpts("svg"),
pages: ops.resolvePagesOpts("svg"),
pageNumberTemplate:
exportArgs["svg.pageNumberTemplate"] ?? exportArgs["pageNumberTemplate"],
merge: ops.resolveMergeOpts("svg"),
};
},
export: tinymist.exportSvg,
},
html: {
opts() {
opts(): ExportHtmlOpts {
return {};
},
export: tinymist.exportHtml,
},
markdown: {
opts() {
opts(): ExportTypliteOpts {
return {
processor: exportArgs["markdown.processor"] || exportArgs["processor"],
assetsPath: exportArgs["markdown.assetsPath"] || exportArgs["assetsPath"],
processor: exportArgs["markdown.processor"] ?? exportArgs["processor"],
assetsPath: exportArgs["markdown.assetsPath"] ?? exportArgs["assetsPath"],
};
},
export: tinymist.exportMarkdown,
},
tex: {
opts() {
opts(): ExportTypliteOpts {
return {
processor: exportArgs["tex.processor"] || exportArgs["processor"],
assetsPath: exportArgs["tex.assetsPath"] || exportArgs["assetsPath"],
processor: exportArgs["tex.processor"] ?? exportArgs["processor"],
assetsPath: exportArgs["tex.assetsPath"] ?? exportArgs["assetsPath"],
};
},
export: tinymist.exportTeX,
},
text: {
opts() {
opts(): ExportTextOpts {
return {};
},
export: tinymist.exportText,
},
query: {
opts() {
opts(): ExportQueryOpts {
return {
format: exportArgs["query.format"],
outputExtension: exportArgs["query.outputExtension"],

@ -189,7 +216,7 @@ const provideFormats = (exportArgs: ExportArgs, ops = exportOps(exportArgs)) =>
export: tinymist.exportQuery,
},
pdfpc: {
opts() {
opts(): ExportQueryOpts {
return {
format: "json",
pretty: exportArgs["query.pretty"],
@ -16,12 +16,12 @@ import { HoverDummyStorage } from "./features/hover-storage";
import type { HoverTmpStorage } from "./features/hover-storage.tmp";
import { extensionState } from "./state";
import {
base64Encode,
bytesBase64Encode,
DisposeList,
getSensibleTextEditorColumn,
typstDocumentSelector,
} from "./util";
import type { ExportActionOpts, ExportOpts } from "./cmd.export";
import { substVscodeVarsInConfig, TinymistConfig } from "./config";
import { TinymistStatus, wordCountItemProcess } from "./ui-extends";
import { previewProcessOutline } from "./features/preview";

@ -347,8 +347,8 @@ export class LanguageState {
exportTeX = exportCommand("tinymist.exportTeX");
exportText = exportCommand("tinymist.exportText");
exportQuery = exportCommand("tinymist.exportQuery");
exportAnsiHighlight = exportCommand("tinymist.exportAnsiHighlight");
exportAst = exportCommand("tinymist.exportAst");
exportAnsiHighlight = exportStringCommand("tinymist.exportAnsiHighlight");
exportAst = exportStringCommand("tinymist.exportAst");
getResource<T extends keyof ResourceRoutes>(path: T, ...args: any[]) {
return tinymist.executeCommand<ResourceRoutes[T]>("tinymist.getResources", [path, ...args]);

@ -749,9 +749,30 @@ export class LanguageState {
export const tinymist = new LanguageState();
// Type definitions for export responses (matches Rust OnExportResponse)
export type ExportResponse =
| { path: string | null; data: string | null } // Single
| { totalPages: number; items: ExportedPage[] }; // Multiple
type ExportedPage = { page: number; path: string | null; data: string | null };
function exportCommand(command: string) {
return (uri: string, extraOpts?: any) => {
return tinymist.executeCommand<string>(command, [uri, ...(extraOpts ? [extraOpts] : [])]);
return (
uri: string,
extraOpts?: ExportOpts,
actions?: ExportActionOpts,
): Promise<ExportResponse | null> => {
return tinymist.executeCommand<ExportResponse | null>(command, [
uri,
extraOpts ?? {},
actions ?? {},
]);
};
}
function exportStringCommand(command: string) {
return (uri: string, extraOpts?: ExportOpts): Promise<string> => {
return tinymist.executeCommand<string>(command, [uri, extraOpts ?? {}]);
};
}
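
A minimal client-side sketch of consuming the new no-write mode, assuming `base64DecodeToBytes` from `./util` is in scope; the function name here is hypothetical, but the command and response shapes follow the types above.

// Editor's illustration, not part of the diff.
async function fetchPngPages(uri: string): Promise<Uint8Array[]> {
  // `write: false` asks the server to return base64 data instead of writing files.
  const resp = await tinymist.exportPng(uri, {}, { write: false, open: false });
  if (!resp || !("items" in resp)) {
    return []; // no document open, or a single/merged result
  }
  return resp.items
    .filter((item) => item.data !== null)
    .map((item) => base64DecodeToBytes(item.data as string));
}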

@ -1,63 +1,223 @@
// You can import and use all API from the 'vscode' module
// as well as import your extension to test it
import { hash } from "node:crypto";
import * as fs from "node:fs";
import * as vscode from "vscode";
import type { ExportActionOpts, ExportOpts } from "../../cmd.export";
import type { ExportKind } from "../../features/export";
import type { ExportResponse } from "../../lsp";
import { base64DecodeToBytes } from "../../util";
import type { Context } from ".";
import * as fs from "fs";
import { hash } from "crypto";
export async function getTests(ctx: Context) {
const dirPat = /target[\\/]typst[\\/]/;
const getFileHash = (path?: string | null) => {
if (!path || !fs.existsSync(path)) {
throw new Error(`File ${path} does not exist`);
}
ctx.expect(!!dirPat.exec(path), `${path} is not under correct directory`).eq(true);
return hash("sha256", fs.readFileSync(path), "hex");
};
const expectSingleHash = (response: ExportResponse | null, ignoreHash: boolean = false) => {
if (!response) {
throw new Error("No response from export command");
}
if ("items" in response) {
throw new Error("Expected single export, got multiple");
}
const sha256 = getFileHash(response.path);
return ctx.expect(ignoreHash ? undefined : sha256.slice(0, 8), `sha256:${sha256}`);
};
const expectPaged = (response: ExportResponse | null, ignoreHash: boolean = false) => {
if (!response) {
throw new Error("No response from export command");
}
if (!("items" in response)) {
throw new Error("Expected multi-page export, got single");
}
const expected = response.items.map((item) => ({
page: item.page,
hash: ignoreHash ? undefined : getFileHash(item.path).slice(0, 8),
}));
return ctx.expect(expected, `sha256:${expected.map((e) => e.hash).join(",")}`).to.deep;
};
const expectPagedData = (response: ExportResponse | null) => {
if (!response) {
throw new Error("No response from export command");
}
if (!("items" in response)) {
throw new Error("Expected multi-page export, got single");
}
for (const item of response.items) {
ctx.expect(item.data).to.be.a("string");
}
const expected = response.items.map((item) => ({
page: item.page,
hash: hash("sha256", base64DecodeToBytes(item.data as string), "hex").slice(0, 8),
}));
return ctx.expect(expected, `sha256:${expected.map((e) => e.hash).join(",")}`).to.deep;
};
const workspaceCtx = ctx.workspaceCtx("export");
await workspaceCtx.suite("export", async (suite) => {
// const uri = workspaceCtx.workspaceUri();
const uri = workspaceCtx.getWorkspace("export");
const baseUri = workspaceCtx.getWorkspace("export");
vscode.window.showInformationMessage("Start export tests.");
console.log("Start all tests on ", uri.fsPath);
console.log("Start all tests on ", baseUri.fsPath);
suite.addTest("export by test", async () => {
const mainUrl = vscode.Uri.joinPath(uri, "main.typ");
// check and clear target directory
const targetDir = vscode.Uri.joinPath(baseUri, "target");
if (fs.existsSync(targetDir.fsPath)) {
fs.rmdirSync(targetDir.fsPath, { recursive: true });
}
const mainTyp = await ctx.openDocument(mainUrl);
const prepareMain = async (mainPath: string) => {
const mainUrl = vscode.Uri.joinPath(baseUri, mainPath);
// check and clear target directory
const targetDir = vscode.Uri.joinPath(uri, "target");
if (fs.existsSync(targetDir.fsPath)) {
fs.rmdirSync(targetDir.fsPath, { recursive: true });
}
await ctx.openDocument(mainUrl);
};
const exported: string[] = [];
await vscode.commands.executeCommand<string>("tinymist.exportCurrentPdf");
for (const kind of ["Pdf", "Png", "Svg", "Html", "Markdown", "Text"] as const) {
const outputPath = await vscode.commands.executeCommand<string>("tinymist.export", kind);
exported.push(outputPath);
}
const exportDoc = async (
mainPath: string,
kind: ExportKind,
opts?: ExportOpts,
actionOpts?: ExportActionOpts,
) => {
await prepareMain(mainPath);
const dirPat = /target[\\/]typst[\\/]/;
for (const path of exported) {
if (!path) {
throw new Error(`Failed to export ${exported}`);
}
return await vscode.commands.executeCommand<ExportResponse | null>(
"tinymist.export",
kind,
opts,
actionOpts,
);
};
ctx.expect(!!dirPat.exec(path), `${path} is not under correct directory`).eq(true);
ctx.expect(fs.existsSync(path), `${path} does not exist`).eq(true);
// NOTE: For svg tests, the output (especially glyph id) may vary between different environments. So we do not check hash.
if (path.endsWith(".png")) {
const sha256 = hash("sha256", fs.readFileSync(path), "hex");
ctx
.expect(sha256, `sha256:${sha256}`)
.eq("4523673a2ab4ce07de888830e3a84c2e70529703d904ac38138cab904a15dca8");
}
suite.addTest("export current pdf", async () => {
await prepareMain("main.typ");
if (path.endsWith(".txt")) {
const content = fs.readFileSync(path, "utf-8");
ctx
.expect(content)
.eq(`A Hello World Example of Export Typst Document to Various FormatsHello World.`);
}
}
const resp = await vscode.commands.executeCommand<ExportResponse | null>(
"tinymist.exportCurrentPdf",
);
expectSingleHash(resp).to.be.a("string");
});
// close the editor
await vscode.commands.executeCommand("workbench.action.closeActiveEditor");
suite.addTest("export pdf", async () => {
const resp = await exportDoc("main.typ", "Pdf", { creationTimestamp: "0" });
expectSingleHash(resp).eq("f5d1a181");
});
suite.addTest("export html", async () => {
const resp = await exportDoc("main.typ", "Html");
expectSingleHash(resp).eq("a55cf03e");
});
suite.addTest("export markdown", async () => {
const resp = await exportDoc("main.typ", "Markdown");
expectSingleHash(resp).eq("62ca0c72");
});
suite.addTest("export tex", async () => {
const resp = await exportDoc("main.typ", "TeX");
expectSingleHash(resp).eq("492c3e62");
});
suite.addTest("export text", async () => {
const resp = await exportDoc("main.typ", "Text");
expectSingleHash(resp).eq("8ae8f637");
});
suite.addTest("export query", async () => {
const resp = await exportDoc("main.typ", "Query", { format: "json", selector: "heading" });
expectSingleHash(resp).eq("a08f208d");
});
suite.addTest("export png", async () => {
const resp = await exportDoc("main.typ", "Png");
expectPaged(resp).eq([{ page: 0, hash: "a3987ce8" }]);
});
suite.addTest("export svg", async () => {
const resp = await exportDoc("main.typ", "Svg");
expectPaged(resp, true).eq([{ page: 0, hash: undefined }]);
});
suite.addTest("export png paged all", async () => {
const resp = await exportDoc("paged.typ", "Png", { pageNumberTemplate: "paged-{p}" });
expectPaged(resp).eq([
{ page: 0, hash: "27d34da8" },
{ page: 1, hash: "a97c7cc8" },
{ page: 2, hash: "08dfb2df" },
]);
});
suite.addTest("export png paged partial", async () => {
const resp = await exportDoc("paged.typ", "Png", {
pages: ["1"],
pageNumberTemplate: "paged-partial-{p}",
});
expectPaged(resp).eq([{ page: 0, hash: "27d34da8" }]);
});
suite.addTest("export png paged merged", async () => {
const resp = await exportDoc("paged.typ", "Png", {
pages: ["2-3"],
merge: {},
});
expectSingleHash(resp).eq("9b87f1ce");
});
suite.addTest("export svg paged all", async () => {
const resp = await exportDoc("paged.typ", "Svg", { pageNumberTemplate: "paged-{p}" });
expectPaged(resp, true).eq([
{ page: 0, hash: undefined },
{ page: 1, hash: undefined },
{ page: 2, hash: undefined },
]);
});
suite.addTest("export svg paged partial", async () => {
const resp = await exportDoc("paged.typ", "Svg", {
pages: ["2"],
pageNumberTemplate: "paged-partial-{p}",
});
expectPaged(resp, true).eq([{ page: 1, hash: undefined }]);
});
suite.addTest("export svg paged merged", async () => {
const resp = await exportDoc("paged.typ", "Svg", {
pages: ["1-2"],
merge: {},
});
expectSingleHash(resp, true).eq(undefined);
});
suite.addTest("export png paged all no-write", async () => {
const resp = await exportDoc(
"paged.typ",
"Png",
{ pageNumberTemplate: "paged-{p}" },
{ write: false },
);
expectPagedData(resp).eq([
{ page: 0, hash: "27d34da8" },
{ page: 1, hash: "a97c7cc8" },
{ page: 2, hash: "08dfb2df" },
]); // this should be same as "export png paged all"
});
});
}
@ -22,13 +22,19 @@ export function assert(condition: boolean, explanation: string): asserts conditi
const bytes2utf8 = new TextDecoder("utf-8");
const utf82bytes = new TextEncoder();
/** Base64 to bytes
* @param encoded Base64 encoded string
* @returns bytes
*/
export const base64DecodeToBytes = (encoded: string) =>
Uint8Array.from(atob(encoded), (m) => m.charCodeAt(0));
/**
* Base64 to UTF-8
* @param encoded Base64 encoded string
* @returns UTF-8 string
*/
export const base64Decode = (encoded: string) =>
bytes2utf8.decode(Uint8Array.from(atob(encoded), (m) => m.charCodeAt(0)));
export const base64Decode = (encoded: string) => bytes2utf8.decode(base64DecodeToBytes(encoded));
/**
* bytes to Base64
@ -5,6 +5,9 @@
en = " (Preview)"
zh = " (预览)"

["A page number template must be present if the source document renders to multiple pages. Use `{p}` for page numbers, `{0p}` for zero padded page numbers and `{t}` for page count.\nLeave empty for default naming scheme."]
en = "A page number template must be present if the source document renders to multiple pages. Use `{p}` for page numbers, `{0p}` for zero padded page numbers and `{t}` for page count.\nLeave empty for default naming scheme."

["A typst file help export to TeX, e.g. `/ieee-tex.typ` or `@local/ieee-tex:0.1.0`"]
en = "A typst file help export to TeX, e.g. `/ieee-tex.typ` or `@local/ieee-tex:0.1.0`"
zh = "提供一个帮助导出为 TeX 的 typst 文件,例如 `/ieee-tex.typ` 或 `@local/ieee-tex:0.1.0`"

@ -17,6 +20,10 @@ zh = "导出为 HTML"
en = "Export as Markdown"
zh = "导出为 Markdown"

["Export as PDF with specified pages"]
en = "Export as PDF with specified pages"
zh = "导出指定页码为 PDF"

["Export as PDF"]
en = "Export as PDF"
zh = "导出为 PDF"

@ -31,6 +38,7 @@ zh = "导出为 SVG (并更新 tasks.json)"
["Export as TeX"]
en = "Export as TeX"
zh = "导出为 TeX"

["Export as Text"]
en = "Export as Text"

@ -44,32 +52,53 @@ zh = "合并页面并导出为单个 PNG"
en = "Export as a single SVG by merging pages"
zh = "合并页面并导出为单个 SVG"

["Export the first page as a single PNG"]
en = "Export the first page as a single PNG"
zh = "导出首页为单个 PNG"

["Export the specified pages as multiple PNGs"]
en = "Export the specified pages as multiple PNGs"
zh = "将指定页面导出为多个单独 PNG"

["Export the first page as a single SVG"]
en = "Export the first page as a single SVG"
zh = "导出首页为单个 SVG"

["Export the specified pages as multiple SVGs"]
en = "Export the specified pages as multiple SVGs"
zh = "将指定页面导出为多个单独 SVG"

["Hint: you can create and find local packages in the sidebar. See https://github.com/Myriad-Dreamin/tinymist/tree/bc15eb55cee9f9b048aafd5f22472894961a1f51/editors/vscode/e2e-workspaces/ieee-paper for more information."]
en = "Hint: you can create and find local packages in the sidebar. See https://github.com/Myriad-Dreamin/tinymist/tree/bc15eb55cee9f9b048aafd5f22472894961a1f51/editors/vscode/e2e-workspaces/ieee-paper for more information."
zh = "提示:您可以在侧边栏中创建和查找本地包。有关更多信息,请参见 https://github.com/Myriad-Dreamin/tinymist/tree/bc15eb55cee9f9b048aafd5f22472894961a1f51/editors/vscode/e2e-workspaces/ieee-paper。"

["PNG (First Page)"]
en = "PNG (First Page)"
zh = "PNG (首页)"

["Invalid page number: {page}"]
en = "Invalid page number: {page}"
zh = "无效的页码:{page}"

["Invalid page range format: {range}"]
en = "Invalid page range format: {range}"
zh = "无效的页码范围格式:{range}"

["Invalid page range: {range}"]
en = "Invalid page range: {range}"
zh = "无效的页码范围:{range}"

["PDF (Specific Pages)"]
en = "PDF (Specific Pages)"
zh = "PDF (指定页码)"

["PNG (Merged)"]
en = "PNG (Merged)"
zh = "PNG (合并)"

["PNG (Specific Pages)"]
en = "PNG (Specific Pages)"
zh = "PNG (指定页码)"

["PNG (Task)"]
en = "PNG (Task)"
zh = "PNG (任务)"

["Pages to export"]
en = "Pages to export"
zh = "要导出的页码"

["Pick a method to export and show"]
en = "Pick a method to export and show"
zh = "选择导出并预览的方式"

["Pick a method to export"]
en = "Pick a method to export"

@ -87,18 +116,28 @@ zh = "查询当前文档并导出结果为 YAML 文件"
en = "Query current document and export the result as a file. We will ask a few questions and update the tasks.json file for you."
zh = "查询当前文档并导出结果为文件。我们将会询问一些问题并为您更新 tasks.json 文件。"

["SVG (First Page)"]
en = "SVG (First Page)"
zh = "SVG (首页)"

["SVG (Merged)"]
en = "SVG (Merged)"
zh = "SVG (合并)"

["SVG (Specific Pages)"]
en = "SVG (Specific Pages)"

["SVG (Task)"]
en = "SVG (Task)"
zh = "SVG (任务)"

["Specify the pages you want to export"]
en = "Specify the pages you want to export"
zh = "指定要导出的页码"

["TeX processor"]
en = "TeX processor"
zh = "TeX 导出的处理脚本"

["e.g. `1-3,5,7-9`, leave empty for all pages"]
en = "e.g. `1-3,5,7-9`, leave empty for all pages"
zh = "例如 `1-3,5,7-9`,留空表示导出所有页"

["e.g., `page-{0p}-of-{t}.png`"]
en = "e.g., `page-{0p}-of-{t}.png`"