mirror of
https://github.com/Myriad-Dreamin/tinymist.git
synced 2025-11-25 05:22:52 +00:00
feat: split tinymist-task (#1277)
* feat: split tinymist-task * cargo lock * revert html changes * Revert "revert html changes" This reverts commitb82662e441. * Revert "Revert "revert html changes"" This reverts commitb42643399c. * fix: examples
This commit is contained in:
parent
2c335b25d1
commit
3799db6dd4
24 changed files with 1538 additions and 696 deletions
52
crates/tinymist-task/Cargo.toml
Normal file
52
crates/tinymist-task/Cargo.toml
Normal file
|
|
@ -0,0 +1,52 @@
|
|||
[package]
|
||||
name = "tinymist-task"
|
||||
description = "Task model of typst for tinymist."
|
||||
categories = ["compilers"]
|
||||
keywords = ["language", "typst"]
|
||||
authors.workspace = true
|
||||
version.workspace = true
|
||||
license.workspace = true
|
||||
edition.workspace = true
|
||||
homepage.workspace = true
|
||||
repository.workspace = true
|
||||
rust-version.workspace = true
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
chrono.workspace = true
|
||||
clap.workspace = true
|
||||
comemo.workspace = true
|
||||
dirs.workspace = true
|
||||
ecow.workspace = true
|
||||
log.workspace = true
|
||||
parking_lot.workspace = true
|
||||
pathdiff.workspace = true
|
||||
tokio = { workspace = true, features = ["sync"] }
|
||||
rayon.workspace = true
|
||||
rpds.workspace = true
|
||||
semver.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
serde_yaml.workspace = true
|
||||
tinymist-world = { workspace = true }
|
||||
tinymist-std = { workspace = true }
|
||||
tinymist-derive.workspace = true
|
||||
toml.workspace = true
|
||||
typst.workspace = true
|
||||
typst-assets.workspace = true
|
||||
typst-pdf.workspace = true
|
||||
typst-shim.workspace = true
|
||||
typst-svg.workspace = true
|
||||
typst-render.workspace = true
|
||||
notify.workspace = true
|
||||
|
||||
[features]
|
||||
|
||||
default = ["pdf", "text"]
|
||||
no-content-hint = []
|
||||
|
||||
pdf = []
|
||||
text = []
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
408
crates/tinymist-task/src/compute.rs
Normal file
408
crates/tinymist-task/src/compute.rs
Normal file
|
|
@ -0,0 +1,408 @@
|
|||
#![allow(missing_docs)]
|
||||
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
|
||||
use comemo::Track;
|
||||
use ecow::EcoString;
|
||||
use tinymist_std::error::prelude::*;
|
||||
use tinymist_std::typst::TypstPagedDocument;
|
||||
use tinymist_world::{
|
||||
args::convert_source_date_epoch, CompileSnapshot, CompilerFeat, ExportComputation,
|
||||
WorldComputeGraph,
|
||||
};
|
||||
use typst::diag::{SourceResult, StrResult};
|
||||
use typst::foundations::{Bytes, Content, Datetime, IntoValue, LocatableSelector, Scope, Value};
|
||||
use typst::layout::Abs;
|
||||
use typst::syntax::{ast, Span, SyntaxNode};
|
||||
use typst::visualize::Color;
|
||||
use typst::World;
|
||||
use typst_pdf::PdfOptions;
|
||||
use typst_shim::eval::EvalMode;
|
||||
|
||||
use crate::model::{ExportPdfTask, ExportPngTask, ExportSvgTask};
|
||||
use crate::primitives::TaskWhen;
|
||||
use crate::{ExportTransform, Pages, QueryTask};
|
||||
|
||||
#[cfg(feature = "pdf")]
|
||||
pub mod pdf;
|
||||
#[cfg(feature = "pdf")]
|
||||
pub use pdf::*;
|
||||
#[cfg(feature = "text")]
|
||||
pub mod text;
|
||||
#[cfg(feature = "text")]
|
||||
pub use text::*;
|
||||
|
||||
pub struct SvgFlag;
|
||||
pub struct PngFlag;
|
||||
pub struct HtmlFlag;
|
||||
|
||||
pub struct ExportTimings;
|
||||
|
||||
impl ExportTimings {
|
||||
pub fn needs_run<F: CompilerFeat>(
|
||||
snap: &CompileSnapshot<F>,
|
||||
timing: Option<TaskWhen>,
|
||||
docs: Option<&TypstPagedDocument>,
|
||||
) -> Option<bool> {
|
||||
let s = snap.signal;
|
||||
let when = timing.unwrap_or(TaskWhen::Never);
|
||||
if !matches!(when, TaskWhen::Never) && s.by_entry_update {
|
||||
return Some(true);
|
||||
}
|
||||
|
||||
match when {
|
||||
TaskWhen::Never => Some(false),
|
||||
TaskWhen::OnType => Some(s.by_mem_events),
|
||||
TaskWhen::OnSave => Some(s.by_fs_events),
|
||||
TaskWhen::OnDocumentHasTitle if s.by_fs_events => {
|
||||
docs.map(|doc| doc.info.title.is_some())
|
||||
}
|
||||
TaskWhen::OnDocumentHasTitle => Some(false),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct SvgExport;
|
||||
|
||||
impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for SvgExport {
|
||||
type Output = String;
|
||||
type Config = ExportSvgTask;
|
||||
|
||||
fn run(
|
||||
_graph: &Arc<WorldComputeGraph<F>>,
|
||||
doc: &Arc<TypstPagedDocument>,
|
||||
config: &ExportSvgTask,
|
||||
) -> Result<String> {
|
||||
let (is_first, merged_gap) = get_page_selection(&config.export)?;
|
||||
|
||||
let first_page = doc.pages.first();
|
||||
|
||||
Ok(if is_first {
|
||||
if let Some(first_page) = first_page {
|
||||
typst_svg::svg(first_page)
|
||||
} else {
|
||||
typst_svg::svg_merged(doc, merged_gap)
|
||||
}
|
||||
} else {
|
||||
typst_svg::svg_merged(doc, merged_gap)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// impl<F: CompilerFeat> WorldComputable<F> for SvgExport {
|
||||
// type Output = Option<String>;
|
||||
|
||||
// fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self::Output> {
|
||||
// OptionDocumentTask::run_export::<F, Self>(graph)
|
||||
// }
|
||||
// }
|
||||
|
||||
pub struct PngExport;
|
||||
|
||||
impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for PngExport {
|
||||
type Output = Bytes;
|
||||
type Config = ExportPngTask;
|
||||
|
||||
fn run(
|
||||
_graph: &Arc<WorldComputeGraph<F>>,
|
||||
doc: &Arc<TypstPagedDocument>,
|
||||
config: &ExportPngTask,
|
||||
) -> Result<Bytes> {
|
||||
let ppi = config.ppi.to_f32();
|
||||
if ppi <= 1e-6 {
|
||||
tinymist_std::bail!("invalid ppi: {ppi}");
|
||||
}
|
||||
|
||||
let fill = if let Some(fill) = &config.fill {
|
||||
parse_color(fill.clone()).map_err(|err| anyhow::anyhow!("invalid fill ({err})"))?
|
||||
} else {
|
||||
Color::WHITE
|
||||
};
|
||||
|
||||
let (is_first, merged_gap) = get_page_selection(&config.export)?;
|
||||
|
||||
let ppp = ppi / 72.;
|
||||
let pixmap = if is_first {
|
||||
if let Some(first_page) = doc.pages.first() {
|
||||
typst_render::render(first_page, ppp)
|
||||
} else {
|
||||
typst_render::render_merged(doc, ppp, merged_gap, Some(fill))
|
||||
}
|
||||
} else {
|
||||
typst_render::render_merged(doc, ppp, merged_gap, Some(fill))
|
||||
};
|
||||
|
||||
pixmap
|
||||
.encode_png()
|
||||
.map(Bytes::from)
|
||||
.context_ut("failed to encode PNG")
|
||||
}
|
||||
}
|
||||
|
||||
// impl<F: CompilerFeat> WorldComputable<F> for PngExport {
|
||||
// type Output = Option<Bytes>;
|
||||
|
||||
// fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self::Output> {
|
||||
// OptionDocumentTask::run_export::<F, Self>(graph)
|
||||
// }
|
||||
// }
|
||||
|
||||
// impl<F: CompilerFeat> WorldComputable<F> for HtmlExport {
|
||||
// type Output = Option<String>;
|
||||
|
||||
// fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self::Output> {
|
||||
// OptionDocumentTask::run_export::<F, Self>(graph)
|
||||
// }
|
||||
// }
|
||||
|
||||
pub struct DocumentQuery;
|
||||
|
||||
impl DocumentQuery {
|
||||
// todo: query exporter
|
||||
/// Retrieve the matches for the selector.
|
||||
pub fn retrieve(
|
||||
world: &dyn World,
|
||||
selector: &str,
|
||||
document: &TypstPagedDocument,
|
||||
) -> StrResult<Vec<Content>> {
|
||||
let selector = typst_shim::eval::eval_string(
|
||||
world.track(),
|
||||
selector,
|
||||
Span::detached(),
|
||||
EvalMode::Code,
|
||||
Scope::default(),
|
||||
)
|
||||
.map_err(|errors| {
|
||||
let mut message = EcoString::from("failed to evaluate selector");
|
||||
for (i, error) in errors.into_iter().enumerate() {
|
||||
message.push_str(if i == 0 { ": " } else { ", " });
|
||||
message.push_str(&error.message);
|
||||
}
|
||||
message
|
||||
})?
|
||||
.cast::<LocatableSelector>()
|
||||
.map_err(|e| EcoString::from(format!("failed to cast: {}", e.message())))?;
|
||||
|
||||
Ok(document
|
||||
.introspector
|
||||
.query(&selector.0)
|
||||
.into_iter()
|
||||
.collect::<Vec<_>>())
|
||||
}
|
||||
|
||||
fn run_inner<F: CompilerFeat>(
|
||||
g: &Arc<WorldComputeGraph<F>>,
|
||||
doc: &Arc<TypstPagedDocument>,
|
||||
config: &QueryTask,
|
||||
) -> Result<Vec<Value>> {
|
||||
let selector = &config.selector;
|
||||
let elements = Self::retrieve(&g.snap.world, selector, doc.as_ref())
|
||||
.map_err(|e| anyhow::anyhow!("failed to retrieve: {e}"))?;
|
||||
if config.one && elements.len() != 1 {
|
||||
bail!("expected exactly one element, found {}", elements.len());
|
||||
}
|
||||
|
||||
Ok(elements
|
||||
.into_iter()
|
||||
.filter_map(|c| match &config.field {
|
||||
Some(field) => c.get_by_name(field).ok(),
|
||||
_ => Some(c.into_value()),
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
|
||||
pub fn get_as_value<F: CompilerFeat>(
|
||||
g: &Arc<WorldComputeGraph<F>>,
|
||||
doc: &Arc<TypstPagedDocument>,
|
||||
config: &QueryTask,
|
||||
) -> Result<serde_json::Value> {
|
||||
let mapped = Self::run_inner(g, doc, config)?;
|
||||
|
||||
let res = if config.one {
|
||||
let Some(value) = mapped.first() else {
|
||||
bail!("no such field found for element");
|
||||
};
|
||||
serde_json::to_value(value)
|
||||
} else {
|
||||
serde_json::to_value(&mapped)
|
||||
};
|
||||
|
||||
res.context("failed to serialize")
|
||||
}
|
||||
}
|
||||
|
||||
impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for DocumentQuery {
|
||||
type Output = SourceResult<String>;
|
||||
type Config = QueryTask;
|
||||
|
||||
fn run(
|
||||
g: &Arc<WorldComputeGraph<F>>,
|
||||
doc: &Arc<TypstPagedDocument>,
|
||||
config: &QueryTask,
|
||||
) -> Result<SourceResult<String>> {
|
||||
let pretty = false;
|
||||
let mapped = Self::run_inner(g, doc, config)?;
|
||||
|
||||
let res = if config.one {
|
||||
let Some(value) = mapped.first() else {
|
||||
bail!("no such field found for element");
|
||||
};
|
||||
serialize(value, &config.format, pretty)
|
||||
} else {
|
||||
serialize(&mapped, &config.format, pretty)
|
||||
};
|
||||
|
||||
res.map(Ok)
|
||||
}
|
||||
}
|
||||
|
||||
/// Serialize data to the output format.
|
||||
fn serialize(data: &impl serde::Serialize, format: &str, pretty: bool) -> Result<String> {
|
||||
Ok(match format {
|
||||
"json" if pretty => serde_json::to_string_pretty(data).context("serialize query")?,
|
||||
"json" => serde_json::to_string(data).context("serialize query")?,
|
||||
"yaml" => serde_yaml::to_string(&data).context_ut("serialize query")?,
|
||||
"txt" => {
|
||||
use serde_json::Value::*;
|
||||
let value = serde_json::to_value(data).context("serialize query")?;
|
||||
match value {
|
||||
String(s) => s,
|
||||
_ => {
|
||||
let kind = match value {
|
||||
Null => "null",
|
||||
Bool(_) => "boolean",
|
||||
Number(_) => "number",
|
||||
String(_) => "string",
|
||||
Array(_) => "array",
|
||||
Object(_) => "object",
|
||||
};
|
||||
bail!("expected a string value for format: {format}, got {kind}")
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => bail!("unsupported format for query: {format}"),
|
||||
})
|
||||
}
|
||||
|
||||
/// Gets legacy page selection
|
||||
pub fn get_page_selection(task: &crate::ExportTask) -> Result<(bool, Abs)> {
|
||||
let is_first = task
|
||||
.transform
|
||||
.iter()
|
||||
.any(|t| matches!(t, ExportTransform::Pages { ranges, .. } if ranges == &[Pages::FIRST]));
|
||||
|
||||
let mut gap_res = Abs::default();
|
||||
if !is_first {
|
||||
for trans in &task.transform {
|
||||
if let ExportTransform::Merge { gap } = trans {
|
||||
let gap = gap
|
||||
.as_deref()
|
||||
.map(parse_length)
|
||||
.transpose()
|
||||
.context_ut("failed to parse gap")?;
|
||||
gap_res = gap.unwrap_or_default();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok((is_first, gap_res))
|
||||
}
|
||||
|
||||
fn parse_length(gap: &str) -> Result<Abs> {
|
||||
let length = typst::syntax::parse_code(gap);
|
||||
if length.erroneous() {
|
||||
bail!("invalid length: {gap}, errors: {:?}", length.errors());
|
||||
}
|
||||
|
||||
let length: Option<ast::Numeric> = descendants(&length).into_iter().find_map(SyntaxNode::cast);
|
||||
|
||||
let Some(length) = length else {
|
||||
bail!("not a length: {gap}");
|
||||
};
|
||||
|
||||
let (value, unit) = length.get();
|
||||
match unit {
|
||||
ast::Unit::Pt => Ok(Abs::pt(value)),
|
||||
ast::Unit::Mm => Ok(Abs::mm(value)),
|
||||
ast::Unit::Cm => Ok(Abs::cm(value)),
|
||||
ast::Unit::In => Ok(Abs::inches(value)),
|
||||
_ => bail!("invalid unit: {unit:?} in {gap}"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Low performance but simple recursive iterator.
|
||||
fn descendants(node: &SyntaxNode) -> impl IntoIterator<Item = &SyntaxNode> + '_ {
|
||||
let mut res = vec![];
|
||||
for child in node.children() {
|
||||
res.push(child);
|
||||
res.extend(descendants(child));
|
||||
}
|
||||
|
||||
res
|
||||
}
|
||||
|
||||
fn parse_color(fill: String) -> anyhow::Result<Color> {
|
||||
match fill.as_str() {
|
||||
"black" => Ok(Color::BLACK),
|
||||
"white" => Ok(Color::WHITE),
|
||||
"red" => Ok(Color::RED),
|
||||
"green" => Ok(Color::GREEN),
|
||||
"blue" => Ok(Color::BLUE),
|
||||
hex if hex.starts_with('#') => {
|
||||
Color::from_str(&hex[1..]).map_err(|e| anyhow::anyhow!("failed to parse color: {e}"))
|
||||
}
|
||||
_ => anyhow::bail!("invalid color: {fill}"),
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert [`chrono::DateTime`] to [`Datetime`]
|
||||
fn convert_datetime(date_time: chrono::DateTime<chrono::Utc>) -> Option<Datetime> {
|
||||
use chrono::{Datelike, Timelike};
|
||||
Datetime::from_ymd_hms(
|
||||
date_time.year(),
|
||||
date_time.month().try_into().ok()?,
|
||||
date_time.day().try_into().ok()?,
|
||||
date_time.hour().try_into().ok()?,
|
||||
date_time.minute().try_into().ok()?,
|
||||
date_time.second().try_into().ok()?,
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_parse_color() {
|
||||
assert_eq!(parse_color("black".to_owned()).unwrap(), Color::BLACK);
|
||||
assert_eq!(parse_color("white".to_owned()).unwrap(), Color::WHITE);
|
||||
assert_eq!(parse_color("red".to_owned()).unwrap(), Color::RED);
|
||||
assert_eq!(parse_color("green".to_owned()).unwrap(), Color::GREEN);
|
||||
assert_eq!(parse_color("blue".to_owned()).unwrap(), Color::BLUE);
|
||||
assert_eq!(
|
||||
parse_color("#000000".to_owned()).unwrap().to_hex(),
|
||||
"#000000"
|
||||
);
|
||||
assert_eq!(
|
||||
parse_color("#ffffff".to_owned()).unwrap().to_hex(),
|
||||
"#ffffff"
|
||||
);
|
||||
assert_eq!(
|
||||
parse_color("#000000cc".to_owned()).unwrap().to_hex(),
|
||||
"#000000cc"
|
||||
);
|
||||
assert!(parse_color("invalid".to_owned()).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_length() {
|
||||
assert_eq!(parse_length("1pt").unwrap(), Abs::pt(1.));
|
||||
assert_eq!(parse_length("1mm").unwrap(), Abs::mm(1.));
|
||||
assert_eq!(parse_length("1cm").unwrap(), Abs::cm(1.));
|
||||
assert_eq!(parse_length("1in").unwrap(), Abs::inches(1.));
|
||||
assert!(parse_length("1").is_err());
|
||||
assert!(parse_length("1px").is_err());
|
||||
}
|
||||
}
|
||||
81
crates/tinymist-task/src/compute/pdf.rs
Normal file
81
crates/tinymist-task/src/compute/pdf.rs
Normal file
|
|
@ -0,0 +1,81 @@
|
|||
use super::*;
|
||||
|
||||
pub use typst_pdf::pdf;
|
||||
pub use typst_pdf::PdfStandard as TypstPdfStandard;
|
||||
pub struct PdfExport;
|
||||
|
||||
impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for PdfExport {
|
||||
type Output = Bytes;
|
||||
type Config = ExportPdfTask;
|
||||
|
||||
fn run(
|
||||
_graph: &Arc<WorldComputeGraph<F>>,
|
||||
doc: &Arc<TypstPagedDocument>,
|
||||
config: &ExportPdfTask,
|
||||
) -> Result<Bytes> {
|
||||
// todo: timestamp world.now()
|
||||
let creation_timestamp = config
|
||||
.creation_timestamp
|
||||
.map(convert_source_date_epoch)
|
||||
.transpose()
|
||||
.context_ut("parse pdf creation timestamp")?
|
||||
.unwrap_or_else(chrono::Utc::now);
|
||||
|
||||
// todo: Some(pdf_uri.as_str())
|
||||
|
||||
Ok(Bytes::from(typst_pdf::pdf(
|
||||
doc,
|
||||
&PdfOptions {
|
||||
timestamp: convert_datetime(creation_timestamp),
|
||||
..Default::default()
|
||||
},
|
||||
)?))
|
||||
}
|
||||
}
|
||||
|
||||
// impl<F: CompilerFeat> WorldComputable<F> for PdfExport {
|
||||
// type Output = Option<Bytes>;
|
||||
|
||||
// fn compute(graph: &Arc<WorldComputeGraph<F>>) -> Result<Self::Output> {
|
||||
// OptionDocumentTask::run_export::<F, Self>(graph)
|
||||
// }
|
||||
// }
|
||||
|
||||
// use std::sync::Arc;
|
||||
|
||||
// use reflexo::typst::TypstPagedDocument;
|
||||
// use typst::{diag:: World;
|
||||
// use typst_pdf::{PdfOptions, PdfStandard, PdfStandards, Timestamp};
|
||||
|
||||
// #[derive(Debug, Clone, Default)]
|
||||
// pub struct PdfDocExporter {
|
||||
// ctime: Option<Timestamp>,
|
||||
// standards: Option<PdfStandards>,
|
||||
// }
|
||||
|
||||
// impl PdfDocExporter {
|
||||
// pub fn with_ctime(mut self, v: Option<Timestamp>) -> Self {
|
||||
// self.ctime = v;
|
||||
// self
|
||||
// }
|
||||
|
||||
// pub fn with_standard(mut self, v: Option<PdfStandard>) -> Self {
|
||||
// self.standards = v.map(|v| PdfStandards::new(&[v]).unwrap());
|
||||
// self
|
||||
// }
|
||||
// }
|
||||
|
||||
// impl Exporter<TypstPagedDocument, Vec<u8>> for PdfDocExporter {
|
||||
// fn export(&self, _world: &dyn World, output: Arc<TypstPagedDocument>) ->
|
||||
// Vecu8>> { // todo: ident option
|
||||
|
||||
// typst_pdf::pdf(
|
||||
// output.as_ref(),
|
||||
// &PdfOptions {
|
||||
// timestamp: self.ctime,
|
||||
// standards: self.standards.clone().unwrap_or_default(),
|
||||
// ..Default::default()
|
||||
// },
|
||||
// )
|
||||
// }
|
||||
// }
|
||||
71
crates/tinymist-task/src/compute/text.rs
Normal file
71
crates/tinymist-task/src/compute/text.rs
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
use core::fmt;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::ExportTextTask;
|
||||
use tinymist_std::error::prelude::*;
|
||||
use tinymist_std::typst::{TypstDocument, TypstPagedDocument};
|
||||
use tinymist_world::{CompilerFeat, ExportComputation, WorldComputeGraph};
|
||||
|
||||
pub struct TextExport;
|
||||
|
||||
impl TextExport {
|
||||
pub fn run_on_doc(doc: &TypstDocument) -> Result<String> {
|
||||
Ok(format!("{}", FullTextDigest(doc.clone())))
|
||||
}
|
||||
}
|
||||
|
||||
impl<F: CompilerFeat> ExportComputation<F, TypstPagedDocument> for TextExport {
|
||||
type Output = String;
|
||||
type Config = ExportTextTask;
|
||||
|
||||
fn run(
|
||||
_g: &Arc<WorldComputeGraph<F>>,
|
||||
doc: &Arc<TypstPagedDocument>,
|
||||
_config: &ExportTextTask,
|
||||
) -> Result<String> {
|
||||
Self::run_on_doc(&TypstDocument::Paged(doc.clone()))
|
||||
}
|
||||
}
|
||||
|
||||
/// A full text digest of a document.
|
||||
pub struct FullTextDigest(pub TypstDocument);
|
||||
|
||||
impl FullTextDigest {
|
||||
fn export_frame(f: &mut fmt::Formatter<'_>, doc: &typst::layout::Frame) -> fmt::Result {
|
||||
for (_, item) in doc.items() {
|
||||
Self::export_item(f, item)?;
|
||||
}
|
||||
#[cfg(not(feature = "no-content-hint"))]
|
||||
{
|
||||
use std::fmt::Write;
|
||||
let c = doc.content_hint();
|
||||
if c != '\0' {
|
||||
f.write_char(c)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn export_item(f: &mut fmt::Formatter<'_>, item: &typst::layout::FrameItem) -> fmt::Result {
|
||||
use typst::layout::FrameItem::*;
|
||||
match item {
|
||||
Group(g) => Self::export_frame(f, &g.frame),
|
||||
Text(t) => f.write_str(t.text.as_str()),
|
||||
Link(..) | Tag(..) | Shape(..) | Image(..) => Ok(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for FullTextDigest {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match &self.0 {
|
||||
TypstDocument::Paged(paged_doc) => {
|
||||
for page in paged_doc.pages.iter() {
|
||||
Self::export_frame(f, &page.frame)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
10
crates/tinymist-task/src/lib.rs
Normal file
10
crates/tinymist-task/src/lib.rs
Normal file
|
|
@ -0,0 +1,10 @@
|
|||
#![allow(missing_docs)]
|
||||
|
||||
mod model;
|
||||
pub use model::*;
|
||||
|
||||
mod primitives;
|
||||
pub use primitives::*;
|
||||
|
||||
pub mod compute;
|
||||
pub use compute::*;
|
||||
303
crates/tinymist-task/src/model.rs
Normal file
303
crates/tinymist-task/src/model.rs
Normal file
|
|
@ -0,0 +1,303 @@
|
|||
//! Project task models.
|
||||
|
||||
use std::hash::Hash;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use super::{Id, Pages, PathPattern, PdfStandard, Scalar, TaskWhen};
|
||||
|
||||
/// A project task application specifier. This is used for specifying tasks to
|
||||
/// run in a project. When the language service notifies an update event of the
|
||||
/// project, it will check whether any associated tasks need to be run.
|
||||
///
|
||||
/// Each task can have different timing and conditions for running. See
|
||||
/// [`TaskWhen`] for more information.
|
||||
///
|
||||
/// The available task types listed in the [`ProjectTask`] only represent the
|
||||
/// direct formats supported by the typst compiler. More task types can be
|
||||
/// customized by the [`ExportTransform`].
|
||||
///
|
||||
/// ## Examples
|
||||
///
|
||||
/// Export a JSON file with the pdfpc notes of the document:
|
||||
///
|
||||
/// ```bash
|
||||
/// tinymist project query main.typ --format json --selector "<pdfpc-notes>" --field value --one
|
||||
/// ```
|
||||
///
|
||||
/// Export a PDF file and then runs a ghostscript command to compress it:
|
||||
///
|
||||
/// ```bash
|
||||
/// tinymist project compile main.typ --pipe 'import "@local/postprocess:0.0.1": ghostscript; ghostscript(output.path)'
|
||||
/// ```
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case", tag = "type")]
|
||||
pub struct ApplyProjectTask {
|
||||
/// The task's ID.
|
||||
pub id: Id,
|
||||
/// The document's ID.
|
||||
pub document: Id,
|
||||
/// The task to run.
|
||||
#[serde(flatten)]
|
||||
pub task: ProjectTask,
|
||||
}
|
||||
|
||||
impl ApplyProjectTask {
|
||||
/// Returns the document's ID.
|
||||
pub fn doc_id(&self) -> &Id {
|
||||
&self.document
|
||||
}
|
||||
|
||||
/// Returns the task's ID.
|
||||
pub fn id(&self) -> &Id {
|
||||
&self.id
|
||||
}
|
||||
}
|
||||
|
||||
/// A project task specifier. This structure specifies the arguments for a task.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case", tag = "type")]
|
||||
pub enum ProjectTask {
|
||||
/// A preview task.
|
||||
Preview(PreviewTask),
|
||||
/// An export PDF task.
|
||||
ExportPdf(ExportPdfTask),
|
||||
/// An export PNG task.
|
||||
ExportPng(ExportPngTask),
|
||||
/// An export SVG task.
|
||||
ExportSvg(ExportSvgTask),
|
||||
/// An export HTML task.
|
||||
ExportHtml(ExportHtmlTask),
|
||||
/// An export Markdown task.
|
||||
ExportMd(ExportMarkdownTask),
|
||||
/// An export Text task.
|
||||
ExportText(ExportTextTask),
|
||||
/// An query task.
|
||||
Query(QueryTask),
|
||||
// todo: compatibility
|
||||
// An export task of another type.
|
||||
// Other(serde_json::Value),
|
||||
}
|
||||
|
||||
impl ProjectTask {
|
||||
/// Returns the timing of executing the task.
|
||||
pub fn when(&self) -> Option<TaskWhen> {
|
||||
Some(match self {
|
||||
Self::Preview(task) => task.when,
|
||||
Self::ExportPdf(..)
|
||||
| Self::ExportPng(..)
|
||||
| Self::ExportSvg(..)
|
||||
| Self::ExportHtml(..)
|
||||
| Self::ExportMd(..)
|
||||
| Self::ExportText(..)
|
||||
| Self::Query(..) => self.as_export()?.when,
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the export configuration of a task.
|
||||
pub fn as_export(&self) -> Option<&ExportTask> {
|
||||
Some(match self {
|
||||
Self::Preview(..) => return None,
|
||||
Self::ExportPdf(task) => &task.export,
|
||||
Self::ExportPng(task) => &task.export,
|
||||
Self::ExportSvg(task) => &task.export,
|
||||
Self::ExportHtml(task) => &task.export,
|
||||
Self::ExportMd(task) => &task.export,
|
||||
Self::ExportText(task) => &task.export,
|
||||
Self::Query(task) => &task.export,
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns extension of the artifact.
|
||||
pub fn extension(&self) -> &str {
|
||||
match self {
|
||||
Self::ExportPdf { .. } => "pdf",
|
||||
Self::Preview(..) | Self::ExportHtml { .. } => "html",
|
||||
Self::ExportMd { .. } => "md",
|
||||
Self::ExportText { .. } => "txt",
|
||||
Self::ExportSvg { .. } => "svg",
|
||||
Self::ExportPng { .. } => "png",
|
||||
Self::Query(QueryTask {
|
||||
format,
|
||||
output_extension,
|
||||
..
|
||||
}) => output_extension.as_deref().unwrap_or(format),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A preview task specifier.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct PreviewTask {
|
||||
/// When to run the task. See [`TaskWhen`] for more
|
||||
/// information.
|
||||
pub when: TaskWhen,
|
||||
}
|
||||
|
||||
/// An export task specifier.
|
||||
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct ExportTask {
|
||||
/// When to run the task
|
||||
pub when: TaskWhen,
|
||||
/// The output path pattern.
|
||||
pub output: Option<PathPattern>,
|
||||
/// The task's transforms.
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
pub transform: Vec<ExportTransform>,
|
||||
}
|
||||
|
||||
impl ExportTask {
|
||||
/// Creates a new unmounted export task.
|
||||
pub fn new(when: TaskWhen) -> Self {
|
||||
Self {
|
||||
when,
|
||||
output: None,
|
||||
transform: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Pretty prints the output whenever possible.
|
||||
pub fn apply_pretty(&mut self) {
|
||||
self.transform
|
||||
.push(ExportTransform::Pretty { script: None });
|
||||
}
|
||||
}
|
||||
|
||||
/// The legacy page selection specifier.
|
||||
#[derive(Default, Debug, Clone, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub enum PageSelection {
|
||||
/// Selects the first page.
|
||||
#[default]
|
||||
First,
|
||||
/// Merges all pages into a single page.
|
||||
Merged {
|
||||
/// The gap between pages (in pt).
|
||||
gap: Option<String>,
|
||||
},
|
||||
}
|
||||
|
||||
/// A project export transform specifier.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub enum ExportTransform {
|
||||
/// Only pick a subset of pages.
|
||||
Pages {
|
||||
/// The page ranges to export.
|
||||
ranges: Vec<Pages>,
|
||||
},
|
||||
/// Merge pages into a single page.
|
||||
Merge {
|
||||
/// The gap between pages (typst code expression, e.g. `1pt`).
|
||||
gap: Option<String>,
|
||||
},
|
||||
/// Execute a transform script.
|
||||
Script {
|
||||
/// The postprocess script (typst script) to run.
|
||||
#[serde(skip_serializing_if = "Option::is_none", default)]
|
||||
script: Option<String>,
|
||||
},
|
||||
/// Uses a pretty printer to format the output.
|
||||
Pretty {
|
||||
/// The pretty command (typst script) to run.
|
||||
///
|
||||
/// If not provided, the default pretty printer will be used.
|
||||
/// Note: the builtin one may be only effective for json outputs.
|
||||
#[serde(skip_serializing_if = "Option::is_none", default)]
|
||||
script: Option<String>,
|
||||
},
|
||||
}
|
||||
|
||||
/// An export pdf task specifier.
|
||||
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct ExportPdfTask {
|
||||
/// The shared export arguments.
|
||||
#[serde(flatten)]
|
||||
pub export: ExportTask,
|
||||
/// One (or multiple comma-separated) PDF standards that Typst will enforce
|
||||
/// conformance with.
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
pub pdf_standards: Vec<PdfStandard>,
|
||||
/// The document's creation date formatted as a UNIX timestamp (in seconds).
|
||||
///
|
||||
/// For more information, see <https://reproducible-builds.org/specs/source-date-epoch/>.
|
||||
#[serde(skip_serializing_if = "Option::is_none", default)]
|
||||
pub creation_timestamp: Option<i64>,
|
||||
}
|
||||
|
||||
/// An export png task specifier.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct ExportPngTask {
|
||||
/// The shared export arguments.
|
||||
#[serde(flatten)]
|
||||
pub export: ExportTask,
|
||||
/// The PPI (pixels per inch) to use for PNG export.
|
||||
pub ppi: Scalar,
|
||||
/// The expression constructing background fill color (in typst script).
|
||||
/// e.g. `#ffffff`, `#000000`, `rgba(255, 255, 255, 0.5)`.
|
||||
///
|
||||
/// If not provided, the default background color specified in the document
|
||||
/// will be used.
|
||||
#[serde(skip_serializing_if = "Option::is_none", default)]
|
||||
pub fill: Option<String>,
|
||||
}
|
||||
|
||||
/// An export svg task specifier.
|
||||
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct ExportSvgTask {
|
||||
/// The shared export arguments.
|
||||
#[serde(flatten)]
|
||||
pub export: ExportTask,
|
||||
}
|
||||
|
||||
/// An export html task specifier.
|
||||
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct ExportHtmlTask {
|
||||
/// The shared export arguments.
|
||||
#[serde(flatten)]
|
||||
pub export: ExportTask,
|
||||
}
|
||||
|
||||
/// An export markdown task specifier.
|
||||
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct ExportMarkdownTask {
|
||||
/// The shared export arguments.
|
||||
#[serde(flatten)]
|
||||
pub export: ExportTask,
|
||||
}
|
||||
|
||||
/// An export text task specifier.
|
||||
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct ExportTextTask {
|
||||
/// The shared export arguments.
|
||||
#[serde(flatten)]
|
||||
pub export: ExportTask,
|
||||
}
|
||||
|
||||
/// An export query task specifier.
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
pub struct QueryTask {
|
||||
/// The shared export arguments.
|
||||
#[serde(flatten)]
|
||||
pub export: ExportTask,
|
||||
/// The format to serialize in. Can be `json`, `yaml`, or `txt`,
|
||||
pub format: String,
|
||||
/// Uses a different output extension from the one inferring from the
|
||||
/// [`Self::format`].
|
||||
pub output_extension: Option<String>,
|
||||
/// Defines which elements to retrieve.
|
||||
pub selector: String,
|
||||
/// Extracts just one field from all retrieved elements.
|
||||
pub field: Option<String>,
|
||||
/// Expects and retrieves exactly one element.
|
||||
pub one: bool,
|
||||
}
|
||||
471
crates/tinymist-task/src/primitives.rs
Normal file
471
crates/tinymist-task/src/primitives.rs
Normal file
|
|
@ -0,0 +1,471 @@
|
|||
use core::fmt;
|
||||
use std::hash::{Hash, Hasher};
|
||||
use std::num::NonZeroUsize;
|
||||
use std::ops::RangeInclusive;
|
||||
use std::path::PathBuf;
|
||||
use std::{path::Path, str::FromStr};
|
||||
|
||||
use clap::ValueEnum;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tinymist_std::error::prelude::*;
|
||||
use tinymist_std::path::{unix_slash, PathClean};
|
||||
use tinymist_std::ImmutPath;
|
||||
use tinymist_world::vfs::WorkspaceResolver;
|
||||
use tinymist_world::{CompilerFeat, CompilerWorld, EntryReader, EntryState};
|
||||
use typst::diag::EcoString;
|
||||
use typst::syntax::FileId;
|
||||
|
||||
/// A scalar that is not NaN.
|
||||
#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)]
|
||||
pub struct Scalar(f32);
|
||||
|
||||
impl TryFrom<f32> for Scalar {
|
||||
type Error = &'static str;
|
||||
|
||||
fn try_from(value: f32) -> Result<Self, Self::Error> {
|
||||
if value.is_nan() {
|
||||
Err("NaN is not a valid scalar value")
|
||||
} else {
|
||||
Ok(Scalar(value))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Scalar {
    /// Converts the scalar to an f32.
    ///
    /// The returned value is never NaN, since construction rejects NaN.
    pub fn to_f32(self) -> f32 {
        self.0
    }
}
|
||||
|
||||
impl PartialEq for Scalar {
    // Plain float comparison; a genuine equivalence relation here because
    // NaN (the only value with `x != x`) can never be constructed.
    fn eq(&self, other: &Self) -> bool {
        self.0 == other.0
    }
}
|
||||
|
||||
impl Eq for Scalar {}
|
||||
|
||||
impl Hash for Scalar {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
self.0.to_bits().hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for Scalar {
    // Delegates to `Ord`, as the standard library recommends when both
    // traits are implemented.
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
|
||||
|
||||
impl Ord for Scalar {
|
||||
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
||||
self.0.partial_cmp(&other.0).unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
/// A project ID.
///
/// A newtype over the string form of a project identifier, typically derived
/// from a [`ResourcePath`] (see the `From<&ResourcePath>` impl below).
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Id(String);
|
||||
|
||||
impl Id {
    /// Creates a new project Id.
    pub fn new(s: String) -> Self {
        Id(s)
    }

    /// Creates a new project Id from a world.
    ///
    /// Returns `None` when the world's entry state has no main file. The id
    /// is formed from the main file's rootless path, normalized to forward
    /// slashes and wrapped as a `file:` resource path.
    pub fn from_world<F: CompilerFeat>(world: &CompilerWorld<F>) -> Option<Self> {
        let entry = world.entry_state();
        let id = unix_slash(entry.main()?.vpath().as_rootless_path());

        let path = &ResourcePath::from_user_sys(Path::new(&id));
        Some(path.into())
    }
}
|
||||
|
||||
impl From<&ResourcePath> for Id {
    /// Uses the rendered `scheme:path` form of the resource path as the id.
    fn from(value: &ResourcePath) -> Self {
        Self::new(value.to_string())
    }
}
|
||||
|
||||
impl fmt::Display for Id {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.0)
|
||||
}
|
||||
}
|
||||
|
||||
/// Implements [`fmt::Display`] for a clap [`ValueEnum`] by rendering the
/// variant's clap value name (e.g. `onSave`), keeping the CLI spelling and
/// the display form in sync.
macro_rules! display_possible_values {
    ($ty:ty) => {
        impl fmt::Display for $ty {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                self.to_possible_value()
                    .expect("no values are skipped")
                    .get_name()
                    .fmt(f)
            }
        }
    };
}
|
||||
|
||||
/// When to export an output file.
///
/// By default, a `tinymist compile` only provides input information and
/// doesn't change the `when` field. However, you can still specify a `when`
/// argument to override the default behavior for specific tasks.
///
/// ## Examples
///
/// ```bash
/// tinymist compile --when onSave main.typ
/// alias typst="tinymist compile --when=onSave"
/// typst compile main.typ
/// ```
#[derive(Debug, Copy, Clone, Eq, PartialEq, Default, Hash, ValueEnum, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[clap(rename_all = "camelCase")]
pub enum TaskWhen {
    /// Never watch to run task.
    #[default]
    Never,
    /// Run task on saving the document, i.e. on `textDocument/didSave` events.
    OnSave,
    /// Run task on typing, i.e. on `textDocument/didChange` events.
    OnType,
    /// *DEPRECATED* Run task when a document has a title and on saved, which is
    /// useful to filter out template files.
    ///
    /// Note: this variant is deprecated and may be removed in the future.
    OnDocumentHasTitle,
}
|
||||
|
||||
impl TaskWhen {
|
||||
/// Returns `true` if the task should never be run automatically.
|
||||
pub fn is_never(&self) -> bool {
|
||||
matches!(self, TaskWhen::Never)
|
||||
}
|
||||
}
|
||||
|
||||
display_possible_values!(TaskWhen);
|
||||
|
||||
/// Which format to use for the generated output file.
///
/// Displayed via its clap value name (`pdf`, `png`, `svg`, `html`).
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, ValueEnum)]
pub enum OutputFormat {
    /// Export to PDF.
    Pdf,
    /// Export to PNG.
    Png,
    /// Export to SVG.
    Svg,
    /// Export to HTML.
    Html,
}

display_possible_values!(OutputFormat);
|
||||
|
||||
/// The path pattern that could be substituted.
///
/// # Examples
/// - `$root` is the root of the project.
/// - `$root/$dir` is the parent directory of the input (main) file.
/// - `$root/main` will help store pdf file to `$root/main.pdf` constantly.
/// - (default) `$root/$dir/$name` will help store pdf file along with the input
///   file.
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct PathPattern(pub String);
|
||||
|
||||
impl fmt::Display for PathPattern {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.write_str(&self.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl PathPattern {
    /// Creates a new path pattern.
    pub fn new(pattern: &str) -> Self {
        Self(pattern.to_owned())
    }

    /// Substitutes the path pattern with `$root`, and `$dir/$name`.
    ///
    /// Returns `None` when substitution is impossible or undesirable (no
    /// root/main entry, or the main file lives in a package).
    pub fn substitute(&self, entry: &EntryState) -> Option<ImmutPath> {
        self.substitute_impl(entry.root(), entry.main())
    }

    // Memoized: substitution is pure in (self, root, main), so repeated
    // compilations with an unchanged entry reuse the computed path.
    #[comemo::memoize]
    fn substitute_impl(&self, root: Option<ImmutPath>, main: Option<FileId>) -> Option<ImmutPath> {
        log::info!("Check path {main:?} and root {root:?} with output directory {self:?}");

        // Both a workspace root and a main file are required.
        let (root, main) = root.zip(main)?;

        // Files in packages are not exported
        if WorkspaceResolver::is_package_file(main) {
            return None;
        }
        // Files without a path are not exported
        let path = main.vpath().resolve(&root)?;

        // todo: handle untitled path
        // Untitled (unsaved) buffers are redirected under the system temp
        // directory instead of the workspace.
        if let Ok(path) = path.strip_prefix("/untitled") {
            let tmp = std::env::temp_dir();
            let path = tmp.join("typst").join(path);
            return Some(path.as_path().into());
        }

        // An empty pattern means "next to the input file".
        if self.0.is_empty() {
            return Some(path.to_path_buf().clean().into());
        }

        // Work with the root-relative part of the main file's path.
        let path = path.strip_prefix(&root).ok()?;
        let dir = path.parent();
        let file_name = path.file_name().unwrap_or_default();

        let w = root.to_string_lossy();
        let f = file_name.to_string_lossy();

        // replace all $root
        let mut path = self.0.replace("$root", &w);
        // `$dir` is only substituted when the main file has a parent; a
        // pattern containing `$dir` otherwise keeps the literal text.
        if let Some(dir) = dir {
            let d = dir.to_string_lossy();
            path = path.replace("$dir", &d);
        }
        path = path.replace("$name", &f);

        // Normalize `.`/`..` components before returning.
        Some(PathBuf::from(path).clean().into())
    }
}
|
||||
|
||||
/// A PDF standard that Typst can enforce conformance with.
///
/// The clap/serde names (`1.7`, `a-2b`) match the typst CLI spellings.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, ValueEnum, Serialize, Deserialize)]
#[allow(non_camel_case_types)]
pub enum PdfStandard {
    /// PDF 1.7.
    #[value(name = "1.7")]
    #[serde(rename = "1.7")]
    V_1_7,
    /// PDF/A-2b.
    #[value(name = "a-2b")]
    #[serde(rename = "a-2b")]
    A_2b,
}

display_possible_values!(PdfStandard);
|
||||
|
||||
/// Implements parsing of page ranges (`1-3`, `4`, `5-`, `-2`), used by the
/// `CompileCommand.pages` argument, through the `FromStr` trait instead of a
/// value parser, in order to generate better errors.
///
/// A `None` bound means "unbounded" on that side.
///
/// See also: <https://github.com/clap-rs/clap/issues/5065>
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Pages(pub RangeInclusive<Option<NonZeroUsize>>);

impl Pages {
    /// Selects the first page.
    // NOTE(review): stored as `1..=` with an open end — confirm callers
    // interpret this as "page one" rather than "page one onward".
    pub const FIRST: Pages = Pages(NonZeroUsize::new(1)..=None);
}
|
||||
|
||||
impl FromStr for Pages {
|
||||
type Err = &'static str;
|
||||
|
||||
fn from_str(value: &str) -> Result<Self, Self::Err> {
|
||||
match value
|
||||
.split('-')
|
||||
.map(str::trim)
|
||||
.collect::<Vec<_>>()
|
||||
.as_slice()
|
||||
{
|
||||
[] | [""] => Err("page export range must not be empty"),
|
||||
[single_page] => {
|
||||
let page_number = parse_page_number(single_page)?;
|
||||
Ok(Pages(Some(page_number)..=Some(page_number)))
|
||||
}
|
||||
["", ""] => Err("page export range must have start or end"),
|
||||
[start, ""] => Ok(Pages(Some(parse_page_number(start)?)..=None)),
|
||||
["", end] => Ok(Pages(None..=Some(parse_page_number(end)?))),
|
||||
[start, end] => {
|
||||
let start = parse_page_number(start)?;
|
||||
let end = parse_page_number(end)?;
|
||||
if start > end {
|
||||
Err("page export range must end at a page after the start")
|
||||
} else {
|
||||
Ok(Pages(Some(start)..=Some(end)))
|
||||
}
|
||||
}
|
||||
[_, _, _, ..] => Err("page export range must have a single hyphen"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Pages {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let start = match self.0.start() {
|
||||
Some(start) => start.to_string(),
|
||||
None => String::from(""),
|
||||
};
|
||||
let end = match self.0.end() {
|
||||
Some(end) => end.to_string(),
|
||||
None => String::from(""),
|
||||
};
|
||||
write!(f, "{start}-{end}")
|
||||
}
|
||||
}
|
||||
|
||||
impl serde::Serialize for Pages {
    /// Serializes as the `start-end` string form (see the `Display` impl).
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        // `collect_str` produces the same string output via `Display`.
        serializer.collect_str(self)
    }
}
|
||||
|
||||
impl<'de> serde::Deserialize<'de> for Pages {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
let value = String::deserialize(deserializer)?;
|
||||
value.parse().map_err(serde::de::Error::custom)
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses a single page number.
///
/// `"0"` gets a dedicated error message, since page numbering is 1-based;
/// any other non-numeric input reports a generic parse failure.
fn parse_page_number(value: &str) -> Result<NonZeroUsize, &'static str> {
    match value {
        "0" => Err("page numbers start at one"),
        other => other.parse().map_err(|_| "not a valid page number"),
    }
}
|
||||
|
||||
/// A resource path.
///
/// Stored as a `(scheme, path)` pair and rendered/parsed as `scheme:path`.
/// Schemes used in this file are `file` (filesystem paths) and `file_id`
/// (package- or root-anchored virtual paths).
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ResourcePath(EcoString, String);
|
||||
|
||||
impl fmt::Display for ResourcePath {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{}:{}", self.0, self.1)
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for ResourcePath {
|
||||
type Err = &'static str;
|
||||
|
||||
fn from_str(value: &str) -> Result<Self, Self::Err> {
|
||||
let mut parts = value.split(':');
|
||||
let scheme = parts.next().ok_or("missing scheme")?;
|
||||
let path = parts.next().ok_or("missing path")?;
|
||||
if parts.next().is_some() {
|
||||
Err("too many colons")
|
||||
} else {
|
||||
Ok(ResourcePath(scheme.into(), path.to_string()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl serde::Serialize for ResourcePath {
    /// Serializes as the `scheme:path` string form (see the `Display` impl).
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        // `collect_str` produces the same string output via `Display`.
        serializer.collect_str(self)
    }
}
|
||||
|
||||
impl<'de> serde::Deserialize<'de> for ResourcePath {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
let value = String::deserialize(deserializer)?;
|
||||
value.parse().map_err(serde::de::Error::custom)
|
||||
}
|
||||
}
|
||||
|
||||
impl ResourcePath {
    /// Creates a new resource path from a user passing system path.
    ///
    /// Relative inputs are stored as-is; absolute inputs are re-expressed
    /// relative to the current working directory. The stored path always
    /// uses forward slashes.
    // NOTE(review): `current_dir()` and `diff_paths` are unwrapped — this
    // panics if the cwd is unavailable or no relative path exists (e.g.
    // different Windows drives); confirm callers guarantee these conditions.
    pub fn from_user_sys(inp: &Path) -> Self {
        let rel = if inp.is_relative() {
            inp.to_path_buf()
        } else {
            let cwd = std::env::current_dir().unwrap();
            pathdiff::diff_paths(inp, &cwd).unwrap()
        };
        let rel = unix_slash(&rel);
        ResourcePath("file".into(), rel.to_string())
    }
    /// Creates a new resource path from a file id.
    ///
    /// Package files render as `file_id:<package-spec><rooted-path>`;
    /// workspace files render as `file_id:$root<rooted-path>`.
    pub fn from_file_id(id: FileId) -> Self {
        let package = id.package();
        match package {
            Some(package) => ResourcePath(
                "file_id".into(),
                format!("{package}{}", unix_slash(id.vpath().as_rooted_path())),
            ),
            None => ResourcePath(
                "file_id".into(),
                format!("$root{}", unix_slash(id.vpath().as_rooted_path())),
            ),
        }
    }

    /// Converts the resource path to a path relative to the `base` (usually the
    /// directory storing the lockfile).
    ///
    /// Returns `None` for non-`file` schemes. A relative stored path is
    /// returned unchanged; an absolute one is re-expressed relative to
    /// `base` when possible, falling back to the absolute path itself.
    pub fn to_rel_path(&self, base: &Path) -> Option<PathBuf> {
        if self.0 == "file" {
            let path = Path::new(&self.1);
            if path.is_absolute() {
                Some(pathdiff::diff_paths(path, base).unwrap_or_else(|| path.to_owned()))
            } else {
                Some(path.to_owned())
            }
        } else {
            None
        }
    }

    /// Converts the resource path to an absolute file system path.
    ///
    /// Returns `None` for non-`file` schemes. An absolute stored path is
    /// returned as-is; a relative one is joined onto `base`.
    pub fn to_abs_path(&self, base: &Path) -> Option<PathBuf> {
        if self.0 == "file" {
            let path = Path::new(&self.1);
            if path.is_absolute() {
                Some(path.to_owned())
            } else {
                Some(base.join(path))
            }
        } else {
            None
        }
    }
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;
    use typst::syntax::VirtualPath;

    // Verifies `$dir`/`$name` substitution, `..` normalization, and fixed
    // target directories against a rooted entry at /root/dir1/dir2/file.txt.
    #[test]
    fn test_substitute_path() {
        let root = Path::new("/root");
        let entry =
            EntryState::new_rooted(root.into(), Some(VirtualPath::new("/dir1/dir2/file.txt")));

        assert_eq!(
            PathPattern::new("/substitute/$dir/$name").substitute(&entry),
            Some(PathBuf::from("/substitute/dir1/dir2/file.txt").into())
        );
        // `..` components are cleaned out of the substituted path.
        assert_eq!(
            PathPattern::new("/substitute/$dir/../$name").substitute(&entry),
            Some(PathBuf::from("/substitute/dir1/file.txt").into())
        );
        assert_eq!(
            PathPattern::new("/substitute/$name").substitute(&entry),
            Some(PathBuf::from("/substitute/file.txt").into())
        );
        assert_eq!(
            PathPattern::new("/substitute/target/$dir/$name").substitute(&entry),
            Some(PathBuf::from("/substitute/target/dir1/dir2/file.txt").into())
        );
    }
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue