mirror of https://github.com/Myriad-Dreamin/tinymist.git (synced 2025-11-23 12:46:43 +00:00)
fix: ensure that the lockfile is respected (#2000)
parent bd688f354e
commit c3fa0c5cb2
17 changed files with 372 additions and 63 deletions
.github/workflows/ci.yml (vendored, 4 changes)

@@ -76,6 +76,10 @@ jobs:
           path: tinymist-completions.tar.gz
       - name: Test tinymist
         run: cargo test --workspace -- --skip=e2e
+      - name: Test Lockfile (Prepare)
+        run: ./scripts/test-lock.sh
+      - name: Test Lockfile (Check)
+        run: cargo test --package tinymist --lib -- route::tests --show-output --ignored
 
   checks-windows:
     name: Check Minimum Rust version and Tests (Windows)

@@ -28,7 +28,7 @@ RUN cargo install cargo-chef
 COPY . .
 RUN --mount=type=cache,target=/usr/local/cargo/registry \
     --mount=type=cache,target=$SCCACHE_DIR,sharing=locked \
-    cargo chef prepare --recipe-path recipe.json
+    cargo +${RUST_VERSION} chef prepare --recipe-path recipe.json
 
 FROM base as builder
 WORKDIR app
@@ -36,11 +36,11 @@ RUN cargo install cargo-chef
 COPY --from=planner /app/recipe.json recipe.json
 RUN --mount=type=cache,target=/usr/local/cargo/registry \
     --mount=type=cache,target=$SCCACHE_DIR,sharing=locked \
-    cargo chef cook --release --recipe-path recipe.json
+    cargo +${RUST_VERSION} chef cook --release --recipe-path recipe.json
 COPY . .
 RUN --mount=type=cache,target=/usr/local/cargo/registry \
     --mount=type=cache,target=$SCCACHE_DIR,sharing=locked \
-    cargo build -p tinymist --release
+    cargo +${RUST_VERSION} build -p tinymist --release
 
 FROM debian:12
 WORKDIR /app/

@@ -37,36 +37,37 @@ pub struct DocNewArgs {
 
 impl DocNewArgs {
     /// Converts to project input.
-    pub fn to_input(&self) -> ProjectInput {
-        let id: Id = (&self.id).into();
+    pub fn to_input(&self, ctx: CtxPath) -> ProjectInput {
+        let id: Id = self.id.id(ctx);
 
         let root = self
             .root
             .as_ref()
-            .map(|root| ResourcePath::from_user_sys(Path::new(root)));
-        let main = ResourcePath::from_user_sys(Path::new(&self.id.input));
+            .map(|root| ResourcePath::from_user_sys(Path::new(root), ctx));
+        let main = ResourcePath::from_user_sys(Path::new(&self.id.input), ctx);
 
         let font_paths = self
             .font
             .font_paths
             .iter()
-            .map(|p| ResourcePath::from_user_sys(p))
+            .map(|p| ResourcePath::from_user_sys(p, ctx))
             .collect::<Vec<_>>();
 
         let package_path = self
             .package
             .package_path
             .as_ref()
-            .map(|p| ResourcePath::from_user_sys(p));
+            .map(|p| ResourcePath::from_user_sys(p, ctx));
 
         let package_cache_path = self
             .package
             .package_cache_path
             .as_ref()
-            .map(|p| ResourcePath::from_user_sys(p));
+            .map(|p| ResourcePath::from_user_sys(p, ctx));
 
         ProjectInput {
             id: id.clone(),
+            lock_dir: Some(ctx.1.to_path_buf()),
             root,
             main,
             // todo: inputs
@@ -92,12 +93,13 @@ pub struct DocIdArgs {
     pub input: String,
 }
 
-impl From<&DocIdArgs> for Id {
-    fn from(args: &DocIdArgs) -> Self {
-        if let Some(id) = &args.name {
+impl DocIdArgs {
+    /// Converts to a document ID.
+    pub fn id(&self, ctx: CtxPath) -> Id {
+        if let Some(id) = &self.name {
             Id::new(id.clone())
         } else {
-            (&ResourcePath::from_user_sys(Path::new(&args.input))).into()
+            (&ResourcePath::from_user_sys(Path::new(&self.input), ctx)).into()
         }
     }
 }
@@ -172,7 +174,7 @@ pub struct TaskCompileArgs {
 
 impl TaskCompileArgs {
     /// Convert the arguments to a project task.
-    pub fn to_task(self, doc_id: Id) -> Result<ApplyProjectTask> {
+    pub fn to_task(self, doc_id: Id, cwd: &Path) -> Result<ApplyProjectTask> {
         let new_task_id = self.task_name.map(Id::new);
         let task_id = new_task_id.unwrap_or(doc_id.clone());
 
@@ -195,6 +197,17 @@ impl TaskCompileArgs {
             OutputFormat::Pdf
         };
 
+        let output = self.output.as_ref().map(|output| {
+            let output = Path::new(output);
+            let output = if output.is_absolute() {
+                output.to_path_buf()
+            } else {
+                cwd.join(output)
+            };
+
+            PathPattern::new(&output.with_extension("").to_string_lossy())
+        });
+
         let when = self.when.unwrap_or(TaskWhen::Never);
 
         let mut transforms = vec![];
@@ -207,7 +220,7 @@ impl TaskCompileArgs {
 
         let export = ExportTask {
             when,
-            output: self.output.as_deref().map(PathPattern::new),
+            output,
             transform: transforms,
         };
 

@@ -8,6 +8,7 @@ use ecow::{eco_vec, EcoVec};
 use tinymist_std::error::prelude::*;
 use tinymist_std::path::unix_slash;
 use tinymist_std::{bail, ImmutPath};
+use tinymist_task::CtxPath;
 use typst::diag::EcoString;
 use typst::World;
 
@@ -25,7 +26,9 @@ impl LockFile {
         self.task.iter().find(|i| &i.id == id)
     }
 
-    pub fn replace_document(&mut self, input: ProjectInput) {
+    pub fn replace_document(&mut self, mut input: ProjectInput) {
+        input.lock_dir = None;
+        let input = input;
         let id = input.id.clone();
         let index = self.document.iter().position(|i| i.id == id);
         if let Some(index) = index {
@@ -35,7 +38,14 @@ impl LockFile {
         }
     }
 
-    pub fn replace_task(&mut self, task: ApplyProjectTask) {
+    pub fn replace_task(&mut self, mut task: ApplyProjectTask) {
+        if let Some(pat) = task.task.as_export_mut().and_then(|t| t.output.as_mut()) {
+            let rel = pat.clone().relative_to(self.lock_dir.as_ref().unwrap());
+            *pat = rel;
+        }
+
+        let task = task;
+
         let id = task.id().clone();
         let index = self.task.iter().position(|i| *i.id() == id);
         if let Some(index) = index {
@@ -146,6 +156,8 @@ impl LockFile {
 
         let mut state = if old_data.trim().is_empty() {
             LockFile {
+                // todo: reduce cost
+                lock_dir: Some(ImmutPath::from(cwd)),
                 document: vec![],
                 task: vec![],
                 route: eco_vec![],
@@ -169,7 +181,9 @@ impl LockFile {
                 }
             }
 
-            old_state.migrate()?
+            let mut lf = old_state.migrate()?;
+            lf.lock_dir = Some(ImmutPath::from(cwd));
+            lf
         };
 
         f(&mut state)?;
@@ -213,7 +227,9 @@ impl LockFile {
         let state = toml::from_str::<LockFileCompat>(data)
             .context_ut("tinymist.lock file is not a valid TOML file")?;
 
-        state.migrate()
+        let mut lf = state.migrate()?;
+        lf.lock_dir = Some(dir.into());
+        Ok(lf)
     }
 }
 
@@ -238,17 +254,18 @@ pub struct LockFileUpdate {
 }
 
 impl LockFileUpdate {
-    pub fn compiled(&mut self, world: &LspWorld) -> Option<Id> {
-        let id = Id::from_world(world)?;
+    pub fn compiled(&mut self, world: &LspWorld, ctx: CtxPath) -> Option<Id> {
+        let id = Id::from_world(world, ctx)?;
 
-        let root = ResourcePath::from_user_sys(Path::new("."));
-        let main = ResourcePath::from_user_sys(world.path_for_id(world.main()).ok()?.as_path());
+        let root = ResourcePath::from_user_sys(Path::new("."), ctx);
+        let main =
+            ResourcePath::from_user_sys(world.path_for_id(world.main()).ok()?.as_path(), ctx);
 
         let font_resolver = &world.font_resolver;
         let font_paths = font_resolver
             .font_paths()
             .iter()
-            .map(|p| ResourcePath::from_user_sys(p))
+            .map(|p| ResourcePath::from_user_sys(p, ctx))
             .collect::<Vec<_>>();
 
         // let system_font = font_resolver.system_font();
@@ -256,10 +273,10 @@ impl LockFileUpdate {
         let registry = &world.registry;
         let package_path = registry
             .package_path()
-            .map(|p| ResourcePath::from_user_sys(p));
+            .map(|p| ResourcePath::from_user_sys(p, ctx));
         let package_cache_path = registry
             .package_cache_path()
-            .map(|p| ResourcePath::from_user_sys(p));
+            .map(|p| ResourcePath::from_user_sys(p, ctx));
 
         // todo: freeze the package paths
         let _ = package_cache_path;
@@ -269,6 +286,7 @@ impl LockFileUpdate {
 
         let input = ProjectInput {
             id: id.clone(),
+            lock_dir: Some(ctx.1.to_path_buf()),
             root: Some(root),
             main,
             inputs: vec![],
@@ -325,7 +343,7 @@ impl LockFileUpdate {
         let id_hi = id >> 12;
 
         let hash_str =
-            format!("{root_lo:03x}/{root_hi:013x}/{id_lo:03x}/{id_hi:016x}");
+            format!("{root_lo:03x}/{root_hi:013x}/{id_lo:03x}/{id_hi:013x}");
 
         let cache_dir = cache_dir.join("tinymist/projects").join(hash_str);
         let _ = std::fs::create_dir_all(&cache_dir);

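Read together with the `TaskCompileArgs::to_task` change above (output resolved against an explicit cwd, extension stripped) and the `PathPattern::relative_to` helper added in a later hunk, the `replace_task` change is what keeps export outputs in the lockfile relative to the lockfile's directory. A minimal, std-only sketch of that pipeline, with `strip_prefix` standing in for `tinymist_std::path::diff` (a simplification: it only relativizes outputs under the lock directory), and with illustrative names not taken from the crate:

use std::path::{Path, PathBuf};

// Sketch: turn a CLI output argument into the pattern stored in tinymist.lock.
fn lockfile_output_pattern(output_arg: &str, cwd: &Path, lock_dir: &Path) -> String {
    // Resolve the argument against the explicit cwd (cf. TaskCompileArgs::to_task).
    let output = Path::new(output_arg);
    let abs: PathBuf = if output.is_absolute() {
        output.to_path_buf()
    } else {
        cwd.join(output)
    };
    // Drop the extension; the export task derives it from the output format.
    let abs = abs.with_extension("");
    // Store the pattern relative to the lock directory when possible (cf. replace_task).
    match abs.strip_prefix(lock_dir) {
        Ok(rel) => rel.to_string_lossy().into_owned(),
        Err(_) => abs.to_string_lossy().into_owned(),
    }
}

fn main() {
    let lock_dir = Path::new("/work/tests/workspaces/book");
    let pattern =
        lockfile_output_pattern("tests/workspaces/book/book.pdf", Path::new("/work"), lock_dir);
    // Matches the `output = "book"` entry in the committed tinymist.lock below.
    assert_eq!(pattern, "book");
}
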
@@ -1,5 +1,5 @@
 use std::hash::Hash;
-use std::path::PathBuf;
+use std::path::{Path, PathBuf};
 
 use ecow::EcoVec;
 use tinymist_std::error::prelude::*;
@@ -53,6 +53,9 @@ impl LockFileCompat {
 /// A lock file storing project information.
 #[derive(Debug, Default, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
 pub struct LockFile {
+    /// The directory where stores the lock file.
+    #[serde(skip)]
+    pub lock_dir: Option<ImmutPath>,
     // The lock file version.
     // version: String,
     /// The project's document (input).
@@ -72,6 +75,9 @@ pub struct LockFile {
 pub struct ProjectInput {
     /// The project's ID.
     pub id: Id,
+    /// The cwd of the project when relative paths will be resolved.
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub lock_dir: Option<PathBuf>,
     /// The path to the root directory of the project.
     #[serde(skip_serializing_if = "Option::is_none")]
     pub root: Option<ResourcePath>,
@@ -93,6 +99,19 @@ pub struct ProjectInput {
     pub package_cache_path: Option<ResourcePath>,
 }
 
+impl ProjectInput {
+    /// Returns a new project input relative to the provided lock directory.
+    pub fn relative_to(&self, that: &Path) -> Self {
+        if let Some(lock_dir) = &self.lock_dir {
+            if lock_dir == that {
+                return self.clone();
+            }
+        }
+
+        todo!()
+    }
+}
+
 /// A project route specifier.
 #[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize)]
 #[serde(rename_all = "kebab-case")]

@@ -116,6 +116,22 @@ impl ProjectTask {
         })
     }
 
+    /// Returns the export configuration of a task.
+    pub fn as_export_mut(&mut self) -> Option<&mut ExportTask> {
+        Some(match self {
+            Self::Preview(..) => return None,
+            Self::ExportPdf(task) => &mut task.export,
+            Self::ExportPng(task) => &mut task.export,
+            Self::ExportSvg(task) => &mut task.export,
+            Self::ExportHtml(task) => &mut task.export,
+            Self::ExportSvgHtml(task) => &mut task.export,
+            Self::ExportTeX(task) => &mut task.export,
+            Self::ExportMd(task) => &mut task.export,
+            Self::ExportText(task) => &mut task.export,
+            Self::Query(task) => &mut task.export,
+        })
+    }
+
     /// Returns extension of the artifact.
     pub fn extension(&self) -> &str {
         match self {

@@ -77,11 +77,12 @@ impl Id {
     }
 
     /// Creates a new project Id from a world.
-    pub fn from_world<F: CompilerFeat>(world: &CompilerWorld<F>) -> Option<Self> {
+    pub fn from_world<F: CompilerFeat>(world: &CompilerWorld<F>, ctx: CtxPath) -> Option<Self> {
         let entry = world.entry_state();
         let id = unix_slash(entry.main()?.vpath().as_rootless_path());
 
-        let path = &ResourcePath::from_user_sys(Path::new(&id));
+        // todo: entry root may not be set, so we should use the cwd
+        let path = &ResourcePath::from_user_sys(Path::new(&id), ctx);
         Some(path.into())
     }
 }
@@ -121,6 +122,25 @@ impl PathPattern {
         Self(pattern.into())
     }
 
+    /// Creates a new path pattern from a string.
+    pub fn relative_to(self, base: &Path) -> Self {
+        if self.0.is_empty() {
+            return self;
+        }
+
+        let path = Path::new(self.0.as_str());
+        if path.is_absolute() {
+            let rel_path = tinymist_std::path::diff(path, base);
+
+            match rel_path {
+                Some(rel) => PathPattern(unix_slash(&rel).into()),
+                None => self,
+            }
+        } else {
+            self
+        }
+    }
+
     /// Substitutes the path pattern with `$root`, and `$dir/$name`.
     pub fn substitute(&self, entry: &EntryState) -> Option<ImmutPath> {
         self.substitute_impl(entry.root(), entry.main())
@@ -300,18 +320,29 @@ impl<'de> serde::Deserialize<'de> for ResourcePath {
     }
 }
 
+// todo: The ctx path looks not quite maintainable. But we only target to make
+// things correct, then back to make code good.
+pub type CtxPath<'a, 'b> = (/* cwd */ &'a Path, /* lock_dir */ &'b Path);
+
 impl ResourcePath {
     /// Creates a new resource path from a user passing system path.
-    pub fn from_user_sys(inp: &Path) -> Self {
-        let rel = if inp.is_relative() {
+    pub fn from_user_sys(inp: &Path, (cwd, lock_dir): CtxPath) -> Self {
+        let abs = if inp.is_absolute() {
             inp.to_path_buf()
         } else {
-            let cwd = std::env::current_dir().unwrap();
-            tinymist_std::path::diff(inp, &cwd).unwrap()
+            cwd.join(inp)
         };
-        let rel = unix_slash(&rel);
+        let resource_path = if let Some(rel) = tinymist_std::path::diff(&abs, lock_dir) {
+            rel
+        } else {
+            abs
+        };
+        // todo: clean is not posix compatible,
+        // for example /symlink/../file is not equivalent to /file
+        let rel = unix_slash(&resource_path.clean());
         ResourcePath("file".into(), rel.to_string())
     }
 
     /// Creates a new resource path from a file id.
     pub fn from_file_id(id: FileId) -> Self {
         let package = id.package();
@@ -327,6 +358,20 @@ impl ResourcePath {
         }
     }
 
+    pub fn relative_to(&self, base: &Path) -> Option<Self> {
+        if self.0 == "file" {
+            let path = Path::new(&self.1);
+            if path.is_absolute() {
+                let rel_path = tinymist_std::path::diff(path, base)?;
+                Some(ResourcePath(self.0.clone(), unix_slash(&rel_path)))
+            } else {
+                Some(ResourcePath(self.0.clone(), self.1.clone()))
+            }
+        } else {
+            Some(self.clone())
+        }
+    }
+
     /// Converts the resource path to a path relative to the `base` (usually the
     /// directory storing the lockfile).
     pub fn to_rel_path(&self, base: &Path) -> Option<PathBuf> {

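The `CtxPath` alias and the reworked `from_user_sys` above are the heart of the fix: user-supplied paths are now anchored to an explicit `(cwd, lock_dir)` pair rather than `std::env::current_dir()`, so the same lockfile content is produced no matter where the process is started from. A small runnable sketch of the same idea, again using `strip_prefix` instead of `tinymist_std::path::diff` and skipping the `clean()` step (both simplifications, not the crate's implementation):

use std::path::{Path, PathBuf};

// Sketch: resolve a user path against the explicit cwd, then prefer a
// lockfile-relative form so entries like `main = "file:main.typ"` stay portable.
fn resolve_resource(inp: &Path, cwd: &Path, lock_dir: &Path) -> PathBuf {
    let abs = if inp.is_absolute() {
        inp.to_path_buf()
    } else {
        cwd.join(inp)
    };
    match abs.strip_prefix(lock_dir) {
        Ok(rel) => rel.to_path_buf(),
        // Clone keeps the sketch simple; paths outside the lock dir stay absolute.
        Err(_) => abs.clone(),
    }
}

fn main() {
    // The scenario from scripts/test-lock.sh: invoked from the repository root,
    // while the lockfile lives in tests/workspaces/book.
    let cwd = Path::new("/work");
    let lock_dir = Path::new("/work/tests/workspaces/book");
    let main = resolve_resource(Path::new("tests/workspaces/book/main.typ"), cwd, lock_dir);
    assert_eq!(main, PathBuf::from("main.typ"));
}
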
@@ -1,6 +1,6 @@
 use std::{path::Path, sync::Arc};
 
-use reflexo_typst::{path::unix_slash, typst::prelude::EcoVec, LazyHash};
+use reflexo_typst::{path::unix_slash, typst::prelude::EcoVec, EntryReader, LazyHash};
 use rpds::RedBlackTreeMapSync;
 use tinymist_std::{hash::FxHashMap, ImmutPath};
 use typst::diag::EcoString;
@@ -12,6 +12,7 @@ pub struct ProjectRouteState {
     path_routes: FxHashMap<ImmutPath, RoutePathState>,
 }
 
+#[derive(Debug)]
 pub struct ProjectResolution {
     pub lock_dir: ImmutPath,
     pub project_id: Id,
@@ -95,8 +96,10 @@ impl ProjectRouteState {
         snap: &LspCompileSnapshot,
     ) -> Option<()> {
         let path_route = self.path_routes.get_mut(&lock_dir)?;
-        let id = Id::from_world(&snap.world)?;
+        // todo: rootless
+        let root = snap.world.entry_state().root()?;
+
+        let id = Id::from_world(&snap.world, (&root, &lock_dir))?;
         let deps = snap.world.depended_fs_paths();
         let material = ProjectPathMaterial::from_deps(id, deps);
 
@@ -121,7 +124,7 @@ impl ProjectRouteState {
                 return None;
             }
         });
-        log::info!("loaded lock at {path:?}");
+        log::debug!("loaded lock at {path:?}");
 
         let root: EcoString = unix_slash(path).into();
         let root_hash = tinymist_std::hash::hash128(&root);
@@ -151,7 +154,7 @@ impl ProjectRouteState {
     }
 
     fn read_material(&self, entry_path: &Path) -> Option<ProjectPathMaterial> {
-        log::info!("check material at {entry_path:?}");
+        log::debug!("check material at {entry_path:?}");
         let name = entry_path.file_name().unwrap_or(entry_path.as_os_str());
         if name != "path-material.json" {
             return None;
@@ -193,3 +196,37 @@ struct RoutePathState {
     routes: Arc<FxHashMap<ImmutPath, Id>>,
     cache_dir: Option<ImmutPath>,
 }
+
+#[cfg(test)]
+mod tests {
+    use reflexo::path::PathClean;
+
+    use super::*;
+
+    // todo: enable me
+    #[test]
+    #[ignore]
+    fn test_resolve_chapter() {
+        let mut state = ProjectRouteState::default();
+
+        let lock_dir = Path::new(env!("CARGO_MANIFEST_DIR")).join("../../tests/workspaces/book/");
+        let lock_dir = lock_dir.clean();
+
+        let leaf = lock_dir.join("chapters/chapter1.typ").into();
+
+        // Resolve the path
+        let resolution = state.resolve(&leaf);
+        assert!(resolution.is_some(), "Resolution should not be None");
+        let resolution = resolution.unwrap();
+        assert_eq!(
+            resolution.lock_dir,
+            ImmutPath::from(lock_dir),
+            "Lock directory should match"
+        );
+        assert_eq!(
+            resolution.project_id,
+            Id::new("file:main.typ".to_owned()),
+            "Project ID should match"
+        );
+    }
+}

|
||||||
|
|
||||||
let update_dep = lock_dir.clone().map(|lock_dir| {
|
let update_dep = lock_dir.clone().map(|lock_dir| {
|
||||||
|snap: LspComputeGraph| async move {
|
|snap: LspComputeGraph| async move {
|
||||||
let mut updater = update_lock(lock_dir);
|
let mut updater = update_lock(lock_dir.clone());
|
||||||
let world = snap.world();
|
let world = snap.world();
|
||||||
let doc_id = updater.compiled(world)?;
|
// todo: rootless.
|
||||||
|
let root_dir = world.entry_state().root()?;
|
||||||
|
let doc_id = updater.compiled(world, (&root_dir, &lock_dir))?;
|
||||||
|
|
||||||
updater.update_materials(doc_id.clone(), world.depended_fs_paths());
|
updater.update_materials(doc_id.clone(), world.depended_fs_paths());
|
||||||
updater.route(doc_id, PROJECT_ROUTE_USER_ACTION_PRIORITY);
|
updater.route(doc_id, PROJECT_ROUTE_USER_ACTION_PRIORITY);
|
||||||
|
|
|
||||||
|
|
@@ -7,7 +7,7 @@ use std::sync::{Arc, OnceLock};
 
 use reflexo::ImmutPath;
 use reflexo_typst::{Bytes, CompilationTask, ExportComputation};
-use tinymist_project::LspWorld;
+use tinymist_project::{LspWorld, PROJECT_ROUTE_USER_ACTION_PRIORITY};
 use tinymist_std::error::prelude::*;
 use tinymist_std::fs::paths::write_atomic;
 use tinymist_std::path::PathClean;
@@ -196,7 +196,9 @@ impl ExportTask {
         static EXPORT_ID: AtomicUsize = AtomicUsize::new(0);
         let export_id = EXPORT_ID.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
 
-        log::debug!("ExportTask({export_id}): exporting {entry:?} to {write_to:?}");
+        log::debug!(
+            "ExportTask({export_id},lock={lock_dir:?}): exporting {entry:?} to {write_to:?}"
+        );
         if let Some(e) = write_to.parent() {
             if !e.exists() {
                 std::fs::create_dir_all(e).context("failed to create directory")?;
@@ -204,15 +206,18 @@ impl ExportTask {
         }
 
         let _: Option<()> = lock_dir.and_then(|lock_dir| {
-            let mut updater = crate::project::update_lock(lock_dir);
+            let mut updater = crate::project::update_lock(lock_dir.clone());
+            let root = graph.world().entry_state().root()?;
 
-            let doc_id = updater.compiled(graph.world())?;
+            let doc_id = updater.compiled(graph.world(), (&root, &lock_dir))?;
 
             updater.task(ApplyProjectTask {
                 id: doc_id.clone(),
-                document: doc_id,
+                document: doc_id.clone(),
                 task: task.clone(),
             });
+            updater.update_materials(doc_id.clone(), graph.world().depended_fs_paths());
+            updater.route(doc_id, PROJECT_ROUTE_USER_ACTION_PRIORITY);
             updater.commit();
 
             Some(())

@@ -111,22 +111,33 @@ impl LockFileExt for LockFile {
 
 /// Runs project compilation(s)
 pub async fn compile_main(args: CompileArgs) -> Result<()> {
-    // Identifies the input and output
-    let input = args.compile.declare.to_input();
-    let output = args.compile.to_task(input.id.clone())?;
+    let cwd = std::env::current_dir().context("cannot get cwd")?;
+    // todo: respect the name of the lock file
 
     // Saves the lock file if the flags are set
     let save_lock = args.save_lock || args.lockfile.is_some();
-    // todo: respect the name of the lock file
     let lock_dir: ImmutPath = if let Some(lockfile) = args.lockfile {
-        lockfile.parent().context("no parent")?.into()
+        let lockfile = if lockfile.is_absolute() {
+            lockfile
     } else {
-        std::env::current_dir().context("lock directory")?.into()
+            cwd.join(lockfile)
     };
+        lockfile
+            .parent()
+            .context("lock file must have a parent directory")?
+            .into()
+    } else {
+        cwd.as_path().into()
+    };
+
+    // Identifies the input and output
+    let input = args.compile.declare.to_input((&cwd, &lock_dir));
+    let output = args.compile.to_task(input.id.clone(), &cwd)?;
 
     if save_lock {
         LockFile::update(&lock_dir, |state| {
-            state.replace_document(input.clone());
+            state.replace_document(input.relative_to(&lock_dir));
             state.replace_task(output.clone());
 
             Ok(())
@@ -365,13 +376,15 @@ fn shell_build_script(shell: Shell) -> Result<String> {
 
 /// Project document commands' main
 pub fn project_main(args: DocCommands) -> Result<()> {
-    LockFile::update(Path::new("."), |state| {
+    let cwd = std::env::current_dir().context("cannot get cwd")?;
+    LockFile::update(&cwd, |state| {
+        let ctx: (&Path, &Path) = (&cwd, &cwd);
         match args {
             DocCommands::New(args) => {
-                state.replace_document(args.to_input());
+                state.replace_document(args.to_input(ctx));
             }
             DocCommands::Configure(args) => {
-                let id: Id = (&args.id).into();
+                let id: Id = args.id.id(ctx);
 
                 state.route.push(ProjectRoute {
                     id: id.clone(),
@@ -386,12 +399,14 @@ pub fn project_main(args: DocCommands) -> Result<()> {
 
 /// Project task commands' main
 pub fn task_main(args: TaskCommands) -> Result<()> {
-    LockFile::update(Path::new("."), |state| {
+    let cwd = std::env::current_dir().context("cannot get cwd")?;
+    LockFile::update(&cwd, |state| {
+        let ctx: (&Path, &Path) = (&cwd, &cwd);
         let _ = state;
         match args {
            #[cfg(feature = "preview")]
            TaskCommands::Preview(args) => {
-                let input = args.declare.to_input();
+                let input = args.declare.to_input(ctx);
                 let id = input.id.clone();
                 state.replace_document(input);
                 let _ = state.preview(id, &args);

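The `compile_main` hunk above determines the lock directory before any other path is interpreted: a relative `--lockfile` is first resolved against the cwd and its parent directory becomes the lock dir; without `--lockfile`, the cwd itself is used. A reduced sketch under those assumptions (error handling collapsed to `Option`, names illustrative rather than the crate's API):

use std::path::{Path, PathBuf};

/// Sketch of the lock-directory selection in `compile_main`.
fn lock_dir(lockfile: Option<&Path>, cwd: &Path) -> Option<PathBuf> {
    match lockfile {
        Some(lockfile) => {
            // A relative --lockfile is anchored to the cwd first.
            let lockfile = if lockfile.is_absolute() {
                lockfile.to_path_buf()
            } else {
                cwd.join(lockfile)
            };
            // The lockfile's parent directory is the base for all relative paths.
            lockfile.parent().map(Path::to_path_buf)
        }
        None => Some(cwd.to_path_buf()),
    }
}

fn main() {
    // The scenario exercised by scripts/test-lock.sh below.
    let cwd = Path::new("/work");
    let dir = lock_dir(Some(Path::new("tests/workspaces/book/tinymist.lock")), cwd);
    assert_eq!(dir, Some(PathBuf::from("/work/tests/workspaces/book")));
}
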
@@ -3,7 +3,10 @@
 
 FROM debian:12 AS builder
 
-RUN apt-get update && apt-get install -y \
+RUN --mount=target=/var/lib/apt/lists,type=cache,sharing=locked \
+    --mount=target=/var/cache/apt,type=cache,sharing=locked \
+    rm -f /etc/apt/apt.conf.d/docker-clean && \
+    apt-get update && apt-get install -y \
     git \
     file \
     ninja-build gettext cmake unzip curl build-essential
@@ -17,9 +20,19 @@ FROM myriaddreamin/tinymist:0.13.22 as tinymist
 FROM debian:12
 
 COPY --from=builder /neovim/build/nvim-linux-x86_64.deb /tmp/nvim-linux-x86_64.deb
-RUN apt-get update && apt-get install -y curl git ripgrep build-essential unzip
-RUN apt-get update && apt-get install -y python3
-RUN apt-get install -y /tmp/nvim-linux-x86_64.deb \
+
+RUN --mount=target=/var/lib/apt/lists,type=cache,sharing=locked \
+    --mount=target=/var/cache/apt,type=cache,sharing=locked \
+    rm -f /etc/apt/apt.conf.d/docker-clean && \
+    apt-get update && apt-get install -y curl git ripgrep build-essential unzip
+RUN --mount=target=/var/lib/apt/lists,type=cache,sharing=locked \
+    --mount=target=/var/cache/apt,type=cache,sharing=locked \
+    rm -f /etc/apt/apt.conf.d/docker-clean && \
+    apt-get update && apt-get install -y python3
+RUN --mount=target=/var/lib/apt/lists,type=cache,sharing=locked \
+    --mount=target=/var/cache/apt,type=cache,sharing=locked \
+    rm -f /etc/apt/apt.conf.d/docker-clean && \
+    apt-get install -y /tmp/nvim-linux-x86_64.deb \
     && rm /tmp/nvim-linux-x86_64.deb
 
 RUN useradd --create-home --shell /bin/bash runner

editors/neovim/spec/lockfile_spec.lua (new file, 71 lines)

@@ -0,0 +1,71 @@
+---@brief [[
+--- Tests for export functionalities.
+---@brief ]]
+
+local fixtures = require 'spec.fixtures'
+local helpers = require 'spec.helpers'
+
+-- async async
+local util = require "plenary.async.util"
+local async = require('plenary.async')
+
+local async_tests = require "plenary.async.tests"
+
+require('tinymist').setup {
+  lsp = {
+    init_options = {
+      projectResolution = 'lockDatabase',
+      exportPdf = 'onType',
+      outputPath = '/home/runner/test/$name',
+      development = true,
+      systemFonts = false,
+    },
+  }
+}
+
+local defer_swapped = function(timeout, callback)
+  vim.defer_fn(callback, timeout)
+end
+
+async_tests.describe('Lockfile', function()
+  assert.is.empty(vim.lsp.get_clients { bufnr = 0, name = 'tinymist', _uninitialized = true })
+
+  async_tests.it('pdf of main is created onType', function()
+    local pdf_path = '/home/runner/test/main.pdf'
+    local pdf_sub_path = '/home/runner/test/chapter1.pdf'
+    assert.is.same(nil, vim.uv.fs_stat(pdf_path), 'PDF file should not be created before testing')
+    assert.is.same(nil, vim.uv.fs_stat(pdf_sub_path), 'PDF sub file should not be created before testing')
+
+    local pdf_exported = async.wrap(function(cb)
+      require('tinymist').subscribeDevEvent(
+        function(result)
+          if result.type == 'export' and result.needExport
+          then
+            cb(result) -- resolve the promise when the export event is received
+            return true -- unregister the callback after receiving the event
+          end
+        end)
+
+      -- defer 2000ms and resolve a nil
+      defer_swapped(2000, function()
+        cb(nil) -- resolve the promise after 2 seconds
+      end)
+
+      vim.cmd.edit(fixtures.project.some_nested_existing_file)
+      assert.is.same(1, #vim.lsp.get_clients { bufnr = 0, name = 'tinymist', _uninitialized = true })
+      --- append a text to current buffer
+      helpers.insert('This is a test export.\n')
+      vim.cmd.sleep('30m')
+      --- append a text to current buffer
+      helpers.insert('This is a test export.\n')
+      vim.cmd.sleep('30m')
+
+    end, 1)()
+
+    assert.is_not.same(nil, pdf_exported, 'PDF export should be triggered on type')
+    assert.is.same('onType', pdf_exported.when, 'Export is when = onType')
+
+    assert.is.same(nil, vim.uv.fs_stat(pdf_sub_path), 'PDF file should not be created because of the lockfile')
+    assert.is_not.same(nil, vim.uv.fs_stat(pdf_path), 'PDF file should be created after typing')
+  end)
+end)

@@ -34,6 +34,31 @@ def run_tests(test_files=None):
     subprocess.run(command, check=True)
 
 
+def prepare():
+    bookdir = os.environ.get("BOOKDIR", "/home/runner/dev/workspaces/book")
+    if not os.path.exists(bookdir):
+        print(f"Book directory {bookdir} does not exist.")
+        sys.exit(1)
+
+    # compile
+    compile_command = [
+        "tinymist",
+        "compile",
+        "--lockfile",
+        os.path.join(bookdir, "tinymist.lock"),
+        os.path.join(bookdir, "main.typ"),
+        os.path.join(bookdir, "book.pdf"),
+    ]
+
+    try:
+        subprocess.run(compile_command, check=True)
+    except subprocess.CalledProcessError as e:
+        print(f"Compilation failed: {e}")
+        sys.exit(1)
+
+    print("Compilation completed successfully.")
+
+
 if __name__ == "__main__":
     # Check if any test files are provided as command line arguments
     if len(sys.argv) > 1:
@@ -41,4 +66,5 @@ if __name__ == "__main__":
     else:
         test_files = None
 
+    prepare()
     run_tests(test_files)

@@ -33,6 +33,7 @@
     "lint-fix": "eslint editors/vscode/src --fix",
     "benches": "cargo bench --workspace",
     "bench": "cargo bench --workspace --bench",
+    "test:nvim": "cd editors/neovim && ./bootstrap.sh test",
    "test:grammar": "cd syntaxes/textmate && yarn run test",
    "build:typlite": "cargo build --bin typlite",
    "typlite": "target/debug/typlite",

scripts/test-lock.sh (new executable file, 3 lines)

@@ -0,0 +1,3 @@
+bookdir=tests/workspaces/book
+# typst compile ${bookdir}/main.typ ${bookdir}/book.pdf
+cargo run --bin tinymist -- compile --lockfile ${bookdir}/tinymist.lock ${bookdir}/main.typ ${bookdir}/book.pdf

tests/workspaces/book/tinymist.lock (new file, 21 lines)

@@ -0,0 +1,21 @@
+# This file is automatically @generated by tinymist.
+# It is not intended for manual editing.
+version = "0.1.0-beta0"
+
+[[document]]
+id = "file:main.typ"
+inputs = []
+main = "file:main.typ"
+root = "file:."
+system-fonts = true
+
+[[route]]
+id = "file:main.typ"
+priority = 256
+
+[[task]]
+document = "file:main.typ"
+id = "file:main.typ"
+output = "book"
+type = "export-pdf"
+when = "never"
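For completeness, the lockfile is plain TOML, so it can be inspected with the `toml` and `serde` crates. A hedged sketch with ad-hoc structs whose fields mirror the file above; these are not the crate's types (the real ones are the `LockFile`/`ProjectInput`/`ProjectTask` structs touched earlier in this commit):

// Sketch only; requires the `serde` (derive) and `toml` crates.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct LockData {
    version: String,
    #[serde(default)]
    document: Vec<Document>,
    #[serde(default)]
    task: Vec<Task>,
}

#[derive(Debug, Deserialize)]
struct Document {
    id: String,
    main: String,
    root: Option<String>,
}

#[derive(Debug, Deserialize)]
struct Task {
    id: String,
    document: String,
    output: Option<String>,
    r#type: String,
    when: Option<String>,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let data = std::fs::read_to_string("tests/workspaces/book/tinymist.lock")?;
    let lock: LockData = toml::from_str(&data)?;
    assert_eq!(lock.version, "0.1.0-beta0");
    // `output = "book"` is relative to the lockfile's directory.
    assert_eq!(lock.task[0].output.as_deref(), Some("book"));
    Ok(())
}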