fix: don't export on entry change and first filesystem sync (#1854)
Some checks are pending
tinymist::ci / build-vscode-others (push) Blocked by required conditions
tinymist::ci / publish-vscode (push) Blocked by required conditions
tinymist::ci / build-vsc-assets (push) Blocked by required conditions
tinymist::ci / build-vscode (push) Blocked by required conditions
tinymist::ci / Duplicate Actions Detection (push) Waiting to run
tinymist::ci / Check Clippy, Formatting, Completion, Documentation, and Tests (Linux) (push) Waiting to run
tinymist::ci / Check Minimum Rust version and Tests (Windows) (push) Waiting to run
tinymist::ci / E2E Tests (darwin-arm64 on macos-latest) (push) Blocked by required conditions
tinymist::ci / E2E Tests (linux-x64 on ubuntu-22.04) (push) Blocked by required conditions
tinymist::ci / E2E Tests (linux-x64 on ubuntu-latest) (push) Blocked by required conditions
tinymist::ci / E2E Tests (win32-x64 on windows-2019) (push) Blocked by required conditions
tinymist::ci / E2E Tests (win32-x64 on windows-latest) (push) Blocked by required conditions
tinymist::ci / prepare-build (push) Waiting to run
tinymist::ci / build-binary (push) Blocked by required conditions
tinymist::gh_pages / build-gh-pages (push) Waiting to run

* fix: make real onSave export conditions

* fix: remove fix

* feat: pass export tests

* fix: revert bootstrap changes

* feat: reduce num of exports

* fix: diag tests
This commit is contained in:
Myriad-Dreamin 2025-06-29 21:54:05 +08:00 committed by GitHub
parent db7e031bbe
commit d0a478929e
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
23 changed files with 214 additions and 119 deletions

2
Cargo.lock generated
View file

@ -5626,7 +5626,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [ dependencies = [
"windows-sys 0.48.0", "windows-sys 0.59.0",
] ]
[[package]] [[package]]

View file

@ -18,7 +18,7 @@ ENV RUSTC_WRAPPER=sccache SCCACHE_DIR=/sccache
# to download the toolchain # to download the toolchain
RUN --mount=type=cache,target=/usr/local/cargo/registry \ RUN --mount=type=cache,target=/usr/local/cargo/registry \
--mount=type=cache,target=$SCCACHE_DIR,sharing=locked \ --mount=type=cache,target=$SCCACHE_DIR,sharing=locked \
cargo --version rustup update
FROM base as planner FROM base as planner
WORKDIR app WORKDIR app

View file

@ -340,8 +340,8 @@ struct TaggedMemoryEvent {
pub struct CompileServerOpts<F: CompilerFeat, Ext> { pub struct CompileServerOpts<F: CompilerFeat, Ext> {
/// The compilation handler. /// The compilation handler.
pub handler: Arc<dyn CompileHandler<F, Ext>>, pub handler: Arc<dyn CompileHandler<F, Ext>>,
/// Whether to enable file system watching. /// Whether to ignoring the first fs sync event.
pub enable_watch: bool, pub ignore_first_sync: bool,
/// Specifies the current export target. /// Specifies the current export target.
pub export_target: ExportTarget, pub export_target: ExportTarget,
} }
@ -350,7 +350,7 @@ impl<F: CompilerFeat + Send + Sync + 'static, Ext: 'static> Default for CompileS
fn default() -> Self { fn default() -> Self {
Self { Self {
handler: Arc::new(std::marker::PhantomData), handler: Arc::new(std::marker::PhantomData),
enable_watch: false, ignore_first_sync: false,
export_target: ExportTarget::Paged, export_target: ExportTarget::Paged,
} }
} }
@ -364,8 +364,8 @@ pub struct ProjectCompiler<F: CompilerFeat, Ext> {
export_target: ExportTarget, export_target: ExportTarget,
/// Channel for sending interrupts to the compiler actor. /// Channel for sending interrupts to the compiler actor.
dep_tx: mpsc::UnboundedSender<NotifyMessage>, dep_tx: mpsc::UnboundedSender<NotifyMessage>,
/// Whether to enable file system watching. /// Whether to ignore the first sync event.
pub enable_watch: bool, pub ignore_first_sync: bool,
/// The current logical tick. /// The current logical tick.
logical_tick: usize, logical_tick: usize,
@ -389,7 +389,7 @@ impl<F: CompilerFeat + Send + Sync + 'static, Ext: Default + 'static> ProjectCom
dep_tx: mpsc::UnboundedSender<NotifyMessage>, dep_tx: mpsc::UnboundedSender<NotifyMessage>,
CompileServerOpts { CompileServerOpts {
handler, handler,
enable_watch, ignore_first_sync,
export_target, export_target,
}: CompileServerOpts<F, Ext>, }: CompileServerOpts<F, Ext>,
) -> Self { ) -> Self {
@ -402,13 +402,13 @@ impl<F: CompilerFeat + Send + Sync + 'static, Ext: Default + 'static> ProjectCom
Self { Self {
handler, handler,
dep_tx, dep_tx,
enable_watch,
export_target, export_target,
logical_tick: 1, logical_tick: 1,
dirty_shadow_logical_tick: 0, dirty_shadow_logical_tick: 0,
estimated_shadow_files: Default::default(), estimated_shadow_files: Default::default(),
ignore_first_sync,
primary, primary,
deps: Default::default(), deps: Default::default(),
@ -657,7 +657,7 @@ impl<F: CompilerFeat + Send + Sync + 'static, Ext: Default + 'static> ProjectCom
// Apply file system changes. // Apply file system changes.
let dirty_tick = &mut self.dirty_shadow_logical_tick; let dirty_tick = &mut self.dirty_shadow_logical_tick;
let (changes, event) = event.split(); let (changes, watched, event) = event.split_with_is_sync();
let changes = std::iter::repeat_n(changes, 1 + self.dedicates.len()); let changes = std::iter::repeat_n(changes, 1 + self.dedicates.len());
let proj = std::iter::once(&mut self.primary).chain(self.dedicates.iter_mut()); let proj = std::iter::once(&mut self.primary).chain(self.dedicates.iter_mut());
@ -681,7 +681,7 @@ impl<F: CompilerFeat + Send + Sync + 'static, Ext: Default + 'static> ProjectCom
verse.vfs_changed() verse.vfs_changed()
}); });
if vfs_changed { if vfs_changed && (!self.ignore_first_sync || !watched) {
proj.reason.see(reason_by_fs()); proj.reason.see(reason_by_fs());
} }
} }
@ -911,7 +911,7 @@ impl<F: CompilerFeat, Ext: 'static> ProjectInsState<F, Ext> {
} }
world.evict_vfs(60); world.evict_vfs(60);
let elapsed = evict_start.elapsed(); let elapsed = evict_start.elapsed();
log::info!("ProjectCompiler: evict cache in {elapsed:?}"); log::debug!("ProjectCompiler: evict cache in {elapsed:?}");
}); });
true true

View file

@ -186,7 +186,7 @@ impl<F: FnMut(FilesystemEvent) + Send + Sync> NotifyActor<F> {
} }
ActorEvent::Message(Some(SyncDependency(paths))) => { ActorEvent::Message(Some(SyncDependency(paths))) => {
if let Some(changeset) = self.update_watches(paths.as_ref()) { if let Some(changeset) = self.update_watches(paths.as_ref()) {
(self.interrupted_by_events)(FilesystemEvent::Update(changeset)); (self.interrupted_by_events)(FilesystemEvent::Update(changeset, true));
} }
} }
ActorEvent::NotifyEvent(event) => { ActorEvent::NotifyEvent(event) => {
@ -343,7 +343,7 @@ impl<F: FnMut(FilesystemEvent) + Send + Sync> NotifyActor<F> {
// Send file updates. // Send file updates.
if !changeset.is_empty() { if !changeset.is_empty() {
(self.interrupted_by_events)(FilesystemEvent::Update(changeset)); (self.interrupted_by_events)(FilesystemEvent::Update(changeset, false));
} }
} }
@ -494,7 +494,7 @@ impl<F: FnMut(FilesystemEvent) + Send + Sync> NotifyActor<F> {
let mut changeset = FileChangeSet::default(); let mut changeset = FileChangeSet::default();
changeset.inserts.push((event.path, payload)); changeset.inserts.push((event.path, payload));
(self.interrupted_by_events)(FilesystemEvent::Update(changeset)); (self.interrupted_by_events)(FilesystemEvent::Update(changeset, false));
} }
} }
}; };

View file

@ -37,11 +37,11 @@ pub struct ExportTimings;
impl ExportTimings { impl ExportTimings {
pub fn needs_run<F: CompilerFeat, D: typst::Document>( pub fn needs_run<F: CompilerFeat, D: typst::Document>(
snap: &CompileSnapshot<F>, snap: &CompileSnapshot<F>,
timing: Option<TaskWhen>, timing: Option<&TaskWhen>,
docs: Option<&D>, docs: Option<&D>,
) -> Option<bool> { ) -> Option<bool> {
snap.signal snap.signal
.should_run_task(timing.unwrap_or_default(), docs) .should_run_task(timing.unwrap_or(&TaskWhen::Never), docs)
} }
} }

View file

@ -85,9 +85,9 @@ pub enum ProjectTask {
impl ProjectTask { impl ProjectTask {
/// Returns the timing of executing the task. /// Returns the timing of executing the task.
pub fn when(&self) -> Option<TaskWhen> { pub fn when(&self) -> Option<&TaskWhen> {
Some(match self { Some(match self {
Self::Preview(task) => task.when, Self::Preview(task) => &task.when,
Self::ExportPdf(..) Self::ExportPdf(..)
| Self::ExportPng(..) | Self::ExportPng(..)
| Self::ExportSvg(..) | Self::ExportSvg(..)
@ -96,7 +96,7 @@ impl ProjectTask {
| Self::ExportMd(..) | Self::ExportMd(..)
| Self::ExportTeX(..) | Self::ExportTeX(..)
| Self::ExportText(..) | Self::ExportText(..)
| Self::Query(..) => self.as_export()?.when, | Self::Query(..) => &self.as_export()?.when,
}) })
} }

View file

@ -128,7 +128,7 @@ impl PathPattern {
#[comemo::memoize] #[comemo::memoize]
fn substitute_impl(&self, root: Option<ImmutPath>, main: Option<FileId>) -> Option<ImmutPath> { fn substitute_impl(&self, root: Option<ImmutPath>, main: Option<FileId>) -> Option<ImmutPath> {
log::info!("Check path {main:?} and root {root:?} with output directory {self:?}"); log::debug!("Check path {main:?} and root {root:?} with output directory {self:?}");
let (root, main) = root.zip(main)?; let (root, main) = root.zip(main)?;

View file

@ -468,45 +468,66 @@ impl<M: PathAccessModel + Sized> RevisingVfs<'_, M> {
&mut self.inner.access_model &mut self.inner.access_model
} }
fn invalidate_path(&mut self, path: &Path) { fn invalidate_path(&mut self, path: &Path, snap: Option<&FileSnapshot>) {
if let Some(fids) = self.paths.get(path) { if let Some(fids) = self.paths.get(path) {
if fids.is_empty() { if fids.is_empty() {
return; return;
} }
self.view_changed = true; // Always changes view if snap is none.
self.view_changed = snap.is_none();
for fid in fids.clone() { for fid in fids.clone() {
self.invalidate_file_id(fid); self.invalidate_file_id(fid, snap);
} }
} }
} }
fn invalidate_file_id(&mut self, file_id: FileId) { fn invalidate_file_id(&mut self, file_id: FileId, snap: Option<&FileSnapshot>) {
self.view_changed = true; let mut changed = false;
self.managed.slot(file_id, |e| { self.managed.slot(file_id, |e| {
if let Some(snap) = snap {
let may_read_bytes = e.bytes.get().map(|b| &b.2);
match (snap, may_read_bytes) {
(FileSnapshot(Ok(snap)), Some(Ok(read))) if snap == read => {
return;
}
(FileSnapshot(Err(snap)), Some(Err(read))) if snap.as_ref() == read => {
return;
}
_ => {}
}
}
e.changed_at = self.goal_revision.get(); e.changed_at = self.goal_revision.get();
e.bytes = Arc::default(); e.bytes = Arc::default();
e.source = Arc::default(); e.source = Arc::default();
changed = true;
}); });
self.view_changed = changed;
} }
/// Reset the shadowing files in [`OverlayAccessModel`]. /// Reset the shadowing files in [`OverlayAccessModel`].
pub fn reset_shadow(&mut self) { pub fn reset_shadow(&mut self) {
for path in self.am().inner.inner.file_paths() { for path in self.am().inner.inner.file_paths() {
self.invalidate_path(&path); self.invalidate_path(&path, None);
} }
for fid in self.am().file_paths() { for fid in self.am().file_paths() {
self.invalidate_file_id(fid); self.invalidate_file_id(fid, None);
} }
self.am().clear_shadow(); self.am().clear_shadow();
self.am().inner.inner.clear_shadow(); self.am().inner.inner.clear_shadow();
} }
/// Unconditionally changes the view of the vfs.
pub fn change_view(&mut self) -> FileResult<()> {
self.view_changed = true;
Ok(())
}
/// Adds a shadowing file to the [`OverlayAccessModel`]. /// Adds a shadowing file to the [`OverlayAccessModel`].
pub fn map_shadow(&mut self, path: &Path, snap: FileSnapshot) -> FileResult<()> { pub fn map_shadow(&mut self, path: &Path, snap: FileSnapshot) -> FileResult<()> {
self.view_changed = true; self.invalidate_path(path, Some(&snap));
self.invalidate_path(path);
self.am().inner.inner.add_file(path, snap, |c| c.into()); self.am().inner.inner.add_file(path, snap, |c| c.into());
Ok(()) Ok(())
@ -514,8 +535,7 @@ impl<M: PathAccessModel + Sized> RevisingVfs<'_, M> {
/// Removes a shadowing file from the [`OverlayAccessModel`]. /// Removes a shadowing file from the [`OverlayAccessModel`].
pub fn unmap_shadow(&mut self, path: &Path) -> FileResult<()> { pub fn unmap_shadow(&mut self, path: &Path) -> FileResult<()> {
self.view_changed = true; self.invalidate_path(path, None);
self.invalidate_path(path);
self.am().inner.inner.remove_file(path); self.am().inner.inner.remove_file(path);
Ok(()) Ok(())
@ -523,8 +543,7 @@ impl<M: PathAccessModel + Sized> RevisingVfs<'_, M> {
/// Adds a shadowing file to the [`OverlayAccessModel`] by file id. /// Adds a shadowing file to the [`OverlayAccessModel`] by file id.
pub fn map_shadow_by_id(&mut self, file_id: FileId, snap: FileSnapshot) -> FileResult<()> { pub fn map_shadow_by_id(&mut self, file_id: FileId, snap: FileSnapshot) -> FileResult<()> {
self.view_changed = true; self.invalidate_file_id(file_id, Some(&snap));
self.invalidate_file_id(file_id);
self.am().add_file(&file_id, snap, |c| *c); self.am().add_file(&file_id, snap, |c| *c);
Ok(()) Ok(())
@ -532,8 +551,7 @@ impl<M: PathAccessModel + Sized> RevisingVfs<'_, M> {
/// Removes a shadowing file from the [`OverlayAccessModel`] by file id. /// Removes a shadowing file from the [`OverlayAccessModel`] by file id.
pub fn remove_shadow_by_id(&mut self, file_id: FileId) { pub fn remove_shadow_by_id(&mut self, file_id: FileId) {
self.view_changed = true; self.invalidate_file_id(file_id, None);
self.invalidate_file_id(file_id);
self.am().remove_file(&file_id); self.am().remove_file(&file_id);
} }
@ -548,10 +566,10 @@ impl<M: PathAccessModel + Sized> RevisingVfs<'_, M> {
/// See [`NotifyAccessModel`] for more information. /// See [`NotifyAccessModel`] for more information.
pub fn notify_fs_changes(&mut self, event: FileChangeSet) { pub fn notify_fs_changes(&mut self, event: FileChangeSet) {
for path in &event.removes { for path in &event.removes {
self.invalidate_path(path); self.invalidate_path(path, None);
} }
for (path, _) in &event.inserts { for (path, snap) in &event.inserts {
self.invalidate_path(path); self.invalidate_path(path, Some(snap));
} }
self.am().inner.inner.inner.notify(event); self.am().inner.inner.inner.notify(event);

View file

@ -41,7 +41,7 @@ pub struct UpstreamUpdateEvent {
#[derive(Debug)] #[derive(Debug)]
pub enum FilesystemEvent { pub enum FilesystemEvent {
/// Update file system files according to the given changeset /// Update file system files according to the given changeset
Update(FileChangeSet), Update(FileChangeSet, /* is_sync */ bool),
/// See [`UpstreamUpdateEvent`] /// See [`UpstreamUpdateEvent`]
UpstreamUpdate { UpstreamUpdate {
/// New changeset produced by invalidation /// New changeset produced by invalidation
@ -60,7 +60,19 @@ impl FilesystemEvent {
changeset, changeset,
upstream_event, upstream_event,
} => (changeset, upstream_event), } => (changeset, upstream_event),
FilesystemEvent::Update(changeset) => (changeset, None), FilesystemEvent::Update(changeset, ..) => (changeset, None),
}
}
/// Splits the filesystem event into a changeset and an optional upstream
/// event.
pub fn split_with_is_sync(self) -> (FileChangeSet, bool, Option<UpstreamUpdateEvent>) {
match self {
FilesystemEvent::UpstreamUpdate {
changeset,
upstream_event,
} => (changeset, false, upstream_event),
FilesystemEvent::Update(changeset, is_sync) => (changeset, is_sync, None),
} }
} }
} }

View file

@ -8,7 +8,7 @@ use crate::{Bytes, ImmutPath};
/// ///
/// Note: The error is boxed to avoid large stack size /// Note: The error is boxed to avoid large stack size
#[derive(Clone, PartialEq, Eq)] #[derive(Clone, PartialEq, Eq)]
pub struct FileSnapshot(Result<Bytes, Box<FileError>>); pub struct FileSnapshot(pub(crate) Result<Bytes, Box<FileError>>);
#[derive(Debug)] #[derive(Debug)]
#[allow(dead_code)] #[allow(dead_code)]

View file

@ -236,7 +236,7 @@ macro_rules! display_possible_values {
/// alias typst="tinymist compile --when=onSave" /// alias typst="tinymist compile --when=onSave"
/// typst compile main.typ /// typst compile main.typ
/// ``` /// ```
#[derive(Debug, Copy, Clone, Eq, PartialEq, Default, Hash, ValueEnum, Serialize, Deserialize)] #[derive(Debug, Clone, Eq, PartialEq, Default, Hash, ValueEnum, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
#[clap(rename_all = "camelCase")] #[clap(rename_all = "camelCase")]
pub enum TaskWhen { pub enum TaskWhen {
@ -252,6 +252,8 @@ pub enum TaskWhen {
/// ///
/// Note: this is deprecating. /// Note: this is deprecating.
OnDocumentHasTitle, OnDocumentHasTitle,
/// Checks by running a typst script.
Script,
} }
impl TaskWhen { impl TaskWhen {

View file

@ -46,7 +46,7 @@ impl ExportSignal {
pub fn should_run_task_dyn( pub fn should_run_task_dyn(
&self, &self,
when: TaskWhen, when: &TaskWhen,
docs: Option<&TypstDocument>, docs: Option<&TypstDocument>,
) -> Option<bool> { ) -> Option<bool> {
match docs { match docs {
@ -58,15 +58,13 @@ impl ExportSignal {
pub fn should_run_task<D: typst::Document>( pub fn should_run_task<D: typst::Document>(
&self, &self,
when: TaskWhen, when: &TaskWhen,
docs: Option<&D>, docs: Option<&D>,
) -> Option<bool> { ) -> Option<bool> {
if !matches!(when, TaskWhen::Never) && self.by_entry_update {
return Some(true);
}
match when { match when {
TaskWhen::Never => Some(false), TaskWhen::Never => Some(false),
// todo: by script
TaskWhen::Script => Some(self.by_entry_update),
TaskWhen::OnType => Some(self.by_mem_events), TaskWhen::OnType => Some(self.by_mem_events),
TaskWhen::OnSave => Some(self.by_fs_events), TaskWhen::OnSave => Some(self.by_fs_events),
TaskWhen::OnDocumentHasTitle if self.by_fs_events => { TaskWhen::OnDocumentHasTitle if self.by_fs_events => {

View file

@ -498,7 +498,7 @@ impl Config {
/// Gets the export task configuration. /// Gets the export task configuration.
pub(crate) fn export_task(&self) -> ExportTask { pub(crate) fn export_task(&self) -> ExportTask {
ExportTask { ExportTask {
when: self.export_pdf, when: self.export_pdf.clone(),
output: Some(self.output_path.clone()), output: Some(self.output_path.clone()),
transform: vec![], transform: vec![],
} }
@ -813,12 +813,12 @@ pub struct LintFeat {
impl LintFeat { impl LintFeat {
/// When to trigger the lint checks. /// When to trigger the lint checks.
pub fn when(&self) -> TaskWhen { pub fn when(&self) -> &TaskWhen {
if matches!(self.enabled, Some(false) | None) { if matches!(self.enabled, Some(false) | None) {
return TaskWhen::Never; return &TaskWhen::Never;
} }
self.when.unwrap_or(TaskWhen::OnSave) self.when.as_ref().unwrap_or(&TaskWhen::OnSave)
} }
} }

View file

@ -162,7 +162,7 @@ impl ServerState {
Some("dark") => tinymist_query::ColorTheme::Dark, Some("dark") => tinymist_query::ColorTheme::Dark,
_ => tinymist_query::ColorTheme::Light, _ => tinymist_query::ColorTheme::Light,
}, },
lint: config.lint.when(), lint: config.lint.when().clone(),
periscope: periscope_args.map(|args| { periscope: periscope_args.map(|args| {
let r = TypstPeriscopeProvider(PeriscopeRenderer::new(args)); let r = TypstPeriscopeProvider(PeriscopeRenderer::new(args));
Arc::new(r) as Arc<dyn PeriscopeProvider + Send + Sync> Arc::new(r) as Arc<dyn PeriscopeProvider + Send + Sync>
@ -209,7 +209,7 @@ impl ServerState {
CompileServerOpts { CompileServerOpts {
handler: compile_handle, handler: compile_handle,
export_target: config.export_target, export_target: config.export_target,
enable_watch: true, ignore_first_sync: true,
}, },
); );
@ -450,9 +450,9 @@ impl CompileHandlerImpl {
let should_lint = art let should_lint = art
.snap .snap
.signal .signal
.should_run_task_dyn(self.analysis.lint, art.doc.as_ref()) .should_run_task_dyn(&self.analysis.lint, art.doc.as_ref())
.unwrap_or_default(); .unwrap_or_default();
log::info!( log::debug!(
"Project: should_lint: {should_lint:?}, signal: {:?}", "Project: should_lint: {should_lint:?}, signal: {:?}",
art.snap.signal art.snap.signal
); );

View file

@ -68,18 +68,22 @@ impl ExportTask {
let doc = artifact.doc.as_ref()?; let doc = artifact.doc.as_ref()?;
let s = artifact.snap.signal; let s = artifact.snap.signal;
let when = config.task.when().unwrap_or_default(); let when = config.task.when().unwrap_or(&TaskWhen::Never);
let need_export = (!matches!(when, TaskWhen::Never) && s.by_entry_update) let need_export = match when {
|| match when { TaskWhen::Never => false,
TaskWhen::Never => false, TaskWhen::Script => s.by_entry_update,
TaskWhen::OnType => s.by_mem_events, TaskWhen::OnType => s.by_mem_events,
TaskWhen::OnSave => s.by_fs_events, TaskWhen::OnSave => s.by_fs_events,
TaskWhen::OnDocumentHasTitle => s.by_fs_events && doc.info().title.is_some(), TaskWhen::OnDocumentHasTitle => s.by_fs_events && doc.info().title.is_some(),
}; };
if !need_export { if !need_export {
return None; return None;
} }
log::info!(
"ExportTask(when={when:?}): export for {} with signal: {s:?}",
artifact.id()
);
let rev = artifact.world().revision().get(); let rev = artifact.world().revision().get();
let fut = self.export_folder.spawn(rev, || { let fut = self.export_folder.spawn(rev, || {
@ -455,7 +459,7 @@ mod tests {
fn test_default_never() { fn test_default_never() {
let conf = ExportUserConfig::default(); let conf = ExportUserConfig::default();
assert!(!conf.count_words); assert!(!conf.count_words);
assert_eq!(conf.task.when(), Some(TaskWhen::Never)); assert_eq!(conf.task.when(), Some(&TaskWhen::Never));
} }
#[test] #[test]

View file

@ -25,37 +25,42 @@ impl ProjectCompilation {
pub fn preconfig_timings<F: CompilerFeat>(graph: &Arc<WorldComputeGraph<F>>) -> Result<bool> { pub fn preconfig_timings<F: CompilerFeat>(graph: &Arc<WorldComputeGraph<F>>) -> Result<bool> {
// todo: configure run_diagnostics! // todo: configure run_diagnostics!
let paged_diag = Some(TaskWhen::OnType); let paged_diag = Some(TaskWhen::OnType);
let paged_diag2 = Some(TaskWhen::Script);
let html_diag = Some(TaskWhen::Never); let html_diag = Some(TaskWhen::Never);
let pdf: Option<TaskWhen> = graph let pdf: Option<TaskWhen> = graph
.get::<ConfigTask<<PdfExport as ExportComputation<LspCompilerFeat, _>>::Config>>() .get::<ConfigTask<<PdfExport as ExportComputation<LspCompilerFeat, _>>::Config>>()
.transpose()? .transpose()?
.map(|config| config.export.when); .map(|config| config.export.when.clone());
let svg: Option<TaskWhen> = graph let svg: Option<TaskWhen> = graph
.get::<ConfigTask<<SvgExport as ExportComputation<LspCompilerFeat, _>>::Config>>() .get::<ConfigTask<<SvgExport as ExportComputation<LspCompilerFeat, _>>::Config>>()
.transpose()? .transpose()?
.map(|config| config.export.when); .map(|config| config.export.when.clone());
let png: Option<TaskWhen> = graph let png: Option<TaskWhen> = graph
.get::<ConfigTask<<PngExport as ExportComputation<LspCompilerFeat, _>>::Config>>() .get::<ConfigTask<<PngExport as ExportComputation<LspCompilerFeat, _>>::Config>>()
.transpose()? .transpose()?
.map(|config| config.export.when); .map(|config| config.export.when.clone());
let html: Option<TaskWhen> = graph let html: Option<TaskWhen> = graph
.get::<ConfigTask<<HtmlExport as ExportComputation<LspCompilerFeat, _>>::Config>>() .get::<ConfigTask<<HtmlExport as ExportComputation<LspCompilerFeat, _>>::Config>>()
.transpose()? .transpose()?
.map(|config| config.export.when); .map(|config| config.export.when.clone());
let md: Option<TaskWhen> = graph let md: Option<TaskWhen> = graph
.get::<ConfigTask<ExportTeXTask>>() .get::<ConfigTask<ExportTeXTask>>()
.transpose()? .transpose()?
.map(|config| config.export.when); .map(|config| config.export.when.clone());
let text: Option<TaskWhen> = graph let text: Option<TaskWhen> = graph
.get::<ConfigTask<<TextExport as ExportComputation<LspCompilerFeat, _>>::Config>>() .get::<ConfigTask<<TextExport as ExportComputation<LspCompilerFeat, _>>::Config>>()
.transpose()? .transpose()?
.map(|config| config.export.when); .map(|config| config.export.when.clone());
let doc = None::<TypstPagedDocument>.as_ref(); let doc = None::<TypstPagedDocument>.as_ref();
let check = |timing| ExportTimings::needs_run(&graph.snap, timing, doc).unwrap_or(true); let check = |timing: Option<TaskWhen>| {
ExportTimings::needs_run(&graph.snap, timing.as_ref(), doc).unwrap_or(true)
};
let compile_paged = [paged_diag, pdf, svg, png, text, md].into_iter().any(check); let compile_paged = [paged_diag, paged_diag2, pdf, svg, png, text, md]
.into_iter()
.any(check);
let compile_html = [html_diag, html].into_iter().any(check); let compile_html = [html_diag, html].into_iter().any(check);
let _ = graph.provide::<FlagTask<PagedCompilationTask>>(Ok(FlagTask::flag(compile_paged))); let _ = graph.provide::<FlagTask<PagedCompilationTask>>(Ok(FlagTask::flag(compile_paged)));
@ -83,7 +88,7 @@ impl ProjectExport {
T: ExportComputation<LspCompilerFeat, D, Output = Bytes>, T: ExportComputation<LspCompilerFeat, D, Output = Bytes>,
>( >(
graph: &Arc<WorldComputeGraph<LspCompilerFeat>>, graph: &Arc<WorldComputeGraph<LspCompilerFeat>>,
when: Option<TaskWhen>, when: Option<&TaskWhen>,
config: &T::Config, config: &T::Config,
) -> Result<Option<Bytes>> { ) -> Result<Option<Bytes>> {
let doc = graph.compute::<OptionDocumentTask<D>>()?; let doc = graph.compute::<OptionDocumentTask<D>>()?;
@ -102,7 +107,7 @@ impl ProjectExport {
T: ExportComputation<LspCompilerFeat, D, Output = String>, T: ExportComputation<LspCompilerFeat, D, Output = String>,
>( >(
graph: &Arc<WorldComputeGraph<LspCompilerFeat>>, graph: &Arc<WorldComputeGraph<LspCompilerFeat>>,
when: Option<TaskWhen>, when: Option<&TaskWhen>,
config: &T::Config, config: &T::Config,
) -> Result<Option<Bytes>> { ) -> Result<Option<Bytes>> {
let doc = graph.compute::<OptionDocumentTask<D>>()?; let doc = graph.compute::<OptionDocumentTask<D>>()?;

View file

@ -97,7 +97,7 @@ impl LockFileExt for LockFile {
.map(|t| Id::new(t.clone())) .map(|t| Id::new(t.clone()))
.unwrap_or(doc_id.clone()); .unwrap_or(doc_id.clone());
let when = args.when.unwrap_or(TaskWhen::OnType); let when = args.when.clone().unwrap_or(TaskWhen::OnType);
let task = ProjectTask::Preview(PreviewTask { when }); let task = ProjectTask::Preview(PreviewTask { when });
let task = ApplyProjectTask { let task = ApplyProjectTask {
id: task_id.clone(), id: task_id.clone(),
@ -472,7 +472,7 @@ where
CompileServerOpts { CompileServerOpts {
handler: compile_handle, handler: compile_handle,
export_target: opts.export_target, export_target: opts.export_target,
enable_watch: true, ignore_first_sync: true,
}, },
); );

View file

@ -12,7 +12,9 @@ RUN git clone --filter=blob:none --branch stable https://github.com/neovim/neovi
USER root USER root
RUN cd neovim/build && cpack -G DEB && dpkg -i nvim-linux-x86_64.deb RUN cd neovim/build && cpack -G DEB && dpkg -i nvim-linux-x86_64.deb
FROM myriaddreamin/tinymist:0.13.14 FROM myriaddreamin/tinymist:0.13.14 as tinymist
FROM debian:12
COPY --from=builder /neovim/build/nvim-linux-x86_64.deb /tmp/nvim-linux-x86_64.deb COPY --from=builder /neovim/build/nvim-linux-x86_64.deb /tmp/nvim-linux-x86_64.deb
RUN apt-get update && apt-get install -y curl git ripgrep build-essential unzip RUN apt-get update && apt-get install -y curl git ripgrep build-essential unzip
@ -27,6 +29,10 @@ WORKDIR /home/runner
RUN for dependency in AndrewRadev/switch.vim andymass/vim-matchup neovim/nvim-lspconfig nvim-lua/plenary.nvim tomtom/tcomment_vim lewis6991/satellite.nvim; do git clone --quiet --filter=blob:none "https://github.com/$dependency" "packpath/$(basename $dependency)"; done RUN for dependency in AndrewRadev/switch.vim andymass/vim-matchup neovim/nvim-lspconfig nvim-lua/plenary.nvim tomtom/tcomment_vim lewis6991/satellite.nvim; do git clone --quiet --filter=blob:none "https://github.com/$dependency" "packpath/$(basename $dependency)"; done
RUN for dependency in Julian/inanis.nvim; do git clone --quiet --filter=blob:none "https://github.com/$dependency" "packpath/$(basename $dependency)"; done RUN for dependency in Julian/inanis.nvim; do git clone --quiet --filter=blob:none "https://github.com/$dependency" "packpath/$(basename $dependency)"; done
USER root
COPY --from=tinymist /usr/local/bin/tinymist /usr/local/bin/tinymist
USER runner
ENV XDG_CONFIG_HOME=/home/runner/.config ENV XDG_CONFIG_HOME=/home/runner/.config
ENV XDG_DATA_HOME=/home/runner/.local/share ENV XDG_DATA_HOME=/home/runner/.local/share
ENV XDG_STATE_HOME=/home/runner/.local/state ENV XDG_STATE_HOME=/home/runner/.local/state

View file

@ -195,3 +195,5 @@ autocmd BufNewFile,BufRead *.typ setfiletype typst
``` ```
## Contributing ## Contributing
Please check the [contributing guide](/editors/neovim/CONTRIBUTING.md) for more information on how to contribute to the project.

View file

@ -14,6 +14,12 @@ else
exit 1 exit 1
fi fi
cd ../.. && docker build -t myriaddreamin/tinymist:0.13.14 . (cd ../.. && docker build -t myriaddreamin/tinymist:0.13.14 .)
docker build -t myriaddreamin/tinymist-nvim:0.13.14 . docker build -t myriaddreamin/tinymist-nvim:0.13.14 .
docker run --rm -it -v $PWD/../../tests/workspaces:/home/runner/dev/workspaces -v $PWD:/home/runner/dev -v $PWD/target/.local:/home/runner/.local -v $PWD/target/.cache:/home/runner/.cache -w /home/runner/dev myriaddreamin/tinymist-nvim:0.13.14 $DOCKER_ARGS docker run --rm -it \
-v $PWD/../../tests/workspaces:/home/runner/dev/workspaces \
-v $PWD:/home/runner/dev \
-v $PWD/target/.local:/home/runner/.local \
-v $PWD/target/.cache:/home/runner/.cache \
-w /home/runner/dev myriaddreamin/tinymist-nvim:0.13.14 \
$DOCKER_ARGS

View file

@ -0,0 +1,31 @@
---@brief [[
--- Tests for export functionalities.
---@brief ]]

local fixtures = require 'spec.fixtures'
local helpers = require 'spec.helpers'

-- Start tinymist configured to export a PDF only when the buffer is saved.
require('tinymist').setup {
  lsp = {
    init_options = {
      exportPdf = 'onSave',
      systemFonts = false,
    },
  }
}

describe('Export', function()
  -- Sanity check: no tinymist client is attached before any buffer is opened.
  assert.is.empty(vim.lsp.get_clients { bufnr = 0, name = 'tinymist', _uninitialized = true })

  it('no pdf is created onSave', function()
    -- Opening a fixture buffer should attach exactly one tinymist client.
    vim.cmd.edit(fixtures.project.some_existing_file)
    local attached = vim.lsp.get_clients { bufnr = 0, name = 'tinymist', _uninitialized = true }
    assert.is.same(1, #attached)
    --- append a text to current buffer
    helpers.insert('This is a test export.\n')
    -- Wait briefly so any (erroneous) export would have time to land on disk.
    vim.cmd.sleep('300m')
    -- The buffer was modified but never saved, so with exportPdf = onSave
    -- no PDF file may exist next to the fixture.
    local pdf_path = fixtures.project.some_existing_file:gsub('%.typ$', '.pdf')
    assert.is.same(nil, vim.uv.fs_stat(pdf_path), 'PDF file should not be created without saving because exportPdf = onSave')
  end)
end)

View file

@ -1,64 +1,44 @@
# -- packpath := justfile_directory() / "packpath"
# -- scripts := justfile_directory() / "scripts"
# -- doc := justfile_directory() / "doc"
# -- src := justfile_directory() / "lua"
# -- lean := src / "lean"
# -- spec := justfile_directory() / "spec"
# -- fixtures := spec / "fixtures"
# -- demos := justfile_directory() / "demos"
# -- init_lua := scripts / "minimal_init.lua"
# -- clean_config := justfile_directory() / ".test-config"
# -- # Rebuild some test fixtures used in the test suite.
# -- _rebuild-test-fixtures:
# -- cd "{{ fixtures }}/example-project/"; lake build && lake build ProofWidgets Mathlib.Tactic.Widget.Conv
# -- # Run the lean.nvim test suite.
# -- [group('testing')]
# -- test: _rebuild-test-fixtures _clone-test-dependencies
# -- @just retest
# -- # Run the test suite without rebuilding or recloning any dependencies.
# -- [group('testing')]
# -- retest *test_files=spec:
# -- nvim --headless --clean -u {{ init_lua }} -c 'lua require("inanis").run{ specs = vim.split("{{ test_files }}", " "), minimal_init = "{{ init_lua }}", sequential = vim.env.TEST_SEQUENTIAL ~= nil }'
import os import os
import subprocess import subprocess
import sys import sys
def run_tests(test_files=None): def run_tests(test_files=None):
""" """
Run the Neovim test suite with the specified test files. Run the Neovim test suite with the specified test files.
If no test files are specified, it runs all tests in the 'spec' directory. If no test files are specified, it runs all tests in the 'spec' directory.
""" """
init_lua = os.path.realpath(os.path.join(__file__, '../../scripts/minimal_init.lua')) init_lua = os.path.realpath(
os.path.join(__file__, "../../scripts/minimal_init.lua")
)
if test_files is None: if test_files is None:
# all test files in the 'spec' directory # all test files in the 'spec' directory
test_files = [] test_files = []
for root, _, files in os.walk(os.path.dirname(__file__)): for root, _, files in os.walk(os.path.dirname(__file__)):
test_files.extend( os.path.join(root, f) for f in files if f.endswith('_spec.lua') ) test_files.extend(
test_files = ' '.join(test_files) os.path.join(root, f) for f in files if f.endswith("spec.lua")
)
test_files = " ".join(test_files)
command = [ command = [
'nvim', "nvim",
'--headless', "--headless",
'--clean', "--clean",
'-u', init_lua, "-u",
'-c', f'lua require("inanis").run{{ specs = vim.split("{test_files}", " "), minimal_init = "{init_lua}", sequential = vim.env.TEST_SEQUENTIAL ~= nil }}' init_lua,
"-c",
f'lua require("inanis").run{{ specs = vim.split("{test_files}", " "), minimal_init = "{init_lua}", sequential = vim.env.TEST_SEQUENTIAL ~= nil }}',
] ]
subprocess.run(command, check=True) subprocess.run(command, check=True)
if __name__ == "__main__": if __name__ == "__main__":
# Check if any test files are provided as command line arguments # Check if any test files are provided as command line arguments
if len(sys.argv) > 1: if len(sys.argv) > 1:
test_files = ' '.join(sys.argv[1:]) test_files = " ".join(sys.argv[1:])
else: else:
test_files = None test_files = None
run_tests(test_files) run_tests(test_files)

View file

@ -0,0 +1,31 @@
---@brief [[
--- Tests for export functionalities.
---@brief ]]

local fixtures = require 'spec.fixtures'
local helpers = require 'spec.helpers'

-- Start tinymist with PDF export disabled entirely. The test below (and its
-- assertion message) checks the 'never' policy, so the server must be
-- configured with exportPdf = 'never' — this spec previously passed 'onSave',
-- copied from the sibling onSave spec, which contradicted the test's intent.
require('tinymist').setup {
  lsp = {
    init_options = {
      exportPdf = 'never',
      systemFonts = false,
    },
  }
}

describe('Export', function()
  -- Sanity check: no tinymist client is attached before any buffer is opened.
  assert.is.empty(vim.lsp.get_clients { bufnr = 0, name = 'tinymist', _uninitialized = true })

  it('no pdf is created never', function()
    -- Opening a fixture buffer should attach exactly one tinymist client.
    vim.cmd.edit(fixtures.project.some_existing_file)
    assert.is.same(1, #vim.lsp.get_clients { bufnr = 0, name = 'tinymist', _uninitialized = true })
    --- append a text to current buffer
    helpers.insert('This is a test export.\n')
    -- Wait briefly so any (erroneous) export would have time to land on disk.
    vim.cmd.sleep('300m')
    -- Export is disabled outright, so no PDF may exist regardless of edits
    -- or saves.
    local pdf_path = fixtures.project.some_existing_file:gsub('%.typ$', '.pdf')
    assert.is.same(nil, vim.uv.fs_stat(pdf_path), 'PDF file should not be created without saving because exportPdf = never')
  end)
end)