Mirror of https://github.com/Myriad-Dreamin/tinymist.git, synced 2025-07-07 13:05:02 +00:00
fix: don't export on entry change and first filesystem sync (#1854)
Some checks are pending
tinymist::ci / build-vscode-others (push) Blocked by required conditions
tinymist::ci / publish-vscode (push) Blocked by required conditions
tinymist::ci / build-vsc-assets (push) Blocked by required conditions
tinymist::ci / build-vscode (push) Blocked by required conditions
tinymist::ci / Duplicate Actions Detection (push) Waiting to run
tinymist::ci / Check Clippy, Formatting, Completion, Documentation, and Tests (Linux) (push) Waiting to run
tinymist::ci / Check Minimum Rust version and Tests (Windows) (push) Waiting to run
tinymist::ci / E2E Tests (darwin-arm64 on macos-latest) (push) Blocked by required conditions
tinymist::ci / E2E Tests (linux-x64 on ubuntu-22.04) (push) Blocked by required conditions
tinymist::ci / E2E Tests (linux-x64 on ubuntu-latest) (push) Blocked by required conditions
tinymist::ci / E2E Tests (win32-x64 on windows-2019) (push) Blocked by required conditions
tinymist::ci / E2E Tests (win32-x64 on windows-latest) (push) Blocked by required conditions
tinymist::ci / prepare-build (push) Waiting to run
tinymist::ci / build-binary (push) Blocked by required conditions
tinymist::gh_pages / build-gh-pages (push) Waiting to run
* fix: make real onSave export conditions
* fix: remove fix
* feat: pass export tests
* fix: revert bootstrap changes
* feat: reduce num of exports
* fix: diag tests
Parent: db7e031bbe
Commit: d0a478929e
23 changed files with 214 additions and 119 deletions
Cargo.lock (generated, 2 changed lines)

```diff
@@ -5626,7 +5626,7 @@ version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
 dependencies = [
- "windows-sys 0.48.0",
+ "windows-sys 0.59.0",
 ]
 
 [[package]]
```

```diff
@@ -18,7 +18,7 @@ ENV RUSTC_WRAPPER=sccache SCCACHE_DIR=/sccache
 # to download the toolchain
 RUN --mount=type=cache,target=/usr/local/cargo/registry \
     --mount=type=cache,target=$SCCACHE_DIR,sharing=locked \
-    cargo --version
+    rustup update
 
 FROM base as planner
 WORKDIR app
```

```diff
@@ -340,8 +340,8 @@ struct TaggedMemoryEvent {
 pub struct CompileServerOpts<F: CompilerFeat, Ext> {
     /// The compilation handler.
     pub handler: Arc<dyn CompileHandler<F, Ext>>,
     /// Whether to enable file system watching.
     pub enable_watch: bool,
     /// Whether to ignoring the first fs sync event.
     pub ignore_first_sync: bool,
     /// Specifies the current export target.
     pub export_target: ExportTarget,
 }
@@ -350,7 +350,7 @@ impl<F: CompilerFeat + Send + Sync + 'static, Ext: 'static> Default for CompileS
     fn default() -> Self {
         Self {
             handler: Arc::new(std::marker::PhantomData),
             enable_watch: false,
             ignore_first_sync: false,
             export_target: ExportTarget::Paged,
         }
     }
@@ -364,8 +364,8 @@ pub struct ProjectCompiler<F: CompilerFeat, Ext> {
     export_target: ExportTarget,
     /// Channel for sending interrupts to the compiler actor.
     dep_tx: mpsc::UnboundedSender<NotifyMessage>,
     /// Whether to enable file system watching.
     pub enable_watch: bool,
     /// Whether to ignore the first sync event.
     pub ignore_first_sync: bool,
 
     /// The current logical tick.
     logical_tick: usize,
@@ -389,7 +389,7 @@ impl<F: CompilerFeat + Send + Sync + 'static, Ext: Default + 'static> ProjectCom
         dep_tx: mpsc::UnboundedSender<NotifyMessage>,
         CompileServerOpts {
             handler,
             enable_watch,
             ignore_first_sync,
             export_target,
         }: CompileServerOpts<F, Ext>,
     ) -> Self {
@@ -402,13 +402,13 @@ impl<F: CompilerFeat + Send + Sync + 'static, Ext: Default + 'static> ProjectCom
         Self {
             handler,
             dep_tx,
             enable_watch,
             export_target,
 
             logical_tick: 1,
             dirty_shadow_logical_tick: 0,
 
             estimated_shadow_files: Default::default(),
             ignore_first_sync,
 
             primary,
             deps: Default::default(),
@@ -657,7 +657,7 @@ impl<F: CompilerFeat + Send + Sync + 'static, Ext: Default + 'static> ProjectCom
 
         // Apply file system changes.
         let dirty_tick = &mut self.dirty_shadow_logical_tick;
-        let (changes, event) = event.split();
+        let (changes, watched, event) = event.split_with_is_sync();
         let changes = std::iter::repeat_n(changes, 1 + self.dedicates.len());
         let proj = std::iter::once(&mut self.primary).chain(self.dedicates.iter_mut());
 
@@ -681,7 +681,7 @@ impl<F: CompilerFeat + Send + Sync + 'static, Ext: Default + 'static> ProjectCom
                 verse.vfs_changed()
             });
 
-            if vfs_changed {
+            if vfs_changed && (!self.ignore_first_sync || !watched) {
                 proj.reason.see(reason_by_fs());
             }
         }
@@ -911,7 +911,7 @@ impl<F: CompilerFeat, Ext: 'static> ProjectInsState<F, Ext> {
             }
             world.evict_vfs(60);
             let elapsed = evict_start.elapsed();
-            log::info!("ProjectCompiler: evict cache in {elapsed:?}");
+            log::debug!("ProjectCompiler: evict cache in {elapsed:?}");
         });
 
         true
```
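
The two paired lines above carry the behavioral core of this hunk: the interrupt handler now asks the event whether it came from the initial dependency sync (`watched`) and only records a file-system compile reason when that sync is not being ignored. A minimal, self-contained sketch of the predicate, with plain booleans standing in for the compiler state (the real check lives inside `ProjectCompiler` and also depends on the VFS revision bump):

```rust
/// Mirrors `vfs_changed && (!self.ignore_first_sync || !watched)` from the hunk above.
/// All three inputs are simplified stand-ins for the compiler's state.
fn marks_fs_reason(vfs_changed: bool, ignore_first_sync: bool, watched: bool) -> bool {
    vfs_changed && (!ignore_first_sync || !watched)
}

fn main() {
    // A real on-disk edit still schedules a compile (and hence possible exports).
    assert!(marks_fs_reason(true, true, false));
    // The first watcher sync is swallowed when the server asked to ignore it,
    // so merely opening a workspace no longer triggers an onSave export.
    assert!(!marks_fs_reason(true, true, true));
    // A compiler that does not ignore the first sync keeps the old behavior.
    assert!(marks_fs_reason(true, false, true));
    // No VFS change, nothing to do.
    assert!(!marks_fs_reason(false, true, false));
}
```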

```diff
@@ -186,7 +186,7 @@ impl<F: FnMut(FilesystemEvent) + Send + Sync> NotifyActor<F> {
             }
             ActorEvent::Message(Some(SyncDependency(paths))) => {
                 if let Some(changeset) = self.update_watches(paths.as_ref()) {
-                    (self.interrupted_by_events)(FilesystemEvent::Update(changeset));
+                    (self.interrupted_by_events)(FilesystemEvent::Update(changeset, true));
                 }
             }
             ActorEvent::NotifyEvent(event) => {
@@ -343,7 +343,7 @@ impl<F: FnMut(FilesystemEvent) + Send + Sync> NotifyActor<F> {
 
         // Send file updates.
         if !changeset.is_empty() {
-            (self.interrupted_by_events)(FilesystemEvent::Update(changeset));
+            (self.interrupted_by_events)(FilesystemEvent::Update(changeset, false));
         }
     }
 
@@ -494,7 +494,7 @@ impl<F: FnMut(FilesystemEvent) + Send + Sync> NotifyActor<F> {
                 let mut changeset = FileChangeSet::default();
                 changeset.inserts.push((event.path, payload));
 
-                (self.interrupted_by_events)(FilesystemEvent::Update(changeset));
+                (self.interrupted_by_events)(FilesystemEvent::Update(changeset, false));
             }
         }
     };
```

```diff
@@ -37,11 +37,11 @@ pub struct ExportTimings;
 impl ExportTimings {
     pub fn needs_run<F: CompilerFeat, D: typst::Document>(
         snap: &CompileSnapshot<F>,
-        timing: Option<TaskWhen>,
+        timing: Option<&TaskWhen>,
         docs: Option<&D>,
     ) -> Option<bool> {
         snap.signal
-            .should_run_task(timing.unwrap_or_default(), docs)
+            .should_run_task(timing.unwrap_or(&TaskWhen::Never), docs)
     }
 }
 
```

```diff
@@ -85,9 +85,9 @@ pub enum ProjectTask {
 
 impl ProjectTask {
     /// Returns the timing of executing the task.
-    pub fn when(&self) -> Option<TaskWhen> {
+    pub fn when(&self) -> Option<&TaskWhen> {
         Some(match self {
-            Self::Preview(task) => task.when,
+            Self::Preview(task) => &task.when,
             Self::ExportPdf(..)
             | Self::ExportPng(..)
             | Self::ExportSvg(..)
@@ -96,7 +96,7 @@ impl ProjectTask {
             | Self::ExportMd(..)
             | Self::ExportTeX(..)
             | Self::ExportText(..)
-            | Self::Query(..) => self.as_export()?.when,
+            | Self::Query(..) => &self.as_export()?.when,
         })
     }
 
```

```diff
@@ -128,7 +128,7 @@ impl PathPattern {
 
     #[comemo::memoize]
     fn substitute_impl(&self, root: Option<ImmutPath>, main: Option<FileId>) -> Option<ImmutPath> {
-        log::info!("Check path {main:?} and root {root:?} with output directory {self:?}");
+        log::debug!("Check path {main:?} and root {root:?} with output directory {self:?}");
 
         let (root, main) = root.zip(main)?;
 
```

```diff
@@ -468,45 +468,66 @@ impl<M: PathAccessModel + Sized> RevisingVfs<'_, M> {
         &mut self.inner.access_model
     }
 
-    fn invalidate_path(&mut self, path: &Path) {
+    fn invalidate_path(&mut self, path: &Path, snap: Option<&FileSnapshot>) {
         if let Some(fids) = self.paths.get(path) {
             if fids.is_empty() {
                 return;
             }
 
-            self.view_changed = true;
+            // Always changes view if snap is none.
+            self.view_changed = snap.is_none();
             for fid in fids.clone() {
-                self.invalidate_file_id(fid);
+                self.invalidate_file_id(fid, snap);
             }
         }
     }
 
-    fn invalidate_file_id(&mut self, file_id: FileId) {
-        self.view_changed = true;
+    fn invalidate_file_id(&mut self, file_id: FileId, snap: Option<&FileSnapshot>) {
+        let mut changed = false;
         self.managed.slot(file_id, |e| {
+            if let Some(snap) = snap {
+                let may_read_bytes = e.bytes.get().map(|b| &b.2);
+                match (snap, may_read_bytes) {
+                    (FileSnapshot(Ok(snap)), Some(Ok(read))) if snap == read => {
+                        return;
+                    }
+                    (FileSnapshot(Err(snap)), Some(Err(read))) if snap.as_ref() == read => {
+                        return;
+                    }
+                    _ => {}
+                }
+            }
+
             e.changed_at = self.goal_revision.get();
             e.bytes = Arc::default();
             e.source = Arc::default();
+            changed = true;
         });
+        self.view_changed = changed;
     }
 
     /// Reset the shadowing files in [`OverlayAccessModel`].
     pub fn reset_shadow(&mut self) {
         for path in self.am().inner.inner.file_paths() {
-            self.invalidate_path(&path);
+            self.invalidate_path(&path, None);
         }
         for fid in self.am().file_paths() {
-            self.invalidate_file_id(fid);
+            self.invalidate_file_id(fid, None);
         }
 
         self.am().clear_shadow();
         self.am().inner.inner.clear_shadow();
     }
 
+    /// Unconditionally changes the view of the vfs.
+    pub fn change_view(&mut self) -> FileResult<()> {
+        self.view_changed = true;
+        Ok(())
+    }
+
     /// Adds a shadowing file to the [`OverlayAccessModel`].
     pub fn map_shadow(&mut self, path: &Path, snap: FileSnapshot) -> FileResult<()> {
-        self.view_changed = true;
-        self.invalidate_path(path);
+        self.invalidate_path(path, Some(&snap));
         self.am().inner.inner.add_file(path, snap, |c| c.into());
 
         Ok(())
@@ -514,8 +535,7 @@ impl<M: PathAccessModel + Sized> RevisingVfs<'_, M> {
 
     /// Removes a shadowing file from the [`OverlayAccessModel`].
     pub fn unmap_shadow(&mut self, path: &Path) -> FileResult<()> {
-        self.view_changed = true;
-        self.invalidate_path(path);
+        self.invalidate_path(path, None);
         self.am().inner.inner.remove_file(path);
 
         Ok(())
@@ -523,8 +543,7 @@ impl<M: PathAccessModel + Sized> RevisingVfs<'_, M> {
 
     /// Adds a shadowing file to the [`OverlayAccessModel`] by file id.
     pub fn map_shadow_by_id(&mut self, file_id: FileId, snap: FileSnapshot) -> FileResult<()> {
-        self.view_changed = true;
-        self.invalidate_file_id(file_id);
+        self.invalidate_file_id(file_id, Some(&snap));
         self.am().add_file(&file_id, snap, |c| *c);
 
         Ok(())
@@ -532,8 +551,7 @@ impl<M: PathAccessModel + Sized> RevisingVfs<'_, M> {
 
     /// Removes a shadowing file from the [`OverlayAccessModel`] by file id.
     pub fn remove_shadow_by_id(&mut self, file_id: FileId) {
-        self.view_changed = true;
-        self.invalidate_file_id(file_id);
+        self.invalidate_file_id(file_id, None);
         self.am().remove_file(&file_id);
     }
 
@@ -548,10 +566,10 @@ impl<M: PathAccessModel + Sized> RevisingVfs<'_, M> {
     /// See [`NotifyAccessModel`] for more information.
     pub fn notify_fs_changes(&mut self, event: FileChangeSet) {
         for path in &event.removes {
-            self.invalidate_path(path);
+            self.invalidate_path(path, None);
         }
-        for (path, _) in &event.inserts {
-            self.invalidate_path(path);
+        for (path, snap) in &event.inserts {
+            self.invalidate_path(path, Some(snap));
         }
 
         self.am().inner.inner.inner.notify(event);
```
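
`invalidate_file_id` now receives the incoming `FileSnapshot` and leaves the cached slot untouched when the content (or error) is identical, which is what keeps the first filesystem sync from dirtying files the editor already shadows. A rough standalone model of that rule, with `Vec<u8>`/`String` standing in for tinymist's `Bytes` and `FileError` and a plain struct standing in for the managed slot:

```rust
/// Simplified stand-in for `FileSnapshot`: file contents or an error message.
type Snapshot = Result<Vec<u8>, String>;

/// Simplified stand-in for a managed VFS slot.
struct Slot {
    bytes: Option<Snapshot>,
    changed_at: u64,
}

impl Slot {
    /// Invalidate the slot unless the supplied snapshot equals what is cached.
    /// Returns whether anything actually changed (the caller uses this to set
    /// `view_changed`).
    fn invalidate(&mut self, revision: u64, snap: Option<&Snapshot>) -> bool {
        if let (Some(snap), Some(cached)) = (snap, self.bytes.as_ref()) {
            if snap == cached {
                return false;
            }
        }
        self.bytes = None;
        self.changed_at = revision;
        true
    }
}

fn main() {
    let mut slot = Slot { bytes: Some(Ok(b"= Hello".to_vec())), changed_at: 1 };
    // The first fs sync re-reports content the editor already shadowed:
    // nothing is invalidated and the view does not change.
    assert!(!slot.invalidate(2, Some(&Ok(b"= Hello".to_vec()))));
    // A genuine content change still invalidates the slot.
    assert!(slot.invalidate(3, Some(&Ok(b"= Hello, world".to_vec()))));
    assert_eq!(slot.changed_at, 3);
    // Calls without a snapshot (removals, shadow resets) invalidate unconditionally.
    let mut removed = Slot { bytes: Some(Ok(b"x".to_vec())), changed_at: 1 };
    assert!(removed.invalidate(4, None));
}
```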

```diff
@@ -41,7 +41,7 @@ pub struct UpstreamUpdateEvent {
 #[derive(Debug)]
 pub enum FilesystemEvent {
     /// Update file system files according to the given changeset
-    Update(FileChangeSet),
+    Update(FileChangeSet, /* is_sync */ bool),
     /// See [`UpstreamUpdateEvent`]
     UpstreamUpdate {
         /// New changeset produced by invalidation
@@ -60,7 +60,19 @@ impl FilesystemEvent {
                 changeset,
                 upstream_event,
             } => (changeset, upstream_event),
-            FilesystemEvent::Update(changeset) => (changeset, None),
+            FilesystemEvent::Update(changeset, ..) => (changeset, None),
         }
     }
 
+    /// Splits the filesystem event into a changeset and an optional upstream
+    /// event.
+    pub fn split_with_is_sync(self) -> (FileChangeSet, bool, Option<UpstreamUpdateEvent>) {
+        match self {
+            FilesystemEvent::UpstreamUpdate {
+                changeset,
+                upstream_event,
+            } => (changeset, false, upstream_event),
+            FilesystemEvent::Update(changeset, is_sync) => (changeset, is_sync, None),
+        }
+    }
 }
```
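
`FilesystemEvent::Update` now carries an `is_sync` flag: the notify actor sets it to `true` only for the change set it replays when dependencies are first registered via `SyncDependency`, and to `false` for ordinary watcher notifications. A compact mirror of the enum and the new `split_with_is_sync` accessor, with the changeset and upstream-event types reduced to placeholders so the snippet is self-contained:

```rust
/// Placeholder for tinymist's `FileChangeSet`.
#[allow(dead_code)]
#[derive(Debug, Default)]
struct FileChangeSet(Vec<&'static str>);

/// Placeholder for `UpstreamUpdateEvent`.
#[allow(dead_code)]
#[derive(Debug)]
struct UpstreamUpdateEvent;

#[allow(dead_code)]
#[derive(Debug)]
enum FilesystemEvent {
    /// Update file system files according to the given changeset.
    Update(FileChangeSet, /* is_sync */ bool),
    /// An invalidation pushed from an upstream source.
    UpstreamUpdate {
        changeset: FileChangeSet,
        upstream_event: Option<UpstreamUpdateEvent>,
    },
}

impl FilesystemEvent {
    /// Splits the event into its changeset, whether it came from the initial
    /// dependency sync, and an optional upstream event.
    fn split_with_is_sync(self) -> (FileChangeSet, bool, Option<UpstreamUpdateEvent>) {
        match self {
            FilesystemEvent::UpstreamUpdate { changeset, upstream_event } => {
                (changeset, false, upstream_event)
            }
            FilesystemEvent::Update(changeset, is_sync) => (changeset, is_sync, None),
        }
    }
}

fn main() {
    // The notify actor tags the `SyncDependency` replay with `true`...
    let (_, watched, _) =
        FilesystemEvent::Update(FileChangeSet::default(), true).split_with_is_sync();
    assert!(watched);
    // ...while genuine watcher notifications stay `false`.
    let (_, watched, _) =
        FilesystemEvent::Update(FileChangeSet(vec!["main.typ"]), false).split_with_is_sync();
    assert!(!watched);
}
```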

```diff
@@ -8,7 +8,7 @@ use crate::{Bytes, ImmutPath};
 ///
 /// Note: The error is boxed to avoid large stack size
 #[derive(Clone, PartialEq, Eq)]
-pub struct FileSnapshot(Result<Bytes, Box<FileError>>);
+pub struct FileSnapshot(pub(crate) Result<Bytes, Box<FileError>>);
 
 #[derive(Debug)]
 #[allow(dead_code)]
```

```diff
@@ -236,7 +236,7 @@ macro_rules! display_possible_values {
 /// alias typst="tinymist compile --when=onSave"
 /// typst compile main.typ
 /// ```
-#[derive(Debug, Copy, Clone, Eq, PartialEq, Default, Hash, ValueEnum, Serialize, Deserialize)]
+#[derive(Debug, Clone, Eq, PartialEq, Default, Hash, ValueEnum, Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
 #[clap(rename_all = "camelCase")]
 pub enum TaskWhen {
@@ -252,6 +252,8 @@ pub enum TaskWhen {
     ///
     /// Note: this is deprecating.
     OnDocumentHasTitle,
+    /// Checks by running a typst script.
+    Script,
 }
 
 impl TaskWhen {
```
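
Two things change on `TaskWhen`: it gains a `Script` variant (a timing that is meant to be decided by a typst script) and it loses the `Copy` derive, which is why call sites throughout this diff either pass `&TaskWhen` or call `.clone()`. A small sketch of the consequence, with a trimmed-down enum standing in for the real one:

```rust
/// Trimmed-down stand-in for tinymist's `TaskWhen`: `Clone`, but no longer `Copy`.
#[allow(dead_code)]
#[derive(Debug, Clone, Eq, PartialEq, Default)]
enum TaskWhen {
    #[default]
    Never,
    OnType,
    OnSave,
    OnDocumentHasTitle,
    /// The newly added, script-driven timing.
    Script,
}

/// Predicates now borrow instead of taking the value by copy.
fn is_save_driven(when: &TaskWhen) -> bool {
    matches!(when, TaskWhen::OnSave | TaskWhen::OnDocumentHasTitle)
}

/// A config struct that stores an owned timing, like `ExportTask` in this diff.
struct ExportConfig {
    when: TaskWhen,
}

fn main() {
    let configured = TaskWhen::OnSave;
    // Borrow for queries...
    assert!(is_save_driven(&configured));
    // ...and clone only at the point where an owned value is stored, which is
    // the shape of `when: self.export_pdf.clone()` further down in this diff.
    let task = ExportConfig { when: configured.clone() };
    assert_eq!(task.when, TaskWhen::OnSave);
    assert_eq!(TaskWhen::default(), TaskWhen::Never);
}
```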

```diff
@@ -46,7 +46,7 @@ impl ExportSignal {
 
     pub fn should_run_task_dyn(
         &self,
-        when: TaskWhen,
+        when: &TaskWhen,
         docs: Option<&TypstDocument>,
     ) -> Option<bool> {
         match docs {
@@ -58,15 +58,13 @@ impl ExportSignal {
 
     pub fn should_run_task<D: typst::Document>(
         &self,
-        when: TaskWhen,
+        when: &TaskWhen,
         docs: Option<&D>,
     ) -> Option<bool> {
-        if !matches!(when, TaskWhen::Never) && self.by_entry_update {
-            return Some(true);
-        }
-
         match when {
             TaskWhen::Never => Some(false),
+            // todo: by script
+            TaskWhen::Script => Some(self.by_entry_update),
             TaskWhen::OnType => Some(self.by_mem_events),
             TaskWhen::OnSave => Some(self.by_fs_events),
             TaskWhen::OnDocumentHasTitle if self.by_fs_events => {
```

```diff
@@ -498,7 +498,7 @@ impl Config {
     /// Gets the export task configuration.
     pub(crate) fn export_task(&self) -> ExportTask {
         ExportTask {
-            when: self.export_pdf,
+            when: self.export_pdf.clone(),
             output: Some(self.output_path.clone()),
             transform: vec![],
         }
@@ -813,12 +813,12 @@ pub struct LintFeat {
 
 impl LintFeat {
     /// When to trigger the lint checks.
-    pub fn when(&self) -> TaskWhen {
+    pub fn when(&self) -> &TaskWhen {
         if matches!(self.enabled, Some(false) | None) {
-            return TaskWhen::Never;
+            return &TaskWhen::Never;
         }
 
-        self.when.unwrap_or(TaskWhen::OnSave)
+        self.when.as_ref().unwrap_or(&TaskWhen::OnSave)
     }
 }
 
```
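
Since `TaskWhen` is no longer `Copy`, `LintFeat::when` switches from returning an owned value to returning `&TaskWhen`, relying on the fact that a borrow of a literal variant such as `&TaskWhen::Never` is promoted to a `'static` constant. A small sketch of that getter shape with simplified types (the real struct has more fields and serde attributes):

```rust
#[derive(Debug, Clone, Eq, PartialEq)]
enum TaskWhen {
    Never,
    OnSave,
}

/// Simplified stand-in for the lint feature config.
struct LintFeat {
    enabled: Option<bool>,
    when: Option<TaskWhen>,
}

impl LintFeat {
    /// When to trigger the lint checks. Returning a reference works even for
    /// the fallbacks: `&TaskWhen::Never` and `&TaskWhen::OnSave` are promoted
    /// to `'static` borrows, so no clone happens here.
    fn when(&self) -> &TaskWhen {
        if matches!(self.enabled, Some(false) | None) {
            return &TaskWhen::Never;
        }
        self.when.as_ref().unwrap_or(&TaskWhen::OnSave)
    }
}

fn main() {
    let disabled = LintFeat { enabled: None, when: Some(TaskWhen::OnSave) };
    assert_eq!(disabled.when(), &TaskWhen::Never);

    let default_on = LintFeat { enabled: Some(true), when: None };
    assert_eq!(default_on.when(), &TaskWhen::OnSave);

    // Callers that need ownership clone at the boundary, as in
    // `lint: config.lint.when().clone()` in the server hunk below.
    let owned: TaskWhen = default_on.when().clone();
    assert_eq!(owned, TaskWhen::OnSave);
}
```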

```diff
@@ -162,7 +162,7 @@ impl ServerState {
                 Some("dark") => tinymist_query::ColorTheme::Dark,
                 _ => tinymist_query::ColorTheme::Light,
             },
-            lint: config.lint.when(),
+            lint: config.lint.when().clone(),
             periscope: periscope_args.map(|args| {
                 let r = TypstPeriscopeProvider(PeriscopeRenderer::new(args));
                 Arc::new(r) as Arc<dyn PeriscopeProvider + Send + Sync>
@@ -209,7 +209,7 @@ impl ServerState {
             CompileServerOpts {
                 handler: compile_handle,
                 export_target: config.export_target,
                 enable_watch: true,
                 ignore_first_sync: true,
             },
         );
 
@@ -450,9 +450,9 @@ impl CompileHandlerImpl {
         let should_lint = art
             .snap
             .signal
-            .should_run_task_dyn(self.analysis.lint, art.doc.as_ref())
+            .should_run_task_dyn(&self.analysis.lint, art.doc.as_ref())
             .unwrap_or_default();
-        log::info!(
+        log::debug!(
             "Project: should_lint: {should_lint:?}, signal: {:?}",
             art.snap.signal
         );
```

```diff
@@ -68,18 +68,22 @@ impl ExportTask {
         let doc = artifact.doc.as_ref()?;
         let s = artifact.snap.signal;
 
-        let when = config.task.when().unwrap_or_default();
-        let need_export = (!matches!(when, TaskWhen::Never) && s.by_entry_update)
-            || match when {
-                TaskWhen::Never => false,
-                TaskWhen::OnType => s.by_mem_events,
-                TaskWhen::OnSave => s.by_fs_events,
-                TaskWhen::OnDocumentHasTitle => s.by_fs_events && doc.info().title.is_some(),
-            };
+        let when = config.task.when().unwrap_or(&TaskWhen::Never);
+        let need_export = match when {
+            TaskWhen::Never => false,
+            TaskWhen::Script => s.by_entry_update,
+            TaskWhen::OnType => s.by_mem_events,
+            TaskWhen::OnSave => s.by_fs_events,
+            TaskWhen::OnDocumentHasTitle => s.by_fs_events && doc.info().title.is_some(),
+        };
 
         if !need_export {
             return None;
         }
+        log::info!(
+            "ExportTask(when={when:?}): export for {} with signal: {s:?}",
+            artifact.id()
+        );
 
         let rev = artifact.world().revision().get();
         let fut = self.export_folder.spawn(rev, || {
@@ -455,7 +459,7 @@ mod tests {
     fn test_default_never() {
         let conf = ExportUserConfig::default();
         assert!(!conf.count_words);
-        assert_eq!(conf.task.when(), Some(TaskWhen::Never));
+        assert_eq!(conf.task.when(), Some(&TaskWhen::Never));
     }
 
     #[test]
```
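
Together with the `ExportSignal` hunk earlier, this is the "make real onSave export conditions" item from the commit message: the old code exported for any non-`never` timing as soon as `by_entry_update` was set, so switching the focused entry could already produce a PDF; now only the `Script` timing keys off entry updates, and `onSave` waits for actual filesystem events. A self-contained sketch of the new decision, with the signal flags and document title reduced to plain values:

```rust
#[allow(dead_code)]
#[derive(Debug, Clone, Eq, PartialEq)]
enum TaskWhen {
    Never,
    Script,
    OnType,
    OnSave,
    OnDocumentHasTitle,
}

/// Simplified stand-in for `ExportSignal`.
#[derive(Debug, Clone, Copy, Default)]
struct Signal {
    by_entry_update: bool,
    by_mem_events: bool,
    by_fs_events: bool,
}

/// Mirrors the new `need_export` match from the hunk above.
fn need_export(when: &TaskWhen, s: Signal, doc_title: Option<&str>) -> bool {
    match when {
        TaskWhen::Never => false,
        TaskWhen::Script => s.by_entry_update,
        TaskWhen::OnType => s.by_mem_events,
        TaskWhen::OnSave => s.by_fs_events,
        TaskWhen::OnDocumentHasTitle => s.by_fs_events && doc_title.is_some(),
    }
}

fn main() {
    let entry_changed = Signal { by_entry_update: true, ..Default::default() };
    let saved = Signal { by_fs_events: true, ..Default::default() };

    // An entry change alone no longer forces an onSave export...
    assert!(!need_export(&TaskWhen::OnSave, entry_changed, None));
    // ...a save (filesystem event) does.
    assert!(need_export(&TaskWhen::OnSave, saved, None));

    // onDocumentHasTitle additionally requires a titled document.
    assert!(!need_export(&TaskWhen::OnDocumentHasTitle, saved, None));
    assert!(need_export(&TaskWhen::OnDocumentHasTitle, saved, Some("Report")));

    // Only the new Script timing reacts to entry updates.
    assert!(need_export(&TaskWhen::Script, entry_changed, None));
}
```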

```diff
@@ -25,37 +25,42 @@ impl ProjectCompilation {
     pub fn preconfig_timings<F: CompilerFeat>(graph: &Arc<WorldComputeGraph<F>>) -> Result<bool> {
         // todo: configure run_diagnostics!
         let paged_diag = Some(TaskWhen::OnType);
+        let paged_diag2 = Some(TaskWhen::Script);
         let html_diag = Some(TaskWhen::Never);
 
         let pdf: Option<TaskWhen> = graph
             .get::<ConfigTask<<PdfExport as ExportComputation<LspCompilerFeat, _>>::Config>>()
             .transpose()?
-            .map(|config| config.export.when);
+            .map(|config| config.export.when.clone());
         let svg: Option<TaskWhen> = graph
             .get::<ConfigTask<<SvgExport as ExportComputation<LspCompilerFeat, _>>::Config>>()
             .transpose()?
-            .map(|config| config.export.when);
+            .map(|config| config.export.when.clone());
         let png: Option<TaskWhen> = graph
             .get::<ConfigTask<<PngExport as ExportComputation<LspCompilerFeat, _>>::Config>>()
             .transpose()?
-            .map(|config| config.export.when);
+            .map(|config| config.export.when.clone());
         let html: Option<TaskWhen> = graph
             .get::<ConfigTask<<HtmlExport as ExportComputation<LspCompilerFeat, _>>::Config>>()
             .transpose()?
-            .map(|config| config.export.when);
+            .map(|config| config.export.when.clone());
         let md: Option<TaskWhen> = graph
             .get::<ConfigTask<ExportTeXTask>>()
             .transpose()?
-            .map(|config| config.export.when);
+            .map(|config| config.export.when.clone());
         let text: Option<TaskWhen> = graph
             .get::<ConfigTask<<TextExport as ExportComputation<LspCompilerFeat, _>>::Config>>()
             .transpose()?
-            .map(|config| config.export.when);
+            .map(|config| config.export.when.clone());
 
         let doc = None::<TypstPagedDocument>.as_ref();
-        let check = |timing| ExportTimings::needs_run(&graph.snap, timing, doc).unwrap_or(true);
+        let check = |timing: Option<TaskWhen>| {
+            ExportTimings::needs_run(&graph.snap, timing.as_ref(), doc).unwrap_or(true)
+        };
 
-        let compile_paged = [paged_diag, pdf, svg, png, text, md].into_iter().any(check);
+        let compile_paged = [paged_diag, paged_diag2, pdf, svg, png, text, md]
+            .into_iter()
+            .any(check);
         let compile_html = [html_diag, html].into_iter().any(check);
 
         let _ = graph.provide::<FlagTask<PagedCompilationTask>>(Ok(FlagTask::flag(compile_paged)));
@@ -83,7 +88,7 @@ impl ProjectExport {
         T: ExportComputation<LspCompilerFeat, D, Output = Bytes>,
     >(
         graph: &Arc<WorldComputeGraph<LspCompilerFeat>>,
-        when: Option<TaskWhen>,
+        when: Option<&TaskWhen>,
         config: &T::Config,
     ) -> Result<Option<Bytes>> {
         let doc = graph.compute::<OptionDocumentTask<D>>()?;
@@ -102,7 +107,7 @@ impl ProjectExport {
         T: ExportComputation<LspCompilerFeat, D, Output = String>,
     >(
         graph: &Arc<WorldComputeGraph<LspCompilerFeat>>,
-        when: Option<TaskWhen>,
+        when: Option<&TaskWhen>,
         config: &T::Config,
     ) -> Result<Option<Bytes>> {
         let doc = graph.compute::<OptionDocumentTask<D>>()?;
```
|
|||
.map(|t| Id::new(t.clone()))
|
||||
.unwrap_or(doc_id.clone());
|
||||
|
||||
let when = args.when.unwrap_or(TaskWhen::OnType);
|
||||
let when = args.when.clone().unwrap_or(TaskWhen::OnType);
|
||||
let task = ProjectTask::Preview(PreviewTask { when });
|
||||
let task = ApplyProjectTask {
|
||||
id: task_id.clone(),
|
||||
|
@ -472,7 +472,7 @@ where
|
|||
CompileServerOpts {
|
||||
handler: compile_handle,
|
||||
export_target: opts.export_target,
|
||||
enable_watch: true,
|
||||
ignore_first_sync: true,
|
||||
},
|
||||
);
|
||||
|
||||
|
|
|

```diff
@@ -12,7 +12,9 @@ RUN git clone --filter=blob:none --branch stable https://github.com/neovim/neovi
 USER root
 RUN cd neovim/build && cpack -G DEB && dpkg -i nvim-linux-x86_64.deb
 
-FROM myriaddreamin/tinymist:0.13.14
+FROM myriaddreamin/tinymist:0.13.14 as tinymist
+
+FROM debian:12
 
 COPY --from=builder /neovim/build/nvim-linux-x86_64.deb /tmp/nvim-linux-x86_64.deb
 RUN apt-get update && apt-get install -y curl git ripgrep build-essential unzip
@@ -27,6 +29,10 @@ WORKDIR /home/runner
 RUN for dependency in AndrewRadev/switch.vim andymass/vim-matchup neovim/nvim-lspconfig nvim-lua/plenary.nvim tomtom/tcomment_vim lewis6991/satellite.nvim; do git clone --quiet --filter=blob:none "https://github.com/$dependency" "packpath/$(basename $dependency)"; done
 RUN for dependency in Julian/inanis.nvim; do git clone --quiet --filter=blob:none "https://github.com/$dependency" "packpath/$(basename $dependency)"; done
 
+USER root
+COPY --from=tinymist /usr/local/bin/tinymist /usr/local/bin/tinymist
+USER runner
+
 ENV XDG_CONFIG_HOME=/home/runner/.config
 ENV XDG_DATA_HOME=/home/runner/.local/share
 ENV XDG_STATE_HOME=/home/runner/.local/state
```
|
|||
```
|
||||
|
||||
## Contributing
|
||||
|
||||
Please check the [contributing guide](/editors/neovim/CONTRIBUTING.md) for more information on how to contribute to the project.
|
||||
|
|
|
@ -14,6 +14,12 @@ else
|
|||
exit 1
|
||||
fi
|
||||
|
||||
cd ../.. && docker build -t myriaddreamin/tinymist:0.13.14 .
|
||||
(cd ../.. && docker build -t myriaddreamin/tinymist:0.13.14 .)
|
||||
docker build -t myriaddreamin/tinymist-nvim:0.13.14 .
|
||||
docker run --rm -it -v $PWD/../../tests/workspaces:/home/runner/dev/workspaces -v $PWD:/home/runner/dev -v $PWD/target/.local:/home/runner/.local -v $PWD/target/.cache:/home/runner/.cache -w /home/runner/dev myriaddreamin/tinymist-nvim:0.13.14 $DOCKER_ARGS
|
||||
docker run --rm -it \
|
||||
-v $PWD/../../tests/workspaces:/home/runner/dev/workspaces \
|
||||
-v $PWD:/home/runner/dev \
|
||||
-v $PWD/target/.local:/home/runner/.local \
|
||||
-v $PWD/target/.cache:/home/runner/.cache \
|
||||
-w /home/runner/dev myriaddreamin/tinymist-nvim:0.13.14 \
|
||||
$DOCKER_ARGS
|
||||
|
|

editors/neovim/spec/export_spec.lua (new file, 31 lines):

```lua
---@brief [[
--- Tests for export functionalities.
---@brief ]]

local fixtures = require 'spec.fixtures'
local helpers = require 'spec.helpers'

require('tinymist').setup {
  lsp = {
    init_options = {
      exportPdf = 'onSave',
      systemFonts = false,
    },
  }
}

describe('Export', function()
  assert.is.empty(vim.lsp.get_clients { bufnr = 0, name = 'tinymist', _uninitialized = true })

  it('no pdf is created onSave', function()
    vim.cmd.edit(fixtures.project.some_existing_file)
    assert.is.same(1, #vim.lsp.get_clients { bufnr = 0, name = 'tinymist', _uninitialized = true })
    --- append a text to current buffer
    helpers.insert('This is a test export.\n')
    -- sleep 300ms
    vim.cmd.sleep('300m')
    -- there *must not be* a pdf file created, because we only export on save
    local pdf_path = fixtures.project.some_existing_file:gsub('%.typ$', '.pdf')
    assert.is.same(nil, vim.uv.fs_stat(pdf_path), 'PDF file should not be created without saving because exportPdf = onSave')
  end)
end)
```

```diff
@@ -1,64 +1,44 @@
-# -- packpath := justfile_directory() / "packpath"
-# -- scripts := justfile_directory() / "scripts"
-# -- doc := justfile_directory() / "doc"
-# -- src := justfile_directory() / "lua"
-# -- lean := src / "lean"
-# -- spec := justfile_directory() / "spec"
-# -- fixtures := spec / "fixtures"
-# -- demos := justfile_directory() / "demos"
-
-# -- init_lua := scripts / "minimal_init.lua"
-# -- clean_config := justfile_directory() / ".test-config"
-
-# -- # Rebuild some test fixtures used in the test suite.
-# -- _rebuild-test-fixtures:
-# -- cd "{{ fixtures }}/example-project/"; lake build && lake build ProofWidgets Mathlib.Tactic.Widget.Conv
-
-# -- # Run the lean.nvim test suite.
-# -- [group('testing')]
-# -- test: _rebuild-test-fixtures _clone-test-dependencies
-# -- @just retest
-
-# -- # Run the test suite without rebuilding or recloning any dependencies.
-# -- [group('testing')]
-# -- retest *test_files=spec:
-# -- nvim --headless --clean -u {{ init_lua }} -c 'lua require("inanis").run{ specs = vim.split("{{ test_files }}", " "), minimal_init = "{{ init_lua }}", sequential = vim.env.TEST_SEQUENTIAL ~= nil }'
-
-
 import os
 import subprocess
 import sys
 
 
 def run_tests(test_files=None):
     """
     Run the Neovim test suite with the specified test files.
     If no test files are specified, it runs all tests in the 'spec' directory.
     """
-    init_lua = os.path.realpath(os.path.join(__file__, '../../scripts/minimal_init.lua'))
-
+    init_lua = os.path.realpath(
+        os.path.join(__file__, "../../scripts/minimal_init.lua")
+    )
 
     if test_files is None:
         # all test files in the 'spec' directory
         test_files = []
         for root, _, files in os.walk(os.path.dirname(__file__)):
-            test_files.extend( os.path.join(root, f) for f in files if f.endswith('_spec.lua') )
-        test_files = ' '.join(test_files)
+            test_files.extend(
+                os.path.join(root, f) for f in files if f.endswith("spec.lua")
+            )
+        test_files = " ".join(test_files)
 
     command = [
-        'nvim',
-        '--headless',
-        '--clean',
-        '-u', init_lua,
-        '-c', f'lua require("inanis").run{{ specs = vim.split("{test_files}", " "), minimal_init = "{init_lua}", sequential = vim.env.TEST_SEQUENTIAL ~= nil }}'
+        "nvim",
+        "--headless",
+        "--clean",
+        "-u",
+        init_lua,
+        "-c",
+        f'lua require("inanis").run{{ specs = vim.split("{test_files}", " "), minimal_init = "{init_lua}", sequential = vim.env.TEST_SEQUENTIAL ~= nil }}',
     ]
 
     subprocess.run(command, check=True)
 
 
 if __name__ == "__main__":
     # Check if any test files are provided as command line arguments
     if len(sys.argv) > 1:
-        test_files = ' '.join(sys.argv[1:])
+        test_files = " ".join(sys.argv[1:])
     else:
         test_files = None
 
     run_tests(test_files)
```

editors/neovim/spec/never_export_spec.lua (new file, 31 lines):

```lua
---@brief [[
--- Tests for export functionalities.
---@brief ]]

local fixtures = require 'spec.fixtures'
local helpers = require 'spec.helpers'

require('tinymist').setup {
  lsp = {
    init_options = {
      exportPdf = 'onSave',
      systemFonts = false,
    },
  }
}

describe('Export', function()
  assert.is.empty(vim.lsp.get_clients { bufnr = 0, name = 'tinymist', _uninitialized = true })

  it('no pdf is created never', function()
    vim.cmd.edit(fixtures.project.some_existing_file)
    assert.is.same(1, #vim.lsp.get_clients { bufnr = 0, name = 'tinymist', _uninitialized = true })
    --- append a text to current buffer
    helpers.insert('This is a test export.\n')
    -- sleep 300ms
    vim.cmd.sleep('300m')
    -- there *must not be* a pdf file created, because we only export on save
    local pdf_path = fixtures.project.some_existing_file:gsub('%.typ$', '.pdf')
    assert.is.same(nil, vim.uv.fs_stat(pdf_path), 'PDF file should not be created without saving because exportPdf = never')
  end)
end)
```