fix: improve release profile & fix typos (#177)

* dev: split out a gh-release profile to accelerate development (see the profile sketch below)

* fix: many typos

* fix: miri warnings

* fix: update fixtures
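
The core of the change is a profile split in the workspace Cargo.toml. Below is a rough sketch of the resulting profiles, reconstructed from the Cargo.toml hunk in this commit (comments paraphrased, formatting approximate): everyday development keeps the lighter `release` profile, while CI opts into the heavier settings through a dedicated `gh-release` profile.

```toml
# Sketch reconstructed from the Cargo.toml hunk in this commit.
[profile.release]
debug = true         # keep debug info; day-to-day builds stay fast

[profile.gh-release]  # what CI now builds: cargo build --profile=gh-release -p tinymist
inherits = "release"
codegen-units = 1    # fewer codegen units, more optimization, slower builds
panic = "abort"      # abort on panic
```

As a consequence, binaries and debug symbols move from `target/<target>/release/` to `target/<target>/gh-release/`, which is why every artifact path in the release workflows changes below.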
QuarticCat 2024-04-10 11:34:18 +08:00 committed by GitHub
parent e7acb31a54
commit ebed95cbcd
22 changed files with 159 additions and 71 deletions


@@ -114,53 +114,53 @@ jobs:
- name: Build tinymist binary
shell: pwsh
run: |
-cargo build --release -p tinymist --target ${{ matrix.rust-target }}
+cargo build --profile=gh-release -p tinymist --target ${{ matrix.rust-target }}
if: startsWith(github.ref, 'refs/tags/') || matrix.regular_build == 'true'
- name: Rename debug symbols for windows
if: matrix.platform == 'win32' && (startsWith(github.ref, 'refs/tags/') || matrix.regular_build == 'true')
run: |
-cd target/${{ matrix.rust-target }}/release
+cd target/${{ matrix.rust-target }}/gh-release
cp tinymist.pdb tinymist-${{ env.target }}.pdb
- name: Split debug symbols for linux
if: matrix.platform == 'linux' && (startsWith(github.ref, 'refs/tags/') || matrix.regular_build == 'true')
run: |
-cd target/${{ matrix.rust-target }}/release
+cd target/${{ matrix.rust-target }}/gh-release
llvm-objcopy --compress-debug-sections --only-keep-debug "tinymist" "tinymist-${{ env.target }}.debug"
llvm-objcopy --strip-debug --add-gnu-debuglink="tinymist-${{ env.target }}.debug" "tinymist"
- name: Collect debug symbols for mac
if: matrix.platform == 'darwin' && (startsWith(github.ref, 'refs/tags/') || matrix.regular_build == 'true')
run: |
-dsymutil -f "target/${{ matrix.rust-target }}/release/tinymist"
+dsymutil -f "target/${{ matrix.rust-target }}/gh-release/tinymist"
-mv "target/${{ matrix.rust-target }}/release/tinymist.dwarf" "target/${{ matrix.rust-target }}/release/tinymist-${{ env.target }}.dwarf"
+mv "target/${{ matrix.rust-target }}/gh-release/tinymist.dwarf" "target/${{ matrix.rust-target }}/gh-release/tinymist-${{ env.target }}.dwarf"
- name: Copy binary to output directory
if: (startsWith(github.ref, 'refs/tags/') || matrix.regular_build == 'true')
shell: pwsh
run: |
-cp "target/${{ matrix.rust-target }}/release/tinymist$(If ('${{ matrix.platform }}' -eq 'win32') { '.exe' } else { '' } )" "editors/vscode/out/"
+cp "target/${{ matrix.rust-target }}/gh-release/tinymist$(If ('${{ matrix.platform }}' -eq 'win32') { '.exe' } else { '' } )" "editors/vscode/out/"
-cp "target/${{ matrix.rust-target }}/release/tinymist$(If ('${{ matrix.platform }}' -eq 'win32') { '.exe' } else { '' } )" "tinymist-${{ env.target }}$(If ('${{ matrix.platform }}' -eq 'win32') { '.exe' } else { '' } )"
+cp "target/${{ matrix.rust-target }}/gh-release/tinymist$(If ('${{ matrix.platform }}' -eq 'win32') { '.exe' } else { '' } )" "tinymist-${{ env.target }}$(If ('${{ matrix.platform }}' -eq 'win32') { '.exe' } else { '' } )"
- name: Test tinymist
run: |
-cargo test --release --workspace --target ${{ matrix.rust-target }}
+cargo test --profile=gh-release --workspace --target ${{ matrix.rust-target }}
if: (startsWith(github.ref, 'refs/tags/') || matrix.regular_build == 'true') && (matrix.platform == 'linux') && (matrix.arch == 'x64')
- name: Upload split debug symbols for windows
if: matrix.platform == 'win32' && (startsWith(github.ref, 'refs/tags/') || matrix.regular_build == 'true')
uses: actions/upload-artifact@v4
with:
name: tinymist-${{ env.target }}.pdb
-path: target/${{ matrix.rust-target }}/release/tinymist-${{ env.target }}.pdb
+path: target/${{ matrix.rust-target }}/gh-release/tinymist-${{ env.target }}.pdb
- name: Upload split debug symbols for linux
if: matrix.platform == 'linux' && (startsWith(github.ref, 'refs/tags/') || matrix.regular_build == 'true')
uses: actions/upload-artifact@v4
with:
name: tinymist-${{ env.target }}.debug
-path: target/${{ matrix.rust-target }}/release/tinymist-${{ env.target }}.debug
+path: target/${{ matrix.rust-target }}/gh-release/tinymist-${{ env.target }}.debug
compression-level: 0
- name: Upload split debug symbols for mac
if: matrix.platform == 'darwin' && (startsWith(github.ref, 'refs/tags/') || matrix.regular_build == 'true')
uses: actions/upload-artifact@v4
with:
name: tinymist-${{ env.target }}.dwarf
-path: target/${{ matrix.rust-target }}/release/tinymist-${{ env.target }}.dwarf
+path: target/${{ matrix.rust-target }}/gh-release/tinymist-${{ env.target }}.dwarf
- name: Upload binary artifact
if: (startsWith(github.ref, 'refs/tags/') || matrix.regular_build == 'true')
uses: actions/upload-artifact@v4
@@ -209,22 +209,22 @@ jobs:
working-directory: ./editors/vscode
- name: Build tinymist binary
run: |
-cargo build --release -p tinymist --target $RUST_TARGET
+cargo build --profile=gh-release -p tinymist --target $RUST_TARGET
- name: Split debug symbols
run: |
-cd target/$RUST_TARGET/release
+cd target/$RUST_TARGET/gh-release
objcopy --compress-debug-sections --only-keep-debug "tinymist" "tinymist-${{ env.target }}.debug"
objcopy --strip-debug --add-gnu-debuglink="tinymist-${{ env.target }}.debug" "tinymist"
- name: Upload split debug symbols
uses: actions/upload-artifact@v4
with:
name: tinymist-${{ env.target }}.debug
-path: target/${{ env.RUST_TARGET }}/release/tinymist-${{ env.target }}.debug
+path: target/${{ env.RUST_TARGET }}/gh-release/tinymist-${{ env.target }}.debug
- name: Copy binary to output directory
run: |
mkdir -p editors/vscode/out
-cp "target/${{ env.RUST_TARGET }}/release/tinymist" "editors/vscode/out/"
+cp "target/${{ env.RUST_TARGET }}/gh-release/tinymist" "editors/vscode/out/"
-cp "target/${{ env.RUST_TARGET }}/release/tinymist" "tinymist-${{ env.target }}"
+cp "target/${{ env.RUST_TARGET }}/gh-release/tinymist" "tinymist-${{ env.target }}"
- name: Upload binary artifact
uses: actions/upload-artifact@v4
with:

.vscode/launch.json (vendored, 76 lines changed)

@@ -26,6 +26,82 @@
"${workspaceFolder}/editors/vscode/out/**/*.js"
],
"preLaunchTask": "VS Code Extension Prelaunch [Jaeger]"
},
{
"type": "lldb",
"request": "launch",
"name": "Debug unit tests in executable 'tinymist'",
"cargo": {
"args": [
"test",
"--no-run",
"--bin=tinymist",
"--package=tinymist"
],
"filter": {
"name": "tinymist",
"kind": "bin"
}
},
"args": [],
"cwd": "${workspaceFolder}"
},
{
"type": "lldb",
"request": "launch",
"name": "Debug unit tests in library 'tinymist-query'",
"cargo": {
"args": [
"test",
"--no-run",
"--lib",
"--package=tinymist-query"
],
"filter": {
"name": "tinymist-query",
"kind": "lib"
}
},
"args": [],
"cwd": "${workspaceFolder}"
},
{
"type": "lldb",
"request": "launch",
"name": "Debug unit tests in library 'tinymist-render'",
"cargo": {
"args": [
"test",
"--no-run",
"--lib",
"--package=tinymist-render"
],
"filter": {
"name": "tinymist-render",
"kind": "lib"
}
},
"args": [],
"cwd": "${workspaceFolder}"
},
{
"type": "lldb",
"request": "launch",
"name": "Debug integration test 'tinymist-e2e-tests'",
"cargo": {
"args": [
"test",
"--no-run",
"--test=tinymist-e2e-tests",
"--package=tests"
],
"filter": {
"name": "tinymist-e2e-tests",
"kind": "test"
}
},
"args": [],
"cwd": "${workspaceFolder}"
}
]
}


@@ -96,11 +96,13 @@ opt-level = 3
opt-level = 3
[profile.release]
-# lto = true # Enable link-time optimization
debug = true
-opt-level = 3 # Optimize for speed
+[profile.gh-release]
+inherits = "release"
+# lto = true # Enable link-time optimization
codegen-units = 1 # Reduce number of codegen units to increase optimizations
-panic = 'abort' # Abort on panic
+panic = "abort" # Abort on panic
[workspace.lints.rust]
missing_docs = "warn"


@@ -78,7 +78,7 @@ impl Analysis {
.modules
.values()
.map(|v| {
-v.def_use_lexical_heirarchy
+v.def_use_lexical_hierarchy
.output
.as_ref()
.map_or(0, |e| e.iter().map(|e| e.estimated_memory()).sum())
@@ -145,13 +145,13 @@ impl<Inputs, Output> ComputingNode<Inputs, Output> {
///
/// You should not holds across requests, because source code may change.
pub struct ModuleAnalysisGlobalCache {
-def_use_lexical_heirarchy: ComputingNode<Source, EcoVec<LexicalHierarchy>>,
+def_use_lexical_hierarchy: ComputingNode<Source, EcoVec<LexicalHierarchy>>,
}
impl Default for ModuleAnalysisGlobalCache {
fn default() -> Self {
Self {
-def_use_lexical_heirarchy: ComputingNode::new("def_use_lexical_heirarchy"),
+def_use_lexical_hierarchy: ComputingNode::new("def_use_lexical_hierarchy"),
}
}
}
@@ -172,7 +172,7 @@ pub struct AnalysisCaches {
}
/// The resources for analysis.
-pub trait AnaylsisResources {
+pub trait AnalysisResources {
/// Get the world surface for Typst compiler.
fn world(&self) -> &dyn World;
@@ -201,7 +201,7 @@ pub trait AnaylsisResources {
/// The context for analyzers.
pub struct AnalysisContext<'a> {
/// The world surface for Typst compiler
-pub resources: &'a dyn AnaylsisResources,
+pub resources: &'a dyn AnalysisResources,
/// The analysis data
pub analysis: CowMut<'a, Analysis>,
caches: AnalysisCaches,
@@ -209,7 +209,7 @@ pub struct AnalysisContext<'a> {
impl<'w> AnalysisContext<'w> {
/// Create a new analysis context.
-pub fn new(resources: &'w dyn AnaylsisResources, a: Analysis) -> Self {
+pub fn new(resources: &'w dyn AnalysisResources, a: Analysis) -> Self {
Self {
resources,
analysis: CowMut::Owned(a),
@@ -218,7 +218,7 @@ impl<'w> AnalysisContext<'w> {
}
/// Create a new analysis context with borrowing the analysis data.
-pub fn new_borrow(resources: &'w dyn AnaylsisResources, a: &'w mut Analysis) -> Self {
+pub fn new_borrow(resources: &'w dyn AnalysisResources, a: &'w mut Analysis) -> Self {
Self {
resources,
analysis: CowMut::Borrowed(a),
@@ -355,7 +355,7 @@ impl<'w> AnalysisContext<'w> {
.modules
.entry(source.id())
.or_default()
-.def_use_lexical_heirarchy
+.def_use_lexical_hierarchy
.compute(source, |_before, after| {
crate::syntax::get_lexical_hierarchy(after, crate::syntax::LexicalScopeKind::DefUse)
})


@@ -310,7 +310,6 @@ fn complete_path(
#[cfg(test)]
mod tests {
use insta::with_settings;
-use lsp_types::{CompletionItem, CompletionList};
use super::*;
use crate::tests::*;


@@ -1,4 +1,4 @@
-use crate::{path_to_url, prelude::*};
+use crate::prelude::*;
/// Stores diagnostics for files.
pub type DiagnosticsMap = HashMap<Url, Vec<LspDiagnostic>>;


@@ -1,6 +1,6 @@
---
source: crates/tinymist-query/src/inlay_hint.rs
expression: "JsonRepr::new_redacted(result, &REDACT_LOC)"
-input_file: crates/tinymist-query/src/fixtures/inlay_hints/imcomplete-expression.typ
+input_file: crates/tinymist-query/src/fixtures/inlay_hints/incomplete-expression.typ
---
[]


@@ -1,7 +1,6 @@
use std::ops::Range;
use log::debug;
-use lsp_types::LocationLink;
use crate::{
prelude::*,
@@ -70,7 +69,7 @@ impl SemanticRequest for GotoDeclarationRequest {
});
}
-debug!("goto_declartion: {links:?}");
+debug!("goto_declaration: {links:?}");
Some(GotoDeclarationResponse::Link(links))
}
}


@@ -1,7 +1,6 @@
use std::ops::Range;
use log::debug;
-use typst::foundations::Value;
use typst::syntax::FileId as TypstFileId;
use crate::{
@@ -122,7 +121,7 @@ pub(crate) fn find_definition(
}
};
-// syntatic definition
+// syntactic definition
let def_use = ctx.def_use(source)?;
let ident_ref = match use_site.cast::<ast::Expr>()? {
ast::Expr::Ident(e) => IdentRef {


@@ -175,7 +175,7 @@ mod polymorphic {
pub struct ServerInfoRequest {}
#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct ServerInfoReponse {
+pub struct ServerInfoResponse {
pub root: Option<PathBuf>,
#[serde(rename = "fontPaths")]
pub font_paths: Vec<PathBuf>,
@@ -188,7 +188,7 @@ mod polymorphic {
pub enum FoldRequestFeature {
PinnedFirst,
Unique,
-Mergable,
+Mergeable,
ContextFreeUnique,
}
@@ -224,8 +224,8 @@ mod polymorphic {
pub fn fold_feature(&self) -> FoldRequestFeature {
use FoldRequestFeature::*;
match self {
-CompilerQueryRequest::OnExport(..) => Mergable,
+CompilerQueryRequest::OnExport(..) => Mergeable,
-CompilerQueryRequest::OnSaveExport(..) => Mergable,
+CompilerQueryRequest::OnSaveExport(..) => Mergeable,
CompilerQueryRequest::Hover(..) => PinnedFirst,
CompilerQueryRequest::GotoDefinition(..) => PinnedFirst,
CompilerQueryRequest::GotoDeclaration(..) => PinnedFirst,
@@ -234,12 +234,12 @@ mod polymorphic {
CompilerQueryRequest::DocumentColor(..) => PinnedFirst,
CompilerQueryRequest::ColorPresentation(..) => ContextFreeUnique,
CompilerQueryRequest::CodeLens(..) => Unique,
-CompilerQueryRequest::Completion(..) => Mergable,
+CompilerQueryRequest::Completion(..) => Mergeable,
CompilerQueryRequest::SignatureHelp(..) => PinnedFirst,
-CompilerQueryRequest::Rename(..) => Mergable,
+CompilerQueryRequest::Rename(..) => Mergeable,
-CompilerQueryRequest::PrepareRename(..) => Mergable,
+CompilerQueryRequest::PrepareRename(..) => Mergeable,
CompilerQueryRequest::DocumentSymbol(..) => ContextFreeUnique,
-CompilerQueryRequest::Symbol(..) => Mergable,
+CompilerQueryRequest::Symbol(..) => Mergeable,
CompilerQueryRequest::SemanticTokensFull(..) => ContextFreeUnique,
CompilerQueryRequest::SemanticTokensDelta(..) => ContextFreeUnique,
CompilerQueryRequest::Formatting(..) => ContextFreeUnique,
@@ -247,7 +247,7 @@ mod polymorphic {
CompilerQueryRequest::SelectionRange(..) => ContextFreeUnique,
CompilerQueryRequest::DocumentMetrics(..) => PinnedFirst,
-CompilerQueryRequest::ServerInfo(..) => Mergable,
+CompilerQueryRequest::ServerInfo(..) => Mergeable,
}
}
@@ -306,7 +306,7 @@ mod polymorphic {
SelectionRange(Option<Vec<SelectionRange>>),
DocumentMetrics(Option<DocumentMetricsResponse>),
-ServerInfo(Option<HashMap<String, ServerInfoReponse>>),
+ServerInfo(Option<HashMap<String, ServerInfoResponse>>),
}
}


@@ -372,8 +372,6 @@ mod test {
use lsp_types::Position;
use typst::syntax::Source;
-use crate::{lsp_to_typst, PositionEncoding};
use super::*;
#[test]


@@ -45,7 +45,7 @@ pub fn find_source_by_expr(
current: TypstFileId,
e: ast::Expr,
) -> Option<Source> {
-// todo: this could be vaild: import("path.typ"), where v is parenthesized
+// todo: this could be valid: import("path.typ"), where v is parenthesized
match e {
ast::Expr::Str(s) => find_source_by_import_path(world, current, s.get().as_str()),
_ => None,


@@ -1,4 +1,4 @@
-use std::{collections::HashMap, sync::Once};
+use std::sync::Once;
use super::find_imports;
use crate::prelude::*;


@@ -27,14 +27,14 @@ pub use insta::assert_snapshot;
pub use typst_ts_compiler::TypstSystemWorld;
use crate::{
-analysis::{Analysis, AnaylsisResources},
+analysis::{Analysis, AnalysisResources},
prelude::AnalysisContext,
typst_to_lsp, LspPosition, PositionEncoding,
};
struct WrapWorld<'a>(&'a mut TypstSystemWorld);
-impl<'a> AnaylsisResources for WrapWorld<'a> {
+impl<'a> AnalysisResources for WrapWorld<'a> {
fn world(&self) -> &dyn typst::World {
self.0
}


@@ -1,14 +1,14 @@
//! The typst actors running compilations.
//!
//! ```ascii
//! ┌────────────────────────────────┐
//! │ main::compile_actor (client)│
//! └─────┬────────────────────▲─────┘
//! │ │
//! ┌─────▼────────────────────┴─────┐ ┌────────────┐
//! │compiler::compile_actor (server)│◄───────►│notify_actor│
//! └─────┬────────────────────▲─────┘ └────────────┘
//! │ │
//! ┌─────▼────────────────────┴─────┐ handler ┌────────────┐
//! │compiler::compile_driver ├────────►│ rest actors│
//! └────────────────────────────────┘ └────────────┘
@@ -16,7 +16,7 @@
//!
//! We generally use typst in two ways.
//! + creates a [`CompileDriver`] and run compilation in fly.
-//! + creates a [`CompileServerActor`], wraps the drvier, and runs
+//! + creates a [`CompileServerActor`], wraps the driver, and runs
//! [`CompileDriver`] incrementally.
//!
//! For latter case, an additional [`CompileClientActor`] is created to
@@ -36,8 +36,8 @@ use anyhow::anyhow;
use log::{error, info, trace};
use parking_lot::Mutex;
use tinymist_query::{
-analysis::{Analysis, AnalysisContext, AnaylsisResources},
+analysis::{Analysis, AnalysisContext, AnalysisResources},
-DiagnosticsMap, ExportKind, ServerInfoReponse, VersionedDocument,
+DiagnosticsMap, ExportKind, ServerInfoResponse, VersionedDocument,
};
use tinymist_render::PeriscopeRenderer;
use tokio::sync::{broadcast, mpsc, oneshot, watch};
@@ -245,7 +245,7 @@ impl CompileDriver {
struct WrapWorld<'a>(&'a mut LspWorld, &'a PeriscopeRenderer);
-impl<'a> AnaylsisResources for WrapWorld<'a> {
+impl<'a> AnalysisResources for WrapWorld<'a> {
fn world(&self) -> &dyn typst::World {
self.0
}
@@ -433,12 +433,12 @@ impl CompileClientActor {
});
}
-pub fn collect_server_info(&self) -> anyhow::Result<HashMap<String, ServerInfoReponse>> {
+pub fn collect_server_info(&self) -> anyhow::Result<HashMap<String, ServerInfoResponse>> {
let dg = self.diag_group.clone();
let res = self.steal(move |c| {
let cc = &c.compiler.compiler;
-let info = ServerInfoReponse {
+let info = ServerInfoResponse {
root: cc.world().entry.root().map(|e| e.as_ref().to_owned()),
font_paths: cc.world().font_resolver.font_paths().to_owned(),
inputs: cc.world().inputs.as_ref().deref().clone(),


@@ -261,7 +261,7 @@ impl ProtocolError {
ProtocolError("disconnected channel".into(), true)
}
-/// Whether this error occured due to a disconnected channel.
+/// Whether this error occurred due to a disconnected channel.
pub fn channel_is_disconnected(&self) -> bool {
self.1
}


@@ -46,7 +46,7 @@ Please see [Extra Settings](#extra-settings) for more configuration.
### Working with Multiple-File Projects
-The solution is a bit internal, which should get futher improvement, but you can pin a main file by command.
+The solution is a bit internal, which should get further improvement, but you can pin a main file by command.
```lua
# pin the main file


@@ -27,7 +27,7 @@ const PAREN_BLOCK = generatePattern(6, "\\(", "\\)");
const CODE_BLOCK = generatePattern(6, "\\{", "\\}");
const BRACE_FREE_EXPR = /[^\s\}\{\[\]][^\}\{\[\]]*/.source;
-// todo: This is invokable
+// todo: This is invocable
const codeBlock: textmate.Pattern = {
// name: "meta.block.continuous.typst",
begin: /\{/,
@@ -534,7 +534,7 @@ const expression = (): textmate.Grammar => {
},
},
/// parentheisized expressions: (...)
-// todo: This is invokable
+// todo: This is invocable
{
begin: /\(/,
end: /\)/,


@@ -441,7 +441,7 @@ export const SymbolPicker = () => {
),
div({ style: "flex: 1;" }, (_dom?: Element) =>
div(
-...catergorize(
+...categorize(
CATEGORY_INFO,
pickSymbols(pickers.val, filteredPickers.val)
)
@@ -452,7 +452,7 @@ export const SymbolPicker = () => {
);
};
-function catergorize(
+function categorize(
catsRaw: SymbolCategory[],
symInfo: InstantiatedSymbolItem[]
): InstantiatedSymbolCategory[] {


@@ -8,7 +8,7 @@ import { Diagnostics } from "./features/diagnostics";
import { SymbolPicker } from "./features/symbol-picker";
/// The components that can be rendered by the frontend.
-/// Typicially, each component corresponds to a single tool (Application).
+/// Typically, each component corresponds to a single tool (Application).
type PageComponent =
| "template-gallery"
| "tracing"


@@ -2678,8 +2678,16 @@ std-env@^3.3.3:
resolved "https://registry.yarnpkg.com/std-env/-/std-env-3.7.0.tgz#c9f7386ced6ecf13360b6c6c55b8aaa4ef7481d2"
integrity sha512-JPbdCEQLj1w5GilpiHAx3qJvFndqybBysA3qUOnznweH4QbNYUsW/ea8QzSrnh0vNsezMMw5bcVool8lM0gwzg==
-"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.1.0:
+"string-width-cjs@npm:string-width@^4.2.0":
+name string-width-cjs
+version "4.2.3"
+resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
+integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
+dependencies:
+emoji-regex "^8.0.0"
+is-fullwidth-code-point "^3.0.0"
+strip-ansi "^6.0.1"
+string-width@^4.1.0:
version "4.2.3"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
@@ -2731,7 +2739,14 @@ string_decoder@^1.1.1:
dependencies:
safe-buffer "~5.2.0"
-"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1:
+"strip-ansi-cjs@npm:strip-ansi@^6.0.1":
+version "6.0.1"
+resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
+integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
+dependencies:
+ansi-regex "^5.0.1"
+strip-ansi@^6.0.0, strip-ansi@^6.0.1:
version "6.0.1"
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==