mirror of https://github.com/Myriad-Dreamin/tinymist.git
synced 2025-07-07 21:15:03 +00:00

commit ccd51eb19a ("dev: init")

41 changed files with 10490 additions and 0 deletions
.gitignore (vendored, new file, +8)
@@ -0,0 +1,8 @@
/target/
/editors/lapce/target/
result*
.direnv
.envrc
node_modules/
/editors/vscode/out/
/editors/lapce/out/
.prettierignore (new file, +10)
@@ -0,0 +1,10 @@
.git/**
.github/**
.vscode/**
assets/**
src/**
target/**
*.toml
*.txt
*.lock
*.md
.vscode/extensions.json (vendored, new file, +5)
@@ -0,0 +1,5 @@
{
  "recommendations": [
    "rust-lang.rust-analyzer"
  ]
}
.vscode/launch.json (vendored, new file, +31)
@@ -0,0 +1,31 @@
{
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Run Extension",
      "type": "extensionHost",
      "request": "launch",
      "runtimeExecutable": "${execPath}",
      "args": [
        "--extensionDevelopmentPath=${workspaceFolder}/editors/vscode"
      ],
      "outFiles": [
        "${workspaceFolder}/editors/vscode/out/**/*.js"
      ],
      "preLaunchTask": "VS Code Extension Prelaunch"
    },
    {
      "name": "Run Extension [Jaeger]",
      "type": "extensionHost",
      "request": "launch",
      "runtimeExecutable": "${execPath}",
      "args": [
        "--extensionDevelopmentPath=${workspaceFolder}/editors/vscode"
      ],
      "outFiles": [
        "${workspaceFolder}/editors/vscode/out/**/*.js"
      ],
      "preLaunchTask": "VS Code Extension Prelaunch [Jaeger]"
    }
  ]
}
.vscode/settings.json (vendored, new file, +6)
@@ -0,0 +1,6 @@
{
  "eslint.format.enable": true,
  "eslint.workingDirectories": [
    "editors/vscode"
  ]
}
.vscode/tasks.json (vendored, new file, +75)
@@ -0,0 +1,75 @@
{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "VS Code Extension Prelaunch",
      "dependsOn": [
        "Compile VS Code Extension",
        "Build Debug LSP Binary",
        "Copy Debug LSP Binary to VS Code Extension"
      ],
      "dependsOrder": "sequence",
    },
    {
      "label": "VS Code Extension Prelaunch [Jaeger]",
      "dependsOn": [
        "Compile VS Code Extension",
        "Build Debug LSP Binary [Jaeger]",
        "Copy Debug LSP Binary to VS Code Extension"
      ],
      "dependsOrder": "sequence"
    },
    {
      "label": "Compile VS Code Extension",
      "type": "npm",
      "script": "compile",
      "path": "editors/vscode",
      "group": "build",
    },
    {
      "label": "Build Debug LSP Binary",
      "type": "cargo",
      "command": "build",
      "args": [ "--bin", "tinymist" ],
      "problemMatcher": [
        "$rustc"
      ],
      "group": "build"
    },
    {
      "label": "Build Debug LSP Binary [Jaeger]",
      "type": "cargo",
      "command": "build",
      "args": [ "--bin", "tinymist", "--features", "jaeger" ],
      "problemMatcher": [
        "$rustc"
      ],
      "group": "build"
    },
    {
      "label": "Copy Debug LSP Binary to VS Code Extension",
      "type": "shell",
      "windows": {
        "command": "cp",
        "args": [
          "${workspaceFolder}\\target\\debug\\tinymist.exe",
          "${workspaceFolder}\\editors\\vscode\\out\\"
        ]
      },
      "linux": {
        "command": "cp",
        "args": [
          "${workspaceFolder}/target/debug/tinymist",
          "${workspaceFolder}/editors/vscode/out/"
        ]
      },
      "osx": {
        "command": "cp",
        "args": [
          "${workspaceFolder}/target/debug/tinymist",
          "${workspaceFolder}/editors/vscode/out/"
        ]
      }
    }
  ]
}
Cargo.lock (generated, new file, +4636)
File diff suppressed because it is too large.
Cargo.toml (new file, +94)
@@ -0,0 +1,94 @@
[workspace.package]
description = "Fast lsp implementation for typst."
authors = ["Myriad-Dreamin <camiyoru@gmail.com>"]
version = "0.1.0"
edition = "2021"
readme = "README.md"
license = "Apache-2.0"
homepage = "https://github.com/_/tinymist"
repository = "https://github.com/_/tinymist"
rust-version = "1.74"

[workspace]
resolver = "2"
members = ["crates/*", "external/typst-preview"]

[workspace.dependencies]

once_cell = "1"
anyhow = "1"

fxhash = "0.2.1"
ecow = "0.2.0"
comemo = "0.3"
ena = "0.14.2"
futures = "0.3"
regex = "1.8.1"
itertools = "0.12.0"
lazy_static = "1.4.0"
env_logger = "0.11.1"
log = "0.4.17"
percent-encoding = "2.3.0"
strum = { version = "0.25.0", features = ["derive"] }
async-trait = "0.1.73"
parking_lot = "0.12.1"
thiserror = "1.0.44"

typst = "0.10.0"
typst-ide = "0.10.0"
typst-pdf = "0.10.0"
typst-assets = { git = "https://github.com/typst/typst-assets", rev = "79e1c84" }
typst-ts-core = { git = "https://github.com/Myriad-Dreamin/typst.ts", rev = "98e3d3a42877b195f87223060882d55fd5aaa04a", package = "typst-ts-core" }
typst-ts-compiler = { version = "0.4.2-rc6" }
typst-preview = { path = "external/typst-preview" }

tower-lsp = "0.20.0"

clap = { version = "4.4", features = ["derive", "env", "unicode", "wrap_help"] }
clap_builder = { version = "4", features = ["string"] }
clap_complete = "4.4"
clap_complete_fig = "4.4"
clap_mangen = { version = "0.2.15" }
vergen = { version = "8.2.5", features = [
    "build",
    "cargo",
    "git",
    "gitcl",
    "rustc",
] }
tokio = { version = "1.34.0", features = [
    "macros",
    "rt-multi-thread",
    "io-std",
] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"

divan = "0.1.7"
insta = "1.34"

[profile.release]
lto = true        # Enable link-time optimization
strip = true      # Strip symbols from binary
opt-level = 3     # Optimize for speed
codegen-units = 1 # Reduce number of codegen units to increase optimizations
panic = 'abort'   # Abort on panic

[workspace.lints.rust]
missing_docs = "warn"

[workspace.lints.clippy]
uninlined_format_args = "warn"
missing_errors_doc = "warn"
missing_panics_doc = "warn"
missing_safety_doc = "warn"
undocumented_unsafe_blocks = "warn"

[patch.crates-io]
typst = { git = "https://github.com/Myriad-Dreamin/typst.git", branch = "typst.ts-v0.10.0-half" }
typst-ide = { git = "https://github.com/Myriad-Dreamin/typst.git", branch = "typst.ts-v0.10.0-half" }
typst-pdf = { git = "https://github.com/Myriad-Dreamin/typst.git", branch = "typst.ts-v0.10.0-half" }
typst-syntax = { git = "https://github.com/Myriad-Dreamin/typst.git", branch = "typst.ts-v0.10.0-half" }
typst-ts-svg-exporter = { git = "https://github.com/Myriad-Dreamin/typst.ts", rev = "98e3d3a42877b195f87223060882d55fd5aaa04a", package = "typst-ts-svg-exporter" }
typst-ts-core = { git = "https://github.com/Myriad-Dreamin/typst.ts", rev = "98e3d3a42877b195f87223060882d55fd5aaa04a", package = "typst-ts-core" }
typst-ts-compiler = { git = "https://github.com/Myriad-Dreamin/typst.ts", rev = "98e3d3a42877b195f87223060882d55fd5aaa04a", package = "typst-ts-compiler" }
LICENSE (new file, +201)
@@ -0,0 +1,201 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright 2023 Myriad Dreamin, Nathan Varner

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
README.md (new file, +7)
@@ -0,0 +1,7 @@
# Tinymist LSP

Tinymist [ˈtaɪni mɪst], a language server for [Typst](https://typst.app/) [taɪpst]. You can also call it "微霭" in Chinese.

## Acknowledgements

- It is developed based on [typst-lsp](https://github.com/nvarner/typst-lsp)
crates/tinymist/Cargo.toml (new file, +56)
@@ -0,0 +1,56 @@
[package]
name = "tinymist-cli"
description = "CLI for tinymist."
categories = ["compilers", "command-line-utilities"]
keywords = ["cli", "language", "typst"]
authors.workspace = true
version.workspace = true
license.workspace = true
edition.workspace = true
homepage.workspace = true
repository.workspace = true

[[bin]]
name = "tinymist"
path = "src/main.rs"
test = false
doctest = false
bench = false
doc = false

[dependencies]

once_cell.workspace = true
anyhow.workspace = true
comemo.workspace = true
thiserror.workspace = true
tokio.workspace = true
futures.workspace = true
regex.workspace = true
itertools.workspace = true
lazy_static.workspace = true
strum.workspace = true
async-trait.workspace = true
env_logger.workspace = true
log.workspace = true
percent-encoding.workspace = true
serde.workspace = true
serde_json.workspace = true
parking_lot.workspace = true

typst.workspace = true
typst-ide.workspace = true
typst-pdf.workspace = true
typst-assets = { workspace = true, features = ["fonts"] }

typst-ts-core = { version = "0.4.2-rc6", default-features = false, features = [
    "flat-vector",
    "vector-bbox",
] }
typst-ts-compiler.workspace = true
typst-preview.workspace = true

tower-lsp.workspace = true

# [lints]
# workspace = true
crates/tinymist/src/actor/mod.rs (new file, +2)
@@ -0,0 +1,2 @@
pub mod render;
pub mod typst;
crates/tinymist/src/actor/render.rs (new file, +100)
@@ -0,0 +1,100 @@
use std::{
    path::{Path, PathBuf},
    sync::Arc,
};

use anyhow::Context;
use log::info;
use tokio::sync::{
    broadcast::{self, error::RecvError},
    watch,
};
use typst_ts_core::TypstDocument;

use crate::config::ExportPdfMode;

#[derive(Debug, Clone)]
pub enum RenderActorRequest {
    Render,
    ChangeConfig(PdfExportConfig),
}

#[derive(Debug, Clone)]
pub struct PdfExportConfig {
    path: PathBuf,
    mode: ExportPdfMode,
}

pub struct PdfExportActor {
    render_rx: broadcast::Receiver<RenderActorRequest>,
    document: watch::Receiver<Option<Arc<TypstDocument>>>,

    config: Option<PdfExportConfig>,
}

impl PdfExportActor {
    pub fn new(
        document: watch::Receiver<Option<Arc<TypstDocument>>>,
        render_rx: broadcast::Receiver<RenderActorRequest>,
    ) -> Self {
        Self {
            render_rx,
            document,

            config: None,
        }
    }

    pub async fn run(mut self) {
        loop {
            tokio::select! {
                req = self.render_rx.recv() => {
                    let req = match req {
                        Ok(req) => req,
                        Err(RecvError::Closed) => {
                            info!("render actor channel closed");
                            break;
                        }
                        Err(RecvError::Lagged(_)) => {
                            info!("render actor channel lagged");
                            continue;
                        }
                    };

                    match req {
                        RenderActorRequest::Render => {
                            let Some(document) = self.document.borrow().clone() else {
                                info!("PdfRenderActor: document is not ready");
                                continue;
                            };

                            if let Some(cfg) = self.config.as_ref() {
                                if cfg.mode == ExportPdfMode::OnType {
                                    self.export_pdf(&document, &cfg.path).await.unwrap();
                                }
                            }
                        }
                        RenderActorRequest::ChangeConfig(config) => {
                            self.config = Some(config);
                        }
                    }
                }
            }
        }
    }

    async fn export_pdf(&self, doc: &TypstDocument, path: &Path) -> anyhow::Result<()> {
        // todo: Some(pdf_uri.as_str())
        // todo: timestamp world.now()
        info!("exporting PDF {path}", path = path.display());

        let data = typst_pdf::pdf(doc, None, None);

        std::fs::write(path, data).context("failed to export PDF")?;

        info!("PDF export complete");

        Ok(())
    }
}
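
A minimal wiring sketch (illustrative, not part of this commit) of how the compile side might drive `PdfExportActor`: the compiler keeps the `watch` sender for documents and the `broadcast` sender for requests, and the actor is spawned onto the runtime with the matching receivers. Channel capacity and the function name are assumptions.

    use std::sync::Arc;
    use tokio::sync::{broadcast, watch};
    use typst_ts_core::TypstDocument;
    use crate::actor::render::{PdfExportActor, RenderActorRequest};

    async fn wire_up_render_actor() {
        // The compile side keeps the senders; the actor takes the receivers.
        let (doc_tx, doc_rx) = watch::channel(None::<Arc<TypstDocument>>);
        let (render_tx, render_rx) = broadcast::channel(32);

        tokio::spawn(PdfExportActor::new(doc_rx, render_rx).run());

        // After each successful compile: publish the new document, then poke the actor.
        // doc_tx.send(Some(compiled_doc)).ok();
        render_tx.send(RenderActorRequest::Render).ok();
        let _keep_alive = doc_tx; // keep the document channel alive for the actor
    }

Note that `PdfExportConfig`'s fields are private in this file, so a real caller would construct `ChangeConfig` through whatever constructor the crate exposes.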
crates/tinymist/src/actor/typst.rs (new file, +1552)
File diff suppressed because it is too large.
crates/tinymist/src/analysis/analyze.rs (new file, +107)
@@ -0,0 +1,107 @@
use comemo::Track;
use typst::diag::EcoString;
use typst::engine::{Engine, Route};
use typst::eval::{Tracer, Vm};
use typst::foundations::{Label, Scopes, Value};
use typst::introspection::{Introspector, Locator};
use typst::model::{BibliographyElem, Document};
use typst::syntax::{ast, LinkedNode, Span, SyntaxKind};
use typst::World;
use typst_ts_core::typst::prelude::{eco_vec, EcoVec};

/// Try to determine a set of possible values for an expression.
pub fn analyze_expr(world: &dyn World, node: &LinkedNode) -> EcoVec<Value> {
    match node.cast::<ast::Expr>() {
        Some(ast::Expr::None(_)) => eco_vec![Value::None],
        Some(ast::Expr::Auto(_)) => eco_vec![Value::Auto],
        Some(ast::Expr::Bool(v)) => eco_vec![Value::Bool(v.get())],
        Some(ast::Expr::Int(v)) => eco_vec![Value::Int(v.get())],
        Some(ast::Expr::Float(v)) => eco_vec![Value::Float(v.get())],
        Some(ast::Expr::Numeric(v)) => eco_vec![Value::numeric(v.get())],
        Some(ast::Expr::Str(v)) => eco_vec![Value::Str(v.get().into())],

        Some(ast::Expr::FieldAccess(access)) => {
            let Some(child) = node.children().next() else {
                return eco_vec![];
            };
            analyze_expr(world, &child)
                .into_iter()
                .filter_map(|target| target.field(&access.field()).ok())
                .collect()
        }

        Some(_) => {
            if let Some(parent) = node.parent() {
                if parent.kind() == SyntaxKind::FieldAccess && node.index() > 0 {
                    return analyze_expr(world, parent);
                }
            }

            let mut tracer = Tracer::new();
            tracer.inspect(node.span());
            typst::compile(world, &mut tracer).ok();
            tracer.values()
        }

        _ => eco_vec![],
    }
}

/// Try to load a module from the current source file.
pub fn analyze_import(world: &dyn World, source: &LinkedNode) -> Option<Value> {
    let source = analyze_expr(world, source).into_iter().next()?;
    if source.scope().is_some() {
        return Some(source);
    }

    let mut locator = Locator::default();
    let introspector = Introspector::default();
    let mut tracer = Tracer::new();
    let engine = Engine {
        world: world.track(),
        route: Route::default(),
        introspector: introspector.track(),
        locator: &mut locator,
        tracer: tracer.track_mut(),
    };

    let mut vm = Vm::new(engine, Scopes::new(Some(world.library())), Span::detached());
    typst::eval::import(&mut vm, source, Span::detached(), true)
        .ok()
        .map(Value::Module)
}

/// Find all labels and details for them.
///
/// Returns:
/// - All labels and descriptions for them, if available
/// - A split offset: All labels before this offset belong to nodes, all after
///   belong to a bibliography.
pub fn analyze_labels(document: &Document) -> (Vec<(Label, Option<EcoString>)>, usize) {
    let mut output = vec![];

    // Labels in the document.
    for elem in document.introspector.all() {
        let Some(label) = elem.label() else { continue };
        let details = elem
            .get_by_name("caption")
            .or_else(|| elem.get_by_name("body"))
            .and_then(|field| match field {
                Value::Content(content) => Some(content),
                _ => None,
            })
            .as_ref()
            .unwrap_or(elem)
            .plain_text();
        output.push((label, Some(details)));
    }

    let split = output.len();

    // Bibliography keys.
    for (key, detail) in BibliographyElem::keys(document.introspector.track()) {
        output.push((Label::new(&key), detail));
    }

    (output, split)
}
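
The split offset returned by `analyze_labels` is meant to be consumed like this (an illustrative caller, assuming a compiled `document` is at hand):

    let (labels, split) = analyze_labels(&document);
    let (element_labels, bib_labels) = labels.split_at(split);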
crates/tinymist/src/analysis/mod.rs (new file, +1)
@@ -0,0 +1 @@
pub mod analyze;
crates/tinymist/src/config.rs (new file, +234)
@@ -0,0 +1,234 @@
use std::{fmt, path::PathBuf};

use anyhow::bail;
use futures::future::BoxFuture;
use itertools::Itertools;
use serde::Deserialize;
use serde_json::{Map, Value};
use tower_lsp::lsp_types::{
    self, ConfigurationItem, InitializeParams, PositionEncodingKind, Registration,
};

use crate::ext::InitializeParamsExt;

const CONFIG_REGISTRATION_ID: &str = "config";
const CONFIG_METHOD_ID: &str = "workspace/didChangeConfiguration";

pub fn get_config_registration() -> Registration {
    Registration {
        id: CONFIG_REGISTRATION_ID.to_owned(),
        method: CONFIG_METHOD_ID.to_owned(),
        register_options: None,
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum ExperimentalFormatterMode {
    #[default]
    Off,
    On,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum ExportPdfMode {
    Never,
    #[default]
    OnSave,
    OnType,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Default, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum SemanticTokensMode {
    Disable,
    #[default]
    Enable,
}

pub type Listener<T> = Box<dyn FnMut(&T) -> BoxFuture<anyhow::Result<()>> + Send + Sync>;

const CONFIG_ITEMS: &[&str] = &[
    "exportPdf",
    "rootPath",
    "semanticTokens",
    "experimentalFormatterMode",
];

#[derive(Default)]
pub struct Config {
    pub export_pdf: ExportPdfMode,
    pub root_path: Option<PathBuf>,
    pub semantic_tokens: SemanticTokensMode,
    pub formatter: ExperimentalFormatterMode,
    semantic_tokens_listeners: Vec<Listener<SemanticTokensMode>>,
    formatter_listeners: Vec<Listener<ExperimentalFormatterMode>>,
}

impl Config {
    pub fn get_items() -> Vec<ConfigurationItem> {
        let sections = CONFIG_ITEMS
            .iter()
            .flat_map(|item| [format!("tinymist.{item}"), item.to_string()]);

        sections
            .map(|section| ConfigurationItem {
                section: Some(section),
                ..Default::default()
            })
            .collect()
    }

    pub fn values_to_map(values: Vec<Value>) -> Map<String, Value> {
        let unpaired_values = values
            .into_iter()
            .tuples()
            .map(|(a, b)| if !a.is_null() { a } else { b });

        CONFIG_ITEMS
            .iter()
            .map(|item| item.to_string())
            .zip(unpaired_values)
            .collect()
    }

    pub fn listen_semantic_tokens(&mut self, listener: Listener<SemanticTokensMode>) {
        self.semantic_tokens_listeners.push(listener);
    }

    // pub fn listen_formatting(&mut self, listener:
    // Listener<ExperimentalFormatterMode>) { self.formatter_listeners.
    // push(listener); }

    pub async fn update(&mut self, update: &Value) -> anyhow::Result<()> {
        if let Value::Object(update) = update {
            self.update_by_map(update).await
        } else {
            bail!("got invalid configuration object {update}")
        }
    }

    pub async fn update_by_map(&mut self, update: &Map<String, Value>) -> anyhow::Result<()> {
        let export_pdf = update
            .get("exportPdf")
            .map(ExportPdfMode::deserialize)
            .and_then(Result::ok);
        if let Some(export_pdf) = export_pdf {
            self.export_pdf = export_pdf;
        }

        let root_path = update.get("rootPath");
        if let Some(root_path) = root_path {
            if root_path.is_null() {
                self.root_path = None;
            }
            if let Some(root_path) = root_path.as_str().map(PathBuf::from) {
                self.root_path = Some(root_path);
            }
        }

        let semantic_tokens = update
            .get("semanticTokens")
            .map(SemanticTokensMode::deserialize)
            .and_then(Result::ok);
        if let Some(semantic_tokens) = semantic_tokens {
            for listener in &mut self.semantic_tokens_listeners {
                listener(&semantic_tokens).await?;
            }
            self.semantic_tokens = semantic_tokens;
        }

        let formatter = update
            .get("experimentalFormatterMode")
            .map(ExperimentalFormatterMode::deserialize)
            .and_then(Result::ok);
        if let Some(formatter) = formatter {
            for listener in &mut self.formatter_listeners {
                listener(&formatter).await?;
            }
            self.formatter = formatter;
        }

        Ok(())
    }
}

impl fmt::Debug for Config {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.debug_struct("Config")
            .field("export_pdf", &self.export_pdf)
            .field("formatter", &self.formatter)
            .field("semantic_tokens", &self.semantic_tokens)
            .field(
                "semantic_tokens_listeners",
                &format_args!("Vec[len = {}]", self.semantic_tokens_listeners.len()),
            )
            .field(
                "formatter_listeners",
                &format_args!("Vec[len = {}]", self.formatter_listeners.len()),
            )
            .finish()
    }
}

/// What counts as "1 character" for string indexing. We should always prefer
/// UTF-8, but support UTF-16 as long as it is standard. For more background on
/// encodings and LSP, try ["The bottom emoji breaks rust-analyzer"](https://fasterthanli.me/articles/the-bottom-emoji-breaks-rust-analyzer),
/// a well-written article on the topic.
#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, Default)]
pub enum PositionEncoding {
    /// "1 character" means "1 UTF-16 code unit"
    ///
    /// This is the only required encoding for LSPs to support, but it's not a
    /// natural one (unless you're working in JS). Prefer UTF-8, and refer
    /// to the article linked in the `PositionEncoding` docs for more
    /// background.
    #[default]
    Utf16,
    /// "1 character" means "1 byte"
    Utf8,
}

impl From<PositionEncoding> for lsp_types::PositionEncodingKind {
    fn from(position_encoding: PositionEncoding) -> Self {
        match position_encoding {
            PositionEncoding::Utf16 => Self::UTF16,
            PositionEncoding::Utf8 => Self::UTF8,
        }
    }
}

/// Configuration set at initialization that won't change within a single
/// session
#[derive(Debug)]
pub struct ConstConfig {
    pub position_encoding: PositionEncoding,
    pub supports_semantic_tokens_dynamic_registration: bool,
    pub supports_document_formatting_dynamic_registration: bool,
    pub supports_config_change_registration: bool,
}

impl ConstConfig {
    fn choose_encoding(params: &InitializeParams) -> PositionEncoding {
        let encodings = params.position_encodings();
        if encodings.contains(&PositionEncodingKind::UTF8) {
            PositionEncoding::Utf8
        } else {
            PositionEncoding::Utf16
        }
    }
}

impl From<&InitializeParams> for ConstConfig {
    fn from(params: &InitializeParams) -> Self {
        Self {
            position_encoding: Self::choose_encoding(params),
            supports_semantic_tokens_dynamic_registration: params
                .supports_semantic_tokens_dynamic_registration(),
            supports_document_formatting_dynamic_registration: params
                .supports_document_formatting_dynamic_registration(),
            supports_config_change_registration: params.supports_config_change_registration(),
        }
    }
}
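
To make the `get_items`/`values_to_map` round trip concrete, here is an illustrative sketch (the values are invented): each setting is requested twice, once namespaced under `tinymist.` and once bare, and `values_to_map` pairs the responses up and keeps the non-null member of each pair under the bare key.

    use serde_json::{json, Value};

    fn values_to_map_demo() {
        // Responses arrive in the order of `Config::get_items()`:
        // ["tinymist.exportPdf", "exportPdf", "tinymist.rootPath", "rootPath",
        //  "tinymist.semanticTokens", "semanticTokens",
        //  "tinymist.experimentalFormatterMode", "experimentalFormatterMode"]
        let values = vec![
            json!("onType"), // namespaced value wins over the bare key
            Value::Null,
            Value::Null,     // namespaced rootPath is unset...
            json!("/proj"),  // ...so the bare rootPath is used as fallback
            Value::Null, Value::Null,
            Value::Null, Value::Null,
        ];
        let map = Config::values_to_map(values);
        assert_eq!(map["exportPdf"], json!("onType"));
        assert_eq!(map["rootPath"], json!("/proj"));
    }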
crates/tinymist/src/ext.rs (new file, +141)
@@ -0,0 +1,141 @@
use std::ffi::OsStr;
use std::path::PathBuf;

use tower_lsp::lsp_types::{DocumentFormattingClientCapabilities, Url};
use tower_lsp::lsp_types::{
    InitializeParams, Position, PositionEncodingKind, SemanticTokensClientCapabilities,
};
use typst::syntax::VirtualPath;

use crate::config::PositionEncoding;

pub trait InitializeParamsExt {
    fn position_encodings(&self) -> &[PositionEncodingKind];
    fn supports_config_change_registration(&self) -> bool;
    fn semantic_tokens_capabilities(&self) -> Option<&SemanticTokensClientCapabilities>;
    fn document_formatting_capabilities(&self) -> Option<&DocumentFormattingClientCapabilities>;
    fn supports_semantic_tokens_dynamic_registration(&self) -> bool;
    fn supports_document_formatting_dynamic_registration(&self) -> bool;
    fn root_paths(&self) -> Vec<PathBuf>;
}

static DEFAULT_ENCODING: [PositionEncodingKind; 1] = [PositionEncodingKind::UTF16];

impl InitializeParamsExt for InitializeParams {
    fn position_encodings(&self) -> &[PositionEncodingKind] {
        self.capabilities
            .general
            .as_ref()
            .and_then(|general| general.position_encodings.as_ref())
            .map(|encodings| encodings.as_slice())
            .unwrap_or(&DEFAULT_ENCODING)
    }

    fn supports_config_change_registration(&self) -> bool {
        self.capabilities
            .workspace
            .as_ref()
            .and_then(|workspace| workspace.configuration)
            .unwrap_or(false)
    }

    fn semantic_tokens_capabilities(&self) -> Option<&SemanticTokensClientCapabilities> {
        self.capabilities
            .text_document
            .as_ref()?
            .semantic_tokens
            .as_ref()
    }

    fn document_formatting_capabilities(&self) -> Option<&DocumentFormattingClientCapabilities> {
        self.capabilities
            .text_document
            .as_ref()?
            .formatting
            .as_ref()
    }

    fn supports_semantic_tokens_dynamic_registration(&self) -> bool {
        self.semantic_tokens_capabilities()
            .and_then(|semantic_tokens| semantic_tokens.dynamic_registration)
            .unwrap_or(false)
    }

    fn supports_document_formatting_dynamic_registration(&self) -> bool {
        self.document_formatting_capabilities()
            .and_then(|document_format| document_format.dynamic_registration)
            .unwrap_or(false)
    }

    #[allow(deprecated)] // `self.root_path` is marked as deprecated
    fn root_paths(&self) -> Vec<PathBuf> {
        match self.workspace_folders.as_ref() {
            Some(roots) => roots
                .iter()
                .map(|root| &root.uri)
                .map(Url::to_file_path)
                .collect::<Result<Vec<_>, _>>()
                .unwrap(),
            None => self
                .root_uri
                .as_ref()
                .map(|uri| uri.to_file_path().unwrap())
                .or_else(|| self.root_path.clone().map(PathBuf::from))
                .into_iter()
                .collect(),
        }
    }
}

pub trait StrExt {
    fn encoded_len(&self, encoding: PositionEncoding) -> usize;
}

impl StrExt for str {
    fn encoded_len(&self, encoding: PositionEncoding) -> usize {
        match encoding {
            PositionEncoding::Utf8 => self.len(),
            PositionEncoding::Utf16 => self.chars().map(char::len_utf16).sum(),
        }
    }
}

pub trait VirtualPathExt {
    fn with_extension(&self, extension: impl AsRef<OsStr>) -> Self;
}

impl VirtualPathExt for VirtualPath {
    fn with_extension(&self, extension: impl AsRef<OsStr>) -> Self {
        Self::new(self.as_rooted_path().with_extension(extension))
    }
}

pub trait PositionExt {
    fn delta(&self, to: &Self) -> PositionDelta;
}

impl PositionExt for Position {
    /// Calculates the delta from `self` to `to`. This is in the
    /// `SemanticToken` sense, so the delta's `character` is relative to
    /// `self`'s `character` iff `self` and `to` are on the same line.
    /// Otherwise, it's relative to the start of the line `to` is on.
    fn delta(&self, to: &Self) -> PositionDelta {
        let line_delta = to.line - self.line;
        let char_delta = if line_delta == 0 {
            to.character - self.character
        } else {
            to.character
        };

        PositionDelta {
            delta_line: line_delta,
            delta_start: char_delta,
        }
    }
}

#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Copy, Clone, Default)]
pub struct PositionDelta {
    pub delta_line: u32,
    pub delta_start: u32,
}
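
A quick illustration (not in this commit) of the two extension traits above: the same string has a different `encoded_len` depending on what the client counts as one character, and `delta` is only column-relative when both positions share a line.

    use tower_lsp::lsp_types::Position;
    use crate::config::PositionEncoding;
    use crate::ext::{PositionExt, StrExt};

    fn encoding_demo() {
        let s = "a😀b"; // the emoji is 4 UTF-8 bytes but 2 UTF-16 code units
        assert_eq!(s.encoded_len(PositionEncoding::Utf8), 6);
        assert_eq!(s.encoded_len(PositionEncoding::Utf16), 4);

        let from = Position::new(1, 5);
        assert_eq!(from.delta(&Position::new(1, 9)).delta_start, 4); // same line: relative
        assert_eq!(from.delta(&Position::new(2, 3)).delta_start, 3); // new line: absolute column
    }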
636
crates/tinymist/src/lsp.rs
Normal file
636
crates/tinymist/src/lsp.rs
Normal file
|
@ -0,0 +1,636 @@
|
|||
use std::borrow::Cow;
|
||||
|
||||
use anyhow::Context;
|
||||
use async_trait::async_trait;
|
||||
use futures::FutureExt;
|
||||
use log::{error, info, trace};
|
||||
use serde_json::Value as JsonValue;
|
||||
use tower_lsp::lsp_types::*;
|
||||
use tower_lsp::{jsonrpc, LanguageServer};
|
||||
use typst_ts_core::config::CompileOpts;
|
||||
|
||||
use crate::actor;
|
||||
use crate::actor::typst::{
|
||||
CompilerQueryResponse, CompletionRequest, DocumentSymbolRequest, HoverRequest,
|
||||
OnSaveExportRequest, SelectionRangeRequest, SemanticTokensDeltaRequest,
|
||||
SemanticTokensFullRequest, SignatureHelpRequest, SymbolRequest,
|
||||
};
|
||||
use crate::config::{
|
||||
get_config_registration, Config, ConstConfig, ExperimentalFormatterMode, ExportPdfMode,
|
||||
SemanticTokensMode,
|
||||
};
|
||||
use crate::ext::InitializeParamsExt;
|
||||
// use crate::server::formatting::{get_formatting_registration,
|
||||
// get_formatting_unregistration};
|
||||
// use crate::workspace::Workspace;
|
||||
|
||||
use super::semantic_tokens::{
|
||||
get_semantic_tokens_options, get_semantic_tokens_registration,
|
||||
get_semantic_tokens_unregistration,
|
||||
};
|
||||
use super::TypstServer;
|
||||
|
||||
macro_rules! run_query {
|
||||
($self: expr, $query: ident, $req: expr) => {{
|
||||
let req = $req;
|
||||
$self
|
||||
.universe()
|
||||
.query(actor::typst::CompilerQueryRequest::$query(req.clone()))
|
||||
.await
|
||||
.map_err(|err| {
|
||||
error!("error getting $query: {err} with request {req:?}");
|
||||
jsonrpc::Error::internal_error()
|
||||
})
|
||||
.map(|resp| {
|
||||
let CompilerQueryResponse::$query(resp) = resp else {
|
||||
unreachable!()
|
||||
};
|
||||
resp
|
||||
})
|
||||
}};
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl LanguageServer for TypstServer {
|
||||
async fn initialize(&self, params: InitializeParams) -> jsonrpc::Result<InitializeResult> {
|
||||
// self.tracing_init();
|
||||
|
||||
let cluster = {
|
||||
let root_paths = params.root_paths();
|
||||
let primary_root = root_paths.first().cloned().unwrap_or_default();
|
||||
actor::typst::create_cluster(
|
||||
self.client.clone(),
|
||||
root_paths,
|
||||
CompileOpts {
|
||||
root_dir: primary_root,
|
||||
// todo: font paths
|
||||
// font_paths: arguments.font_paths.clone(),
|
||||
with_embedded_fonts: typst_assets::fonts().map(Cow::Borrowed).collect(),
|
||||
..CompileOpts::default()
|
||||
},
|
||||
)
|
||||
};
|
||||
|
||||
let (cluster, cluster_bg) = cluster.split();
|
||||
|
||||
self.universe
|
||||
.set(cluster)
|
||||
.map_err(|_| ())
|
||||
.expect("the cluster is already initialized");
|
||||
|
||||
self.const_config
|
||||
.set(ConstConfig::from(¶ms))
|
||||
.expect("const config should not yet be initialized");
|
||||
|
||||
tokio::spawn(cluster_bg.run());
|
||||
|
||||
if let Some(init) = ¶ms.initialization_options {
|
||||
let mut config = self.config.write().await;
|
||||
config
|
||||
.update(init)
|
||||
.await
|
||||
.as_ref()
|
||||
.map_err(ToString::to_string)
|
||||
.map_err(jsonrpc::Error::invalid_params)?;
|
||||
}
|
||||
|
||||
let config = self.config.read().await;
|
||||
|
||||
let semantic_tokens_provider = match config.semantic_tokens {
|
||||
SemanticTokensMode::Enable
|
||||
if !params.supports_semantic_tokens_dynamic_registration() =>
|
||||
{
|
||||
Some(get_semantic_tokens_options().into())
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
|
||||
let document_formatting_provider = match config.formatter {
|
||||
ExperimentalFormatterMode::On
|
||||
if !params.supports_document_formatting_dynamic_registration() =>
|
||||
{
|
||||
Some(OneOf::Left(true))
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
|
||||
Ok(InitializeResult {
|
||||
capabilities: ServerCapabilities {
|
||||
signature_help_provider: Some(SignatureHelpOptions {
|
||||
trigger_characters: Some(vec!["(".to_string(), ",".to_string()]),
|
||||
retrigger_characters: None,
|
||||
work_done_progress_options: WorkDoneProgressOptions {
|
||||
work_done_progress: None,
|
||||
},
|
||||
}),
|
||||
hover_provider: Some(HoverProviderCapability::Simple(true)),
|
||||
completion_provider: Some(CompletionOptions {
|
||||
trigger_characters: Some(vec![
|
||||
String::from("#"),
|
||||
String::from("."),
|
||||
String::from("@"),
|
||||
]),
|
||||
..Default::default()
|
||||
}),
|
||||
text_document_sync: Some(TextDocumentSyncCapability::Options(
|
||||
TextDocumentSyncOptions {
|
||||
open_close: Some(true),
|
||||
change: Some(TextDocumentSyncKind::INCREMENTAL),
|
||||
save: Some(TextDocumentSyncSaveOptions::Supported(true)),
|
||||
..Default::default()
|
||||
},
|
||||
)),
|
||||
semantic_tokens_provider,
|
||||
execute_command_provider: Some(ExecuteCommandOptions {
|
||||
commands: LspCommand::all_as_string(),
|
||||
work_done_progress_options: WorkDoneProgressOptions {
|
||||
work_done_progress: None,
|
||||
},
|
||||
}),
|
||||
document_symbol_provider: Some(OneOf::Left(true)),
|
||||
workspace_symbol_provider: Some(OneOf::Left(true)),
|
||||
selection_range_provider: Some(SelectionRangeProviderCapability::Simple(true)),
|
||||
workspace: Some(WorkspaceServerCapabilities {
|
||||
workspace_folders: Some(WorkspaceFoldersServerCapabilities {
|
||||
supported: Some(true),
|
||||
change_notifications: Some(OneOf::Left(true)),
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
document_formatting_provider,
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
|
||||
async fn initialized(&self, _: InitializedParams) {
|
||||
let const_config = self.const_config();
|
||||
let mut config = self.config.write().await;
|
||||
|
||||
if const_config.supports_semantic_tokens_dynamic_registration {
|
||||
trace!("setting up to dynamically register semantic token support");
|
||||
|
||||
let client = self.client.clone();
|
||||
let register = move || {
|
||||
trace!("dynamically registering semantic tokens");
|
||||
let client = client.clone();
|
||||
async move {
|
||||
let options = get_semantic_tokens_options();
|
||||
client
|
||||
.register_capability(vec![get_semantic_tokens_registration(options)])
|
||||
.await
|
||||
.context("could not register semantic tokens")
|
||||
}
|
||||
};
|
||||
|
||||
let client = self.client.clone();
|
||||
let unregister = move || {
|
||||
trace!("unregistering semantic tokens");
|
||||
let client = client.clone();
|
||||
async move {
|
||||
client
|
||||
.unregister_capability(vec![get_semantic_tokens_unregistration()])
|
||||
.await
|
||||
.context("could not unregister semantic tokens")
|
||||
}
|
||||
};
|
||||
|
||||
if config.semantic_tokens == SemanticTokensMode::Enable {
|
||||
if let Some(err) = register().await.err() {
|
||||
error!("could not dynamically register semantic tokens: {err}");
|
||||
}
|
||||
}
|
||||
|
||||
config.listen_semantic_tokens(Box::new(move |mode| match mode {
|
||||
SemanticTokensMode::Enable => register().boxed(),
|
||||
SemanticTokensMode::Disable => unregister().boxed(),
|
||||
}));
|
||||
}
|
||||
|
||||
// if const_config.supports_document_formatting_dynamic_registration {
|
||||
// trace!("setting up to dynamically register document formatting support");
|
||||
|
||||
// let client = self.client.clone();
|
||||
// let register = move || {
|
||||
// trace!("dynamically registering document formatting");
|
||||
// let client = client.clone();
|
||||
// async move {
|
||||
// client
|
||||
// .register_capability(vec![get_formatting_registration()])
|
||||
// .await
|
||||
// .context("could not register document formatting")
|
||||
// }
|
||||
// };
|
||||
|
||||
// let client = self.client.clone();
|
||||
// let unregister = move || {
|
||||
// trace!("unregistering document formatting");
|
||||
// let client = client.clone();
|
||||
// async move {
|
||||
// client
|
||||
// .unregister_capability(vec![get_formatting_unregistration()])
|
||||
// .await
|
||||
// .context("could not unregister document formatting")
|
||||
// }
|
||||
// };
|
||||
|
||||
// if config.formatter == ExperimentalFormatterMode::On {
|
||||
// if let Some(err) = register().await.err() {
|
||||
// error!("could not dynamically register document formatting:
|
||||
// {err}"); }
|
||||
// }
|
||||
|
||||
// config.listen_formatting(Box::new(move |formatter| match formatter {
|
||||
// ExperimentalFormatterMode::On => register().boxed(),
|
||||
// ExperimentalFormatterMode::Off => unregister().boxed(),
|
||||
// }));
|
||||
// }
|
||||
|
||||
if const_config.supports_config_change_registration {
|
||||
trace!("setting up to request config change notifications");
|
||||
|
||||
let err = self
|
||||
.client
|
||||
.register_capability(vec![get_config_registration()])
|
||||
.await
|
||||
.err();
|
||||
if let Some(err) = err {
|
||||
error!("could not register to watch config changes: {err}");
|
||||
}
|
||||
}
|
||||
|
||||
// trace!("setting up to watch Typst files");
|
||||
// let watch_files_error = self
|
||||
// .client
|
||||
// .register_capability(vec![self.get_watcher_registration()])
|
||||
// .await
|
||||
// .err();
|
||||
// if let Some(err) = watch_files_error {
|
||||
// error!("could not register to watch Typst files: {err}");
|
||||
// }
|
||||
|
||||
info!("server initialized");
|
||||
}
|
||||
|
||||
async fn shutdown(&self) -> jsonrpc::Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn did_open(&self, params: DidOpenTextDocumentParams) {
|
||||
let path = params.text_document.uri.to_file_path().unwrap();
|
||||
let text = params.text_document.text;
|
||||
|
||||
let universe = self.universe();
|
||||
universe.create_source(path.clone(), text).await.unwrap();
|
||||
}
|
||||
|
||||
async fn did_close(&self, params: DidCloseTextDocumentParams) {
|
||||
let path = params.text_document.uri.to_file_path().unwrap();
|
||||
|
||||
let universe = self.universe();
|
||||
universe.remove_source(path.clone()).await.unwrap();
|
||||
// self.client.publish_diagnostics(uri, Vec::new(), None).await;
|
||||
}
|
||||
|
||||
async fn did_change(&self, params: DidChangeTextDocumentParams) {
|
||||
let path = params.text_document.uri.to_file_path().unwrap();
|
||||
let changes = params.content_changes;
|
||||
|
||||
let universe = self.universe();
|
||||
universe
|
||||
.edit_source(path.clone(), changes, self.const_config().position_encoding)
|
||||
.await
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
async fn did_save(&self, params: DidSaveTextDocumentParams) {
|
||||
let uri = params.text_document.uri;
|
||||
let path = uri.to_file_path().unwrap();
|
||||
|
||||
let config = self.config.read().await;
|
||||
|
||||
if config.export_pdf == ExportPdfMode::OnSave {
|
||||
let _ = run_query!(self, OnSaveExport, OnSaveExportRequest { path });
|
||||
}
|
||||
}
|
||||
|
||||
// async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams)
|
||||
// { let changes = params.changes;
|
||||
|
||||
// let mut workspace = self.workspace().write().await;
|
||||
|
||||
// for change in changes {
|
||||
// self.handle_file_change_event(&mut workspace, change);
|
||||
// }
|
||||
// }
|
||||
|
||||
// async fn did_change_workspace_folders(&self, params:
|
||||
// DidChangeWorkspaceFoldersParams) { let event = params.event;
|
||||
|
||||
// let mut workspace = self.workspace().write().await;
|
||||
|
||||
// if let Err(err) = workspace.handle_workspace_folders_change_event(&event)
|
||||
// { error!("error when changing workspace folders: {err}");
|
||||
// }
|
||||
// }
|
||||
|
||||
async fn execute_command(
|
||||
&self,
|
||||
params: ExecuteCommandParams,
|
||||
) -> jsonrpc::Result<Option<JsonValue>> {
|
||||
let ExecuteCommandParams {
|
||||
command,
|
||||
arguments,
|
||||
work_done_progress_params: _,
|
||||
} = params;
|
||||
match LspCommand::parse(&command) {
|
||||
Some(LspCommand::ExportPdf) => {
|
||||
self.command_export_pdf(arguments).await?;
|
||||
}
|
||||
Some(LspCommand::ClearCache) => {
|
||||
self.command_clear_cache(arguments).await?;
|
||||
}
|
||||
None => {
|
||||
error!("asked to execute unknown command");
|
||||
return Err(jsonrpc::Error::method_not_found());
|
||||
}
|
||||
};
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
async fn hover(&self, params: HoverParams) -> jsonrpc::Result<Option<Hover>> {
|
||||
let uri = ¶ms.text_document_position_params.text_document.uri;
|
||||
let path = uri.to_file_path().unwrap();
|
||||
let position = params.text_document_position_params.position;
|
||||
let position_encoding = self.const_config().position_encoding;
|
||||
|
||||
run_query!(
|
||||
self,
|
||||
Hover,
|
||||
HoverRequest {
|
||||
path,
|
||||
position,
|
||||
position_encoding,
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
async fn completion(
|
||||
&self,
|
||||
params: CompletionParams,
|
||||
) -> jsonrpc::Result<Option<CompletionResponse>> {
|
||||
let uri = params.text_document_position.text_document.uri;
|
||||
let path = uri.to_file_path().unwrap();
|
||||
let position = params.text_document_position.position;
        let explicit = params
            .context
            .map(|context| context.trigger_kind == CompletionTriggerKind::INVOKED)
            .unwrap_or(false);
        let position_encoding = self.const_config().position_encoding;

        run_query!(
            self,
            Completion,
            CompletionRequest {
                path,
                position,
                position_encoding,
                explicit,
            }
        )
    }

    async fn signature_help(
        &self,
        params: SignatureHelpParams,
    ) -> jsonrpc::Result<Option<SignatureHelp>> {
        let uri = params.text_document_position_params.text_document.uri;
        let path = uri.to_file_path().unwrap();
        let position = params.text_document_position_params.position;
        let position_encoding = self.const_config().position_encoding;

        run_query!(
            self,
            SignatureHelp,
            SignatureHelpRequest {
                path,
                position,
                position_encoding,
            }
        )
    }

    async fn document_symbol(
        &self,
        params: DocumentSymbolParams,
    ) -> jsonrpc::Result<Option<DocumentSymbolResponse>> {
        let uri = params.text_document.uri;
        let path = uri.to_file_path().unwrap();
        let position_encoding = self.const_config().position_encoding;

        run_query!(
            self,
            DocumentSymbol,
            DocumentSymbolRequest {
                path,
                position_encoding
            }
        )
    }

    async fn symbol(
        &self,
        params: WorkspaceSymbolParams,
    ) -> jsonrpc::Result<Option<Vec<SymbolInformation>>> {
        let pattern = (!params.query.is_empty()).then_some(params.query);
        let position_encoding = self.const_config().position_encoding;

        run_query!(
            self,
            Symbol,
            SymbolRequest {
                pattern,
                position_encoding
            }
        )
    }

    async fn selection_range(
        &self,
        params: SelectionRangeParams,
    ) -> jsonrpc::Result<Option<Vec<SelectionRange>>> {
        let uri = params.text_document.uri;
        let path = uri.to_file_path().unwrap();
        let positions = params.positions;
        let position_encoding = self.const_config().position_encoding;

        run_query!(
            self,
            SelectionRange,
            SelectionRangeRequest {
                path,
                positions,
                position_encoding
            }
        )
    }

    async fn semantic_tokens_full(
        &self,
        params: SemanticTokensParams,
    ) -> jsonrpc::Result<Option<SemanticTokensResult>> {
        let uri = params.text_document.uri;
        let path = uri.to_file_path().unwrap();
        let position_encoding = self.const_config().position_encoding;

        run_query!(
            self,
            SemanticTokensFull,
            SemanticTokensFullRequest {
                path,
                position_encoding
            }
        )
    }

    async fn semantic_tokens_full_delta(
        &self,
        params: SemanticTokensDeltaParams,
    ) -> jsonrpc::Result<Option<SemanticTokensFullDeltaResult>> {
        let uri = params.text_document.uri;
        let path = uri.to_file_path().unwrap();
        let previous_result_id = params.previous_result_id;
        let position_encoding = self.const_config().position_encoding;

        run_query!(
            self,
            SemanticTokensDelta,
            SemanticTokensDeltaRequest {
                path,
                previous_result_id,
                position_encoding
            }
        )
    }

    async fn did_change_configuration(&self, params: DidChangeConfigurationParams) {
        // For some clients, we don't get the actual changed configuration and need to
        // poll for it: https://github.com/microsoft/language-server-protocol/issues/676
        let values = match params.settings {
            JsonValue::Object(settings) => Ok(settings),
            _ => self
                .client
                .configuration(Config::get_items())
                .await
                .map(Config::values_to_map),
        };

        let result = match values {
            Ok(values) => {
                let mut config = self.config.write().await;
                config.update_by_map(&values).await
            }
            Err(err) => Err(err.into()),
        };

        match result {
            Ok(()) => {
                info!("new settings applied");
            }
            Err(err) => {
                error!("error applying new settings: {err}");
            }
        }
    }

    // async fn formatting(
    //     &self,
    //     params: DocumentFormattingParams,
    // ) -> jsonrpc::Result<Option<Vec<TextEdit>>> {
    //     let uri = params.text_document.uri;

    //     let edits = self
    //         .scope_with_source(&uri)
    //         .await
    //         .map_err(|err| {
    //             error!("error getting document to format: {err} {uri}");
    //             jsonrpc::Error::internal_error()
    //         })?
    //         .run2(|source, project| self.format_document(project, source))
    //         .await
    //         .map_err(|err| {
    //             error!("error formatting document: {err} {uri}");
    //             jsonrpc::Error::internal_error()
    //         })?;

    //     Ok(Some(edits))
    // }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum LspCommand {
    ExportPdf,
    ClearCache,
}

impl From<LspCommand> for String {
    fn from(command: LspCommand) -> Self {
        match command {
            LspCommand::ExportPdf => "tinymist.doPdfExport".to_string(),
            LspCommand::ClearCache => "tinymist.doClearCache".to_string(),
        }
    }
}

impl LspCommand {
    pub fn parse(command: &str) -> Option<Self> {
        match command {
            "tinymist.doPdfExport" => Some(Self::ExportPdf),
            "tinymist.doClearCache" => Some(Self::ClearCache),
            _ => None,
        }
    }

    pub fn all_as_string() -> Vec<String> {
        vec![Self::ExportPdf.into(), Self::ClearCache.into()]
    }
}

/// Here are implemented the handlers for each command.
impl TypstServer {
    /// Export the current document as a PDF file. The client is responsible for
    /// passing the correct file URI.
    pub async fn command_export_pdf(&self, arguments: Vec<JsonValue>) -> jsonrpc::Result<()> {
        if arguments.is_empty() {
            return Err(jsonrpc::Error::invalid_params("Missing file URI argument"));
        }
        let Some(file_uri) = arguments.first().and_then(|v| v.as_str()) else {
            return Err(jsonrpc::Error::invalid_params(
                "Missing file URI as first argument",
            ));
        };
        let file_uri = Url::parse(file_uri)
            .map_err(|_| jsonrpc::Error::invalid_params("Parameter is not a valid URI"))?;
        let path = file_uri
            .to_file_path()
            .map_err(|_| jsonrpc::Error::invalid_params("URI is not a file URI"))?;

        let _ = run_query!(self, OnSaveExport, OnSaveExportRequest { path });

        Ok(())
    }

    /// Clear all cached resources.
    pub async fn command_clear_cache(&self, _arguments: Vec<JsonValue>) -> jsonrpc::Result<()> {
        // self.workspace().write().await.clear().map_err(|err| {
        //     error!("could not clear cache: {err}");
        //     jsonrpc::Error::internal_error()
        // })?;

        // self.typst(|_| comemo::evict(0)).await;

        // Ok(())

        todo!()
    }
}
382
crates/tinymist/src/lsp_typst_boundary.rs
Normal file
@@ -0,0 +1,382 @@
//! Conversions between Typst and LSP types and representations

use tower_lsp::lsp_types;
use typst::syntax::Source;

pub type LspPosition = lsp_types::Position;
/// The interpretation of an `LspCharacterOffset` depends on the
/// `LspPositionEncoding`
pub type LspCharacterOffset = u32;
pub type LspPositionEncoding = crate::config::PositionEncoding;
/// Byte offset (i.e. UTF-8 bytes) in Typst files, either from the start of the
/// line or the file
pub type TypstOffset = usize;
pub type TypstSpan = typst::syntax::Span;

/// An LSP range. It needs its associated `LspPositionEncoding` to be used. The
/// `LspRange` struct provides this range with that encoding.
pub type LspRawRange = lsp_types::Range;
pub type TypstRange = std::ops::Range<usize>;

pub type TypstTooltip = typst_ide::Tooltip;
pub type LspHoverContents = lsp_types::HoverContents;

pub type LspDiagnostic = lsp_types::Diagnostic;
pub type TypstDiagnostic = typst::diag::SourceDiagnostic;

pub type LspSeverity = lsp_types::DiagnosticSeverity;
pub type TypstSeverity = typst::diag::Severity;

pub type LspParamInfo = lsp_types::ParameterInformation;
pub type TypstParamInfo = typst::foundations::ParamInfo;

/// An LSP range with its associated encoding.
pub struct LspRange {
    pub raw_range: LspRawRange,
    pub encoding: LspPositionEncoding,
}

impl LspRange {
    pub fn new(raw_range: LspRawRange, encoding: LspPositionEncoding) -> Self {
        Self {
            raw_range,
            encoding,
        }
    }

    pub fn into_range_on(self, source: &Source) -> TypstRange {
        lsp_to_typst::range(&self, source)
    }
}

pub type LspCompletion = lsp_types::CompletionItem;
pub type LspCompletionKind = lsp_types::CompletionItemKind;
pub type TypstCompletion = typst_ide::Completion;
pub type TypstCompletionKind = typst_ide::CompletionKind;

pub mod lsp_to_typst {
    use typst::syntax::Source;

    use super::*;

    pub fn position_to_offset(
        lsp_position: LspPosition,
        lsp_position_encoding: LspPositionEncoding,
        typst_source: &Source,
    ) -> TypstOffset {
        match lsp_position_encoding {
            LspPositionEncoding::Utf8 => {
                let line_index = lsp_position.line as usize;
                let column_index = lsp_position.character as usize;
                typst_source
                    .line_column_to_byte(line_index, column_index)
                    .unwrap()
            }
            LspPositionEncoding::Utf16 => {
                // We have a line number and a UTF-16 offset into that line. We want a byte
                // offset into the file.
                //
                // Typst's `Source` provides several UTF-16 methods:
                //  - `len_utf16` for the length of the file
                //  - `byte_to_utf16` to convert a byte offset from the start of the file to a
                //    UTF-16 offset from the start of the file
                //  - `utf16_to_byte` to do the opposite of `byte_to_utf16`
                //
                // Unfortunately, none of these address our needs well, so we do some math
                // instead. This is not the fastest possible implementation, but
                // it's the most reasonable without access to the internal state
                // of `Source`.
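                //
                // For example (mirroring the tests at the bottom of this
                // file): in "test 🥺 test" the emoji starts at byte 5 and is
                // two UTF-16 units wide, so the UTF-16 position (0, 7) just
                // after it corresponds to byte offset 9.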

                // TODO: Typst's `Source` could easily provide an implementation of the method
                // we need here. Submit a PR against `typst` to add it, then
                // update this if/when merged.

                let line_index = lsp_position.line as usize;
                let utf16_offset_in_line = lsp_position.character as usize;

                let byte_line_offset = typst_source.line_to_byte(line_index).unwrap();
                let utf16_line_offset = typst_source.byte_to_utf16(byte_line_offset).unwrap();
                let utf16_offset = utf16_line_offset + utf16_offset_in_line;

                typst_source.utf16_to_byte(utf16_offset).unwrap()
            }
        }
    }

    pub fn range(lsp_range: &LspRange, source: &Source) -> TypstRange {
        let lsp_start = lsp_range.raw_range.start;
        let typst_start = position_to_offset(lsp_start, lsp_range.encoding, source);

        let lsp_end = lsp_range.raw_range.end;
        let typst_end = position_to_offset(lsp_end, lsp_range.encoding, source);

        TypstRange {
            start: typst_start,
            end: typst_end,
        }
    }
}

pub mod typst_to_lsp {

    use itertools::Itertools;
    use lazy_static::lazy_static;
    use regex::{Captures, Regex};
    use tower_lsp::lsp_types::{
        CompletionTextEdit, Documentation, InsertTextFormat, LanguageString, MarkedString,
        MarkupContent, MarkupKind, TextEdit,
    };
    use typst::diag::EcoString;
    use typst::foundations::{CastInfo, Repr};
    use typst::syntax::Source;

    use super::*;

    pub fn offset_to_position(
        typst_offset: TypstOffset,
        lsp_position_encoding: LspPositionEncoding,
        typst_source: &Source,
    ) -> LspPosition {
        let line_index = typst_source.byte_to_line(typst_offset).unwrap();
        let column_index = typst_source.byte_to_column(typst_offset).unwrap();

        let lsp_line = line_index as u32;
        let lsp_column = match lsp_position_encoding {
            LspPositionEncoding::Utf8 => column_index as LspCharacterOffset,
            LspPositionEncoding::Utf16 => {
                // See the implementation of `lsp_to_typst::position_to_offset` for discussion
                // relevant to this function.

                // TODO: Typst's `Source` could easily provide an implementation of the method
                // we need here. Submit a PR to `typst` to add it, then update
                // this if/when merged.

                let utf16_offset = typst_source.byte_to_utf16(typst_offset).unwrap();

                let byte_line_offset = typst_source.line_to_byte(line_index).unwrap();
                let utf16_line_offset = typst_source.byte_to_utf16(byte_line_offset).unwrap();

                let utf16_column_offset = utf16_offset - utf16_line_offset;
                utf16_column_offset as LspCharacterOffset
            }
        };

        LspPosition::new(lsp_line, lsp_column)
    }

    pub fn range(
        typst_range: TypstRange,
        typst_source: &Source,
        lsp_position_encoding: LspPositionEncoding,
    ) -> LspRange {
        let typst_start = typst_range.start;
        let lsp_start = offset_to_position(typst_start, lsp_position_encoding, typst_source);

        let typst_end = typst_range.end;
        let lsp_end = offset_to_position(typst_end, lsp_position_encoding, typst_source);

        let raw_range = LspRawRange::new(lsp_start, lsp_end);
        LspRange::new(raw_range, lsp_position_encoding)
    }

    fn completion_kind(typst_completion_kind: TypstCompletionKind) -> LspCompletionKind {
        match typst_completion_kind {
            TypstCompletionKind::Syntax => LspCompletionKind::SNIPPET,
            TypstCompletionKind::Func => LspCompletionKind::FUNCTION,
            TypstCompletionKind::Param => LspCompletionKind::VARIABLE,
            TypstCompletionKind::Constant => LspCompletionKind::CONSTANT,
            TypstCompletionKind::Symbol(_) => LspCompletionKind::TEXT,
            TypstCompletionKind::Type => LspCompletionKind::CLASS,
        }
    }

    lazy_static! {
        static ref TYPST_SNIPPET_PLACEHOLDER_RE: Regex = Regex::new(r"\$\{(.*?)\}").unwrap();
    }

    /// Adds numbering to placeholders in snippets
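    ///
    /// E.g. an apply string like `text(${body})` becomes `text(${1:body})`,
    /// so editors tab through the placeholders in order.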
    fn snippet(typst_snippet: &EcoString) -> String {
        let mut counter = 1;
        let result =
            TYPST_SNIPPET_PLACEHOLDER_RE.replace_all(typst_snippet.as_str(), |cap: &Captures| {
                let substitution = format!("${{{}:{}}}", counter, &cap[1]);
                counter += 1;
                substitution
            });

        result.to_string()
    }

    pub fn completion(
        typst_completion: &TypstCompletion,
        lsp_replace: LspRawRange,
    ) -> LspCompletion {
        let typst_snippet = typst_completion
            .apply
            .as_ref()
            .unwrap_or(&typst_completion.label);
        let lsp_snippet = snippet(typst_snippet);
        let text_edit = CompletionTextEdit::Edit(TextEdit::new(lsp_replace, lsp_snippet));

        LspCompletion {
            label: typst_completion.label.to_string(),
            kind: Some(completion_kind(typst_completion.kind.clone())),
            detail: typst_completion.detail.as_ref().map(String::from),
            text_edit: Some(text_edit),
            insert_text_format: Some(InsertTextFormat::SNIPPET),
            ..Default::default()
        }
    }

    pub fn completions(
        typst_completions: &[TypstCompletion],
        lsp_replace: LspRawRange,
    ) -> Vec<LspCompletion> {
        typst_completions
            .iter()
            .map(|typst_completion| completion(typst_completion, lsp_replace))
            .collect_vec()
    }

    pub fn tooltip(typst_tooltip: &TypstTooltip) -> LspHoverContents {
        let lsp_marked_string = match typst_tooltip {
            TypstTooltip::Text(text) => MarkedString::String(text.to_string()),
            TypstTooltip::Code(code) => MarkedString::LanguageString(LanguageString {
                language: "typst".to_owned(),
                value: code.to_string(),
            }),
        };
        LspHoverContents::Scalar(lsp_marked_string)
    }

    pub fn param_info(typst_param_info: &TypstParamInfo) -> LspParamInfo {
        LspParamInfo {
            label: lsp_types::ParameterLabel::Simple(typst_param_info.name.to_owned()),
            documentation: param_info_to_docs(typst_param_info),
        }
    }

    pub fn param_info_to_label(typst_param_info: &TypstParamInfo) -> String {
        format!(
            "{}: {}",
            typst_param_info.name,
            cast_info_to_label(&typst_param_info.input)
        )
    }

    fn param_info_to_docs(typst_param_info: &TypstParamInfo) -> Option<Documentation> {
        if !typst_param_info.docs.is_empty() {
            Some(Documentation::MarkupContent(MarkupContent {
                value: typst_param_info.docs.to_owned(),
                kind: MarkupKind::Markdown,
            }))
        } else {
            None
        }
    }

    pub fn cast_info_to_label(cast_info: &CastInfo) -> String {
        match cast_info {
            CastInfo::Any => "any".to_owned(),
            CastInfo::Value(value, _) => value.repr().to_string(),
            CastInfo::Type(ty) => ty.to_string(),
            CastInfo::Union(options) => options.iter().map(cast_info_to_label).join(" "),
        }
    }
}

#[cfg(test)]
mod test {
    use typst::syntax::Source;

    use crate::config::PositionEncoding;
    use crate::lsp_typst_boundary::lsp_to_typst;

    use super::*;

    const ENCODING_TEST_STRING: &str = "test 🥺 test";

    #[test]
    fn utf16_position_to_utf8_offset() {
        let source = Source::detached(ENCODING_TEST_STRING);

        let start = LspPosition {
            line: 0,
            character: 0,
        };
        let emoji = LspPosition {
            line: 0,
            character: 5,
        };
        let post_emoji = LspPosition {
            line: 0,
            character: 7,
        };
        let end = LspPosition {
            line: 0,
            character: 12,
        };

        let start_offset =
            lsp_to_typst::position_to_offset(start, PositionEncoding::Utf16, &source);
        let start_actual = 0;

        let emoji_offset =
            lsp_to_typst::position_to_offset(emoji, PositionEncoding::Utf16, &source);
        let emoji_actual = 5;

        let post_emoji_offset =
            lsp_to_typst::position_to_offset(post_emoji, PositionEncoding::Utf16, &source);
        let post_emoji_actual = 9;

        let end_offset = lsp_to_typst::position_to_offset(end, PositionEncoding::Utf16, &source);
        let end_actual = 14;

        assert_eq!(start_offset, start_actual);
        assert_eq!(emoji_offset, emoji_actual);
        assert_eq!(post_emoji_offset, post_emoji_actual);
        assert_eq!(end_offset, end_actual);
    }

    #[test]
    fn utf8_offset_to_utf16_position() {
        let source = Source::detached(ENCODING_TEST_STRING);

        let start = 0;
        let emoji = 5;
        let post_emoji = 9;
        let end = 14;

        let start_position = LspPosition {
            line: 0,
            character: 0,
        };
        let start_actual =
            typst_to_lsp::offset_to_position(start, PositionEncoding::Utf16, &source);

        let emoji_position = LspPosition {
            line: 0,
            character: 5,
        };
        let emoji_actual =
            typst_to_lsp::offset_to_position(emoji, PositionEncoding::Utf16, &source);

        let post_emoji_position = LspPosition {
            line: 0,
            character: 7,
        };
        let post_emoji_actual =
            typst_to_lsp::offset_to_position(post_emoji, PositionEncoding::Utf16, &source);

        let end_position = LspPosition {
            line: 0,
            character: 12,
        };
        let end_actual = typst_to_lsp::offset_to_position(end, PositionEncoding::Utf16, &source);

        assert_eq!(start_position, start_actual);
        assert_eq!(emoji_position, emoji_actual);
        assert_eq!(post_emoji_position, post_emoji_actual);
        assert_eq!(end_position, end_actual);
    }
}
60
crates/tinymist/src/main.rs
Normal file
@@ -0,0 +1,60 @@
//! # tinymist LSP Server

mod config;
mod ext;
mod lsp_typst_boundary;
mod server;

// pub mod formatting;
pub mod actor;
pub mod analysis;
pub mod lsp;
pub mod semantic_tokens;

use server::TypstServer;

use tower_lsp::{LspService, Server};

// #[derive(Debug, Clone)]
// struct Args {}

// fn arg_parser() -> OptionParser<Args> {
//     construct!(Args {}).to_options().version(
//         format!(
//             "{}, commit {} (Typst version {TYPST_VERSION})",
//             env!("CARGO_PKG_VERSION"),
//             env!("GIT_COMMIT")
//         )
//         .as_str(),
//     )
// }

// pub const TYPST_VERSION: &str = env!("TYPST_VERSION");

#[tokio::main]
async fn main() {
    let _ = env_logger::builder()
        // TODO: set this back to Info
        .filter_module("tinymist", log::LevelFilter::Trace)
        // .filter_module("tinymist", log::LevelFilter::Debug)
        .filter_module("typst_preview", log::LevelFilter::Debug)
        .filter_module("typst_ts", log::LevelFilter::Info)
        // TODO: set this back to Info
        .filter_module(
            "typst_ts_compiler::service::compile",
            log::LevelFilter::Debug,
        )
        .filter_module("typst_ts_compiler::service::watch", log::LevelFilter::Debug)
        .try_init();

    run().await;
}

async fn run() {
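    // Serve the LSP over stdio; tower-lsp handles the JSON-RPC framing and
    // dispatches requests to `TypstServer`.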
    let stdin = tokio::io::stdin();
    let stdout = tokio::io::stdout();

    let (service, socket) = LspService::new(TypstServer::new);

    Server::new(stdin, stdout, socket).serve(service).await;
}
75
crates/tinymist/src/semantic_tokens/delta.rs
Normal file
@@ -0,0 +1,75 @@
use tower_lsp::lsp_types::{SemanticToken, SemanticTokensEdit};

#[derive(Debug)]
struct CachedTokens {
    tokens: Vec<SemanticToken>,
    id: u64,
}

#[derive(Default, Debug)]
pub struct Cache {
    last_sent: Option<CachedTokens>,
    next_id: u64,
}

impl Cache {
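    /// Takes the cached token list if `id` matches the last result we sent.
    /// On a stale or unparsable id, the cache is left in place and `None`
    /// makes the caller fall back to sending the full token list.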
    pub fn try_take_result(&mut self, id: &str) -> Option<Vec<SemanticToken>> {
        let id = id.parse::<u64>().ok()?;
        match self.last_sent.take() {
            Some(cached) if cached.id == id => Some(cached.tokens),
            Some(cached) => {
                // replace after taking
                self.last_sent = Some(cached);
                None
            }
            None => None,
        }
    }

    pub fn cache_result(&mut self, tokens: Vec<SemanticToken>) -> String {
        let id = self.get_next_id();
        let cached = CachedTokens { tokens, id };
        self.last_sent = Some(cached);
        id.to_string()
    }

    fn get_next_id(&mut self) -> u64 {
        let id = self.next_id;
        self.next_id += 1;
        id
    }
}

pub fn token_delta(from: &[SemanticToken], to: &[SemanticToken]) -> Vec<SemanticTokensEdit> {
    // Taken from `rust-analyzer`'s algorithm:
    // https://github.com/rust-lang/rust-analyzer/blob/master/crates/rust-analyzer/src/semantic_tokens.rs#L219
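    //
    // `start` counts the tokens shared at the front of both lists and
    // `dist_from_end` those shared at the back, so the single edit below
    // replaces only the window in between. The factors of 5 appear because
    // each semantic token is encoded as five consecutive `u32` values.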

    let start = from
        .iter()
        .zip(to.iter())
        .take_while(|(x, y)| x == y)
        .count();

    let (_, from) = from.split_at(start);
    let (_, to) = to.split_at(start);

    let dist_from_end = from
        .iter()
        .rev()
        .zip(to.iter().rev())
        .take_while(|(x, y)| x == y)
        .count();

    let (from, _) = from.split_at(from.len() - dist_from_end);
    let (to, _) = to.split_at(to.len() - dist_from_end);

    if from.is_empty() && to.is_empty() {
        vec![]
    } else {
        vec![SemanticTokensEdit {
            start: 5 * start as u32,
            delete_count: 5 * from.len() as u32,
            data: Some(to.into()),
        }]
    }
}
238
crates/tinymist/src/semantic_tokens/mod.rs
Normal file
@@ -0,0 +1,238 @@
use itertools::Itertools;
use strum::IntoEnumIterator;
use tower_lsp::lsp_types::{
    Registration, SemanticToken, SemanticTokensEdit, SemanticTokensFullOptions,
    SemanticTokensLegend, SemanticTokensOptions, Unregistration,
};
use typst::diag::EcoString;
use typst::syntax::{ast, LinkedNode, Source, SyntaxKind};

use crate::actor::typst::CompileCluster;
use crate::config::PositionEncoding;

use self::delta::token_delta;
use self::modifier_set::ModifierSet;
use self::token_encode::encode_tokens;
use self::typst_tokens::{Modifier, TokenType};

pub use self::delta::Cache as SemanticTokenCache;

mod delta;
mod modifier_set;
mod token_encode;
mod typst_tokens;

pub fn get_legend() -> SemanticTokensLegend {
    SemanticTokensLegend {
        token_types: TokenType::iter().map(Into::into).collect(),
        token_modifiers: Modifier::iter().map(Into::into).collect(),
    }
}

const SEMANTIC_TOKENS_REGISTRATION_ID: &str = "semantic_tokens";
const SEMANTIC_TOKENS_METHOD_ID: &str = "textDocument/semanticTokens";

pub fn get_semantic_tokens_registration(options: SemanticTokensOptions) -> Registration {
    Registration {
        id: SEMANTIC_TOKENS_REGISTRATION_ID.to_owned(),
        method: SEMANTIC_TOKENS_METHOD_ID.to_owned(),
        register_options: Some(
            serde_json::to_value(options)
                .expect("semantic tokens options should be representable as JSON value"),
        ),
    }
}

pub fn get_semantic_tokens_unregistration() -> Unregistration {
    Unregistration {
        id: SEMANTIC_TOKENS_REGISTRATION_ID.to_owned(),
        method: SEMANTIC_TOKENS_METHOD_ID.to_owned(),
    }
}

pub fn get_semantic_tokens_options() -> SemanticTokensOptions {
    SemanticTokensOptions {
        legend: get_legend(),
        full: Some(SemanticTokensFullOptions::Delta { delta: Some(true) }),
        ..Default::default()
    }
}

impl CompileCluster {
    pub fn get_semantic_tokens_full(
        &self,
        source: &Source,
        encoding: PositionEncoding,
    ) -> (Vec<SemanticToken>, String) {
        let root = LinkedNode::new(source.root());

        let tokens = tokenize_tree(&root, ModifierSet::empty());
        let encoded_tokens = encode_tokens(tokens, source, encoding);
        let output_tokens = encoded_tokens.map(|(token, _)| token).collect_vec();

        let result_id = self
            .semantic_tokens_delta_cache
            .write()
            .cache_result(output_tokens.clone());

        (output_tokens, result_id)
    }

    pub fn try_semantic_tokens_delta_from_result_id(
        &self,
        source: &Source,
        result_id: &str,
        encoding: PositionEncoding,
    ) -> (Result<Vec<SemanticTokensEdit>, Vec<SemanticToken>>, String) {
        let cached = self
            .semantic_tokens_delta_cache
            .write()
            .try_take_result(result_id);

        // This call will overwrite the cache, so we need to read from the cache first.
        let (tokens, result_id) = self.get_semantic_tokens_full(source, encoding);

        match cached {
            Some(cached) => (Ok(token_delta(&cached, &tokens)), result_id),
            None => (Err(tokens), result_id),
        }
    }
}

fn tokenize_single_node(node: &LinkedNode, modifiers: ModifierSet) -> Option<Token> {
    let is_leaf = node.children().next().is_none();

    token_from_node(node)
        .or_else(|| is_leaf.then_some(TokenType::Text))
        .map(|token_type| Token::new(token_type, modifiers, node))
}

/// Tokenize a node and its children
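/// (returning a boxed iterator, since the recursive call means the concrete
/// iterator type cannot be named)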
fn tokenize_tree<'a>(
    root: &LinkedNode<'a>,
    parent_modifiers: ModifierSet,
) -> Box<dyn Iterator<Item = Token> + 'a> {
    let modifiers = parent_modifiers | modifiers_from_node(root);

    let token = tokenize_single_node(root, modifiers).into_iter();
    let children = root
        .children()
        .flat_map(move |child| tokenize_tree(&child, modifiers));
    Box::new(token.chain(children))
}

pub struct Token {
    pub token_type: TokenType,
    pub modifiers: ModifierSet,
    pub offset: usize,
    pub source: EcoString,
}

impl Token {
    pub fn new(token_type: TokenType, modifiers: ModifierSet, node: &LinkedNode) -> Self {
        let source = node.get().clone().into_text();

        Self {
            token_type,
            modifiers,
            offset: node.offset(),
            source,
        }
    }
}

/// Determines the [`Modifier`]s to be applied to a node and all its children.
///
/// Note that this does not recurse up, so calling it on a child node may not
/// return a modifier that should be applied to it due to a parent.
fn modifiers_from_node(node: &LinkedNode) -> ModifierSet {
    match node.kind() {
        SyntaxKind::Emph => ModifierSet::new(&[Modifier::Emph]),
        SyntaxKind::Strong => ModifierSet::new(&[Modifier::Strong]),
        SyntaxKind::Math | SyntaxKind::Equation => ModifierSet::new(&[Modifier::Math]),
        _ => ModifierSet::empty(),
    }
}

/// Determines the best [`TokenType`] for an entire node and its children, if
/// any. If there is no single `TokenType`, or none better than `Text`, returns
/// `None`.
///
/// In tokenization, returning `Some` stops recursion, while returning `None`
/// continues and attempts to tokenize each of `node`'s children. If there are
/// no children, `Text` is taken as the default.
fn token_from_node(node: &LinkedNode) -> Option<TokenType> {
    use SyntaxKind::*;

    match node.kind() {
        Star if node.parent_kind() == Some(Strong) => Some(TokenType::Punctuation),
        Star if node.parent_kind() == Some(ModuleImport) => Some(TokenType::Operator),

        Underscore if node.parent_kind() == Some(Emph) => Some(TokenType::Punctuation),
        Underscore if node.parent_kind() == Some(MathAttach) => Some(TokenType::Operator),

        MathIdent | Ident => Some(token_from_ident(node)),
        Hash => token_from_hashtag(node),

        LeftBrace | RightBrace | LeftBracket | RightBracket | LeftParen | RightParen | Comma
        | Semicolon | Colon => Some(TokenType::Punctuation),
        Linebreak | Escape | Shorthand => Some(TokenType::Escape),
        Link => Some(TokenType::Link),
        Raw => Some(TokenType::Raw),
        Label => Some(TokenType::Label),
        RefMarker => Some(TokenType::Ref),
        Heading | HeadingMarker => Some(TokenType::Heading),
        ListMarker | EnumMarker | TermMarker => Some(TokenType::ListMarker),
        MathAlignPoint | Plus | Minus | Slash | Hat | Dot | Eq | EqEq | ExclEq | Lt | LtEq | Gt
        | GtEq | PlusEq | HyphEq | StarEq | SlashEq | Dots | Arrow | Not | And | Or => {
            Some(TokenType::Operator)
        }
        Dollar => Some(TokenType::Delimiter),
        None | Auto | Let | Show | If | Else | For | In | While | Break | Continue | Return
        | Import | Include | As | Set => Some(TokenType::Keyword),
        Bool => Some(TokenType::Bool),
        Int | Float | Numeric => Some(TokenType::Number),
        Str => Some(TokenType::String),
        LineComment | BlockComment => Some(TokenType::Comment),
        Error => Some(TokenType::Error),

        // Disambiguate from `SyntaxKind::None`
        _ => Option::None,
    }
}

// TODO: differentiate also using tokens in scope, not just context
fn is_function_ident(ident: &LinkedNode) -> bool {
    let Some(next) = ident.next_leaf() else {
        return false;
    };
    let function_call = matches!(next.kind(), SyntaxKind::LeftParen)
        && matches!(
            next.parent_kind(),
            Some(SyntaxKind::Args | SyntaxKind::Params)
        );
    let function_content = matches!(next.kind(), SyntaxKind::LeftBracket)
        && matches!(next.parent_kind(), Some(SyntaxKind::ContentBlock));
    function_call || function_content
}

fn token_from_ident(ident: &LinkedNode) -> TokenType {
    if is_function_ident(ident) {
        TokenType::Function
    } else {
        TokenType::Interpolated
    }
}

fn get_expr_following_hashtag<'a>(hashtag: &LinkedNode<'a>) -> Option<LinkedNode<'a>> {
    hashtag
        .next_sibling()
        .filter(|next| next.cast::<ast::Expr>().map_or(false, |expr| expr.hash()))
        .and_then(|node| node.leftmost_leaf())
}

fn token_from_hashtag(hashtag: &LinkedNode) -> Option<TokenType> {
    get_expr_following_hashtag(hashtag)
        .as_ref()
        .and_then(token_from_node)
}
33
crates/tinymist/src/semantic_tokens/modifier_set.rs
Normal file
@@ -0,0 +1,33 @@
use std::ops;

use super::typst_tokens::Modifier;

#[derive(Default, Clone, Copy)]
pub struct ModifierSet(u32);

impl ModifierSet {
    pub fn empty() -> Self {
        Self::default()
    }

    pub fn new(modifiers: &[Modifier]) -> Self {
        let bits = modifiers
            .iter()
            .copied()
            .map(Modifier::bitmask)
            .fold(0, |bits, mask| bits | mask);
        Self(bits)
    }

    pub fn bitset(self) -> u32 {
        self.0
    }
}

impl ops::BitOr for ModifierSet {
    type Output = Self;

    fn bitor(self, rhs: Self) -> Self::Output {
        Self(self.0 | rhs.0)
    }
}
44
crates/tinymist/src/semantic_tokens/token_encode.rs
Normal file
@@ -0,0 +1,44 @@
use tower_lsp::lsp_types::{Position, SemanticToken};
use typst::diag::EcoString;
use typst::syntax::Source;

use crate::config::PositionEncoding;
use crate::ext::{PositionExt, StrExt};
use crate::lsp_typst_boundary::typst_to_lsp;

use super::Token;

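/// Lazily encodes tokens in the LSP wire format, where each token's line and
/// start column are stored as deltas relative to the previous token.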
pub(super) fn encode_tokens<'a>(
    tokens: impl Iterator<Item = Token> + 'a,
    source: &'a Source,
    encoding: PositionEncoding,
) -> impl Iterator<Item = (SemanticToken, EcoString)> + 'a {
    tokens.scan(Position::new(0, 0), move |last_position, token| {
        let (encoded_token, source_code, position) =
            encode_token(token, last_position, source, encoding);
        *last_position = position;
        Some((encoded_token, source_code))
    })
}

fn encode_token(
    token: Token,
    last_position: &Position,
    source: &Source,
    encoding: PositionEncoding,
) -> (SemanticToken, EcoString, Position) {
    let position = typst_to_lsp::offset_to_position(token.offset, encoding, source);
    let delta = last_position.delta(&position);

    let length = token.source.as_str().encoded_len(encoding);

    let lsp_token = SemanticToken {
        delta_line: delta.delta_line,
        delta_start: delta.delta_start,
        length: length as u32,
        token_type: token.token_type as u32,
        token_modifiers_bitset: token.modifiers.bitset(),
    };

    (lsp_token, token.source, position)
}
133
crates/tinymist/src/semantic_tokens/typst_tokens.rs
Normal file
@@ -0,0 +1,133 @@
//! Types for tokens used for Typst syntax

use strum::EnumIter;
use tower_lsp::lsp_types::{SemanticTokenModifier, SemanticTokenType};

const BOOL: SemanticTokenType = SemanticTokenType::new("bool");
const PUNCTUATION: SemanticTokenType = SemanticTokenType::new("punct");
const ESCAPE: SemanticTokenType = SemanticTokenType::new("escape");
const LINK: SemanticTokenType = SemanticTokenType::new("link");
const RAW: SemanticTokenType = SemanticTokenType::new("raw");
const LABEL: SemanticTokenType = SemanticTokenType::new("label");
const REF: SemanticTokenType = SemanticTokenType::new("ref");
const HEADING: SemanticTokenType = SemanticTokenType::new("heading");
const LIST_MARKER: SemanticTokenType = SemanticTokenType::new("marker");
const LIST_TERM: SemanticTokenType = SemanticTokenType::new("term");
const DELIMITER: SemanticTokenType = SemanticTokenType::new("delim");
const INTERPOLATED: SemanticTokenType = SemanticTokenType::new("pol");
const ERROR: SemanticTokenType = SemanticTokenType::new("error");
const TEXT: SemanticTokenType = SemanticTokenType::new("text");

/// Very similar to [`typst_ide::Tag`], but with convenience traits, and
/// extensible because we want to further customize highlighting
#[derive(Clone, Copy, EnumIter)]
#[repr(u32)]
pub enum TokenType {
    // Standard LSP types
    Comment,
    String,
    Keyword,
    Operator,
    Number,
    Function,
    Decorator,
    // Custom types
    Bool,
    Punctuation,
    Escape,
    Link,
    Raw,
    Label,
    Ref,
    Heading,
    ListMarker,
    ListTerm,
    Delimiter,
    Interpolated,
    Error,
    /// Any text in markup without a more specific token type, possibly styled.
    ///
    /// We perform styling (like bold and italics) via modifiers. That means
    /// everything that should receive styling needs to be a token so we can
    /// apply a modifier to it. This token type is mostly for that, since
    /// text should usually not be specially styled.
    Text,
}

impl From<TokenType> for SemanticTokenType {
    fn from(token_type: TokenType) -> Self {
        use TokenType::*;

        match token_type {
            Comment => Self::COMMENT,
            String => Self::STRING,
            Keyword => Self::KEYWORD,
            Operator => Self::OPERATOR,
            Number => Self::NUMBER,
            Function => Self::FUNCTION,
            Decorator => Self::DECORATOR,
            Bool => BOOL,
            Punctuation => PUNCTUATION,
            Escape => ESCAPE,
            Link => LINK,
            Raw => RAW,
            Label => LABEL,
            Ref => REF,
            Heading => HEADING,
            ListMarker => LIST_MARKER,
            ListTerm => LIST_TERM,
            Delimiter => DELIMITER,
            Interpolated => INTERPOLATED,
            Error => ERROR,
            Text => TEXT,
        }
    }
}

const STRONG: SemanticTokenModifier = SemanticTokenModifier::new("strong");
const EMPH: SemanticTokenModifier = SemanticTokenModifier::new("emph");
const MATH: SemanticTokenModifier = SemanticTokenModifier::new("math");

#[derive(Clone, Copy, EnumIter)]
#[repr(u8)]
pub enum Modifier {
    Strong,
    Emph,
    Math,
}

impl Modifier {
    pub fn index(self) -> u8 {
        self as u8
    }

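    /// The set bit for this modifier: e.g. `Strong` (index 0) yields `0b001`
    /// and `Emph` (index 1) yields `0b010`, so a `ModifierSet` is just the
    /// bitwise OR of these masks.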
    pub fn bitmask(self) -> u32 {
        0b1 << self.index()
    }
}

impl From<Modifier> for SemanticTokenModifier {
    fn from(modifier: Modifier) -> Self {
        use Modifier::*;

        match modifier {
            Strong => STRONG,
            Emph => EMPH,
            Math => MATH,
        }
    }
}

#[cfg(test)]
mod test {
    use strum::IntoEnumIterator;

    use super::*;

    #[test]
    fn ensure_not_too_many_modifiers() {
        // Because modifiers are encoded in a 32 bit bitmask, we can't have more than 32
        // modifiers
        assert!(Modifier::iter().len() <= 32);
    }
}
158
crates/tinymist/src/server.rs
Normal file
@@ -0,0 +1,158 @@
use std::sync::Arc;

use once_cell::sync::OnceCell;
use tokio::sync::{Mutex, RwLock};
pub use tower_lsp::Client as LspHost;
use typst::model::Document;

use crate::actor::typst::CompileCluster;
use crate::config::{Config, ConstConfig};

pub struct TypstServer {
    pub client: LspHost,
    pub document: Mutex<Arc<Document>>,
    // typst_thread: TypstThread,
    pub universe: OnceCell<CompileCluster>,
    pub config: Arc<RwLock<Config>>,
    pub const_config: OnceCell<ConstConfig>,
}

impl TypstServer {
    pub fn new(client: LspHost) -> Self {
        Self {
            // typst_thread: Default::default(),
            universe: Default::default(),
            config: Default::default(),
            const_config: Default::default(),
            client,
            document: Default::default(),
        }
    }

    pub fn const_config(&self) -> &ConstConfig {
        self.const_config
            .get()
            .expect("const config should be initialized")
    }

    pub fn universe(&self) -> &CompileCluster {
        self.universe.get().expect("universe should be initialized")
    }

    // pub fn typst_global_scopes(&self) -> typst::foundations::Scopes {
    //     typst::foundations::Scopes::new(Some(&TYPST_STDLIB))
    // }

    // pub async fn register_workspace_files(&self) -> FsResult<()> {
    //     let mut workspace = self.workspace().write().await;
    //     workspace.register_files()
    // }

    // async fn read_workspace(&self) -> RwLockReadGuard<Workspace> {
    //     self.workspace().read().await
    // }

    // async fn read_workspace_owned(&self) -> OwnedRwLockReadGuard<Workspace> {
    //     Arc::clone(self.workspace()).read_owned().await
    // }

    // pub async fn project_and_full_id(&self, uri: &Url) -> FsResult<(Project,
    // FullFileId)> { let workspace = self.read_workspace_owned().await;
    //     let full_id = workspace.full_id(uri)?;
    //     let project = Project::new(full_id.package(), workspace);
    //     Ok((project, full_id))
    // }

    // pub async fn scope_with_source(&self, uri: &Url) -> FsResult<SourceScope> {
    //     let (project, _) = self.project_and_full_id(uri).await?;
    //     let source = project.read_source_by_uri(uri)?;
    //     Ok(SourceScope { project, source })
    // }

    // pub async fn thread_with_world(
    //     &self,
    //     builder: impl Into<WorldBuilder<'_>>,
    // ) -> FsResult<WorldThread> {
    //     let (main, project) =
    //         builder.into().main_project(self.workspace()).await?;

    //     Ok(WorldThread {
    //         main,
    //         main_project: project,
    //         typst_thread: &self.typst_thread,
    //     })
    // }

    // /// Run the given function on the Typst thread, passing back its return
    // /// value.
    // pub async fn typst<T: Send + 'static>(
    //     &self,
    //     f: impl FnOnce(runtime::Handle) -> T + Send + 'static,
    // ) -> T {
    //     self.typst_thread.run(f).await
    // }
}

// pub struct SourceScope {
//     source: Source,
//     project: Project,
// }

// impl SourceScope {
//     pub fn run<T>(self, f: impl FnOnce(&Source, &Project) -> T) -> T {
//         f(&self.source, &self.project)
//     }

//     pub fn run2<T>(self, f: impl FnOnce(Source, Project) -> T) -> T {
//         f(self.source, self.project)
//     }
// }

// pub struct WorldThread<'a> {
//     main: Source,
//     main_project: Project,
//     typst_thread: &'a TypstThread,
// }

// impl<'a> WorldThread<'a> {
//     pub async fn run<T: Send + 'static>(
//         self,
//         f: impl FnOnce(ProjectWorld) -> T + Send + 'static,
//     ) -> T {
//         self.typst_thread
//             .run_with_world(self.main_project, self.main, f)
//             .await
//     }
// }

// pub enum WorldBuilder<'a> {
//     MainUri(&'a Url),
//     MainAndProject(Source, Project),
// }

// impl<'a> WorldBuilder<'a> {
//     async fn main_project(self, workspace: &Arc<RwLock<Workspace>>) ->
//     FsResult<(Source, Project)> { match self {
//         Self::MainUri(uri) => {
//             let workspace = Arc::clone(workspace).read_owned().await;
//             let full_id = workspace.full_id(uri)?;
//             let source = workspace.read_source(uri)?;
//             let project = Project::new(full_id.package(), workspace);
//             Ok((source, project))
//         }
//         Self::MainAndProject(main, project) => Ok((main, project)),
//     }
// }
// }

// impl<'a> From<&'a Url> for WorldBuilder<'a> {
//     fn from(uri: &'a Url) -> Self {
//         Self::MainUri(uri)
//     }
// }

// impl From<(Source, Project)> for WorldBuilder<'static> {
//     fn from((main, project): (Source, Project)) -> Self {
//         Self::MainAndProject(main, project)
//     }
// }
3
editors/vscode/.eslintignore
Normal file
@@ -0,0 +1,3 @@
.eslintrc.js
out/
node_modules/
38
editors/vscode/.eslintrc.js
Normal file
@@ -0,0 +1,38 @@
module.exports = {
    env: {
        es2021: true,
        node: true,
    },
    extends: [
        "plugin:@typescript-eslint/recommended",
        "plugin:@typescript-eslint/recommended-requiring-type-checking",
        "plugin:@typescript-eslint/strict",
        "prettier",
    ],
    overrides: [],
    parserOptions: {
        ecmaVersion: "latest",
        sourceType: "module",
        project: "tsconfig.json",
    },
    rules: {
        "no-unused-vars": "off",
        "@typescript-eslint/no-unused-vars": ["error", { argsIgnorePattern: "^_" }],
        "class-methods-use-this": "off",
        "@typescript-eslint/explicit-function-return-type": "error",
        "@typescript-eslint/no-explicit-any": "error",
        "init-declarations": "off",
        "@typescript-eslint/init-declarations": "error",
        "no-undef-init": "off",
        "@typescript-eslint/strict-boolean-expressions": [
            "error",
            {
                allowString: false,
                allowNumber: false,
                allowNullableObject: false,
                allowNullableEnum: false,
            },
        ],
    },
};
1
editors/vscode/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
tinymist-*.vsix
11
editors/vscode/.prettierignore
Normal file
@@ -0,0 +1,11 @@
.DS_Store
icons/
node_modules/
out/
.env
.env.*

# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock
13
editors/vscode/.prettierrc.js
Normal file
@@ -0,0 +1,13 @@
module.exports = {
    // same options as rust-analyzer, otherwise defaults from prettier
    printWidth: 100,
    tabWidth: 4,
    useTabs: false,
    semi: true,
    singleQuote: false,
    quoteProps: "as-needed",
    trailingComma: "es5",
    bracketSpacing: true,
    arrowParens: "always",
    singleAttributePerLine: false,
};
11
editors/vscode/.vscodeignore
Normal file
@@ -0,0 +1,11 @@
**
!language-configuration.json
!typst.tmLanguage.json
!out/extension.js
!out/tinymist
!out/tinymist.exe
!package.json
!package-lock.json
!icons/**
!README.md
!LICENSE.md
201
editors/vscode/LICENSE
Normal file
@@ -0,0 +1,201 @@
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright 2023 Myriad Dreamin, Nathan Varner

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
29
editors/vscode/README.md
Normal file
@@ -0,0 +1,29 @@
# tinymist Typst LSP VS Code Extension

A VS Code extension for Typst.

## Features

- Syntax highlighting, error reporting, code completion, and function signature
  help
- Compiles to PDF on save (configurable to as-you-type, or can be disabled)

## Usage Tips

- This extension compiles to PDF, but it doesn't have a PDF viewer yet. To view
  the output as you work, install a PDF viewer extension, such as `vscode-pdf`.
- To configure when PDFs are compiled (see the settings sketch below):
  1. Open settings
     - File -> Preferences -> Settings (Linux, Windows)
     - Code -> Preferences -> Settings (Mac)
  2. Search for "Typst Export PDF"
  3. Change the Export PDF setting
     - `onSave` exports a PDF whenever you save the Typst file
     - `onType` exports the PDF continuously, as you type
     - `never` disables PDF export
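
The same choice can be made directly in `settings.json`; a minimal sketch, using one of the three values above:

```jsonc
{
  // Export a PDF every time the Typst file is saved.
  "tinymist.exportPdf": "onSave"
}
```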

## Technical

The extension uses [Typst LSP](https://github.com/_/tinymist) on the backend.
BIN
editors/vscode/icons/typst-small.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 482 B
46
editors/vscode/language-configuration.json
Normal file
@@ -0,0 +1,46 @@
{
  "comments": {
    "lineComment": "//",
    "blockComment": ["/*", "*/"]
  },
  "brackets": [
    ["[", "]"],
    ["{", "}"],
    ["(", ")"]
  ],
  "autoClosingPairs": [
    { "open": "[", "close": "]" },
    { "open": "{", "close": "}" },
    { "open": "(", "close": ")" },
    { "open": "\"", "close": "\"", "notIn": ["string"] },
    { "open": "$", "close": "$", "notIn": ["string"] }
  ],
  "autoCloseBefore": "$ \n\t",
  "surroundingPairs": [
    ["[", "]"],
    ["{", "}"],
    ["(", ")"],
    ["\"", "\""],
    ["*", "*"],
    ["_", "_"],
    ["`", "`"],
    ["$", "$"]
  ]
}
349
editors/vscode/package.json
Normal file
@@ -0,0 +1,349 @@
{
  "name": "tinymist",
  "description": "A language server for Typst",
  "repository": {
    "type": "git",
    "url": "https://github.com/Myriad-Dreamin/tinymist"
  },
  "displayName": "Tinymist Typst LSP",
  "author": "Myriad-Dreamin",
  "contributors": ["Myriad-Dreamin", "Nathan Varner"],
  "publisher": "Myriad-Dreamin",
  "license": "Apache-2.0 OR MIT",
  "version": "0.12.0",
  "engines": {
    "vscode": "^1.71.0"
  },
  "main": "./out/extension.js",
  "contributes": {
    "configuration": {
      "type": "object",
      "title": "Typst LSP",
      "properties": {
        "tinymist.exportPdf": {
          "title": "Export PDF",
          "description": "The extension can export PDFs of your Typst files. This setting controls whether this feature is enabled and how often it runs.",
          "type": "string",
          "default": "onSave",
          "enum": ["never", "onSave", "onType"],
          "enumDescriptions": [
            "Never export PDFs; you will run typst manually.",
            "Export PDFs when you save a file.",
            "Export PDFs as you type in a file."
          ]
        },
        "tinymist.rootPath": {
          "title": "Root path",
          "description": "Configure the root for absolute paths in typst",
          "type": ["string", "null"],
          "default": null
        },
        "tinymist.semanticTokens": {
          "title": "Semantic tokens mode",
          "description": "Enable or disable semantic tokens (LSP syntax highlighting)",
          "type": "string",
          "default": "enable",
          "enum": ["enable", "disable"],
          "enumDescriptions": [
            "Use semantic tokens for syntax highlighting",
            "Do not use semantic tokens for syntax highlighting"
          ]
        },
        "tinymist.serverPath": {
          "title": "Path to server executable",
          "description": "The extension can use a local tinymist executable instead of the one bundled with the extension. This setting controls the path to the executable.",
          "type": ["string", "null"],
          "default": null
        },
        "tinymist.trace.server": {
          "scope": "window",
          "type": "string",
          "enum": ["off", "messages", "verbose"],
          "default": "off",
          "description": "Traces the communication between VS Code and the language server."
        },
        "tinymist.experimentalFormatterMode": {
          "title": "Enable Experimental Formatter",
          "description": "The extension can format Typst files using typstfmt (experimental).",
          "type": "string",
          "default": "off",
          "enum": ["off", "on"],
          "enumDescriptions": [
            "Formatter is not activated.",
            "Experimental formatter is activated."
          ]
        }
      }
    },
    "configurationDefaults": {
      "[typst]": {
        "editor.wordWrap": "on",
        "editor.semanticHighlighting.enabled": true,
        "editor.tabSize": 2
      }
    },
    "languages": [
      {
        "id": "typst",
        "configuration": "./language-configuration.json",
        "extensions": [".typ"],
        "aliases": ["Typst"],
        "icon": {
          "light": "./icons/typst-small.png",
          "dark": "./icons/typst-small.png"
        }
      }
    ],
    "semanticTokenTypes": [
      { "id": "bool", "description": "A boolean literal" },
      { "id": "punct", "description": "Punctuation in code" },
      { "id": "escape", "description": "Escape sequence" },
      { "id": "link", "description": "Hyperlink" },
      { "id": "raw", "description": "Raw text" },
      { "id": "label", "description": "Label" },
      { "id": "ref", "description": "Reference to a label" },
      { "id": "heading", "description": "Heading" },
      { "id": "marker", "description": "List, enum, or term list marker" },
      { "id": "term", "description": "Term in a term list" },
      { "id": "delim", "description": "Delimiter of a different type of markup" },
      { "id": "pol", "description": "Interpolated variable" },
      { "id": "error", "description": "Syntax error" },
      { "id": "text", "description": "Text" }
    ],
    "semanticTokenModifiers": [
      { "id": "math", "description": "Math mode markup" },
      { "id": "strong", "description": "Strong (usually bolded) text" },
      { "id": "emph", "description": "Emphasized (usually italicized) text" }
    ],
    "semanticTokenScopes": [
      {
        "language": "typst",
        "scopes": {
          "*.strong.emph": ["markup.bold.typst markup.italic.typst"],
          "*.strong": ["markup.bold.typst"],
          "*.emph": ["markup.italic.typst"],
          "*.math": ["markup.math.typst"],
          "bool": ["constant.language.boolean.typst"],
          "punct": ["punctuation.typst", "punctuation.definition.typst"],
          "escape": [
            "constant.character.escape.typst",
            "keyword.operator.typst",
            "punctuation.definition.typst"
          ],
          "link": ["markup.underline.link.typst"],
          "raw": ["markup.inline.raw.typst", "markup.raw.inline.typst"],
          "delim.math": [
            "punctuation.definition.math.typst",
            "punctuation.definition.string.end.math.typst",
            "string.quoted.other.typst"
          ],
          "operator.math": ["keyword.operator.math.typst"],
          "heading": ["markup.heading.typst"],
          "marker": [
            "markup.list.typst punctuation.definition.list.begin.typst",
            "markup.list.typst",
            "punctuation.definition.list.begin.typst"
          ],
          "term": ["markup.list.term.typst", "markup.bold.term.typst"],
          "label": [
            "string.other.link.title.typst",
            "entity.name.label.typst",
            "meta.link.inline.typst",
            "markup.underline.link.typst"
          ],
          "ref": [
            "string.other.link.typst",
            "markup.other.reference.typst",
            "entity.name.label.typst",
            "meta.link.inline.typst",
            "markup.underline.link.typst"
          ],
          "pol": ["meta.interpolation.typst", "variable.typst"],
          "error": ["invalid.typst"]
        }
      }
    ],
    "commands": [
      {
        "command": "tinymist.exportCurrentPdf",
        "title": "Export the currently open file as PDF",
        "category": "Typst"
      },
      {
        "command": "tinymist.showPdf",
        "title": "Show the compiled PDF of the currently opened typst file",
        "category": "Typst",
        "icon": "$(open-preview)"
      },
      {
        "command": "tinymist.clearCache",
        "title": "Clear all cached resources",
        "category": "Typst"
      }
    ],
    "menus": {
      "commandPalette": [
        {
          "command": "tinymist.exportCurrentPdf",
          "when": "editorLangId == typst"
        },
        {
          "command": "tinymist.clearCache",
          "when": "editorLangId == typst"
        }
      ],
      "editor/title": [
        {
          "command": "tinymist.showPdf",
          "group": "navigation",
          "when": "editorLangId == typst"
        }
      ]
    }
  },
  "activationEvents": [
    "onLanguage:typst",
    "onCommand:tinymist.exportCurrentPdf",
    "onCommand:tinymist.clearCache"
  ],
  "scripts": {
    "build-base": "esbuild ./src/extension.ts --bundle --outfile=out/extension.js --external:vscode --format=cjs --platform=node --target=node16",
    "vscode:prepublish": "npm run build-base -- --minify",
    "package": "vsce package",
    "compile": "npm run build-base -- --sourcemap",
    "watch": "npm run build-base -- --sourcemap --watch",
    "check": "tsc --noEmit",
    "lint": "eslint ./src --ext .ts",
    "lint-fix": "eslint ./src --ext .ts --fix",
    "format-check": "prettier --check .",
    "format": "prettier --write .",
    "test": ""
  },
  "dependencies": {
    "vscode-languageclient": "^9.0.1"
  },
  "devDependencies": {
    "@types/node": "^20.8.10",
    "@types/vscode": "~1.71.0",
    "@typescript-eslint/eslint-plugin": "^6.9.1",
    "@typescript-eslint/parser": "^6.9.1",
    "@vscode/vsce": "^2.22.0",
    "esbuild": "^0.19.5",
    "eslint": "^8.52.0",
    "eslint-config-prettier": "^9.0.0",
    "eslint-plugin-import": "^2.29.0",
    "eslint-plugin-n": "^16.2.0",
    "eslint-plugin-promise": "^6.1.1",
    "ovsx": "^0.8.3",
    "prettier": "^3.0.3",
    "typescript": "^5.2.2"
  }
}
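
As the `tinymist.serverPath` property above suggests, a locally built server can replace the bundled one; a hypothetical `settings.json` entry (the path is made up, point it at your own build):

```jsonc
{
  // Hypothetical path to a locally built server binary.
  "tinymist.serverPath": "/home/me/.cargo/bin/tinymist"
}
```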
174
editors/vscode/src/extension.ts
Normal file
@@ -0,0 +1,174 @@
import {
    type ExtensionContext,
    workspace,
    window,
    commands,
    ViewColumn,
    Uri,
    WorkspaceConfiguration,
} from "vscode";
import * as path from "path";
import * as child_process from "child_process";

import {
    LanguageClient,
    type LanguageClientOptions,
    type ServerOptions,
} from "vscode-languageclient/node";

let client: LanguageClient | undefined = undefined;

export function activate(context: ExtensionContext): Promise<void> {
    return startClient(context).catch((e) => {
        void window.showErrorMessage(`Failed to activate tinymist: ${e}`);
        throw e;
    });
}

async function startClient(context: ExtensionContext): Promise<void> {
    const config = workspace.getConfiguration("tinymist");
    const serverCommand = getServer(config);
    const run = {
        command: serverCommand,
        options: { env: Object.assign({}, process.env, { RUST_BACKTRACE: "1" }) },
    };
    const serverOptions: ServerOptions = {
        run,
        debug: run,
    };

    const clientOptions: LanguageClientOptions = {
        documentSelector: [{ scheme: "file", language: "typst" }],
        initializationOptions: config,
    };

    client = new LanguageClient(
        "tinymist",
        "Tinymist Typst Language Server",
        serverOptions,
        clientOptions
    );

    context.subscriptions.push(
        commands.registerCommand("tinymist.exportCurrentPdf", commandExportCurrentPdf)
    );
    context.subscriptions.push(commands.registerCommand("tinymist.showPdf", commandShowPdf));
    context.subscriptions.push(commands.registerCommand("tinymist.clearCache", commandClearCache));

    return client.start();
}

export function deactivate(): Promise<void> | undefined {
    return client?.stop();
}

// Resolves the server binary: an explicit `tinymist.serverPath` wins, then the
// binary bundled next to the compiled extension, then `tinymist` on the PATH.
function getServer(conf: WorkspaceConfiguration): string {
    const pathInConfig = conf.get<string | null>("serverPath");
    if (pathInConfig !== undefined && pathInConfig !== null && pathInConfig !== "") {
        const validation = validateServer(pathInConfig);
        if (!validation.valid) {
            throw new Error(
                `\`tinymist.serverPath\` (${pathInConfig}) does not point to a valid tinymist binary:\n${validation.message}`
            );
        }
        return pathInConfig;
    }
    const windows = process.platform === "win32";
    const suffix = windows ? ".exe" : "";
    const binaryName = "tinymist" + suffix;

    const bundledPath = path.resolve(__dirname, binaryName);

    const bundledValidation = validateServer(bundledPath);
    if (bundledValidation.valid) {
        return bundledPath;
    }

    const binaryValidation = validateServer(binaryName);
    if (binaryValidation.valid) {
        return binaryName;
    }

    throw new Error(
        `Could not find a valid tinymist binary.\nBundled: ${bundledValidation.message}\nIn PATH: ${binaryValidation.message}`
    );
}

// A binary is considered valid if launching it with no arguments exits with
// status 0.
function validateServer(path: string): { valid: true } | { valid: false; message: string } {
    try {
        const result = child_process.spawnSync(path);
        if (result.status === 0) {
            return { valid: true };
        } else {
            const statusMessage = result.status !== null ? [`return status: ${result.status}`] : [];
            const errorMessage =
                result.error?.message !== undefined ? [`error: ${result.error.message}`] : [];
            const messages = [...statusMessage, ...errorMessage];
            const messageSuffix = messages.length !== 0 ? `:\n\t${messages.join("\n\t")}` : "";
            const message = `Failed to launch '${path}'${messageSuffix}`;
            return { valid: false, message };
        }
    } catch (e) {
        if (e instanceof Error) {
            return { valid: false, message: `Failed to launch '${path}': ${e.message}` };
        } else {
            return { valid: false, message: `Failed to launch '${path}': ${JSON.stringify(e)}` };
        }
    }
}

async function commandExportCurrentPdf(): Promise<void> {
    const activeEditor = window.activeTextEditor;
    if (activeEditor === undefined) {
        return;
    }

    const uri = activeEditor.document.uri.toString();

    await client?.sendRequest("workspace/executeCommand", {
        command: "tinymist.doPdfExport",
        arguments: [uri],
    });
}

/**
 * Implements the functionality for the 'Show PDF' button shown in the editor title
 * if a `.typ` file is opened.
 */
async function commandShowPdf(): Promise<void> {
    const activeEditor = window.activeTextEditor;
    if (activeEditor === undefined) {
        return;
    }

    const uri = activeEditor.document.uri;
    // Swap the file extension to `.pdf`: we want to open the exported PDF,
    // not the currently opened `.typ` file.
    const n = uri.toString().lastIndexOf(".");
    const pdfUri = Uri.parse(uri.toString().slice(0, n) + ".pdf");

    try {
        await workspace.fs.stat(pdfUri);
    } catch {
        // Only export the PDF if it does not exist yet.
        await commandExportCurrentPdf();
    } finally {
        // By now the PDF should exist (unless the export itself failed).
        await commands.executeCommand("vscode.open", pdfUri, ViewColumn.Beside);
    }
}

async function commandClearCache(): Promise<void> {
    const activeEditor = window.activeTextEditor;
    if (activeEditor === undefined) {
        return;
    }

    const uri = activeEditor.document.uri.toString();

    await client?.sendRequest("workspace/executeCommand", {
        command: "tinymist.doClearCache",
        arguments: [uri],
    });
}
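
For reference, the custom commands above are forwarded to the server as standard LSP `workspace/executeCommand` requests; the params sent by `commandExportCurrentPdf` look like this (the file URI is a made-up example):

```json
{
  "command": "tinymist.doPdfExport",
  "arguments": ["file:///home/me/main.typ"]
}
```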
13
editors/vscode/tsconfig.json
Normal file
@@ -0,0 +1,13 @@
{
  "compilerOptions": {
    "module": "commonjs",
    "target": "es2020",
    "lib": ["es2020"],
    "outDir": "out",
    "rootDir": "src",
    "sourceMap": true,
    "strict": true
  },
  "include": ["src"],
  "exclude": ["node_modules"]
}
576
editors/vscode/typst.tmLanguage.json
Normal file
@@ -0,0 +1,576 @@
{
  "name": "typst",
  "patterns": [{ "include": "#markup" }],
  "repository": {
    "comments": {
      "patterns": [
        {
          "name": "comment.block.typst",
          "begin": "/\\*",
          "end": "\\*/",
          "captures": { "0": { "name": "punctuation.definition.comment.typst" } },
          "patterns": [{ "include": "#comments" }]
        },
        {
          "name": "comment.line.double-slash.typst",
          "begin": "(?<!:)//",
          "end": "\n",
          "beginCaptures": { "0": { "name": "punctuation.definition.comment.typst" } },
          "patterns": [{ "include": "#comments" }]
        }
      ]
    },
    "common": {
      "patterns": [{ "include": "#comments" }]
    },
    "markup": {
      "patterns": [
        { "include": "#common" },
        {
          "name": "constant.character.escape.content.typst",
          "match": "\\\\([\\\\/\\[\\]{}#*_=~`$-.]|u\\{[0-9a-zA-Z]*\\}?)"
        },
        { "name": "punctuation.definition.linebreak.typst", "match": "\\\\" },
        { "name": "punctuation.definition.nonbreaking-space.typst", "match": "~" },
        { "name": "punctuation.definition.shy.typst", "match": "-\\?" },
        { "name": "punctuation.definition.em-dash.typst", "match": "---" },
        { "name": "punctuation.definition.en-dash.typst", "match": "--" },
        { "name": "punctuation.definition.ellipsis.typst", "match": "\\.\\.\\." },
        { "name": "constant.symbol.typst", "match": ":([a-zA-Z0-9]+:)+" },
        {
          "name": "markup.bold.typst",
          "begin": "(^\\*|\\*$|((?<=\\W|_)\\*)|(\\*(?=\\W|_)))",
          "end": "(^\\*|\\*$|((?<=\\W|_)\\*)|(\\*(?=\\W|_)))|\n|(?=\\])",
          "captures": { "0": { "name": "punctuation.definition.bold.typst" } },
          "patterns": [{ "include": "#markup" }]
        },
        {
          "name": "markup.italic.typst",
          "begin": "(^_|_$|((?<=\\W|_)_)|(_(?=\\W|_)))",
          "end": "(^_|_$|((?<=\\W|_)_)|(_(?=\\W|_)))|\n|(?=\\])",
          "captures": { "0": { "name": "punctuation.definition.italic.typst" } },
          "patterns": [{ "include": "#markup" }]
        },
        {
          "name": "markup.underline.link.typst",
          "match": "https?://[0-9a-zA-Z~/%#&=',;\\.\\+\\?]*"
        },
        {
          "name": "markup.raw.block.typst",
          "begin": "`{3,}",
          "end": "\\0",
          "captures": { "0": { "name": "punctuation.definition.raw.typst" } }
        },
        {
          "name": "markup.raw.inline.typst",
          "begin": "`",
          "end": "`",
          "captures": { "0": { "name": "punctuation.definition.raw.typst" } }
        },
        {
          "name": "string.other.math.typst",
          "begin": "\\$",
          "end": "\\$",
          "captures": { "0": { "name": "punctuation.definition.string.math.typst" } }
        },
        {
          "name": "markup.heading.typst",
          "contentName": "entity.name.section.typst",
          "begin": "^\\s*=+\\s+",
          "end": "\n|(?=<)",
          "beginCaptures": { "0": { "name": "punctuation.definition.heading.typst" } },
          "patterns": [{ "include": "#markup" }]
        },
        { "name": "punctuation.definition.list.unnumbered.typst", "match": "^\\s*-\\s+" },
        {
          "name": "punctuation.definition.list.numbered.typst",
          "match": "^\\s*([0-9]*\\.|\\+)\\s+"
        },
        {
          "match": "^\\s*(/)\\s+([^:]*:)",
          "captures": {
            "1": { "name": "punctuation.definition.list.description.typst" },
            "2": { "name": "markup.list.term.typst" }
          }
        },
        {
          "name": "entity.other.label.typst",
          "match": "<[[:alpha:]_][[:alnum:]_-]*>",
          "captures": { "1": { "name": "punctuation.definition.label.typst" } }
        },
        {
          "name": "entity.other.reference.typst",
          "match": "(@)[[:alpha:]_][[:alnum:]_-]*",
          "captures": { "1": { "name": "punctuation.definition.reference.typst" } }
        },
        {
          "begin": "(#)(let|set|show)\\b",
          "end": "\n|(;)|(?=])",
          "beginCaptures": {
            "0": { "name": "keyword.other.typst" },
            "1": { "name": "punctuation.definition.keyword.typst" }
          },
          "endCaptures": { "1": { "name": "punctuation.terminator.statement.typst" } },
          "patterns": [{ "include": "#code" }]
        },
        {
          "name": "keyword.other.typst",
          "match": "(#)(as|in)\\b",
          "captures": { "1": { "name": "punctuation.definition.keyword.typst" } }
        },
        {
          "begin": "((#)if|(?<=(}|])\\s*)else)\\b",
          "end": "\n|(?=])|(?<=}|])",
          "beginCaptures": {
            "0": { "name": "keyword.control.conditional.typst" },
            "2": { "name": "punctuation.definition.keyword.typst" }
          },
          "patterns": [{ "include": "#code" }]
        },
        {
          "begin": "(#)(for|while)\\b",
          "end": "\n|(?=])|(?<=}|])",
          "beginCaptures": {
            "0": { "name": "keyword.control.loop.typst" },
            "1": { "name": "punctuation.definition.keyword.typst" }
          },
          "patterns": [{ "include": "#code" }]
        },
        {
          "name": "keyword.control.loop.typst",
          "match": "(#)(break|continue)\\b",
          "captures": { "1": { "name": "punctuation.definition.keyword.typst" } }
        },
        {
          "begin": "(#)(import|include|export)\\b",
          "end": "\n|(;)|(?=])",
          "beginCaptures": {
            "0": { "name": "keyword.control.import.typst" },
            "1": { "name": "punctuation.definition.keyword.typst" }
          },
          "endCaptures": { "1": { "name": "punctuation.terminator.statement.typst" } },
          "patterns": [{ "include": "#code" }]
        },
        {
          "name": "keyword.control.flow.typst",
          "match": "(#)(return)\\b",
          "captures": { "1": { "name": "punctuation.definition.keyword.typst" } }
        },
        {
          "comment": "Function name",
          "name": "entity.name.function.typst",
          "match": "((#)[[:alpha:]_][[:alnum:]_-]*!?)(?=\\[|\\()",
          "captures": { "2": { "name": "punctuation.definition.function.typst" } }
        },
        {
          "comment": "Function arguments",
          "begin": "(?<=#[[:alpha:]_][[:alnum:]_-]*!?)\\(",
          "end": "\\)",
          "captures": { "0": { "name": "punctuation.definition.group.typst" } },
          "patterns": [{ "include": "#arguments" }]
        },
        {
          "name": "entity.other.interpolated.typst",
          "match": "(#)[[:alpha:]_][.[:alnum:]_-]*",
          "captures": { "1": { "name": "punctuation.definition.variable.typst" } }
        },
        {
          "name": "meta.block.content.typst",
          "begin": "#",
          "end": "\\s",
          "patterns": [{ "include": "#code" }]
        }
      ]
    },
    "code": {
      "patterns": [
        { "include": "#common" },
        {
          "name": "meta.block.code.typst",
          "begin": "{",
          "end": "}",
          "captures": { "0": { "name": "punctuation.definition.block.code.typst" } },
          "patterns": [{ "include": "#code" }]
        },
        {
          "name": "meta.block.content.typst",
          "begin": "\\[",
          "end": "\\]",
          "captures": { "0": { "name": "punctuation.definition.block.content.typst" } },
          "patterns": [{ "include": "#markup" }]
        },
        {
          "name": "comment.line.double-slash.typst",
          "begin": "//",
          "end": "\n",
          "beginCaptures": { "0": { "name": "punctuation.definition.comment.typst" } }
        },
        { "name": "punctuation.separator.colon.typst", "match": ":" },
        { "name": "punctuation.separator.comma.typst", "match": "," },
        { "name": "keyword.operator.typst", "match": "=>|\\.\\." },
        { "name": "keyword.operator.relational.typst", "match": "==|!=|<=|<|>=|>" },
        { "name": "keyword.operator.assignment.typst", "match": "\\+=|-=|\\*=|/=|=" },
        {
          "name": "keyword.operator.arithmetic.typst",
          "match": "\\+|\\*|/|(?<![[:alpha:]_][[:alnum:]_-]*)-(?![[:alnum:]_-]*[[:alpha:]_])"
        },
        { "name": "keyword.operator.word.typst", "match": "\\b(and|or|not)\\b" },
        { "name": "keyword.other.typst", "match": "\\b(let|as|in|set|show)\\b" },
        { "name": "keyword.control.conditional.typst", "match": "\\b(if|else)\\b" },
        { "name": "keyword.control.loop.typst", "match": "\\b(for|while|break|continue)\\b" },
        { "name": "keyword.control.import.typst", "match": "\\b(import|include|export)\\b" },
        { "name": "keyword.control.flow.typst", "match": "\\b(return)\\b" },
        { "include": "#constants" },
        {
          "comment": "Function name",
          "name": "entity.name.function.typst",
          "match": "\\b[[:alpha:]_][[:alnum:]_-]*!?(?=\\[|\\()"
        },
        {
          "comment": "Function name",
          "name": "entity.name.function.typst",
          "match": "(?<=\\bshow\\s*)\\b[[:alpha:]_][[:alnum:]_-]*(?=\\s*[:.])"
        },
        {
          "comment": "Function arguments",
          "begin": "(?<=\\b[[:alpha:]_][[:alnum:]_-]*!?)\\(",
          "end": "\\)",
          "captures": { "0": { "name": "punctuation.definition.group.typst" } },
          "patterns": [{ "include": "#arguments" }]
        },
        { "name": "variable.other.typst", "match": "\\b[[:alpha:]_][[:alnum:]_-]*\\b" },
        {
          "name": "meta.group.typst",
          "begin": "\\(",
          "end": "\\)|(?=;)",
          "captures": { "0": { "name": "punctuation.definition.group.typst" } },
          "patterns": [{ "include": "#code" }]
        }
      ]
    },
    "constants": {
      "patterns": [
        { "name": "constant.language.none.typst", "match": "\\bnone\\b" },
        { "name": "constant.language.auto.typst", "match": "\\bauto\\b" },
        { "name": "constant.language.boolean.typst", "match": "\\b(true|false)\\b" },
        {
          "name": "constant.numeric.length.typst",
          "match": "\\b(\\d*)?\\.?\\d+([eE][+-]?\\d+)?(mm|pt|cm|in|em)\\b"
        },
        {
          "name": "constant.numeric.angle.typst",
          "match": "\\b(\\d*)?\\.?\\d+([eE][+-]?\\d+)?(rad|deg)\\b"
        },
        {
          "name": "constant.numeric.percentage.typst",
          "match": "\\b(\\d*)?\\.?\\d+([eE][+-]?\\d+)?%"
        },
        { "name": "constant.numeric.fr.typst", "match": "\\b(\\d*)?\\.?\\d+([eE][+-]?\\d+)?fr" },
        { "name": "constant.numeric.integer.typst", "match": "\\b\\d+\\b" },
        {
          "name": "constant.numeric.float.typst",
          "match": "\\b(\\d*)?\\.?\\d+([eE][+-]?\\d+)?\\b"
        },
        {
          "name": "string.quoted.double.typst",
          "begin": "\"",
          "end": "\"",
          "captures": { "0": { "name": "punctuation.definition.string.typst" } },
          "patterns": [
            {
              "name": "constant.character.escape.string.typst",
              "match": "\\\\([\\\\\"nrt]|u\\{?[0-9a-zA-Z]*\\}?)"
            }
          ]
        },
        {
          "name": "string.other.math.typst",
          "begin": "\\$",
          "end": "\\$",
          "captures": { "0": { "name": "punctuation.definition.string.math.typst" } }
        }
      ]
    },
    "arguments": {
      "patterns": [
        { "name": "variable.parameter.typst", "match": "\\b[[:alpha:]_][[:alnum:]_-]*(?=:)" },
        { "include": "#code" }
      ]
    }
  },
  "scopeName": "source.typst"
}
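
Because the extension maps its semantic token types to the TextMate scopes above, themes pick them up automatically; individual tokens can also be restyled through VS Code's standard semantic-token setting. A hypothetical `settings.json` tweak (the token IDs are the ones declared in package.json; the color value is arbitrary):

```jsonc
{
  "editor.semanticTokenColorCustomizations": {
    "rules": {
      // "heading" and "raw" are token types declared by this extension.
      "heading": { "bold": true },
      "raw": "#8a8a8a"
    }
  }
}
```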