mirror of https://github.com/Myriad-Dreamin/tinymist.git
synced 2025-08-03 09:52:27 +00:00
feat: adapt to rust analyzer's server crate
This commit is contained in:
parent 619a4f9f14
commit 9c9fcd91ba
19 changed files with 1078 additions and 600 deletions
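The change replaces the tower-lsp based async server with rust-analyzer's synchronous lsp-server crate. For orientation, here is a minimal sketch of the connect-initialize-loop pattern that the new main.rs builds on; it is not code from this commit, and the `run` function and dispatch comment are placeholders:

```rust
// A minimal sketch (not this commit's code) of the lsp-server pattern the
// new main.rs adopts: a synchronous Connection plus a plain message loop.
use lsp_server::{Connection, Message};
use lsp_types::ServerCapabilities;

fn run() -> anyhow::Result<()> {
    // stdio() spawns the reader/writer threads; the commit's io_transport
    // generalizes this to arbitrary BufRead/Write sources.
    let (connection, io_threads) = Connection::stdio();

    let capabilities = serde_json::to_value(ServerCapabilities::default())?;
    let _initialize_params = connection.initialize(capabilities)?;

    for msg in &connection.receiver {
        match msg {
            Message::Request(req) => {
                if connection.handle_shutdown(&req)? {
                    break;
                }
                // dispatch req.method / req.params here
            }
            Message::Notification(_) | Message::Response(_) => {}
        }
    }

    io_threads.join()?;
    Ok(())
}
```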
Cargo.lock (generated): 90 changes
@@ -144,17 +144,6 @@ dependencies = [
"syn 2.0.52",
]

[[package]]
name = "auto_impl"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c87f3f15e7794432337fc718554eaa4dc8f04c9677a950ffe366f20a162ae42"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.52",
]

[[package]]
name = "autocfg"
version = "1.1.0"

@@ -2068,10 +2057,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c"

[[package]]
name = "lsp-types"
version = "0.94.1"
name = "lsp-server"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c66bfd44a06ae10647fe3f8214762e9369fd4248df1350924b4ef9e770a85ea1"
checksum = "248f65b78f6db5d8e1b1604b4098a28b43d21a8eb1deeca22b1c421b276c7095"
dependencies = [
"crossbeam-channel",
"log",
"serde",
"serde_json",
]

[[package]]
name = "lsp-types"
version = "0.95.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "158c1911354ef73e8fe42da6b10c0484cb65c7f1007f28022e847706c1ab6984"
dependencies = [
"bitflags 1.3.2",
"serde",

@@ -3599,10 +3600,13 @@ dependencies = [
"clap_complete_fig",
"clap_mangen",
"comemo 0.4.0",
"crossbeam-channel",
"env_logger",
"futures",
"itertools 0.12.1",
"log",
"lsp-server",
"lsp-types",
"once_cell",
"parking_lot",
"paste",

@@ -3611,8 +3615,6 @@ dependencies = [
"tinymist-query",
"tokio",
"tokio-util",
"tower",
"tower-lsp",
"typst",
"typst-assets",
"typst-pdf",

@@ -3632,13 +3634,13 @@ dependencies = [
"itertools 0.12.1",
"lazy_static",
"log",
"lsp-types",
"once_cell",
"parking_lot",
"regex",
"serde",
"serde_json",
"strum 0.25.0",
"tower-lsp",
"typst",
"typst-ide",
"typst-ts-compiler",

@@ -3782,60 +3784,6 @@ dependencies = [
"winnow",
]

[[package]]
name = "tower"
version = "0.4.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c"
dependencies = [
"futures-core",
"futures-util",
"pin-project",
"pin-project-lite",
"tower-layer",
"tower-service",
]

[[package]]
name = "tower-layer"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0"

[[package]]
name = "tower-lsp"
version = "0.20.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4ba052b54a6627628d9b3c34c176e7eda8359b7da9acd497b9f20998d118508"
dependencies = [
"async-trait",
"auto_impl",
"bytes",
"dashmap",
"futures",
"httparse",
"lsp-types",
"memchr",
"serde",
"serde_json",
"tokio",
"tokio-util",
"tower",
"tower-lsp-macros",
"tracing",
]

[[package]]
name = "tower-lsp-macros"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "84fd902d4e0b9a4b27f2f440108dc034e1758628a9b702f8ec61ad66355422fa"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.52",
]

[[package]]
name = "tower-service"
version = "0.3.2"
@@ -41,8 +41,9 @@ typst-ts-core = { version = "0.4.2-rc6" }
typst-ts-compiler = { version = "0.4.2-rc6" }
typst-preview = { path = "external/typst-preview" }

tower = { version = "0.4", default-features = false, features = ["util"] }
tower-lsp = "0.20.0"
lsp-server = "0.7.3"
lsp-types = { version = "=0.95.0", features = ["proposed"] }
crossbeam-channel = "0.5.8"

clap = { version = "4.4", features = ["derive", "env", "unicode", "wrap_help"] }
clap_builder = { version = "4", features = ["string"] }

@@ -86,8 +87,8 @@ missing_docs = "warn"

[workspace.lints.clippy]
uninlined_format_args = "warn"
missing_errors_doc = "warn"
missing_panics_doc = "warn"
# missing_errors_doc = "warn"
# missing_panics_doc = "warn"
missing_safety_doc = "warn"
undocumented_unsafe_blocks = "warn"
@@ -34,7 +34,7 @@ typst-ts-core = { version = "0.4.2-rc6", default-features = false, features = [
] }
typst-ts-compiler.workspace = true

tower-lsp.workspace = true
lsp-types.workspace = true

[dev-dependencies]
once_cell.workspace = true
@@ -2,7 +2,7 @@ use std::ops::Range;

use anyhow::anyhow;
use log::info;
use tower_lsp::lsp_types::SymbolKind;
use lsp_types::SymbolKind;
use typst::syntax::{ast, LinkedNode, Source, SyntaxKind};
use typst_ts_core::typst::prelude::{eco_vec, EcoVec};
@@ -1,6 +1,6 @@
use comemo::Track;
use log::debug;
use tower_lsp::lsp_types::LocationLink;
use lsp_types::LocationLink;

use crate::{analysis::find_definition, prelude::*};
@@ -1,7 +1,7 @@
use std::{borrow::Cow, ops::Range};

use log::debug;
use tower_lsp::lsp_types::{InlayHintKind, InlayHintLabel};
use lsp_types::{InlayHintKind, InlayHintLabel};
use typst::{
foundations::{Args, Closure},
syntax::SyntaxNode,
@@ -1,6 +1,6 @@
//! Conversions between Typst and LSP types and representations

use tower_lsp::lsp_types;
use lsp_types;

pub type LspPosition = lsp_types::Position;
/// The interpretation of an `LspCharacterOffset` depends on the

@@ -130,11 +130,11 @@ pub mod typst_to_lsp {

use itertools::Itertools;
use lazy_static::lazy_static;
use regex::{Captures, Regex};
use tower_lsp::lsp_types::{
use lsp_types::{
CompletionTextEdit, Documentation, InsertTextFormat, LanguageString, MarkedString,
MarkupContent, MarkupKind, TextEdit,
};
use regex::{Captures, Regex};
use typst::diag::EcoString;
use typst::foundations::{CastInfo, Repr};
use typst::syntax::Source;
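The boundary module maps Typst byte offsets to LSP positions, and the column unit depends on the negotiated PositionEncoding. A standalone sketch of the UTF-16 case; the helper below is illustrative only and not the crate's actual conversion code:

```rust
// Illustrative only: how a byte offset in Typst source maps to an LSP
// Position when the client negotiated UTF-16 (the protocol default).
use lsp_types::Position;

fn byte_offset_to_lsp_position(text: &str, offset: usize) -> Position {
    // Assumes `offset` lies on a char boundary of `text`.
    let before = &text[..offset.min(text.len())];
    // LSP lines are zero-based; count newlines before the offset.
    let line = before.matches('\n').count() as u32;
    // The column is measured in UTF-16 code units from the line start.
    let line_start = before.rfind('\n').map_or(0, |i| i + 1);
    let character = before[line_start..].encode_utf16().count() as u32;
    Position { line, character }
}

fn main() {
    // "é" is 2 bytes in UTF-8 but only 1 UTF-16 code unit.
    let text = "é = 1\nlet x = é";
    assert_eq!(byte_offset_to_lsp_position(text, 0), Position::new(0, 0));
    assert_eq!(byte_offset_to_lsp_position(text, text.len()), Position::new(1, 9));
}
```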
@@ -8,7 +8,7 @@ pub use std::{
pub use comemo::{Track, Tracked};
pub use itertools::{Format, Itertools};
pub use log::{error, trace};
pub use tower_lsp::lsp_types::{
pub use lsp_types::{
CompletionResponse, DiagnosticRelatedInformation, DocumentSymbol, DocumentSymbolResponse,
Documentation, FoldingRange, GotoDefinitionResponse, Hover, InlayHint, Location as LspLocation,
MarkupContent, MarkupKind, Position as LspPosition, PrepareRenameResponse, SelectionRange,
@@ -1,7 +1,7 @@
use std::{collections::HashSet, os::windows::fs::FileTypeExt};

use log::{debug, warn};
use tower_lsp::lsp_types::TextEdit;
use lsp_types::TextEdit;

use crate::{
analysis::{find_definition, find_imports, find_lexical_references_after, Definition},
@@ -1,4 +1,4 @@
use tower_lsp::lsp_types::{SemanticToken, SemanticTokensEdit};
use lsp_types::{SemanticToken, SemanticTokensEdit};

#[derive(Debug)]
struct CachedTokens {
@@ -1,10 +1,10 @@
use itertools::Itertools;
use parking_lot::RwLock;
use strum::IntoEnumIterator;
use tower_lsp::lsp_types::{
use lsp_types::{
Registration, SemanticToken, SemanticTokensEdit, SemanticTokensFullOptions,
SemanticTokensLegend, SemanticTokensOptions, Unregistration,
};
use parking_lot::RwLock;
use strum::IntoEnumIterator;
use typst::diag::EcoString;
use typst::syntax::{ast, LinkedNode, Source, SyntaxKind};
@@ -1,4 +1,4 @@
use tower_lsp::lsp_types::{Position, SemanticToken};
use lsp_types::{Position, SemanticToken};
use typst::diag::EcoString;
use typst::syntax::Source;
@@ -1,7 +1,7 @@
//! Types for tokens used for Typst syntax

use lsp_types::{SemanticTokenModifier, SemanticTokenType};
use strum::EnumIter;
use tower_lsp::lsp_types::{SemanticTokenModifier, SemanticTokenType};

const BOOL: SemanticTokenType = SemanticTokenType::new("bool");
const PUNCTUATION: SemanticTokenType = SemanticTokenType::new("punct");
@@ -47,8 +47,9 @@ typst-ts-core = { version = "0.4.2-rc6", default-features = false, features = [
typst-ts-compiler.workspace = true
typst-preview.workspace = true

tower.workspace = true
tower-lsp.workspace = true
lsp-server.workspace = true
crossbeam-channel.workspace = true
lsp-types.workspace = true

[features]
default = ["cli"]

@@ -59,5 +60,5 @@ anyhow.workspace = true
vergen.workspace = true
cargo_metadata = "0.18.0"

[lints]
workspace = true
# [lints]
# workspace = true
@@ -9,12 +9,13 @@ use std::{
use anyhow::anyhow;
use futures::future::join_all;
use log::{debug, error, info, trace, warn};
use lsp_types::{Diagnostic, TextDocumentContentChangeEvent, Url};
use parking_lot::{Mutex, RwLock};
use tinymist_query::{
lsp_to_typst, CompilerQueryRequest, CompilerQueryResponse, DiagnosticsMap, FoldRequestFeature,
LspDiagnostic, OnSaveExportRequest, PositionEncoding, SemanticTokenCache,
};
use tokio::sync::{broadcast, mpsc, watch, Mutex, RwLock};
use tower_lsp::lsp_types::{Diagnostic, TextDocumentContentChangeEvent, Url};
use tokio::sync::{broadcast, mpsc, watch};
use typst::{
diag::{FileResult, SourceDiagnostic, SourceResult},
layout::Position,

@@ -50,35 +51,6 @@ type Node = CompileNode<CompileHandler>;

type DiagnosticsSender = mpsc::UnboundedSender<(String, Option<DiagnosticsMap>)>;

// pub struct LazyCompileDriver {
// value: QueryRef<CompileDriver, (), (Vec<PathBuf>, CompileOpts)>,
// }

// impl LazyCompileDriver {
// pub fn new(roots: Vec<PathBuf>, opts: CompileOpts) -> Self {
// Self {
// value: QueryRef::with_context((roots, opts)),
// }
// }

// pub fn get(&self) -> &CompileDriver {
// let value = self.value.compute_with_context_ref(|(roots, opts)| {
// let driver = CompileDriver::new(roots.clone(), opts);
// Ok(driver)
// });

// value.unwrap()
// }
// }

// impl Deref for LazyCompileDriver {
// type Target = CompileDriver;

// fn deref(&self) -> &Self::Target {
// self.get()
// }
// }

pub struct CompileCluster {
roots: Vec<PathBuf>,
actor_factory: ActorFactory,
@@ -122,19 +94,19 @@ impl CompileCluster {
(self, actor)
}

pub async fn activate_doc(&self, new_entry: Option<ImmutPath>) -> Result<(), Error> {
pub fn activate_doc(&self, new_entry: Option<ImmutPath>) -> Result<(), Error> {
match new_entry {
Some(new_entry) => self.primary.wait().change_entry(new_entry).await?,
Some(new_entry) => self.primary.wait().change_entry(new_entry)?,
None => {
self.primary.wait().disable().await;
self.primary.wait().disable();
}
}

Ok(())
}

pub async fn pin_main(&self, new_entry: Option<Url>) -> Result<(), Error> {
let mut m = self.main.lock().await;
pub fn pin_main(&self, new_entry: Option<Url>) -> Result<(), Error> {
let mut m = self.main.lock();
match (new_entry, m.is_some()) {
(Some(new_entry), true) => {
let path = new_entry

@@ -142,7 +114,7 @@ impl CompileCluster {
.map_err(|_| error_once!("invalid url"))?;
let path = path.as_path().into();

m.as_mut().unwrap().wait().change_entry(path).await
m.as_mut().unwrap().wait().change_entry(path)
}
(Some(new_entry), false) => {
let path = new_entry

@@ -159,7 +131,7 @@ impl CompileCluster {
}
(None, true) => {
// todo: unpin main
m.as_mut().unwrap().wait().disable().await;
m.as_mut().unwrap().wait().disable();

Ok(())
}

@@ -263,7 +235,7 @@ impl CompileClusterActor {
return;
}

host.publish_diagnostics(uri, diags, version).await
host.publish_diagnostics(uri, diags, version)
}

async fn flush_primary_diagnostics(&mut self, enable: bool) {

@@ -278,6 +250,8 @@ impl CompileClusterActor {
}
Some(diags)
});
// todo: .flatten() removed
// let to_publish = diags.flatten().cloned().collect();
let to_publish = diags.flatten().cloned().collect();

Self::do_publish_diagnostics(&self.host, url.clone(), to_publish, None, false)
@@ -387,27 +361,27 @@ struct MemoryFileMeta {
}

impl CompileCluster {
async fn update_source(&self, files: FileChangeSet) -> Result<(), Error> {
fn update_source(&self, files: FileChangeSet) -> Result<(), Error> {
let primary = self.primary.clone();
let main = self.main.clone();
let primary = Some(&primary);
let main = main.lock().await;
let main = main.lock();
let main = main.as_ref();
let clients_to_notify = (primary.iter()).chain(main.iter());

for client in clients_to_notify {
let iw = client.wait().inner.lock().await;
let iw = client.wait().inner.lock();
iw.add_memory_changes(MemoryEvent::Update(files.clone()));
}

Ok(())
}

pub async fn create_source(&self, path: PathBuf, content: String) -> Result<(), Error> {
pub fn create_source(&self, path: PathBuf, content: String) -> Result<(), Error> {
let now = Time::now();
let path: ImmutPath = path.into();

self.memory_changes.write().await.insert(
self.memory_changes.write().insert(
path.clone(),
MemoryFileMeta {
mt: now,

@@ -421,22 +395,22 @@ impl CompileCluster {
// todo: is it safe to believe that the path is normalized?
let files = FileChangeSet::new_inserts(vec![(path, FileResult::Ok((now, content)).into())]);

self.update_source(files).await
self.update_source(files)
}

pub async fn remove_source(&self, path: PathBuf) -> Result<(), Error> {
pub fn remove_source(&self, path: PathBuf) -> Result<(), Error> {
let path: ImmutPath = path.into();

self.memory_changes.write().await.remove(&path);
self.memory_changes.write().remove(&path);
log::info!("remove source: {:?}", path);

// todo: is it safe to believe that the path is normalized?
let files = FileChangeSet::new_removes(vec![path]);

self.update_source(files).await
self.update_source(files)
}

pub async fn edit_source(
pub fn edit_source(
&self,
path: PathBuf,
content: Vec<TextDocumentContentChangeEvent>,

@@ -445,7 +419,7 @@ impl CompileCluster {
let now = Time::now();
let path: ImmutPath = path.into();

let mut memory_changes = self.memory_changes.write().await;
let mut memory_changes = self.memory_changes.write();

let meta = memory_changes
.get_mut(&path)

@@ -473,7 +447,7 @@ impl CompileCluster {

let files = FileChangeSet::new_inserts(vec![(path.clone(), snapshot)]);

self.update_source(files).await
self.update_source(files)
}
}
@@ -481,7 +455,7 @@ macro_rules! query_state {
($self:ident, $method:ident, $req:expr) => {{
let doc = $self.handler.result.lock().unwrap().clone().ok();
let enc = $self.position_encoding;
let res = $self.steal_world(move |w| $req.request(w, doc, enc)).await;
let res = $self.steal_world(move |w| $req.request(w, doc, enc));
res.map(CompilerQueryResponse::$method)
}};
}

@@ -489,7 +463,7 @@ macro_rules! query_state {
macro_rules! query_world {
($self:ident, $method:ident, $req:expr) => {{
let enc = $self.position_encoding;
let res = $self.steal_world(move |w| $req.request(w, enc)).await;
let res = $self.steal_world(move |w| $req.request(w, enc));
res.map(CompilerQueryResponse::$method)
}};
}

@@ -497,7 +471,7 @@ macro_rules! query_world {
macro_rules! query_source {
($self:ident, $method:ident, $req:expr) => {{
let path: ImmutPath = $req.path.clone().into();
let vfs = $self.memory_changes.read().await;
let vfs = $self.memory_changes.read();
let snapshot = vfs
.get(&path)
.ok_or_else(|| anyhow!("file missing {:?}", $self.memory_changes))?;

@@ -512,7 +486,7 @@ macro_rules! query_source {
macro_rules! query_tokens_cache {
($self:ident, $method:ident, $req:expr) => {{
let path: ImmutPath = $req.path.clone().into();
let vfs = $self.memory_changes.read().await;
let vfs = $self.memory_changes.read();
let snapshot = vfs.get(&path).ok_or_else(|| anyhow!("file missing"))?;
let source = snapshot.content.clone();

@@ -523,10 +497,7 @@ macro_rules! query_tokens_cache {
}

impl CompileCluster {
pub async fn query(
&self,
query: CompilerQueryRequest,
) -> anyhow::Result<CompilerQueryResponse> {
pub fn query(&self, query: CompilerQueryRequest) -> anyhow::Result<CompilerQueryResponse> {
use CompilerQueryRequest::*;

match query {
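The query_* macros above now expand to plain synchronous calls. A toy, self-contained analogue of how such an expansion behaves after this change (none of these types are the crate's real ones; they only stand in for World, the request types, and steal_world):

```rust
// Toy analogue (not the crate's real types) of the synchronous query_world!
// expansion: the request runs against a borrowed world with no .await left.
struct World;
struct HoverRequest;

impl HoverRequest {
    fn request(&self, _w: &World, enc: &'static str) -> Option<String> {
        Some(format!("hover computed with {enc} encoding"))
    }
}

enum CompilerQueryResponse {
    Hover(Option<String>),
}

macro_rules! query_world {
    ($self:ident, $method:ident, $req:expr) => {{
        let enc = $self.position_encoding;
        let res = $self.steal_world(move |w| $req.request(w, enc));
        res.map(CompilerQueryResponse::$method)
    }};
}

struct Node {
    position_encoding: &'static str,
    world: World,
}

impl Node {
    // Stand-in for the real steal_world: hand the closure a reference to the
    // world and return its result directly, no future involved.
    fn steal_world<T>(&self, f: impl FnOnce(&World) -> T) -> anyhow::Result<T> {
        Ok(f(&self.world))
    }
}

fn main() -> anyhow::Result<()> {
    let node = Node { position_encoding: "utf-16", world: World };
    let _resp = query_world!(node, Hover, HoverRequest)?;
    Ok(())
}
```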
@@ -536,20 +507,20 @@ impl CompileCluster {
SelectionRange(req) => query_source!(self, SelectionRange, req),
DocumentSymbol(req) => query_source!(self, DocumentSymbol, req),
_ => {
let main = self.main.lock().await;
let main = self.main.lock();

let query_target = match main.as_ref() {
Some(main) => main,
None => {
// todo: race condition, we need atomic primary query
if let Some(path) = query.associated_path() {
self.primary.wait().change_entry(path.into()).await?;
self.primary.wait().change_entry(path.into())?;
}
&self.primary
}
};

query_target.wait().query(query).await
query_target.wait().query(query)
}
}
}
@@ -752,38 +723,36 @@ impl<H: CompilationHandle> CompileNode<H> {
}

/// Steal the compiler thread and run the given function.
pub async fn steal_async<Ret: Send + 'static>(
pub fn steal<Ret: Send + 'static>(
&self,
f: impl FnOnce(&mut CompileService<H>, tokio::runtime::Handle) -> Ret + Send + 'static,
f: impl FnOnce(&mut CompileService<H>) -> Ret + Send + 'static,
) -> ZResult<Ret> {
self.inner.lock().await.steal_async(f).await
self.inner.lock().steal(f)
}

// todo: stop main
async fn disable(&self) {
let res = self
.steal_async(move |compiler, _| {
let path = Path::new("detached.typ");
let root = compiler.compiler.world().workspace_root();
fn disable(&self) {
let res = self.steal(move |compiler| {
let path = Path::new("detached.typ");
let root = compiler.compiler.world().workspace_root();

let driver = &mut compiler.compiler.compiler.inner.compiler;
driver.set_entry_file(path.to_owned());
let driver = &mut compiler.compiler.compiler.inner.compiler;
driver.set_entry_file(path.to_owned());

// todo: suitable approach to avoid panic
driver.notify_fs_event(typst_ts_compiler::vfs::notify::FilesystemEvent::Update(
typst_ts_compiler::vfs::notify::FileChangeSet::new_inserts(vec![(
root.join("detached.typ").into(),
Ok((Time::now(), Bytes::from("".as_bytes()))).into(),
)]),
));
})
.await;
// todo: suitable approach to avoid panic
driver.notify_fs_event(typst_ts_compiler::vfs::notify::FilesystemEvent::Update(
typst_ts_compiler::vfs::notify::FileChangeSet::new_inserts(vec![(
root.join("detached.typ").into(),
Ok((Time::now(), Bytes::from("".as_bytes()))).into(),
)]),
));
});
if let Err(err) = res {
error!("failed to disable main: {:#}", err);
}
}

async fn change_entry(&self, path: ImmutPath) -> Result<(), Error> {
fn change_entry(&self, path: ImmutPath) -> Result<(), Error> {
if !path.is_absolute() {
return Err(error_once!("entry file must be absolute", path: path.display()));
}

@@ -808,18 +777,16 @@ impl<H: CompilationHandle> CompileNode<H> {
next.display()
);

let res = self
.steal_async(move |compiler, _| {
let root = compiler.compiler.world().workspace_root();
if !path.starts_with(&root) {
warn!("entry file is not in workspace root {}", path.display());
return;
}
let res = self.steal(move |compiler| {
let root = compiler.compiler.world().workspace_root();
if !path.starts_with(&root) {
warn!("entry file is not in workspace root {}", path.display());
return;
}

let driver = &mut compiler.compiler.compiler.inner.compiler;
driver.set_entry_file(path.as_ref().to_owned());
})
.await;
let driver = &mut compiler.compiler.compiler.inner.compiler;
driver.set_entry_file(path.as_ref().to_owned());
});

if res.is_err() {
let mut entry = entry.lock().unwrap();
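The CompileNode methods above drop their async signatures because the inner service is now guarded by a blocking parking_lot::Mutex instead of tokio::sync::Mutex. A self-contained sketch of that "steal the compiler thread" pattern, with stand-in types; the names Node and Inner are illustrative, not the crate's:

```rust
use parking_lot::Mutex;

// Stand-in for the compile service guarded by the node.
struct Inner {
    revision: usize,
}

struct Node {
    inner: Mutex<Inner>,
}

impl Node {
    // Synchronous counterpart of the old `steal_async`: lock the service,
    // run the closure against it, and return the closure's result directly.
    fn steal<Ret>(&self, f: impl FnOnce(&mut Inner) -> Ret) -> Ret {
        let mut guard = self.inner.lock();
        f(&mut guard)
    }
}

fn main() {
    let node = Node { inner: Mutex::new(Inner { revision: 0 }) };
    let rev = node.steal(|inner| {
        inner.revision += 1;
        inner.revision
    });
    assert_eq!(rev, 1);
}
```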
@@ -832,7 +799,7 @@ impl<H: CompilationHandle> CompileNode<H> {

// todo: trigger recompile
let files = FileChangeSet::new_inserts(vec![]);
let inner = self.inner.lock().await;
let inner = self.inner.lock();
inner.add_memory_changes(MemoryEvent::Update(files))
}

@@ -941,16 +908,13 @@ impl<H: CompilationHandle> CompileNode<H> {
}
}

pub async fn query(
&self,
query: CompilerQueryRequest,
) -> anyhow::Result<CompilerQueryResponse> {
pub fn query(&self, query: CompilerQueryRequest) -> anyhow::Result<CompilerQueryResponse> {
use CompilerQueryRequest::*;
assert!(query.fold_feature() != FoldRequestFeature::ContextFreeUnique);

match query {
CompilerQueryRequest::OnSaveExport(OnSaveExportRequest { path }) => {
self.on_save_export(path).await?;
self.on_save_export(path)?;
Ok(CompilerQueryResponse::OnSaveExport(()))
}
Hover(req) => query_state!(self, Hover, req),

@@ -969,17 +933,17 @@ impl<H: CompilationHandle> CompileNode<H> {
}
}

async fn on_save_export(&self, _path: PathBuf) -> anyhow::Result<()> {
fn on_save_export(&self, _path: PathBuf) -> anyhow::Result<()> {
Ok(())
}

async fn steal_world<T: Send + Sync + 'static>(
fn steal_world<T: Send + Sync + 'static>(
&self,
f: impl FnOnce(&TypstSystemWorld) -> T + Send + Sync + 'static,
) -> anyhow::Result<T> {
let mut client = self.inner.lock().await;
let fut = client.steal_async(move |compiler, _| f(compiler.compiler.world()));
let mut client = self.inner.lock();
let fut = client.steal(move |compiler| f(compiler.compiler.world()));

Ok(fut.await?)
Ok(fut?)
}
}
(File diff suppressed because it is too large.)
@@ -1,57 +0,0 @@
//! Logging middleware for language server.

use core::fmt;
use core::task::{Context, Poll};
use std::time::Instant;

use futures::future::BoxFuture;
use tower_lsp::jsonrpc::{Request, Response};

/// A middleware that logs requests and responses.
pub struct LogService<S> {
/// The inner service.
pub inner: S,
/// Whether to log the time to process on end of requests.
pub show_time: bool,
}

impl<S> tower::Service<Request> for LogService<S>
where
S: tower::Service<Request, Response = Option<Response>>,
S::Future: Send + 'static,
{
type Response = S::Response;
type Error = S::Error;
type Future = BoxFuture<'static, Result<Self::Response, Self::Error>>;

fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
self.inner.poll_ready(cx)
}

fn call(&mut self, request: Request) -> Self::Future {
struct DisplayId(Option<tower_lsp::jsonrpc::Id>);

impl fmt::Display for DisplayId {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let Some(id) = &self.0 else { return Ok(()) };
write!(f, "({id})")
}
}

// Before request.
let id = DisplayId(request.id().cloned());
let show_time = self.show_time.then(Instant::now);
log::info!("request{id}: start {method}", method = request.method());

let fut = self.inner.call(request);
Box::pin(async move {
let response = fut.await?;

// After request.
let delta_msg = show_time.map(|s| format!(" in {:?}", s.elapsed()));
let delta_msg = delta_msg.as_deref().unwrap_or("");
log::info!("request{id}: finished{delta_msg}");
Ok(response)
})
}
}
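The deleted LogService was tower middleware that timed each JSON-RPC request. With tower gone, the same effect can be had by timing around whatever dispatch function the new server uses; a hedged sketch, where handle_timed and dispatch are hypothetical and not this commit's code:

```rust
use std::time::Instant;

use lsp_server::{Request, Response};

// Hypothetical inline replacement for the removed LogService middleware:
// log the start of a request, run the dispatcher, and log the elapsed time.
fn handle_timed(req: Request, dispatch: impl FnOnce(Request) -> Response) -> Response {
    let start = Instant::now();
    let id = req.id.clone();
    let method = req.method.clone();
    log::info!("request({id}): start {method}");
    let resp = dispatch(req);
    log::info!("request({id}): finished in {:?}", start.elapsed());
    resp
}
```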
@@ -2,25 +2,39 @@

mod args;

use std::io::Write;
use std::io::{self, BufRead, Read, Write};

use clap::Parser;
use log::info;
use tinymist::{logging::LogService, TypstLanguageServer};
use tokio::io::AsyncRead;
use tokio_util::io::InspectReader;
use tower_lsp::{LspService, Server};
use log::{info, trace, warn};
use lsp_types::{InitializeParams, InitializedParams};
use serde::de::DeserializeOwned;
use tinymist::{transport::io_transport, LspHost, TypstLanguageServer};

use crate::args::CliArguments;

// use lsp_types::OneOf;
// use lsp_types::{
// request::GotoDefinition, GotoDefinitionResponse, InitializeParams,
// ServerCapabilities, };

use lsp_server::{Connection, Message, Response};

fn from_json<T: DeserializeOwned>(
what: &'static str,
json: &serde_json::Value,
) -> anyhow::Result<T> {
serde_json::from_value(json.clone())
.map_err(|e| anyhow::format_err!("Failed to deserialize {what}: {e}; {json}"))
}

/// The main entry point.
#[tokio::main]
async fn main() {
async fn main() -> anyhow::Result<()> {
// Start logging
let _ = {
use log::LevelFilter::*;
env_logger::builder()
.filter_module("tinymist", Info)
.filter_module("tinymist", Debug)
.filter_module("typst_preview", Debug)
.filter_module("typst_ts", Info)
.filter_module("typst_ts_compiler::service::compile", Info)
@@ -28,34 +42,141 @@ async fn main() {
.try_init()
};

// Note that we must have our logging only write out to stderr.
eprintln!("starting generic LSP server");

// Parse command line arguments
let args = CliArguments::parse();
info!("Arguments: {:#?}", args);

// Set up input and output
let stdin: Box<dyn AsyncRead + Unpin> = if !args.replay.is_empty() {
// Get input from file
let file = tokio::fs::File::open(&args.replay).await.unwrap();
Box::new(file)
} else if args.mirror.is_empty() {
// Get input from stdin
Box::new(tokio::io::stdin())
} else {
// Get input from stdin and mirror to file
let mut file = std::fs::File::create(&args.mirror).unwrap();
Box::new(InspectReader::new(tokio::io::stdin(), move |bytes| {
file.write_all(bytes).unwrap();
}))
let mirror = args.mirror.clone();
let i = move || -> Box<dyn BufRead> {
if !args.replay.is_empty() {
// Get input from file
let file = std::fs::File::open(&args.replay).unwrap();
let file = std::io::BufReader::new(file);
Box::new(file)
} else if mirror.is_empty() {
// Get input from stdin
let stdin = std::io::stdin().lock();
Box::new(stdin)
} else {
// todo: mirror
let file = std::fs::File::create(&mirror).unwrap();
let stdin = std::io::stdin().lock();
Box::new(MirrorWriter(stdin, file, std::sync::Once::new()))
}
};
let stdout = tokio::io::stdout();
let o = || std::io::stdout().lock();

// Set up LSP server
let (inner, socket) = LspService::new(TypstLanguageServer::new);
let service = LogService {
inner,
show_time: true,
// Create the transport. Includes the stdio (stdin and stdout) versions but this
// could also be implemented to use sockets or HTTP.
let (sender, receiver, io_threads) = io_transport(i, o);
let connection = Connection { sender, receiver };

let (initialize_id, initialize_params) = match connection.initialize_start() {
Ok(it) => it,
Err(e) => {
if e.channel_is_disconnected() {
io_threads.join()?;
}
return Err(e.into());
}
};
trace!("InitializeParams: {initialize_params}");
let initialize_params = from_json::<InitializeParams>("InitializeParams", &initialize_params)?;

// Handle requests
Server::new(stdin, stdout, socket).serve(service).await;
let host = LspHost::new(connection.sender);
let mut service = TypstLanguageServer::new(host.clone());

let initialize_result = service.initialize(initialize_params.clone());

// todo: better send
host.complete_request(
&mut service,
match initialize_result {
Ok(cap) => Response::new_ok(initialize_id, Some(cap)),
Err(err) => Response::new_err(initialize_id, err.code, err.message),
},
);

#[derive(Debug, Clone, PartialEq)]
pub struct ProtocolError(String, bool);

impl ProtocolError {
pub(crate) fn new(msg: impl Into<String>) -> Self {
ProtocolError(msg.into(), false)
}

pub(crate) fn disconnected() -> ProtocolError {
ProtocolError("disconnected channel".into(), true)
}

/// Whether this error occured due to a disconnected channel.
pub fn channel_is_disconnected(&self) -> bool {
self.1
}
}

let initialized_ack = match &connection.receiver.recv() {
Ok(Message::Notification(n)) if n.method == "initialized" => Ok(()),
Ok(msg) => Err(ProtocolError::new(format!(
r#"expected initialized notification, got: {msg:?}"#
))),
Err(e) => {
log::error!("failed to receive initialized notification: {e}");
Err(ProtocolError::disconnected())
}
};
if let Err(e) = initialized_ack {
if e.channel_is_disconnected() {
io_threads.join()?;
}
return Err(anyhow::anyhow!(
"failed to receive initialized notification: {e:?}"
));
}

service.initialized(InitializedParams {});

// // Set up LSP server
// let (inner, socket) = LspService::new();
// let service = LogService {
// inner,
// show_time: true,
// };

// // Handle requests
// Server::new(stdin, stdout, socket).serve(service).await;

service.main_loop(connection.receiver)?;

io_threads.join()?;
info!("server did shut down");
Ok(())
}

struct MirrorWriter<R: Read, W: Write>(R, W, std::sync::Once);

impl<R: Read, W: Write> Read for MirrorWriter<R, W> {
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
self.0.read(buf)
}
}

impl<R: Read + BufRead, W: Write> BufRead for MirrorWriter<R, W> {
fn fill_buf(&mut self) -> io::Result<&[u8]> {
let buf = self.0.fill_buf()?;
if let Err(err) = self.1.write_all(buf) {
self.2.call_once(|| {
warn!("failed to write to mirror: {err}");
});
}
Ok(buf)
}

fn consume(&mut self, amt: usize) {
self.0.consume(amt);
}
}
crates/tinymist/src/transport.rs (new file): 82 changes
@@ -0,0 +1,82 @@
use std::{
io::{self, BufRead, Write},
thread,
};

use log::trace;

use crossbeam_channel::{bounded, Receiver, Sender};

use crate::Message;

/// Creates an LSP connection via io.
///
/// # Example
///
/// ```
/// use std::io::{stdin, stdout};
/// use tinymist::transport::{io_transport, IoThreads};
/// use lsp_server::Message;
/// use crossbeam_channel::{bounded, Receiver, Sender};
/// pub fn stdio_transport() -> (Sender<Message>, Receiver<Message>, IoThreads) {
///     io_transport(|| stdin().lock(), || stdout().lock())
/// }
/// ```
pub fn io_transport<I: BufRead, O: Write>(
inp: impl FnOnce() -> I + Send + Sync + 'static,
out: impl FnOnce() -> O + Send + Sync + 'static,
) -> (Sender<Message>, Receiver<Message>, IoThreads) {
// todo: set cap back to 0
let (writer_sender, writer_receiver) = bounded::<Message>(1024);
let writer = thread::spawn(move || {
let mut out = out();
writer_receiver
.into_iter()
.try_for_each(|it| it.write(&mut out))
});
let (reader_sender, reader_receiver) = bounded::<Message>(1024);
let reader = thread::spawn(move || {
let mut inp = inp();
while let Some(msg) = Message::read(&mut inp)? {
let is_exit = matches!(&msg, Message::Notification(n) if n.method == "exit");

trace!("sending message {:#?}", msg);
reader_sender
.send(msg)
.expect("receiver was dropped, failed to send a message");

if is_exit {
break;
}
}
Ok(())
});
let threads = IoThreads { reader, writer };
(writer_sender, reader_receiver, threads)
}

/// A pair of threads for reading and writing LSP messages.
pub struct IoThreads {
reader: thread::JoinHandle<io::Result<()>>,
writer: thread::JoinHandle<io::Result<()>>,
}

impl IoThreads {
/// Waits for the reader and writer threads to finish.
pub fn join(self) -> io::Result<()> {
match self.reader.join() {
Ok(r) => r?,
Err(err) => {
println!("reader panicked!");
std::panic::panic_any(err)
}
}
match self.writer.join() {
Ok(r) => r,
Err(err) => {
println!("writer panicked!");
std::panic::panic_any(err);
}
}
}
}
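A short usage sketch of the new transport, following what main.rs does: build the sender/receiver pair from arbitrary buffered reader/writer factories and hand them to lsp-server's Connection. The replay-from-file variant and the connect helper are hypothetical glue, not code from this commit:

```rust
use std::io::{stdin, stdout, BufReader};

use lsp_server::Connection;
use tinymist::transport::{io_transport, IoThreads};

// Hypothetical helper: choose the input source, wire it through io_transport,
// and wrap the resulting channels in an lsp-server Connection.
fn connect(replay: Option<std::path::PathBuf>) -> (Connection, IoThreads) {
    let (sender, receiver, io_threads) = match replay {
        // Feed a recorded session back into the server instead of stdin.
        Some(path) => io_transport(
            move || BufReader::new(std::fs::File::open(path).expect("replay file")),
            || stdout().lock(),
        ),
        None => io_transport(|| stdin().lock(), || stdout().lock()),
    };
    (Connection { sender, receiver }, io_threads)
}
```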