mirror of
https://github.com/Myriad-Dreamin/tinymist.git
synced 2025-11-20 20:05:34 +00:00
feat: add js transport to sync-ls (#2029)
Some checks are pending
tinymist::auto_tag / auto-tag (push) Waiting to run
tinymist::ci / Duplicate Actions Detection (push) Waiting to run
tinymist::ci / Check Clippy, Formatting, Completion, Documentation, and Tests (Linux) (push) Waiting to run
tinymist::ci / Check Minimum Rust version and Tests (Windows) (push) Waiting to run
tinymist::ci / prepare-build (push) Waiting to run
tinymist::ci / announce (push) Blocked by required conditions
tinymist::ci / build (push) Blocked by required conditions
tinymist::gh_pages / build-gh-pages (push) Waiting to run
Some checks are pending
tinymist::auto_tag / auto-tag (push) Waiting to run
tinymist::ci / Duplicate Actions Detection (push) Waiting to run
tinymist::ci / Check Clippy, Formatting, Completion, Documentation, and Tests (Linux) (push) Waiting to run
tinymist::ci / Check Minimum Rust version and Tests (Windows) (push) Waiting to run
tinymist::ci / prepare-build (push) Waiting to run
tinymist::ci / announce (push) Blocked by required conditions
tinymist::ci / build (push) Blocked by required conditions
tinymist::gh_pages / build-gh-pages (push) Waiting to run
This commit is contained in:
parent
5613bd29be
commit
a561059a4e
18 changed files with 471 additions and 476 deletions
4
.github/workflows/release-crates.yml
vendored
4
.github/workflows/release-crates.yml
vendored
|
|
@ -32,11 +32,11 @@ jobs:
|
|||
sudo apt-get install llvm
|
||||
- name: Publish crates
|
||||
run: |
|
||||
cargo publish --no-verify -p sync-ls || true
|
||||
cargo publish --no-verify -p typst-shim || true
|
||||
cargo publish --no-verify -p tinymist-derive || true
|
||||
cargo publish --no-verify -p tinymist-l10n || true
|
||||
cargo publish --no-verify -p tinymist-std || true
|
||||
cargo publish --no-verify -p sync-ls || true
|
||||
cargo publish --no-verify -p tinymist-package || true
|
||||
cargo publish --no-verify -p tinymist-vfs || true
|
||||
cargo publish --no-verify -p tinymist-world || true
|
||||
|
|
@ -56,11 +56,11 @@ jobs:
|
|||
cargo publish --no-verify -p tinymist-cli || true
|
||||
- name: Verifies crate health (Optional)
|
||||
run: |
|
||||
cargo publish --dry-run -p sync-ls
|
||||
cargo publish --dry-run -p typst-shim
|
||||
cargo publish --dry-run -p tinymist-derive
|
||||
cargo publish --dry-run -p tinymist-l10n
|
||||
cargo publish --dry-run -p tinymist-std
|
||||
cargo publish --dry-run -p sync-ls
|
||||
cargo publish --dry-run -p tinymist-vfs
|
||||
cargo publish --dry-run -p tinymist-package
|
||||
cargo publish --dry-run -p tinymist-world
|
||||
|
|
|
|||
3
Cargo.lock
generated
3
Cargo.lock
generated
|
|
@ -3872,14 +3872,17 @@ dependencies = [
|
|||
"crossbeam-channel",
|
||||
"dapts",
|
||||
"futures",
|
||||
"js-sys",
|
||||
"log",
|
||||
"lsp-types",
|
||||
"parking_lot",
|
||||
"serde",
|
||||
"serde-wasm-bindgen",
|
||||
"serde_json",
|
||||
"tinymist-std",
|
||||
"tokio",
|
||||
"tokio-util",
|
||||
"wasm-bindgen",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
|
|
|||
|
|
@ -27,12 +27,16 @@ tinymist-std.workspace = true
|
|||
clap = { workspace = true, optional = true }
|
||||
tokio = { workspace = true, features = ["rt"], optional = true }
|
||||
tokio-util = { workspace = true, optional = true }
|
||||
js-sys = { workspace = true, optional = true }
|
||||
wasm-bindgen = { workspace = true, optional = true }
|
||||
serde-wasm-bindgen = { workspace = true, optional = true }
|
||||
|
||||
[features]
|
||||
dap = ["dapts"]
|
||||
lsp = ["lsp-types"]
|
||||
server = ["tokio"]
|
||||
system = ["tokio", "tokio/time", "tokio-util", "clap"]
|
||||
system = ["tokio", "tokio/time", "tokio-util", "clap", "tinymist-std/system"]
|
||||
web = ["js-sys", "wasm-bindgen", "serde-wasm-bindgen", "tinymist-std/web"]
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
features = ["dap", "lsp", "system", "server"]
|
||||
|
|
|
|||
|
|
@ -28,3 +28,24 @@ pub use crate::msg::ResponseError;
|
|||
pub type LspResult<T> = Result<T, ResponseError>;
|
||||
/// The common event type for language servers.
|
||||
pub type Event = Box<dyn Any + Send>;
|
||||
|
||||
/// Note that we must have our logging only write out to stderr.
|
||||
#[cfg(feature = "web")]
|
||||
fn dummy_transport<M: TryFrom<Message, Error = anyhow::Error> + GetMessageKind>() -> Connection<M> {
|
||||
let (event_sender, event_receiver) = crossbeam_channel::bounded::<crate::Event>(0);
|
||||
let (writer_sender, writer_receiver) = crossbeam_channel::bounded::<Message>(0);
|
||||
Connection {
|
||||
// lsp_sender,
|
||||
// lsp_receiver,
|
||||
sender: TConnectionTx {
|
||||
event: event_sender,
|
||||
lsp: writer_sender,
|
||||
marker: std::marker::PhantomData,
|
||||
},
|
||||
receiver: TConnectionRx {
|
||||
event: event_receiver,
|
||||
lsp: writer_receiver,
|
||||
marker: std::marker::PhantomData,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,8 +6,8 @@ use serde::de::DeserializeOwned;
|
|||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{
|
||||
invalid_data_fmt, read_msg_text, write_msg_text, ExtractError, LspOrDapResponse, RequestId,
|
||||
ResponseError,
|
||||
invalid_data_fmt, read_msg_text, write_msg_text, ExtractError, LspOrDapResponse, LspResult,
|
||||
RequestId, ResponseError,
|
||||
};
|
||||
|
||||
/// A message in the Language Server Protocol.
|
||||
|
|
@ -134,26 +134,19 @@ pub struct Response {
|
|||
}
|
||||
|
||||
impl Response {
|
||||
/// Creates a response with the success payload.
|
||||
pub fn new_ok<R: serde::Serialize>(id: RequestId, result: R) -> Response {
|
||||
Response {
|
||||
/// Creates a response
|
||||
pub fn new(id: RequestId, result: LspResult<serde_json::Value>) -> Response {
|
||||
match result {
|
||||
Ok(result) => Response {
|
||||
id,
|
||||
result: Some(serde_json::to_value(result).unwrap()),
|
||||
result: Some(result),
|
||||
error: None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a response with the failure reason.
|
||||
pub fn new_err(id: RequestId, code: i32, message: String) -> Response {
|
||||
let error = ResponseError {
|
||||
code,
|
||||
message,
|
||||
data: None,
|
||||
};
|
||||
Response {
|
||||
},
|
||||
Err(err) => Response {
|
||||
id,
|
||||
result: None,
|
||||
error: Some(error),
|
||||
error: Some(err),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -185,21 +185,17 @@ pub enum MessageKind {
|
|||
/// Gets the kind of the message.
|
||||
pub trait GetMessageKind {
|
||||
/// Returns the kind of the message.
|
||||
fn get_message_kind() -> MessageKind;
|
||||
const MESSAGE_KIND: MessageKind;
|
||||
}
|
||||
|
||||
#[cfg(feature = "lsp")]
|
||||
impl GetMessageKind for LspMessage {
|
||||
fn get_message_kind() -> MessageKind {
|
||||
MessageKind::Lsp
|
||||
}
|
||||
const MESSAGE_KIND: MessageKind = MessageKind::Lsp;
|
||||
}
|
||||
|
||||
#[cfg(feature = "dap")]
|
||||
impl GetMessageKind for DapMessage {
|
||||
fn get_message_kind() -> MessageKind {
|
||||
MessageKind::Dap
|
||||
}
|
||||
const MESSAGE_KIND: MessageKind = MessageKind::Dap;
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
|
|
|
|||
|
|
@ -11,6 +11,8 @@ use std::any::Any;
|
|||
use std::collections::HashMap;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::pin::Pin;
|
||||
#[cfg(feature = "web")]
|
||||
use std::sync::atomic::AtomicU32;
|
||||
use std::sync::{Arc, Weak};
|
||||
|
||||
use futures::future::MaybeDone;
|
||||
|
|
@ -19,8 +21,6 @@ use serde::Serialize;
|
|||
use serde_json::{from_value, Value as JsonValue};
|
||||
use tinymist_std::time::Instant;
|
||||
|
||||
#[cfg(feature = "lsp")]
|
||||
use crate::lsp::{Notification, Request};
|
||||
use crate::msg::*;
|
||||
use crate::req_queue;
|
||||
use crate::*;
|
||||
|
|
@ -35,6 +35,8 @@ pub type LspResponseFuture<T> = LspResult<ResponseFuture<T>>;
|
|||
pub type SchedulableResponse<T> = LspResponseFuture<LspResult<T>>;
|
||||
/// The common response future type for language servers.
|
||||
pub type AnySchedulableResponse = SchedulableResponse<JsonValue>;
|
||||
/// The result of a scheduling response
|
||||
pub type ScheduleResult = AnySchedulableResponse;
|
||||
/// The result of a scheduled response which could be finally caught by
|
||||
/// `schedule_tail`.
|
||||
/// - Returns Ok(Some()) -> Already responded
|
||||
|
|
@ -152,15 +154,7 @@ impl<S: 'static> TypedLspClient<S> {
|
|||
|
||||
/// Sends a event to the client itself.
|
||||
pub fn send_event<T: std::any::Any + Send + 'static>(&self, event: T) {
|
||||
let Some(sender) = self.sender.upgrade() else {
|
||||
log::warn!("failed to send request: connection closed");
|
||||
return;
|
||||
};
|
||||
|
||||
let Err(res) = sender.event.send(Box::new(event)) else {
|
||||
return;
|
||||
};
|
||||
log::warn!("failed to send event: {res:?}");
|
||||
self.sender.send_event(event);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -181,6 +175,8 @@ impl<S> std::ops::Deref for TypedLspClient<S> {
|
|||
}
|
||||
}
|
||||
|
||||
// send_request: Function,
|
||||
// send_notification: Function,
|
||||
/// The root of the language server host.
|
||||
/// Will close connection when dropped.
|
||||
#[derive(Debug, Clone)]
|
||||
|
|
@ -198,8 +194,27 @@ impl LspClientRoot {
|
|||
let _strong = Arc::new(sender.into());
|
||||
let weak = LspClient {
|
||||
handle,
|
||||
msg_kind: M::get_message_kind(),
|
||||
msg_kind: M::MESSAGE_KIND,
|
||||
sender: TransportHost::System(SystemTransportSender {
|
||||
sender: Arc::downgrade(&_strong),
|
||||
}),
|
||||
req_queue: Arc::new(Mutex::new(ReqQueue::default())),
|
||||
|
||||
hook: Arc::new(()),
|
||||
};
|
||||
Self { weak, _strong }
|
||||
}
|
||||
|
||||
/// Creates a new language server host from js.
|
||||
#[cfg(feature = "web")]
|
||||
pub fn new_js(handle: tokio::runtime::Handle, transport: JsTransportSender) -> Self {
|
||||
let dummy = dummy_transport::<LspMessage>();
|
||||
|
||||
let _strong = Arc::new(dummy.sender.into());
|
||||
let weak = LspClient {
|
||||
handle,
|
||||
msg_kind: LspMessage::MESSAGE_KIND,
|
||||
sender: TransportHost::Js(transport),
|
||||
req_queue: Arc::new(Mutex::new(ReqQueue::default())),
|
||||
|
||||
hook: Arc::new(()),
|
||||
|
|
@ -222,6 +237,166 @@ impl LspClientRoot {
|
|||
type ReqHandler = Box<dyn for<'a> FnOnce(&'a mut dyn Any, LspOrDapResponse) + Send + Sync>;
|
||||
type ReqQueue = req_queue::ReqQueue<(String, Instant), ReqHandler>;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
enum TransportHost {
|
||||
System(SystemTransportSender),
|
||||
#[cfg(feature = "web")]
|
||||
Js(JsTransportSender),
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct SystemTransportSender {
|
||||
pub(crate) sender: Weak<ConnectionTx>,
|
||||
}
|
||||
|
||||
/// Creates a new js transport host.
|
||||
#[cfg(feature = "web")]
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct JsTransportSender {
|
||||
event_id: Arc<AtomicU32>,
|
||||
events: Arc<Mutex<HashMap<u32, Event>>>,
|
||||
pub(crate) sender_event: js_sys::Function,
|
||||
pub(crate) sender_request: js_sys::Function,
|
||||
pub(crate) sender_notification: js_sys::Function,
|
||||
}
|
||||
|
||||
#[cfg(feature = "web")]
|
||||
impl JsTransportSender {
|
||||
/// Creates a new JS transport host.
|
||||
pub fn new(
|
||||
sender_event: js_sys::Function,
|
||||
sender_request: js_sys::Function,
|
||||
sender_notification: js_sys::Function,
|
||||
) -> Self {
|
||||
Self {
|
||||
event_id: Arc::new(AtomicU32::new(0)),
|
||||
events: Arc::new(Mutex::new(HashMap::new())),
|
||||
sender_event,
|
||||
sender_request,
|
||||
sender_notification,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "web")]
|
||||
/// SAFETY:
|
||||
/// This is only safe if the `JsTransportHost` is used in a single thread.
|
||||
unsafe impl Send for TransportHost {}
|
||||
|
||||
#[cfg(feature = "web")]
|
||||
/// SAFETY:
|
||||
/// This is only safe if the `JsTransportHost` is used in a single thread.
|
||||
unsafe impl Sync for TransportHost {}
|
||||
|
||||
impl TransportHost {
|
||||
/// Sends a event to the server itself.
|
||||
pub fn send_event<T: std::any::Any + Send + 'static>(&self, event: T) {
|
||||
match self {
|
||||
TransportHost::System(host) => {
|
||||
let Some(sender) = host.sender.upgrade() else {
|
||||
log::warn!("failed to send request: connection closed");
|
||||
return;
|
||||
};
|
||||
|
||||
if let Err(res) = sender.event.send(Box::new(event)) {
|
||||
log::warn!("failed to send event: {res:?}");
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "web")]
|
||||
TransportHost::Js(host) => {
|
||||
let event_id = {
|
||||
let event_id = host
|
||||
.event_id
|
||||
.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
|
||||
let mut lg = host.events.lock();
|
||||
lg.insert(event_id, Box::new(event));
|
||||
js_sys::Number::from(event_id)
|
||||
};
|
||||
if let Err(err) = host
|
||||
.sender_event
|
||||
.call1(&wasm_bindgen::JsValue::UNDEFINED, &event_id.into())
|
||||
{
|
||||
log::error!("failed to send event: {err:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn send_message(&self, response: Message) {
|
||||
match self {
|
||||
TransportHost::System(host) => {
|
||||
let Some(sender) = host.sender.upgrade() else {
|
||||
log::warn!("failed to send response: connection closed");
|
||||
return;
|
||||
};
|
||||
if let Err(res) = sender.lsp.send(response) {
|
||||
log::warn!("failed to send response: {res:?}");
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "web")]
|
||||
TransportHost::Js(host) => match response {
|
||||
#[cfg(feature = "lsp")]
|
||||
Message::Lsp(lsp::Message::Request(req)) => {
|
||||
let msg = to_js_value(&req).expect("failed to serialize request to js value");
|
||||
if let Err(err) = host
|
||||
.sender_request
|
||||
.call1(&wasm_bindgen::JsValue::UNDEFINED, &msg)
|
||||
{
|
||||
log::error!("failed to send request: {err:?}");
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "lsp")]
|
||||
Message::Lsp(lsp::Message::Notification(req)) => {
|
||||
let msg = to_js_value(&req).expect("failed to serialize request to js value");
|
||||
if let Err(err) = host
|
||||
.sender_notification
|
||||
.call1(&wasm_bindgen::JsValue::UNDEFINED, &msg)
|
||||
{
|
||||
log::error!("failed to send request: {err:?}");
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "lsp")]
|
||||
Message::Lsp(lsp::Message::Response(req)) => {
|
||||
panic!("unexpected response to js world: {req:?}");
|
||||
}
|
||||
#[cfg(feature = "dap")]
|
||||
Message::Dap(dap::Message::Request(req)) => {
|
||||
let msg = to_js_value(&req).expect("failed to serialize request to js value");
|
||||
if let Err(err) = host
|
||||
.sender_request
|
||||
.call1(&wasm_bindgen::JsValue::UNDEFINED, &msg)
|
||||
{
|
||||
log::error!("failed to send request: {err:?}");
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "dap")]
|
||||
Message::Dap(dap::Message::Event(req)) => {
|
||||
let msg = to_js_value(&req).expect("failed to serialize request to js value");
|
||||
if let Err(err) = host
|
||||
.sender_notification
|
||||
.call1(&wasm_bindgen::JsValue::UNDEFINED, &msg)
|
||||
{
|
||||
log::error!("failed to send request: {err:?}");
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "dap")]
|
||||
Message::Dap(dap::Message::Response(req)) => {
|
||||
panic!("unexpected response to js world: {req:?}");
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// todo: poor performance, struct -> serde_json -> serde_wasm_bindgen ->
|
||||
// serialize -> deserialize??
|
||||
#[cfg(feature = "web")]
|
||||
fn to_js_value<T: serde::Serialize>(
|
||||
value: &T,
|
||||
) -> Result<wasm_bindgen::JsValue, serde_wasm_bindgen::Error> {
|
||||
value.serialize(&serde_wasm_bindgen::Serializer::new().serialize_maps_as_objects(true))
|
||||
}
|
||||
|
||||
/// The host for the language server, or known as the LSP client.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct LspClient {
|
||||
|
|
@ -229,7 +404,7 @@ pub struct LspClient {
|
|||
pub handle: tokio::runtime::Handle,
|
||||
|
||||
pub(crate) msg_kind: MessageKind,
|
||||
pub(crate) sender: Weak<ConnectionTx>,
|
||||
sender: TransportHost,
|
||||
pub(crate) req_queue: Arc<Mutex<ReqQueue>>,
|
||||
|
||||
pub(crate) hook: Arc<dyn LsHook>,
|
||||
|
|
@ -261,14 +436,7 @@ impl LspClient {
|
|||
|
||||
/// Sends a event to the server itself.
|
||||
pub fn send_event<T: std::any::Any + Send + 'static>(&self, event: T) {
|
||||
let Some(sender) = self.sender.upgrade() else {
|
||||
log::warn!("failed to send request: connection closed");
|
||||
return;
|
||||
};
|
||||
|
||||
if let Err(res) = sender.event.send(Box::new(event)) {
|
||||
log::warn!("failed to send event: {res:?}");
|
||||
}
|
||||
self.sender.send_event(event);
|
||||
}
|
||||
|
||||
/// Completes an server2client request in the request queue.
|
||||
|
|
@ -308,19 +476,11 @@ impl LspClient {
|
|||
.register(id.clone(), (method.to_owned(), received_at));
|
||||
}
|
||||
|
||||
/// Responds a typed result to the client.
|
||||
pub fn respond_result<T: Serialize>(&self, id: RequestId, result: LspResult<T>) {
|
||||
let result = result.and_then(|t| serde_json::to_value(t).map_err(internal_error));
|
||||
self.respond_any_result(id, result);
|
||||
}
|
||||
|
||||
fn respond_any_result(&self, id: RequestId, result: LspResult<JsonValue>) {
|
||||
fn respond_result(&self, id: RequestId, result: LspResult<JsonValue>) {
|
||||
let req_id = id.clone();
|
||||
let msg: Message = match (self.msg_kind, result) {
|
||||
#[cfg(feature = "lsp")]
|
||||
(MessageKind::Lsp, Ok(resp)) => lsp::Response::new_ok(id, resp).into(),
|
||||
#[cfg(feature = "lsp")]
|
||||
(MessageKind::Lsp, Err(e)) => lsp::Response::new_err(id, e.code, e.message).into(),
|
||||
(MessageKind::Lsp, res) => lsp::Response::new(id, res).into(),
|
||||
#[cfg(feature = "dap")]
|
||||
(MessageKind::Dap, Ok(resp)) => dap::Response::success(RequestId::dap(id), resp).into(),
|
||||
#[cfg(feature = "dap")]
|
||||
|
|
@ -341,53 +501,28 @@ impl LspClient {
|
|||
|
||||
self.hook.stop_request(&id, &method, received_at);
|
||||
|
||||
let Some(sender) = self.sender.upgrade() else {
|
||||
log::warn!("failed to send response ({method}, {id}): connection closed");
|
||||
return;
|
||||
};
|
||||
if let Err(res) = sender.lsp.send(response) {
|
||||
log::warn!("failed to send response ({method}, {id}): {res:?}");
|
||||
}
|
||||
self.sender.send_message(response);
|
||||
}
|
||||
}
|
||||
|
||||
impl LspClient {
|
||||
/// Schedules a request from the client.
|
||||
pub fn schedule<T: Serialize + 'static>(
|
||||
&self,
|
||||
req_id: RequestId,
|
||||
resp: SchedulableResponse<T>,
|
||||
) -> ScheduledResult {
|
||||
let resp = resp?;
|
||||
|
||||
use futures::future::MaybeDone::*;
|
||||
match resp {
|
||||
Done(output) => {
|
||||
self.respond_result(req_id, output);
|
||||
}
|
||||
Future(fut) => {
|
||||
let client = self.clone();
|
||||
let req_id = req_id.clone();
|
||||
self.handle.spawn(async move {
|
||||
client.respond_result(req_id, fut.await);
|
||||
});
|
||||
}
|
||||
Gone => {
|
||||
log::warn!("response for request({req_id:?}) already taken");
|
||||
}
|
||||
};
|
||||
|
||||
Ok(Some(()))
|
||||
}
|
||||
|
||||
/// Finally sends the response if it is not sent before.
|
||||
/// From the definition, the response is already sent if it is `Some(())`.
|
||||
pub fn schedule_tail(&self, req_id: RequestId, resp: ScheduledResult) {
|
||||
pub async fn schedule_tail(self, req_id: RequestId, resp: ScheduleResult) {
|
||||
match resp {
|
||||
// Already responded
|
||||
Ok(Some(())) => {}
|
||||
// The requests that doesn't start.
|
||||
_ => self.respond_result(req_id, resp),
|
||||
Ok(MaybeDone::Done(result)) => {
|
||||
self.respond_result(req_id, result);
|
||||
}
|
||||
Ok(MaybeDone::Future(result)) => {
|
||||
self.respond_result(req_id, result.await);
|
||||
}
|
||||
Ok(MaybeDone::Gone) => {
|
||||
log::warn!("response for request({req_id:?}) already taken");
|
||||
self.respond_result(req_id, Err(internal_error("response already taken")));
|
||||
}
|
||||
Err(err) => {
|
||||
self.respond_result(req_id, Err(err));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
@ -429,9 +564,9 @@ impl LsHook for () {
|
|||
}
|
||||
|
||||
type AsyncHandler<S, T, R> = fn(srv: &mut S, args: T) -> SchedulableResponse<R>;
|
||||
type RawHandler<S, T> = fn(srv: &mut S, req_id: RequestId, args: T) -> ScheduledResult;
|
||||
type RawHandler<S, T> = fn(srv: &mut S, args: T) -> ScheduleResult;
|
||||
type BoxPureHandler<S, T> = Box<dyn Fn(&mut S, T) -> LspResult<()>>;
|
||||
type BoxHandler<S, T> = Box<dyn Fn(&mut S, &LspClient, RequestId, T) -> ScheduledResult>;
|
||||
type BoxHandler<S, T> = Box<dyn Fn(&mut S, T) -> SchedulableResponse<JsonValue>>;
|
||||
type ExecuteCmdMap<S> = HashMap<&'static str, BoxHandler<S, Vec<JsonValue>>>;
|
||||
type RegularCmdMap<S> = HashMap<&'static str, BoxHandler<S, JsonValue>>;
|
||||
type NotifyCmdMap<S> = HashMap<&'static str, BoxPureHandler<S, JsonValue>>;
|
||||
|
|
@ -510,26 +645,14 @@ where
|
|||
self
|
||||
}
|
||||
|
||||
/// Registers a raw resource handler.
|
||||
pub fn with_resource_(
|
||||
mut self,
|
||||
path: ImmutPath,
|
||||
handler: RawHandler<Args::S, Vec<JsonValue>>,
|
||||
) -> Self {
|
||||
self.resource_handlers.insert(path, raw_to_boxed(handler));
|
||||
self
|
||||
}
|
||||
|
||||
/// Registers an async resource handler.
|
||||
pub fn with_resource(
|
||||
mut self,
|
||||
path: &'static str,
|
||||
handler: fn(&mut Args::S, Vec<JsonValue>) -> AnySchedulableResponse,
|
||||
) -> Self {
|
||||
self.resource_handlers.insert(
|
||||
Path::new(path).into(),
|
||||
Box::new(move |s, client, req_id, req| client.schedule(req_id, handler(s, req))),
|
||||
);
|
||||
self.resource_handlers
|
||||
.insert(Path::new(path).into(), Box::new(handler));
|
||||
self
|
||||
}
|
||||
|
||||
|
|
@ -639,7 +762,7 @@ impl<M, Args: Initializer> LsDriver<M, Args> {
|
|||
|
||||
/// Get static resources with help of tinymist service, for example, a
|
||||
/// static help pages for some typst function.
|
||||
pub fn get_resources(&mut self, req_id: RequestId, args: Vec<JsonValue>) -> ScheduledResult {
|
||||
pub fn get_resources(&mut self, args: Vec<JsonValue>) -> ScheduleResult {
|
||||
let s = self.state.opt_mut().ok_or_else(not_initialized)?;
|
||||
|
||||
let path =
|
||||
|
|
@ -651,7 +774,7 @@ impl<M, Args: Initializer> LsDriver<M, Args> {
|
|||
};
|
||||
|
||||
// Note our redirection will keep the first path argument in the args vec.
|
||||
handler(s, &self.client, req_id, args)
|
||||
handler(s, args)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -701,8 +824,24 @@ fn from_json<T: serde::de::DeserializeOwned>(json: JsonValue) -> LspResult<T> {
|
|||
serde_json::from_value(json).map_err(invalid_request)
|
||||
}
|
||||
|
||||
fn raw_to_boxed<S: 'static, T: 'static>(handler: RawHandler<S, T>) -> BoxHandler<S, T> {
|
||||
Box::new(move |s, _client, req_id, req| handler(s, req_id, req))
|
||||
/// Erases the response type to a generic `JsonValue`.
|
||||
pub fn erased_response<T: Serialize + 'static>(resp: SchedulableResponse<T>) -> ScheduleResult {
|
||||
/// Responds a typed result to the client.
|
||||
fn map_respond_result<T: Serialize>(result: LspResult<T>) -> LspResult<JsonValue> {
|
||||
result.and_then(|t| serde_json::to_value(t).map_err(internal_error))
|
||||
}
|
||||
|
||||
let resp = resp?;
|
||||
|
||||
use futures::future::MaybeDone::*;
|
||||
Ok(match resp {
|
||||
Done(result) => MaybeDone::Done(map_respond_result(result)),
|
||||
Future(fut) => MaybeDone::Future(Box::pin(async move { map_respond_result(fut.await) })),
|
||||
Gone => {
|
||||
log::warn!("response already taken");
|
||||
MaybeDone::Done(Err(internal_error("response already taken")))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn resp_err(code: ErrorCode, msg: impl fmt::Display) -> ResponseError {
|
||||
|
|
|
|||
|
|
@ -12,14 +12,7 @@ impl<S: 'static> TypedLspClient<S> {
|
|||
|
||||
/// Sends an untyped dap_event to the client.
|
||||
pub fn send_dap_event_(&self, evt: dap::Event) {
|
||||
let method = &evt.event;
|
||||
let Some(sender) = self.sender.upgrade() else {
|
||||
log::warn!("failed to send dap event ({method}): connection closed");
|
||||
return;
|
||||
};
|
||||
if let Err(res) = sender.lsp.send(evt.into()) {
|
||||
log::warn!("failed to send dap event: {res:?}");
|
||||
}
|
||||
self.sender.send_message(evt.into());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -27,16 +20,6 @@ impl<Args: Initializer> LsBuilder<DapMessage, Args>
|
|||
where
|
||||
Args::S: 'static,
|
||||
{
|
||||
/// Registers an raw event handler.
|
||||
pub fn with_command_(
|
||||
mut self,
|
||||
cmd: &'static str,
|
||||
handler: RawHandler<Args::S, Vec<JsonValue>>,
|
||||
) -> Self {
|
||||
self.command_handlers.insert(cmd, raw_to_boxed(handler));
|
||||
self
|
||||
}
|
||||
|
||||
/// Registers an async command handler.
|
||||
pub fn with_command<R: Serialize + 'static>(
|
||||
mut self,
|
||||
|
|
@ -45,7 +28,7 @@ where
|
|||
) -> Self {
|
||||
self.command_handlers.insert(
|
||||
cmd,
|
||||
Box::new(move |s, client, req_id, req| client.schedule(req_id, handler(s, req))),
|
||||
Box::new(move |s, req| erased_response(handler(s, req))),
|
||||
);
|
||||
self
|
||||
}
|
||||
|
|
@ -56,7 +39,7 @@ where
|
|||
mut self,
|
||||
handler: RawHandler<Args::S, JsonValue>,
|
||||
) -> Self {
|
||||
self.req_handlers.insert(R::COMMAND, raw_to_boxed(handler));
|
||||
self.req_handlers.insert(R::COMMAND, Box::new(handler));
|
||||
self
|
||||
}
|
||||
|
||||
|
|
@ -65,11 +48,11 @@ where
|
|||
/// request.
|
||||
pub fn with_request_<R: dapts::IRequest>(
|
||||
mut self,
|
||||
handler: fn(&mut Args::S, RequestId, R::Arguments) -> ScheduledResult,
|
||||
handler: fn(&mut Args::S, R::Arguments) -> ScheduleResult,
|
||||
) -> Self {
|
||||
self.req_handlers.insert(
|
||||
R::COMMAND,
|
||||
Box::new(move |s, _client, req_id, req| handler(s, req_id, from_json(req)?)),
|
||||
Box::new(move |s, req| handler(s, from_json(req)?)),
|
||||
);
|
||||
self
|
||||
}
|
||||
|
|
@ -81,9 +64,7 @@ where
|
|||
) -> Self {
|
||||
self.req_handlers.insert(
|
||||
R::COMMAND,
|
||||
Box::new(move |s, client, req_id, req| {
|
||||
client.schedule(req_id, handler(s, from_json(req)?))
|
||||
}),
|
||||
Box::new(move |s, req| erased_response(handler(s, from_json(req)?))),
|
||||
);
|
||||
self
|
||||
}
|
||||
|
|
@ -100,6 +81,7 @@ where
|
|||
///
|
||||
/// See [`transport::MirrorArgs`] for information about the record-replay
|
||||
/// feature.
|
||||
#[cfg(feature = "system")]
|
||||
pub fn start(
|
||||
&mut self,
|
||||
inbox: TConnectionRx<DapMessage>,
|
||||
|
|
@ -133,6 +115,7 @@ where
|
|||
}
|
||||
|
||||
/// Starts the debug adaptor on the given connection.
|
||||
#[cfg(feature = "system")]
|
||||
pub fn start_(&mut self, inbox: TConnectionRx<DapMessage>) -> anyhow::Result<()> {
|
||||
use EventOrMessage::*;
|
||||
|
||||
|
|
@ -156,7 +139,13 @@ where
|
|||
|
||||
event_handler(s, &self.client, event)?;
|
||||
}
|
||||
Msg(DapMessage::Request(req)) => self.on_request(loop_start, req),
|
||||
Msg(DapMessage::Request(req)) => {
|
||||
let client = self.client.clone();
|
||||
let req_id = (req.seq as i32).into();
|
||||
client.register_request(&req.command, &req_id, loop_start);
|
||||
let fut = client.schedule_tail(req_id, self.on_request(req));
|
||||
self.client.handle.spawn(fut);
|
||||
}
|
||||
Msg(DapMessage::Event(not)) => {
|
||||
self.on_event(loop_start, not)?;
|
||||
}
|
||||
|
|
@ -180,12 +169,8 @@ where
|
|||
|
||||
/// Registers and handles a request. This should only be called once per
|
||||
/// incoming request.
|
||||
fn on_request(&mut self, request_received: Instant, req: dap::Request) {
|
||||
let req_id = (req.seq as i32).into();
|
||||
self.client
|
||||
.register_request(&req.command, &req_id, request_received);
|
||||
|
||||
let resp = match (&mut self.state, &*req.command) {
|
||||
fn on_request(&mut self, req: dap::Request) -> ScheduleResult {
|
||||
match (&mut self.state, &*req.command) {
|
||||
(State::Uninitialized(args), dapts::request::Initialize::COMMAND) => {
|
||||
// todo: what will happen if the request cannot be deserialized?
|
||||
let params = serde_json::from_value::<Args::I>(req.arguments);
|
||||
|
|
@ -240,22 +225,18 @@ where
|
|||
break 'serve_req just_result(Err(method_not_found()));
|
||||
};
|
||||
|
||||
let result = handler(s, &self.client, req_id.clone(), req.arguments);
|
||||
self.client.schedule_tail(req_id, result);
|
||||
let resp = handler(s, req.arguments);
|
||||
|
||||
if is_disconnect {
|
||||
self.state = State::ShuttingDown;
|
||||
}
|
||||
|
||||
return;
|
||||
resp
|
||||
}
|
||||
(State::ShuttingDown, _) => {
|
||||
just_result(Err(invalid_request("server is shutting down")))
|
||||
}
|
||||
};
|
||||
|
||||
let result = self.client.schedule(req_id.clone(), resp);
|
||||
self.client.schedule_tail(req_id, result);
|
||||
}
|
||||
}
|
||||
|
||||
/// Handles an incoming event.
|
||||
|
|
|
|||
|
|
@ -4,19 +4,6 @@ use lsp_types::{notification::Notification as Notif, request::Request as Req, *}
|
|||
|
||||
type PureHandler<S, T> = fn(srv: &mut S, args: T) -> LspResult<()>;
|
||||
|
||||
/// Converts a `ScheduledResult` to a `SchedulableResponse`.
|
||||
macro_rules! reschedule {
|
||||
($expr:expr) => {
|
||||
match $expr {
|
||||
Ok(Some(())) => return,
|
||||
Ok(None) => Ok(futures::future::MaybeDone::Done(Ok(
|
||||
serde_json::Value::Null,
|
||||
))),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl<S: 'static> TypedLspClient<S> {
|
||||
/// Sends a request to the client and registers a handler handled by the
|
||||
/// service `S`.
|
||||
|
|
@ -45,13 +32,7 @@ impl LspClient {
|
|||
Box::new(|s, resp| handler(s, resp.try_into().unwrap())),
|
||||
);
|
||||
|
||||
let Some(sender) = self.sender.upgrade() else {
|
||||
log::warn!("failed to send request: connection closed");
|
||||
return;
|
||||
};
|
||||
if let Err(res) = sender.lsp.send(request.into()) {
|
||||
log::warn!("failed to send request: {res:?}");
|
||||
}
|
||||
self.sender.send_message(request.into());
|
||||
}
|
||||
|
||||
/// Completes an client2server request in the request queue.
|
||||
|
|
@ -66,14 +47,7 @@ impl LspClient {
|
|||
|
||||
/// Sends an untyped notification to the client.
|
||||
pub fn send_notification_(&self, notif: lsp::Notification) {
|
||||
let method = ¬if.method;
|
||||
let Some(sender) = self.sender.upgrade() else {
|
||||
log::warn!("failed to send notification ({method}): connection closed");
|
||||
return;
|
||||
};
|
||||
if let Err(res) = sender.lsp.send(notif.into()) {
|
||||
log::warn!("failed to send notification: {res:?}");
|
||||
}
|
||||
self.sender.send_message(notif.into());
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -87,7 +61,7 @@ where
|
|||
cmd: &'static str,
|
||||
handler: RawHandler<Args::S, Vec<JsonValue>>,
|
||||
) -> Self {
|
||||
self.command_handlers.insert(cmd, raw_to_boxed(handler));
|
||||
self.command_handlers.insert(cmd, Box::new(handler));
|
||||
self
|
||||
}
|
||||
|
||||
|
|
@ -99,7 +73,7 @@ where
|
|||
) -> Self {
|
||||
self.command_handlers.insert(
|
||||
cmd,
|
||||
Box::new(move |s, client, req_id, req| client.schedule(req_id, handler(s, req))),
|
||||
Box::new(move |s, req| erased_response(handler(s, req))),
|
||||
);
|
||||
self
|
||||
}
|
||||
|
|
@ -125,7 +99,7 @@ where
|
|||
/// Registers a raw request handler that handlers a kind of untyped lsp
|
||||
/// request.
|
||||
pub fn with_raw_request<R: Req>(mut self, handler: RawHandler<Args::S, JsonValue>) -> Self {
|
||||
self.req_handlers.insert(R::METHOD, raw_to_boxed(handler));
|
||||
self.req_handlers.insert(R::METHOD, Box::new(handler));
|
||||
self
|
||||
}
|
||||
|
||||
|
|
@ -134,11 +108,11 @@ where
|
|||
/// request.
|
||||
pub fn with_request_<R: Req>(
|
||||
mut self,
|
||||
handler: fn(&mut Args::S, RequestId, R::Params) -> ScheduledResult,
|
||||
handler: fn(&mut Args::S, R::Params) -> ScheduleResult,
|
||||
) -> Self {
|
||||
self.req_handlers.insert(
|
||||
R::METHOD,
|
||||
Box::new(move |s, _client, req_id, req| handler(s, req_id, from_json(req)?)),
|
||||
Box::new(move |s, req| handler(s, from_json(req)?)),
|
||||
);
|
||||
self
|
||||
}
|
||||
|
|
@ -150,9 +124,7 @@ where
|
|||
) -> Self {
|
||||
self.req_handlers.insert(
|
||||
R::METHOD,
|
||||
Box::new(move |s, client, req_id, req| {
|
||||
client.schedule(req_id, handler(s, from_json(req)?))
|
||||
}),
|
||||
Box::new(move |s, req| erased_response(handler(s, from_json(req)?))),
|
||||
);
|
||||
self
|
||||
}
|
||||
|
|
@ -247,10 +219,21 @@ where
|
|||
|
||||
event_handler(s, &self.client, event)?;
|
||||
}
|
||||
Msg(LspMessage::Request(req)) => self.on_lsp_request(loop_start, req),
|
||||
Msg(LspMessage::Request(req)) => {
|
||||
let client = self.client.clone();
|
||||
let req_id = req.id.clone();
|
||||
client.register_request(&req.method, &req_id, loop_start);
|
||||
let fut =
|
||||
client.schedule_tail(req_id, self.on_lsp_request(&req.method, req.params));
|
||||
self.client.handle.spawn(fut);
|
||||
}
|
||||
Msg(LspMessage::Notification(not)) => {
|
||||
let is_exit = not.method == EXIT_METHOD;
|
||||
self.on_notification(loop_start, not)?;
|
||||
self.client.hook.start_notification(¬.method);
|
||||
let result = self.on_notification(¬.method, not.params);
|
||||
self.client
|
||||
.hook
|
||||
.stop_notification(¬.method, loop_start, result);
|
||||
if is_exit {
|
||||
return Ok(());
|
||||
}
|
||||
|
|
@ -273,17 +256,45 @@ where
|
|||
Ok(())
|
||||
}
|
||||
|
||||
/// Handles an incoming server event.
|
||||
#[cfg(feature = "web")]
|
||||
pub fn on_server_event(&mut self, event_id: u32) {
|
||||
let evt = match &self.client.sender {
|
||||
TransportHost::Js(sender) => sender.events.lock().remove(&event_id),
|
||||
TransportHost::System(_) => {
|
||||
panic!("cannot send server event in system transport");
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(event) = evt {
|
||||
let Some(event_handler) = self.events.get(&event.as_ref().type_id()) else {
|
||||
log::warn!("unhandled event: {:?}", event.as_ref().type_id());
|
||||
return;
|
||||
};
|
||||
|
||||
let s = match &mut self.state {
|
||||
State::Uninitialized(u) => ServiceState::Uninitialized(u.as_deref_mut()),
|
||||
State::Initializing(s) | State::Ready(s) => ServiceState::Ready(s),
|
||||
State::ShuttingDown => {
|
||||
log::warn!("server is shutting down");
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let res = event_handler(s, &self.client, event);
|
||||
if let Err(err) = res {
|
||||
log::error!("failed to handle server event {event_id}: {err}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Registers and handles a request. This should only be called once per
|
||||
/// incoming request.
|
||||
pub fn on_lsp_request(&mut self, request_received: Instant, req: Request) {
|
||||
self.client
|
||||
.register_request(&req.method, &req.id, request_received);
|
||||
|
||||
let req_id = req.id.clone();
|
||||
let resp = match (&mut self.state, &*req.method) {
|
||||
pub fn on_lsp_request(&mut self, method: &str, params: JsonValue) -> ScheduleResult {
|
||||
match (&mut self.state, method) {
|
||||
(State::Uninitialized(args), request::Initialize::METHOD) => {
|
||||
// todo: what will happen if the request cannot be deserialized?
|
||||
let params = serde_json::from_value::<Args::I>(req.params);
|
||||
let params = serde_json::from_value::<Args::I>(params);
|
||||
match params {
|
||||
Ok(params) => {
|
||||
let args = args.take().expect("already initialized");
|
||||
|
|
@ -301,11 +312,8 @@ where
|
|||
just_result(Err(invalid_request("server is already initialized")))
|
||||
}
|
||||
// todo: generalize this
|
||||
(State::Ready(..), request::ExecuteCommand::METHOD) => {
|
||||
reschedule!(self.on_execute_command(req))
|
||||
}
|
||||
(State::Ready(s), _) => 'serve_req: {
|
||||
let method = req.method.as_str();
|
||||
(State::Ready(..), request::ExecuteCommand::METHOD) => self.on_execute_command(params),
|
||||
(State::Ready(s), method) => 'serve_req: {
|
||||
let is_shutdown = method == request::Shutdown::METHOD;
|
||||
|
||||
let Some(handler) = self.requests.get(method) else {
|
||||
|
|
@ -313,29 +321,25 @@ where
|
|||
break 'serve_req just_result(Err(method_not_found()));
|
||||
};
|
||||
|
||||
let result = handler(s, &self.client, req_id.clone(), req.params);
|
||||
self.client.schedule_tail(req_id, result);
|
||||
let resp = handler(s, params);
|
||||
|
||||
if is_shutdown {
|
||||
self.state = State::ShuttingDown;
|
||||
}
|
||||
|
||||
return;
|
||||
resp
|
||||
}
|
||||
(State::ShuttingDown, _) => {
|
||||
just_result(Err(invalid_request("server is shutting down")))
|
||||
}
|
||||
};
|
||||
|
||||
let result = self.client.schedule(req_id.clone(), resp);
|
||||
self.client.schedule_tail(req_id, result);
|
||||
}
|
||||
}
|
||||
|
||||
/// The entry point for the `workspace/executeCommand` request.
|
||||
fn on_execute_command(&mut self, req: Request) -> ScheduledResult {
|
||||
fn on_execute_command(&mut self, params: JsonValue) -> ScheduleResult {
|
||||
let s = self.state.opt_mut().ok_or_else(not_initialized)?;
|
||||
|
||||
let params = from_value::<ExecuteCommandParams>(req.params)
|
||||
let params = from_value::<ExecuteCommandParams>(params)
|
||||
.map_err(|e| invalid_params(e.to_string()))?;
|
||||
|
||||
let ExecuteCommandParams {
|
||||
|
|
@ -344,38 +348,28 @@ where
|
|||
|
||||
// todo: generalize this
|
||||
if command == "tinymist.getResources" {
|
||||
self.get_resources(req.id, arguments)
|
||||
self.get_resources(arguments)
|
||||
} else {
|
||||
let Some(handler) = self.commands.get(command.as_str()) else {
|
||||
log::error!("asked to execute unknown command: {command}");
|
||||
return Err(method_not_found());
|
||||
};
|
||||
handler(s, &self.client, req.id, arguments)
|
||||
handler(s, arguments)
|
||||
}
|
||||
}
|
||||
|
||||
/// Handles an incoming notification.
|
||||
pub fn on_notification(
|
||||
&mut self,
|
||||
received_at: Instant,
|
||||
not: Notification,
|
||||
) -> anyhow::Result<()> {
|
||||
self.client.hook.start_notification(¬.method);
|
||||
let handle = |s, Notification { method, params }: Notification| {
|
||||
let Some(handler) = self.notifications.get(method.as_str()) else {
|
||||
pub fn on_notification(&mut self, method: &str, params: JsonValue) -> LspResult<()> {
|
||||
let handle = |s, method: &str, params: JsonValue| {
|
||||
let Some(handler) = self.notifications.get(method) else {
|
||||
log::warn!("unhandled notification: {method}");
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
let result = handler(s, params);
|
||||
self.client
|
||||
.hook
|
||||
.stop_notification(&method, received_at, result);
|
||||
|
||||
Ok(())
|
||||
handler(s, params)
|
||||
};
|
||||
|
||||
match (&mut self.state, &*not.method) {
|
||||
match (&mut self.state, method) {
|
||||
(state, notification::Initialized::METHOD) => {
|
||||
let mut s = State::ShuttingDown;
|
||||
std::mem::swap(state, &mut s);
|
||||
|
|
@ -395,9 +389,9 @@ where
|
|||
return Ok(());
|
||||
}
|
||||
};
|
||||
handle(s, not)
|
||||
handle(s, method, params)
|
||||
}
|
||||
(State::Ready(state), _) => handle(state, not),
|
||||
(State::Ready(state), method) => handle(state, method, params),
|
||||
// todo: whether it is safe to ignore notifications
|
||||
(State::Uninitialized(..) | State::Initializing(..), method) => {
|
||||
log::warn!("server is not ready yet, while received notification {method}");
|
||||
|
|
|
|||
|
|
@ -46,7 +46,7 @@ pub fn with_stdio_transport<M: TryFrom<Message, Error = anyhow::Error> + GetMess
|
|||
args: MirrorArgs,
|
||||
f: impl FnOnce(Connection<M>) -> anyhow::Result<()>,
|
||||
) -> anyhow::Result<()> {
|
||||
with_stdio_transport_impl(args, M::get_message_kind(), |conn| f(conn.into()))
|
||||
with_stdio_transport_impl(args, M::MESSAGE_KIND, |conn| f(conn.into()))
|
||||
}
|
||||
|
||||
/// Note that we must have our logging only write out to stderr.
|
||||
|
|
|
|||
|
|
@ -62,7 +62,7 @@ strum.workspace = true
|
|||
sync-ls = { workspace = true, features = ["lsp", "server", "system"] }
|
||||
tinymist-assets = { workspace = true }
|
||||
tinymist-query.workspace = true
|
||||
tinymist-std.workspace = true
|
||||
tinymist-std = { workspace = true, default-features = false }
|
||||
tinymist = { workspace = true, default-features = false, features = ["system"] }
|
||||
tinymist-project = { workspace = true, features = ["lsp"] }
|
||||
tinymist-render.workspace = true
|
||||
|
|
|
|||
|
|
@ -21,7 +21,7 @@
|
|||
"scripts": {
|
||||
"build:dev": "wasm-pack build --target web --dev -- --no-default-features --features web,no-content-hint",
|
||||
"build:node": "wasm-pack build --target nodejs -- --no-default-features --features web,no-content-hint",
|
||||
"build": "wasm-pack build --target web -- --no-default-features --features web,no-content-hint",
|
||||
"build": "wasm-pack build --target web --dev -- --no-default-features --features web,no-content-hint",
|
||||
"publish:dry": "npm publish --dry-run",
|
||||
"publish:lib": "npm publish || exit 0",
|
||||
"test:chrome": "wasm-pack test --chrome --headless --release",
|
||||
|
|
|
|||
|
|
@ -6,7 +6,6 @@ use std::path::PathBuf;
|
|||
use lsp_types::TextDocumentIdentifier;
|
||||
use serde::Deserialize;
|
||||
use serde_json::Value as JsonValue;
|
||||
use sync_ls::RequestId;
|
||||
#[cfg(feature = "trace")]
|
||||
use task::TraceParams;
|
||||
use tinymist_assets::TYPST_PREVIEW_HTML;
|
||||
|
|
@ -23,7 +22,7 @@ use typst::syntax::{LinkedNode, Source};
|
|||
use world::TaskInputs;
|
||||
|
||||
use super::*;
|
||||
use crate::lsp::query::{run_query, LspClientExt};
|
||||
use crate::lsp::query::run_query;
|
||||
use crate::tool::ast::AstRepr;
|
||||
|
||||
#[cfg(feature = "system")]
|
||||
|
|
@ -86,7 +85,7 @@ struct ExportSyntaxRangeOpts {
|
|||
/// Here are implemented the handlers for each command.
|
||||
impl ServerState {
|
||||
/// Export the current document as PDF file(s).
|
||||
pub fn export_pdf(&mut self, req_id: RequestId, mut args: Vec<JsonValue>) -> ScheduledResult {
|
||||
pub fn export_pdf(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
|
||||
let opts = get_arg_or_default!(args[1] as ExportOpts);
|
||||
|
||||
let creation_timestamp = if let Some(value) = opts.creation_timestamp {
|
||||
|
|
@ -101,7 +100,6 @@ impl ServerState {
|
|||
|
||||
let export = self.config.export_task();
|
||||
self.export(
|
||||
req_id,
|
||||
ProjectTask::ExportPdf(ExportPdfTask {
|
||||
export,
|
||||
pdf_standards: pdf_standards.unwrap_or_default(),
|
||||
|
|
@ -113,11 +111,10 @@ impl ServerState {
|
|||
}
|
||||
|
||||
/// Export the current document as HTML file(s).
|
||||
pub fn export_html(&mut self, req_id: RequestId, mut args: Vec<JsonValue>) -> ScheduledResult {
|
||||
pub fn export_html(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
|
||||
let opts = get_arg_or_default!(args[1] as ExportOpts);
|
||||
let export = self.config.export_task();
|
||||
self.export(
|
||||
req_id,
|
||||
ProjectTask::ExportHtml(ExportHtmlTask { export }),
|
||||
opts.open.unwrap_or_default(),
|
||||
args,
|
||||
|
|
@ -125,15 +122,10 @@ impl ServerState {
|
|||
}
|
||||
|
||||
/// Export the current document as Markdown file(s).
|
||||
pub fn export_markdown(
|
||||
&mut self,
|
||||
req_id: RequestId,
|
||||
mut args: Vec<JsonValue>,
|
||||
) -> ScheduledResult {
|
||||
pub fn export_markdown(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
|
||||
let opts = get_arg_or_default!(args[1] as ExportTypliteOpts);
|
||||
let export = self.config.export_task();
|
||||
self.export(
|
||||
req_id,
|
||||
ProjectTask::ExportMd(ExportMarkdownTask {
|
||||
processor: opts.processor,
|
||||
assets_path: opts.assets_path,
|
||||
|
|
@ -145,11 +137,10 @@ impl ServerState {
|
|||
}
|
||||
|
||||
/// Export the current document as Tex file(s).
|
||||
pub fn export_tex(&mut self, req_id: RequestId, mut args: Vec<JsonValue>) -> ScheduledResult {
|
||||
pub fn export_tex(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
|
||||
let opts = get_arg_or_default!(args[1] as ExportTypliteOpts);
|
||||
let export = self.config.export_task();
|
||||
self.export(
|
||||
req_id,
|
||||
ProjectTask::ExportTeX(ExportTeXTask {
|
||||
processor: opts.processor,
|
||||
assets_path: opts.assets_path,
|
||||
|
|
@ -161,11 +152,10 @@ impl ServerState {
|
|||
}
|
||||
|
||||
/// Export the current document as Text file(s).
|
||||
pub fn export_text(&mut self, req_id: RequestId, mut args: Vec<JsonValue>) -> ScheduledResult {
|
||||
pub fn export_text(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
|
||||
let opts = get_arg_or_default!(args[1] as ExportOpts);
|
||||
let export = self.config.export_task();
|
||||
self.export(
|
||||
req_id,
|
||||
ProjectTask::ExportText(ExportTextTask { export }),
|
||||
opts.open.unwrap_or_default(),
|
||||
args,
|
||||
|
|
@ -173,7 +163,7 @@ impl ServerState {
|
|||
}
|
||||
|
||||
/// Query the current document and export the result as JSON file(s).
|
||||
pub fn export_query(&mut self, req_id: RequestId, mut args: Vec<JsonValue>) -> ScheduledResult {
|
||||
pub fn export_query(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
|
||||
let opts = get_arg_or_default!(args[1] as QueryOpts);
|
||||
// todo: deprecate it
|
||||
let _ = opts.strict;
|
||||
|
|
@ -184,7 +174,6 @@ impl ServerState {
|
|||
}
|
||||
|
||||
self.export(
|
||||
req_id,
|
||||
ProjectTask::Query(QueryTask {
|
||||
format: opts.format,
|
||||
output_extension: opts.output_extension,
|
||||
|
|
@ -199,14 +188,13 @@ impl ServerState {
|
|||
}
|
||||
|
||||
/// Export the current document as Svg file(s).
|
||||
pub fn export_svg(&mut self, req_id: RequestId, mut args: Vec<JsonValue>) -> ScheduledResult {
|
||||
pub fn export_svg(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
|
||||
let opts = get_arg_or_default!(args[1] as ExportOpts);
|
||||
|
||||
let mut export = self.config.export_task();
|
||||
select_page(&mut export, opts.page).map_err(invalid_params)?;
|
||||
|
||||
self.export(
|
||||
req_id,
|
||||
ProjectTask::ExportSvg(ExportSvgTask { export }),
|
||||
opts.open.unwrap_or_default(),
|
||||
args,
|
||||
|
|
@ -214,7 +202,7 @@ impl ServerState {
|
|||
}
|
||||
|
||||
/// Export the current document as Png file(s).
|
||||
pub fn export_png(&mut self, req_id: RequestId, mut args: Vec<JsonValue>) -> ScheduledResult {
|
||||
pub fn export_png(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
|
||||
let opts = get_arg_or_default!(args[1] as ExportOpts);
|
||||
|
||||
let ppi = opts.ppi.unwrap_or(144.);
|
||||
|
|
@ -227,7 +215,6 @@ impl ServerState {
|
|||
select_page(&mut export, opts.page).map_err(invalid_params)?;
|
||||
|
||||
self.export(
|
||||
req_id,
|
||||
ProjectTask::ExportPng(ExportPngTask {
|
||||
fill: opts.fill,
|
||||
ppi,
|
||||
|
|
@ -242,14 +229,14 @@ impl ServerState {
|
|||
/// for passing the correct absolute path of typst document.
|
||||
pub fn export(
|
||||
&mut self,
|
||||
req_id: RequestId,
|
||||
|
||||
task: ProjectTask,
|
||||
open: bool,
|
||||
mut args: Vec<JsonValue>,
|
||||
) -> ScheduledResult {
|
||||
) -> ScheduleResult {
|
||||
let path = get_arg!(args[0] as PathBuf);
|
||||
|
||||
run_query!(req_id, self.OnExport(path, open, task))
|
||||
run_query!(self.OnExport(path, open, task))
|
||||
}
|
||||
|
||||
/// Export a range of the current document as Ansi highlighted text.
|
||||
|
|
@ -294,9 +281,7 @@ impl ServerState {
|
|||
range: Option<LspRange>,
|
||||
f: impl Fn(Source, Option<Range<usize>>) -> LspResult<T>,
|
||||
) -> LspResult<T> {
|
||||
let s = self
|
||||
.query_source(path.into(), Ok)
|
||||
.map_err(|e| internal_error(format!("cannot find source: {e}")))?;
|
||||
let s = self.query_source(path.into(), Ok)?;
|
||||
|
||||
// todo: cannot select syntax-sensitive data well
|
||||
// let node = LinkedNode::new(s.root());
|
||||
|
|
@ -512,11 +497,7 @@ impl ServerState {
|
|||
}
|
||||
|
||||
/// Interact with the code context at the source file.
|
||||
pub fn interact_code_context(
|
||||
&mut self,
|
||||
req_id: RequestId,
|
||||
_arguments: Vec<JsonValue>,
|
||||
) -> ScheduledResult {
|
||||
pub fn interact_code_context(&mut self, _arguments: Vec<JsonValue>) -> ScheduleResult {
|
||||
let queries = _arguments.into_iter().next().ok_or_else(|| {
|
||||
invalid_params("The first parameter is not a valid code context query array")
|
||||
})?;
|
||||
|
|
@ -533,7 +514,7 @@ impl ServerState {
|
|||
let path = as_path(params.text_document);
|
||||
let query = params.query;
|
||||
|
||||
run_query!(req_id, self.InteractCodeContext(path, query))
|
||||
run_query!(self.InteractCodeContext(path, query))
|
||||
}
|
||||
|
||||
/// Get the trace data of the document.
|
||||
|
|
@ -631,31 +612,19 @@ impl ServerState {
|
|||
}
|
||||
|
||||
/// Get the metrics of the document.
|
||||
pub fn get_document_metrics(
|
||||
&mut self,
|
||||
req_id: RequestId,
|
||||
mut args: Vec<JsonValue>,
|
||||
) -> ScheduledResult {
|
||||
pub fn get_document_metrics(&mut self, mut args: Vec<JsonValue>) -> ScheduleResult {
|
||||
let path = get_arg!(args[0] as PathBuf);
|
||||
run_query!(req_id, self.DocumentMetrics(path))
|
||||
run_query!(self.DocumentMetrics(path))
|
||||
}
|
||||
|
||||
/// Get all syntactic labels in workspace.
|
||||
pub fn get_workspace_labels(
|
||||
&mut self,
|
||||
req_id: RequestId,
|
||||
_arguments: Vec<JsonValue>,
|
||||
) -> ScheduledResult {
|
||||
run_query!(req_id, self.WorkspaceLabel())
|
||||
pub fn get_workspace_labels(&mut self, _arguments: Vec<JsonValue>) -> ScheduleResult {
|
||||
run_query!(self.WorkspaceLabel())
|
||||
}
|
||||
|
||||
/// Get the server info.
|
||||
pub fn get_server_info(
|
||||
&mut self,
|
||||
req_id: RequestId,
|
||||
_arguments: Vec<JsonValue>,
|
||||
) -> ScheduledResult {
|
||||
run_query!(req_id, self.ServerInfo())
|
||||
pub fn get_server_info(&mut self, _arguments: Vec<JsonValue>) -> ScheduleResult {
|
||||
run_query!(self.ServerInfo())
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -111,10 +111,10 @@ impl ServerState {
|
|||
pub fn query_source<T>(
|
||||
&self,
|
||||
path: ImmutPath,
|
||||
f: impl FnOnce(Source) -> Result<T>,
|
||||
) -> Result<T> {
|
||||
f: impl FnOnce(Source) -> LspResult<T>,
|
||||
) -> LspResult<T> {
|
||||
let snapshot = self.memory_changes.get(&path);
|
||||
let snapshot = snapshot.ok_or_else(|| anyhow::anyhow!("file missing {path:?}"))?;
|
||||
let snapshot = snapshot.ok_or_else(|| internal_error(format!("file missing {path:?}")))?;
|
||||
let source = snapshot.clone();
|
||||
f(source)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -91,10 +91,10 @@ impl Initializer for SuperInit {
|
|||
} = self;
|
||||
let const_config = config.const_config.clone();
|
||||
// Bootstrap server
|
||||
let service = ServerState::main(client, config, err.is_none());
|
||||
let state = ServerState::main(client, config, err.is_none());
|
||||
|
||||
if let Some(err) = err {
|
||||
return (service, Err(err));
|
||||
return (state, Err(err));
|
||||
}
|
||||
|
||||
let semantic_tokens_provider = (!const_config.tokens_dynamic_registration).then(|| {
|
||||
|
|
@ -209,6 +209,6 @@ impl Initializer for SuperInit {
|
|||
};
|
||||
|
||||
let res = serde_json::to_value(res).map_err(|e| invalid_params(e.to_string()));
|
||||
(service, just_result(res))
|
||||
(state, just_result(res))
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,6 +1,5 @@
|
|||
//! tinymist's language server
|
||||
|
||||
use futures::future::MaybeDone;
|
||||
use lsp_types::request::GotoDeclarationParams;
|
||||
use lsp_types::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
|
@ -8,223 +7,134 @@ use sync_ls::*;
|
|||
use tinymist_query::{
|
||||
CompilerQueryRequest, CompilerQueryResponse, FoldRequestFeature, SyntaxRequest,
|
||||
};
|
||||
use tinymist_std::{ImmutPath, Result};
|
||||
use tinymist_std::ImmutPath;
|
||||
|
||||
use crate::project::{EntryState, TaskInputs, DETACHED_ENTRY};
|
||||
use crate::{as_path, as_path_, as_path_pos, FormatterMode, ServerState};
|
||||
|
||||
/// The future type for a lsp query.
|
||||
pub type QueryFuture = Result<ResponseFuture<Result<CompilerQueryResponse>>>;
|
||||
|
||||
pub trait LspClientExt {
|
||||
fn schedule_query(&self, req_id: RequestId, query_fut: QueryFuture) -> ScheduledResult;
|
||||
}
|
||||
|
||||
impl LspClientExt for LspClient {
|
||||
/// Schedules a query from the client.
|
||||
fn schedule_query(&self, req_id: RequestId, query_fut: QueryFuture) -> ScheduledResult {
|
||||
let fut = query_fut.map_err(|e| internal_error(e.to_string()))?;
|
||||
let fut: SchedulableResponse<CompilerQueryResponse> = Ok(match fut {
|
||||
MaybeDone::Done(res) => {
|
||||
MaybeDone::Done(res.map_err(|err| internal_error(err.to_string())))
|
||||
}
|
||||
MaybeDone::Future(fut) => MaybeDone::Future(Box::pin(async move {
|
||||
let res = fut.await;
|
||||
res.map_err(|err| internal_error(err.to_string()))
|
||||
})),
|
||||
MaybeDone::Gone => MaybeDone::Gone,
|
||||
});
|
||||
self.schedule(req_id, fut)
|
||||
}
|
||||
}
|
||||
pub type QueryFuture = SchedulableResponse<CompilerQueryResponse>;
|
||||
|
||||
macro_rules! run_query {
|
||||
($req_id: ident, $self: ident.$query: ident ($($arg_key:ident),* $(,)?)) => {{
|
||||
($self: ident.$query: ident ($($arg_key:ident),* $(,)?)) => {{
|
||||
use tinymist_query::*;
|
||||
let req = paste::paste! { [<$query Request>] { $($arg_key),* } };
|
||||
let query_fut = $self.query(CompilerQueryRequest::$query(req.clone()));
|
||||
        $self.client.untyped().schedule_query($req_id, query_fut)
        erased_response($self.query(CompilerQueryRequest::$query(req.clone())))
    }};
}
pub(crate) use run_query;

/// LSP Standard Language Features
impl ServerState {
    pub(crate) fn goto_definition(
        &mut self,
        req_id: RequestId,
        params: GotoDefinitionParams,
    ) -> ScheduledResult {
    pub(crate) fn goto_definition(&mut self, params: GotoDefinitionParams) -> ScheduleResult {
        let (path, position) = as_path_pos(params.text_document_position_params);
        run_query!(req_id, self.GotoDefinition(path, position))
        run_query!(self.GotoDefinition(path, position))
    }

    pub(crate) fn goto_declaration(
        &mut self,
        req_id: RequestId,
        params: GotoDeclarationParams,
    ) -> ScheduledResult {
    pub(crate) fn goto_declaration(&mut self, params: GotoDeclarationParams) -> ScheduleResult {
        let (path, position) = as_path_pos(params.text_document_position_params);
        run_query!(req_id, self.GotoDeclaration(path, position))
        run_query!(self.GotoDeclaration(path, position))
    }

    pub(crate) fn references(
        &mut self,
        req_id: RequestId,
        params: ReferenceParams,
    ) -> ScheduledResult {
    pub(crate) fn references(&mut self, params: ReferenceParams) -> ScheduleResult {
        let (path, position) = as_path_pos(params.text_document_position);
        run_query!(req_id, self.References(path, position))
        run_query!(self.References(path, position))
    }

    pub(crate) fn hover(&mut self, req_id: RequestId, params: HoverParams) -> ScheduledResult {
    pub(crate) fn hover(&mut self, params: HoverParams) -> ScheduleResult {
        let (path, position) = as_path_pos(params.text_document_position_params);
        self.implicit_focus_entry(|| Some(path.as_path().into()), 'h');

        self.implicit_position = Some(position);
        run_query!(req_id, self.Hover(path, position))
        run_query!(self.Hover(path, position))
    }

    pub(crate) fn folding_range(
        &mut self,
        req_id: RequestId,
        params: FoldingRangeParams,
    ) -> ScheduledResult {
    pub(crate) fn folding_range(&mut self, params: FoldingRangeParams) -> ScheduleResult {
        let path = as_path(params.text_document);
        let line_folding_only = self.const_config().doc_line_folding_only;
        self.implicit_focus_entry(|| Some(path.as_path().into()), 'f');
        run_query!(req_id, self.FoldingRange(path, line_folding_only))
        run_query!(self.FoldingRange(path, line_folding_only))
    }

    pub(crate) fn selection_range(
        &mut self,
        req_id: RequestId,
        params: SelectionRangeParams,
    ) -> ScheduledResult {
    pub(crate) fn selection_range(&mut self, params: SelectionRangeParams) -> ScheduleResult {
        let path = as_path(params.text_document);
        let positions = params.positions;
        run_query!(req_id, self.SelectionRange(path, positions))
        run_query!(self.SelectionRange(path, positions))
    }

    pub(crate) fn document_highlight(
        &mut self,
        req_id: RequestId,
        params: DocumentHighlightParams,
    ) -> ScheduledResult {
    pub(crate) fn document_highlight(&mut self, params: DocumentHighlightParams) -> ScheduleResult {
        let (path, position) = as_path_pos(params.text_document_position_params);
        run_query!(req_id, self.DocumentHighlight(path, position))
        run_query!(self.DocumentHighlight(path, position))
    }

    pub(crate) fn document_symbol(
        &mut self,
        req_id: RequestId,
        params: DocumentSymbolParams,
    ) -> ScheduledResult {
    pub(crate) fn document_symbol(&mut self, params: DocumentSymbolParams) -> ScheduleResult {
        let path = as_path(params.text_document);
        run_query!(req_id, self.DocumentSymbol(path))
        run_query!(self.DocumentSymbol(path))
    }

    pub(crate) fn semantic_tokens_full(
        &mut self,
        req_id: RequestId,
        params: SemanticTokensParams,
    ) -> ScheduledResult {
    pub(crate) fn semantic_tokens_full(&mut self, params: SemanticTokensParams) -> ScheduleResult {
        let path = as_path(params.text_document);
        self.implicit_focus_entry(|| Some(path.as_path().into()), 't');
        run_query!(req_id, self.SemanticTokensFull(path))
        run_query!(self.SemanticTokensFull(path))
    }

    pub(crate) fn semantic_tokens_full_delta(
        &mut self,
        req_id: RequestId,
        params: SemanticTokensDeltaParams,
    ) -> ScheduledResult {
    ) -> ScheduleResult {
        let path = as_path(params.text_document);
        let previous_result_id = params.previous_result_id;
        self.implicit_focus_entry(|| Some(path.as_path().into()), 't');
        run_query!(req_id, self.SemanticTokensDelta(path, previous_result_id))
        run_query!(self.SemanticTokensDelta(path, previous_result_id))
    }

    pub(crate) fn formatting(
        &mut self,
        req_id: RequestId,
        params: DocumentFormattingParams,
    ) -> ScheduledResult {
    pub(crate) fn formatting(&mut self, params: DocumentFormattingParams) -> ScheduleResult {
        if matches!(self.config.formatter_mode, FormatterMode::Disable) {
            return Ok(None);
            return just_ok(serde_json::Value::Null);
        }

        let path: ImmutPath = as_path(params.text_document).as_path().into();
        let source = self
            .query_source(path, |source: typst::syntax::Source| Ok(source))
            .map_err(|e| internal_error(format!("could not format document: {e}")))?;
        self.client.schedule(req_id, self.formatter.run(source))
        let source = self.query_source(path, |source: typst::syntax::Source| Ok(source))?;
        erased_response(self.formatter.run(source))
    }

    pub(crate) fn inlay_hint(
        &mut self,
        req_id: RequestId,
        params: InlayHintParams,
    ) -> ScheduledResult {
    pub(crate) fn inlay_hint(&mut self, params: InlayHintParams) -> ScheduleResult {
        let path = as_path(params.text_document);
        let range = params.range;
        run_query!(req_id, self.InlayHint(path, range))
        run_query!(self.InlayHint(path, range))
    }

    pub(crate) fn document_color(
        &mut self,
        req_id: RequestId,
        params: DocumentColorParams,
    ) -> ScheduledResult {
    pub(crate) fn document_color(&mut self, params: DocumentColorParams) -> ScheduleResult {
        let path = as_path(params.text_document);
        run_query!(req_id, self.DocumentColor(path))
        run_query!(self.DocumentColor(path))
    }

    pub(crate) fn document_link(
        &mut self,
        req_id: RequestId,
        params: DocumentLinkParams,
    ) -> ScheduledResult {
    pub(crate) fn document_link(&mut self, params: DocumentLinkParams) -> ScheduleResult {
        let path = as_path(params.text_document);
        run_query!(req_id, self.DocumentLink(path))
        run_query!(self.DocumentLink(path))
    }

    pub(crate) fn color_presentation(
        &mut self,
        req_id: RequestId,
        params: ColorPresentationParams,
    ) -> ScheduledResult {
    pub(crate) fn color_presentation(&mut self, params: ColorPresentationParams) -> ScheduleResult {
        let path = as_path(params.text_document);
        let color = params.color;
        let range = params.range;
        run_query!(req_id, self.ColorPresentation(path, color, range))
        run_query!(self.ColorPresentation(path, color, range))
    }

    pub(crate) fn code_action(
        &mut self,
        req_id: RequestId,
        params: CodeActionParams,
    ) -> ScheduledResult {
    pub(crate) fn code_action(&mut self, params: CodeActionParams) -> ScheduleResult {
        let path = as_path(params.text_document);
        let range = params.range;
        let context = params.context;
        run_query!(req_id, self.CodeAction(path, range, context))
        run_query!(self.CodeAction(path, range, context))
    }

    pub(crate) fn code_lens(
        &mut self,
        req_id: RequestId,
        params: CodeLensParams,
    ) -> ScheduledResult {
    pub(crate) fn code_lens(&mut self, params: CodeLensParams) -> ScheduleResult {
        let path = as_path(params.text_document);
        run_query!(req_id, self.CodeLens(path))
        run_query!(self.CodeLens(path))
    }

    pub(crate) fn completion(
        &mut self,
        req_id: RequestId,
        params: CompletionParams,
    ) -> ScheduledResult {
    pub(crate) fn completion(&mut self, params: CompletionParams) -> ScheduleResult {
        let (path, position) = as_path_pos(params.text_document_position);
        let context = params.context.as_ref();
        let explicit =

@ -234,58 +144,39 @@ impl ServerState {
            .and_then(|c| c.chars().next());

        self.implicit_position = Some(position);
        run_query!(
            req_id,
            self.Completion(path, position, explicit, trigger_character)
        )
        run_query!(self.Completion(path, position, explicit, trigger_character))
    }

    pub(crate) fn signature_help(
        &mut self,
        req_id: RequestId,
        params: SignatureHelpParams,
    ) -> ScheduledResult {
    pub(crate) fn signature_help(&mut self, params: SignatureHelpParams) -> ScheduleResult {
        let (path, position) = as_path_pos(params.text_document_position_params);

        self.implicit_position = Some(position);
        run_query!(req_id, self.SignatureHelp(path, position))
        run_query!(self.SignatureHelp(path, position))
    }

    pub(crate) fn rename(&mut self, req_id: RequestId, params: RenameParams) -> ScheduledResult {
    pub(crate) fn rename(&mut self, params: RenameParams) -> ScheduleResult {
        let (path, position) = as_path_pos(params.text_document_position);
        let new_name = params.new_name;
        run_query!(req_id, self.Rename(path, position, new_name))
        run_query!(self.Rename(path, position, new_name))
    }

    pub(crate) fn prepare_rename(
        &mut self,
        req_id: RequestId,
        params: TextDocumentPositionParams,
    ) -> ScheduledResult {
    pub(crate) fn prepare_rename(&mut self, params: TextDocumentPositionParams) -> ScheduleResult {
        let (path, position) = as_path_pos(params);
        run_query!(req_id, self.PrepareRename(path, position))
        run_query!(self.PrepareRename(path, position))
    }

    pub(crate) fn symbol(
        &mut self,
        req_id: RequestId,
        params: WorkspaceSymbolParams,
    ) -> ScheduledResult {
    pub(crate) fn symbol(&mut self, params: WorkspaceSymbolParams) -> ScheduleResult {
        let pattern = (!params.query.is_empty()).then_some(params.query);
        run_query!(req_id, self.Symbol(pattern))
        run_query!(self.Symbol(pattern))
    }

    pub(crate) fn on_enter(&mut self, req_id: RequestId, params: OnEnterParams) -> ScheduledResult {
    pub(crate) fn on_enter(&mut self, params: OnEnterParams) -> ScheduleResult {
        let path = as_path(params.text_document);
        let range = params.range;
        run_query!(req_id, self.OnEnter(path, range))
        run_query!(self.OnEnter(path, range))
    }

    pub(crate) fn will_rename_files(
        &mut self,
        req_id: RequestId,
        params: RenameFilesParams,
    ) -> ScheduledResult {
    pub(crate) fn will_rename_files(&mut self, params: RenameFilesParams) -> ScheduleResult {
        log::info!("will rename files {params:?}");
        let paths = params
            .files

@ -299,7 +190,7 @@ impl ServerState {
            .collect::<Option<Vec<_>>>()
            .ok_or_else(|| invalid_params("invalid urls"))?;

        run_query!(req_id, self.WillRenameFiles(paths))
        run_query!(self.WillRenameFiles(paths))
    }
}
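
Read together, the hunks above make one mechanical change to every LSP handler in this impl block: the explicit `req_id: RequestId` parameter and the `ScheduledResult` return type are dropped, and `run_query!` no longer threads the request id, since replies are now produced as erased responses via `erased_response` rather than scheduled against a specific id. A minimal before/after sketch, drawn only from the diff above and shown purely as an illustration:

    // Before this change: the request id is threaded through the handler so the
    // reply can be scheduled against it explicitly.
    pub(crate) fn document_symbol(
        &mut self,
        req_id: RequestId,
        params: DocumentSymbolParams,
    ) -> ScheduledResult {
        let path = as_path(params.text_document);
        run_query!(req_id, self.DocumentSymbol(path))
    }

    // After this change: the handler only sees the params and returns a ScheduleResult.
    pub(crate) fn document_symbol(&mut self, params: DocumentSymbolParams) -> ScheduleResult {
        let path = as_path(params.text_document);
        run_query!(self.DocumentSymbol(path))
    }
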
@ -329,9 +220,7 @@ impl ServerState {
            #[cfg(feature = "export")]
            OnExport(req) => return self.on_export(req),
            #[cfg(not(feature = "export"))]
            OnExport(_req) => {
                return Err(tinymist_std::error_once!("export feature is not enabled"))
            }
            OnExport(_req) => return Err(internal_error("export feature is not enabled")),
            ServerInfo(_) => return self.collect_server_info(),
            // todo: query on dedicate projects
            _ => return self.query_on(query),

@ -343,7 +232,9 @@ impl ServerState {
        type R = CompilerQueryResponse;
        assert!(query.fold_feature() != FoldRequestFeature::ContextFreeUnique);

        let (mut snap, stat) = self.query_snapshot_with_stat(&query)?;
        let (mut snap, stat) = self
            .query_snapshot_with_stat(&query)
            .map_err(internal_error)?;
        // todo: whether it is safe to inherit success_doc with changed entry
        if !self.is_pinning() {
            let input = query

@ -377,7 +268,7 @@ impl ServerState {
                }
            }

            match query {
            let res = match query {
                SemanticTokensFull(req) => snap.run_semantic(req, R::SemanticTokensFull),
                SemanticTokensDelta(req) => snap.run_semantic(req, R::SemanticTokensDelta),
                InteractCodeContext(req) => snap.run_semantic(req, R::InteractCodeContext),

@ -400,7 +291,9 @@ impl ServerState {
                WorkspaceLabel(req) => snap.run_semantic(req, R::WorkspaceLabel),
                DocumentMetrics(req) => snap.run_stateful(req, R::DocumentMetrics),
                _ => unreachable!(),
            }
            };

            res.map_err(internal_error)
        })
    }
}

@ -438,7 +438,7 @@ impl ServerState {
        let query_stats = self.project.analysis.report_query_stats();
        let alloc_stats = self.project.analysis.report_alloc_stats();

        let snap = self.snapshot()?;
        let snap = self.snapshot().map_err(internal_error)?;
        just_future(async move {
            let w = snap.world();

@ -8,7 +8,7 @@ use std::{ops::DerefMut, pin::Pin};

use reflexo::ImmutPath;
use reflexo_typst::{Bytes, CompilationTask, ExportComputation};
use sync_ls::just_future;
use sync_ls::{internal_error, just_future};
use tinymist_project::LspWorld;
use tinymist_query::OnExportRequest;
use tinymist_std::error::prelude::*;

@ -61,7 +61,7 @@ impl ServerState {
            }
        });

        let snap = self.snapshot()?;
        let snap = self.snapshot().map_err(internal_error)?;
        just_future(async move {
            let snap = snap.task(TaskInputs {
                entry: Some(entry),

@ -70,7 +70,9 @@ impl ServerState {

            let is_html = matches!(task, ProjectTask::ExportHtml { .. });
            let artifact = CompiledArtifact::from_graph(snap.clone(), is_html);
            let res = ExportTask::do_export(task, artifact, lock_dir).await?;
            let res = ExportTask::do_export(task, artifact, lock_dir)
                .await
                .map_err(internal_error)?;
            if let Some(update_dep) = update_dep {
                tokio::spawn(update_dep(snap));
            }
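
The other recurring change in these hunks is error handling at the request boundary: fallible calls such as `self.snapshot()`, `self.query_snapshot_with_stat(&query)`, and `ExportTask::do_export(..)` now convert their errors with `internal_error` (newly imported from `sync_ls`) instead of bubbling crate-local errors with `?` alone. A short sketch of the pattern, using only lines that appear in the diff above; the rest of the handler body is elided:

    // Map crate-local errors into LSP internal errors where the handler touches
    // fallible state, before the result crosses the sync-ls boundary.
    let snap = self.snapshot().map_err(internal_error)?;
    just_future(async move {
        let res = ExportTask::do_export(task, artifact, lock_dir)
            .await
            .map_err(internal_error)?;
        // ... rest of the handler as in the hunk above ...
    })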