Store server context in workspace

This commit is contained in:
Patrick Förster 2022-04-18 11:16:50 +02:00
parent b35c0d80c1
commit bc63a90137
22 changed files with 256 additions and 331 deletions

View file

@ -89,7 +89,7 @@ impl ComponentDatabase {
.iter()
.find(|metadata| metadata.name == name)?;
let desc = metadata.description.to_owned()?;
let desc = metadata.description.clone()?;
Some(MarkupContent {
kind: MarkupKind::PlainText,
value: desc,

View file

@ -1,34 +0,0 @@
use std::{
path::PathBuf,
sync::{Mutex, RwLock},
};
use lsp_types::{ClientCapabilities, ClientInfo};
use crate::{
distro::{DistributionKind, Resolver},
Options,
};
#[derive(Debug)]
pub struct ServerContext {
pub current_directory: PathBuf,
pub distro_kind: Mutex<DistributionKind>,
pub resolver: Mutex<Resolver>,
pub client_capabilities: Mutex<ClientCapabilities>,
pub client_info: Mutex<Option<ClientInfo>>,
pub options: RwLock<Options>,
}
impl ServerContext {
pub fn new(current_dir: PathBuf) -> Self {
Self {
current_directory: current_dir,
distro_kind: Mutex::new(DistributionKind::Unknown),
resolver: Mutex::new(Resolver::default()),
client_capabilities: Mutex::default(),
client_info: Mutex::default(),
options: RwLock::default(),
}
}
}

View file

@ -7,7 +7,7 @@ use std::{
use crossbeam_channel::Sender;
use dashmap::DashMap;
use crate::{Document, ServerContext, Uri, Workspace};
use crate::{Document, Uri, Workspace};
pub enum DiagnosticsMessage {
Analyze {
@ -23,7 +23,7 @@ pub struct DiagnosticsDebouncer {
}
impl DiagnosticsDebouncer {
pub fn launch<A>(context: Arc<ServerContext>, action: A) -> Self
pub fn launch<A>(action: A) -> Self
where
A: Fn(Workspace, Document) + Send + Clone + 'static,
{
@ -37,14 +37,7 @@ impl DiagnosticsDebouncer {
document,
}) = receiver.recv()
{
let delay = {
context
.options
.read()
.unwrap()
.diagnostics_delay
.unwrap_or(300)
};
let delay = workspace.options.diagnostics_delay.unwrap_or(300);
if let Some(time) = last_task_time_by_uri.get(&document.uri) {
if time.elapsed().as_millis() < delay as u128 {

View file

@ -8,7 +8,7 @@ use crate::{
bibtex, build_log,
latex::{self, LatexAnalyzerContext},
},
DocumentLanguage, ServerContext, Uri,
DocumentLanguage, Uri, Workspace,
};
#[derive(Debug, Clone)]
@ -86,7 +86,7 @@ impl fmt::Debug for Document {
impl Document {
pub fn parse(
context: &ServerContext,
workspace: &Workspace,
uri: Arc<Uri>,
text: Arc<String>,
language: DocumentLanguage,
@ -98,9 +98,9 @@ impl Document {
let green = latex::parse(&text).green;
let root = latex::SyntaxNode::new_root(green.clone());
let base_uri = match &context.options.read().unwrap().root_directory {
let base_uri = match &workspace.options.root_directory {
Some(root_dir) => {
let root_dir = context.current_directory.join(&root_dir);
let root_dir = workspace.current_directory.join(&root_dir);
Uri::from_directory_path(root_dir)
.map(Arc::new)
.unwrap_or_else(|()| Arc::clone(&uri))
@ -109,7 +109,7 @@ impl Document {
};
let mut context = LatexAnalyzerContext {
inner: context,
workspace,
extras: latex::Extras::default(),
document_uri: Arc::clone(&uri),
base_uri,

View file

@ -138,14 +138,10 @@ impl BuildEngine {
}
let path = document.uri.to_file_path().unwrap();
let supports_progress = {
request
.context
.client_capabilities
.lock()
.unwrap()
.has_work_done_progress_support()
};
let supports_progress = request
.workspace
.client_capabilities
.has_work_done_progress_support();
let token = format!("texlab-build-{}", Uuid::new_v4());
let progress_reporter = ProgressReporter {
@ -156,7 +152,7 @@ impl BuildEngine {
};
progress_reporter.start(&document.uri)?;
let options = { request.context.options.read().unwrap().clone() };
let options = &request.workspace.options;
let build_dir = options
.root_directory
@ -207,7 +203,6 @@ impl BuildEngine {
text_document: TextDocumentIdentifier::new(request.uri.as_ref().clone().into()),
},
uri: request.uri,
context: request.context,
workspace: request.workspace,
};
forward_search::execute_forward_search(request);

View file

@ -42,7 +42,7 @@ pub fn complete_imports<'a>(
items.push(item);
}
let resolver = context.request.context.resolver.lock().unwrap();
let resolver = &context.request.workspace.resolver;
for file_name in resolver
.files_by_name
.keys()

View file

@ -96,16 +96,14 @@ fn current_dir(
) -> Option<PathBuf> {
let mut path = context
.request
.context
.workspace
.options
.read()
.unwrap()
.root_directory
.as_ref()
.map(|root_directory| {
context
.request
.context
.workspace
.current_directory
.join(root_directory)
})

View file

@ -114,14 +114,11 @@ pub fn complete(request: FeatureRequest<CompletionParams>) -> Option<CompletionL
let is_incomplete = if context
.request
.context
.workspace
.client_info
.lock()
.unwrap()
.as_ref()
.map(|info| info.name.as_str())
.unwrap_or_default()
== "Visual Studio Code"
.as_ref()
.map_or(false, |info| info.name.as_str() == "Visual Studio Code")
{
true
} else {
@ -314,10 +311,8 @@ fn convert_internal_items(
InternalCompletionItemData::BeginCommand => {
if context
.request
.context
.workspace
.client_capabilities
.lock()
.unwrap()
.text_document
.as_ref()
.and_then(|cap| cap.completion.as_ref())

View file

@ -31,10 +31,8 @@ pub fn image_documentation(
fn supports_images(request: &FeatureRequest<CompletionParams>) -> bool {
request
.context
.workspace
.client_capabilities
.lock()
.unwrap()
.text_document
.as_ref()
.and_then(|cap| cap.completion.as_ref())
@ -48,10 +46,8 @@ pub fn adjust_kind(
kind: CompletionItemKind,
) -> CompletionItemKind {
if let Some(value_set) = request
.context
.workspace
.client_capabilities
.lock()
.unwrap()
.text_document
.as_ref()
.and_then(|cap| cap.completion.as_ref())

View file

@ -21,10 +21,8 @@ pub fn format_bibtex_internal(
let line_length = {
request
.context
.workspace
.options
.read()
.unwrap()
.formatter_line_length
.map(|value| {
if value <= 0 {

View file

@ -15,7 +15,7 @@ pub fn format_with_latexindent(
let directory = tempdir().ok()?;
let document = request.main_document();
let options = request.context.options.read().unwrap();
let options = &request.workspace.options;
let current_dir = options
.root_directory
.as_ref()
@ -41,8 +41,6 @@ pub fn format_with_latexindent(
let modify_line_breaks = options.latexindent.modify_line_breaks;
drop(options);
let path = directory.path();
let _ = fs::copy(
current_dir.join("localSettings.yaml"),

View file

@ -13,11 +13,11 @@ pub fn format_source_code(
request: FeatureRequest<DocumentFormattingParams>,
) -> Option<Vec<TextEdit>> {
let mut edits = None;
if request.context.options.read().unwrap().bibtex_formatter == BibtexFormatter::Texlab {
if request.workspace.options.bibtex_formatter == BibtexFormatter::Texlab {
edits = edits.or_else(|| format_bibtex_internal(&request));
}
if request.context.options.read().unwrap().latex_formatter == LatexFormatter::Texlab {
if request.workspace.options.latex_formatter == LatexFormatter::Texlab {
edits = edits.or_else(|| Some(vec![]));
}

View file

@ -28,15 +28,7 @@ pub struct ForwardSearchResult {
pub fn execute_forward_search(
request: FeatureRequest<TextDocumentPositionParams>,
) -> Option<ForwardSearchResult> {
let options = {
request
.context
.options
.read()
.unwrap()
.forward_search
.clone()
};
let options = &request.workspace.options.forward_search;
if options.executable.is_none() || options.args.is_none() {
return Some(ForwardSearchResult {
@ -76,14 +68,15 @@ pub fn execute_forward_search(
let args: Vec<String> = options
.args
.as_ref()
.unwrap()
.into_iter()
.iter()
.flat_map(|arg| {
replace_placeholder(&tex_path, &pdf_path, request.params.position.line, arg)
})
.collect();
let status = match run_process(options.executable.unwrap(), args) {
let status = match run_process(options.executable.as_ref().unwrap(), args) {
Ok(()) => ForwardSearchStatus::SUCCESS,
Err(why) => {
error!("Unable to execute forward search: {}", why);
@ -97,10 +90,10 @@ fn replace_placeholder(
tex_file: &Path,
pdf_file: &Path,
line_number: u32,
argument: String,
argument: &str,
) -> Option<String> {
let result = if argument.starts_with('"') || argument.ends_with('"') {
argument
argument.to_string()
} else {
argument
.replace("%f", tex_file.to_str()?)
@ -110,7 +103,7 @@ fn replace_placeholder(
Some(result)
}
fn run_process(executable: String, args: Vec<String>) -> io::Result<()> {
fn run_process(executable: &str, args: Vec<String>) -> io::Result<()> {
Command::new(executable)
.args(args)
.stdin(Stdio::null())

View file

@ -16,7 +16,7 @@ mod symbol;
use std::sync::Arc;
use crate::{Document, ServerContext, Uri, Workspace};
use crate::{Document, Uri, Workspace};
#[cfg(feature = "completion")]
pub use self::completion::{complete, CompletionItemData, COMPLETION_LIMIT};
@ -36,7 +36,6 @@ pub use self::{
#[derive(Clone)]
pub struct FeatureRequest<P> {
pub context: Arc<ServerContext>,
pub params: P,
pub workspace: Workspace,
pub uri: Arc<Uri>,
@ -61,10 +60,7 @@ mod testing {
};
use typed_builder::TypedBuilder;
use crate::{
distro::Resolver, DocumentLanguage, DocumentVisibility, Options, ServerContext, Uri,
Workspace,
};
use crate::{distro::Resolver, DocumentLanguage, DocumentVisibility, Options, Uri, Workspace};
use super::*;
@ -124,29 +120,22 @@ mod testing {
TextDocumentIdentifier::new(uri.as_ref().clone().into())
}
fn context(&self) -> Arc<ServerContext> {
let cx = ServerContext::new(self.current_directory.clone());
*cx.client_capabilities.lock().unwrap() = self.client_capabilities.clone();
*cx.client_info.lock().unwrap() = self.client_info.clone();
*cx.options.write().unwrap() = self.options();
*cx.resolver.lock().unwrap() = self.resolver.clone();
Arc::new(cx)
}
fn workspace(&self) -> Workspace {
let mut workspace = Workspace {
client_capabilities: Arc::new(self.client_capabilities.clone()),
client_info: Arc::new(self.client_info.clone()),
options: Arc::new(self.options()),
resolver: Arc::new(self.resolver.clone()),
..Workspace::default()
};
fn workspace(&self, cx: &ServerContext) -> Workspace {
let mut workspace = Workspace::default();
for (name, source_code) in &self.files {
let uri = self.uri(name);
let path = uri.to_file_path().unwrap();
let text = Arc::new(source_code.trim().to_string());
let language = DocumentLanguage::by_path(&path).expect("unknown document language");
workspace
.open(
cx,
uri,
Arc::new(source_code.trim().to_string()),
language,
DocumentVisibility::Visible,
)
.open(uri, text, language, DocumentVisibility::Visible)
.unwrap();
}
@ -154,11 +143,9 @@ mod testing {
}
fn request<P>(&self, params: P) -> FeatureRequest<P> {
let context = self.context();
let workspace = self.workspace(&context);
let workspace = self.workspace();
let uri = self.uri(self.main);
FeatureRequest {
context,
params,
workspace: workspace.slice(&uri),
uri,

View file

@ -10,7 +10,7 @@ use lsp_types::{
TextDocumentIdentifier, WorkDoneProgressParams, WorkspaceSymbolParams,
};
use crate::{ClientCapabilitiesExt, ServerContext, Uri, Workspace};
use crate::{ClientCapabilitiesExt, Uri, Workspace};
use self::{
bibtex::find_bibtex_symbols, latex::find_latex_symbols, project_order::ProjectOrdering,
@ -23,10 +23,8 @@ pub fn find_document_symbols(req: FeatureRequest<DocumentSymbolParams>) -> Docum
find_latex_symbols(&req, &mut buf);
find_bibtex_symbols(&req, &mut buf);
if req
.context
.workspace
.client_capabilities
.lock()
.unwrap()
.has_hierarchical_document_symbol_support()
{
DocumentSymbolResponse::Nested(
@ -55,7 +53,6 @@ struct WorkspaceSymbol {
}
pub fn find_workspace_symbols(
context: Arc<ServerContext>,
workspace: &Workspace,
params: &WorkspaceSymbolParams,
) -> Vec<SymbolInformation> {
@ -63,7 +60,6 @@ pub fn find_workspace_symbols(
for document in workspace.documents_by_uri.values() {
let request = FeatureRequest {
context: Arc::clone(&context),
uri: Arc::clone(&document.uri),
params: DocumentSymbolParams {
text_document: TextDocumentIdentifier::new(document.uri.as_ref().clone().into()),

View file

@ -107,17 +107,18 @@ mod tests {
use anyhow::Result;
use crate::{DocumentLanguage, DocumentVisibility, ServerContext};
use crate::{DocumentLanguage, DocumentVisibility};
use super::*;
#[test]
fn test_no_cycles() -> Result<()> {
let context = ServerContext::new(std::env::temp_dir());
let mut workspace = Workspace::default();
let mut workspace = Workspace {
current_directory: Arc::new(std::env::temp_dir()),
..Workspace::default()
};
let a = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/a.tex")?),
Arc::new(String::new()),
DocumentLanguage::Latex,
@ -125,7 +126,6 @@ mod tests {
)?;
let b = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/b.tex")?),
Arc::new(String::new()),
DocumentLanguage::Latex,
@ -133,7 +133,6 @@ mod tests {
)?;
let c = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/c.tex")?),
Arc::new(r#"\include{b}\include{a}"#.to_string()),
DocumentLanguage::Latex,
@ -150,11 +149,12 @@ mod tests {
#[test]
fn test_cycles() -> Result<()> {
let context = ServerContext::new(std::env::temp_dir());
let mut workspace = Workspace::default();
let mut workspace = Workspace {
current_directory: Arc::new(std::env::temp_dir()),
..Workspace::default()
};
let a = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/a.tex")?),
Arc::new(r#"\include{b}"#.to_string()),
DocumentLanguage::Latex,
@ -162,7 +162,6 @@ mod tests {
)?;
let b = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/b.tex")?),
Arc::new(r#"\include{a}"#.to_string()),
DocumentLanguage::Latex,
@ -170,7 +169,6 @@ mod tests {
)?;
let c = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/c.tex")?),
Arc::new(r#"\include{a}"#.to_string()),
DocumentLanguage::Latex,
@ -187,11 +185,12 @@ mod tests {
#[test]
fn test_multiple_roots() -> Result<()> {
let context = ServerContext::new(std::env::temp_dir());
let mut workspace = Workspace::default();
let mut workspace = Workspace {
current_directory: Arc::new(std::env::temp_dir()),
..Workspace::default()
};
let a = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/a.tex")?),
Arc::new(r#"\include{b}"#.to_string()),
DocumentLanguage::Latex,
@ -199,7 +198,6 @@ mod tests {
)?;
let b = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/b.tex")?),
Arc::new(r#""#.to_string()),
DocumentLanguage::Latex,
@ -207,7 +205,6 @@ mod tests {
)?;
let c = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/c.tex")?),
Arc::new(r#""#.to_string()),
DocumentLanguage::Latex,
@ -215,7 +212,6 @@ mod tests {
)?;
let d = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/d.tex")?),
Arc::new(r#"\include{c}"#.to_string()),
DocumentLanguage::Latex,

View file

@ -3,7 +3,6 @@ mod capabilities;
pub mod citation;
mod client;
pub mod component_db;
mod context;
pub mod diagnostics;
mod dispatch;
pub mod distro;
@ -24,7 +23,6 @@ mod workspace;
pub use self::{
capabilities::ClientCapabilitiesExt,
context::ServerContext,
document::*,
label::*,
lang_data::*,

View file

@ -4,7 +4,7 @@ use std::{
};
use anyhow::Result;
use crossbeam_channel::Sender;
use crossbeam_channel::{Receiver, Sender};
use log::{error, info, warn};
use lsp_server::{Connection, Message, RequestId};
use lsp_types::{notification::*, request::*, *};
@ -25,13 +25,20 @@ use crate::{
},
req_queue::{IncomingData, ReqQueue},
ClientCapabilitiesExt, DocumentLanguage, DocumentVisibility, LineIndex, LineIndexExt, Options,
ServerContext, Uri, Workspace, WorkspaceEvent,
Uri, Workspace, WorkspaceEvent,
};
#[derive(Debug)]
enum InternalMessage {
SetDistro(Distribution),
SetOptions(Options),
}
#[derive(Clone)]
pub struct Server {
connection: Arc<Connection>,
context: Arc<ServerContext>,
internal_tx: Sender<InternalMessage>,
internal_rx: Receiver<InternalMessage>,
req_queue: Arc<Mutex<ReqQueue>>,
workspace: Workspace,
static_debouncer: Arc<DiagnosticsDebouncer>,
@ -47,26 +54,26 @@ impl Server {
current_dir: PathBuf,
load_resolver: bool,
) -> Result<Self> {
let context = Arc::new(ServerContext::new(current_dir));
let req_queue = Arc::default();
let workspace = Workspace::default();
let workspace = Workspace {
current_directory: Arc::new(current_dir),
..Workspace::default()
};
let diag_manager = Arc::new(Mutex::new(DiagnosticsManager::default()));
let static_debouncer = Arc::new(create_static_debouncer(
Arc::clone(&diag_manager),
&connection,
Arc::clone(&context),
));
let chktex_debouncer = Arc::new(create_chktex_debouncer(
diag_manager,
&connection,
Arc::clone(&context),
));
let chktex_debouncer = Arc::new(create_chktex_debouncer(diag_manager, &connection));
let (internal_tx, internal_rx) = crossbeam_channel::unbounded();
Ok(Self {
connection: Arc::new(connection),
context,
internal_tx,
internal_rx,
req_queue,
workspace,
static_debouncer,
@ -77,8 +84,9 @@ impl Server {
})
}
fn spawn(&self, job: impl FnOnce() + Send + 'static) {
self.pool.lock().unwrap().execute(job);
fn spawn(&self, job: impl FnOnce(Self) + Send + 'static) {
let server = self.clone();
self.pool.lock().unwrap().execute(move || job(server));
}
fn capabilities(&self) -> ServerCapabilities {
@ -131,8 +139,8 @@ impl Server {
let (id, params) = self.connection.initialize_start()?;
let params: InitializeParams = serde_json::from_value(params)?;
*self.context.client_capabilities.lock().unwrap() = params.capabilities;
*self.context.client_info.lock().unwrap() = params.client_info;
self.workspace.client_capabilities = Arc::new(params.capabilities);
self.workspace.client_info = Arc::new(params.client_info);
let result = InitializeResult {
capabilities: self.capabilities(),
@ -144,35 +152,33 @@ impl Server {
self.connection
.initialize_finish(id, serde_json::to_value(result)?)?;
let cx = Arc::clone(&self.context);
if self.load_resolver {
self.spawn(move || {
self.spawn(move |server| {
let distro = Distribution::detect();
info!("Detected distribution: {}", distro.kind);
*cx.resolver.lock().unwrap() = distro.resolver;
server
.internal_tx
.send(InternalMessage::SetDistro(distro))
.unwrap();
});
}
self.register_diagnostics_handler();
let server = self.clone();
self.spawn(move || {
self.spawn(move |server| {
server.register_config_capability();
server.register_file_watching();
server.pull_config();
});
self.pull_config();
self.reparse_all()?;
Ok(())
}
fn register_file_watching(&self) {
if self
.context
.workspace
.client_capabilities
.lock()
.unwrap()
.has_file_watching_support()
{
let options = DidChangeWatchedFilesRegistrationOptions {
@ -205,9 +211,11 @@ impl Server {
}
fn register_config_capability(&self) {
let client_capabilities = self.context.client_capabilities.lock().unwrap();
if client_capabilities.has_push_configuration_support() {
drop(client_capabilities);
if self
.workspace
.client_capabilities
.has_push_configuration_support()
{
let reg = Registration {
id: "pull-config".to_string(),
method: DidChangeConfiguration::METHOD.to_string(),
@ -258,10 +266,8 @@ impl Server {
fn pull_config(&self) {
if !self
.context
.workspace
.client_capabilities
.lock()
.unwrap()
.has_pull_configuration_support()
{
return;
@ -281,7 +287,7 @@ impl Server {
) {
Ok(mut json) => {
let value = json.pop().expect("invalid configuration request");
let new_options = match serde_json::from_value(value) {
let options = match serde_json::from_value(value) {
Ok(new_options) => new_options,
Err(why) => {
warn!("Invalid configuration section \"texlab\": {}", why);
@ -289,8 +295,9 @@ impl Server {
}
};
let mut options = self.context.options.write().unwrap();
*options = new_options;
self.internal_tx
.send(InternalMessage::SetOptions(options))
.unwrap();
}
Err(why) => {
error!("Retrieving configuration failed: {}", why);
@ -316,7 +323,7 @@ impl Server {
let uri = Uri::from(change.uri);
match change.typ {
FileChangeType::CREATED | FileChangeType::CHANGED => {
self.workspace.reload(&self.context, path)?;
self.workspace.reload(path)?;
}
FileChangeType::DELETED => {
self.workspace.documents_by_uri.remove(&uri);
@ -330,14 +337,18 @@ impl Server {
}
fn did_change_configuration(&mut self, params: DidChangeConfigurationParams) -> Result<()> {
let client_capabilities = { self.context.client_capabilities.lock().unwrap().clone() };
if client_capabilities.has_pull_configuration_support() {
self.pull_config();
self.reparse_all()?;
if self
.workspace
.client_capabilities
.has_pull_configuration_support()
{
self.spawn(move |server| {
server.pull_config();
});
} else {
match serde_json::from_value(params.settings) {
Ok(new_options) => {
*self.context.options.write().unwrap() = new_options;
Ok(options) => {
self.workspace.options = options;
}
Err(why) => {
error!("Invalid configuration: {}", why);
@ -354,19 +365,17 @@ impl Server {
let language_id = &params.text_document.language_id;
let language = DocumentLanguage::by_language_id(language_id);
let document = self.workspace.open(
&self.context,
Arc::new(params.text_document.uri.into()),
Arc::new(params.text_document.text),
language.unwrap_or(DocumentLanguage::Latex),
DocumentVisibility::Visible,
)?;
let should_lint = { self.context.options.read().unwrap().chktex.on_open_and_save };
if let Some(document) = self
.workspace
.documents_by_uri
.get(document.uri.as_ref())
.filter(|_| should_lint)
.filter(|_| self.workspace.options.chktex.on_open_and_save)
.cloned()
{
self.chktex_debouncer
@ -387,7 +396,6 @@ impl Server {
apply_document_edit(&mut text, params.content_changes);
let language = old_document.data.language();
let new_document = self.workspace.open(
&self.context,
Arc::clone(&uri),
Arc::new(text),
language,
@ -407,7 +415,7 @@ impl Server {
),
);
if self.context.options.read().unwrap().chktex.on_edit {
if self.workspace.options.chktex.on_edit {
self.chktex_debouncer
.sender
.send(DiagnosticsMessage::Analyze {
@ -418,7 +426,7 @@ impl Server {
}
None => match uri.to_file_path() {
Ok(path) => {
self.workspace.load(&self.context, path)?;
self.workspace.load(path)?;
}
Err(_) => return Ok(()),
},
@ -430,12 +438,11 @@ impl Server {
fn did_save(&self, params: DidSaveTextDocumentParams) -> Result<()> {
let uri = Uri::from(params.text_document.uri);
let should_build = { self.context.options.read().unwrap().build.on_save };
if let Some(request) = self
.workspace
.documents_by_uri
.get(&uri)
.filter(|_| should_build)
.filter(|_| self.workspace.options.build.on_save)
.map(|document| {
self.feature_request(
Arc::clone(&document.uri),
@ -445,12 +452,10 @@ impl Server {
)
})
{
let lsp_sender = self.connection.sender.clone();
let req_queue = Arc::clone(&self.req_queue);
let build_engine = Arc::clone(&self.build_engine);
self.spawn(move || {
build_engine
.build(request, &req_queue, &lsp_sender)
self.spawn(move |server| {
server
.build_engine
.build(request, &server.req_queue, &server.connection.sender)
.unwrap_or_else(|why| {
error!("Build failed: {}", why);
BuildResult {
@ -460,12 +465,11 @@ impl Server {
});
}
let should_lint = { self.context.options.read().unwrap().chktex.on_open_and_save };
if let Some(document) = self
.workspace
.documents_by_uri
.get(&uri)
.filter(|_| should_lint)
.filter(|_| self.workspace.options.chktex.on_open_and_save)
.cloned()
{
self.chktex_debouncer
@ -486,7 +490,6 @@ impl Server {
fn feature_request<P>(&self, uri: Arc<Uri>, params: P) -> FeatureRequest<P> {
FeatureRequest {
context: Arc::clone(&self.context),
params,
workspace: self.workspace.slice(&uri),
uri,
@ -505,11 +508,12 @@ impl Server {
R: Serialize,
H: FnOnce(FeatureRequest<P>) -> R + Send + 'static,
{
let request = self.feature_request(uri, params);
let sender = self.connection.sender.clone();
self.spawn(move || {
self.spawn(move |server| {
let request = server.feature_request(uri, params);
let result = handler(request);
sender
server
.connection
.sender
.send(lsp_server::Response::new_ok(id, result).into())
.unwrap();
});
@ -530,12 +534,11 @@ impl Server {
}
fn workspace_symbols(&self, id: RequestId, params: WorkspaceSymbolParams) -> Result<()> {
let sender = self.connection.sender.clone();
let context = Arc::clone(&self.context);
let workspace = self.workspace.clone();
self.spawn(move || {
let result = find_workspace_symbols(context, &workspace, &params);
sender
self.spawn(move |server| {
let result = find_workspace_symbols(&server.workspace, &params);
server
.connection
.sender
.send(lsp_server::Response::new_ok(id, result).into())
.unwrap();
});
@ -563,9 +566,7 @@ impl Server {
#[cfg(feature = "completion")]
fn completion_resolve(&self, id: RequestId, mut item: CompletionItem) -> Result<()> {
let sender = self.connection.sender.clone();
let workspace = self.workspace.clone();
self.spawn(move || {
self.spawn(move |server| {
match serde_json::from_value(item.data.clone().unwrap()).unwrap() {
crate::features::CompletionItemData::Package
| crate::features::CompletionItemData::Class => {
@ -575,7 +576,7 @@ impl Server {
}
#[cfg(feature = "citation")]
crate::features::CompletionItemData::Citation { uri, key } => {
if let Some(document) = workspace.documents_by_uri.get(&uri) {
if let Some(document) = server.workspace.documents_by_uri.get(&uri) {
if let Some(data) = document.data.as_bibtex() {
let markup = crate::citation::render_citation(
&crate::syntax::bibtex::SyntaxNode::new_root(data.green.clone()),
@ -588,8 +589,9 @@ impl Server {
_ => {}
};
drop(workspace);
sender
server
.connection
.sender
.send(lsp_server::Response::new_ok(id, item).into())
.unwrap();
});
@ -729,7 +731,6 @@ impl Server {
.collect::<Vec<_>>()
{
self.workspace.open(
&self.context,
Arc::clone(&document.uri),
document.text.clone(),
document.data.language(),
@ -741,83 +742,97 @@ impl Server {
}
fn process_messages(&mut self) -> Result<()> {
let receiver = self.connection.receiver.clone();
for msg in &receiver {
match msg {
Message::Request(request) => {
if self.connection.handle_shutdown(&request)? {
return Ok(());
}
loop {
crossbeam_channel::select! {
recv(&self.connection.receiver) -> msg => {
match msg? {
Message::Request(request) => {
if self.connection.handle_shutdown(&request)? {
return Ok(());
}
self.register_incoming_request(request.id.clone());
if let Some(response) = RequestDispatcher::new(request)
.on::<DocumentLinkRequest, _>(|id, params| self.document_link(id, params))?
.on::<FoldingRangeRequest, _>(|id, params| self.folding_range(id, params))?
.on::<References, _>(|id, params| self.references(id, params))?
.on::<HoverRequest, _>(|id, params| self.hover(id, params))?
.on::<DocumentSymbolRequest, _>(|id, params| {
self.document_symbols(id, params)
})?
.on::<WorkspaceSymbol, _>(|id, params| self.workspace_symbols(id, params))?
.on::<Completion, _>(|id, params| {
#[cfg(feature = "completion")]
self.completion(id, params)?;
Ok(())
})?
.on::<ResolveCompletionItem, _>(|id, params| {
#[cfg(feature = "completion")]
self.completion_resolve(id, params)?;
Ok(())
})?
.on::<GotoDefinition, _>(|id, params| self.goto_definition(id, params))?
.on::<PrepareRenameRequest, _>(|id, params| {
self.prepare_rename(id, params)
})?
.on::<Rename, _>(|id, params| self.rename(id, params))?
.on::<DocumentHighlightRequest, _>(|id, params| {
self.document_highlight(id, params)
})?
.on::<Formatting, _>(|id, params| self.formatting(id, params))?
.on::<BuildRequest, _>(|id, params| self.build(id, params))?
.on::<ForwardSearchRequest, _>(|id, params| {
self.forward_search(id, params)
})?
.on::<SemanticTokensRangeRequest, _>(|id, params| {
self.semantic_tokens_range(id, params)
})?
.default()
{
self.connection.sender.send(response.into())?;
}
self.register_incoming_request(request.id.clone());
if let Some(response) = RequestDispatcher::new(request)
.on::<DocumentLinkRequest, _>(|id, params| self.document_link(id, params))?
.on::<FoldingRangeRequest, _>(|id, params| self.folding_range(id, params))?
.on::<References, _>(|id, params| self.references(id, params))?
.on::<HoverRequest, _>(|id, params| self.hover(id, params))?
.on::<DocumentSymbolRequest, _>(|id, params| {
self.document_symbols(id, params)
})?
.on::<WorkspaceSymbol, _>(|id, params| self.workspace_symbols(id, params))?
.on::<Completion, _>(|id, params| {
#[cfg(feature = "completion")]
self.completion(id, params)?;
Ok(())
})?
.on::<ResolveCompletionItem, _>(|id, params| {
#[cfg(feature = "completion")]
self.completion_resolve(id, params)?;
Ok(())
})?
.on::<GotoDefinition, _>(|id, params| self.goto_definition(id, params))?
.on::<PrepareRenameRequest, _>(|id, params| {
self.prepare_rename(id, params)
})?
.on::<Rename, _>(|id, params| self.rename(id, params))?
.on::<DocumentHighlightRequest, _>(|id, params| {
self.document_highlight(id, params)
})?
.on::<Formatting, _>(|id, params| self.formatting(id, params))?
.on::<BuildRequest, _>(|id, params| self.build(id, params))?
.on::<ForwardSearchRequest, _>(|id, params| {
self.forward_search(id, params)
})?
.on::<SemanticTokensRangeRequest, _>(|id, params| {
self.semantic_tokens_range(id, params)
})?
.default()
{
self.connection.sender.send(response.into())?;
}
}
Message::Notification(notification) => {
NotificationDispatcher::new(notification)
.on::<Cancel, _>(|params| self.cancel(params))?
.on::<DidChangeConfiguration, _>(|params| {
self.did_change_configuration(params)
})?
.on::<DidChangeWatchedFiles, _>(|params| {
self.did_change_watched_files(params)
})?
.on::<DidOpenTextDocument, _>(|params| self.did_open(params))?
.on::<DidChangeTextDocument, _>(|params| self.did_change(params))?
.on::<DidSaveTextDocument, _>(|params| self.did_save(params))?
.on::<DidCloseTextDocument, _>(|params| self.did_close(params))?
.default();
}
Message::Response(response) => {
let mut req_queue = self.req_queue.lock().unwrap();
if let Some(data) = req_queue.outgoing.complete(response.id) {
let result = match response.error {
Some(error) => Err(error),
None => Ok(response.result.unwrap_or_default()),
};
data.sender.send(result)?;
}
}
};
},
recv(&self.internal_rx) -> msg => {
match msg? {
InternalMessage::SetDistro(distro) => {
self.workspace.resolver = Arc::new(distro.resolver);
self.reparse_all()?;
}
InternalMessage::SetOptions(options) => {
self.workspace.options = Arc::new(options);
self.reparse_all()?;
}
};
}
Message::Notification(notification) => {
NotificationDispatcher::new(notification)
.on::<Cancel, _>(|params| self.cancel(params))?
.on::<DidChangeConfiguration, _>(|params| {
self.did_change_configuration(params)
})?
.on::<DidChangeWatchedFiles, _>(|params| {
self.did_change_watched_files(params)
})?
.on::<DidOpenTextDocument, _>(|params| self.did_open(params))?
.on::<DidChangeTextDocument, _>(|params| self.did_change(params))?
.on::<DidSaveTextDocument, _>(|params| self.did_save(params))?
.on::<DidCloseTextDocument, _>(|params| self.did_close(params))?
.default();
}
Message::Response(response) => {
let mut req_queue = self.req_queue.lock().unwrap();
if let Some(data) = req_queue.outgoing.complete(response.id) {
let result = match response.error {
Some(error) => Err(error),
None => Ok(response.result.unwrap_or_default()),
};
data.sender.send(result)?;
}
}
}
};
}
Ok(())
}
pub fn run(mut self) -> Result<()> {
@ -833,10 +848,9 @@ impl Server {
fn create_static_debouncer(
manager: Arc<Mutex<DiagnosticsManager>>,
conn: &Connection,
context: Arc<ServerContext>,
) -> DiagnosticsDebouncer {
let sender = conn.sender.clone();
DiagnosticsDebouncer::launch(context, move |workspace, document| {
DiagnosticsDebouncer::launch(move |workspace, document| {
let mut manager = manager.lock().unwrap();
manager.update_static(&workspace, Arc::clone(&document.uri));
if let Err(why) = publish_diagnostics(&sender, &workspace, &manager) {
@ -848,13 +862,11 @@ fn create_static_debouncer(
fn create_chktex_debouncer(
manager: Arc<Mutex<DiagnosticsManager>>,
conn: &Connection,
context: Arc<ServerContext>,
) -> DiagnosticsDebouncer {
let sender = conn.sender.clone();
DiagnosticsDebouncer::launch(Arc::clone(&context), move |workspace, document| {
let options = { context.options.read().unwrap().clone() };
DiagnosticsDebouncer::launch(move |workspace, document| {
let mut manager = manager.lock().unwrap();
manager.update_chktex(&workspace, Arc::clone(&document.uri), &options);
manager.update_chktex(&workspace, Arc::clone(&document.uri), &workspace.options);
if let Err(why) = publish_diagnostics(&sender, &workspace, &manager) {
warn!("Failed to publish diagnostics: {}", why);
}

View file

@@ -33,7 +33,7 @@ pub fn analyze_include(context: &mut LatexAnalyzerContext, node: latex::SyntaxNo
targets.push(Arc::new(context.base_uri.join(&path).ok()?.into()));
}
resolve_distro_file(&context.inner.resolver.lock().unwrap(), &stem, extensions)
resolve_distro_file(&context.workspace.resolver, &stem, extensions)
.into_iter()
.for_each(|target| targets.push(Arc::new(target)));

View file

@@ -17,10 +17,10 @@ fn find_by_extension(context: &LatexAnalyzerContext, extension: &str) -> Option<
let file_stem = file_path.file_stem()?;
let aux_name = format!("{}.{}", file_stem.to_str()?, extension);
let options = context.inner.options.read().unwrap();
let options = &context.workspace.options;
if let Some(root_dir) = options.root_directory.as_ref() {
let path = context
.inner
.workspace
.current_directory
.join(root_dir)
.join(&aux_name);
@@ -29,7 +29,7 @@ fn find_by_extension(context: &LatexAnalyzerContext, extension: &str) -> Option<
if let Some(build_dir) = options.aux_directory.as_ref() {
let path = context
.inner
.workspace
.current_directory
.join(build_dir)
.join(&aux_name);

View file

@@ -4,11 +4,11 @@ use rowan::TextRange;
use rustc_hash::{FxHashMap, FxHashSet};
use smol_str::SmolStr;
use crate::{ServerContext, Uri};
use crate::{Uri, Workspace};
#[derive(Debug)]
pub struct LatexAnalyzerContext<'a> {
pub inner: &'a ServerContext,
pub workspace: &'a Workspace,
pub document_uri: Arc<Uri>,
pub base_uri: Arc<Uri>,
pub extras: Extras,

View file

@@ -2,12 +2,13 @@ use std::{fs, path::PathBuf, sync::Arc};
use anyhow::Result;
use crossbeam_channel::Sender;
use lsp_types::{ClientCapabilities, ClientInfo};
use petgraph::{graphmap::UnGraphMap, visit::Dfs};
use rustc_hash::FxHashSet;
use crate::{
component_db::COMPONENT_DATABASE, Document, DocumentLanguage, DocumentVisibility,
ServerContext, Uri,
component_db::COMPONENT_DATABASE, distro::Resolver, Document, DocumentLanguage,
DocumentVisibility, Options, Uri,
};
#[derive(Debug, Clone)]
@@ -19,19 +20,23 @@ pub enum WorkspaceEvent {
/// The server's view of all open/loaded documents plus the server-wide
/// state (options, resolver, client info) that this commit moves here
/// from the former `ServerContext`.
pub struct Workspace {
/// All known documents, keyed by their URI.
pub documents_by_uri: im::HashMap<Arc<Uri>, Document>,
/// Channels that are notified with a `WorkspaceEvent` when the workspace changes.
pub listeners: im::Vector<Sender<WorkspaceEvent>>,
/// Base directory used to resolve relative paths (e.g. joined with the
/// configured root/aux directories when searching for build artifacts).
pub current_directory: Arc<PathBuf>,
/// Capabilities advertised by the LSP client.
pub client_capabilities: Arc<ClientCapabilities>,
/// Name/version info advertised by the LSP client, if it sent any.
pub client_info: Arc<Option<ClientInfo>>,
/// User configuration (e.g. `root_directory`, `aux_directory`, `diagnostics_delay`).
pub options: Arc<Options>,
/// Resolver for files shipped with the installed TeX distribution
/// (used by `resolve_distro_file`).
pub resolver: Arc<Resolver>,
}
impl Workspace {
pub fn open(
&mut self,
context: &ServerContext,
uri: Arc<Uri>,
text: Arc<String>,
language: DocumentLanguage,
visibility: DocumentVisibility,
) -> Result<Document> {
log::debug!("(Re)Loading document: {}", uri);
let document = Document::parse(context, Arc::clone(&uri), text, language, visibility);
let document = Document::parse(self, Arc::clone(&uri), text, language, visibility);
self.documents_by_uri
.insert(Arc::clone(&uri), document.clone());
@@ -40,12 +45,12 @@ impl Workspace {
listener.send(WorkspaceEvent::Changed(self.clone(), document.clone()))?;
}
self.expand_parent(context, &document);
self.expand_children(context, &document);
self.expand_parent(&document);
self.expand_children(&document);
Ok(document)
}
pub fn reload(&mut self, context: &ServerContext, path: PathBuf) -> Result<Option<Document>> {
pub fn reload(&mut self, path: PathBuf) -> Result<Option<Document>> {
let uri = Arc::new(Uri::from_file_path(path.clone()).unwrap());
if self.is_open(&uri) && !uri.as_str().ends_with(".log") {
@@ -56,7 +61,6 @@ impl Workspace {
let data = fs::read(&path)?;
let text = Arc::new(String::from_utf8_lossy(&data).into_owned());
Ok(Some(self.open(
context,
uri,
text,
language,
@@ -67,7 +71,7 @@ impl Workspace {
}
}
pub fn load(&mut self, context: &ServerContext, path: PathBuf) -> Result<Option<Document>> {
pub fn load(&mut self, path: PathBuf) -> Result<Option<Document>> {
let uri = Arc::new(Uri::from_file_path(path.clone()).unwrap());
if let Some(document) = self.documents_by_uri.get(&uri).cloned() {
@@ -78,7 +82,6 @@ impl Workspace {
let text = Arc::new(String::from_utf8_lossy(&data).into_owned());
if let Some(language) = DocumentLanguage::by_path(&path) {
Ok(Some(self.open(
context,
uri,
text,
language,
@@ -133,7 +136,8 @@ impl Workspace {
}
}
let mut slice = Workspace::default();
let mut slice = self.clone();
slice.documents_by_uri = im::HashMap::new();
let graph = UnGraphMap::from_edges(edges);
let mut dfs = Dfs::new(&graph, start);
while let Some(i) = dfs.next(&graph) {
@@ -165,7 +169,7 @@ impl Workspace {
.cloned()
}
fn expand_parent(&mut self, context: &ServerContext, document: &Document) {
fn expand_parent(&mut self, document: &Document) {
let all_current_paths = self
.documents_by_uri
.values()
@@ -189,14 +193,14 @@ impl Workspace {
})
.filter(|path| !all_current_paths.contains(path))
.for_each(|path| {
let _ = self.load(context, path);
let _ = self.load(path);
});
}
}
}
}
fn expand_children(&mut self, context: &ServerContext, document: &Document) {
fn expand_children(&mut self, document: &Document) {
if let Some(data) = document.data.as_latex() {
let extras = &data.extras;
let mut all_targets = vec![&extras.implicit_links.aux, &extras.implicit_links.log];
@@ -216,7 +220,7 @@ impl Workspace {
.filter(|uri| uri.scheme() == "file" && uri.fragment().is_none())
.filter_map(|uri| uri.to_file_path().ok())
{
if self.load(context, path).is_ok() {
if self.load(path).is_ok() {
break;
}
}