Simplify workspace structure

This commit is contained in:
Patrick Förster 2022-04-17 21:49:44 +02:00
parent 22eacb2e75
commit 6644dacf2b
23 changed files with 454 additions and 618 deletions

49
Cargo.lock generated
View file

@ -73,6 +73,15 @@ version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bitmaps"
version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "031043d04099746d8db04daf1fa424b2bc8bd69d92b25962dcde24da39ab64a2"
dependencies = [
"typenum",
]
[[package]]
name = "bitvec"
version = "0.19.6"
@ -743,6 +752,20 @@ dependencies = [
"unicode-normalization",
]
[[package]]
name = "im"
version = "15.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "111c1983f3c5bb72732df25cddacee9b546d08325fb584b5ebd38148be7b0246"
dependencies = [
"bitmaps",
"rand_core",
"rand_xoshiro",
"sized-chunks",
"typenum",
"version_check",
]
[[package]]
name = "indexmap"
version = "1.8.0"
@ -1374,6 +1397,15 @@ dependencies = [
"rand_core",
]
[[package]]
name = "rand_xoshiro"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9fcdd2e881d02f1d9390ae47ad8e5696a9e4be7b547a1da2afbc61973217004"
dependencies = [
"rand_core",
]
[[package]]
name = "rayon"
version = "1.5.1"
@ -1628,6 +1660,16 @@ version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a86232ab60fa71287d7f2ddae4a7073f6b7aac33631c3015abb556f08c6d0a3e"
[[package]]
name = "sized-chunks"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "16d69225bde7a69b235da73377861095455d298f2b970996eec25ddbb42b3d1e"
dependencies = [
"bitmaps",
"typenum",
]
[[package]]
name = "smallvec"
version = "1.8.0"
@ -1795,6 +1837,7 @@ dependencies = [
"fnv",
"fuzzy-matcher",
"html2md",
"im",
"indoc",
"insta",
"itertools 0.10.3",
@ -1938,6 +1981,12 @@ dependencies = [
"syn",
]
[[package]]
name = "typenum"
version = "1.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
[[package]]
name = "ucd-trie"
version = "0.1.3"

View file

@ -42,6 +42,7 @@ fern = "0.6"
fnv = { version = "1.0", optional = true }
fuzzy-matcher = { version = "0.3.7", optional = true }
html2md = { version = "0.2.13", optional = true }
im = "15.0.0"
itertools = "0.10.1"
log = "0.4.16"
logos = "0.12.0"

View file

@ -10,16 +10,16 @@ use crate::{
};
pub fn analyze_bibtex_static(
workspace: &dyn Workspace,
workspace: &Workspace,
diagnostics_by_uri: &mut MultiMap<Arc<Uri>, Diagnostic>,
uri: &Uri,
) -> Option<()> {
let document = workspace.get(uri)?;
let document = workspace.documents_by_uri.get(uri)?;
let data = document.data.as_bibtex()?;
for node in bibtex::SyntaxNode::new_root(data.green.clone()).descendants() {
analyze_entry(&document, diagnostics_by_uri, node.clone())
.or_else(|| analyze_field(&document, diagnostics_by_uri, node));
analyze_entry(document, diagnostics_by_uri, node.clone())
.or_else(|| analyze_field(document, diagnostics_by_uri, node));
}
Some(())

View file

@ -6,14 +6,14 @@ use multimap::MultiMap;
use crate::{syntax::build_log::BuildErrorLevel, Uri, Workspace};
pub fn analyze_build_log_static(
workspace: &dyn Workspace,
workspace: &Workspace,
diagnostics_by_uri: &mut MultiMap<Arc<Uri>, Diagnostic>,
build_log_uri: &Uri,
) -> Option<()> {
let build_log_document = workspace.get(build_log_uri)?;
let build_log_document = workspace.documents_by_uri.get(build_log_uri)?;
let parse = build_log_document.data.as_build_log()?;
let root_document = workspace.documents().into_iter().find(|document| {
let root_document = workspace.documents_by_uri.values().find(|document| {
if let Some(data) = document.data.as_latex() {
!document.uri.as_str().ends_with(".aux")
&& data

View file

@ -14,12 +14,12 @@ use tempfile::tempdir;
use crate::{Options, RangeExt, Uri, Workspace};
pub fn analyze_latex_chktex(
workspace: &dyn Workspace,
workspace: &Workspace,
diagnostics_by_uri: &mut MultiMap<Arc<Uri>, Diagnostic>,
uri: &Uri,
options: &Options,
) -> Option<()> {
let document = workspace.get(uri)?;
let document = workspace.documents_by_uri.get(uri)?;
document.data.as_latex()?;
let current_dir = options

View file

@ -11,7 +11,7 @@ use crate::{Document, ServerContext, Uri, Workspace};
pub enum DiagnosticsMessage {
Analyze {
workspace: Arc<dyn Workspace>,
workspace: Workspace,
document: Document,
},
Shutdown,
@ -25,7 +25,7 @@ pub struct DiagnosticsDebouncer {
impl DiagnosticsDebouncer {
pub fn launch<A>(context: Arc<ServerContext>, action: A) -> Self
where
A: Fn(Arc<dyn Workspace>, Document) + Send + Clone + 'static,
A: Fn(Workspace, Document) + Send + Clone + 'static,
{
let (sender, receiver) = crossbeam_channel::unbounded();

View file

@ -7,11 +7,11 @@ use rowan::{ast::AstNode, TextRange};
use crate::{syntax::latex, Document, LineIndexExt, Uri, Workspace};
pub fn analyze_latex_static(
workspace: &dyn Workspace,
workspace: &Workspace,
diagnostics_by_uri: &mut MultiMap<Arc<Uri>, Diagnostic>,
uri: &Uri,
) -> Option<()> {
let document = workspace.get(uri)?;
let document = workspace.documents_by_uri.get(uri)?;
if !document.uri.as_str().ends_with(".tex") {
return None;
}
@ -19,8 +19,8 @@ pub fn analyze_latex_static(
let data = document.data.as_latex()?;
for node in latex::SyntaxNode::new_root(data.green.clone()).descendants() {
analyze_environment(&document, diagnostics_by_uri, node.clone())
.or_else(|| analyze_curly_group(&document, diagnostics_by_uri, node.clone()))
analyze_environment(document, diagnostics_by_uri, node.clone())
.or_else(|| analyze_curly_group(document, diagnostics_by_uri, node.clone()))
.or_else(|| {
if node.kind() == latex::ERROR && node.first_token()?.text() == "}" {
diagnostics_by_uri.insert(

View file

@ -26,7 +26,7 @@ pub struct DiagnosticsManager {
}
impl DiagnosticsManager {
pub fn update_static(&mut self, workspace: &dyn Workspace, uri: Arc<Uri>) {
pub fn update_static(&mut self, workspace: &Workspace, uri: Arc<Uri>) {
let mut diagnostics_by_uri = MultiMap::new();
analyze_build_log_static(workspace, &mut diagnostics_by_uri, &uri);
analyze_bibtex_static(workspace, &mut diagnostics_by_uri, &uri);
@ -34,7 +34,7 @@ impl DiagnosticsManager {
self.static_diagnostics.insert(uri, diagnostics_by_uri);
}
pub fn update_chktex(&mut self, workspace: &dyn Workspace, uri: Arc<Uri>, options: &Options) {
pub fn update_chktex(&mut self, workspace: &Workspace, uri: Arc<Uri>, options: &Options) {
analyze_latex_chktex(workspace, &mut self.chktex_diagnostics, &uri, options);
}

View file

@ -86,7 +86,7 @@ impl fmt::Debug for Document {
impl Document {
pub fn parse(
context: Arc<ServerContext>,
context: &ServerContext,
uri: Arc<Uri>,
text: Arc<String>,
language: DocumentLanguage,
@ -136,8 +136,4 @@ impl Document {
visibility,
}
}
pub fn language(&self) -> DocumentLanguage {
self.data.language()
}
}

View file

@ -125,7 +125,7 @@ impl BuildEngine {
})
.unwrap_or_else(|| request.main_document());
if document.language() != DocumentLanguage::Latex {
if document.data.language() != DocumentLanguage::Latex {
return Ok(BuildResult {
status: BuildStatus::SUCCESS,
});

View file

@ -57,7 +57,7 @@ pub fn format_with_latexindent(
path.join("latexindent.yaml"),
);
let name = if document.language() == DocumentLanguage::Bibtex {
let name = if document.data.language() == DocumentLanguage::Bibtex {
"file.bib"
} else {
"file.tex"

View file

@ -38,7 +38,7 @@ pub use self::{
pub struct FeatureRequest<P> {
pub context: Arc<ServerContext>,
pub params: P,
pub workspace: Arc<dyn Workspace>,
pub workspace: Workspace,
pub subset: WorkspaceSubset,
}
@ -62,8 +62,8 @@ mod testing {
use typed_builder::TypedBuilder;
use crate::{
create_workspace, distro::Resolver, DocumentLanguage, DocumentVisibility, Options,
ServerContext, Uri, Workspace,
distro::Resolver, DocumentLanguage, DocumentVisibility, Options, ServerContext, Uri,
Workspace,
};
use super::*;
@ -133,20 +133,24 @@ mod testing {
Arc::new(cx)
}
fn workspace(&self, cx: Arc<ServerContext>) -> Arc<dyn Workspace> {
let workspace = create_workspace(cx).unwrap();
fn workspace(&self, cx: Arc<ServerContext>) -> Workspace {
let mut workspace = Workspace::default();
for (name, source_code) in &self.files {
let uri = self.uri(name);
let path = uri.to_file_path().unwrap();
let language = DocumentLanguage::by_path(&path).expect("unknown document language");
workspace.open(
uri,
Arc::new(source_code.trim().to_string()),
language,
DocumentVisibility::Visible,
);
workspace
.open(
&cx,
uri,
Arc::new(source_code.trim().to_string()),
language,
DocumentVisibility::Visible,
)
.unwrap();
}
Arc::new(workspace)
workspace
}
fn request<P>(&self, params: P) -> FeatureRequest<P> {

View file

@ -42,7 +42,7 @@ pub fn find_document_symbols(req: FeatureRequest<DocumentSymbolParams>) -> Docum
.into_iter()
.map(|symbol| symbol.into_symbol_info(req.main_document().uri.as_ref().clone()))
.collect();
sort_symbols(req.workspace.as_ref(), &mut new_buf);
sort_symbols(&req.workspace, &mut new_buf);
DocumentSymbolResponse::Flat(new_buf)
}
}
@ -54,12 +54,12 @@ struct WorkspaceSymbol {
}
pub fn find_workspace_symbols(
workspace: &dyn Workspace,
workspace: &Workspace,
params: &WorkspaceSymbolParams,
) -> Vec<SymbolInformation> {
let mut symbols = Vec::new();
for document in workspace.documents() {
for document in workspace.documents_by_uri.values() {
if let Some(subset) = workspace.subset(Arc::clone(&document.uri)) {
let mut buf = Vec::new();
find_latex_symbols(&subset, &mut buf);
@ -102,7 +102,7 @@ pub fn find_workspace_symbols(
filtered
}
fn sort_symbols(workspace: &dyn Workspace, symbols: &mut [SymbolInformation]) {
fn sort_symbols(workspace: &Workspace, symbols: &mut [SymbolInformation]) {
let ordering = ProjectOrdering::from(workspace);
symbols.sort_by(|left, right| {
let left_key = (

View file

@ -19,12 +19,12 @@ impl ProjectOrdering {
}
}
impl From<&dyn Workspace> for ProjectOrdering {
fn from(workspace: &dyn Workspace) -> Self {
impl From<&Workspace> for ProjectOrdering {
fn from(workspace: &Workspace) -> Self {
let mut ordering = Vec::new();
let uris: FxHashSet<Arc<Uri>> = workspace
.documents()
.into_iter()
.documents_by_uri
.values()
.map(|document| Arc::clone(&document.uri))
.collect();
@ -42,7 +42,7 @@ impl From<&dyn Workspace> for ProjectOrdering {
}
ordering.push(Arc::clone(&uri));
if let Some(document) = workspace.get(&uri) {
if let Some(document) = workspace.documents_by_uri.get(&uri) {
if let Some(data) = document.data.as_latex() {
for link in data.extras.explicit_links.iter().rev() {
for target in &link.targets {
@ -60,10 +60,10 @@ impl From<&dyn Workspace> for ProjectOrdering {
}
}
fn connected_components(workspace: &dyn Workspace) -> Vec<WorkspaceSubset> {
fn connected_components(workspace: &Workspace) -> Vec<WorkspaceSubset> {
let mut components = Vec::new();
let mut visited = FxHashSet::default();
for root_document in workspace.documents() {
for root_document in workspace.documents_by_uri.values() {
if !visited.insert(Arc::clone(&root_document.uri)) {
continue;
}
@ -108,38 +108,40 @@ mod tests {
use anyhow::Result;
use crate::{create_workspace, DocumentLanguage, DocumentVisibility, ServerContext};
use crate::{DocumentLanguage, DocumentVisibility, ServerContext};
use super::*;
#[test]
fn test_no_cycles() -> Result<()> {
let workspace: Arc<dyn Workspace> = Arc::new(create_workspace(Arc::new(
ServerContext::new(std::env::temp_dir()),
))?);
let context = ServerContext::new(std::env::temp_dir());
let mut workspace = Workspace::default();
let a = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/a.tex")?),
Arc::new(String::new()),
DocumentLanguage::Latex,
DocumentVisibility::Visible,
);
)?;
let b = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/b.tex")?),
Arc::new(String::new()),
DocumentLanguage::Latex,
DocumentVisibility::Visible,
);
)?;
let c = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/c.tex")?),
Arc::new(r#"\include{b}\include{a}"#.to_string()),
DocumentLanguage::Latex,
DocumentVisibility::Visible,
);
)?;
let ordering = ProjectOrdering::from(workspace.as_ref());
let ordering = ProjectOrdering::from(&workspace);
assert_eq!(ordering.get(&a.uri), 2);
assert_eq!(ordering.get(&b.uri), 1);
@ -149,32 +151,34 @@ mod tests {
#[test]
fn test_cycles() -> Result<()> {
let workspace: Arc<dyn Workspace> = Arc::new(create_workspace(Arc::new(
ServerContext::new(std::env::temp_dir()),
))?);
let context = ServerContext::new(std::env::temp_dir());
let mut workspace = Workspace::default();
let a = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/a.tex")?),
Arc::new(r#"\include{b}"#.to_string()),
DocumentLanguage::Latex,
DocumentVisibility::Visible,
);
)?;
let b = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/b.tex")?),
Arc::new(r#"\include{a}"#.to_string()),
DocumentLanguage::Latex,
DocumentVisibility::Visible,
);
)?;
let c = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/c.tex")?),
Arc::new(r#"\include{a}"#.to_string()),
DocumentLanguage::Latex,
DocumentVisibility::Visible,
);
)?;
let ordering = ProjectOrdering::from(workspace.as_ref());
let ordering = ProjectOrdering::from(&workspace);
assert_eq!(ordering.get(&a.uri), 1);
assert_eq!(ordering.get(&b.uri), 2);
@ -184,39 +188,42 @@ mod tests {
#[test]
fn test_multiple_roots() -> Result<()> {
let workspace: Arc<dyn Workspace> = Arc::new(create_workspace(Arc::new(
ServerContext::new(std::env::temp_dir()),
))?);
let context = ServerContext::new(std::env::temp_dir());
let mut workspace = Workspace::default();
let a = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/a.tex")?),
Arc::new(r#"\include{b}"#.to_string()),
DocumentLanguage::Latex,
DocumentVisibility::Visible,
);
)?;
let b = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/b.tex")?),
Arc::new(r#""#.to_string()),
DocumentLanguage::Latex,
DocumentVisibility::Visible,
);
)?;
let c = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/c.tex")?),
Arc::new(r#""#.to_string()),
DocumentLanguage::Latex,
DocumentVisibility::Visible,
);
)?;
let d = workspace.open(
&context,
Arc::new(Uri::parse("http://example.com/d.tex")?),
Arc::new(r#"\include{c}"#.to_string()),
DocumentLanguage::Latex,
DocumentVisibility::Visible,
);
)?;
let ordering = ProjectOrdering::from(workspace.as_ref());
let ordering = ProjectOrdering::from(&workspace);
assert!(ordering.get(&a.uri) < ordering.get(&b.uri));
assert!(ordering.get(&d.uri) < ordering.get(&c.uri));

View file

@ -7,6 +7,7 @@ mod context;
pub mod diagnostics;
mod dispatch;
pub mod distro;
mod document;
pub mod features;
mod label;
mod lang_data;
@ -24,6 +25,7 @@ mod workspace;
pub use self::{
capabilities::ClientCapabilitiesExt,
context::ServerContext,
document::*,
label::*,
lang_data::*,
language::DocumentLanguage,

View file

@ -14,7 +14,6 @@ use threadpool::ThreadPool;
use crate::{
client::{send_notification, send_request},
component_db::COMPONENT_DATABASE,
create_workspace,
diagnostics::{DiagnosticsDebouncer, DiagnosticsManager, DiagnosticsMessage},
dispatch::{NotificationDispatcher, RequestDispatcher},
distro::Distribution,
@ -26,7 +25,7 @@ use crate::{
},
req_queue::{IncomingData, ReqQueue},
ClientCapabilitiesExt, Document, DocumentLanguage, DocumentVisibility, LineIndexExt, Options,
ServerContext, Uri, Workspace,
ServerContext, Uri, Workspace, WorkspaceEvent,
};
#[derive(Clone)]
@ -34,7 +33,7 @@ pub struct Server {
connection: Arc<Connection>,
context: Arc<ServerContext>,
req_queue: Arc<Mutex<ReqQueue>>,
workspace: Arc<dyn Workspace>,
workspace: Workspace,
static_debouncer: Arc<DiagnosticsDebouncer>,
chktex_debouncer: Arc<DiagnosticsDebouncer>,
pool: Arc<Mutex<ThreadPool>>,
@ -50,7 +49,7 @@ impl Server {
) -> Result<Self> {
let context = Arc::new(ServerContext::new(current_dir));
let req_queue = Arc::default();
let workspace = Arc::new(create_workspace(Arc::clone(&context))?);
let workspace = Workspace::default();
let diag_manager = Arc::new(Mutex::new(DiagnosticsManager::default()));
let static_debouncer = Arc::new(create_static_debouncer(
@ -160,9 +159,11 @@ impl Server {
self.spawn(move || {
server.register_config_capability();
server.register_file_watching();
server.pull_and_reparse_all();
});
self.pull_config();
self.reparse_all()?;
Ok(())
}
@ -230,15 +231,24 @@ impl Server {
}
fn register_diagnostics_handler(&mut self) {
let sender = self.static_debouncer.sender.clone();
self.workspace
.register_open_handler(Arc::new(move |workspace, document| {
let message = DiagnosticsMessage::Analyze {
workspace,
document,
let (event_sender, event_receiver) = crossbeam_channel::unbounded();
let diag_sender = self.static_debouncer.sender.clone();
std::thread::spawn(move || {
for event in event_receiver {
match event {
WorkspaceEvent::Changed(workspace, document) => {
let message = DiagnosticsMessage::Analyze {
workspace,
document,
};
diag_sender.send(message).unwrap();
}
};
sender.send(message).unwrap();
}));
}
});
self.workspace.listeners.push_back(event_sender);
}
fn register_incoming_request(&self, id: RequestId) {
@ -300,16 +310,16 @@ impl Server {
Ok(())
}
fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) -> Result<()> {
fn did_change_watched_files(&mut self, params: DidChangeWatchedFilesParams) -> Result<()> {
for change in params.changes {
if let Ok(path) = change.uri.to_file_path() {
let uri = change.uri.into();
let uri = Uri::from(change.uri);
match change.typ {
FileChangeType::CREATED | FileChangeType::CHANGED => {
self.workspace.reload(path)?;
self.workspace.reload(&self.context, path)?;
}
FileChangeType::DELETED => {
self.workspace.delete(&uri);
self.workspace.documents_by_uri.remove(&uri);
}
_ => {}
}
@ -319,13 +329,11 @@ impl Server {
Ok(())
}
fn did_change_configuration(&self, params: DidChangeConfigurationParams) -> Result<()> {
fn did_change_configuration(&mut self, params: DidChangeConfigurationParams) -> Result<()> {
let client_capabilities = { self.context.client_capabilities.lock().unwrap().clone() };
if client_capabilities.has_pull_configuration_support() {
let server = self.clone();
self.spawn(move || {
server.pull_and_reparse_all();
});
self.pull_config();
self.reparse_all()?;
} else {
match serde_json::from_value(params.settings) {
Ok(new_options) => {
@ -336,49 +344,50 @@ impl Server {
}
};
let server = self.clone();
self.spawn(move || {
server.reparse_all();
});
self.reparse_all()?;
}
Ok(())
}
fn did_open(&self, params: DidOpenTextDocumentParams) -> Result<()> {
fn did_open(&mut self, params: DidOpenTextDocumentParams) -> Result<()> {
let language_id = &params.text_document.language_id;
let language = DocumentLanguage::by_language_id(language_id);
let document = self.workspace.open(
&self.context,
Arc::new(params.text_document.uri.into()),
Arc::new(params.text_document.text),
language.unwrap_or(DocumentLanguage::Latex),
DocumentVisibility::Visible,
);
)?;
let should_lint = { self.context.options.read().unwrap().chktex.on_open_and_save };
if let Some(document) = self
.workspace
.documents_by_uri
.get(document.uri.as_ref())
.filter(|_| should_lint)
.cloned()
{
self.chktex_debouncer
.sender
.send(DiagnosticsMessage::Analyze {
workspace: Arc::clone(&self.workspace),
workspace: self.workspace.clone(),
document,
})?;
};
Ok(())
}
fn did_change(&self, mut params: DidChangeTextDocumentParams) -> Result<()> {
fn did_change(&mut self, mut params: DidChangeTextDocumentParams) -> Result<()> {
let uri = params.text_document.uri.into();
let old_document = self.workspace.get(&uri);
let old_document = self.workspace.documents_by_uri.get(&uri).cloned();
let old_text = old_document.as_ref().map(|document| document.text.as_str());
let uri = Arc::new(uri);
let language = self
.workspace
.documents_by_uri
.get(&uri)
.map(|document| document.data.language())
.unwrap_or(DocumentLanguage::Latex);
@ -391,11 +400,12 @@ impl Server {
self.merge_text_changes(&old_document, language, change)
}),
None => self.workspace.open(
&self.context,
Arc::clone(&uri),
Arc::new(params.content_changes.pop().unwrap().text),
language,
DocumentVisibility::Visible,
),
)?,
};
let line = match old_text {
@ -415,7 +425,7 @@ impl Server {
self.chktex_debouncer
.sender
.send(DiagnosticsMessage::Analyze {
workspace: Arc::clone(&self.workspace),
workspace: self.workspace.clone(),
document: new_document,
})?;
};
@ -424,7 +434,7 @@ impl Server {
}
fn merge_text_changes(
&self,
&mut self,
old_document: &Document,
new_language: DocumentLanguage,
change: TextDocumentContentChangeEvent,
@ -441,30 +451,34 @@ impl Server {
None => change.text,
};
self.workspace.open(
Arc::clone(&old_document.uri),
Arc::new(new_text),
new_language,
DocumentVisibility::Visible,
)
self.workspace
.open(
&self.context,
Arc::clone(&old_document.uri),
Arc::new(new_text),
new_language,
DocumentVisibility::Visible,
)
.unwrap()
}
fn did_save(&self, params: DidSaveTextDocumentParams) -> Result<()> {
let uri = params.text_document.uri.into();
let uri = Uri::from(params.text_document.uri);
let should_build = { self.context.options.read().unwrap().build.on_save };
if let Some(request) =
self.workspace
.get(&uri)
.filter(|_| should_build)
.and_then(|document| {
self.feature_request(
Arc::clone(&document.uri),
BuildParams {
text_document: TextDocumentIdentifier::new(uri.clone().into()),
},
)
})
if let Some(request) = self
.workspace
.documents_by_uri
.get(&uri)
.filter(|_| should_build)
.and_then(|document| {
self.feature_request(
Arc::clone(&document.uri),
BuildParams {
text_document: TextDocumentIdentifier::new(uri.clone().into()),
},
)
})
{
let lsp_sender = self.connection.sender.clone();
let req_queue = Arc::clone(&self.req_queue);
@ -482,18 +496,24 @@ impl Server {
}
let should_lint = { self.context.options.read().unwrap().chktex.on_open_and_save };
if let Some(document) = self.workspace.get(&uri).filter(|_| should_lint) {
if let Some(document) = self
.workspace
.documents_by_uri
.get(&uri)
.filter(|_| should_lint)
.cloned()
{
self.chktex_debouncer
.sender
.send(DiagnosticsMessage::Analyze {
workspace: Arc::clone(&self.workspace),
workspace: self.workspace.clone(),
document,
})?;
};
Ok(())
}
fn did_close(&self, params: DidCloseTextDocumentParams) -> Result<()> {
fn did_close(&mut self, params: DidCloseTextDocumentParams) -> Result<()> {
let uri = params.text_document.uri.into();
self.workspace.close(&uri);
Ok(())
@ -503,7 +523,7 @@ impl Server {
Some(FeatureRequest {
context: Arc::clone(&self.context),
params,
workspace: Arc::clone(&self.workspace),
workspace: self.workspace.clone(),
subset: self.workspace.subset(uri)?,
})
}
@ -561,9 +581,9 @@ impl Server {
fn workspace_symbols(&self, id: RequestId, params: WorkspaceSymbolParams) -> Result<()> {
let sender = self.connection.sender.clone();
let workspace = Arc::clone(&self.workspace);
let workspace = self.workspace.clone();
self.spawn(move || {
let result = find_workspace_symbols(workspace.as_ref(), &params);
let result = find_workspace_symbols(&workspace, &params);
sender
.send(lsp_server::Response::new_ok(id, result).into())
.unwrap();
@ -593,7 +613,7 @@ impl Server {
#[cfg(feature = "completion")]
fn completion_resolve(&self, id: RequestId, mut item: CompletionItem) -> Result<()> {
let sender = self.connection.sender.clone();
let workspace = Arc::clone(&self.workspace);
let workspace = self.workspace.clone();
self.spawn(move || {
match serde_json::from_value(item.data.clone().unwrap()).unwrap() {
crate::features::CompletionItemData::Package
@ -604,7 +624,7 @@ impl Server {
}
#[cfg(feature = "citation")]
crate::features::CompletionItemData::Citation { uri, key } => {
if let Some(document) = workspace.get(&uri) {
if let Some(document) = workspace.documents_by_uri.get(&uri) {
if let Some(data) = document.data.as_bibtex() {
let markup = crate::citation::render_citation(
&crate::syntax::bibtex::SyntaxNode::new_root(data.green.clone()),
@ -749,24 +769,29 @@ impl Server {
Ok(())
}
fn pull_and_reparse_all(&self) {
self.pull_config();
self.reparse_all();
}
fn reparse_all(&self) {
for document in self.workspace.documents() {
fn reparse_all(&mut self) -> Result<()> {
for document in self
.workspace
.documents_by_uri
.values()
.cloned()
.collect::<Vec<_>>()
{
self.workspace.open(
&self.context,
Arc::clone(&document.uri),
document.text.clone(),
document.language(),
document.data.language(),
DocumentVisibility::Visible,
);
)?;
}
Ok(())
}
fn process_messages(&self) -> Result<()> {
for msg in &self.connection.receiver {
fn process_messages(&mut self) -> Result<()> {
let receiver = self.connection.receiver.clone();
for msg in &receiver {
match msg {
Message::Request(request) => {
if self.connection.handle_shutdown(&request)? {
@ -862,8 +887,8 @@ fn create_static_debouncer(
let sender = conn.sender.clone();
DiagnosticsDebouncer::launch(context, move |workspace, document| {
let mut manager = manager.lock().unwrap();
manager.update_static(workspace.as_ref(), Arc::clone(&document.uri));
if let Err(why) = publish_diagnostics(&sender, workspace.as_ref(), &manager) {
manager.update_static(&workspace, Arc::clone(&document.uri));
if let Err(why) = publish_diagnostics(&sender, &workspace, &manager) {
warn!("Failed to publish diagnostics: {}", why);
}
})
@ -878,8 +903,8 @@ fn create_chktex_debouncer(
DiagnosticsDebouncer::launch(Arc::clone(&context), move |workspace, document| {
let options = { context.options.read().unwrap().clone() };
let mut manager = manager.lock().unwrap();
manager.update_chktex(workspace.as_ref(), Arc::clone(&document.uri), &options);
if let Err(why) = publish_diagnostics(&sender, workspace.as_ref(), &manager) {
manager.update_chktex(&workspace, Arc::clone(&document.uri), &options);
if let Err(why) = publish_diagnostics(&sender, &workspace, &manager) {
warn!("Failed to publish diagnostics: {}", why);
}
})
@ -887,10 +912,10 @@ fn create_chktex_debouncer(
fn publish_diagnostics(
sender: &Sender<lsp_server::Message>,
workspace: &dyn Workspace,
workspace: &Workspace,
diag_manager: &DiagnosticsManager,
) -> Result<()> {
for document in workspace.documents() {
for document in workspace.documents_by_uri.values() {
let diagnostics = diag_manager.publish(Arc::clone(&document.uri));
send_notification::<PublishDiagnostics>(
sender,

View file

@ -7,8 +7,8 @@ use smol_str::SmolStr;
use crate::{ServerContext, Uri};
#[derive(Debug)]
pub struct LatexAnalyzerContext {
pub inner: Arc<ServerContext>,
pub struct LatexAnalyzerContext<'a> {
pub inner: &'a ServerContext,
pub document_uri: Arc<Uri>,
pub base_uri: Arc<Uri>,
pub extras: Extras,

220
src/workspace.rs Normal file
View file

@ -0,0 +1,220 @@
use std::{fs, path::PathBuf, sync::Arc};
use anyhow::Result;
use crossbeam_channel::Sender;
use petgraph::{graphmap::UnGraphMap, visit::Dfs};
use rustc_hash::FxHashSet;
use crate::{
component_db::COMPONENT_DATABASE, Document, DocumentLanguage, DocumentVisibility,
ServerContext, Uri,
};
/// Events broadcast by a [`Workspace`] to its registered listeners.
#[derive(Debug, Clone)]
pub enum WorkspaceEvent {
    /// A document was opened or re-parsed. Carries a snapshot of the whole
    /// workspace at that moment plus the affected document (see
    /// `Workspace::open`, which sends this to every listener).
    Changed(Workspace, Document),
}
/// A set of documents that belong together — produced by
/// `Workspace::subset`, which collects everything reachable from a given
/// document through its implicit and explicit links.
#[derive(Debug, Clone)]
pub struct WorkspaceSubset {
    /// The documents in this subset, in graph-traversal discovery order.
    pub documents: Vec<Document>,
}
/// The collection of all known documents, keyed by URI.
///
/// Backed by persistent (`im`) collections, so `clone()` produces a cheap
/// structural-sharing snapshot — this is what allows `open` to hand a copy
/// of the entire workspace to each listener.
#[derive(Debug, Clone, Default)]
pub struct Workspace {
    // All documents currently tracked, whether visible in the editor or
    // loaded from disk in the background.
    pub documents_by_uri: im::HashMap<Arc<Uri>, Document>,
    // Channels notified with a `WorkspaceEvent` whenever a document changes.
    pub listeners: im::Vector<Sender<WorkspaceEvent>>,
}
impl Workspace {
    /// Parses `text` and inserts (or replaces) the document stored under `uri`.
    ///
    /// After insertion, every registered listener receives a
    /// `WorkspaceEvent::Changed` carrying a snapshot of the workspace (cheap
    /// clone of the persistent collections). Then related files on disk are
    /// discovered eagerly: candidate parents via `expand_parent` and linked
    /// children via `expand_children`.
    ///
    /// Returns the freshly parsed document; errors if a listener channel is
    /// disconnected.
    pub fn open(
        &mut self,
        context: &ServerContext,
        uri: Arc<Uri>,
        text: Arc<String>,
        language: DocumentLanguage,
        visibility: DocumentVisibility,
    ) -> Result<Document> {
        log::debug!("(Re)Loading document: {}", uri);
        let document = Document::parse(context, Arc::clone(&uri), text, language, visibility);

        self.documents_by_uri
            .insert(Arc::clone(&uri), document.clone());

        // Notify listeners *before* expanding: they see the workspace with
        // the new document inserted but without any of the files discovered
        // below.
        for listener in &self.listeners {
            listener.send(WorkspaceEvent::Changed(self.clone(), document.clone()))?;
        }

        self.expand_parent(context, &document);
        self.expand_children(context, &document);
        Ok(document)
    }

    /// Re-reads `path` from disk, unless the document is currently open
    /// (visible) in the editor — an open document is owned by the client and
    /// must not be clobbered with disk contents. Build logs (`.log`) are the
    /// exception: they only ever change on disk, so they are always re-read.
    ///
    /// Returns `Ok(None)` if the path does not map to a known document
    /// language.
    pub fn reload(&mut self, context: &ServerContext, path: PathBuf) -> Result<Option<Document>> {
        let uri = Arc::new(Uri::from_file_path(path.clone()).unwrap());

        if self.is_open(&uri) && !uri.as_str().ends_with(".log") {
            return Ok(self.documents_by_uri.get(&uri).cloned());
        }

        if let Some(language) = DocumentLanguage::by_path(&path) {
            let data = fs::read(&path)?;
            let text = Arc::new(String::from_utf8_lossy(&data).into_owned());
            Ok(Some(self.open(
                context,
                uri,
                text,
                language,
                DocumentVisibility::Hidden,
            )?))
        } else {
            Ok(None)
        }
    }

    /// Loads `path` from disk unless a document for it is already present in
    /// the workspace (in which case the cached document is returned as-is).
    ///
    /// NOTE(review): unlike `reload`, this reads the file *before* checking
    /// whether the path has a recognized document language, so an unreadable
    /// non-document file yields `Err` here but would be skipped by `reload`.
    /// Confirm whether that asymmetry is intentional.
    pub fn load(&mut self, context: &ServerContext, path: PathBuf) -> Result<Option<Document>> {
        let uri = Arc::new(Uri::from_file_path(path.clone()).unwrap());
        if let Some(document) = self.documents_by_uri.get(&uri).cloned() {
            return Ok(Some(document));
        }

        let data = fs::read(&path)?;
        let text = Arc::new(String::from_utf8_lossy(&data).into_owned());
        if let Some(language) = DocumentLanguage::by_path(&path) {
            Ok(Some(self.open(
                context,
                uri,
                text,
                language,
                DocumentVisibility::Hidden,
            )?))
        } else {
            Ok(None)
        }
    }

    /// Marks the document as hidden (closed in the editor) without removing
    /// it from the workspace — its parsed contents stay available.
    pub fn close(&mut self, uri: &Uri) {
        if let Some(document) = self.documents_by_uri.get_mut(uri) {
            document.visibility = DocumentVisibility::Hidden;
        }
    }

    /// Returns `true` if the document exists and is currently visible
    /// (open in the editor); `false` for hidden or unknown documents.
    pub fn is_open(&self, uri: &Uri) -> bool {
        self.documents_by_uri.get(uri).map_or(false, |document| {
            document.visibility == DocumentVisibility::Visible
        })
    }

    /// Computes the subset of documents connected to `uri` through implicit
    /// (aux/log) and explicit links.
    ///
    /// Builds an undirected graph whose nodes are the current document URIs
    /// (by index) and runs a DFS starting at `uri`. Returns `None` if `uri`
    /// is not in the workspace, or if a document referenced by the graph is
    /// missing when collected.
    pub fn subset(&self, uri: Arc<Uri>) -> Option<WorkspaceSubset> {
        let all_current_uris: Vec<Arc<Uri>> = self.documents_by_uri.keys().cloned().collect();

        let mut edges = Vec::new();
        for (i, uri) in all_current_uris.iter().enumerate() {
            let document = self.documents_by_uri.get(uri);
            if let Some(data) = document
                .as_ref()
                .and_then(|document| document.data.as_latex())
            {
                let extras = &data.extras;
                let mut all_targets = vec![&extras.implicit_links.aux, &extras.implicit_links.log];
                for link in &extras.explicit_links {
                    all_targets.push(&link.targets);
                }

                // Each link has several candidate target URIs; only the first
                // one that resolves to a workspace document produces an edge.
                for targets in all_targets {
                    for target in targets {
                        if let Some(j) = all_current_uris.iter().position(|uri| uri == target) {
                            edges.push((i, j, ()));
                            break;
                        }
                    }
                }
            }
        }

        let graph = UnGraphMap::from_edges(edges);
        let start = all_current_uris.iter().position(|u| *u == uri)?;
        let mut dfs = Dfs::new(&graph, start);
        let mut documents = Vec::new();
        while let Some(i) = dfs.next(&graph) {
            documents.push(self.documents_by_uri.get(&all_current_uris[i]).cloned()?);
        }

        Some(WorkspaceSubset { documents })
    }

    /// Returns `Some(true)` if any document in `uri`'s subset looks like a
    /// standalone root: it contains a `document` environment and does not
    /// explicitly link to `subfiles.cls` (subfiles are compiled through
    /// their real parent). `None` if `uri` has no computable subset.
    fn has_parent(&self, uri: Arc<Uri>) -> Option<bool> {
        let subset = self.subset(Arc::clone(&uri))?;
        Some(subset.documents.iter().any(|document| {
            document.data.as_latex().map_or(false, |data| {
                data.extras.has_document_environment
                    && !data
                        .extras
                        .explicit_links
                        .iter()
                        .filter_map(|link| link.as_component_name())
                        .any(|name| name == "subfiles.cls")
            })
        }))
    }

    /// Walks up the directory tree from `document`, loading every LaTeX file
    /// found in each ancestor directory, until a root document for
    /// `document` appears in its subset (`has_parent`) or the filesystem
    /// root is reached. This is how files that include the current one are
    /// discovered.
    fn expand_parent(&mut self, context: &ServerContext, document: &Document) {
        // Paths already loaded when expansion starts; used as a pre-filter so
        // known files are not re-read. NOTE(review): computed once up front —
        // files loaded inside the loop rely on `load`'s own presence check.
        let all_current_paths = self
            .documents_by_uri
            .values()
            .filter_map(|doc| doc.uri.to_file_path().ok())
            .collect::<FxHashSet<_>>();

        if document.uri.scheme() == "file" {
            if let Ok(mut path) = document.uri.to_file_path() {
                // `path.pop()` moves one directory up per iteration and
                // returns false at the filesystem root, terminating the loop.
                while path.pop() && !self.has_parent(Arc::clone(&document.uri)).unwrap_or(false) {
                    std::fs::read_dir(&path)
                        .into_iter()
                        .flatten()
                        .filter_map(|entry| entry.ok())
                        .filter(|entry| entry.file_type().ok().filter(|ty| ty.is_file()).is_some())
                        .map(|entry| entry.path())
                        .filter(|path| {
                            matches!(
                                DocumentLanguage::by_path(path),
                                Some(DocumentLanguage::Latex)
                            )
                        })
                        .filter(|path| !all_current_paths.contains(path))
                        .for_each(|path| {
                            // Best-effort: unreadable files are skipped.
                            let _ = self.load(context, path);
                        });
                }
            }
        }
    }

    /// Loads the targets of `document`'s links from disk: the implicit
    /// aux/log links plus every explicit link that does not resolve to a
    /// distributed component in `COMPONENT_DATABASE` (those ship with the
    /// TeX distribution and need no loading).
    ///
    /// For each link, the candidate target URIs are tried in order and
    /// loading stops at the first success.
    fn expand_children(&mut self, context: &ServerContext, document: &Document) {
        if let Some(data) = document.data.as_latex() {
            let extras = &data.extras;
            let mut all_targets = vec![&extras.implicit_links.aux, &extras.implicit_links.log];
            for link in &extras.explicit_links {
                if link
                    .as_component_name()
                    .and_then(|name| COMPONENT_DATABASE.find(&name))
                    .is_none()
                {
                    all_targets.push(&link.targets);
                }
            }

            all_targets.into_iter().for_each(|targets| {
                for path in targets
                    .iter()
                    // Only local files without a fragment can be loaded.
                    .filter(|uri| uri.scheme() == "file" && uri.fragment().is_none())
                    .filter_map(|uri| uri.to_file_path().ok())
                {
                    if self.load(context, path).is_ok() {
                        break;
                    }
                }
            });
        }
    }
}

View file

@ -1,82 +0,0 @@
use std::{fs, path::PathBuf, sync::Arc};
use anyhow::Result;
use crate::{DocumentLanguage, DocumentVisibility, Uri};
use super::Document;
/// The set of documents related to one document via its links
/// (computed by `Workspace::subset`).
#[derive(Debug, Clone)]
pub struct WorkspaceSubset {
    pub documents: Vec<Document>,
}
/// Callback invoked after a document has been opened; receives the workspace
/// the document lives in and the freshly parsed document.
pub type OpenHandler = Arc<dyn Fn(Arc<dyn Workspace>, Document) + Send + Sync + 'static>;
pub trait Workspace: Send + Sync {
    /// Parses `text` and inserts the resulting document into the workspace
    /// under `uri`.
    fn open(
        &self,
        uri: Arc<Uri>,
        text: Arc<String>,
        language: DocumentLanguage,
        visibility: DocumentVisibility,
    ) -> Document;

    /// Registers a callback that is invoked after a document has been opened.
    fn register_open_handler(&self, handler: OpenHandler);

    /// Re-reads `path` from disk, except that a currently open (visible)
    /// document is authoritative and returned as-is — unless it is a `.log`
    /// file, which is always refreshed from disk.
    ///
    /// Returns `Ok(None)` when the path has no recognized document language.
    fn reload(&self, path: PathBuf) -> Result<Option<Document>> {
        // NOTE(review): `from_file_path` can fail (e.g. relative paths); the
        // unwrap assumes callers only pass absolute paths — TODO confirm.
        let uri = Arc::new(Uri::from_file_path(path.clone()).unwrap());
        if self.is_open(&uri) && !uri.as_str().ends_with(".log") {
            return Ok(self.get(&uri));
        }
        let language = match DocumentLanguage::by_path(&path) {
            Some(language) => language,
            None => return Ok(None),
        };
        let data = fs::read(&path)?;
        let text = Arc::new(String::from_utf8_lossy(&data).into_owned());
        Ok(Some(self.open(
            uri,
            text,
            language,
            DocumentVisibility::Hidden,
        )))
    }

    /// Loads `path` from disk unless it is already in the workspace.
    ///
    /// Returns `Ok(None)` when the path has no recognized document language.
    fn load(&self, path: PathBuf) -> Result<Option<Document>> {
        let uri = Arc::new(Uri::from_file_path(path.clone()).unwrap());
        if let Some(document) = self.get(&uri) {
            return Ok(Some(document));
        }
        // Determine the language *before* touching the file system: unknown
        // file types then cost no read and cannot surface spurious I/O
        // errors. This also matches the order used by `reload`.
        let language = match DocumentLanguage::by_path(&path) {
            Some(language) => language,
            None => return Ok(None),
        };
        let data = fs::read(&path)?;
        let text = Arc::new(String::from_utf8_lossy(&data).into_owned());
        Ok(Some(self.open(
            uri,
            text,
            language,
            DocumentVisibility::Hidden,
        )))
    }

    fn documents(&self) -> Vec<Document>;
    fn has(&self, uri: &Uri) -> bool;
    fn get(&self, uri: &Uri) -> Option<Document>;
    fn close(&self, uri: &Uri);
    fn delete(&self, uri: &Uri);
    fn is_open(&self, uri: &Uri) -> bool;
    fn subset(&self, uri: Arc<Uri>) -> Option<WorkspaceSubset>;
}

View file

@ -1,96 +0,0 @@
use std::sync::Arc;
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use crate::{
component_db::COMPONENT_DATABASE, Document, DocumentLanguage, DocumentVisibility, OpenHandler,
Uri, Workspace, WorkspaceSubset,
};
/// Workspace decorator that loads the children (aux/log files and
/// project-local includes) of every document that gets opened.
pub struct ChildrenExpander<W> {
    // The wrapped inner workspace; all `Workspace` calls are forwarded to it.
    workspace: Arc<W>,
}
impl<W> ChildrenExpander<W>
where
    W: Workspace + Send + Sync + 'static,
{
    /// Wraps `workspace` and registers an open handler that loads the linked
    /// children of every document that gets opened.
    pub fn new(workspace: Arc<W>) -> Self {
        workspace.register_open_handler(Arc::new(move |workspace, document| {
            Self::expand(workspace.as_ref(), &document);
        }));
        Self { workspace }
    }

    /// Loads the documents referenced by `document`: its implicit aux/log
    /// links and every explicit link that does not resolve to a distributed
    /// component. For each link, only the first target that loads
    /// successfully is used; link groups are processed in parallel.
    fn expand(workspace: &dyn Workspace, document: &Document) {
        let data = match document.data.as_latex() {
            Some(data) => data,
            None => return,
        };
        let extras = &data.extras;
        let mut target_groups = vec![&extras.implicit_links.aux, &extras.implicit_links.log];
        target_groups.extend(
            extras
                .explicit_links
                .iter()
                .filter(|link| {
                    link.as_component_name()
                        .and_then(|name| COMPONENT_DATABASE.find(&name))
                        .is_none()
                })
                .map(|link| &link.targets),
        );
        target_groups.into_par_iter().for_each(|targets| {
            for path in targets
                .iter()
                .filter(|uri| uri.scheme() == "file" && uri.fragment().is_none())
                .filter_map(|uri| uri.to_file_path().ok())
            {
                // Best-effort: load failures simply move on to the next group.
                if workspace.load(path).is_ok() {
                    break;
                }
            }
        });
    }
}
// Forwarding implementation: every `Workspace` operation is delegated
// verbatim to the wrapped inner workspace. The child-expansion behavior is
// attached via the open handler registered in `ChildrenExpander::new`, not
// by intercepting calls here.
impl<W: Workspace> Workspace for ChildrenExpander<W> {
    fn open(
        &self,
        uri: Arc<Uri>,
        text: Arc<String>,
        language: DocumentLanguage,
        visibility: DocumentVisibility,
    ) -> Document {
        self.workspace.open(uri, text, language, visibility)
    }
    fn register_open_handler(&self, handler: OpenHandler) {
        self.workspace.register_open_handler(handler)
    }
    fn documents(&self) -> Vec<Document> {
        self.workspace.documents()
    }
    fn has(&self, uri: &Uri) -> bool {
        self.workspace.has(uri)
    }
    fn get(&self, uri: &Uri) -> Option<Document> {
        self.workspace.get(uri)
    }
    fn close(&self, uri: &Uri) {
        self.workspace.close(uri);
    }
    fn delete(&self, uri: &Uri) {
        self.workspace.delete(uri);
    }
    fn is_open(&self, uri: &Uri) -> bool {
        self.workspace.is_open(uri)
    }
    fn subset(&self, uri: Arc<Uri>) -> Option<WorkspaceSubset> {
        self.workspace.subset(uri)
    }
}

View file

@ -1,21 +0,0 @@
mod api;
mod children_expand;
mod document;
mod parent_expand;
mod storage;
use std::sync::Arc;
use anyhow::Result;
use crate::ServerContext;
pub use self::{api::*, document::*};
use self::{children_expand::ChildrenExpander, parent_expand::ParentExpander, storage::Storage};
/// Builds the workspace stack: a `Storage` core, wrapped by a
/// `ParentExpander` (discovers root documents in ancestor directories),
/// wrapped by a `ChildrenExpander` (loads linked child documents).
pub fn create_workspace(context: Arc<ServerContext>) -> Result<impl Workspace> {
    let storage = Storage::new(context);
    let with_parents = ParentExpander::new(storage);
    let with_children = ChildrenExpander::new(Arc::new(with_parents));
    Ok(with_children)
}

View file

@ -1,127 +0,0 @@
use std::{fs, sync::Arc};
use rayon::iter::{IntoParallelIterator, ParallelIterator};
use rustc_hash::FxHashSet;
use crate::{
Document, DocumentLanguage, DocumentVisibility, OpenHandler, Uri, Workspace, WorkspaceSubset,
};
/// Workspace decorator that, when a document is opened, scans the ancestor
/// directories of that document for LaTeX files until a parent (root)
/// document is found, loading candidates along the way.
pub struct ParentExpander<W> {
    // The wrapped inner workspace; all `Workspace` calls are forwarded to it.
    workspace: W,
}
impl<W: Workspace> ParentExpander<W> {
    pub fn new(workspace: W) -> Self {
        Self { workspace }
    }
}
impl<W> Workspace for ParentExpander<W>
where
    W: Workspace + Send + Sync + 'static,
{
    // Opens the document in the inner workspace, then walks up the directory
    // tree loading all not-yet-known LaTeX files of each ancestor directory
    // (in parallel) until a parent document is found for `uri` or the
    // filesystem root is reached.
    fn open(
        &self,
        uri: Arc<Uri>,
        text: Arc<String>,
        language: DocumentLanguage,
        visibility: DocumentVisibility,
    ) -> Document {
        let document = self
            .workspace
            .open(Arc::clone(&uri), text, language, visibility);
        // Snapshot of file paths already in the workspace so that known
        // candidates can be skipped below.
        let all_current_paths = self
            .workspace
            .documents()
            .into_iter()
            .filter_map(|doc| doc.uri.to_file_path().ok())
            .collect::<FxHashSet<_>>();
        if uri.scheme() == "file" {
            if let Ok(mut path) = uri.to_file_path() {
                // `path.pop()` moves one directory up per iteration; stop as
                // soon as a parent is known (see `has_parent` below).
                while path.pop() && !self.has_parent(Arc::clone(&uri)).unwrap_or(false) {
                    let mut files = Vec::new();
                    fs::read_dir(&path)
                        .into_iter()
                        .flatten()
                        .filter_map(|entry| entry.ok())
                        .filter(|entry| entry.file_type().ok().filter(|ty| ty.is_file()).is_some())
                        .map(|entry| entry.path())
                        .filter(|path| {
                            matches!(
                                DocumentLanguage::by_path(path),
                                Some(DocumentLanguage::Latex)
                            )
                        })
                        .filter(|path| !all_current_paths.contains(path))
                        .for_each(|path| {
                            files.push(path);
                        });
                    // Load candidates in parallel; individual load errors are
                    // deliberately ignored (best-effort discovery).
                    files.into_par_iter().for_each(|path| {
                        let _ = self.workspace.load(path);
                    });
                }
            }
        }
        document
    }
    // All remaining operations are forwarded verbatim to the inner workspace.
    fn register_open_handler(&self, handler: OpenHandler) {
        self.workspace.register_open_handler(handler)
    }
    fn documents(&self) -> Vec<Document> {
        self.workspace.documents()
    }
    fn has(&self, uri: &Uri) -> bool {
        self.workspace.has(uri)
    }
    fn get(&self, uri: &Uri) -> Option<Document> {
        self.workspace.get(uri)
    }
    fn close(&self, uri: &Uri) {
        self.workspace.close(uri);
    }
    fn delete(&self, uri: &Uri) {
        self.workspace.delete(uri);
    }
    fn is_open(&self, uri: &Uri) -> bool {
        self.workspace.is_open(uri)
    }
    fn subset(&self, uri: Arc<Uri>) -> Option<WorkspaceSubset> {
        self.workspace.subset(uri)
    }
}
impl<W> ParentExpander<W>
where
    W: Workspace + Send + Sync + 'static,
{
    /// Returns `Some(true)` if any document in the subset of `uri` is a root
    /// document — i.e. it contains a `document` environment and does not load
    /// `subfiles.cls` (subfile children are standalone-compilable but not
    /// roots) — `Some(false)` otherwise, and `None` when no subset can be
    /// built for `uri`.
    fn has_parent(&self, uri: Arc<Uri>) -> Option<bool> {
        let subset = self.subset(Arc::clone(&uri))?;
        Some(subset.documents.iter().any(|document| {
            // `map_or` instead of `map(..).unwrap_or(..)` (clippy
            // `map_unwrap_or`).
            document.data.as_latex().map_or(false, |data| {
                data.extras.has_document_environment
                    && !data
                        .extras
                        .explicit_links
                        .iter()
                        .filter_map(|link| link.as_component_name())
                        .any(|name| name == "subfiles.cls")
            })
        }))
    }
}

View file

@ -1,142 +0,0 @@
use std::sync::{Arc, Mutex};
use petgraph::{graphmap::UnGraphMap, visit::Dfs};
use rustc_hash::FxHashMap;
use crate::{
Document, DocumentLanguage, DocumentVisibility, OpenHandler, ServerContext, Uri, Workspace,
WorkspaceSubset,
};
/// In-memory document store: the concrete `Workspace` implementation wrapped
/// by the expander decorators.
#[derive(Clone)] // shallow clone: all fields are `Arc`s sharing the same state
pub struct Storage {
    context: Arc<ServerContext>,
    // All parsed documents, keyed by URI; mutex-guarded because the store is
    // shared across threads.
    documents_by_uri: Arc<Mutex<FxHashMap<Arc<Uri>, Document>>>,
    // Callbacks run after every `open`; see `register_open_handler`.
    open_handlers: Arc<Mutex<Vec<OpenHandler>>>,
}
impl Storage {
    /// Creates an empty document store sharing the given server context.
    pub fn new(context: Arc<ServerContext>) -> Self {
        let documents_by_uri = Arc::default();
        let open_handlers = Arc::default();
        Self {
            context,
            documents_by_uri,
            open_handlers,
        }
    }
}
impl Workspace for Storage {
    // Parses `text` into a `Document`, stores it under `uri` (replacing any
    // previous version), and notifies the registered open handlers.
    fn open(
        &self,
        uri: Arc<Uri>,
        text: Arc<String>,
        language: DocumentLanguage,
        visibility: DocumentVisibility,
    ) -> Document {
        log::debug!("(Re)Loading document: {}", uri);
        let document = Document::parse(
            Arc::clone(&self.context),
            Arc::clone(&uri),
            text,
            language,
            visibility,
        );
        self.documents_by_uri
            .lock()
            .unwrap()
            .insert(Arc::clone(&uri), document.clone());
        // Clone the handler list first so that no lock is held while the
        // handlers run (handlers may call back into this workspace).
        let handlers = { self.open_handlers.lock().unwrap().clone() };
        for handler in handlers {
            handler(Arc::new(self.clone()), document.clone());
        }
        document
    }
    fn register_open_handler(&self, handler: OpenHandler) {
        self.open_handlers.lock().unwrap().push(handler);
    }
    fn documents(&self) -> Vec<Document> {
        self.documents_by_uri
            .lock()
            .unwrap()
            .values()
            .cloned()
            .collect()
    }
    fn has(&self, uri: &Uri) -> bool {
        self.documents_by_uri.lock().unwrap().contains_key(uri)
    }
    fn get(&self, uri: &Uri) -> Option<Document> {
        self.documents_by_uri.lock().unwrap().get(uri).cloned()
    }
    // "Closing" only hides the document; it stays loaded so that links from
    // other documents keep resolving.
    fn close(&self, uri: &Uri) {
        if let Some(document) = self.documents_by_uri.lock().unwrap().get_mut(uri) {
            document.visibility = DocumentVisibility::Hidden;
        }
    }
    fn delete(&self, uri: &Uri) {
        self.documents_by_uri.lock().unwrap().remove(uri);
    }
    fn is_open(&self, uri: &Uri) -> bool {
        self.documents_by_uri
            .lock()
            .unwrap()
            .get(uri)
            .map_or(false, |document| {
                document.visibility == DocumentVisibility::Visible
            })
    }
    // Collects the documents connected to `uri` in an undirected link graph:
    // for each document, an edge is added to the first known target of each
    // of its implicit (aux/log) and explicit links, then a DFS from `uri`
    // gathers the reachable set.
    fn subset(&self, uri: Arc<Uri>) -> Option<WorkspaceSubset> {
        let all_current_uris: Vec<Arc<Uri>> = self
            .documents_by_uri
            .lock()
            .unwrap()
            .keys()
            .cloned()
            .collect();
        let mut edges = Vec::new();
        for (i, uri) in all_current_uris.iter().enumerate() {
            let document = self.get(uri);
            if let Some(data) = document
                .as_ref()
                .and_then(|document| document.data.as_latex())
            {
                let extras = &data.extras;
                let mut all_targets = vec![&extras.implicit_links.aux, &extras.implicit_links.log];
                for link in &extras.explicit_links {
                    all_targets.push(&link.targets);
                }
                for targets in all_targets {
                    for target in targets {
                        // NOTE(review): linear `position` per target makes
                        // edge building O(n^2) in the number of documents; a
                        // URI->index map would avoid it.
                        if let Some(j) = all_current_uris.iter().position(|uri| uri == target) {
                            edges.push((i, j, ()));
                            break;
                        }
                    }
                }
            }
        }
        let graph = UnGraphMap::from_edges(edges);
        let start = all_current_uris.iter().position(|u| *u == uri)?;
        // NOTE(review): a `start` node with no edges is absent from a graph
        // built with `from_edges`; confirm `Dfs` handles that case (expected
        // result: a subset containing only the start document).
        let mut dfs = Dfs::new(&graph, start);
        let mut documents = Vec::new();
        while let Some(i) = dfs.next(&graph) {
            // `?`: bail out if a document disappeared between the key
            // snapshot above and this lookup (concurrent `delete`).
            documents.push(self.get(&all_current_uris[i])?);
        }
        Some(WorkspaceSubset { documents })
    }
}