Do not use salsa crate in workspace struct

This commit is contained in:
Patrick Förster 2023-04-07 21:56:13 +02:00
parent 5433bf4995
commit f36086c4d3
7 changed files with 393 additions and 21 deletions

View file

@ -2,7 +2,7 @@ use std::path::PathBuf;
use distro::Language;
use rowan::TextSize;
use syntax::latex;
use syntax::{latex, BuildError};
use url::Url;
use crate::{line_index::LineIndex, semantics};
@ -23,6 +23,7 @@ pub struct Document {
pub owner: Owner,
pub cursor: TextSize,
pub chktex: Vec<()>,
pub language: Language,
pub data: DocumentData,
}
@ -51,7 +52,10 @@ impl Document {
let green = parser::parse_bibtex(&text);
DocumentData::Bib(BibDocumentData { green })
}
Language::Log => DocumentData::Log,
Language::Log => {
let errors = parser::parse_build_log(&text).errors;
DocumentData::Log(LogDocumentData { errors })
}
Language::Root => DocumentData::Root,
Language::Tectonic => DocumentData::Tectonic,
};
@ -65,17 +69,44 @@ impl Document {
owner,
cursor,
chktex,
language,
data,
}
}
}
// These `Borrow` impls let a `FxHashSet<Document>` be queried by `Url`
// or `&str` alone, without constructing a full `Document` key.
//
// `Borrow` contract: `Document`'s `Eq`/`Hash` (below) use only
// `self.uri`, and `Url` appears to hash via its serialized string —
// NOTE(review): confirm the `url` crate's `Hash` impl matches `str`
// hashing for the pinned crate version.
impl std::borrow::Borrow<Url> for Document {
    fn borrow(&self) -> &Url {
        &self.uri
    }
}
impl std::borrow::Borrow<str> for Document {
    fn borrow(&self) -> &str {
        self.uri.as_str()
    }
}
// A `Document` is identified by its URI alone: equality and hashing
// deliberately ignore text, parse data, and every other field. This
// must stay in sync with the `Borrow` impls above.
impl PartialEq for Document {
    fn eq(&self, other: &Self) -> bool {
        self.uri == other.uri
    }
}
impl Eq for Document {}
impl std::hash::Hash for Document {
    // Hashes exactly the field compared by `eq` (the URI only).
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.uri.hash(state)
    }
}
#[derive(Debug)]
pub enum DocumentData {
Tex(TexDocumentData),
Bib(BibDocumentData),
Aux(AuxDocumentData),
Log,
Log(LogDocumentData),
Root,
Tectonic,
}
@ -91,6 +122,11 @@ pub struct BibDocumentData {
pub green: rowan::GreenNode,
}
/// Payload of `DocumentData::Log`: the build errors extracted from a
/// log file by `parser::parse_build_log`.
#[derive(Debug)]
pub struct LogDocumentData {
    pub errors: Vec<BuildError>,
}
#[derive(Debug)]
pub struct AuxDocumentData {
pub green: rowan::GreenNode,

161
crates/base-db/src/graph.rs Normal file
View file

@ -0,0 +1,161 @@
use std::{ffi::OsStr, path::PathBuf};
use distro::Language;
use itertools::Itertools;
use once_cell::sync::Lazy;
use rustc_hash::FxHashSet;
use url::Url;
use crate::{semantics, Document, DocumentData, Workspace};
/// The user's home directory, resolved once and cached; used below to
/// keep only distro files that live under `$HOME`.
pub static HOME_DIR: Lazy<Option<PathBuf>> = Lazy::new(dirs::home_dir);
/// A directed link between two documents in the project graph.
#[derive(Debug)]
pub struct Edge<'a> {
    /// Document containing the reference.
    pub source: &'a Document,
    /// Document being referenced.
    pub target: &'a Document,
    /// `Some` for explicit edges built from semantic links;
    /// `None` for implicit edges (a TeX file and its `.log`/`.aux`).
    pub weight: Option<EdgeWeight<'a>>,
}
/// Payload attached to explicit edges.
#[derive(Debug)]
pub struct EdgeWeight<'a> {
    /// The semantic link that produced this edge.
    pub link: &'a semantics::tex::Link,
    /// Base directory the link was resolved against in the source.
    pub old_base_dir: Url,
    /// Base directory the target's own relative links resolve against
    /// (the link's `base_dir` override if set, otherwise unchanged).
    pub new_base_dir: Url,
}
/// The dependency graph reachable from a single start document.
#[derive(Debug)]
pub struct Graph<'a> {
    pub workspace: &'a Workspace,
    /// Root of the traversal.
    pub start: &'a Document,
    /// All discovered edges, in traversal order.
    pub edges: Vec<Edge<'a>>,
    /// URIs referenced by some document but absent from the workspace;
    /// consumed by `Workspace::discover_children` to load them.
    pub missing: Vec<Url>,
}
impl<'a> Graph<'a> {
    /// Builds the dependency graph reachable from `start` using a
    /// depth-first traversal over explicit and implicit edges.
    pub fn new(workspace: &'a Workspace, start: &'a Document) -> Self {
        let mut graph = Self {
            workspace,
            start,
            edges: Vec::new(),
            missing: Vec::new(),
        };
        // Each stack entry carries the base directory that relative
        // paths in that document resolve against.
        let base_dir = workspace.current_dir(&start.dir);
        let mut stack = vec![(start, base_dir)];
        let mut visited = FxHashSet::default();
        while let Some((source, base_dir)) = stack.pop() {
            // Remember where this document's edges begin so only the
            // targets discovered in this step get enqueued.
            let index = graph.edges.len();
            graph.explicit_edges(source, &base_dir);
            for edge in &graph.edges[index..] {
                let Some(weight) = edge.weight.as_ref() else { continue };
                // `visited` keys on URIs so each document is expanded once.
                if visited.insert(&edge.target.uri) {
                    stack.push((edge.target, weight.new_base_dir.clone()));
                }
            }
            // Implicit targets carry no weight, so they are recorded
            // here but never pushed onto the traversal stack.
            graph.implicit_edges(source, &base_dir);
        }
        graph
    }

    /// All documents reachable from `start`, beginning with `start`
    /// itself, deduplicated by URI in discovery order.
    pub fn preorder(&self) -> impl DoubleEndedIterator<Item = &'a Document> + '_ {
        std::iter::once(self.start)
            .chain(self.edges.iter().map(|group| group.target))
            .unique_by(|document| &document.uri)
    }

    /// Records one edge per semantic link of a TeX document; other
    /// document kinds have no explicit links.
    fn explicit_edges(&mut self, source: &'a Document, base_dir: &Url) {
        let DocumentData::Tex(data) = &source.data else { return };
        for link in &data.semantics.links {
            self.explicit_edge(source, base_dir, link);
        }
    }

    /// Resolves a single link to zero or more workspace documents,
    /// pushing an `Edge` for each hit and a `missing` URI for each miss.
    fn explicit_edge(
        &mut self,
        source: &'a Document,
        base_dir: &Url,
        link: &'a semantics::tex::Link,
    ) {
        let home_dir = HOME_DIR.as_deref();
        // Candidate file names: the link text as-is plus one variant
        // per extension allowed by the link kind (e.g. `foo` -> `foo.tex`).
        let stem = &link.path.text;
        let mut file_names = vec![stem.clone()];
        link.kind
            .extensions()
            .iter()
            .map(|ext| format!("{stem}.{ext}"))
            .for_each(|name| file_names.push(name));
        // Also consider files known to the TeX distribution, but only
        // those under the user's home directory.
        let file_name_db = self.workspace.distro();
        let distro_files = file_names
            .iter()
            .filter_map(|name| file_name_db.get(name))
            .filter(|path| home_dir.map_or(false, |dir| path.starts_with(dir)))
            .flat_map(Url::from_file_path);
        for target_uri in file_names
            .iter()
            .flat_map(|file_name| base_dir.join(file_name))
            .chain(distro_files)
        {
            match self.workspace.lookup(&target_uri) {
                Some(target) => {
                    // A link may override the base directory used for
                    // the target's own relative links; fall back to the
                    // current base directory otherwise.
                    let new_base_dir = link
                        .base_dir
                        .as_deref()
                        .and_then(|path| base_dir.join(&path).ok())
                        .unwrap_or_else(|| base_dir.clone());
                    let weight = Some(EdgeWeight {
                        link,
                        old_base_dir: base_dir.clone(),
                        new_base_dir,
                    });
                    self.edges.push(Edge {
                        source,
                        target,
                        weight,
                    });
                }
                None => {
                    // Not (yet) in the workspace; `Workspace::discover`
                    // later tries to load these from disk.
                    self.missing.push(target_uri);
                }
            };
        }
    }

    /// Connects a TeX document to its build artifacts (`.log`, `.aux`)
    /// in the output directory. `.aux` URIs are excluded as sources.
    fn implicit_edges(&mut self, source: &'a Document, base_dir: &Url) {
        let uri = source.uri.as_str();
        if source.language == Language::Tex && !uri.ends_with(".aux") {
            self.implicit_edge(source, base_dir, "log");
            self.implicit_edge(source, base_dir, "aux");
        }
    }

    /// Derives the artifact URI (same file stem, new `extension`,
    /// located in the workspace output directory) and records an edge
    /// or a missing entry for it. Non-`file` URIs are silently skipped.
    fn implicit_edge(&mut self, source: &'a Document, base_dir: &Url, extension: &str) {
        let Some(target_uri) = source.uri.to_file_path().ok().and_then(|mut path| {
            path.set_extension(extension);
            path.file_name()
                .and_then(OsStr::to_str)
                .and_then(|name| self.workspace.output_dir(base_dir).join(&name).ok())
        }) else { return };
        match self.workspace.lookup(&target_uri) {
            Some(target) => {
                // Implicit edges carry no link data.
                self.edges.push(Edge {
                    source,
                    target,
                    weight: None,
                });
            }
            None => {
                self.missing.push(target_uri);
            }
        }
    }
}

View file

@ -1,5 +1,6 @@
mod config;
mod document;
pub mod graph;
mod line_index;
pub mod semantics;
mod workspace;

View file

@ -13,6 +13,8 @@ pub struct Semantics {
pub environments: FxHashSet<String>,
pub theorem_definitions: Vec<TheoremDefinition>,
pub graphics_paths: FxHashSet<String>,
pub can_be_root: bool,
pub can_be_compiled: bool,
}
impl Semantics {
@ -29,6 +31,14 @@ impl Semantics {
}
};
}
self.can_be_compiled = self.environments.contains("document");
self.can_be_root = self.can_be_compiled
&& self
.links
.iter()
.filter(|link| link.kind == LinkKind::Cls)
.any(|link| link.path.text == "subfiles");
}
fn process_node(&mut self, node: &latex::SyntaxNode) {

View file

@ -1,27 +1,54 @@
use std::{borrow::Cow, path::Path};
use std::{
borrow::{Borrow, Cow},
path::{Path, PathBuf},
};
use distro::Language;
use rustc_hash::FxHashMap;
use distro::{FileNameDB, Language};
use itertools::Itertools;
use rustc_hash::FxHashSet;
use url::Url;
use crate::{Config, Document, DocumentData, Owner};
use crate::{graph, Config, Document, DocumentData, Owner};
#[derive(Debug)]
pub struct Workspace {
documents: FxHashMap<Url, Document>,
documents: FxHashSet<Document>,
config: Config,
root_dirs: Vec<Url>,
distro: FileNameDB,
folders: Vec<PathBuf>,
}
impl Workspace {
pub fn lookup(&self, uri: &Url) -> Option<&Document> {
self.documents.get(uri)
pub fn lookup<Q>(&self, key: &Q) -> Option<&Document>
where
Q: std::hash::Hash + Eq,
Document: Borrow<Q>,
{
self.documents.get(key)
}
/// Finds the document stored at the given filesystem path, if any.
/// This is a linear scan: the document set is keyed by URI, not path.
pub fn lookup_path(&self, path: &Path) -> Option<&Document> {
    self.iter()
        .find(|document| matches!(document.path.as_deref(), Some(p) if p == path))
}
/// Iterates over all documents in the workspace (arbitrary set order).
pub fn iter(&self) -> impl Iterator<Item = &Document> + '_ {
    self.documents.iter()
}

/// The current workspace configuration.
pub fn config(&self) -> &Config {
    &self.config
}

/// The file name database of the installed TeX distribution.
pub fn distro(&self) -> &FileNameDB {
    &self.distro
}
/// Opens (or re-opens) a document, replacing any previous version
/// stored under the same URI.
///
/// The stale pre-image diff lines (the old `FxHashMap`-based body)
/// are dropped; only the post-image set-based body remains.
pub fn open(&mut self, uri: Url, text: String, language: Language, owner: Owner) {
    log::debug!("Opening document {uri}...");
    // `HashSet::insert` keeps the existing value when an equal one is
    // present, so the old document must be removed first for the new
    // text to win.
    self.documents.remove(&uri);
    self.documents
        .insert(Document::parse(uri, text, language, owner));
}
pub fn load(&mut self, path: &Path, language: Language, owner: Owner) -> std::io::Result<()> {
@ -37,8 +64,7 @@ impl Workspace {
}
pub fn watch(&mut self, watcher: &mut dyn notify::Watcher) {
self.documents
.values()
self.iter()
.filter(|document| document.uri.scheme() == "file")
.flat_map(|document| {
let dir1 = self.output_dir(&self.current_dir(&document.dir));
@ -57,10 +83,9 @@ impl Workspace {
return dir;
}
self.documents
.values()
.filter(|doc| matches!(doc.data, DocumentData::Root | DocumentData::Tectonic))
.flat_map(|doc| doc.uri.join("."))
self.iter()
.filter(|document| matches!(document.data, DocumentData::Root | DocumentData::Tectonic))
.flat_map(|document| document.uri.join("."))
.find(|root_dir| base_dir.as_str().starts_with(root_dir.as_str()))
.unwrap_or_else(|| base_dir.clone())
}
@ -73,4 +98,117 @@ impl Workspace {
base_dir.join(&path).unwrap_or_else(|_| base_dir.clone())
}
/// Whether `path` lies inside the workspace. An empty folder list
/// means the workspace is unrestricted and contains every path.
pub fn contains(&self, path: &Path) -> bool {
    self.folders.is_empty() || self.folders.iter().any(|dir| path.starts_with(dir))
}
/// All documents that share a dependency graph with `child`: for every
/// start document whose graph has an edge touching `child`, the whole
/// preorder of that graph is included.
pub fn related(&self, child: &Document) -> FxHashSet<&Document> {
    let mut results = FxHashSet::default();
    for start in self.iter() {
        let graph = graph::Graph::new(self, start);
        let touches_child = graph
            .edges
            .iter()
            .any(|edge| edge.source == child || edge.target == child);
        if touches_child {
            results.extend(graph.preorder());
        }
    }
    results
}
/// All root-capable documents whose dependency graph reaches `child`.
pub fn parents(&self, child: &Document) -> FxHashSet<&Document> {
    self.iter()
        .filter(|document| {
            // Only TeX documents flagged as possible roots qualify.
            let DocumentData::Tex(data) = &document.data else { return false };
            data.semantics.can_be_root
        })
        .filter(|parent| {
            graph::Graph::new(self, parent)
                .preorder()
                .any(|node| node == child)
        })
        .collect()
}
/// Repeatedly scans for parent directories and missing children until
/// a fixed point is reached (one full pass loads nothing new).
pub fn discover(&mut self) {
    loop {
        // Both passes always run; either one finding a file keeps
        // the iteration going.
        let found_parents = self.discover_parents();
        let found_children = self.discover_children();
        if !found_parents && !found_children {
            break;
        }
    }
}
/// Scans ancestor directories of known documents for additional
/// relevant files (TeX sources and root/Tectonic markers) and loads
/// them. Returns `true` if at least one new document was loaded.
fn discover_parents(&mut self) -> bool {
    // Every ancestor directory of every known on-disk document that
    // still lies inside the workspace folders.
    let dirs = self
        .iter()
        .filter_map(|document| document.path.as_deref())
        .flat_map(|path| path.ancestors().skip(1))
        .filter(|path| self.contains(path))
        .map(|path| path.to_path_buf())
        .collect::<FxHashSet<_>>();

    let mut changed = false;
    for dir in dirs {
        // Skip directories at or below one that already holds a root
        // marker (`Language::Root` / `Language::Tectonic` file).
        if self
            .iter()
            .filter(|document| matches!(document.language, Language::Root | Language::Tectonic))
            .filter_map(|document| document.path.as_deref())
            .filter_map(|path| path.parent())
            .any(|marker| dir.starts_with(marker))
        {
            continue;
        }

        // Best effort: unreadable directories are silently skipped.
        let Ok(entries) = std::fs::read_dir(dir) else { continue };
        for file in entries
            .flatten()
            .filter(|entry| entry.file_type().map_or(false, |type_| type_.is_file()))
            .map(|entry| entry.path())
        {
            // Only TeX sources and root markers are auto-loaded here.
            let Some(lang) = Language::from_path(&file) else { continue };
            if !matches!(lang, Language::Tex | Language::Root | Language::Tectonic) {
                continue;
            }

            if self.lookup_path(&file).is_none() {
                changed |= self.load(&file, lang, Owner::Server).is_ok();
            }
        }
    }

    changed
}
fn discover_children(&mut self) -> bool {
let paths = self
.iter()
.map(|start| graph::Graph::new(self, start))
.flat_map(|graph| graph.missing)
.filter(|uri| uri.scheme() == "file")
.flat_map(|uri| uri.to_file_path())
.collect::<FxHashSet<_>>();
let mut changed = false;
for path in paths {
let language = Language::from_path(&path).unwrap_or(Language::Tex);
if self.lookup_path(&path).is_none() {
changed |= self.load(&path, language, Owner::Server).is_ok();
}
}
changed
}
}