mirror of
https://github.com/Strum355/mcshader-lsp.git
synced 2025-08-04 00:49:17 +00:00
fixed error off-by-one once and for all, and more restructuring + a lot more to go until I'm happy with it (aka never)
This commit is contained in:
parent
786e09bdcf
commit
e7221304da
29 changed files with 1319 additions and 1128 deletions
|
@ -13,17 +13,13 @@ filesystem = { path = "../filesystem" }
|
|||
# Dependencies for this crate.
# NOTE: the flattened list contained duplicate keys left over from version
# bumps (`path-slash` at both "0.1" and "0.2", `walkdir` twice, and
# `include_merger` both commented out and active). Duplicate keys are invalid
# TOML; the newer/active entry of each pair is kept, sorted alphabetically.
futures = "0.3.21"
glob = "0.3"
graph = { path = "../graph" }
include_merger = { path = "../include_merger" }
lazy_static = "1.4"
logging = { path = "../logging" }
opengl = { path = "../opengl" }
path-slash = "0.2"
regex = "1.4"
sourcefile = { path = "../sourcefile" }
thiserror = "1.0"
tokio = { version = "1.18.0", features = ["sync"] }
tower-lsp = "0.17.0"
tst = "0.10"
url = "2.2"
walkdir = "2.3"
workspace_tree = { path = "../workspace_tree" }
|
@ -1,6 +1,272 @@
|
|||
#![feature(assert_matches)]
|
||||
#![feature(result_flattening)]
|
||||
#![feature(arc_unwrap_or_clone)]
|
||||
|
||||
pub mod workspace;
|
||||
pub mod workspace_manager;
|
||||
pub use workspace::*;
|
||||
pub use workspace_manager::*;
|
||||
use std::{
|
||||
collections::{hash_map::Entry, HashMap, HashSet},
|
||||
fs::read_to_string,
|
||||
};
|
||||
|
||||
use filesystem::{is_top_level, NormalizedPathBuf};
|
||||
use graph::{
|
||||
dfs::{CycleError, Dfs},
|
||||
CachedStableGraph, FilialTuple, NodeIndex,
|
||||
};
|
||||
use logging::{debug, info, warn};
|
||||
use sourcefile::{IncludeLine, Sourcefile};
|
||||
use walkdir::WalkDir;
|
||||
|
||||
mod tree;
|
||||
|
||||
/// A graph-backed view of every GLSL file under a single workspace root.
///
/// Files are nodes in `graph`; `#include` relationships are edges weighted by
/// the line of the include directive. Raw text for every known file is kept in
/// `sources`, and files whose last incoming edge was removed are recorded in
/// `disconnected` (see `update_sourcefile`).
pub struct WorkspaceTree {
    // Workspace root directory; all member paths live underneath it.
    root: NormalizedPathBuf,
    // Include graph: node weight = file path, edge weight = include line.
    pub graph: CachedStableGraph<NormalizedPathBuf, IncludeLine>,
    // Files that currently have no parent in the include graph.
    disconnected: HashSet<NormalizedPathBuf>,
    // Path -> parsed source for every file seen so far.
    sources: HashMap<NormalizedPathBuf, Sourcefile>,
}
|
||||
|
||||
/// Errors produced while building or traversing include trees.
#[derive(thiserror::Error, Debug)]
pub enum TreeError {
    /// The entry (or one of its root ancestors) is not a valid top-level file
    /// according to `filesystem::is_top_level`.
    #[error("got a non-valid top-level file")]
    NonTopLevel(NormalizedPathBuf),
    /// A file referenced by an `#include` has no entry in the workspace sources.
    #[error("file {missing} not found; imported by {importing}.")]
    FileNotFound {
        // The file containing the offending include directive.
        importing: NormalizedPathBuf,
        // The include target that could not be resolved.
        missing: NormalizedPathBuf,
    },
    /// A cycle was detected during depth-first traversal of the include graph.
    #[error(transparent)]
    DfsError(#[from] CycleError<NormalizedPathBuf>),
    /// Any other underlying failure.
    #[error(transparent)]
    Other(#[from] anyhow::Error),
}
|
||||
|
||||
impl WorkspaceTree {
    /// Creates an empty workspace view rooted at `root`.
    pub fn new(root: &NormalizedPathBuf) -> Self {
        WorkspaceTree {
            root: root.clone(),
            graph: CachedStableGraph::new(),
            disconnected: HashSet::new(),
            sources: HashMap::new(),
        }
    }

    /// Number of files currently present as nodes in the include graph.
    pub fn num_connected_entries(&self) -> usize {
        self.graph.node_count()
    }

    // pub fn num_disconnected_entries(&self) -> usize {
    //     self.disconnected.len()
    // }

    /// builds the set of connected and disconnected GLSL files from the root of the
    /// workspace.
    ///
    /// Walks the whole tree under `root`, classifying each file as top-level
    /// (per `is_top_level` on the root-relative path) or as a leaf with a known
    /// GLSL extension, then feeds each one through `update_sourcefile` so its
    /// includes become graph edges. Unreadable files are silently skipped.
    // TODO: support user-defined additional file extensions.
    pub fn build(&mut self) {
        // Cloned so the closure below can borrow it while `self` stays free.
        let root = self.root.clone();

        enum GraphEntry {
            // represents top-level nodes
            TopLevel(Sourcefile),
            // represents non-top-level nodes
            Leaf(Sourcefile),
        }

        // let mut roots = Vec::new();

        for entry in WalkDir::new(&root)
            .into_iter()
            .filter_map(Result::ok)
            .filter(|entry| entry.path().is_file())
            .map(|entry| NormalizedPathBuf::from(entry.into_path()))
            .filter_map(|path| {
                // files not imported anywhere wont be included in the graph,
                // this is ok for now.
                if !is_top_level(&path.strip_prefix(&root)) {
                    let ext = path.extension();
                    if ext == Some("fsh") || ext == Some("gsh") || ext == Some("vsh") || ext == Some("glsl") || ext == Some("csh") {
                        return Some(GraphEntry::Leaf(Sourcefile::new(read_to_string(&path).ok()?, path, root.clone())));
                    }
                    return None;
                }

                // Top-level files are accepted regardless of extension.
                Some(GraphEntry::TopLevel(Sourcefile::new(
                    read_to_string(&path).ok()?,
                    path,
                    root.clone(),
                )))
            })
        {
            // iterate all valid found files, search for includes, add a node into the graph for each
            // file and add a file->includes KV into the map
            match entry {
                GraphEntry::TopLevel(file) => {
                    // NOTE(review): debug print left in — consider routing through the logger.
                    eprintln!("TOP LEVEL {}", file.path);
                    let path = file.path.clone();
                    // roots.push(file.clone());
                    // self.sources.insert(path.clone(), file);
                    self.update_sourcefile(&path, file.source);
                }
                GraphEntry::Leaf(file) => {
                    // NOTE(review): debug print left in — consider routing through the logger.
                    eprintln!("LEAF {}", file.path);
                    let path = file.path.clone();
                    // self.sources.insert(path.clone(), file);
                    self.update_sourcefile(&path, file.source);
                    // self.disconnected.insert(path);
                }
            };
        }
    }

    /// Returns the lazy depth first iterators for the possible trees given any node.
    /// If it is a top-level node, only a single tree should be instantiated. If not a top-level node,
    /// a tree will be instantiated for every top-level root ancestor.
    ///
    /// Error modes:
    /// - Top [`Result`]
    ///   - The node is not known to the workspace
    ///   - The node has no ancestors but is not a known valid top-level file
    /// - Middle [`Result`] (only for >1 ancestor)
    ///   - A non-valid top-level ancestor was found
    /// - Bottom [`Result`]
    ///   - A cycle was detected while iterating
    ///   - A node was not found on the filesystem while synthesizing a Sourcefile instance
    pub fn trees_for_entry<'a>(
        &'a mut self, entry: &'a NormalizedPathBuf,
    ) -> Result<
        impl Iterator<Item = Result<impl Iterator<Item = Result<FilialTuple<&Sourcefile>, TreeError>> + '_, TreeError>> + '_,
        TreeError,
    > {
        // Propagates the "node unknown to the workspace" error via `?`.
        let root_ancestors = self.graph.root_ancestors_for_key(entry)?.unwrap_or_default();

        // One tree per root ancestor; at least one for the entry itself.
        let mut trees = Vec::with_capacity(root_ancestors.len().max(1));

        info!("top-level file ancestors found";
            "uri" => entry,
            "ancestors" => format!("{:?}", root_ancestors.iter()
                .copied()
                .map(|e| &self.graph.graph[e])
                .collect::<Vec<_>>())
        );

        // Safe to unwrap: root_ancestors_for_key succeeded, so the node exists.
        let node = self.graph.find_node(entry).unwrap();

        // Adapter: lift DFS cycle errors into TreeError.
        let transform_cycle_error =
            |result: Result<FilialTuple<NodeIndex>, CycleError<NormalizedPathBuf>>| result.map_err(TreeError::DfsError);
        // Adapter: resolve node indices to &Sourcefile, soft-failing on a
        // missing child and hard-panicking on a missing parent (invariant:
        // a parent must have been inserted into `sources` before gaining edges).
        let node_to_sourcefile = |result: Result<FilialTuple<NodeIndex>, TreeError>| -> Result<FilialTuple<&Sourcefile>, TreeError> {
            result.and_then(|tup| {
                let parent = tup.parent.map(|p| {
                    let parent_path = &self.graph[p];
                    // fatal error case, shouldnt happen
                    self.sources
                        .get(parent_path)
                        .unwrap_or_else(|| panic!("no entry in sources for parent {}", parent_path))
                });

                let child_path = &self.graph[tup.child];
                // soft-fail case, if file doesnt exist or mistype
                // eprintln!("MISSING? {:?}", self.sources.get(child_path).is_none());
                // NOTE(review): `tup.parent.unwrap()` below panics if the tree
                // ROOT itself is absent from `sources` (parent is None for the
                // first tuple) — TODO confirm that case cannot reach here.
                let child = self.sources.get(child_path).ok_or_else(|| TreeError::FileNotFound {
                    importing: self.graph[tup.parent.unwrap()].clone(),
                    missing: child_path.clone(),
                })?;

                Ok(FilialTuple { child, parent })
            })
        };

        if root_ancestors.is_empty() {
            // No ancestors: the entry must itself be a valid top-level file.
            if !is_top_level(&entry.strip_prefix(&self.root)) {
                return Err(TreeError::NonTopLevel(entry.clone()));
            }

            let dfs = Dfs::new(&self.graph, node)
                .into_iter()
                .map(transform_cycle_error)
                .map(node_to_sourcefile);
            trees.push(Ok(dfs));
        } else {
            // One DFS tree per top-level root ancestor; invalid ancestors
            // become per-tree errors rather than aborting the whole call.
            for root in root_ancestors {
                let root_path = &self.graph[root];
                if !is_top_level(&root_path.strip_prefix(&self.root)) {
                    warn!("got a non-valid toplevel file"; "root_ancestor" => root_path);
                    trees.push(Err(TreeError::NonTopLevel(root_path.clone())));
                    continue;
                }

                let dfs = Dfs::new(&self.graph, root)
                    .into_iter()
                    .map(transform_cycle_error)
                    .map(node_to_sourcefile);
                trees.push(Ok(dfs));
            }
        }

        Ok(trees.into_iter())
    }

    /// updates the set of GLSL files connected to the given file, moving unreferenced
    /// children into the disconnected set.
    ///
    /// Upserts `text` for `path` into `sources`, re-extracts its includes, and
    /// diffs them against the file's existing outgoing edges, removing stale
    /// edges and adding new ones.
    pub fn update_sourcefile(&mut self, path: &NormalizedPathBuf, text: String) {
        // Upsert the source text without re-parsing an existing entry's path.
        match self.sources.entry(path.clone()) {
            Entry::Occupied(mut entry) => entry.get_mut().source = text,
            Entry::Vacant(entry) => {
                entry.insert(Sourcefile::new(text, path.clone(), self.root.clone()));
            }
        };
        let file = self.sources.get(path).unwrap();
        // NOTE(review): unwrap here panics if include extraction fails —
        // TODO confirm `includes()` is infallible for in-progress edits.
        let includes = file.includes().unwrap();

        info!("includes found for file"; "file" => &file.path, "includes" => format!("{:?}", includes));

        let idx = self.graph.add_node(&file.path);

        // (child path, include line) pairs currently in the graph vs. in the new text.
        let prev_children: HashSet<_> =
            HashSet::from_iter(self.graph.get_all_edges_from(idx).map(|tup| (self.graph[tup.0].clone(), tup.1)));
        let new_children: HashSet<_> = includes.iter().cloned().collect();

        let to_be_added = new_children.difference(&prev_children);
        let to_be_removed = prev_children.difference(&new_children);

        debug!(
            "include sets diff'd";
            "for removal" => format!("{:?}", to_be_removed),
            "for addition" => format!("{:?}", to_be_added)
        );

        for removal in to_be_removed {
            let child = self.graph.find_node(&removal.0).unwrap();
            self.graph.remove_edge(idx, child, removal.1);
            // Only files that still exist on disk and have lost their last
            // parent are remembered as disconnected.
            if removal.0.exists() && self.graph.parents(child).count() == 0 {
                self.disconnected.insert(removal.0.clone());
            }
        }

        // TODO: remove entire subtree from disconnected
        for insertion in to_be_added {
            // Look up the include again to recover its line position.
            let (child, position) = includes.iter().find(|f| f.0 == insertion.0).unwrap().clone();
            let child = self.graph.add_node(&child);
            self.graph.add_edge(idx, child, position);
        }
    }
}
|
||||
|
||||
#[cfg(test)]
mod test {
    use crate::{TreeError, WorkspaceTree};

    /// An entry with no root ancestors that is not itself a valid top-level
    /// file must be rejected with [`TreeError::NonTopLevel`].
    #[test]
    fn test_trees() {
        let mut view = WorkspaceTree::new(&("/home/test/banana".into()));

        // Wire up a single parent -> child include edge by hand.
        let parent_node = view.graph.add_node(&("/home/test/banana/test.fsh".into()));
        let child_node = view.graph.add_node(&("/home/test/banana/included.glsl".into()));
        view.graph.add_edge(parent_node, child_node, 2.into());

        let entry = "/home/test/banana/test.fsh".into();
        let result = view.trees_for_entry(&entry);
        assert!(
            matches!(result, Err(TreeError::NonTopLevel(_))),
            "expected a NonTopLevel error"
        );
    }
}
|
||||
|
|
11
server/workspace/src/tree.rs
Normal file
11
server/workspace/src/tree.rs
Normal file
|
@ -0,0 +1,11 @@
|
|||
use sourcefile::Sourcefile;
|
||||
|
||||
/// Placeholder for a materialized include tree.
///
/// NOTE(review): stub from the ongoing restructuring — iterating it always
/// panics via `todo!()`; do not use until implemented.
pub struct Tree {}

impl Iterator for Tree {
    type Item = Sourcefile;

    // Not implemented yet: unconditionally panics.
    fn next(&mut self) -> Option<Self::Item> {
        todo!()
    }
}
|
|
@ -1,185 +0,0 @@
|
|||
use std::{collections::HashMap, sync::Arc};
|
||||
|
||||
use anyhow::Result;
|
||||
use filesystem::{LFString, NormalizedPathBuf};
|
||||
use graph::{dfs, CachedStableGraph, FilialTuple, NodeIndex};
|
||||
use include_merger::MergeViewBuilder;
|
||||
use logging::{info, logger, warn, FutureExt};
|
||||
use opengl::{diagnostics_parser::DiagnosticsParser, TreeType};
|
||||
use sourcefile::{IncludeLine, SourceFile, SourceMapper};
|
||||
use tokio::sync::Mutex;
|
||||
use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, Position, Range};
|
||||
use url::Url;
|
||||
use workspace_tree::TreeError;
|
||||
|
||||
/// A single shaderpack workspace: the include-graph view of its files plus the
/// OpenGL context used to validate merged shader sources.
///
/// Both shared members are behind `tokio::sync::Mutex` because they are held
/// across `.await` points during linting.
pub struct Workspace<S: opengl::ShaderValidator> {
    // Workspace root directory on disk.
    pub root: NormalizedPathBuf,
    // Shared include-graph view of the workspace's files.
    workspace_view: Arc<Mutex<workspace_tree::WorkspaceTree>>,
    // graph: Arc<Mutex<CachedStableGraph<NormalizedPathBuf, IncludePosition>>>,
    // Shared GL context used for shader validation.
    gl_context: Arc<Mutex<S>>,
}
|
||||
|
||||
impl<S: opengl::ShaderValidator> Workspace<S> {
    /// Creates a workspace for `root`, taking ownership of the GL validator.
    pub fn new(root: NormalizedPathBuf, gl: S) -> Self {
        Workspace {
            workspace_view: Arc::new(Mutex::new(workspace_tree::WorkspaceTree::new(&root))),
            root,
            // graph: Arc::new(Mutex::new(CachedStableGraph::new())),
            gl_context: Arc::new(Mutex::new(gl)),
        }
    }

    /// Scans the workspace root and populates the include graph.
    pub async fn build(&self) {
        info!("initializing workspace"; "root" => &self.root);

        let mut tree = self.workspace_view.lock().with_logger(logger()).await;
        tree.build();

        info!("build graph"; "connected" => tree.num_connected_entries()/* , "disconnected" => tree.num_disconnected_entries() */);
    }

    /// Re-extracts includes for a single file after it changed on disk.
    pub async fn refresh_graph_for_file(&self, path: &NormalizedPathBuf) {
        let mut tree = self.workspace_view.lock().with_logger(logger()).await;

        // NOTE(review): `WorkspaceTree::update_sourcefile` elsewhere in this
        // commit takes `(path, text)` — this one-argument call looks out of
        // sync with that API; verify against the workspace_tree crate.
        tree.update_sourcefile(path);
    }

    /// Lints the file at `path` by merging every include tree it participates
    /// in, compiling the merged source, and mapping compiler output back to
    /// per-file LSP diagnostics.
    ///
    /// Returns a map of file URL -> diagnostics; files with no findings are
    /// back-filled with empty lists so stale diagnostics get cleared client-side.
    pub async fn lint(&self, path: &NormalizedPathBuf) -> Result<HashMap<Url, Vec<Diagnostic>>> {
        let mut workspace = self.workspace_view.lock().with_logger(logger()).await;

        // the set of all filepath->content.
        let mut all_sources: HashMap<NormalizedPathBuf, LFString> = HashMap::new();
        // the set of filepath->list of diagnostics to report
        let mut diagnostics: HashMap<Url, Vec<Diagnostic>> = HashMap::new();

        // we want to backfill the diagnostics map with all linked sources
        let back_fill = |all_sources: &HashMap<NormalizedPathBuf, LFString>, diagnostics: &mut HashMap<Url, Vec<Diagnostic>>| {
            for path in all_sources.keys() {
                diagnostics.entry(Url::from_file_path(path).unwrap()).or_default();
            }
        };

        // Eagerly collect the outer iterator so the per-tree Results can be
        // consumed one at a time below. A non-top-level entry is not an error
        // for the caller: lint just returns what it has so far.
        let trees = match workspace.trees_for_entry(path) {
            Ok(trees) => trees,
            Err(err) => {
                match err {
                    TreeError::NonTopLevel(e) => warn!("got a non-valid toplevel file"; "root_ancestor" => e, "stripped" => e.strip_prefix(&self.root), "path" => path),
                    e => return Err(e.into()),
                }
                back_fill(&all_sources, &mut diagnostics);
                return Ok(diagnostics);
            }
        }
        .collect::<Vec<_>>();

        for tree in trees {
            // Peekable so the tree root can be inspected before consumption.
            let mut tree = match tree {
                Ok(t) => t.peekable(),
                Err(e) => match e {
                    // dont care, didnt ask, skip
                    TreeError::NonTopLevel(_) => continue,
                    e => unreachable!("unexpected error {:?}", e),
                },
            };
            // let tree = match tree.and_then(|t| t.collect::<Result<Vec<_>, TreeError>>()) {
            //     Ok(t) => t,
            //     Err(e) => {
            //         match e {
            //             TreeError::NonTopLevel(f) => {
            //                 warn!("got a non-valid toplevel file"; "root_ancestor" => f, "stripped" => f.strip_prefix(&self.root));
            //                 continue;
            //             }
            //             TreeError::FileNotFound(f) => {
            //                 warn!("child not found"; "child" => f);
            //                 continue;
            //             }
            //             TreeError::DfsError(e) => {
            //                 diagnostics.insert(Url::from_file_path(path).unwrap(), vec![e.into()]);
            //                 back_fill(&all_sources, &mut diagnostics); // TODO: confirm
            //                 return Ok(diagnostics);
            //             }
            //             e => unreachable!("should only yield non-toplevel file error, got {:?}", e),
            //         };
            //     }
            // };

            let tree_size = tree.size_hint().0;

            let mut source_mapper = SourceMapper::new(tree_size); // very rough count

            // First tuple of a DFS is the tree root (parent == None).
            let root = tree
                .peek()
                .expect("expected non-zero sized tree")
                .as_ref()
                .expect("unexpected cycle or not-found node")
                .child;

            let (tree_type, document_glsl_version) = (
                root.path.extension().unwrap().into(),
                root.version().expect("fatal error parsing file for include version"),
            );

            // Drain the DFS, keeping resolved nodes and converting missing-file
            // errors into warning diagnostics on the importing file.
            let mut built_tree = Vec::with_capacity(tree_size);
            for entry in tree {
                match entry {
                    Ok(node) => built_tree.push(node),
                    Err(e) => match e {
                        TreeError::FileNotFound {
                            ref importing,
                            ref missing,
                        } => diagnostics
                            .entry(Url::from_file_path(importing).unwrap())
                            .or_default()
                            .push(Diagnostic {
                                range: Range::new(Position::new(0, 0), Position::new(0, u32::MAX)),
                                severity: Some(DiagnosticSeverity::WARNING),
                                source: Some("mcglsl".to_string()),
                                message: e.to_string(),
                                ..Diagnostic::default()
                            }),
                        TreeError::DfsError(_) => todo!(),
                        e => unreachable!("unexpected error {:?}", e),
                    },
                }
            }

            // Merge the tree into one compilable source string.
            let view = MergeViewBuilder::new(
                &built_tree,
                &mut source_mapper,
                self.gl_context.lock().with_logger(logger()).await.vendor().as_str().into(),
                document_glsl_version,
            )
            .build();

            // `None` means compilation produced no output at all; bail early.
            let stdout = match self.compile_shader_source(&view, tree_type, path).with_logger(logger()).await {
                Some(s) => s,
                None => {
                    back_fill(&all_sources, &mut diagnostics);
                    return Ok(diagnostics);
                }
            };

            // Map compiler output lines back onto the original files.
            diagnostics.extend(
                DiagnosticsParser::new(&*self.gl_context.lock().with_logger(logger()).await).parse_diagnostics_output(
                    stdout,
                    path,
                    &source_mapper,
                ),
            );
        }

        back_fill(&all_sources, &mut diagnostics);
        Ok(diagnostics)
    }

    /// Validates `source` against the GL context; returns the compiler's
    /// error output, or `None` when validation reported nothing.
    async fn compile_shader_source(&self, source: &str, tree_type: TreeType, path: &NormalizedPathBuf) -> Option<String> {
        let result = self.gl_context.lock().with_logger(logger()).await.validate(tree_type, source);
        match &result {
            Some(output) => {
                info!("compilation errors reported"; "errors" => format!("`{}`", output.replace('\n', "\\n")), "tree_root" => path)
            }
            None => info!("compilation reported no errors"; "tree_root" => path),
        };
        result
    }
}
|
|
@ -1,137 +0,0 @@
|
|||
use std::{ffi::OsStr, path::Path};
|
||||
|
||||
use filesystem::NormalizedPathBuf;
|
||||
use glob::{glob_with, MatchOptions};
|
||||
use logging::{info, error, FutureExt, logger};
|
||||
use tst::TSTMap;
|
||||
use walkdir::WalkDir;
|
||||
|
||||
use crate::workspace::Workspace;
|
||||
|
||||
/// Index of a workspace within `WorkspaceManager::workspaces`.
pub struct WorkspaceIndex(usize);
|
||||
|
||||
/// Discovers and owns all shaderpack [`Workspace`]s under a server root,
/// with a prefix map (`search`) for resolving a file path to its workspace.
///
/// NOTE(review): `#[derive(Default)]` requires `F: Default`, which no closure
/// type satisfies — the derive looks unusable alongside `new`; confirm whether
/// it can be dropped.
#[derive(Default)]
pub struct WorkspaceManager<G, F>
where
    G: opengl::ShaderValidator + Send,
    F: Fn() -> G
{
    // Workspace-root string prefix -> index into `workspaces`.
    search: TSTMap<WorkspaceIndex>,
    // All discovered workspaces, in discovery order.
    workspaces: Vec<Workspace<G>>,
    // Factory producing a fresh GL validator per workspace.
    gl_factory: F
}
|
||||
|
||||
impl <G, F> WorkspaceManager<G, F>
where
    G: opengl::ShaderValidator + Send,
    F: Fn() -> G
{
    /// Creates an empty manager; workspaces are added via `gather_workspaces`.
    pub fn new(gl_factory: F) -> Self {
        WorkspaceManager {
            search: Default::default(),
            workspaces: Default::default(),
            gl_factory
        }
    }

    /// Discovers shaderpack roots beneath `root` and registers a workspace for
    /// each.
    ///
    /// Two passes: first, every `shaders.properties` found directly under a
    /// `shaders/` dir (or under a `shaders/worldX/` dir) marks its enclosing
    /// pack as a workspace root; second, bare `shaders/` directories with no
    /// `shaders.properties` anywhere inside are registered as well.
    pub async fn gather_workspaces(&mut self, root: &NormalizedPathBuf) {
        let options = MatchOptions {
            case_sensitive: true,
            ..MatchOptions::default()
        };

        let glob = root.join("**").join("shaders.properties");
        // NOTE(review): placeholder log message ("banana") left in.
        info!("banana"; "glob" => &glob);

        for entry in glob_with(&glob.to_string(), options).unwrap() {
            match entry {
                // Case 1: <pack>/shaders/shaders.properties -> root is <pack>.
                Ok(path)
                    if path.file_name().and_then(OsStr::to_str) == Some("shaders.properties")
                        && path.parent().and_then(Path::file_name).and_then(OsStr::to_str) == Some("shaders") =>
                {
                    match path.parent().and_then(Path::parent).map(Into::into) {
                        Some(shader_root) => self.add_workspace(&shader_root).with_logger(logger()).await,
                        None => todo!(),
                    }
                }
                // Case 2: <pack>/shaders/worldX/shaders.properties -> root is <pack>.
                Ok(path)
                    if path.file_name().and_then(OsStr::to_str) == Some("shaders.properties")
                        && path
                            .parent()
                            .and_then(Path::file_name)
                            .and_then(OsStr::to_str)
                            .map_or(false, |f| f.starts_with("world"))
                        && path
                            .parent()
                            .and_then(Path::parent)
                            .and_then(Path::file_name)
                            .and_then(OsStr::to_str)
                            == Some("shaders") =>
                {
                    match path.parent().and_then(Path::parent).and_then(Path::parent).map(Into::into) {
                        Some(shader_root) => self.add_workspace(&shader_root).with_logger(logger()).await,
                        None => todo!(),
                    }
                }
                Ok(path) => {
                    let path: NormalizedPathBuf = path.into();
                    error!("shaders.properties found outside ./shaders or ./worldX dir"; "path" => path)
                }
                Err(e) => error!("error iterating glob entries"; "error" => format!("{:?}", e)),
            }
        }

        // Second pass: `shaders/` dirs containing no shaders.properties at all.
        let glob = root.join("**").join("shaders");
        for entry in glob_with(&glob.to_string(), options).unwrap() {
            match entry {
                Ok(path)
                    if !WalkDir::new(path.clone()).into_iter().any(|p| {
                        p.as_ref()
                            .ok()
                            .map(|p| p.file_name())
                            .and_then(|f| f.to_str())
                            .map_or(false, |f| f == "shaders.properties")
                    }) =>
                {
                    match path.parent().map(Into::into) {
                        Some(shader_root) => self.add_workspace(&shader_root).with_logger(logger()).await,
                        None => todo!(),
                    }
                }
                Ok(path) => {
                    let path: NormalizedPathBuf = path.into();
                    info!("skipping as already existing"; "path" => path)
                }
                Err(e) => error!("error iterating glob entries"; "error" => format!("{:?}", e)),
            }
        }
    }

    /// Registers and builds a workspace for `root`, unless one with the same
    /// root string is already known.
    async fn add_workspace(&mut self, root: &NormalizedPathBuf) {
        if !self.search.contains_key(&root.to_string()) {
            info!("adding workspace"; "root" => &root);
            let opengl_context = (self.gl_factory)();
            let workspace = Workspace::new(root.clone(), opengl_context);
            workspace.build().with_logger(logger()).await;
            self.workspaces.push(workspace);
            self.search.insert(&root.to_string(), WorkspaceIndex(self.workspaces.len() - 1));
        }
    }

    /// Resolves a file path to the workspace whose root is its longest
    /// registered prefix, or `None` if no workspace contains it.
    pub fn find_workspace_for_file(&self, file: &NormalizedPathBuf) -> Option<&Workspace<G>> {
        let file = file.to_string();
        let prefix = self.search.longest_prefix(&file);
        if prefix.is_empty() {
            return None;
        }

        match self.search.get(prefix) {
            Some(idx) => self.workspaces.get(idx.0),
            None => None,
        }
    }

    /// All currently registered workspaces.
    pub fn workspaces(&self) -> &[Workspace<G>] {
        &self.workspaces
    }
}
|
Loading…
Add table
Add a link
Reference in a new issue