diff --git a/.vscode/settings.json b/.vscode/settings.json
index 72fdbc6..e0759bf 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,15 +1,17 @@
 {
   "files.exclude": {
-    "out": false
+    "out": false,
+    "client/node_modules": true,
   },
   "npm.exclude": [
     "**/client",
     "**/server"
   ],
   "search.exclude": {
-    "out": true
+    "out": true,
+    "client/node_modules": true
   },
   "yaml.schemas": {
     "https://json.schemastore.org/github-workflow": [".github/*"]
-  }
+  },
 }
diff --git a/client/src/extension.ts b/client/src/extension.ts
index 9ecf923..5362c02 100644
--- a/client/src/extension.ts
+++ b/client/src/extension.ts
@@ -65,7 +65,7 @@ export class Extension {
   }
 
   fileAssociationsToGlob = (associations: string[]): string => {
-    return '**/*.{'.concat(
+    return '**/*{'.concat(
       associations.map(s => s.substring(s.indexOf('.'))).join(',')
     ) + '}'
   }
diff --git a/server/graph/src/graph.rs b/server/graph/src/graph.rs
index 2d59d12..2c5542e 100644
--- a/server/graph/src/graph.rs
+++ b/server/graph/src/graph.rs
@@ -1,5 +1,3 @@
-use anyhow::format_err;
-use anyhow::Result;
 use petgraph::stable_graph::EdgeIndex;
 use petgraph::stable_graph::NodeIndex;
 use petgraph::stable_graph::StableDiGraph;
@@ -12,6 +10,10 @@
 use std::hash::Hash;
 use std::ops::Index;
 use std::ops::IndexMut;
+#[derive(thiserror::Error, Debug)]
+#[error("node not found {0}")]
+pub struct NotFound<K>(K);
+
 /// Wraps a `StableDiGraph` with caching behaviour for node search by maintaining
 /// an index for node value to node index and a reverse index.
 /// This allows for **O(1)** lookup for a value if it exists, else **O(n)**.
@@ -55,8 +57,9 @@ where
     //     &self.graph
     // }
 
+    #[inline]
     pub fn parents(&self, node: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
-        self.graph.edges_directed(node, Direction::Incoming).map(|e| e.source())
+        self.graph.neighbors_directed(node, Direction::Incoming)
     }
 
     /// Returns the `NodeIndex` for a given graph node with the value of `name`
@@ -78,6 +81,11 @@
         }
     }
 
+    #[inline]
+    pub fn child_node_indexes(&self, node: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
+        self.graph.neighbors(node)
+    }
+
     /// Returns an iterator over all the edge values of type `V`'s between a parent and its child for all the
     /// positions that the child may be imported into the parent, in order of import.
     pub fn get_edges_between(&self, parent: NodeIndex, child: NodeIndex) -> impl DoubleEndedIterator<Item = V> + '_ {
@@ -124,6 +132,17 @@
         idx
     }
 
+    #[inline]
+    pub fn remove_node(&mut self, node: NodeIndex) -> Option<K> {
+        self.cache
+            .keys()
+            .find(|key| self.cache[key] == node)
+            .cloned()
+            .and_then(|key| self.cache.remove(&key));
+        // key.and_then(|key| self.cache.remove(&key));
+        self.graph.remove_node(node)
+    }
+
     /// Adds a directional edge of type `V` between `parent` and `child`.
     #[inline]
     pub fn add_edge(&mut self, parent: NodeIndex, child: NodeIndex, meta: V) -> EdgeIndex {
@@ -139,20 +158,10 @@ where
             .and_then(|edge| self.graph.remove_edge(edge));
     }
 
-    #[inline]
-    pub fn child_node_indexes(&self, node: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
-        self.graph.neighbors(node)
-    }
-
-    #[inline]
-    pub fn parent_node_indexes(&self, node: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
-        self.graph.neighbors_directed(node, Direction::Incoming)
-    }
-
-    pub fn root_ancestors_for_key(&mut self, path: &K) -> Result<Option<Vec<NodeIndex>>> {
+    pub fn root_ancestors_for_key(&mut self, path: &K) -> Result<Option<Vec<NodeIndex>>, NotFound<K>> {
         let node = match self.find_node(path) {
             Some(n) => n,
-            None => return Err(format_err!("node not found {:?}", path)),
+            None => return Err(NotFound(path.clone())),
         };
         Ok(self.root_ancestors(node))
     }
@@ -168,7 +177,7 @@ where
             return None;
         }
 
-        let parents: Vec<_> = self.parent_node_indexes(node).collect();
+        let parents: Vec<_> = self.parents(node).collect();
         let mut collection = Vec::with_capacity(parents.len());
 
         for ancestor in &parents {
@@ -176,7 +185,7 @@ where
         }
 
         for ancestor in &parents {
-            if self.parent_node_indexes(*ancestor).next().is_some() {
+            if self.parents(*ancestor).next().is_some() {
                 collection.extend(self.get_root_ancestors(initial, *ancestor, visited).unwrap_or_default());
             } else {
                 collection.push(*ancestor);
@@ -230,13 +239,6 @@ where
             .map(|n| self.reverse_index.get(&n).unwrap().clone())
             .collect()
     }
-
-    fn remove_node(&mut self, name: &K) {
-        let idx = self.cache.remove(name);
-        if let Some(idx) = idx {
-            self.graph.remove_node(idx);
-        }
-    }
 }
 
 impl<'a, K, V> From<&'a CachedStableGraph<K, V>> for &'a StableDiGraph<K, V>
@@ -280,7 +282,7 @@ mod graph_test {
         assert_eq!(parents.len(), 1);
         assert_eq!(parents[0], "sample");
 
-        let parents: Vec<_> = graph.parent_node_indexes(idx2).collect();
+        let parents: Vec<_> = graph.parents(idx2).collect();
         assert_eq!(parents.len(), 1);
         assert_eq!(parents[0], idx1);
 
@@ -291,7 +293,7 @@ mod graph_test {
         let ancestors = graph.root_ancestors(idx1).unwrap();
         assert_eq!(ancestors.len(), 0);
 
-        graph.remove_node(&"sample");
+        graph.remove_node(idx1);
 
         assert_eq!(graph.graph.node_count(), 1);
         assert!(graph.find_node(&"sample").is_none());
diff --git a/server/include_merger/src/merge_views.rs b/server/include_merger/src/merge_views.rs
index 42690ca..1bd2943 100644
--- a/server/include_merger/src/merge_views.rs
+++ b/server/include_merger/src/merge_views.rs
@@ -350,7 +350,7 @@ mod test {
     use pretty_assertions::assert_str_eq;
     use sourcefile::SourceMapper;
     use tempdir::TempDir;
-    use workspace::{TreeError, WorkspaceTree};
+    use workspace::{TreeError, WorkspaceTree, MaterializedTree};
 
     use crate::MergeViewBuilder;
 
@@ -388,7 +388,10 @@ mod test {
         let mut trees_vec = workspace
             .trees_for_entry(&final_path)
             .expect("expected successful tree initializing")
-            .collect::<Result<Vec<_>, TreeError>>()
+            .into_iter()
+            .filter_map(|treeish| treeish.ok())
+            .map(|imtree| imtree.collect())
+            .collect::<Result<Vec<MaterializedTree<'_>>, TreeError>>()
             .expect("expected successful tree-building");
 
         let mut trees = trees_vec.iter_mut();
@@ -396,10 +399,6 @@
         assert!(trees.next().is_none());
 
-        let tree = tree
-            .collect::<Result<Vec<_>, TreeError>>()
-            .expect("expected successful tree-building");
-
         let mut source_mapper = SourceMapper::new(2);
 
         let result = MergeViewBuilder::new(&tmp_path, &tree, &mut source_mapper).build();
 
@@ -432,7 +431,10 @@ mod test {
         let mut trees_vec = workspace
             .trees_for_entry(&final_path)
             .expect("expected successful tree initializing")
-            .collect::<Result<Vec<_>, TreeError>>()
+            .into_iter()
+            .filter_map(|treeish| treeish.ok())
+            .map(|imtree| imtree.collect())
+            .collect::<Result<Vec<MaterializedTree<'_>>, TreeError>>()
             .expect("expected successful tree-building");
 
         let mut trees = trees_vec.iter_mut();
@@ -440,10 +442,6 @@
         assert!(trees.next().is_none());
 
-        let tree = tree
-            .collect::<Result<Vec<_>, TreeError>>()
-            .expect("expected successful tree-building");
-
         let mut source_mapper = SourceMapper::new(2);
 
         let result = MergeViewBuilder::new(&tmp_path, &tree, &mut source_mapper).build();
 
@@ -474,7 +472,10 @@
         let mut trees_vec = workspace
             .trees_for_entry(&final_path)
             .expect("expected successful tree initializing")
-            .collect::<Result<Vec<_>, TreeError>>()
+            .into_iter()
+            .filter_map(|treeish| treeish.ok())
+            .map(|imtree| imtree.collect())
+            .collect::<Result<Vec<MaterializedTree<'_>>, TreeError>>()
             .expect("expected successful tree-building");
 
         let mut trees = trees_vec.iter_mut();
@@ -482,10 +483,6 @@
         assert!(trees.next().is_none());
 
-        let tree = tree
-            .collect::<Result<Vec<_>, TreeError>>()
-            .expect("expected successful tree-building");
-
         let mut source_mapper = SourceMapper::new(2);
 
         let result = MergeViewBuilder::new(&tmp_path, &tree, &mut source_mapper).build();
 
@@ -516,7 +513,10 @@
         let mut trees_vec = workspace
             .trees_for_entry(&final_path)
             .expect("expected successful tree initializing")
-            .collect::<Result<Vec<_>, TreeError>>()
+            .into_iter()
+            .filter_map(|treeish| treeish.ok())
+            .map(|imtree| imtree.collect())
+            .collect::<Result<Vec<MaterializedTree<'_>>, TreeError>>()
             .expect("expected successful tree-building");
 
         let mut trees = trees_vec.iter_mut();
@@ -524,10 +524,6 @@
         assert!(trees.next().is_none());
 
-        let tree = tree
-            .collect::<Result<Vec<_>, TreeError>>()
-            .expect("expected successful tree-building");
-
         let mut source_mapper = SourceMapper::new(2);
 
         let result = MergeViewBuilder::new(&tmp_path, &tree, &mut source_mapper).build();
 
@@ -566,7 +562,10 @@
         let mut trees_vec = workspace
             .trees_for_entry(&final_path)
             .expect("expected successful tree initializing")
-            .collect::<Result<Vec<_>, TreeError>>()
+            .into_iter()
+            .filter_map(|treeish| treeish.ok())
+            .map(|imtree| imtree.collect())
+            .collect::<Result<Vec<MaterializedTree<'_>>, TreeError>>()
            .expect("expected successful tree-building");
 
         let mut trees = trees_vec.iter_mut();
@@ -574,10 +573,6 @@
         assert!(trees.next().is_none());
 
-        let tree = tree
-            .collect::<Result<Vec<_>, TreeError>>()
-            .expect("expected successful tree-building");
-
         let mut source_mapper = SourceMapper::new(2);
 
         let result = MergeViewBuilder::new(&tmp_path, &tree, &mut source_mapper).build();
 
diff --git a/server/logging/src/lib.rs b/server/logging/src/lib.rs
index dc73918..38f32c3 100644
--- a/server/logging/src/lib.rs
+++ b/server/logging/src/lib.rs
@@ -21,7 +21,6 @@ pub fn new_trace_id() -> String {
 }
 
 pub fn init_logger() -> GlobalLoggerGuard {
-    slog_stdlog::init_with_level(log::Level::Debug).unwrap();
     slog_scope::set_global_logger(Logger::root(&*DRAIN_SWITCH, o!()))
 }
 
diff --git a/server/server/src/server.rs b/server/server/src/server.rs
index aa19a16..377670d 100644
--- a/server/server/src/server.rs
+++ b/server/server/src/server.rs
@@ -1,10 +1,10 @@
-use std::{collections::HashMap, ffi::OsStr, marker::Sync, path::Path, sync::Arc, str::FromStr};
+use std::{collections::HashMap, ffi::OsStr, marker::Sync, path::Path, str::FromStr, sync::Arc};
 
 use filesystem::NormalizedPathBuf;
 use futures::future::join_all;
 use logging::{error, info, logger, trace, warn, FutureExt};
 use serde::Deserialize;
-use serde_json::{Value, from_value};
+use serde_json::{from_value, Value};
 
 use tokio::sync::Mutex;
 
@@ -37,7 +37,7 @@ where
     pub client: Arc>,
     workspaces: Arc>>>>,
     gl_factory: F,
-    _log_guard: logging::GlobalLoggerGuard
+    _log_guard: logging::GlobalLoggerGuard,
 }
 
 impl Server
 where
@@ -50,7 +50,7 @@
             client: Arc::new(Mutex::new(client)),
             workspaces: Default::default(),
             gl_factory,
-            _log_guard: logging::init_logger()
+            _log_guard: logging::init_logger(),
         }
     }
 }
@@ -110,16 +110,6 @@ where
         })
     }
 
-    async fn initialized(&self, _: InitializedParams) {
-        // self.client
-        //     .lock()
-        //     .with_logger(logger())
-        //     .await
-        //     .log_message(MessageType::INFO, "command executed!")
-        //     .with_logger(logger())
-        //     .await;
-    }
-
     async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> {
         warn!("shutting down language server...");
         Ok(())
@@ -131,16 +121,20 @@ where
 
         let path: NormalizedPathBuf = params.text_document.uri.into();
 
-        if let Some(workspace) = self.workspace_for_file(&path).await {
-            trace!("found workspace"; "root" => &workspace.root);
+        if let Some(meta) = self.workspace_for_file(&path).await {
+            trace!("found workspace"; "root" => &meta.root);
 
-            workspace
+            match meta
                 .update_sourcefile(&path, params.text_document.text)
                 .with_logger(logger())
-                .await;
-
-            match workspace.lint(&path).with_logger(logger()).await {
-                Ok(diagnostics) => self.publish_diagnostic(diagnostics, None).with_logger(logger()).await,
+                .await
+            {
+                Ok(diagnostics) => {
+                    let client = self.client.lock().with_logger(logger()).await;
+                    Server::::publish_diagnostic(&client, diagnostics, None)
+                        .with_logger(logger())
+                        .await
+                }
                 Err(e) => error!("error linting"; "error" => format!("{:?}", e), "path" => &path),
             }
         }
 
@@ -155,10 +149,17 @@ where
             Some(workspace) => {
                 trace!("found workspace"; "root" => &workspace.root);
 
-                workspace.update_sourcefile(&path, params.text.unwrap()).with_logger(logger()).await;
-
-                match workspace.lint(&path).with_logger(logger()).await {
-                    Ok(diagnostics) => self.publish_diagnostic(diagnostics, None).with_logger(logger()).await,
+                match workspace
+                    .update_sourcefile(&path, params.text.unwrap())
+                    .with_logger(logger())
+                    .await
+                {
+                    Ok(diagnostics) => {
+                        let client = self.client.lock().with_logger(logger()).await;
+                        Server::::publish_diagnostic(&client, diagnostics, None)
+                            .with_logger(logger())
+                            .await
+                    }
                     Err(e) => error!("error linting"; "error" => format!("{:?}", e), "path" => &path),
                 }
             }
@@ -166,6 +167,29 @@ where
         }
     }
 
+    #[logging::with_trace_id]
+    async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) {
+        for event in params.changes {
+            eprintln!("NEW EVENT {:?}", event);
+            if let FileChangeType::DELETED = event.typ {
+                let document_path: NormalizedPathBuf = event.uri.path().into();
+                let workspace = match self.workspace_for_file(&document_path).await {
+                    Some(meta) => meta,
+                    None => continue,
+                };
+                match workspace.delete_sourcefile(&document_path).await {
+                    Ok(diagnostics) => {
+                        let client = self.client.lock().with_logger(logger()).await;
+                        Server::::publish_diagnostic(&client, diagnostics, None)
+                            .with_logger(logger())
+                            .await
+                    }
+                    Err(e) => error!("error linting"; "error" => format!("{:?}", e), "path" => &document_path),
+                }
+            }
+        }
+    }
+
     #[logging::with_trace_id]
     async fn execute_command(&self, params: ExecuteCommandParams) -> tower_lsp::jsonrpc::Result<Option<Value>> {
         match params.command.as_str() {
@@ -185,7 +209,7 @@ where
                 let workspace = self.workspace_for_file(&document_path).await.unwrap();
 
                 let mut workspace_view = workspace.workspace_view.lock().with_logger(logger()).await;
-                let mut roots = workspace_view.trees_for_entry(&document_path).unwrap();
+                let mut roots = workspace_view.trees_for_entry(&document_path).unwrap().into_iter();
 
                 let root = roots.next().unwrap();
                 if roots.next().is_some() {
                     return Err(Error {
@@ -441,9 +465,14 @@ where
                 workspace: Some(WorkspaceServerCapabilities {
                     workspace_folders: Some(WorkspaceFoldersServerCapabilities{
                         supported: Some(true),
-                        change_notifications: Some(OneOf::Left(false)),
+                        change_notifications: Some(OneOf::Left(true)),
+                    }),
+                    file_operations: Some(WorkspaceFileOperationsServerCapabilities {
+                        did_delete: Some(FileOperationRegistrationOptions {
+                            filters: vec![FileOperationFilter{ scheme: None, pattern: FileOperationPattern { glob: "**/*".into(), matches: None, options: None } }]
+                        }),
+                        ..Default::default()
                     }),
-                    file_operations: None,
                 }),
                 semantic_tokens_provider: Some(
                     SemanticTokensOptions {
@@ -536,21 +565,17 @@ where
     async fn add_workspace(&self, root: &NormalizedPathBuf) {
         let mut search = self.workspaces.lock().with_logger(logger()).await;
-        // let mut workspaces = self.workspaces.lock().with_logger(logger()).await;
 
         if !search.contains_key(&root.to_string()) {
             info!("adding workspace"; "root" => &root);
             let opengl_context = (self.gl_factory)();
             let workspace = Workspace::new(root.clone(), opengl_context);
             workspace.build().with_logger(logger()).await;
-            // workspaces.push(workspace);
-            // search.insert(&root.to_string(), WorkspaceIndex(workspaces.len() - 1));
             search.insert(&root.to_string(), Arc::new(workspace));
         }
     }
 
-    async fn publish_diagnostic(&self, diagnostics: HashMap<Url, Vec<Diagnostic>>, document_version: Option<i32>) {
-        let client = self.client.lock().with_logger(logger()).await;
+    async fn publish_diagnostic(client: &Client, diagnostics: HashMap<Url, Vec<Diagnostic>>, document_version: Option<i32>) {
         let mut handles = Vec::with_capacity(diagnostics.len());
         for (url, diags) in diagnostics {
             handles.push(client.publish_diagnostics(url, diags, document_version));
@@ -560,7 +585,6 @@ where
     pub async fn workspace_for_file(&self, file: &NormalizedPathBuf) -> Option>> {
         let search = self.workspaces.lock().with_logger(logger()).await;
-        // let workspaces = self.workspaces.lock().with_logger(logger()).await;
         let file = file.to_string();
 
         let prefix = search.longest_prefix(&file);
diff --git a/server/server/src/workspace.rs b/server/server/src/workspace.rs
index f588257..8ad2f65 100644
--- a/server/server/src/workspace.rs
+++ b/server/server/src/workspace.rs
@@ -12,20 +12,17 @@
 use url::Url;
 use workspace::TreeError;
 
 pub struct Workspace {
-    pub root: NormalizedPathBuf,
-    // temporarily public
-    pub workspace_view: Arc>,
-    // graph: Arc>>,
-    pub gl_context: Arc>,
+    pub(super) root: NormalizedPathBuf,
+    pub(super) workspace_view: Arc>,
+    gl_context: Arc>,
 }
 
 impl Workspace {
     pub fn new(root: NormalizedPathBuf, gl: S) -> Self {
         Workspace {
             workspace_view: Arc::new(Mutex::new(workspace::WorkspaceTree::new(&root))),
-            root,
-            // graph: Arc::new(Mutex::new(CachedStableGraph::new())),
             gl_context: Arc::new(Mutex::new(gl)),
+            root,
         }
     }
 
@@ -38,17 +35,67 @@ impl Workspace {
         info!("build graph"; "connected" => tree.num_connected_entries()/* , "disconnected" => tree.num_disconnected_entries() */);
     }
 
-    pub async fn update_sourcefile(&self, path: &NormalizedPathBuf, text: String) {
-        let mut tree = self.workspace_view.lock().with_logger(logger()).await;
-
-        tree.update_sourcefile(path, text);
-    }
-
-    pub async fn lint(&self, path: &NormalizedPathBuf) -> Result<HashMap<Url, Vec<Diagnostic>>> {
+    pub async fn delete_sourcefile(&self, path: &NormalizedPathBuf) -> Result<HashMap<Url, Vec<Diagnostic>>> {
+        info!("path deleted on filesystem"; "path" => path);
+
         let mut workspace = self.workspace_view.lock().with_logger(logger()).await;
 
-        // TODO: re-lint any removed files
+        // need to get the old trees first so we know what to lint to remove now stale diagnostics
+        let old_roots: Vec<NormalizedPathBuf> = match workspace.trees_for_entry(path) {
+            Ok(trees) => trees,
+            Err(_) => {
+                warn!("path not known to the workspace, this might be a bug"; "path" => path);
+                return Ok(HashMap::new());
+            }
+        }
+        .into_iter()
+        .filter_map(|maybe_tree| maybe_tree.ok())
+        // want to extract the root of each tree so we can build the trees _after_ removing the deleted file
+        .map(|mut tree| tree.next().expect("unexpected zero-sized tree").unwrap().child.path.clone())
+        .collect::<Vec<_>>();
+        info!("found existing roots"; "roots" => format!("{:?}", old_roots));
+
+        workspace.remove_sourcefile(path);
+
+        let mut all_diagnostics: HashMap<Url, Vec<Diagnostic>> = HashMap::new();
+
+        for old_root in old_roots {
+            let new_trees = workspace.trees_for_entry(&old_root).expect("should be a known existing path");
+            assert_eq!(new_trees.len(), 1, "root should not be able to yield more than one tree");
+            let tree = new_trees.into_iter().next().unwrap().expect("should be a top-level path").collect();
+            all_diagnostics.extend(self.lint(path, tree).with_logger(logger()).await);
+        }
+
+        Ok(all_diagnostics)
+    }
+
+    pub async fn update_sourcefile(&self, path: &NormalizedPathBuf, text: String) -> Result<HashMap<Url, Vec<Diagnostic>>> {
+        let mut workspace = self.workspace_view.lock().with_logger(logger()).await;
+
+        workspace.update_sourcefile(path, text);
+
+        let mut all_diagnostics: HashMap<Url, Vec<Diagnostic>> = HashMap::new();
+
+        for tree in match workspace.trees_for_entry(path) {
+            Ok(trees) => trees,
+            Err(err) => {
+                return Err(err.into());
+                // back_fill(Box::new(all_sources.keys()), &mut diagnostics);
+                // return Ok(diagnostics);
+            }
+        }
+        .into_iter()
+        .filter_map(|maybe_tree| maybe_tree.ok())
+        .map(|tree| tree.collect()) {
+            all_diagnostics.extend(self.lint(path, tree).with_logger(logger()).await);
+        }
+
+        Ok(all_diagnostics)
+    }
+
+    async fn lint<'a>(
+        &'a self, path: &'a NormalizedPathBuf, tree: Result<MaterializedTree<'a>, TreeError>,
+    ) -> HashMap<Url, Vec<Diagnostic>> {
         // the set of filepath->list of diagnostics to report
         let mut diagnostics: HashMap<Url, Vec<Diagnostic>> = HashMap::new();
 
@@ -64,98 +111,64 @@ impl Workspace {
             }
         };
 
-        let trees = match workspace.trees_for_entry(path) {
-            Ok(trees) => trees,
-            Err(err) => {
-                match err {
-                    TreeError::NonTopLevel(e) => warn!("got a non-valid toplevel file"; "root_ancestor" => e, "stripped" => e.strip_prefix(&self.root), "path" => path),
-                    e => return Err(e.into()),
-                }
-                // back_fill(Box::new(all_sources.keys()), &mut diagnostics);
-                return Ok(diagnostics);
-            }
-        }
-        .collect::<Vec<_>>();
-
         let gpu_vendor: GPUVendor = self.gl_context.lock().with_logger(logger()).await.vendor().as_str().into();
 
-        for tree in trees {
-            let mut tree = match tree {
-                Ok(t) => t.peekable(),
-                Err(e) => match e {
-                    // dont care, didnt ask, skip
-                    TreeError::NonTopLevel(_) => continue,
-                    e => unreachable!("unexpected error {:?}", e),
-                },
-            };
-
-            let tree_size = tree.size_hint().0;
-
-            let mut source_mapper = SourceMapper::new(tree_size); // very rough count
-
-            let root = tree
-                .peek()
-                .expect("expected non-zero sized tree")
-                .as_ref()
-                .expect("unexpected cycle or not-found node")
-                .child;
-
-            let (tree_type, document_glsl_version) = (
-                root.path.extension().unwrap().into(),
-                root.version().expect("fatal error parsing file for include version"),
-            );
-
-            let mut built_tree = Vec::with_capacity(tree_size);
-            for entry in tree {
-                match entry {
-                    Ok(node) => built_tree.push(node),
-                    Err(e) => match e {
-                        TreeError::FileNotFound { ref importing, .. } => {
-                            let diag = Diagnostic {
-                                range: Range::new(Position::new(0, 0), Position::new(0, u32::MAX)),
-                                severity: Some(DiagnosticSeverity::WARNING),
-                                source: Some("mcglsl".to_string()),
-                                message: e.to_string(),
-                                ..Diagnostic::default()
-                            };
-                            eprintln!("NOT FOUND {:?} {:?}", importing, diag);
-                            diagnostics.entry(Url::from_file_path(importing).unwrap()).or_default().push(diag)
-                        }
-                        TreeError::DfsError(e) => {
-                            diagnostics.entry(Url::from_file_path(path).unwrap()).or_default().push(e.into());
-                            return Ok(diagnostics);
-                        }
-                        e => unreachable!("unexpected error {:?}", e),
-                    },
+        let tree = match tree {
+            Ok(tree) => tree,
+            Err(e) => match e {
+                TreeError::FileNotFound { ref importing, .. } => {
+                    let diag = Diagnostic {
+                        range: Range::new(Position::new(0, 0), Position::new(0, u32::MAX)),
+                        severity: Some(DiagnosticSeverity::WARNING),
+                        source: Some("mcglsl".to_string()),
+                        message: e.to_string(),
+                        ..Diagnostic::default()
+                    };
+                    // eprintln!("NOT FOUND {:?} {:?}", importing, diag);
+                    diagnostics.entry(Url::from_file_path(importing).unwrap()).or_default().push(diag);
+                    return diagnostics;
                 }
-            }
-
-            let view = MergeViewBuilder::new(&self.root, &built_tree, &mut source_mapper).build();
-
-            let stdout = match self.compile_shader_source(&view, tree_type, path).with_logger(logger()).await {
-                Some(s) => s,
-                None => {
-                    let paths: Vec<_> = built_tree.iter().map(|s| &s.child.path).collect();
-                    back_fill(&paths, &mut diagnostics);
-                    return Ok(diagnostics);
+                TreeError::DfsError(e) => {
+                    diagnostics.entry(Url::from_file_path(path).unwrap()).or_default().push(e.into());
+                    return diagnostics;
                 }
-            };
+            },
+        };
 
-            for diagnostic in DiagnosticsParser::new(gpu_vendor, document_glsl_version).parse_diagnostics_output(
-                stdout,
-                path,
-                &source_mapper,
-                &built_tree.iter().map(|tup| (&tup.child.path, tup.child)).collect(),
-            ) {
-                diagnostics.entry(diagnostic.0).or_default().extend(diagnostic.1);
+        let mut source_mapper = SourceMapper::new(tree.len()); // very rough count
+
+        let root = tree.first().expect("expected non-zero sized tree").child;
+
+        let (tree_type, document_glsl_version) = (
+            root.path.extension().unwrap().into(),
+            root.version().expect("fatal error parsing file for include version"),
+        );
+
+        let view = MergeViewBuilder::new(&self.root, &tree, &mut source_mapper).build();
+
+        let stdout = match self.compile_shader_source(&view, tree_type, path).with_logger(logger()).await {
+            Some(s) => s,
+            None => {
+                let paths: Vec<_> = tree.iter().map(|s| &s.child.path).collect();
+                back_fill(&paths, &mut diagnostics);
+                return diagnostics;
             }
-            };
+        };
+
+        for diagnostic in DiagnosticsParser::new(gpu_vendor, document_glsl_version).parse_diagnostics_output(
+            stdout,
+            path,
+            &source_mapper,
+            &tree.iter().map(|tup| (&tup.child.path, tup.child)).collect(),
+        ) {
+            diagnostics.entry(diagnostic.0).or_default().extend(diagnostic.1);
         }
+        let paths: Vec<_> = tree.iter().map(|s| &s.child.path).collect();
+        back_fill(&paths, &mut diagnostics);
         eprintln!("DIAGS {:?}", diagnostics);
         // back_fill(Box::new(all_sources.keys()), &mut diagnostics);
-        Ok(diagnostics)
+        diagnostics
     }
 
     async fn compile_shader_source(&self, source: &str, tree_type: TreeType, path: &NormalizedPathBuf) -> Option<String> {
diff --git a/server/workspace/src/lib.rs b/server/workspace/src/lib.rs
index 57126d1..fb43a5e 100644
--- a/server/workspace/src/lib.rs
+++ b/server/workspace/src/lib.rs
@@ -1,5 +1,6 @@
 #![feature(result_flattening)]
 #![feature(arc_unwrap_or_clone)]
+#![feature(type_alias_impl_trait)]
 
 use std::{
     collections::{hash_map::Entry, HashMap, HashSet},
@@ -24,21 +25,39 @@ pub struct WorkspaceTree {
     sources: HashMap<NormalizedPathBuf, Sourcefile>,
 }
 
+// #[derive(thiserror::Error, Debug)]
+// pub enum TreesGenError {
+//     #[error("got a non-valid top-level file: {0}")]
+//     NonTopLevel(NormalizedPathBuf),
+//     #[error(transparent)]
+//     PathNotFound(#[from] graph::NotFound),
+// }
+
 #[derive(thiserror::Error, Debug)]
 pub enum TreeError {
-    #[error("got a non-valid top-level file")]
-    NonTopLevel(NormalizedPathBuf),
+    #[error(transparent)]
+    DfsError(#[from] CycleError),
     #[error("file {missing} not found; imported by {importing}.")]
     FileNotFound {
         importing: NormalizedPathBuf,
        missing: NormalizedPathBuf,
     },
-    #[error(transparent)]
-    DfsError(#[from] CycleError),
-    #[error(transparent)]
-    Other(#[from] anyhow::Error),
+    // #[error(transparent)]
+    // PathNotFound(#[from] graph::NotFound),
 }
 
+pub type MaterializedTree<'a> = Vec>;
+
+pub type ImmaterializedTree<'a> = impl Iterator, TreeError>>;
+
+#[derive(thiserror::Error, Debug)]
+#[error("got a non-valid top-level file: {0}")]
+pub struct NonTopLevelError(NormalizedPathBuf);
+
+pub type SingleTreeGenResult<'a> = Result<ImmaterializedTree<'a>, NonTopLevelError>;
+
+pub type AllTreesGenResult<'a> = Result<Vec<SingleTreeGenResult<'a>>, graph::NotFound<NormalizedPathBuf>>;
+
 impl WorkspaceTree {
     pub fn new(root: &NormalizedPathBuf) -> Self {
         WorkspaceTree {
@@ -99,14 +118,14 @@ impl WorkspaceTree {
                 // file and add a file->includes KV into the map
                 match entry {
                     GraphEntry::TopLevel(file) => {
-                        eprintln!("TOP LEVEL {}", file.path);
+                        // eprintln!("TOP LEVEL {}", file.path);
                         let path = file.path.clone();
                         // roots.push(file.clone());
                         // self.sources.insert(path.clone(), file);
                         self.update_sourcefile(&path, file.source);
                     }
                     GraphEntry::Leaf(file) => {
-                        eprintln!("LEAF {}", file.path);
+                        // eprintln!("LEAF {}", file.path);
                         let path = file.path.clone();
                         // self.sources.insert(path.clone(), file);
                         self.update_sourcefile(&path, file.source);
@@ -123,18 +142,12 @@ impl WorkspaceTree {
    /// Error modes:
    ///   - Top [`Result`]
    ///     - The node is not known to the workspace
-    ///     - The node has no ancestors but is not a known valid top-level file
    ///   - Middle [`Result`] (only for >1 ancestor)
    ///     - A non-valid top-level ancestor was found
    ///   - Bottom [`Result`]
    ///     - A cycle was detected while iterating
    ///     - A node was not found on the filesystem while synthesizing a Sourcefile instance
-    pub fn trees_for_entry<'a>(
-        &'a mut self, entry: &'a NormalizedPathBuf,
-    ) -> Result<
-        impl Iterator, TreeError>> + '_, TreeError>> + '_,
-        TreeError,
-    > {
+    pub fn trees_for_entry<'a>(&'a mut self, entry: &NormalizedPathBuf) -> AllTreesGenResult<'a> {
         let root_ancestors = self.graph.root_ancestors_for_key(entry)?.unwrap_or_default();
 
         let mut trees = Vec::with_capacity(root_ancestors.len().max(1));
@@ -175,7 +188,8 @@ impl WorkspaceTree {
         if root_ancestors.is_empty() {
             if !is_top_level(&entry.strip_prefix(&self.root)) {
-                return Err(TreeError::NonTopLevel(entry.clone()));
+                trees.push(Err(NonTopLevelError(entry.clone())));
+                return Ok(trees);
             }
 
             let dfs = Dfs::new(&self.graph, node)
@@ -188,7 +202,7 @@ impl WorkspaceTree {
             let root_path = &self.graph[root];
             if !is_top_level(&root_path.strip_prefix(&self.root)) {
                 warn!("got a non-valid toplevel file"; "root_ancestor" => root_path);
-                trees.push(Err(TreeError::NonTopLevel(root_path.clone())));
+                trees.push(Err(NonTopLevelError(root_path.clone())));
                 continue;
             }
 
@@ -200,7 +214,7 @@ impl WorkspaceTree {
             }
         }
 
-        Ok(trees.into_iter())
+        Ok(trees)
     }
 
     /// updates the set of GLSL files connected to the given file, moving unreferenced
@@ -211,12 +225,12 @@
                 entry.insert(Sourcefile::new(text, path.clone(), self.root.clone()));
             }
         };
-        let file = self.sources.get(path).unwrap();
-        let includes = file.includes().unwrap();
-        info!("includes found for file"; "file" => &file.path, "includes" => format!("{:?}", includes));
+        let includes = self.sources.get(path).unwrap().includes().unwrap();
 
-        let idx = self.graph.add_node(&file.path);
+        info!("includes found for file"; "file" => path, "includes" => format!("{:?}", includes));
+
+        let idx = self.graph.add_node(path);
 
         let prev_children: HashSet<_> =
             HashSet::from_iter(self.graph.get_all_edges_from(idx).map(|tup| (self.graph[tup.0].clone(), tup.1)));
@@ -246,11 +260,24 @@ impl WorkspaceTree {
                 self.graph.add_edge(idx, child, position);
             }
         }
+
+    pub fn remove_sourcefile(&mut self, path: &NormalizedPathBuf) {
+        let idx = self
+            .graph
+            .find_node(path)
+            .unwrap_or_else(|| panic!("path {:?} wasn't in the graph to begin with???", path));
+
+        self.disconnected.remove(path);
+        self.sources.remove(path);
+        self.graph.remove_node(idx);
+
+        info!("removed file from graph"; "file" => path);
+    }
 }
 
 #[cfg(test)]
 mod test {
-    use crate::{TreeError, WorkspaceTree};
+    use crate::WorkspaceTree;
 
     #[test]
     fn test_trees() {
@@ -261,12 +288,6 @@ mod test {
         let parent = "/home/test/banana/test.fsh".into();
 
         let trees = view.trees_for_entry(&parent);
-        match trees {
-            Ok(_) => panic!("unexpected Ok result"),
-            Err(e) => match e {
-                TreeError::NonTopLevel(_) => {}
-                _ => panic!("unexpected error {:?}", e),
-            },
-        }
+        trees.unwrap()[0].as_ref().err().expect("unexpected Ok result");
     }
 }
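
Usage sketch for the reshaped `trees_for_entry` API in the hunks above: it now returns a `Vec` of per-root results up front instead of a lazy iterator of iterators, where each element is either a `NonTopLevelError` or an iterator that still has to be collected into a `MaterializedTree`. The helper below is a minimal sketch of that consumption pattern, assuming only the `workspace` and `filesystem` crate items visible in this diff; the function name `materialize_all` and its bindings are hypothetical, not part of the patch.

    use filesystem::NormalizedPathBuf;
    use workspace::{MaterializedTree, TreeError, WorkspaceTree};

    // Hypothetical caller: materialize every valid include tree that `entry`
    // participates in, mirroring the chain used by the merge_views tests above.
    fn materialize_all<'a>(
        workspace: &'a mut WorkspaceTree,
        entry: &NormalizedPathBuf,
    ) -> Result<Vec<MaterializedTree<'a>>, TreeError> {
        workspace
            .trees_for_entry(entry)
            .expect("entry should already be registered in the workspace graph")
            .into_iter()
            // roots that are not valid top-level files arrive as Err(NonTopLevelError); skip them
            .filter_map(|treeish| treeish.ok())
            // each surviving item is still a lazy iterator; collecting it walks the include graph
            .map(|imtree| imtree.collect())
            // a cycle or a missing include surfaces as a TreeError at this point
            .collect()
    }

This is the same filter_map/map/collect chain the updated merge_views tests and `Workspace::update_sourcefile` use; a caller that cares about non-top-level roots can match on the `Err` variant instead of discarding it.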