improve import cycle diagnostic (very cool now) and improve some code boundaries

Noah Santschi-Cooney 2023-03-11 21:29:31 +00:00
parent b604140abc
commit ead5486374
No known key found for this signature in database
GPG key ID: 3B22282472C8AE48
9 changed files with 334 additions and 205 deletions

View file

@@ -92,6 +92,12 @@ impl Display for NormalizedPathBuf {
     }
 }
 
+impl From<NormalizedPathBuf> for Url {
+    fn from(val: NormalizedPathBuf) -> Self {
+        Url::from_file_path(val).unwrap()
+    }
+}
+
 impl From<Url> for NormalizedPathBuf {
     #[cfg(target_family = "windows")]
     fn from(u: Url) -> Self {
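The new From<NormalizedPathBuf> for Url impl above lets call sites turn a workspace path into an LSP Url with a plain .into(). A minimal usage sketch, not part of the commit; the helper name is made up, and it assumes the path is absolute, since Url::from_file_path (and therefore the unwrap) rejects relative paths:

// Hypothetical helper: key a diagnostics map by Url.
fn diagnostics_key(path: NormalizedPathBuf) -> Url {
    // Equivalent to Url::from_file_path(path).unwrap(), via the new From impl.
    path.into()
}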

View file

@@ -33,7 +33,7 @@ where
 impl<'a, K, V> Dfs<'a, K, V>
 where
     K: Hash + Clone + Display + Eq + Debug,
-    V: Ord + Copy,
+    V: Hash + Ord + Copy + Debug,
 {
     pub fn new(graph: &'a CachedStableGraph<K, V>, start: NodeIndex) -> Self {
         Dfs {
@@ -54,7 +54,7 @@ where
         }
     }
 
-    fn check_for_cycle(&self, children: &[NodeIndex]) -> Result<(), CycleError<K>> {
+    fn check_for_cycle(&self, children: &[NodeIndex]) -> Result<(), CycleError<K, V>> {
         for prev in &self.cycle {
             for child in children {
                 if prev.node == *child {
@@ -70,11 +70,11 @@ where
 impl<'a, K, V> Iterator for Dfs<'a, K, V>
 where
     K: Hash + Clone + Display + Eq + Debug,
-    V: Ord + Copy,
+    V: Hash + Debug + Ord + Copy,
 {
-    type Item = Result<FilialTuple<NodeIndex>, CycleError<K>>;
+    type Item = Result<FilialTuple<NodeIndex, V>, CycleError<K, V>>;
 
-    fn next(&mut self) -> Option<Result<FilialTuple<NodeIndex>, CycleError<K>>> {
+    fn next(&mut self) -> Option<Result<FilialTuple<NodeIndex, V>, CycleError<K, V>>> {
         let parent = self.cycle.last().map(|p| p.node);
 
         if let Some(child) = self.stack.pop() {
@@ -99,66 +99,161 @@ where
                 self.reset_path_to_branch();
             }
 
-            return Some(Ok(FilialTuple { child, parent }));
+            let edges = parent.map(|p| self.graph.get_edges_between(p, child).collect()).unwrap_or_default();
+            return Some(Ok(FilialTuple { child, parent, edges }));
         }
         None
     }
 }
 
-use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, Position, Range};
+use tower_lsp::lsp_types::{Diagnostic, DiagnosticRelatedInformation, DiagnosticSeverity, Location, Position, Range, Url};
 
 use std::{error::Error as StdError, fmt::Display};
 
 #[derive(Debug)]
-// TODO: how can we include the line-of-import
-pub struct CycleError<K>(Vec<K>);
+pub struct CycleError<K, V>(Vec<FilialTuple<K, V>>);
 
-impl<K> StdError for CycleError<K> where K: Display + Debug {}
+impl<K, V> StdError for CycleError<K, V>
+where
+    K: Display + Debug,
+    V: Debug,
+{
+}
 
-impl<K> CycleError<K>
+impl<K, V> CycleError<K, V>
 where
     K: Hash + Clone + Eq + Debug,
+    V: Hash + Debug + Ord + Copy,
 {
-    pub fn new<V>(nodes: &[NodeIndex], current_node: NodeIndex, graph: &CachedStableGraph<K, V>) -> Self
-    where
-        V: Ord + Copy,
-    {
-        let mut resolved_nodes: Vec<K> = nodes.iter().map(|i| graph[*i].clone()).collect();
-        resolved_nodes.push(graph[current_node].clone());
-        CycleError(resolved_nodes.into_iter().collect())
+    pub fn new(nodes: &[NodeIndex], current_node: NodeIndex, graph: &CachedStableGraph<K, V>) -> Self {
+        let mut resolved_tupls = Vec::with_capacity(nodes.len());
+
+        resolved_tupls.push(FilialTuple {
+            child: graph[nodes[0]].clone(),
+            parent: None,
+            edges: vec![],
+        });
+
+        for window in nodes.array_windows::<2>() {
+            let edges: Vec<_> = graph.get_edges_between(window[0], window[1]).collect();
+            resolved_tupls.push(FilialTuple {
+                child: graph[window[1]].clone(),
+                parent: Some(graph[window[0]].clone()),
+                edges,
+            });
+        }
+
+        resolved_tupls.push(FilialTuple {
+            child: graph[current_node].clone(),
+            parent: Some(graph[nodes[nodes.len() - 1]].clone()),
+            edges: graph.get_edges_between(nodes[nodes.len() - 1], current_node).collect(),
+        });
+
+        CycleError(resolved_tupls)
+    }
+
+    pub fn path(&self) -> Vec<&K> {
+        self.0[..self.0.len() - 1].iter().map(|tup| &tup.child).collect()
     }
 }
-impl<K: Display> Display for CycleError<K> {
+impl<K: Display, V> Display for CycleError<K, V> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         let mut disp = String::new();
-        disp.push_str(format!("Include cycle detected:\n{} imports ", self.0[0]).as_str());
+        disp.push_str(format!("Include cycle detected:\n{} imports ", self.0[0].child).as_str());
         for p in &self.0[1..self.0.len() - 1] {
-            disp.push_str(&format!("\n{}, which imports ", *p));
+            disp.push_str(&format!("\n{}, which imports ", p.child));
         }
-        disp.push_str(&format!("\n{}", self.0[self.0.len() - 1]));
+        disp.push_str(&format!("\n{}", self.0[self.0.len() - 1].child));
         f.write_str(disp.as_str())
     }
 }
-impl<K: Display> From<CycleError<K>> for Diagnostic {
-    fn from(e: CycleError<K>) -> Diagnostic {
-        Diagnostic {
+impl<K, V> From<&CycleError<K, V>> for Vec<Diagnostic>
+where
+    K: Into<Url> + Debug + Display + Clone,
+    V: Into<u32> + Copy + Debug,
+{
+    fn from(e: &CycleError<K, V>) -> Vec<Diagnostic> {
+        let root_range = Range {
+            start: Position {
+                line: e.0[1].edges[0].into(),
+                character: 0,
+            },
+            end: Position {
+                line: e.0[1].edges[0].into(),
+                character: u32::MAX,
+            },
+        };
+
+        eprintln!("CYCLE {:?}", e.0);
+
+        let mut related: Vec<DiagnosticRelatedInformation> = Vec::with_capacity(e.0[2..].len());
+        for node in &e.0[2..] {
+            related.push(DiagnosticRelatedInformation {
+                location: Location {
+                    uri: node.parent.as_ref().unwrap().clone().into(),
+                    range: Range {
+                        start: Position {
+                            line: node.edges[0].into(),
+                            character: 0,
+                        },
+                        end: Position {
+                            line: node.edges[0].into(),
+                            character: u32::MAX,
+                        },
+                    },
+                },
+                message: format!("{} imported here", node.child),
+            });
+        }
+
+        // we also want to show indications at the actual non-root import sites
+        let mut diagnostics: Vec<Diagnostic> = Vec::with_capacity(e.0[2..].len() + 1);
+        diagnostics.push(Diagnostic {
             severity: Some(DiagnosticSeverity::ERROR),
-            range: Range::new(Position::new(0, 0), Position::new(0, 500)),
+            range: root_range,
             source: Some("mcglsl".into()),
-            message: e.into(),
+            message: e.to_string(),
             code: None,
             tags: None,
-            related_information: None,
+            related_information: Some(related),
             code_description: Option::None,
             data: Option::None,
-        }
+        });
+
+        for node in &e.0[2..] {
+            diagnostics.push(Diagnostic {
+                range: Range {
+                    start: Position {
+                        line: node.edges[0].into(),
+                        character: 0,
+                    },
+                    end: Position {
+                        line: node.edges[0].into(),
+                        character: u32::MAX,
+                    },
+                },
+                severity: Some(DiagnosticSeverity::HINT),
+                message: format!("{} imported here", node.child),
+                related_information: Some(vec![DiagnosticRelatedInformation {
+                    location: Location {
+                        uri: e.0[0].child.clone().into(),
+                        range: root_range,
+                    },
+                    message: "original diagnostic here".to_string(),
+                }]),
+                ..Default::default()
+            });
+        }
+
+        diagnostics
     }
 }
-impl<K: Display> From<CycleError<K>> for String {
-    fn from(e: CycleError<K>) -> String {
+impl<K: Display, V> From<CycleError<K, V>> for String {
+    fn from(e: CycleError<K, V>) -> String {
         format!("{}", e)
     }
 }
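A hedged sketch of what a consumer now gets out of the &CycleError -> Vec<Diagnostic> conversion above (the function and its assertions are illustrative, not part of the commit, and assume the crate's CycleError plus concrete key and line types such as NormalizedPathBuf and IncludeLine are in scope): one ERROR anchored at the root's import line, followed by one HINT per later import site, cross-linked through related_information.

fn report_cycle(cycle: &CycleError<NormalizedPathBuf, IncludeLine>) -> Vec<Diagnostic> {
    let diags: Vec<Diagnostic> = cycle.into();
    // One diagnostic per file along the cycle path.
    assert_eq!(diags.len(), cycle.path().len());
    // The first entry carries the full "Include cycle detected: ..." message.
    assert_eq!(diags[0].severity, Some(DiagnosticSeverity::ERROR));
    // The rest are hints at the individual import sites, each pointing back at the root.
    assert!(diags[1..].iter().all(|d| d.severity == Some(DiagnosticSeverity::HINT)));
    diags
}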

View file

@@ -1,3 +1,5 @@
+#![feature(array_windows)]
+
 pub mod dfs;
 mod graph;
 pub use graph::*;
@@ -10,10 +12,8 @@ pub use petgraph::stable_graph::NodeIndex;
 /// parent. Parent can be nullable in the case of the child being a top level
 /// node in the tree.
 #[derive(Hash, PartialEq, Eq, Debug, Clone)]
-pub struct FilialTuple<T> {
-    // pub child: NodeIndex,
-    // pub parent: Option<NodeIndex>,
-    pub child: T,
+pub struct FilialTuple<T, E> {
     pub parent: Option<T>,
-    // pub edge: E,
+    pub child: T,
+    pub edges: Vec<E>,
 }
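FilialTuple now also records the include-line edges from parent to child, so a parent that imports the same child more than once carries every import line. A small hedged sketch of the new shape (paths and line numbers are invented for illustration):

fn example() {
    let tup: FilialTuple<&str, u32> = FilialTuple {
        parent: Some("final.fsh"),
        child: "lib/common.glsl",
        // the two lines on which the parent includes this child
        edges: vec![3, 17],
    };
    assert_eq!(tup.edges.len(), 2);
}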

View file

@@ -20,7 +20,7 @@ const ERROR_DIRECTIVE: &str = "#error ";
 pub struct MergeViewBuilder<'a> {
     root: &'a NormalizedPathBuf,
 
-    nodes: Peekable<Iter<'a, FilialTuple<&'a Sourcefile>>>,
+    nodes: Peekable<Iter<'a, FilialTuple<&'a Sourcefile, IncludeLine>>>,
 
     /// contains additionally inserted lines such as #line and other directives, preamble defines etc
     extra_lines: Vec<String>,
@@ -32,15 +32,16 @@ pub struct MergeViewBuilder<'a> {
     /// A child can have multiple parents for a given tree, and be included multiple times
     /// by the same parent, hence we have to track it for a ((child, parent), line) tuple
     /// instead of just the child or (child, parent).
-    last_offset_set: HashMap<FilialTuple<&'a NormalizedPathBuf>, usize>,
+    last_offset_set: HashMap<FilialTuple<&'a NormalizedPathBuf, ()>, usize>,
 
     /// is included into the parent in line-sorted order. This is necessary for files that are imported
     /// more than once into the same parent, so we can easily get the next include position.
-    parent_child_edge_iterator: HashMap<FilialTuple<&'a NormalizedPathBuf>, Box<(dyn Iterator<Item = IncludeLine> + 'a)>>,
+    parent_child_edge_iterator: HashMap<FilialTuple<&'a NormalizedPathBuf, ()>, Box<(dyn Iterator<Item = IncludeLine> + 'a)>>,
 }
 
 impl<'a> MergeViewBuilder<'a> {
     pub fn new(
-        root: &'a NormalizedPathBuf, nodes: &'a [FilialTuple<&'a Sourcefile>], source_mapper: &'a mut SourceMapper<NormalizedPathBuf>,
+        root: &'a NormalizedPathBuf, nodes: &'a [FilialTuple<&'a Sourcefile, IncludeLine>],
+        source_mapper: &'a mut SourceMapper<NormalizedPathBuf>,
     ) -> Self {
         let mut all_includes: Vec<_> = nodes
             .iter()
@@ -125,10 +126,12 @@ impl<'a> MergeViewBuilder<'a> {
             .entry(FilialTuple {
                 child: &n.child.path,
                 parent: n.parent.map(|p| &p.path),
+                // TODO: whats this
+                edges: vec![],
             })
             .or_insert_with(|| Box::new(parent.includes_of_path(child_path).unwrap()))
            .next()
-            .unwrap();
+            .unwrap() as IncludeLine;
 
         let parent_source = &parent.source;
         let (char_for_line, char_following_line) = self.char_offset_for_line(edge, parent_source);
@@ -261,21 +264,34 @@ impl<'a> MergeViewBuilder<'a> {
             "parent" => parent,
             "child" => &child,
             "offset" => offset);
-        self.last_offset_set.insert(FilialTuple { child, parent }, offset)
+        self.last_offset_set.insert(
+            FilialTuple {
+                child,
+                parent,
+                edges: vec![],
+            },
+            offset,
+        )
     }
 
     #[inline]
     fn get_last_offset_for_tuple(&self, parent: Option<&'a NormalizedPathBuf>, child: &'a NormalizedPathBuf) -> Option<usize> {
-        self.last_offset_set.get(&FilialTuple { child, parent }).copied()
+        self.last_offset_set
+            .get(&FilialTuple {
+                child,
+                parent,
+                edges: vec![],
+            })
+            .copied()
     }
 
     // returns the character offset + 1 of the end of line number `line` and the character
     // offset + 1 for the end of the line after the previous one
-    fn char_offset_for_line(&self, line_num: impl Into<usize> + Copy, source: &str) -> (usize, usize) {
+    fn char_offset_for_line(&self, line_num: IncludeLine, source: &str) -> (usize, usize) {
         let mut char_for_line: usize = 0;
         let mut char_following_line: usize = 0;
 
         for (n, line) in source.lines().enumerate() {
-            if n == line_num.into() {
+            if (n as IncludeLine) == line_num {
                 char_following_line += line.len() + 1;
                 break;
             }
@@ -286,23 +302,23 @@ impl<'a> MergeViewBuilder<'a> {
     }
 
     #[inline]
-    fn find_version_offset(&self, source: &str) -> usize {
+    fn find_version_offset(&self, source: &str) -> IncludeLine {
         source
             .lines()
             .enumerate()
             .find(|(_, line)| line.starts_with("#version "))
-            .map_or(0, |(i, _)| i)
+            .map_or(0, |(i, _)| i as IncludeLine)
     }
 
     fn add_preamble(
-        &mut self, version_line_offset: impl Into<usize>, version_char_offset: usize, path: &NormalizedPathBuf, source: &'a str,
+        &mut self, version_line_offset: IncludeLine, version_char_offset: usize, path: &NormalizedPathBuf, source: &'a str,
         merge_list: &mut LinkedList<&'a str>,
     ) {
         self.process_slice_addition(merge_list, path, &source[..version_char_offset]);
         // merge_list.push_back(&source[..version_char_offset]);
         self.extra_lines.push(consts::OPTIFINE_PREAMBLE.into());
         self.unsafe_get_and_insert(merge_list);
-        self.add_closing_line_directive(version_line_offset.into() + 1, path, merge_list);
+        self.add_closing_line_directive(version_line_offset + 1, path, merge_list);
     }
 
     fn add_opening_line_directive(&mut self, path: &NormalizedPathBuf, merge_list: &mut LinkedList<&str>) {
@@ -311,16 +327,16 @@ impl<'a> MergeViewBuilder<'a> {
         self.unsafe_get_and_insert(merge_list);
     }
 
-    fn add_closing_line_directive(&mut self, line: impl Into<usize>, path: &NormalizedPathBuf, merge_list: &mut LinkedList<&str>) {
+    fn add_closing_line_directive(&mut self, line: IncludeLine, path: &NormalizedPathBuf, merge_list: &mut LinkedList<&str>) {
         // Optifine doesn't seem to add a leading newline if the previous line was a #line directive
         let line_directive = if let Some(l) = merge_list.back() {
             if l.trim().starts_with("#line") {
-                format!("#line {} {} // {}\n", line.into(), self.source_mapper.get_num(path), path)
+                format!("#line {} {} // {}\n", line, self.source_mapper.get_num(path), path)
             } else {
-                format!("\n#line {} {} // {}\n", line.into(), self.source_mapper.get_num(path), path)
+                format!("\n#line {} {} // {}\n", line, self.source_mapper.get_num(path), path)
             }
         } else {
-            format!("\n#line {} {} // {}\n", line.into(), self.source_mapper.get_num(path), path)
+            format!("\n#line {} {} // {}\n", line, self.source_mapper.get_num(path), path)
         };
 
         self.extra_lines.push(line_directive);
@@ -343,14 +359,12 @@ mod test {
         path::{Path, PathBuf},
     };
 
-    use anyhow::Result;
     use filesystem::{LFString, NormalizedPathBuf};
     use fs_extra::{copy_items, dir};
     use pretty_assertions::assert_str_eq;
     use sourcefile::SourceMapper;
     use tempdir::TempDir;
-    use workspace::{TreeError, WorkspaceTree, MaterializedTree};
+    use workspace::WorkspaceTree;
 
     use crate::MergeViewBuilder;
@@ -387,12 +401,11 @@ mod test {
         let mut trees_vec = workspace
             .trees_for_entry(&final_path)
-            .expect("expected successful tree initializing")
+            .unwrap()
             .into_iter()
             .filter_map(|treeish| treeish.ok())
-            .map(|imtree| imtree.collect())
-            .collect::<Result<Vec<MaterializedTree<'_>>, TreeError>>()
-            .expect("expected successful tree-building");
+            .map(|imtree| imtree.map(|tree| tree.unwrap()).collect::<Vec<_>>())
+            .collect::<Vec<_>>();
 
         let mut trees = trees_vec.iter_mut();
         let tree = trees.next().unwrap();
@@ -401,7 +414,7 @@ mod test {
         let mut source_mapper = SourceMapper::new(2);
 
-        let result = MergeViewBuilder::new(&tmp_path, &tree, &mut source_mapper).build();
+        let result = MergeViewBuilder::new(&tmp_path, tree, &mut source_mapper).build();
 
         let merge_file = tmp_path.join("shaders").join("final.fsh.merge");
@@ -420,22 +433,15 @@ mod test {
         let mut workspace = WorkspaceTree::new(&tmp_path.clone());
         workspace.build();
-        // println!(
-        //     "connected {}. leaf {}",
-        //     workspace.num_connected_entries(),
-        //     // workspace.num_disconnected_entries(),
-        // );
 
         let final_path = tmp_path.join("shaders").join("final.fsh");
 
         let mut trees_vec = workspace
             .trees_for_entry(&final_path)
-            .expect("expected successful tree initializing")
+            .unwrap()
             .into_iter()
             .filter_map(|treeish| treeish.ok())
-            .map(|imtree| imtree.collect())
-            .collect::<Result<Vec<MaterializedTree<'_>>, TreeError>>()
-            .expect("expected successful tree-building");
+            .map(|imtree| imtree.map(|tree| tree.unwrap()).collect::<Vec<_>>())
+            .collect::<Vec<_>>();
 
         let mut trees = trees_vec.iter_mut();
         let tree = trees.next().unwrap();
@@ -444,7 +450,7 @@ mod test {
         let mut source_mapper = SourceMapper::new(2);
 
-        let result = MergeViewBuilder::new(&tmp_path, &tree, &mut source_mapper).build();
+        let result = MergeViewBuilder::new(&tmp_path, tree, &mut source_mapper).build();
 
         let merge_file = tmp_path.join("shaders").join("final.fsh.merge");
@@ -471,12 +477,11 @@ mod test {
         let mut trees_vec = workspace
             .trees_for_entry(&final_path)
-            .expect("expected successful tree initializing")
+            .unwrap()
             .into_iter()
             .filter_map(|treeish| treeish.ok())
-            .map(|imtree| imtree.collect())
-            .collect::<Result<Vec<MaterializedTree<'_>>, TreeError>>()
-            .expect("expected successful tree-building");
+            .map(|imtree| imtree.map(|tree| tree.unwrap()).collect::<Vec<_>>())
+            .collect::<Vec<_>>();
 
         let mut trees = trees_vec.iter_mut();
         let tree = trees.next().unwrap();
@@ -485,7 +490,7 @@ mod test {
         let mut source_mapper = SourceMapper::new(2);
 
-        let result = MergeViewBuilder::new(&tmp_path, &tree, &mut source_mapper).build();
+        let result = MergeViewBuilder::new(&tmp_path, tree, &mut source_mapper).build();
 
         let merge_file = tmp_path.join("shaders").join("final.fsh.merge");
@@ -512,12 +517,11 @@ mod test {
         let mut trees_vec = workspace
             .trees_for_entry(&final_path)
-            .expect("expected successful tree initializing")
+            .unwrap()
             .into_iter()
             .filter_map(|treeish| treeish.ok())
-            .map(|imtree| imtree.collect())
-            .collect::<Result<Vec<MaterializedTree<'_>>, TreeError>>()
-            .expect("expected successful tree-building");
+            .map(|imtree| imtree.map(|tree| tree.unwrap()).collect::<Vec<_>>())
+            .collect::<Vec<_>>();
 
         let mut trees = trees_vec.iter_mut();
         let tree = trees.next().unwrap();
@@ -526,7 +530,7 @@ mod test {
         let mut source_mapper = SourceMapper::new(2);
 
-        let result = MergeViewBuilder::new(&tmp_path, &tree, &mut source_mapper).build();
+        let result = MergeViewBuilder::new(&tmp_path, tree, &mut source_mapper).build();
 
         let merge_file = tmp_path.join("shaders").join("final.fsh.merge");
@@ -561,12 +565,11 @@ mod test {
         let mut trees_vec = workspace
             .trees_for_entry(&final_path)
-            .expect("expected successful tree initializing")
+            .unwrap()
             .into_iter()
             .filter_map(|treeish| treeish.ok())
-            .map(|imtree| imtree.collect())
-            .collect::<Result<Vec<MaterializedTree<'_>>, TreeError>>()
-            .expect("expected successful tree-building");
+            .map(|imtree| imtree.map(|tree| tree.unwrap()).collect::<Vec<_>>())
+            .collect::<Vec<_>>();
 
         let mut trees = trees_vec.iter_mut();
         let tree = trees.next().unwrap();
@@ -575,7 +578,7 @@ mod test {
         let mut source_mapper = SourceMapper::new(2);
 
-        let result = MergeViewBuilder::new(&tmp_path, &tree, &mut source_mapper).build();
+        let result = MergeViewBuilder::new(&tmp_path, tree, &mut source_mapper).build();
 
         let merge_file = tmp_path.join("shaders").join("final.fsh.merge");

View file

@@ -4,9 +4,9 @@ use include_merger::MergeViewBuilder;
 use serde_json::Value;
 
 use anyhow::Result;
-use sourcefile::{SourceMapper, Sourcefile};
+use sourcefile::{SourceMapper, Sourcefile, IncludeLine};
 
-pub async fn run(path: &NormalizedPathBuf, sources: &[FilialTuple<&Sourcefile>]) -> Result<Option<Value>> {
+pub async fn run(path: &NormalizedPathBuf, sources: &[FilialTuple<&Sourcefile, IncludeLine>]) -> Result<Option<Value>> {
     let mut source_mapper = SourceMapper::new(sources.len());
     let view = MergeViewBuilder::new(path, sources, &mut source_mapper).build();

View file

@@ -1,13 +1,13 @@
 use std::{collections::HashMap, sync::Arc};
 
-use anyhow::Result;
 use filesystem::NormalizedPathBuf;
+use graph::dfs::CycleError;
 use include_merger::MergeViewBuilder;
 use logging::{info, logger, warn, FutureExt};
 use opengl::{diagnostics_parser::DiagnosticsParser, GPUVendor, TreeType};
-use sourcefile::SourceMapper;
+use sourcefile::{SourceMapper, IncludeLine};
 use tokio::sync::Mutex;
-use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, Position, Range};
+use tower_lsp::lsp_types::Diagnostic;
 use url::Url;
 use workspace::TreeError;
@@ -35,7 +35,7 @@ impl<S: opengl::ShaderValidator> Workspace<S> {
         info!("build graph"; "connected" => tree.num_connected_entries()/* , "disconnected" => tree.num_disconnected_entries() */);
     }
 
-    pub async fn delete_sourcefile(&self, path: &NormalizedPathBuf) -> Result<HashMap<Url, Vec<Diagnostic>>> {
+    pub async fn delete_sourcefile(&self, path: &NormalizedPathBuf) -> Result<HashMap<Url, Vec<Diagnostic>>, anyhow::Error> {
         info!("path deleted on filesystem"; "path" => path);
 
         let mut workspace = self.workspace_view.lock().with_logger(logger()).await;
@@ -62,14 +62,24 @@ impl<S: opengl::ShaderValidator> Workspace<S> {
         for old_root in old_roots {
             let new_trees = workspace.trees_for_entry(&old_root).expect("should be a known existing path");
             assert_eq!(new_trees.len(), 1, "root should not be able to yield more than one tree");
-            let tree = new_trees.into_iter().next().unwrap().expect("should be a top-level path").collect();
+            let tree = new_trees
+                .into_iter()
+                .next()
+                .unwrap()
+                .expect("should be a top-level path")
+                .map(|tree| match tree {
+                    Ok(t) => Ok(Ok(t)),
+                    Err(TreeError::DfsError(e)) => Err(e),
+                    Err(TreeError::FileNotFound(e)) => Ok(Err(e)),
+                })
+                .collect();
             all_diagnostics.extend(self.lint(path, tree).with_logger(logger()).await);
         }
 
         Ok(all_diagnostics)
     }
 
-    pub async fn update_sourcefile(&self, path: &NormalizedPathBuf, text: String) -> Result<HashMap<Url, Vec<Diagnostic>>> {
+    pub async fn update_sourcefile(&self, path: &NormalizedPathBuf, text: String) -> Result<HashMap<Url, Vec<Diagnostic>>, anyhow::Error> {
         let mut workspace = self.workspace_view.lock().with_logger(logger()).await;
 
         workspace.update_sourcefile(path, text);
@@ -85,8 +95,16 @@ impl<S: opengl::ShaderValidator> Workspace<S> {
             }
         }
         .into_iter()
+        // filter out roots that aren't valid top level ones, we cant lint them
         .filter_map(|maybe_tree| maybe_tree.ok())
-        .map(|tree| tree.collect()) {
+        .map(|tree| {
+            tree.map(|tree| match tree {
+                Ok(t) => Ok(Ok(t)),
+                Err(TreeError::DfsError(e)) => Err(e),
+                Err(TreeError::FileNotFound(e)) => Ok(Err(e)),
+            })
+            .collect()
+        }) {
             all_diagnostics.extend(self.lint(path, tree).with_logger(logger()).await);
         }
@@ -94,7 +112,7 @@ impl<S: opengl::ShaderValidator> Workspace<S> {
     }
 
     async fn lint<'a>(
-        &'a self, path: &'a NormalizedPathBuf, tree: Result<workspace::MaterializedTree<'a>, TreeError>,
+        &'a self, path: &'a NormalizedPathBuf, tree: Result<workspace::MaterializedTree<'a>, CycleError<NormalizedPathBuf, IncludeLine>>,
     ) -> HashMap<Url, Vec<Diagnostic>> {
         // the set of filepath->list of diagnostics to report
         let mut diagnostics: HashMap<Url, Vec<Diagnostic>> = HashMap::new();
@@ -113,36 +131,29 @@ impl<S: opengl::ShaderValidator> Workspace<S> {
         let gpu_vendor: GPUVendor = self.gl_context.lock().with_logger(logger()).await.vendor().as_str().into();
 
-        let tree = match tree {
-            Ok(tree) => tree,
-            Err(e) => match e {
-                TreeError::FileNotFound { ref importing, .. } => {
-                    let diag = Diagnostic {
-                        range: Range::new(Position::new(0, 0), Position::new(0, u32::MAX)),
-                        severity: Some(DiagnosticSeverity::WARNING),
-                        source: Some("mcglsl".to_string()),
-                        message: e.to_string(),
-                        ..Diagnostic::default()
-                    };
-                    // eprintln!("NOT FOUND {:?} {:?}", importing, diag);
-                    diagnostics.entry(Url::from_file_path(importing).unwrap()).or_default().push(diag);
-                    return diagnostics;
-                }
-                TreeError::DfsError(e) => {
-                    diagnostics.entry(Url::from_file_path(path).unwrap()).or_default().push(e.into());
-                    return diagnostics;
-                }
-            },
+        let tree: Vec<_> = match tree {
+            Ok(tree) => tree.into_iter().filter_map(|tup| tup.ok()).collect(),
+            Err(ref cycle) => {
+                let cycle_diagnostics: Vec<_> = cycle.into();
+                for (node, diagnostic) in cycle.path().iter().zip(cycle_diagnostics.into_iter()) {
+                    diagnostics
+                        .entry(Url::from_file_path(node).unwrap())
+                        .or_default()
+                        .push(diagnostic);
+                }
+                return diagnostics;
+            }
         };
 
-        let mut source_mapper = SourceMapper::new(tree.len()); // very rough count
+        let mut source_mapper = SourceMapper::new(tree.len());
 
-        let root = tree.first().expect("expected non-zero sized tree").child;
-        let (tree_type, document_glsl_version) = (
-            root.path.extension().unwrap().into(),
-            root.version().expect("fatal error parsing file for include version"),
-        );
+        let (tree_type, document_glsl_version) = {
+            let root = tree.first().expect("expected non-zero sized tree").child;
+            (
+                root.path.extension().unwrap().into(),
+                root.version().expect("fatal error parsing file for include version"),
+            )
+        };
 
         let view = MergeViewBuilder::new(&self.root, &tree, &mut source_mapper).build();
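Both delete_sourcefile and update_sourcefile above apply the same split, which could be factored into a helper. A hedged sketch using this diff's own types (the function itself is not in the commit and assumes the workspace crate's TreeError and FileNotFound plus the graph and sourcefile types are in scope): a cycle poisons the whole tree and becomes the outer error, while a missing file only poisons a single node and stays inside the materialized tree.

fn split_tree_error<'a>(
    item: Result<FilialTuple<&'a Sourcefile, IncludeLine>, TreeError>,
) -> Result<Result<FilialTuple<&'a Sourcefile, IncludeLine>, FileNotFound>, CycleError<NormalizedPathBuf, IncludeLine>> {
    match item {
        // healthy node: keep it in the materialized tree
        Ok(t) => Ok(Ok(t)),
        // include cycle: abort the whole tree
        Err(TreeError::DfsError(e)) => Err(e),
        // missing file: record it per node and keep going
        Err(TreeError::FileNotFound(e)) => Ok(Err(e)),
    }
}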

View file

@@ -10,52 +10,59 @@ use logging::Value;
 pub use source_file::*;
 pub use source_mapper::*;
 
-#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
-pub struct IncludeLine(usize);
-
-impl From<IncludeLine> for usize {
-    fn from(n: IncludeLine) -> Self {
-        n.0
-    }
-}
-
-impl From<usize> for IncludeLine {
-    fn from(n: usize) -> Self {
-        IncludeLine(n)
-    }
-}
-
-impl std::ops::Add<usize> for IncludeLine {
-    type Output = IncludeLine;
-
-    fn add(self, rhs: usize) -> Self::Output {
-        IncludeLine(self.0 + rhs)
-    }
-}
-
-impl PartialEq<usize> for IncludeLine {
-    fn eq(&self, other: &usize) -> bool {
-        self.0 == *other
-    }
-}
-
-impl Value for IncludeLine {
-    fn serialize(&self, record: &logging::Record, key: logging::Key, serializer: &mut dyn logging::Serializer) -> logging::Result {
-        self.0.serialize(record, key, serializer)
-    }
-}
-
-impl Debug for IncludeLine {
-    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{{line: {}}}", self.0)
-    }
-}
-
-impl Display for IncludeLine {
-    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
-        write!(f, "{{line: {}}}", self.0)
-    }
-}
+// TODO: decide whether to alias or newtype
+// #[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
+pub type IncludeLine = u32;
+
+// impl From<IncludeLine> for u32 {
+//     fn from(n: IncludeLine) -> Self {
+//         n.0
+//     }
+// }
+
+// impl From<IncludeLine> for usize {
+//     fn from(n: IncludeLine) -> Self {
+//         n.0 as usize
+//     }
+// }
+
+// impl From<u32> for IncludeLine {
+//     fn from(n: u32) -> Self {
+//         IncludeLine(n)
+//     }
+// }
+
+// impl std::ops::Add<u32> for IncludeLine {
+//     type Output = IncludeLine;
+
+//     fn add(self, rhs: u32) -> Self::Output {
+//         IncludeLine(self.0 + rhs)
+//     }
+// }
+
+// impl PartialEq<u32> for IncludeLine {
+//     fn eq(&self, other: &u32) -> bool {
+//         self.0 == *other
+//     }
+// }
+
+// impl Value for IncludeLine {
+//     fn serialize(&self, record: &logging::Record, key: logging::Key, serializer: &mut dyn logging::Serializer) -> logging::Result {
+//         self.0.serialize(record, key, serializer)
+//     }
+// }
+
+// impl Debug for IncludeLine {
+//     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+//         write!(f, "{{line: {}}}", self.0)
+//     }
+// }
+
+// impl Display for IncludeLine {
+//     fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {
+//         write!(f, "{{line: {}}}", self.0)
+//     }
+// }
 
 #[derive(PartialEq, Eq, Debug, Clone, Copy)]
 pub enum Version {
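With IncludeLine reduced to a plain u32 alias, call sites can use integer literals and "as" casts directly where the old newtype needed its From, Add and PartialEq impls. A hedged sketch of the call-site pattern used elsewhere in this commit (the helper names are illustrative, not part of the commit):

fn line_of(row: usize) -> IncludeLine {
    // tree-sitter rows are usize; the commit casts them at this boundary
    row as IncludeLine
}

fn is_same_line(n: usize, line: IncludeLine) -> bool {
    // mirrors the (n as IncludeLine) == line_num comparison in MergeViewBuilder
    (n as IncludeLine) == line
}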

View file

@@ -108,7 +108,7 @@ impl Sourcefile {
                 }
             };
 
-            includes.push((include_str, IncludeLine(include.node.start_position().row)));
+            includes.push((include_str, include.node.start_position().row as IncludeLine));
         }
 
         Ok(includes)
@@ -160,8 +160,8 @@ mod test {
         assert_eq!(
             source.includes()?,
             vec![
-                ("/myshader/shaders/world0/path/to/banana.fsh".into(), IncludeLine(2)),
-                ("/myshader/shaders/path/to/badbanana.gsh".into(), IncludeLine(3))
+                ("/myshader/shaders/world0/path/to/banana.fsh".into(), 2),
+                ("/myshader/shaders/path/to/badbanana.gsh".into(), 3)
             ]
         );
         Ok(())
@@ -181,7 +181,7 @@ mod test {
         let source = Sourcefile::new(source, "/myshader/shaders/world0/asdf.fsh", "/myshader");
         assert_eq!(
             source.includes_of_path(&"/myshader/shaders/world0/path/to/banana.fsh".into())?.collect::<Vec<_>>(),
-            vec![IncludeLine(2)]
+            vec![2]
         );
         Ok(())
     }

View file

@@ -25,30 +25,25 @@ pub struct WorkspaceTree {
     sources: HashMap<NormalizedPathBuf, Sourcefile>,
 }
 
-// #[derive(thiserror::Error, Debug)]
-// pub enum TreesGenError {
-//     #[error("got a non-valid top-level file: {0}")]
-//     NonTopLevel(NormalizedPathBuf),
-//     #[error(transparent)]
-//     PathNotFound(#[from] graph::NotFound<NormalizedPathBuf>),
-// }
-
 #[derive(thiserror::Error, Debug)]
 pub enum TreeError {
     #[error(transparent)]
-    DfsError(#[from] CycleError<NormalizedPathBuf>),
-    #[error("file {missing} not found; imported by {importing}.")]
-    FileNotFound {
-        importing: NormalizedPathBuf,
-        missing: NormalizedPathBuf,
-    },
-    // #[error(transparent)]
-    // PathNotFound(#[from] graph::NotFound<NormalizedPathBuf>),
+    DfsError(#[from] CycleError<NormalizedPathBuf, IncludeLine>),
+    #[error(transparent)]
+    FileNotFound(#[from] FileNotFound),
 }
 
-pub type MaterializedTree<'a> = Vec<FilialTuple<&'a Sourcefile>>;
+#[derive(thiserror::Error, Debug)]
+#[error("file {missing} not found; imported by {importing}.")]
+pub struct FileNotFound {
+    importing: NormalizedPathBuf,
+    import_locs: Vec<IncludeLine>,
+    missing: NormalizedPathBuf,
+}
 
-pub type ImmaterializedTree<'a> = impl Iterator<Item = Result<FilialTuple<&'a Sourcefile>, TreeError>>;
+pub type MaterializedTree<'a> = Vec<Result<FilialTuple<&'a Sourcefile, IncludeLine>, FileNotFound>>;
+pub type ImmaterializedTree<'a> = impl Iterator<Item = Result<FilialTuple<&'a Sourcefile, IncludeLine>, TreeError>>;
 
 #[derive(thiserror::Error, Debug)]
 #[error("got a non-valid top-level file: {0}")]
@@ -162,29 +157,41 @@ impl WorkspaceTree {
         let node = self.graph.find_node(entry).unwrap();
 
-        let transform_cycle_error =
-            |result: Result<FilialTuple<NodeIndex>, CycleError<NormalizedPathBuf>>| result.map_err(TreeError::DfsError);
-
-        let node_to_sourcefile = |result: Result<FilialTuple<NodeIndex>, TreeError>| -> Result<FilialTuple<&Sourcefile>, TreeError> {
-            result.and_then(|tup| {
-                let parent = tup.parent.map(|p| {
-                    let parent_path = &self.graph[p];
-                    // fatal error case, shouldnt happen
-                    self.sources
-                        .get(parent_path)
-                        .unwrap_or_else(|| panic!("no entry in sources for parent {}", parent_path))
-                });
-
-                let child_path = &self.graph[tup.child];
-                // soft-fail case, if file doesnt exist or mistype
-                // eprintln!("MISSING? {:?}", self.sources.get(child_path).is_none());
-                let child = self.sources.get(child_path).ok_or_else(|| TreeError::FileNotFound {
-                    importing: self.graph[tup.parent.unwrap()].clone(),
-                    missing: child_path.clone(),
-                })?;
-
-                Ok(FilialTuple { child, parent })
-            })
-        };
+        let transform_cycle_error = |result: Result<FilialTuple<NodeIndex, IncludeLine>, CycleError<NormalizedPathBuf, IncludeLine>>| {
+            result.map_err(TreeError::DfsError)
+        };
+
+        let node_to_sourcefile =
+            |result: Result<FilialTuple<NodeIndex, IncludeLine>, TreeError>| -> Result<FilialTuple<&Sourcefile, IncludeLine>, TreeError> {
+                result.and_then(|tup| {
+                    let parent = tup.parent.map(|p| {
+                        let parent_path = &self.graph[p];
+                        // fatal error case, shouldnt happen
+                        self.sources
+                            .get(parent_path)
+                            .unwrap_or_else(|| panic!("no entry in sources for parent {}", parent_path))
+                    });
+
+                    let child_path = &self.graph[tup.child];
+                    // soft-fail case, if file doesnt exist or mistype
+                    // eprintln!("MISSING? {:?}", self.sources.get(child_path).is_none());
+                    let edges = tup
+                        .parent
+                        .map(|p| self.graph.get_edges_between(p, tup.child).collect())
+                        .unwrap_or_default();
+                    let child = match self.sources.get(child_path) {
+                        Some(source) => source,
+                        None => return Err(TreeError::FileNotFound(FileNotFound {
+                            importing: self.graph[tup.parent.unwrap()].clone(),
+                            import_locs: edges,
+                            missing: child_path.clone(),
+                        })),
+                    };
+
+                    Ok(FilialTuple { child, parent, edges })
+                })
+            };
 
         if root_ancestors.is_empty() {
             if !is_top_level(&entry.strip_prefix(&self.root)) {
@@ -284,7 +291,7 @@ mod test {
         let mut view = WorkspaceTree::new(&("/home/test/banana".into()));
         let parent = view.graph.add_node(&("/home/test/banana/test.fsh".into()));
         let child = view.graph.add_node(&("/home/test/banana/included.glsl".into()));
-        view.graph.add_edge(parent, child, 2.into());
+        view.graph.add_edge(parent, child, 2);
 
         let parent = "/home/test/banana/test.fsh".into();
         let trees = view.trees_for_entry(&parent);
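Since MaterializedTree is now a Vec of per-node Results, a consumer can keep linting the nodes that resolved and collect the FileNotFound failures separately. A hedged sketch of that consumption (the helper is illustrative, not part of the commit, and assumes this crate's types are in scope):

fn partition_tree<'a>(
    tree: MaterializedTree<'a>,
) -> (Vec<FilialTuple<&'a Sourcefile, IncludeLine>>, Vec<FileNotFound>) {
    let mut resolved = Vec::new();
    let mut missing = Vec::new();
    for node in tree {
        match node {
            Ok(tup) => resolved.push(tup),
            Err(not_found) => missing.push(not_found),
        }
    }
    (resolved, missing)
}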