mirror of https://github.com/Strum355/mcshader-lsp.git
synced 2025-08-02 16:13:27 +00:00

commit e7221304da (parent 786e09bdcf)

    fixed error off-by-one once and for all, and more restructuring + a load more to go until I'm happy with it (aka never)

29 changed files with 1319 additions and 1128 deletions

@@ -42,18 +42,10 @@ export class LanguageClient extends lsp.LanguageClient {
   }

   public startServer = async (): Promise<LanguageClient> => {
     // this.extension.context.subscriptions.push(this.start())
     this.setTrace(lsp.Trace.Verbose)

     this.extension.context.subscriptions.push(this.onNotification(PublishDiagnosticsNotification.type, (p) => {
       log.error(JSON.stringify(p))
     }))
     this.extension.context.subscriptions.push(this.onNotification(TelemetryEventNotification.type, this.onStatusChange))

     await this.start()

     // await this.onReady()
     console.log('banana')
     return this
   }

@@ -62,7 +54,6 @@ export class LanguageClient extends lsp.LanguageClient {
     message: string
     icon: string
   }) => {
-    log.info('bananan')
     switch (params.status) {
       case 'loading':
       case 'ready':

server/Cargo.lock (generated): 606 lines changed. File diff suppressed because it is too large.

@@ -8,7 +8,7 @@ edition = "2021"
 doctest = false

 [dependencies]
-path-slash = "0.1"
+path-slash = "0.2"
 anyhow = "1.0"
 url = "2.2"
 percent-encoding = "2.1"

@@ -14,7 +14,6 @@ logging = { path = "../logging" }
 logging_macro = { path = "../logging_macro" }
 slog = { version = "2.7", features = [ "max_level_trace", "release_max_level_trace" ] }
 slog-scope = "4.4"
-sourcefile = { path = "../sourcefile" }
 tower-lsp = "0.17.0"
 thiserror = "1.0"

@@ -21,7 +21,7 @@ struct VisitCount {
 /// Performs a depth-first search with duplicates
 pub struct Dfs<'a, K, V>
 where
-    K: Hash + Clone + ToString + Eq + Debug,
+    K: Hash + Clone + Display + Eq + Debug,
     V: Ord + Copy,
 {
     graph: &'a CachedStableGraph<K, V>,

@@ -32,7 +32,7 @@ where

 impl<'a, K, V> Dfs<'a, K, V>
 where
-    K: Hash + Clone + ToString + Eq + Debug,
+    K: Hash + Clone + Display + Eq + Debug,
     V: Ord + Copy,
 {
     pub fn new(graph: &'a CachedStableGraph<K, V>, start: NodeIndex) -> Self {

@@ -54,7 +54,7 @@ where
         }
     }

-    fn check_for_cycle(&self, children: &[NodeIndex]) -> Result<(), CycleError> {
+    fn check_for_cycle(&self, children: &[NodeIndex]) -> Result<(), CycleError<K>> {
         for prev in &self.cycle {
             for child in children {
                 if prev.node == *child {

@@ -69,12 +69,12 @@ where

 impl<'a, K, V> Iterator for Dfs<'a, K, V>
 where
-    K: Hash + Clone + ToString + Eq + Debug,
+    K: Hash + Clone + Display + Eq + Debug,
     V: Ord + Copy,
 {
-    type Item = Result<FilialTuple<NodeIndex>, CycleError>;
+    type Item = Result<FilialTuple<NodeIndex>, CycleError<K>>;

-    fn next(&mut self) -> Option<Result<FilialTuple<NodeIndex>, CycleError>> {
+    fn next(&mut self) -> Option<Result<FilialTuple<NodeIndex>, CycleError<K>>> {
         let parent = self.cycle.last().map(|p| p.node);

         if let Some(child) = self.stack.pop() {

@@ -84,15 +84,16 @@ where
                 touch: 1,
             });

-            let children: Vec<_> = self.graph.get_all_children(child).rev().collect();
+            let children: Vec<_> = self.graph.get_all_edges_from(child).rev().collect();

             if !children.is_empty() {
-                if let Err(e) = self.check_for_cycle(&children) {
+                let child_nodes: Vec<_> = children.iter().map(|(n, _)| n).copied().collect();
+                if let Err(e) = self.check_for_cycle(&child_nodes) {
                     return Some(Err(e));
                 }

                 for child in children {
-                    self.stack.push(child);
+                    self.stack.push(child.0);
                 }
             } else {
                 self.reset_path_to_branch();

@@ -109,36 +110,39 @@ use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, Position, Range};
 use std::{error::Error as StdError, fmt::Display};

 #[derive(Debug)]
-pub struct CycleError(Vec<String>);
+// TODO: how can we include the line-of-import
+pub struct CycleError<K>(Vec<K>);

-impl StdError for CycleError {}
+impl<K> StdError for CycleError<K> where K: Display + Debug {}

-impl CycleError {
-    pub fn new<K, V>(nodes: &[NodeIndex], current_node: NodeIndex, graph: &CachedStableGraph<K, V>) -> Self
+impl<K> CycleError<K>
+where
+    K: Hash + Clone + Eq + Debug,
+{
+    pub fn new<V>(nodes: &[NodeIndex], current_node: NodeIndex, graph: &CachedStableGraph<K, V>) -> Self
     where
-        K: Hash + Clone + ToString + Eq + Debug,
         V: Ord + Copy,
     {
         let mut resolved_nodes: Vec<K> = nodes.iter().map(|i| graph[*i].clone()).collect();
         resolved_nodes.push(graph[current_node].clone());
-        CycleError(resolved_nodes.into_iter().map(|p| p.to_string()).collect())
+        CycleError(resolved_nodes.into_iter().collect())
     }
 }

-impl Display for CycleError {
+impl<K: Display> Display for CycleError<K> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         let mut disp = String::new();
-        disp.push_str(format!("Include cycle detected:\n{:?} imports ", self.0[0]).as_str());
+        disp.push_str(format!("Include cycle detected:\n{} imports ", self.0[0]).as_str());
         for p in &self.0[1..self.0.len() - 1] {
-            disp.push_str(format!("\n{:?}, which imports ", *p).as_str());
+            disp.push_str(&format!("\n{}, which imports ", *p));
         }
-        disp.push_str(format!("\n{:?}", self.0[self.0.len() - 1]).as_str());
+        disp.push_str(&format!("\n{}", self.0[self.0.len() - 1]));
         f.write_str(disp.as_str())
     }
 }

-impl From<CycleError> for Diagnostic {
-    fn from(e: CycleError) -> Diagnostic {
+impl<K: Display> From<CycleError<K>> for Diagnostic {
+    fn from(e: CycleError<K>) -> Diagnostic {
         Diagnostic {
             severity: Some(DiagnosticSeverity::ERROR),
             range: Range::new(Position::new(0, 0), Position::new(0, 500)),

@@ -153,8 +157,8 @@ impl From<CycleError> for Diagnostic {
     }
 }

-impl From<CycleError> for String {
-    fn from(e: CycleError) -> String {
+impl<K: Display> From<CycleError<K>> for String {
+    fn from(e: CycleError<K>) -> String {
         format!("{}", e)
     }
 }

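The ToString bound becoming Display, and CycleError(Vec<String>) becoming CycleError<K>(Vec<K>), defers stringification to the moment an error is rendered and drops the Debug-style quoting from the message. A minimal standalone sketch of the resulting behaviour, using &str keys in place of the repo's path type:

    use std::fmt::{self, Display};

    #[derive(Debug)]
    struct CycleError<K>(Vec<K>);

    impl<K: Display> Display for CycleError<K> {
        // Same message shape as the impl in the diff above.
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "Include cycle detected:\n{} imports ", self.0[0])?;
            for p in &self.0[1..self.0.len() - 1] {
                write!(f, "\n{}, which imports ", p)?;
            }
            write!(f, "\n{}", self.0[self.0.len() - 1])
        }
    }

    fn main() {
        let e = CycleError(vec!["shaders/a.glsl", "shaders/b.glsl", "shaders/a.glsl"]);
        // The old Vec<String> + {:?} version printed quoted paths ("shaders/a.glsl");
        // with Display the keys render unquoted.
        println!("{}", e);
    }
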
@@ -78,12 +78,6 @@ where
     }
 }

-    /// Returns all child node indexes for a parent, in order of import. May include duplicates if a child
-    /// is imported more than once into the parent.
-    pub fn get_all_children(&self, parent: NodeIndex) -> impl DoubleEndedIterator<Item = NodeIndex> + '_ {
-        self.get_all_edges_from(parent).map(|p| p.0)
-    }
-
     /// Returns an iterator over all the edge values of type `V`'s between a parent and its child for all the
     /// positions that the child may be imported into the parent, in order of import.
     pub fn get_edges_between(&self, parent: NodeIndex, child: NodeIndex) -> impl DoubleEndedIterator<Item = V> + '_ {

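get_all_children could be deleted because it was only a projection over get_all_edges_from, as its removed body shows, and the Dfs iterator above now consumes the (node, edge) pairs directly, pushing child.0 onto its stack. A toy equivalent on a plain petgraph StableDiGraph (illustrative; the repo's CachedStableGraph wraps this with its own API):

    use petgraph::stable_graph::StableDiGraph;
    use petgraph::visit::EdgeRef;

    fn main() {
        // A parent importing one child twice; the edge weight stands in for the include line.
        let mut g: StableDiGraph<&str, u32> = StableDiGraph::new();
        let parent = g.add_node("final.fsh");
        let child = g.add_node("util.glsl");
        g.add_edge(parent, child, 3);
        g.add_edge(parent, child, 7);

        // Roughly what get_all_edges_from yields: (child, include line) pairs.
        for edge in g.edges(parent) {
            println!("{} imported at line {}", g[edge.target()], edge.weight());
        }
        // The removed get_all_children was just this projection:
        let children: Vec<_> = g.edges(parent).map(|e| e.target()).collect();
        assert_eq!(children.len(), 2);
    }
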
@@ -1,10 +1,10 @@
-mod graph;
 pub mod dfs;
+mod graph;
 pub use graph::*;

-pub use petgraph::stable_graph::NodeIndex;
-pub use petgraph::dot::Config;
 pub use petgraph::dot;
+pub use petgraph::dot::Config;
+pub use petgraph::stable_graph::NodeIndex;

 /// FilialTuple represents a tuple (not really) of a child and any legitimate
 /// parent. Parent can be nullable in the case of the child being a top level

@@ -15,4 +15,5 @@ pub struct FilialTuple<T> {
     // pub parent: Option<NodeIndex>,
     pub child: T,
     pub parent: Option<T>,
+    // pub edge: E,
 }

@@ -17,12 +17,14 @@ graph = { path = "../graph" }
 filesystem = { path = "../filesystem" }
 tokio = { version = "1.18", features = ["fs"]}

-workspace_tree = { path = "../workspace_tree" }
+workspace = { path = "../workspace" }
 sourcefile = { path = "../sourcefile" }
+tree-sitter = "0.20.6"
+tree-sitter-glsl = "0.1.2"
 opengl = { path = "../opengl" }

 regex = "1.4"

 [dev-dependencies]
 tempdir = "0.3"
 fs_extra = "1.2"

@@ -7,16 +7,21 @@ use core::slice::Iter;
 use filesystem::{LFString, NormalizedPathBuf};
 use graph::FilialTuple;
 use logging::debug;
+use tree_sitter::{Parser, Query, QueryCursor};
+use tree_sitter_glsl::language;

 use crate::consts;
-use sourcefile::{IncludeLine, SourceFile, SourceMapper, Version};
+use sourcefile::{IncludeLine, SourceMapper, Sourcefile};

+const ERROR_DIRECTIVE: &str = "#error ";
+
 /// Merges the source strings according to the nodes comprising a tree of imports into a GLSL source string
 /// that can be handed off to the GLSL compiler.
 pub struct MergeViewBuilder<'a> {
-    nodes: Peekable<Iter<'a, FilialTuple<&'a SourceFile>>>,
+    root: &'a NormalizedPathBuf,
+
+    nodes: Peekable<Iter<'a, FilialTuple<&'a Sourcefile>>>,

     // sources: &'a HashMap<NormalizedPathBuf, LFString>,
     /// contains additionally inserted lines such as #line and other directives, preamble defines etc
     extra_lines: Vec<String>,

@@ -28,30 +33,32 @@ pub struct MergeViewBuilder<'a> {
     /// by the same parent, hence we have to track it for a ((child, parent), line) tuple
     /// instead of just the child or (child, parent).
     last_offset_set: HashMap<FilialTuple<&'a NormalizedPathBuf>, usize>,
     /// holds, for any given filial tuple, the iterator yielding all the positions at which the child
     /// is included into the parent in line-sorted order. This is necessary for files that are imported
     /// more than once into the same parent, so we can easily get the next include position.
     parent_child_edge_iterator: HashMap<FilialTuple<&'a NormalizedPathBuf>, Box<(dyn Iterator<Item = IncludeLine> + 'a)>>,
-
-    // #line directives need to be adjusted based on GPU vendor + document glsl version
-    gpu_vendor: opengl::GPUVendor,
-    document_glsl_version: sourcefile::Version,
 }

 impl<'a> MergeViewBuilder<'a> {
     pub fn new(
-        nodes: &'a [FilialTuple<&'a SourceFile>], source_mapper: &'a mut SourceMapper<NormalizedPathBuf>, gpu_vendor: opengl::GPUVendor,
-        document_glsl_version: sourcefile::Version,
+        root: &'a NormalizedPathBuf, nodes: &'a [FilialTuple<&'a Sourcefile>], source_mapper: &'a mut SourceMapper<NormalizedPathBuf>,
     ) -> Self {
+        println!("{}", nodes.len());
+        let mut all_includes: Vec<_> = nodes
+            .iter()
+            .flat_map(|tup| tup.child.includes().unwrap())
+            .map(|tup| tup.0)
+            .collect();
+        all_includes.sort_unstable();
+        all_includes.dedup();
+
         MergeViewBuilder {
+            root,
             nodes: nodes.iter().peekable(),
-            extra_lines: Vec::with_capacity((nodes.len() * 2) + 2),
+            // 1 start + 1 end #line & 1 preamble + 1 end #line + at worst the amount of #include directives found
+            // TODO: more compatibility inserts
+            extra_lines: Vec::with_capacity((nodes.len() * 2) + 2 + all_includes.len()),
             source_mapper,
             last_offset_set: HashMap::new(),
             parent_child_edge_iterator: HashMap::new(),
-            gpu_vendor,
-            document_glsl_version,
         }
     }

@@ -69,11 +76,7 @@ impl<'a> MergeViewBuilder<'a> {

         // add the optifine preamble (and extra compatibility mangling eventually)
         let version_line_offset = self.find_version_offset(first_source);
-        let (version_char_for_line, version_char_following_line) = self.char_offset_for_line(version_line_offset, first_source);
-        eprintln!(
-            "line {} char for line {} char after line {}",
-            version_line_offset, version_char_for_line, version_char_following_line
-        );
+        let (_, version_char_following_line) = self.char_offset_for_line(version_line_offset, first_source);
         self.add_preamble(
             version_line_offset,
             version_char_following_line,

@@ -94,7 +97,8 @@ impl<'a> MergeViewBuilder<'a> {
         // now we add a view of the remainder of the root file
         let offset = self.get_last_offset_for_tuple(None, first_path).unwrap();
         let len = first_source.len();
-        merge_list.push_back(&first_source[min(offset, len)..]);
+        self.process_slice_addition(&mut merge_list, first_path, &first_source[min(offset, len)..]);
+        // merge_list.push_back(&first_source[min(offset, len)..]);

         // Now merge all the views into one singular String to return
         let total_len = merge_list.iter().fold(0, |a, b| a + b.len());

@@ -122,10 +126,7 @@ impl<'a> MergeViewBuilder<'a> {
                 child: &n.child.path,
                 parent: n.parent.map(|p| &p.path),
             })
-            .or_insert_with(|| {
-                // let child_positions = self.graph.get_edges_between(parent, child);
-                Box::new(parent.includes_of_path(child_path).unwrap())
-            })
+            .or_insert_with(|| Box::new(parent.includes_of_path(child_path).unwrap()))
             .next()
             .unwrap();

@@ -143,7 +144,9 @@ impl<'a> MergeViewBuilder<'a> {
             "char_following_line" => char_following_line,
         );

-        merge_list.push_back(&parent_source[offset..char_for_line]);
+        self.process_slice_addition(merge_list, parent_path, &parent_source[offset..char_for_line]);
+
+        // merge_list.push_back(&parent_source[offset..char_for_line]);
         self.add_opening_line_directive(child_path, merge_list);

         match self.nodes.peek() {

@@ -157,11 +160,11 @@ impl<'a> MergeViewBuilder<'a> {
                     true => child_source.len() - 1,
                     false => child_source.len(),
                 };
-                merge_list.push_back(&child_source[..double_newline_offset]);
+                self.process_slice_addition(merge_list, child_path, &child_source[..double_newline_offset]);
+                // merge_list.push_back(&child_source[..double_newline_offset]);
                 self.set_last_offset_for_tuple(Some(parent_path), child_path, 0);
                 // +1 because edge.line is 0 indexed ~~but #line is 1 indexed and references the *following* line~~
-                // turns out #line _is_ 0 indexed too? Im really confused
-                self.add_closing_line_directive(edge + self.get_line_directive_offset(), parent_path, merge_list);
+                self.add_closing_line_directive(edge + 1, parent_path, merge_list);

                 // if the next pair's parent is not the current pair's parent, we need to bubble up
                 if stack.contains(&&next.parent.unwrap().path) {
                     return;

@@ -182,13 +185,12 @@ impl<'a> MergeViewBuilder<'a> {
         };
         if offset < child_source.len() - end_offset {
             // if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad
-            merge_list.push_back(&child_source[offset..child_source.len() - end_offset]);
+            self.process_slice_addition(merge_list, child_path, &child_source[offset..child_source.len() - end_offset]);
+            // merge_list.push_back(&child_source[offset..child_source.len() - end_offset]);
             self.set_last_offset_for_tuple(Some(parent_path), child_path, 0);
         }

         // +1 because edge.line is 0 indexed ~~but #line is 1 indexed and references the *following* line~~
-        // turns out #line _is_ 0 indexed too? Im really confused
-        self.add_closing_line_directive(edge + self.get_line_directive_offset(), parent_path, merge_list);
+        self.add_closing_line_directive(edge + 1, parent_path, merge_list);

         // we need to check the next item at the point of original return further down the callstack
         if self.nodes.peek().is_some() && stack.contains(&&self.nodes.peek().unwrap().parent.unwrap().path) {

@@ -203,16 +205,55 @@ impl<'a> MergeViewBuilder<'a> {
                     true => child_source.len() - 1,
                     false => child_source.len(),
                 };
-                merge_list.push_back(&child_source[..double_newline_offset]);
+                self.process_slice_addition(merge_list, child_path, &child_source[..double_newline_offset]);
+                // merge_list.push_back(&child_source[..double_newline_offset]);
                 self.set_last_offset_for_tuple(Some(parent_path), child_path, 0);
                 // +1 because edge.line is 0 indexed ~~but #line is 1 indexed and references the *following* line~~
-                // turns out #line _is_ 0 indexed too? Im really confused
-                self.add_closing_line_directive(edge + self.get_line_directive_offset(), parent_path, merge_list);
+                self.add_closing_line_directive(edge + 1, parent_path, merge_list);
             }
         }
     }
 }

+    // process each new view here e.g. to replace #include statements that were not removed by a file existing with
+    // #error etc
+    fn process_slice_addition(&mut self, merge_list: &mut LinkedList<&'a str>, path: &NormalizedPathBuf, slice: &'a str) {
+        let mut parser = Parser::new();
+        parser.set_language(language()).unwrap();
+
+        let query = Query::new(language(), sourcefile::GET_INCLUDES).unwrap();
+        let mut query_cursor = QueryCursor::new();
+
+        let mut start_offset = 0;
+        let mut end_offset = slice.len();
+
+        for (m, _) in query_cursor.captures(&query, parser.parse(slice, None).unwrap().root_node(), slice.as_bytes()) {
+            if m.captures.is_empty() {
+                continue;
+            }
+
+            let include = m.captures[0];
+            let include_str = {
+                let mut string = include.node.utf8_text(slice.as_bytes()).unwrap();
+                string = &string[1..string.len() - 1];
+                if string.starts_with('/') {
+                    self.root.join("shaders").join(string.strip_prefix('/').unwrap())
+                } else {
+                    path.parent().unwrap().join(string)
+                }
+            };
+
+            let line_offset = slice[start_offset..include.node.byte_range().start].rfind('\n').unwrap() + 1;
+            merge_list.push_back(&slice[start_offset..line_offset]);
+            end_offset = include.node.byte_range().end;
+            start_offset = end_offset;
+            merge_list.push_back(ERROR_DIRECTIVE);
+            self.extra_lines.push(format!("Couldn't import file {}\n", include_str));
+            self.unsafe_get_and_insert(merge_list)
+        }
+
+        merge_list.push_back(&slice[start_offset..end_offset]);
+    }

     fn set_last_offset_for_tuple(
         &mut self, parent: Option<&'a NormalizedPathBuf>, child: &'a NormalizedPathBuf, offset: usize,
     ) -> Option<usize> {

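process_slice_addition is now the single choke point for every view pushed onto the merge list: an #include that survived merging (its target never resolved to a file) is cut out of the slice and replaced with an #error directive, so the GLSL compiler reports the missing file as an ordinary diagnostic. A simplified sketch of that splicing, with a plain substring scan standing in for the tree-sitter query:

    use std::collections::LinkedList;

    // Simplified stand-in for process_slice_addition: split `slice` around each
    // leftover "#include" line and push an "#error " directive in its place.
    // The real code finds directives via a tree-sitter query and byte ranges.
    fn push_with_error_directives<'a>(
        merge_list: &mut LinkedList<&'a str>,
        slice: &'a str,
        extra_lines: &mut Vec<String>,
    ) {
        let mut start = 0;
        while let Some(found) = slice[start..].find("#include") {
            let directive_start = start + found;
            let directive_end = slice[directive_start..]
                .find('\n')
                .map_or(slice.len(), |n| directive_start + n);

            merge_list.push_back(&slice[start..directive_start]); // text before the directive
            merge_list.push_back("#error ");                      // replaces the include itself
            extra_lines.push(format!("Couldn't import file {}", &slice[directive_start..directive_end]));
            start = directive_end;
        }
        merge_list.push_back(&slice[start..]); // remainder of the view
    }
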
@@ -253,26 +294,15 @@ impl<'a> MergeViewBuilder<'a> {
             .map_or(0, |(i, _)| i)
     }

-    #[inline]
-    fn get_line_directive_offset(&self) -> usize {
-        match (self.gpu_vendor, self.document_glsl_version) {
-            (opengl::GPUVendor::NVIDIA, Version::Glsl110)
-            | (opengl::GPUVendor::NVIDIA, Version::Glsl120)
-            | (opengl::GPUVendor::NVIDIA, Version::Glsl130)
-            | (opengl::GPUVendor::NVIDIA, Version::Glsl140)
-            | (opengl::GPUVendor::NVIDIA, Version::Glsl150) => 1,
-            _ => 0,
-        }
-    }
-
     fn add_preamble(
         &mut self, version_line_offset: impl Into<usize>, version_char_offset: usize, path: &NormalizedPathBuf, source: &'a str,
         merge_list: &mut LinkedList<&'a str>,
     ) {
-        merge_list.push_back(&source[..version_char_offset]);
+        self.process_slice_addition(merge_list, path, &source[..version_char_offset]);
+        // merge_list.push_back(&source[..version_char_offset]);
         self.extra_lines.push(consts::OPTIFINE_PREAMBLE.into());
         self.unsafe_get_and_insert(merge_list);
-        self.add_closing_line_directive(version_line_offset.into() + self.get_line_directive_offset(), path, merge_list);
+        self.add_closing_line_directive(version_line_offset.into() + 1, path, merge_list);
     }

     fn add_opening_line_directive(&mut self, path: &NormalizedPathBuf, merge_list: &mut LinkedList<&str>) {

@@ -317,11 +347,10 @@ mod test {

     use filesystem::{LFString, NormalizedPathBuf};
     use fs_extra::{copy_items, dir};
-    use opengl::GPUVendor;
     use pretty_assertions::assert_str_eq;
-    use sourcefile::{SourceMapper, Version};
+    use sourcefile::SourceMapper;
     use tempdir::TempDir;
-    use workspace_tree::{TreeError, WorkspaceTree};
+    use workspace::{TreeError, WorkspaceTree};

     use crate::MergeViewBuilder;

@@ -334,11 +363,11 @@ mod test {
             .canonicalize()
             .unwrap_or_else(|_| panic!("canonicalizing '{}'", test_path));
         let opts = &dir::CopyOptions::new();
-        let files = fs::read_dir(&test_path)
+        let files = fs::read_dir(test_path)
             .unwrap()
             .map(|e| String::from(e.unwrap().path().to_str().unwrap()))
             .collect::<Vec<String>>();
-        copy_items(&files, &tmp_dir.path().join("shaders"), opts).unwrap();
+        copy_items(&files, tmp_dir.path().join("shaders"), opts).unwrap();
     }

     let tmp_path = tmp_dir.path().to_str().unwrap().into();

@@ -373,7 +402,7 @@ mod test {

         let mut source_mapper = SourceMapper::new(2);

-        let result = MergeViewBuilder::new(&tree, &mut source_mapper, GPUVendor::NVIDIA, Version::Glsl120).build();
+        let result = MergeViewBuilder::new(&tmp_path, &tree, &mut source_mapper).build();

         let merge_file = tmp_path.join("shaders").join("final.fsh.merge");

@@ -417,7 +446,7 @@ mod test {

         let mut source_mapper = SourceMapper::new(2);

-        let result = MergeViewBuilder::new(&tree, &mut source_mapper, GPUVendor::NVIDIA, Version::Glsl120).build();
+        let result = MergeViewBuilder::new(&tmp_path, &tree, &mut source_mapper).build();

         let merge_file = tmp_path.join("shaders").join("final.fsh.merge");

@@ -459,7 +488,7 @@ mod test {

         let mut source_mapper = SourceMapper::new(2);

-        let result = MergeViewBuilder::new(&tree, &mut source_mapper, GPUVendor::NVIDIA, Version::Glsl120).build();
+        let result = MergeViewBuilder::new(&tmp_path, &tree, &mut source_mapper).build();

         let merge_file = tmp_path.join("shaders").join("final.fsh.merge");

@@ -501,7 +530,7 @@ mod test {

         let mut source_mapper = SourceMapper::new(2);

-        let result = MergeViewBuilder::new(&tree, &mut source_mapper, GPUVendor::NVIDIA, Version::Glsl120).build();
+        let result = MergeViewBuilder::new(&tmp_path, &tree, &mut source_mapper).build();

         let merge_file = tmp_path.join("shaders").join("final.fsh.merge");

@@ -551,7 +580,7 @@ mod test {

         let mut source_mapper = SourceMapper::new(2);

-        let result = MergeViewBuilder::new(&tree, &mut source_mapper, GPUVendor::NVIDIA, Version::Glsl120).build();
+        let result = MergeViewBuilder::new(&tmp_path, &tree, &mut source_mapper).build();

         let merge_file = tmp_path.join("shaders").join("final.fsh.merge");

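For orientation, the output the builder assembles is one concatenated source in which every splice point is bracketed by #line directives (#line <line> <source-index>, with indices handed out by the SourceMapper), which is what lets compiler errors be mapped back to the original files. Roughly this shape, with made-up indices and the preamble elided (illustrative only, not actual output from this repo):

    // Merged view for a final.fsh that includes util.glsl at 0-indexed line 2;
    // assume source index 0 = final.fsh and 1 = util.glsl.
    const MERGED_SHAPE: &str = "\
    #version 120
    // ...OptiFine preamble inserted after the #version line...
    #line 1 0
    uniform sampler2D tex;
    #line 0 1
    // ...contents of util.glsl...
    #line 3 0
    void main() { }
    ";
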
@@ -15,7 +15,7 @@ pub use slog_scope_futures::FutureExt;

 pub fn new_trace_id() -> String {
     let rng = CURRENT_RNG.with(|rng| rng.borrow_mut().gen::<[u8; 4]>());
-    return format!("{:04x}", u32::from_be_bytes(rng));
+    format!("{:04x}", u32::from_be_bytes(rng))
 }

 pub fn set_level(level: Level) -> GlobalLoggerGuard {

@@ -14,7 +14,7 @@ lazy_static = "1.4"
 regex = "1.4"
 url = "2.2"
 mockall = "0.11"
-path-slash = "0.1"
+path-slash = "0.2"
 glob = "0.3"
 filesystem = { path = "../filesystem" }

@@ -34,7 +34,6 @@ tower-lsp = "0.17"
 tokio = { version = "1.18", features = ["full"] }
 futures = "0.3"

 workspace = { path = "../workspace" }
 opengl = { path = "../opengl" }
 sourcefile = { path = "../sourcefile" }

@@ -8,12 +8,12 @@ edition = "2021"
 doctest = false

 [dependencies]
-glutin = "0.28"
+glutin = "0.29"
 gl = "0.14"
 url = "2.2"
 filesystem = { path = "../filesystem" }
 graph = { path = "../graph" }
-tower-lsp = "0.17.0"
+tower-lsp = "0.17"
 regex = "1.4"
 mockall = "0.11"
 logging = { path = "../logging" }

@@ -1,63 +1,64 @@
-use std::collections::HashMap;
-use core::cell::OnceCell;
 use filesystem::NormalizedPathBuf;
 use logging::debug;
 use regex::Regex;
+use std::collections::HashMap;
-use tower_lsp::lsp_types::*;
+use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity};
 use url::Url;

-use crate::ShaderValidator;
-use sourcefile::{SourceMapper, SourceNum};
+use crate::GPUVendor;
+use sourcefile::{SourceMapper, SourceNum, Sourcefile, Version, ROOT_SOURCE_NUM};

-pub struct DiagnosticsParser<'a, T: ShaderValidator + ?Sized> {
-    // line_offset: OnceCell<u32>,
-    line_regex: OnceCell<Regex>,
-    vendor_querier: &'a T,
+pub struct DiagnosticsParser {
+    line_regex: Regex,
+    line_offset: u32,
 }

-impl<'a, T: ShaderValidator + ?Sized> DiagnosticsParser<'a, T> {
-    pub fn new(vendor_querier: &'a T) -> Self {
+impl DiagnosticsParser {
+    pub fn new(gpu_vendor: GPUVendor, doc_glsl_version: Version) -> Self {
         DiagnosticsParser {
-            // line_offset: OnceCell::new(),
-            line_regex: OnceCell::new(),
-            vendor_querier,
+            line_regex: DiagnosticsParser::get_line_regex(gpu_vendor),
+            line_offset: DiagnosticsParser::get_line_offset(gpu_vendor, doc_glsl_version),
         }
     }

-    fn get_line_regex(&self) -> &Regex {
-        self.line_regex.get_or_init(|| match self.vendor_querier.vendor().as_str() {
-            "NVIDIA Corporation" => {
-                Regex::new(r#"^(?P<filepath>\d+)\((?P<linenum>\d+)\) : (?P<severity>error|warning) [A-C]\d+: (?P<output>.+)"#).unwrap()
+    fn get_line_regex(gpu_vendor: GPUVendor) -> Regex {
+        match gpu_vendor {
+            GPUVendor::NVIDIA => {
+                Regex::new(r#"^(?P<filepath>\d+)\((?P<linenum>\d+)\) : (?P<severity>error|warning) [A-C]\d+: (?P<output>.+)"#)
             }
             _ => Regex::new(
                 r#"^(?P<severity>ERROR|WARNING): (?P<filepath>[^?<>*|"\n]+):(?P<linenum>\d+): (?:'.*' :|[a-z]+\(#\d+\)) +(?P<output>.+)$"#,
-            )
-            .unwrap(),
-        })
+            ),
+        }
+        .unwrap()
     }

-    // fn get_line_offset(&self) -> u32 {
-    //     *self.line_offset.get_or_init(|| match self.vendor_querier.vendor().as_str() {
-    //         "ATI Technologies" | "ATI Technologies Inc." | "AMD" => 0,
-    //         _ => 1,
-    //     })
-    // }
+    /// for certain NVIDIA GLSL versions, we need to offset the diagnostic number by -1 as those versions (incorrectly/inconsistently) state that:
+    /// "After processing this directive (including its new-line), the implementation will behave as if it is compiling at line number line+1".
+    /// So to get the correct behaviour (first line), with source strings being 0-based, we need to -1.
+    fn get_line_offset(gpu_vendor: GPUVendor, doc_glsl_version: Version) -> u32 {
+        match (gpu_vendor, doc_glsl_version) {
+            (GPUVendor::NVIDIA, Version::Glsl110)
+            | (GPUVendor::NVIDIA, Version::Glsl120)
+            | (GPUVendor::NVIDIA, Version::Glsl130)
+            | (GPUVendor::NVIDIA, Version::Glsl140)
+            | (GPUVendor::NVIDIA, Version::Glsl150) => 1,
+            _ => 0,
+        }
+    }

     pub fn parse_diagnostics_output(
-        &self,
-        output: String,
-        uri: &NormalizedPathBuf,
-        source_mapper: &SourceMapper<NormalizedPathBuf>,
-        // graph: &CachedStableGraph<NormalizedPathBuf, IncludeLine>,
+        &self, output: String, uri: &NormalizedPathBuf, source_mapper: &SourceMapper<NormalizedPathBuf>,
+        sources: &HashMap<&NormalizedPathBuf, &Sourcefile>,
     ) -> HashMap<Url, Vec<Diagnostic>> {
         let output_lines = output.split('\n').collect::<Vec<&str>>();
         let mut diagnostics: HashMap<Url, Vec<Diagnostic>> = HashMap::with_capacity(output_lines.len());

-        debug!("diagnostics regex selected"; "regex" => self.get_line_regex().as_str());
+        debug!("diagnostics regex selected"; "regex" => self.line_regex.as_str());

         for line in output_lines {
-            let diagnostic_capture = match self.get_line_regex().captures(line) {
+            let diagnostic_capture = match self.line_regex.captures(line) {
                 Some(d) => d,
                 None => continue,
             };

@@ -66,14 +67,22 @@ impl DiagnosticsParser {

             let msg = diagnostic_capture.name("output").unwrap().as_str();

-            let line = match diagnostic_capture.name("linenum") {
-                Some(c) => c.as_str().parse::<u32>().unwrap_or(0),
-                None => 0,
+            let source_num: SourceNum = match diagnostic_capture.name("filepath") {
+                Some(o) => o.as_str().parse::<usize>().unwrap().into(),
+                None => 0.into(),
             };

-            // TODO: line matching maybe
-            /* let line_text = source_lines[line as usize];
-            let leading_whitespace = line_text.len() - line_text.trim_start().len(); */
+            let origin = match source_num {
+                ROOT_SOURCE_NUM => uri,
+                _ => source_mapper.get_node(source_num),
+            };
+
+            let line = match diagnostic_capture.name("linenum") {
+                Some(c) => {
+                    c.as_str().parse::<u32>().unwrap_or(0) - self.line_offset
+                }
+                None => 0,
+            };

             let severity = match diagnostic_capture.name("severity") {
                 Some(c) => match c.as_str().to_lowercase().as_str() {

@@ -84,20 +93,14 @@ impl DiagnosticsParser {
                 _ => DiagnosticSeverity::INFORMATION,
             };

-            let origin = match diagnostic_capture.name("filepath") {
-                Some(o) => {
-                    let source_num: SourceNum = o.as_str().parse::<usize>().unwrap().into();
-                    source_mapper.get_node(source_num)
-                }
-                None => uri,
-            };
+            let source = sources[origin];
+            let (start, end) = source.linemap().line_range_for_position(Position::new(line, 0));
+            let line_text = &source.source[start..end.unwrap_or(source.source.len() - 1)];

             let diagnostic = Diagnostic {
                 range: Range::new(
-                    /* Position::new(line, leading_whitespace as u64),
-                    Position::new(line, line_text.len() as u64) */
-                    Position::new(line-1, 0),
-                    Position::new(line-1, 1000),
+                    Position::new(line, (line_text.len() - line_text.trim_start().len()) as u32),
+                    Position::new(line, line_text.len() as u32),
                 ),
                 severity: Some(severity),
                 source: Some("mcglsl".to_string()),

@@ -119,21 +122,20 @@ impl DiagnosticsParser {

 #[cfg(test)]
 mod diagnostics_test {
+    use std::collections::HashMap;
+
     use filesystem::NormalizedPathBuf;
-    use sourcefile::SourceMapper;
+    use sourcefile::{SourceMapper, Sourcefile};
     use trim_margin::MarginTrimmable;
     use url::Url;

-    use crate::{diagnostics_parser::DiagnosticsParser, MockShaderValidator};
+    use crate::diagnostics_parser::DiagnosticsParser;

     #[test]
     #[logging_macro::scope]
-    fn test_nvidia_diagnostics() {
+    fn test_nvidia_diagnostics_glsl150() {
         logging::scope(&logging::logger().new(slog_o!("driver" => "nvidia")), || {
-            let mut mockgl = MockShaderValidator::new();
-            mockgl.expect_vendor().returning(|| "NVIDIA Corporation".into());
-
-            let output = "0(9) : error C0000: syntax error, unexpected '}', expecting ',' or ';' at token \"}\"";
+            let output = "0(1) : error C0000: syntax error, unexpected '}', expecting ',' or ';' at token \"}\"";

             #[cfg(target_family = "unix")]
             let path: NormalizedPathBuf = "/home/noah/.minecraft/shaderpacks/test/shaders/final.fsh".into();

@@ -143,9 +145,47 @@ mod diagnostics_test {
             let mut source_mapper = SourceMapper::new(0);
             source_mapper.get_num(&path);

-            let parser = DiagnosticsParser::new(&mockgl);
+            let parser = DiagnosticsParser::new(crate::GPUVendor::NVIDIA, sourcefile::Version::Glsl150);

-            let results = parser.parse_diagnostics_output(output.to_string(), &path.parent().unwrap(), &source_mapper);
+            let source = Sourcefile::new(
+                "sample text".to_string(),
+                path.clone(),
+                path.parent().and_then(|p| p.parent()).unwrap(),
+            );
+            let sources = HashMap::from_iter(vec![(&path, &source)]);
+
+            let results = parser.parse_diagnostics_output(output.to_string(), &path, &source_mapper, &sources);

             assert_eq!(results.len(), 1);
             let first = results.into_iter().next().unwrap();
             assert_eq!(first.0, Url::from_file_path(path).unwrap());
         });
     }
+
+    #[test]
+    #[logging_macro::scope]
+    fn test_nvidia_diagnostics_glsl330() {
+        logging::scope(&logging::logger().new(slog_o!("driver" => "nvidia")), || {
+            let output = "0(0) : error C0000: syntax error, unexpected '}', expecting ',' or ';' at token \"}\"";
+
+            #[cfg(target_family = "unix")]
+            let path: NormalizedPathBuf = "/home/noah/.minecraft/shaderpacks/test/shaders/final.fsh".into();
+            #[cfg(target_family = "windows")]
+            let path: NormalizedPathBuf = "c:\\home\\noah\\.minecraft\\shaderpacks\\test\\shaders\\final.fsh".into();
+
+            let mut source_mapper = SourceMapper::new(0);
+            source_mapper.get_num(&path);
+
+            let parser = DiagnosticsParser::new(crate::GPUVendor::NVIDIA, sourcefile::Version::Glsl330);
+
+            let source = Sourcefile::new(
+                "sample text".to_string(),
+                path.clone(),
+                path.parent().and_then(|p| p.parent()).unwrap(),
+            );
+            let sources = HashMap::from_iter(vec![(&path, &source)]);
+
+            let results = parser.parse_diagnostics_output(output.to_string(), &path, &source_mapper, &sources);
+
+            assert_eq!(results.len(), 1);
+            let first = results.into_iter().next().unwrap();

@@ -157,14 +197,13 @@ mod diagnostics_test {
     #[logging_macro::scope]
     fn test_amd_diagnostics() {
         logging::scope(&logging::logger().new(slog_o!("driver" => "amd")), || {
-            let mut mockgl = MockShaderValidator::new();
-            mockgl.expect_vendor().returning(|| "ATI Technologies".into());
-
             let output = r#"
                 |ERROR: 0:0: '' : syntax error: #line
                 |ERROR: 0:1: '' : syntax error: #line
                 |ERROR: 0:10: '' : syntax error: #line
-                |ERROR: 0:15: 'varying' : syntax error: syntax error
-            "#.trim_margin().unwrap();
+                |ERROR: 0:2: 'varying' : syntax error: syntax error
+            "#
+            .trim_margin()
+            .unwrap();

             #[cfg(target_family = "unix")]
             let path: NormalizedPathBuf = "/home/noah/.minecraft/shaderpacks/test/shaders/final.fsh".into();

@@ -174,9 +213,21 @@ mod diagnostics_test {
             let mut source_mapper = SourceMapper::new(0);
             source_mapper.get_num(&path);

-            let parser = DiagnosticsParser::new(&mockgl);
+            let parser = DiagnosticsParser::new(crate::GPUVendor::AMD, sourcefile::Version::Glsl150);

-            let results = parser.parse_diagnostics_output(output, &path.parent().unwrap(), &source_mapper);
+            let source = Sourcefile::new(
+                "|int main() {
+                | hello_world();
+                |}"
+                .to_string()
+                .trim_margin()
+                .unwrap(),
+                path.clone(),
+                path.parent().and_then(|p| p.parent()).unwrap(),
+            );
+            let sources = HashMap::from_iter(vec![(&path, &source)]);
+
+            let results = parser.parse_diagnostics_output(output, &path, &source_mapper, &sources);

             assert_eq!(results.len(), 1);
             let first = results.into_iter().next().unwrap();

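This is where the off-by-one from the commit message is pinned down: on NVIDIA drivers compiling GLSL 110 through 150, reported line numbers are shifted by one relative to the #line directives, so the parser subtracts a vendor- and version-dependent offset. A standalone sketch of the same parse (regex copied from the diff; offset value per get_line_offset above):

    use regex::Regex;

    fn main() {
        // An NVIDIA-style diagnostic, as in the tests above.
        let line = r#"0(9) : error C0000: syntax error, unexpected '}', expecting ',' or ';' at token "}""#;
        let re = Regex::new(
            r#"^(?P<filepath>\d+)\((?P<linenum>\d+)\) : (?P<severity>error|warning) [A-C]\d+: (?P<output>.+)"#,
        )
        .unwrap();

        let caps = re.captures(line).unwrap();
        let line_offset = 1; // NVIDIA + GLSL 110..150, per get_line_offset
        let source_num: usize = caps["filepath"].parse().unwrap(); // key into the SourceMapper
        let linenum = caps["linenum"].parse::<u32>().unwrap() - line_offset;
        println!("source {}, line {}, {}: {}", source_num, linenum, &caps["severity"], &caps["output"]);
    }
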
@@ -1,7 +1,8 @@
 use std::ffi::{CStr, CString};
 use std::ptr;

-use glutin::platform::unix::EventLoopExtUnix;
+use glutin::event_loop::EventLoopBuilder;
+use glutin::platform::unix::EventLoopBuilderExtUnix;
 use logging::info;

 use crate::ShaderValidator;

@@ -12,7 +13,8 @@ pub(crate) struct Context {

 impl Context {
     pub fn default() -> Context {
-        let events_loop = glutin::event_loop::EventLoop::<()>::new_any_thread();
+        let events_loop = EventLoopBuilder::new().with_any_thread(true).build();
+        // let events_loop = glutin::event_loop::EventLoop::<()>::new_any_thread();
         let gl_window = glutin::ContextBuilder::new()
             .build_headless(&*events_loop, glutin::dpi::PhysicalSize::new(1, 1))
             .unwrap();

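glutin 0.29 drops the EventLoopExtUnix::new_any_thread constructor in favour of a builder flag. The any-thread escape hatch matters because the headless validation context is created off the main thread, which winit otherwise refuses. A sketch of the same construction in isolation (unix-only, mirroring the diff):

    use glutin::event_loop::EventLoopBuilder;
    use glutin::platform::unix::EventLoopBuilderExtUnix;

    fn main() {
        // Builder-style replacement for glutin 0.28's EventLoop::new_any_thread().
        let event_loop = EventLoopBuilder::new().with_any_thread(true).build();
        let _headless = glutin::ContextBuilder::new()
            .build_headless(&event_loop, glutin::dpi::PhysicalSize::new(1, 1))
            .unwrap();
    }
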
@@ -17,9 +17,10 @@ lazy_static = "1.4"
 regex = "1.4"
 url = "2.2"
 mockall = "0.11"
-path-slash = "0.1"
+path-slash = "0.2"
 glob = "0.3"
 filesystem = { path = "../filesystem" }
+tst = "0.10"

 # glutin = "0.28"
 gl = "0.14"

@@ -38,6 +39,7 @@ tokio = { version = "1.18", features = ["full"] }
 futures = "0.3"

 workspace = { path = "../workspace" }
+include_merger = { path = "../include_merger" }
 opengl = { path = "../opengl" }
 sourcefile = { path = "../sourcefile" }

@@ -1,42 +1,15 @@
-use std::collections::{hash_map::Entry, HashMap};
+use filesystem::NormalizedPathBuf;
+use graph::FilialTuple;
+use include_merger::MergeViewBuilder;
 use serde_json::Value;

-use filesystem::{LFString, NormalizedPathBuf};
-use graph::{dfs, CachedStableGraph};
-use logging::{logger, FutureExt};
-// use opengl::{merge_views, source_mapper::SourceMapper, IncludePosition};
-// use serde_json::Value;
 use anyhow::Result;
+use sourcefile::{SourceMapper, Sourcefile};

-// use anyhow::{format_err, Result};
+pub async fn run(path: &NormalizedPathBuf, sources: &[FilialTuple<&Sourcefile>]) -> Result<Option<Value>> {
+    let mut source_mapper = SourceMapper::new(sources.len());

-// pub async fn run(path: &NormalizedPathBuf, graph: &mut CachedStableGraph<NormalizedPathBuf, IncludePosition>) -> Result<Option<Value>> {
-//     if graph.root_ancestors_for_key(path)?.is_none() {
-//         return Err(format_err!("'{}' is not a top-level file aka has ancestors", path));
-//     };
+    let view = MergeViewBuilder::new(path, sources, &mut source_mapper).build();

-//     //info!("ancestors for {}:\n\t{:?}", path, file_ancestors.iter().map(|e| graph.borrow().graph.node_weight(*e).unwrap().clone()).collect::<Vec<String>>());
-
-//     // if we are a top-level file (this has to be one of the set defined by Optifine, right?)
-//     // gather the list of all descendants
-//     let root = graph.find_node(path).unwrap();
-
-//     let mut sources = HashMap::new();
-
-//     let tree = dfs::Dfs::new(graph, root)
-//         .map(|result| {
-//             let node = result?;
-//             let path = &graph[node.child];
-//             if let Entry::Vacant(entry) = sources.entry(path.clone()) {
-//                 let source = futures::executor::block_on(async { LFString::read(path).with_logger(logger()).await })?;
-//                 entry.insert(source);
-//             };
-//             Ok(node)
-//         })
-//         .collect::<Result<Vec<_>>>()?;
-
-//     let mut source_mapper = SourceMapper::new(sources.len());
-//     let view = merge_views::MergeViewBuilder::new(&tree, &sources, graph, &mut source_mapper).build();
-
-//     eprintln!("{:?}", view);
-
-//     Ok(Some(serde_json::value::Value::String(view.to_string())))
-// }
+    Ok(Some(serde_json::value::Value::String(view.to_string())))
+}

@@ -3,3 +3,4 @@
 pub mod server;
 pub use server::*;
 mod commands;
+mod workspace;

@ -1,19 +1,20 @@
|
|||
use std::{collections::HashMap, marker::Sync, sync::Arc};
|
||||
use std::{collections::HashMap, ffi::OsStr, marker::Sync, path::Path, sync::Arc};
|
||||
|
||||
use filesystem::NormalizedPathBuf;
|
||||
// use futures::future::join_all;
|
||||
use futures::future::join_all;
|
||||
use logging::{error, info, logger, trace, warn, FutureExt};
|
||||
use serde_json::Value;
|
||||
|
||||
use tokio::sync::Mutex;
|
||||
|
||||
// #[cfg(test)]
|
||||
// use test::Client;
|
||||
// #[cfg(not(test))]
|
||||
#[cfg(test)]
|
||||
use test::Client;
|
||||
#[cfg(not(test))]
|
||||
use tower_lsp::Client;
|
||||
|
||||
use glob::{glob_with, MatchOptions};
|
||||
use tower_lsp::{
|
||||
jsonrpc::{Error, ErrorCode, Result},
|
||||
jsonrpc::{Error, ErrorCode},
|
||||
lsp_types::{
|
||||
notification::{ShowMessage, TelemetryEvent},
|
||||
*,
|
||||
|
@ -21,9 +22,11 @@ use tower_lsp::{
|
|||
LanguageServer,
|
||||
};
|
||||
|
||||
use workspace::WorkspaceManager;
|
||||
use tst::TSTMap;
|
||||
|
||||
// use crate::commands;
|
||||
use crate::{commands, workspace::Workspace};
|
||||
|
||||
pub struct WorkspaceIndex(usize);
|
||||
|
||||
pub struct Server<G: 'static, F: 'static>
|
||||
where
|
||||
|
@ -31,7 +34,8 @@ where
|
|||
F: Fn() -> G,
|
||||
{
|
||||
pub client: Arc<Mutex<Client>>,
|
||||
workspace_manager: Arc<Mutex<WorkspaceManager<G, F>>>,
|
||||
workspaces: Arc<Mutex<TSTMap<Arc<Workspace<G>>>>>,
|
||||
gl_factory: F,
|
||||
}
|
||||
|
||||
impl<G, F> Server<G, F>
|
||||
|
@ -42,75 +46,10 @@ where
|
|||
pub fn new(client: Client, gl_factory: F) -> Self {
|
||||
Server {
|
||||
client: Arc::new(Mutex::new(client)),
|
||||
workspace_manager: Arc::new(Mutex::new(WorkspaceManager::new(gl_factory))),
|
||||
workspaces: Default::default(),
|
||||
gl_factory,
|
||||
}
|
||||
}
|
||||
|
||||
fn capabilities() -> ServerCapabilities {
|
||||
ServerCapabilities {
|
||||
definition_provider: Some(OneOf::Left(false)),
|
||||
references_provider: Some(OneOf::Left(false)),
|
||||
document_symbol_provider: Some(OneOf::Left(false)),
|
||||
document_link_provider: /* Some(DocumentLinkOptions {
|
||||
resolve_provider: None,
|
||||
work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
|
||||
}), */
|
||||
None,
|
||||
execute_command_provider: Some(ExecuteCommandOptions {
|
||||
commands: vec!["graphDot".into(), "virtualMerge".into(), "parseTree".into()],
|
||||
work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
|
||||
}),
|
||||
text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions {
|
||||
open_close: Some(true),
|
||||
will_save: None,
|
||||
will_save_wait_until: None,
|
||||
change: Some(TextDocumentSyncKind::FULL),
|
||||
save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions { include_text: Some(true) })),
|
||||
})),
|
||||
workspace: Some(WorkspaceServerCapabilities {
|
||||
workspace_folders: Some(WorkspaceFoldersServerCapabilities{
|
||||
supported: Some(true),
|
||||
change_notifications: Some(OneOf::Left(false)),
|
||||
}),
|
||||
file_operations: None,
|
||||
}),
|
||||
semantic_tokens_provider: Some(
|
||||
SemanticTokensOptions {
|
||||
work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
|
||||
legend: SemanticTokensLegend {
|
||||
token_types: vec![SemanticTokenType::COMMENT],
|
||||
token_modifiers: vec![],
|
||||
},
|
||||
range: None,
|
||||
full: Some(SemanticTokensFullOptions::Bool(true)),
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
..ServerCapabilities::default()
|
||||
}
|
||||
}
|
||||
|
||||
async fn publish_diagnostic(&self, diagnostics: HashMap<Url, Vec<Diagnostic>>, document_version: Option<i32>) {
|
||||
let client = self.client.lock().with_logger(logger()).await;
|
||||
// let mut handles = Vec::with_capacity(diagnostics.len());
|
||||
for (url, diags) in diagnostics {
|
||||
eprintln!("publishing to {:?} {:?}", &url, diags);
|
||||
/* handles.push( */
|
||||
client.publish_diagnostics(url, diags, document_version).with_logger(logger()).await;
|
||||
client
|
||||
.log_message(MessageType::INFO, "PUBLISHING!")
|
||||
.with_logger(logger())
|
||||
.await;
|
||||
// client.send_notification::<PublishDiagnostics>(PublishDiagnosticsParams {
|
||||
// ri: url,
|
||||
// diagnostics: diags,
|
||||
// // version: document_version,
|
||||
// version: None,
|
||||
// }).await/* ) */;
|
||||
}
|
||||
// join_all(handles).with_logger(logger()).await;
|
||||
eprintln!("published")
|
||||
}
|
||||
}
|
||||
|
||||
#[tower_lsp::async_trait]
|
||||
|
@ -120,7 +59,7 @@ where
|
|||
F: Fn() -> G + Send + Sync,
|
||||
{
|
||||
#[logging::with_trace_id]
|
||||
async fn initialize(&self, params: InitializeParams) -> Result<InitializeResult> {
|
||||
async fn initialize(&self, params: InitializeParams) -> tower_lsp::jsonrpc::Result<InitializeResult> {
|
||||
info!("starting server...");
|
||||
|
||||
let capabilities = Server::<G, F>::capabilities();
|
||||
|
@ -136,33 +75,31 @@ where
|
|||
}
|
||||
};
|
||||
|
||||
let mut manager = self.workspace_manager.lock().with_logger(logger()).await;
|
||||
self.client
|
||||
.lock()
|
||||
.with_logger(logger())
|
||||
.await
|
||||
.send_notification::<TelemetryEvent>(serde_json::json!({
|
||||
"status": "loading",
|
||||
"message": "Building dependency graph...",
|
||||
"icon": "$(loading~spin)",
|
||||
}))
|
||||
.with_logger(logger())
|
||||
.await;
|
||||
|
||||
// self.client
|
||||
// .lock()
|
||||
// .with_logger(logger())
|
||||
// .await
|
||||
// .send_notification::<TelemetryEvent>(serde_json::json!({
|
||||
// "status": "loading",
|
||||
// "message": "Building dependency graph...",
|
||||
// "icon": "$(loading~spin)",
|
||||
// }))
|
||||
// .with_logger(logger())
|
||||
// .await;
|
||||
self.gather_workspaces(&root).with_logger(logger()).await;
|
||||
|
||||
manager.gather_workspaces(&root).with_logger(logger()).await;
|
||||
|
||||
// self.client
|
||||
// .lock()
|
||||
// .with_logger(logger())
|
||||
// .await
|
||||
// .send_notification::<TelemetryEvent>(serde_json::json!({
|
||||
// "status": "ready",
|
||||
// "message": "Project(s) initialized...",
|
||||
// "icon": "$(check)",
|
||||
// }))
|
||||
// .with_logger(logger())
|
||||
// .await;
|
||||
self.client
|
||||
.lock()
|
||||
.with_logger(logger())
|
||||
.await
|
||||
.send_notification::<TelemetryEvent>(serde_json::json!({
|
||||
"status": "ready",
|
||||
"message": "Project(s) initialized...",
|
||||
"icon": "$(check)",
|
||||
}))
|
||||
.with_logger(logger())
|
||||
.await;
|
||||
|
||||
Ok(InitializeResult {
|
||||
capabilities,
|
||||
|
@ -180,44 +117,24 @@ where
|
|||
// .await;
|
||||
}
|
||||
|
||||
async fn shutdown(&self) -> Result<()> {
|
||||
async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> {
|
||||
warn!("shutting down language server...");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[logging::with_trace_id]
|
||||
async fn did_open(&self, params: DidOpenTextDocumentParams) {
|
||||
self.client
|
||||
.lock()
|
||||
.with_logger(logger())
|
||||
.await
|
||||
.log_message(MessageType::INFO, "OPENED!")
|
||||
.with_logger(logger())
|
||||
.await;
|
||||
self.client
|
||||
.lock()
|
||||
.with_logger(logger())
|
||||
.await
|
||||
.send_notification::<TelemetryEvent>(serde_json::json!({
|
||||
"status": "ready",
|
||||
"message": "Project(s) initialized...",
|
||||
"icon": "$(check)",
|
||||
}))
|
||||
.with_logger(logger())
|
||||
.await;
|
||||
info!("opened document"; "uri" => params.text_document.uri.as_str());
|
||||
|
||||
let path: NormalizedPathBuf = params.text_document.uri.into();
|
||||
if let Some(workspace) = self
|
||||
.workspace_manager
|
||||
.lock()
|
||||
.with_logger(logger())
|
||||
.await
|
||||
.find_workspace_for_file(&path)
|
||||
{
|
||||
|
||||
if let Some(workspace) = self.workspace_for_file(&path).await {
|
||||
trace!("found workspace"; "root" => &workspace.root);
|
||||
|
||||
workspace.refresh_graph_for_file(&path).with_logger(logger()).await;
|
||||
workspace
|
||||
.update_sourcefile(&path, params.text_document.text)
|
||||
.with_logger(logger())
|
||||
.await;
|
||||
|
||||
match workspace.lint(&path).with_logger(logger()).await {
|
||||
Ok(diagnostics) => self.publish_diagnostic(diagnostics, None).with_logger(logger()).await,
|
||||
|
@ -231,17 +148,11 @@ where
|
|||
info!("saved document"; "uri" => params.text_document.uri.as_str());
|
||||
|
||||
let path: NormalizedPathBuf = params.text_document.uri.into();
|
||||
match self
|
||||
.workspace_manager
|
||||
.lock()
|
||||
.with_logger(logger())
|
||||
.await
|
||||
.find_workspace_for_file(&path)
|
||||
{
|
||||
match self.workspace_for_file(&path).await {
|
||||
Some(workspace) => {
|
||||
trace!("found workspace"; "root" => &workspace.root);
|
||||
|
||||
workspace.refresh_graph_for_file(&path).with_logger(logger()).await;
|
||||
workspace.update_sourcefile(&path, params.text.unwrap()).with_logger(logger()).await;
|
||||
|
||||
match workspace.lint(&path).with_logger(logger()).await {
|
||||
Ok(diagnostics) => self.publish_diagnostic(diagnostics, None).with_logger(logger()).await,
|
||||
|
@ -253,7 +164,7 @@ where
|
|||
}
|
||||
|
||||
#[logging::with_trace_id]
|
||||
async fn execute_command(&self, params: ExecuteCommandParams) -> Result<Option<Value>> {
|
||||
async fn execute_command(&self, params: ExecuteCommandParams) -> tower_lsp::jsonrpc::Result<Option<Value>> {
|
||||
match params.command.as_str() {
|
||||
// "graphDot" => {
|
||||
// let document_path: NormalizedPathBuf = params.arguments.first().unwrap().try_into().unwrap();
|
||||
|
@ -266,20 +177,38 @@ where
|
|||
// data: None,
|
||||
// })
|
||||
// }
|
||||
// "virtualMerge" => {
|
||||
// let document_path: NormalizedPathBuf = params.arguments.first().unwrap().try_into().unwrap();
|
||||
// let manager = self.workspace_manager.lock().with_logger(logger()).await;
|
||||
// let workspace = manager.find_workspace_for_file(&document_path).unwrap();
|
||||
// let mut graph = workspace.graph.lock().with_logger(logger()).await;
|
||||
// commands::merged_includes::run(&document_path, &mut graph)
|
||||
// .with_logger(logger())
|
||||
// .await
|
||||
// .map_err(|e| Error {
|
||||
// code: ErrorCode::InternalError,
|
||||
// message: format!("{:?}", e),
|
||||
// data: None,
|
||||
// })
|
||||
// }
|
||||
"virtualMerge" => {
|
||||
let document_path: NormalizedPathBuf = params.arguments.first().unwrap().try_into().unwrap();
|
||||
let workspace = self.workspace_for_file(&document_path).await.unwrap();
|
||||
let mut workspace_view = workspace.workspace_view.lock().with_logger(logger()).await;
|
||||
|
||||
let mut roots = workspace_view.trees_for_entry(&document_path).unwrap();
|
||||
let root = roots.next().unwrap();
|
||||
if roots.next().is_some() {
|
||||
return Err(Error {
|
||||
code: ErrorCode::InternalError,
|
||||
message: "unexpected >1 root".into(),
|
||||
data: None,
|
||||
});
|
||||
}
|
||||
|
||||
let sources = root
|
||||
.unwrap()
|
||||
.collect::<Vec<_>>()
|
||||
.into_iter()
|
||||
.filter(|res| !matches!(res, Err(workspace::TreeError::FileNotFound { .. })))
|
||||
.collect::<Result<Vec<_>, _>>()
|
||||
.unwrap();
|
||||
|
||||
commands::merged_includes::run(&document_path, &sources)
|
||||
.with_logger(logger())
|
||||
.await
|
||||
.map_err(|e| Error {
|
||||
code: ErrorCode::InternalError,
|
||||
message: format!("{:?}", e),
|
||||
data: None,
|
||||
})
|
||||
}
|
||||
// "parseTree",
|
||||
_ => Err(Error {
|
||||
code: ErrorCode::InternalError,
|
||||
|
@ -317,7 +246,7 @@ where
|
|||
})
|
||||
}
|
||||
|
||||
async fn goto_definition(&self, _params: GotoDefinitionParams) -> Result<Option<GotoDefinitionResponse>> {
|
||||
async fn goto_definition(&self, _params: GotoDefinitionParams) -> tower_lsp::jsonrpc::Result<Option<GotoDefinitionResponse>> {
|
||||
/* logging::slog_with_trace_id(|| {
|
||||
let path = PathBuf::from_url(params.text_document.uri);
|
||||
if !path.starts_with(&self.root) {
|
||||
|
@ -348,7 +277,7 @@ where
|
|||
Ok(None)
|
||||
}
|
||||
|
||||
async fn references(&self, _params: ReferenceParams) -> Result<Option<Vec<Location>>> {
|
||||
async fn references(&self, _params: ReferenceParams) -> tower_lsp::jsonrpc::Result<Option<Vec<Location>>> {
|
||||
/* logging::slog_with_trace_id(|| {
|
||||
let path = PathBuf::from_url(params.text_document_position.text_document.uri);
|
||||
if !path.starts_with(&self.root) {
|
||||
|
@ -378,7 +307,7 @@ where
|
|||
Ok(None)
|
||||
}
|
||||
|
||||
async fn document_symbol(&self, _params: DocumentSymbolParams) -> Result<Option<DocumentSymbolResponse>> {
|
||||
async fn document_symbol(&self, _params: DocumentSymbolParams) -> tower_lsp::jsonrpc::Result<Option<DocumentSymbolResponse>> {
|
||||
/* logging::slog_with_trace_id(|| {
|
||||
let path = PathBuf::from_url(params.text_document.uri);
|
||||
if !path.starts_with(&self.root) {
|
||||
|
@ -410,7 +339,7 @@ where
|
|||
Ok(None)
|
||||
}
|
||||
|
||||
async fn document_link(&self, _params: DocumentLinkParams) -> Result<Option<Vec<DocumentLink>>> {
|
||||
async fn document_link(&self, _params: DocumentLinkParams) -> tower_lsp::jsonrpc::Result<Option<Vec<DocumentLink>>> {
|
||||
/* logging::slog_with_trace_id(|| {
|
||||
// node for current document
|
||||
let curr_doc = PathBuf::from_url(params.text_document.uri);
|
||||
|
@ -462,7 +391,6 @@ where
|
|||
}
|
||||
|
||||
async fn did_change_configuration(&self, _params: DidChangeConfigurationParams) {
|
||||
eprintln!("got notif");
|
||||
/* logging::slog_with_trace_id(|| {
|
||||
#[derive(Deserialize)]
|
||||
struct Configuration {
|
||||
|
@ -482,6 +410,166 @@ where
|
|||
}
|
||||
}
|
||||
|
||||
impl<G, F> Server<G, F>
|
||||
where
|
||||
G: opengl::ShaderValidator + Send,
|
||||
F: Fn() -> G,
|
||||
{
|
||||
fn capabilities() -> ServerCapabilities {
|
||||
ServerCapabilities {
|
||||
definition_provider: Some(OneOf::Left(false)),
|
||||
references_provider: Some(OneOf::Left(false)),
|
||||
document_symbol_provider: Some(OneOf::Left(false)),
|
||||
document_link_provider: /* Some(DocumentLinkOptions {
|
||||
resolve_provider: None,
|
||||
work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
|
||||
}), */
|
||||
None,
|
||||
execute_command_provider: Some(ExecuteCommandOptions {
|
||||
commands: vec!["graphDot".into(), "virtualMerge".into(), "parseTree".into()],
|
||||
work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
|
||||
}),
|
||||
text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions {
|
||||
open_close: Some(true),
|
||||
will_save: None,
|
||||
will_save_wait_until: None,
|
||||
change: Some(TextDocumentSyncKind::FULL),
|
||||
save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions { include_text: Some(true) })),
|
||||
})),
|
||||
workspace: Some(WorkspaceServerCapabilities {
|
||||
workspace_folders: Some(WorkspaceFoldersServerCapabilities{
|
||||
supported: Some(true),
|
||||
change_notifications: Some(OneOf::Left(false)),
|
||||
}),
|
||||
file_operations: None,
|
||||
}),
|
||||
semantic_tokens_provider: Some(
|
||||
SemanticTokensOptions {
|
||||
work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
|
||||
legend: SemanticTokensLegend {
|
||||
token_types: vec![SemanticTokenType::COMMENT],
|
||||
token_modifiers: vec![],
|
||||
},
|
||||
range: None,
|
||||
full: Some(SemanticTokensFullOptions::Bool(true)),
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
..ServerCapabilities::default()
|
||||
}
|
||||
}
|
||||
|

    pub async fn gather_workspaces(&self, root: &NormalizedPathBuf) {
        let options = MatchOptions {
            case_sensitive: true,
            ..MatchOptions::default()
        };

        let glob = root.join("**").join("shaders.properties");
        info!("banana"; "glob" => &glob);

        for entry in glob_with(&glob.to_string(), options).unwrap() {
            match entry {
                Ok(path)
                    if path.file_name().and_then(OsStr::to_str) == Some("shaders.properties")
                        && path.parent().and_then(Path::file_name).and_then(OsStr::to_str) == Some("shaders") =>
                {
                    match path.parent().and_then(Path::parent).map(Into::into) {
                        Some(shader_root) => self.add_workspace(&shader_root).with_logger(logger()).await,
                        None => todo!(),
                    }
                }
                Ok(path)
                    if path.file_name().and_then(OsStr::to_str) == Some("shaders.properties")
                        && path
                            .parent()
                            .and_then(Path::file_name)
                            .and_then(OsStr::to_str)
                            .map_or(false, |f| f.starts_with("world"))
                        && path
                            .parent()
                            .and_then(Path::parent)
                            .and_then(Path::file_name)
                            .and_then(OsStr::to_str)
                            == Some("shaders") =>
                {
                    match path.parent().and_then(Path::parent).and_then(Path::parent).map(Into::into) {
                        Some(shader_root) => self.add_workspace(&shader_root).with_logger(logger()).await,
                        None => todo!(),
                    }
                }
                Ok(path) => {
                    let path: NormalizedPathBuf = path.into();
                    error!("shaders.properties found outside ./shaders or ./worldX dir"; "path" => path)
                }
                Err(e) => error!("error iterating glob entries"; "error" => format!("{:?}", e)),
            }
        }

        let glob = root.join("**").join("shaders");
        for entry in glob_with(&glob.to_string(), options).unwrap() {
            match entry {
                Ok(path)
                    if !walkdir::WalkDir::new(path.clone()).into_iter().any(|p| {
                        p.as_ref()
                            .ok()
                            .map(|p| p.file_name())
                            .and_then(|f| f.to_str())
                            .map_or(false, |f| f == "shaders.properties")
                    }) =>
                {
                    match path.parent().map(Into::into) {
                        Some(shader_root) => self.add_workspace(&shader_root).with_logger(logger()).await,
                        None => todo!(),
                    }
                }
                Ok(path) => {
                    let path: NormalizedPathBuf = path.into();
                    info!("skipping as already existing"; "path" => path)
                }
                Err(e) => error!("error iterating glob entries"; "error" => format!("{:?}", e)),
            }
        }
    }
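
Aside: the `parent()` hops in the two match arms above are what map a found `shaders.properties` back to a workspace root, two hops for `<root>/shaders/shaders.properties` and three for `<root>/shaders/worldN/shaders.properties`. That arithmetic can be checked in isolation; the pack path below is made up, not from this repo:

    use std::path::Path;

    fn main() {
        // hypothetical layout accepted by the second arm above:
        //   <root>/shaders/world0/shaders.properties -> three parent() hops back to <root>
        let p = Path::new("/packs/MyPack/shaders/world0/shaders.properties");
        let root = p.parent().and_then(Path::parent).and_then(Path::parent);
        assert_eq!(root, Some(Path::new("/packs/MyPack")));
    }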

    async fn add_workspace(&self, root: &NormalizedPathBuf) {
        let mut search = self.workspaces.lock().with_logger(logger()).await;
        // let mut workspaces = self.workspaces.lock().with_logger(logger()).await;

        if !search.contains_key(&root.to_string()) {
            info!("adding workspace"; "root" => &root);
            let opengl_context = (self.gl_factory)();
            let workspace = Workspace::new(root.clone(), opengl_context);
            workspace.build().with_logger(logger()).await;
            // workspaces.push(workspace);
            // search.insert(&root.to_string(), WorkspaceIndex(workspaces.len() - 1));
            search.insert(&root.to_string(), Arc::new(workspace));
        }
    }

    async fn publish_diagnostic(&self, diagnostics: HashMap<Url, Vec<Diagnostic>>, document_version: Option<i32>) {
        let client = self.client.lock().with_logger(logger()).await;
        let mut handles = Vec::with_capacity(diagnostics.len());
        for (url, diags) in diagnostics {
            handles.push(client.publish_diagnostics(url, diags, document_version));
        }
        join_all(handles).with_logger(logger()).await;
    }

    pub async fn workspace_for_file(&self, file: &NormalizedPathBuf) -> Option<Arc<Workspace<G>>> {
        let search = self.workspaces.lock().with_logger(logger()).await;
        // let workspaces = self.workspaces.lock().with_logger(logger()).await;

        let file = file.to_string();
        let prefix = search.longest_prefix(&file);
        if prefix.is_empty() {
            return None;
        }

        search.get(prefix).cloned()
    }
}
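
Aside: `workspace_for_file` above resolves ownership by longest-prefix match over workspace root strings, so a file sitting under nested roots resolves to the deepest one. A minimal sketch against the same `tst::TSTMap` calls used above (`insert`, `longest_prefix`, `get`); the paths are hypothetical:

    use tst::TSTMap;

    fn main() {
        let mut search: TSTMap<&str> = Default::default();
        search.insert("/packs/outer", "outer");
        search.insert("/packs/outer/inner", "inner");

        // a file under both roots matches the longer (deeper) prefix
        let prefix = search.longest_prefix("/packs/outer/inner/shaders/final.fsh");
        assert_eq!(search.get(prefix), Some(&"inner"));
    }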

#[allow(unused)]
#[cfg(test)]
mod test {

@@ -547,11 +635,11 @@ mod test {
            .canonicalize()
            .unwrap_or_else(|_| panic!("canonicalizing '{}'", test_path));
        let opts = &dir::CopyOptions::new();
        let files = fs::read_dir(&test_path)
        let files = fs::read_dir(test_path)
            .unwrap()
            .map(|e| String::from(e.unwrap().path().to_str().unwrap()))
            .collect::<Vec<String>>();
        copy_items(&files, &tmp_dir.path().join("shaders"), opts).unwrap();
        copy_items(&files, tmp_dir.path().join("shaders"), opts).unwrap();
    }

    let tmp_path = tmp_dir.path().to_str().unwrap().into();

@@ -571,7 +659,7 @@ mod test {
        let init_resp = Ok(initialize::response());
        assert_exchange!(&server, init_req, init_resp, Server::initialize);

        assert_eq!(server.workspace_manager.lock().await.workspaces().len(), 0);
        assert_eq!(server.workspaces.lock().await.len(), 0);
    }

    #[tokio::test(flavor = "multi_thread", worker_threads = 1)]

@@ -585,10 +673,14 @@ mod test {
        let init_resp = Ok(initialize::response());
        assert_exchange!(&server, init_req, init_resp, Server::initialize);

        let manager = server.workspace_manager.lock().await;
        let workspaces = manager.workspaces();
        assert_eq!(
            workspaces.iter().map(|w| w.root.to_string()).collect::<Vec<String>>(),
            server
                .workspaces
                .lock()
                .await
                .iter()
                .map(|(_, w)| w.root.to_string())
                .collect::<Vec<String>>(),
            vec![tmp_path.to_str().unwrap()]
        );

@@ -618,10 +710,14 @@ mod test {
        let init_resp = Ok(initialize::response());
        assert_exchange!(&server, init_req, init_resp, Server::initialize);

        let manager = server.workspace_manager.lock().await;
        let workspaces = manager.workspaces();
        assert_eq!(
            workspaces.iter().map(|w| w.root.to_string()).collect::<Vec<String>>(),
            server
                .workspaces
                .lock()
                .await
                .iter()
                .map(|(_, w)| w.root.to_string())
                .collect::<Vec<String>>(),
            vec![tmp_path.to_str().unwrap()]
        );

@@ -1,28 +1,28 @@
use std::{collections::HashMap, sync::Arc};

use anyhow::Result;
use filesystem::{LFString, NormalizedPathBuf};
use graph::{dfs, CachedStableGraph, FilialTuple, NodeIndex};
use filesystem::NormalizedPathBuf;
use include_merger::MergeViewBuilder;
use logging::{info, logger, warn, FutureExt};
use opengl::{diagnostics_parser::DiagnosticsParser, TreeType};
use sourcefile::{IncludeLine, SourceFile, SourceMapper};
use opengl::{diagnostics_parser::DiagnosticsParser, GPUVendor, TreeType};
use sourcefile::SourceMapper;
use tokio::sync::Mutex;
use tower_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, Position, Range};
use url::Url;
use workspace_tree::TreeError;
use workspace::TreeError;

pub struct Workspace<S: opengl::ShaderValidator> {
    pub root: NormalizedPathBuf,
    workspace_view: Arc<Mutex<workspace_tree::WorkspaceTree>>,
    // temporarily public
    pub workspace_view: Arc<Mutex<workspace::WorkspaceTree>>,
    // graph: Arc<Mutex<CachedStableGraph<NormalizedPathBuf, IncludePosition>>>,
    gl_context: Arc<Mutex<S>>,
    pub gl_context: Arc<Mutex<S>>,
}

impl<S: opengl::ShaderValidator> Workspace<S> {
    pub fn new(root: NormalizedPathBuf, gl: S) -> Self {
        Workspace {
            workspace_view: Arc::new(Mutex::new(workspace_tree::WorkspaceTree::new(&root))),
            workspace_view: Arc::new(Mutex::new(workspace::WorkspaceTree::new(&root))),
            root,
            // graph: Arc::new(Mutex::new(CachedStableGraph::new())),
            gl_context: Arc::new(Mutex::new(gl)),

@@ -38,23 +38,28 @@ impl<S: opengl::ShaderValidator> Workspace<S> {
        info!("build graph"; "connected" => tree.num_connected_entries()/* , "disconnected" => tree.num_disconnected_entries() */);
    }

    pub async fn refresh_graph_for_file(&self, path: &NormalizedPathBuf) {
    pub async fn update_sourcefile(&self, path: &NormalizedPathBuf, text: String) {
        let mut tree = self.workspace_view.lock().with_logger(logger()).await;

        tree.update_sourcefile(path);
        tree.update_sourcefile(path, text);
    }

    pub async fn lint(&self, path: &NormalizedPathBuf) -> Result<HashMap<Url, Vec<Diagnostic>>> {
        let mut workspace = self.workspace_view.lock().with_logger(logger()).await;

        // the set of all filepath->content.
        let mut all_sources: HashMap<NormalizedPathBuf, LFString> = HashMap::new();
        // TODO: re-lint any removed files

        // the set of filepath->list of diagnostics to report
        let mut diagnostics: HashMap<Url, Vec<Diagnostic>> = HashMap::new();

        // we want to backfill the diagnostics map with all linked sources
        let back_fill = |all_sources: &HashMap<NormalizedPathBuf, LFString>, diagnostics: &mut HashMap<Url, Vec<Diagnostic>>| {
            for path in all_sources.keys() {
        let back_fill = |all_sources: &[&NormalizedPathBuf], diagnostics: &mut HashMap<Url, Vec<Diagnostic>>| {
            for path in all_sources {
                eprintln!(
                    "BACKFILLING FOR {:?}, EXISTS {}",
                    path,
                    diagnostics.contains_key(&Url::from_file_path(path).unwrap())
                );
                diagnostics.entry(Url::from_file_path(path).unwrap()).or_default();
            }
        };

@@ -66,12 +71,14 @@ impl<S: opengl::ShaderValidator> Workspace<S> {
                    TreeError::NonTopLevel(e) => warn!("got a non-valid toplevel file"; "root_ancestor" => e, "stripped" => e.strip_prefix(&self.root), "path" => path),
                    e => return Err(e.into()),
                }
                back_fill(&all_sources, &mut diagnostics);
                // back_fill(Box::new(all_sources.keys()), &mut diagnostics);
                return Ok(diagnostics);
            }
        }
        .collect::<Vec<_>>();

        let gpu_vendor: GPUVendor = self.gl_context.lock().with_logger(logger()).await.vendor().as_str().into();

        for tree in trees {
            let mut tree = match tree {
                Ok(t) => t.peekable(),

@@ -81,27 +88,6 @@ impl<S: opengl::ShaderValidator> Workspace<S> {
                    e => unreachable!("unexpected error {:?}", e),
                },
            };
            // let tree = match tree.and_then(|t| t.collect::<Result<Vec<_>, TreeError>>()) {
            //     Ok(t) => t,
            //     Err(e) => {
            //         match e {
            //             TreeError::NonTopLevel(f) => {
            //                 warn!("got a non-valid toplevel file"; "root_ancestor" => f, "stripped" => f.strip_prefix(&self.root));
            //                 continue;
            //             }
            //             TreeError::FileNotFound(f) => {
            //                 warn!("child not found"; "child" => f);
            //                 continue;
            //             }
            //             TreeError::DfsError(e) => {
            //                 diagnostics.insert(Url::from_file_path(path).unwrap(), vec![e.into()]);
            //                 back_fill(&all_sources, &mut diagnostics); // TODO: confirm
            //                 return Ok(diagnostics);
            //             }
            //             e => unreachable!("should only yield non-toplevel file error, got {:?}", e),
            //         };
            //     }
            // };

            let tree_size = tree.size_hint().0;

@@ -124,51 +110,51 @@ impl<S: opengl::ShaderValidator> Workspace<S> {
                match entry {
                    Ok(node) => built_tree.push(node),
                    Err(e) => match e {
                        TreeError::FileNotFound {
                            ref importing,
                            ref missing,
                        } => diagnostics
                            .entry(Url::from_file_path(importing).unwrap())
                            .or_default()
                            .push(Diagnostic {
                        TreeError::FileNotFound { ref importing, .. } => {
                            let diag = Diagnostic {
                                range: Range::new(Position::new(0, 0), Position::new(0, u32::MAX)),
                                severity: Some(DiagnosticSeverity::WARNING),
                                source: Some("mcglsl".to_string()),
                                message: e.to_string(),
                                ..Diagnostic::default()
                            }),
                        TreeError::DfsError(_) => todo!(),
                            };
                            eprintln!("NOT FOUND {:?} {:?}", importing, diag);
                            diagnostics.entry(Url::from_file_path(importing).unwrap()).or_default().push(diag)
                        }
                        TreeError::DfsError(e) => {
                            diagnostics.entry(Url::from_file_path(path).unwrap()).or_default().push(e.into());
                            return Ok(diagnostics);
                        }
                        e => unreachable!("unexpected error {:?}", e),
                    },
                }
            }

            let view = MergeViewBuilder::new(
                &built_tree,
                &mut source_mapper,
                self.gl_context.lock().with_logger(logger()).await.vendor().as_str().into(),
                document_glsl_version,
            )
            .build();
            let view = MergeViewBuilder::new(&self.root, &built_tree, &mut source_mapper).build();

            let stdout = match self.compile_shader_source(&view, tree_type, path).with_logger(logger()).await {
                Some(s) => s,
                None => {
                    back_fill(&all_sources, &mut diagnostics);
                    let paths: Vec<_> = built_tree.iter().map(|s| &s.child.path).collect();
                    back_fill(&paths, &mut diagnostics);
                    return Ok(diagnostics);
                }
            };

            diagnostics.extend(
                DiagnosticsParser::new(&*self.gl_context.lock().with_logger(logger()).await).parse_diagnostics_output(
                    stdout,
                    path,
                    &source_mapper,
                ),
            );
            for diagnostic in DiagnosticsParser::new(gpu_vendor, document_glsl_version).parse_diagnostics_output(
                stdout,
                path,
                &source_mapper,
                &built_tree.iter().map(|tup| (&tup.child.path, tup.child)).collect(),
            ) {
                diagnostics.entry(diagnostic.0).or_default().extend(diagnostic.1);
            }
            let paths: Vec<_> = built_tree.iter().map(|s| &s.child.path).collect();
            back_fill(&paths, &mut diagnostics);
        }

        back_fill(&all_sources, &mut diagnostics);
        eprintln!("DIAGS {:?}", diagnostics);
        // back_fill(Box::new(all_sources.keys()), &mut diagnostics);
        Ok(diagnostics)
    }

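Aside on `back_fill` above: publishing an empty diagnostics list for a URI is how an LSP client gets told to clear stale squiggles, so every file that took part in a compile must end up with at least an empty entry in the map. The closure's effect, restated with plain types (a sketch, not the server's code; the file names are invented):

    use std::collections::HashMap;

    fn back_fill(all_sources: &[&str], diagnostics: &mut HashMap<String, Vec<String>>) {
        for path in all_sources {
            // ensure an entry exists, possibly empty, so old diagnostics get cleared
            diagnostics.entry((*path).to_string()).or_default();
        }
    }

    fn main() {
        let mut diags: HashMap<String, Vec<String>> = HashMap::new();
        diags.entry("final.fsh".into()).or_default().push("some error".into());
        back_fill(&["final.fsh", "lib/common.glsl"], &mut diags);
        assert_eq!(diags["lib/common.glsl"].len(), 0); // explicit empty list
    }
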
@@ -10,14 +10,14 @@ doctest = false
[dependencies]
anyhow = "1.0"

tower-lsp = "0.17.0"
tower-lsp = "0.17"
tokio = { version = "1.18", features = ["fs"]}

logging = { path = "../logging" }
filesystem = { path = "../filesystem" }

tree-sitter = "0.20.6"
tree-sitter-glsl = "0.1.2"
tree-sitter = "0.20"
tree-sitter-glsl = "0.1"

[dev-dependencies]
trim-margin = "0.1"
@@ -19,6 +19,13 @@ impl LineMap {
    pub fn offset_for_position(&self, position: Position) -> usize {
        self.positions[position.line as usize] + (position.character as usize)
    }

    pub fn line_range_for_position(&self, position: Position) -> (usize, Option<usize>) {
        if (position.line + 1) as usize >= self.positions.len() {
            return (self.positions[position.line as usize], None)
        }
        (self.positions[position.line as usize], Some(self.positions[(position.line + 1) as usize] - 1))
    }
}

#[cfg(test)]
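Reading the new `line_range_for_position` above: `positions` evidently holds the starting offset of each line, so a line's range runs from its own start to one before the next line's start, with `None` as the end for the final line. A standalone re-derivation of that arithmetic (a sketch, not the crate's `LineMap`):

    // assumes `positions` holds the starting offset of each line
    fn line_range(positions: &[usize], line: usize) -> (usize, Option<usize>) {
        if line + 1 >= positions.len() {
            return (positions[line], None); // last line: no known end
        }
        // end = one before the next line's start, i.e. the trailing newline
        (positions[line], Some(positions[line + 1] - 1))
    }

    fn main() {
        // "ab\ncd\n" -> lines start at offsets 0, 3 and 6 (after the final newline)
        let positions = vec![0, 3, 6];
        assert_eq!(line_range(&positions, 0), (0, Some(2)));
        assert_eq!(line_range(&positions, 2), (6, None));
    }
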
@@ -1,5 +1,4 @@
use std::collections::HashMap;
use core::cell::OnceCell;
// use core::cell::OnceCell;

use anyhow::Result;
use filesystem::NormalizedPathBuf;

@@ -16,25 +15,20 @@ const GET_VERSION: &str = r#"
(#match? @version_str "\#version")
"#;

const GET_INCLUDES: &str = r#"
pub const GET_INCLUDES: &str = r#"
(preproc_include
    (string_literal) @include)
"#;

pub struct SourceFile {
pub struct Sourcefile {
    pub source: String,
    pub path: NormalizedPathBuf,
    root: NormalizedPathBuf,
    linemap: OnceCell<LineMap>,
    tree: OnceCell<Tree>,
    // TODO: use and implement invalidation
    includes: HashMap<NormalizedPathBuf, Vec<IncludeLine>>,
    // linemap: OnceCell<LineMap>,
    // tree: OnceCell<Tree>,
}

unsafe impl Send for SourceFile {}
unsafe impl Sync for SourceFile {}

impl SourceFile {
impl Sourcefile {
    pub fn new<P, R>(source: String, path: P, root: R) -> Self
    where
        P: Into<NormalizedPathBuf>,

@@ -44,22 +38,23 @@ impl SourceFile {
            source,
            path: path.into(),
            root: root.into(),
            linemap: OnceCell::new(),
            tree: OnceCell::new(),
            includes: HashMap::new(),
            // linemap: OnceCell::new(),
            // tree: OnceCell::new(),
        }
    }

    pub fn linemap(&self) -> &LineMap {
        self.linemap.get_or_init(|| LineMap::new(&self.source))
    pub fn linemap(&self) -> LineMap {
        // self.linemap.get_or_init(|| LineMap::new(&self.source))
        LineMap::new(&self.source)
    }

    pub fn version(&self) -> Result<Version> {
        let query = Query::new(language(), GET_VERSION)?;
        let mut query_cursor = QueryCursor::new();

        let tree = self.tree();
        let version_num_match = query_cursor
            .captures(&query, self.tree().root_node(), self.source.as_bytes())
            .captures(&query, tree.root_node(), self.source.as_bytes())
            .next()
            .unwrap()
            .0

@@ -72,21 +67,20 @@ impl SourceFile {
                .trim()
                .split(' ')
                .next()
                .unwrap()
            {
                "110" => Version::Glsl110,
                "120" => Version::Glsl120,
                "130" => Version::Glsl130,
                "140" => Version::Glsl140,
                "150" => Version::Glsl150,
                "330" => Version::Glsl330,
                "400" => Version::Glsl400,
                "410" => Version::Glsl410,
                "420" => Version::Glsl420,
                "430" => Version::Glsl430,
                "440" => Version::Glsl440,
                "450" => Version::Glsl450,
                "460" => Version::Glsl460,
                Some("110") => Version::Glsl110,
                Some("120") => Version::Glsl120,
                Some("130") => Version::Glsl130,
                Some("140") => Version::Glsl140,
                Some("150") => Version::Glsl150,
                Some("330") => Version::Glsl330,
                Some("400") => Version::Glsl400,
                Some("410") => Version::Glsl410,
                Some("420") => Version::Glsl420,
                Some("430") => Version::Glsl430,
                Some("440") => Version::Glsl440,
                Some("450") => Version::Glsl450,
                Some("460") => Version::Glsl460,
                _ => Version::Glsl110,
            },
        )

@@ -124,18 +118,18 @@ impl SourceFile {
        Ok(self.includes()?.into_iter().filter(move |(p, _)| p == child).map(|(_, l)| l))
    }

    fn tree(&self) -> &Tree {
        self.tree.get_or_init(|| {
    fn tree(&self) -> Tree {
        // self.tree.get_or_init(|| {
        let mut parser = Parser::new();
        parser.set_language(language()).unwrap();
        parser.parse(&self.source, None).unwrap()
        })
        // })
    }
}

#[cfg(test)]
mod test {
    use crate::{IncludeLine, SourceFile, Version};
    use crate::{IncludeLine, Sourcefile, Version};
    use anyhow::Result;
    use trim_margin::MarginTrimmable;

@@ -147,7 +141,7 @@ mod test {
        void main() {}
    "#;

        let source = SourceFile::new(SOURCE.to_string(), "/asdf", "/");
        let source = Sourcefile::new(SOURCE.to_string(), "/asdf", "/");
        assert_eq!(source.version().unwrap(), Version::Glsl150);
    }

@@ -162,7 +156,7 @@ mod test {
        .trim_margin()
        .unwrap();

        let source = SourceFile::new(source, "/myshader/shaders/world0/asdf.fsh", "/myshader");
        let source = Sourcefile::new(source, "/myshader/shaders/world0/asdf.fsh", "/myshader");
        assert_eq!(
            source.includes()?,
            vec![

@@ -184,7 +178,7 @@ mod test {
        .trim_margin()
        .unwrap();

        let source = SourceFile::new(source, "/myshader/shaders/world0/asdf.fsh", "/myshader");
        let source = Sourcefile::new(source, "/myshader/shaders/world0/asdf.fsh", "/myshader");
        assert_eq!(
            source.includes_of_path(&"/myshader/shaders/world0/path/to/banana.fsh".into())?.collect::<Vec<_>>(),
            vec![IncludeLine(2)]

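Aside on `version()` above: once the `@version_str` capture is in hand, the GLSL version is just the first whitespace-separated token, with anything unrecognized falling back to `Glsl110`. That token mapping, restated in isolation (an abridged `Version` enum for the sketch, not the crate's):

    #[derive(Debug, PartialEq)]
    enum Version {
        Glsl110,
        Glsl150,
        Glsl460,
    } // abridged to three variants

    fn parse_version(capture: &str) -> Version {
        // e.g. "150 core" -> Some("150"); the Some(...) arms mirror the
        // change above from unwrap() to matching on the Option directly
        match capture.trim().split(' ').next() {
            Some("150") => Version::Glsl150,
            Some("460") => Version::Glsl460,
            _ => Version::Glsl110, // unknown or missing -> lowest supported
        }
    }

    fn main() {
        assert_eq!(parse_version(" 150 core "), Version::Glsl150);
        assert_eq!(parse_version("999"), Version::Glsl110);
    }
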
@@ -1,5 +1,7 @@
use std::{cmp::Eq, collections::HashMap, fmt::Display, hash::Hash};

pub const ROOT_SOURCE_NUM: SourceNum = SourceNum(0);

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct SourceNum(usize);

@@ -13,17 +13,13 @@ filesystem = { path = "../filesystem" }
futures = "0.3.21"
glob = "0.3"
graph = { path = "../graph" }
# include_merger = { path = "../include_merger" }
lazy_static = "1.4"
logging = { path = "../logging" }
opengl = { path = "../opengl" }
path-slash = "0.1"
path-slash = "0.2"
regex = "1.4"
sourcefile = { path = "../sourcefile" }
thiserror = "1.0"
tokio = { version = "1.18.0", features = ["sync"] }
tower-lsp = "0.17.0"
tst = "0.10"
url = "2.2"
walkdir = "2.3"
workspace_tree = { path = "../workspace_tree" }
include_merger = { path = "../include_merger" }
walkdir = "2.3"

@@ -1,6 +1,272 @@
#![feature(assert_matches)]
#![feature(result_flattening)]
#![feature(arc_unwrap_or_clone)]

pub mod workspace;
pub mod workspace_manager;
pub use workspace::*;
pub use workspace_manager::*;
use std::{
    collections::{hash_map::Entry, HashMap, HashSet},
    fs::read_to_string,
};

use filesystem::{is_top_level, NormalizedPathBuf};
use graph::{
    dfs::{CycleError, Dfs},
    CachedStableGraph, FilialTuple, NodeIndex,
};
use logging::{debug, info, warn};
use sourcefile::{IncludeLine, Sourcefile};
use walkdir::WalkDir;

mod tree;

pub struct WorkspaceTree {
    root: NormalizedPathBuf,
    pub graph: CachedStableGraph<NormalizedPathBuf, IncludeLine>,
    disconnected: HashSet<NormalizedPathBuf>,
    sources: HashMap<NormalizedPathBuf, Sourcefile>,
}

#[derive(thiserror::Error, Debug)]
pub enum TreeError {
    #[error("got a non-valid top-level file")]
    NonTopLevel(NormalizedPathBuf),
    #[error("file {missing} not found; imported by {importing}.")]
    FileNotFound {
        importing: NormalizedPathBuf,
        missing: NormalizedPathBuf,
    },
    #[error(transparent)]
    DfsError(#[from] CycleError<NormalizedPathBuf>),
    #[error(transparent)]
    Other(#[from] anyhow::Error),
}

impl WorkspaceTree {
    pub fn new(root: &NormalizedPathBuf) -> Self {
        WorkspaceTree {
            root: root.clone(),
            graph: CachedStableGraph::new(),
            disconnected: HashSet::new(),
            sources: HashMap::new(),
        }
    }

    pub fn num_connected_entries(&self) -> usize {
        self.graph.node_count()
    }

    // pub fn num_disconnected_entries(&self) -> usize {
    //     self.disconnected.len()
    // }

    /// builds the set of connected and disconnected GLSL files from the root of the
    /// workspace.
    // TODO: support user-defined additional file extensions.
    pub fn build(&mut self) {
        let root = self.root.clone();

        enum GraphEntry {
            // represents top-level nodes
            TopLevel(Sourcefile),
            // represents non-top-level nodes
            Leaf(Sourcefile),
        }

        // let mut roots = Vec::new();

        for entry in WalkDir::new(&root)
            .into_iter()
            .filter_map(Result::ok)
            .filter(|entry| entry.path().is_file())
            .map(|entry| NormalizedPathBuf::from(entry.into_path()))
            .filter_map(|path| {
                // files not imported anywhere wont be included in the graph,
                // this is ok for now.
                if !is_top_level(&path.strip_prefix(&root)) {
                    let ext = path.extension();
                    if ext == Some("fsh") || ext == Some("gsh") || ext == Some("vsh") || ext == Some("glsl") || ext == Some("csh") {
                        return Some(GraphEntry::Leaf(Sourcefile::new(read_to_string(&path).ok()?, path, root.clone())));
                    }
                    return None;
                }

                Some(GraphEntry::TopLevel(Sourcefile::new(
                    read_to_string(&path).ok()?,
                    path,
                    root.clone(),
                )))
            })
        {
            // iterate all valid found files, search for includes, add a node into the graph for each
            // file and add a file->includes KV into the map
            match entry {
                GraphEntry::TopLevel(file) => {
                    eprintln!("TOP LEVEL {}", file.path);
                    let path = file.path.clone();
                    // roots.push(file.clone());
                    // self.sources.insert(path.clone(), file);
                    self.update_sourcefile(&path, file.source);
                }
                GraphEntry::Leaf(file) => {
                    eprintln!("LEAF {}", file.path);
                    let path = file.path.clone();
                    // self.sources.insert(path.clone(), file);
                    self.update_sourcefile(&path, file.source);
                    // self.disconnected.insert(path);
                }
            };
        }
    }

    /// Returns the lazy depth first iterators for the possible trees given any node.
    /// If it is a top-level node, only a single tree should be instantiated. If not a top-level node,
    /// a tree will be instantiated for every top-level root ancestor.
    ///
    /// Error modes:
    /// - Top [`Result`]
    ///   - The node is not known to the workspace
    ///   - The node has no ancestors but is not a known valid top-level file
    /// - Middle [`Result`] (only for >1 ancestor)
    ///   - A non-valid top-level ancestor was found
    /// - Bottom [`Result`]
    ///   - A cycle was detected while iterating
    ///   - A node was not found on the filesystem while synthesizing a Sourcefile instance
    pub fn trees_for_entry<'a>(
        &'a mut self, entry: &'a NormalizedPathBuf,
    ) -> Result<
        impl Iterator<Item = Result<impl Iterator<Item = Result<FilialTuple<&Sourcefile>, TreeError>> + '_, TreeError>> + '_,
        TreeError,
    > {
        let root_ancestors = self.graph.root_ancestors_for_key(entry)?.unwrap_or_default();

        let mut trees = Vec::with_capacity(root_ancestors.len().max(1));

        info!("top-level file ancestors found";
            "uri" => entry,
            "ancestors" => format!("{:?}", root_ancestors.iter()
                .copied()
                .map(|e| &self.graph.graph[e])
                .collect::<Vec<_>>())
        );

        let node = self.graph.find_node(entry).unwrap();

        let transform_cycle_error =
            |result: Result<FilialTuple<NodeIndex>, CycleError<NormalizedPathBuf>>| result.map_err(TreeError::DfsError);
        let node_to_sourcefile = |result: Result<FilialTuple<NodeIndex>, TreeError>| -> Result<FilialTuple<&Sourcefile>, TreeError> {
            result.and_then(|tup| {
                let parent = tup.parent.map(|p| {
                    let parent_path = &self.graph[p];
                    // fatal error case, shouldnt happen
                    self.sources
                        .get(parent_path)
                        .unwrap_or_else(|| panic!("no entry in sources for parent {}", parent_path))
                });

                let child_path = &self.graph[tup.child];
                // soft-fail case, if file doesnt exist or mistype
                // eprintln!("MISSING? {:?}", self.sources.get(child_path).is_none());
                let child = self.sources.get(child_path).ok_or_else(|| TreeError::FileNotFound {
                    importing: self.graph[tup.parent.unwrap()].clone(),
                    missing: child_path.clone(),
                })?;

                Ok(FilialTuple { child, parent })
            })
        };

        if root_ancestors.is_empty() {
            if !is_top_level(&entry.strip_prefix(&self.root)) {
                return Err(TreeError::NonTopLevel(entry.clone()));
            }

            let dfs = Dfs::new(&self.graph, node)
                .into_iter()
                .map(transform_cycle_error)
                .map(node_to_sourcefile);
            trees.push(Ok(dfs));
        } else {
            for root in root_ancestors {
                let root_path = &self.graph[root];
                if !is_top_level(&root_path.strip_prefix(&self.root)) {
                    warn!("got a non-valid toplevel file"; "root_ancestor" => root_path);
                    trees.push(Err(TreeError::NonTopLevel(root_path.clone())));
                    continue;
                }

                let dfs = Dfs::new(&self.graph, root)
                    .into_iter()
                    .map(transform_cycle_error)
                    .map(node_to_sourcefile);
                trees.push(Ok(dfs));
            }
        }

        Ok(trees.into_iter())
    }

    /// updates the set of GLSL files connected to the given file, moving unreferenced
    /// files into the disconnected set
    pub fn update_sourcefile(&mut self, path: &NormalizedPathBuf, text: String) {
        match self.sources.entry(path.clone()) {
            Entry::Occupied(mut entry) => entry.get_mut().source = text,
            Entry::Vacant(entry) => {
                entry.insert(Sourcefile::new(text, path.clone(), self.root.clone()));
            }
        };
        let file = self.sources.get(path).unwrap();
        let includes = file.includes().unwrap();

        info!("includes found for file"; "file" => &file.path, "includes" => format!("{:?}", includes));

        let idx = self.graph.add_node(&file.path);

        let prev_children: HashSet<_> =
            HashSet::from_iter(self.graph.get_all_edges_from(idx).map(|tup| (self.graph[tup.0].clone(), tup.1)));
        let new_children: HashSet<_> = includes.iter().cloned().collect();

        let to_be_added = new_children.difference(&prev_children);
        let to_be_removed = prev_children.difference(&new_children);

        debug!(
            "include sets diff'd";
            "for removal" => format!("{:?}", to_be_removed),
            "for addition" => format!("{:?}", to_be_added)
        );

        for removal in to_be_removed {
            let child = self.graph.find_node(&removal.0).unwrap();
            self.graph.remove_edge(idx, child, removal.1);
            if removal.0.exists() && self.graph.parents(child).count() == 0 {
                self.disconnected.insert(removal.0.clone());
            }
        }

        // TODO: remove entire subtree from disconnected
        for insertion in to_be_added {
            let (child, position) = includes.iter().find(|f| f.0 == insertion.0).unwrap().clone();
            let child = self.graph.add_node(&child);
            self.graph.add_edge(idx, child, position);
        }
    }
}

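Aside: `update_sourcefile` above reconciles the include graph by set arithmetic over `(child, line)` edge pairs rather than rebuilding it; only edges in the new-minus-old difference get added, and only old-minus-new edges get removed. The core of that diff in isolation (made-up file names, `usize` standing in for `IncludeLine`):

    use std::collections::HashSet;

    fn main() {
        // previous include edges vs the ones parsed from the new text
        let prev: HashSet<(&str, usize)> = HashSet::from([("common.glsl", 3), ("old.glsl", 7)]);
        let new: HashSet<(&str, usize)> = HashSet::from([("common.glsl", 3), ("new.glsl", 5)]);

        let to_be_added: HashSet<(&str, usize)> = new.difference(&prev).copied().collect();
        let to_be_removed: HashSet<(&str, usize)> = prev.difference(&new).copied().collect();

        assert!(to_be_added.contains(&("new.glsl", 5)));
        assert!(to_be_removed.contains(&("old.glsl", 7)));
    }
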
#[cfg(test)]
mod test {
    use crate::{TreeError, WorkspaceTree};

    #[test]
    fn test_trees() {
        let mut view = WorkspaceTree::new(&("/home/test/banana".into()));
        let parent = view.graph.add_node(&("/home/test/banana/test.fsh".into()));
        let child = view.graph.add_node(&("/home/test/banana/included.glsl".into()));
        view.graph.add_edge(parent, child, 2.into());

        let parent = "/home/test/banana/test.fsh".into();
        let trees = view.trees_for_entry(&parent);
        match trees {
            Ok(_) => panic!("unexpected Ok result"),
            Err(e) => match e {
                TreeError::NonTopLevel(_) => {}
                _ => panic!("unexpected error {:?}", e),
            },
        }
    }
}

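Aside: the nested `Result` layers returned by `trees_for_entry` map one-to-one onto the failure modes in its doc comment, so a caller peels them in order: entry-level, per-root-ancestor, per-node. A hedged consumption sketch (a hypothetical caller of the API above, not code from this commit):

    // assumes `tree: &mut WorkspaceTree` has already had build() run on it
    fn walk(tree: &mut WorkspaceTree) {
        let entry = "/pack/shaders/final.fsh".into();
        match tree.trees_for_entry(&entry) {
            Err(e) => eprintln!("entry not usable: {:?}", e),
            Ok(trees) => {
                for per_root in trees {
                    let nodes = match per_root {
                        Ok(nodes) => nodes,
                        Err(e) => {
                            eprintln!("skipping non-top-level root: {:?}", e);
                            continue;
                        }
                    };
                    for tup in nodes {
                        match tup {
                            Ok(tup) => println!("visit {}", tup.child.path),
                            Err(e) => {
                                eprintln!("abandoning tree: {:?}", e);
                                break;
                            }
                        }
                    }
                }
            }
        }
    }
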
@@ -1,9 +1,9 @@
use sourcefile::SourceFile;
use sourcefile::Sourcefile;

pub struct Tree {}

impl Iterator for Tree {
    type Item = SourceFile;
    type Item = Sourcefile;

    fn next(&mut self) -> Option<Self::Item> {
        todo!()

@@ -1,137 +0,0 @@
use std::{ffi::OsStr, path::Path};

use filesystem::NormalizedPathBuf;
use glob::{glob_with, MatchOptions};
use logging::{info, error, FutureExt, logger};
use tst::TSTMap;
use walkdir::WalkDir;

use crate::workspace::Workspace;

pub struct WorkspaceIndex(usize);

#[derive(Default)]
pub struct WorkspaceManager<G, F>
where
    G: opengl::ShaderValidator + Send,
    F: Fn() -> G
{
    search: TSTMap<WorkspaceIndex>,
    workspaces: Vec<Workspace<G>>,
    gl_factory: F
}

impl <G, F> WorkspaceManager<G, F>
where
    G: opengl::ShaderValidator + Send,
    F: Fn() -> G
{
    pub fn new(gl_factory: F) -> Self {
        WorkspaceManager {
            search: Default::default(),
            workspaces: Default::default(),
            gl_factory
        }
    }

    pub async fn gather_workspaces(&mut self, root: &NormalizedPathBuf) {
        let options = MatchOptions {
            case_sensitive: true,
            ..MatchOptions::default()
        };

        let glob = root.join("**").join("shaders.properties");
        info!("banana"; "glob" => &glob);

        for entry in glob_with(&glob.to_string(), options).unwrap() {
            match entry {
                Ok(path)
                    if path.file_name().and_then(OsStr::to_str) == Some("shaders.properties")
                        && path.parent().and_then(Path::file_name).and_then(OsStr::to_str) == Some("shaders") =>
                {
                    match path.parent().and_then(Path::parent).map(Into::into) {
                        Some(shader_root) => self.add_workspace(&shader_root).with_logger(logger()).await,
                        None => todo!(),
                    }
                }
                Ok(path)
                    if path.file_name().and_then(OsStr::to_str) == Some("shaders.properties")
                        && path
                            .parent()
                            .and_then(Path::file_name)
                            .and_then(OsStr::to_str)
                            .map_or(false, |f| f.starts_with("world"))
                        && path
                            .parent()
                            .and_then(Path::parent)
                            .and_then(Path::file_name)
                            .and_then(OsStr::to_str)
                            == Some("shaders") =>
                {
                    match path.parent().and_then(Path::parent).and_then(Path::parent).map(Into::into) {
                        Some(shader_root) => self.add_workspace(&shader_root).with_logger(logger()).await,
                        None => todo!(),
                    }
                }
                Ok(path) => {
                    let path: NormalizedPathBuf = path.into();
                    error!("shaders.properties found outside ./shaders or ./worldX dir"; "path" => path)
                }
                Err(e) => error!("error iterating glob entries"; "error" => format!("{:?}", e)),
            }
        }

        let glob = root.join("**").join("shaders");
        for entry in glob_with(&glob.to_string(), options).unwrap() {
            match entry {
                Ok(path)
                    if !WalkDir::new(path.clone()).into_iter().any(|p| {
                        p.as_ref()
                            .ok()
                            .map(|p| p.file_name())
                            .and_then(|f| f.to_str())
                            .map_or(false, |f| f == "shaders.properties")
                    }) =>
                {
                    match path.parent().map(Into::into) {
                        Some(shader_root) => self.add_workspace(&shader_root).with_logger(logger()).await,
                        None => todo!(),
                    }
                }
                Ok(path) => {
                    let path: NormalizedPathBuf = path.into();
                    info!("skipping as already existing"; "path" => path)
                }
                Err(e) => error!("error iterating glob entries"; "error" => format!("{:?}", e)),
            }
        }
    }

    async fn add_workspace(&mut self, root: &NormalizedPathBuf) {
        if !self.search.contains_key(&root.to_string()) {
            info!("adding workspace"; "root" => &root);
            let opengl_context = (self.gl_factory)();
            let workspace = Workspace::new(root.clone(), opengl_context);
            workspace.build().with_logger(logger()).await;
            self.workspaces.push(workspace);
            self.search.insert(&root.to_string(), WorkspaceIndex(self.workspaces.len() - 1));
        }
    }

    pub fn find_workspace_for_file(&self, file: &NormalizedPathBuf) -> Option<&Workspace<G>> {
        let file = file.to_string();
        let prefix = self.search.longest_prefix(&file);
        if prefix.is_empty() {
            return None;
        }

        match self.search.get(prefix) {
            Some(idx) => self.workspaces.get(idx.0),
            None => None,
        }
    }

    pub fn workspaces(&self) -> &[Workspace<G>] {
        &self.workspaces
    }
}

@@ -1,27 +0,0 @@
[package]
name = "workspace_tree"
version = "0.9.8"
authors = ["Noah Santschi-Cooney <noah@santschi-cooney.ch>"]
edition = "2021"

[lib]
doctest = false

[dependencies]
# include_merger = { path = "../include_merger" }
anyhow = "1.0"
filesystem = { path = "../filesystem" }
futures = "0.3.21"
glob = "0.3"
graph = { path = "../graph" }
lazy_static = "1.4"
logging = { path = "../logging" }
opengl = { path = "../opengl" }
path-slash = "0.1"
regex = "1.4"
sourcefile = { path = "../sourcefile" }
thiserror = "1.0"
tokio = { version = "1.18.0", features = ["sync"] }
tst = "0.10"
url = "2.2"
walkdir = "2.3"

@@ -1,280 +0,0 @@
#![feature(result_flattening)]

use std::{
    collections::{HashMap, HashSet},
    fs::read_to_string,
    sync::Arc,
};

use filesystem::{is_top_level, NormalizedPathBuf};
use graph::{
    dfs::{CycleError, Dfs},
    CachedStableGraph, FilialTuple, NodeIndex,
};
use logging::{debug, info, warn};
use sourcefile::{IncludeLine, SourceFile};
use walkdir::WalkDir;

mod tree;

pub struct WorkspaceTree {
    root: NormalizedPathBuf,
    pub graph: CachedStableGraph<NormalizedPathBuf, IncludeLine>,
    disconnected: HashSet<NormalizedPathBuf>,
    sources: HashMap<NormalizedPathBuf, Arc<SourceFile>>,
}

#[derive(thiserror::Error, Debug)]
pub enum TreeError {
    #[error("got a non-valid top-level file")]
    NonTopLevel(NormalizedPathBuf),
    #[error("file {missing} not found; imported by {importing}.")]
    FileNotFound {
        importing: NormalizedPathBuf,
        missing: NormalizedPathBuf,
    },
    #[error(transparent)]
    DfsError(#[from] CycleError),
    #[error(transparent)]
    Other(#[from] anyhow::Error),
}

impl WorkspaceTree {
    pub fn new(root: &NormalizedPathBuf) -> Self {
        WorkspaceTree {
            root: root.clone(),
            graph: CachedStableGraph::new(),
            disconnected: HashSet::new(),
            sources: HashMap::new(),
        }
    }

    pub fn num_connected_entries(&self) -> usize {
        self.graph.node_count()
    }

    // pub fn num_disconnected_entries(&self) -> usize {
    //     self.disconnected.len()
    // }

    /// builds the set of connected and disconnected GLSL files from the root of the
    /// workspace.
    // TODO: support user-defined additional file extensions.
    pub fn build(&mut self) {
        let root = self.root.clone();

        enum GraphEntry {
            // represents top-level nodes
            TopLevel(SourceFile),
            // represents non-top-level nodes
            Leaf(SourceFile),
        }

        // let mut roots = Vec::new();

        for entry in WalkDir::new(&root)
            .into_iter()
            .filter_map(Result::ok)
            .filter(|entry| entry.path().is_file())
            .map(|entry| NormalizedPathBuf::from(entry.into_path()))
            .filter_map(|path| {
                // files not imported anywhere wont be included in the graph,
                // this is ok for now.
                if !is_top_level(&path.strip_prefix(&root)) {
                    let ext = path.extension();
                    if ext == Some("fsh") || ext == Some("gsh") || ext == Some("vsh") || ext == Some("glsl") || ext == Some("csh") {
                        return Some(GraphEntry::Leaf(SourceFile::new(read_to_string(&path).ok()?, path, root.clone())));
                    }
                    return None;
                }

                Some(GraphEntry::TopLevel(SourceFile::new(
                    read_to_string(&path).ok()?,
                    path,
                    root.clone(),
                )))
            })
        {
            // iterate all valid found files, search for includes, add a node into the graph for each
            // file and add a file->includes KV into the map
            match entry {
                GraphEntry::TopLevel(file) => {
                    let file = Arc::new(file);
                    eprintln!("TOP LEVEL {}", file.path);
                    let path = file.path.clone();
                    // roots.push(file.clone());
                    self.sources.insert(path.clone(), file);
                    self.update_sourcefile(&path);
                }
                GraphEntry::Leaf(file) => {
                    let file = Arc::new(file);
                    eprintln!("LEAF {}", file.path);
                    let path = file.path.clone();
                    self.sources.insert(path.clone(), file);
                    self.update_sourcefile(&path);
                    // self.disconnected.insert(path);
                }
            };
        }

        // for root in roots {
        //     self.update_sourcefile(&root.path);
        //     for include in root.includes().unwrap() {
        //         // for tree_entry in self.trees_for_entry(&include.0).unwrap() {
        //         //     tree_entry.unwrap().
        //         // }
        //     }
        // }
    }

    /// Returns the lazy depth first iterators for the possible trees given any node.
    /// If it is a top-level node, only a single tree should be instantiated. If not a top-level node,
    /// a tree will be instantiated for every top-level root ancestor.
    ///
    /// Error modes:
    /// - Top [`Result`]
    ///   - The node is not known to the workspace
    ///   - The node has no ancestors but is not a known valid top-level file
    /// - Middle [`Result`] (only for >1 ancestor)
    ///   - A non-valid top-level ancestor was found
    /// - Bottom [`Result`]
    ///   - A cycle was detected while iterating
    ///   - A node was not found on the filesystem while synthesizing a Sourcefile instance
    pub fn trees_for_entry<'a>(
        &'a mut self, entry: &'a NormalizedPathBuf,
    ) -> Result<
        impl Iterator<Item = Result<impl Iterator<Item = Result<FilialTuple<&SourceFile>, TreeError>> + '_, TreeError>> + '_,
        TreeError,
    > {
        let root_ancestors = self.graph.root_ancestors_for_key(entry)?.unwrap_or_default();

        let mut trees = Vec::with_capacity(root_ancestors.len().max(1));

        info!("top-level file ancestors found";
            "uri" => entry,
            "ancestors" => format!("{:?}", root_ancestors.iter()
                .copied()
                .map(|e| &self.graph.graph[e])
                .collect::<Vec<_>>())
        );

        let node = self.graph.find_node(entry).unwrap();

        let transform_cycle_error = |result: Result<FilialTuple<NodeIndex>, CycleError>| result.map_err(TreeError::DfsError);
        let node_to_sourcefile = |result: Result<FilialTuple<NodeIndex>, TreeError>| -> Result<FilialTuple<&SourceFile>, TreeError> {
            result.and_then(|tup| {
                // fatal error case, shouldnt happen
                let parent = tup.parent.map(|p| {
                    let parent_path = &self.graph[p];
                    self.sources
                        .get(parent_path)
                        .unwrap_or_else(|| panic!("no entry in sources for parent {}", parent_path))
                        .as_ref()
                });

                let child_path = &self.graph[tup.child];
                // soft-fail case, if file doesnt exist or mistype
                let child = self
                    .sources
                    .get(child_path)
                    .ok_or_else(|| TreeError::FileNotFound {
                        importing: self.graph[tup.parent.unwrap()].clone(),
                        missing: child_path.clone(),
                    })?
                    .as_ref();

                Ok(FilialTuple { child, parent })
            })
        };

        if root_ancestors.is_empty() {
            if !is_top_level(&entry.strip_prefix(&self.root)) {
                return Err(TreeError::NonTopLevel(entry.clone()));
            }

            let dfs = Dfs::new(&self.graph, node)
                .into_iter()
                .map(transform_cycle_error)
                .map(node_to_sourcefile);
            trees.push(Ok(dfs));
        } else {
            for root in root_ancestors {
                let root_path = &self.graph[root];
                if !is_top_level(&root_path.strip_prefix(&self.root)) {
                    warn!("got a non-valid toplevel file"; "root_ancestor" => root_path);
                    trees.push(Err(TreeError::NonTopLevel(root_path.clone())));
                    continue;
                }

                let dfs = Dfs::new(&self.graph, root)
                    .into_iter()
                    .map(transform_cycle_error)
                    .map(node_to_sourcefile);
                trees.push(Ok(dfs));
            }
        }

        Ok(trees.into_iter())
    }

    /// updates the set of GLSL files connected to the given file, moving unreferenced
    pub fn update_sourcefile(&mut self, path: &NormalizedPathBuf) {
        let file = self.sources.get(path).unwrap();
        let includes = file.includes().unwrap();

        info!("includes found for file"; "file" => &file.path, "includes" => format!("{:?}", includes));

        let idx = self.graph.add_node(&file.path);

        let prev_children: HashSet<_> =
            HashSet::from_iter(self.graph.get_all_edges_from(idx).map(|tup| (self.graph[tup.0].clone(), tup.1)));
        let new_children: HashSet<_> = includes.iter().cloned().collect();

        let to_be_added = new_children.difference(&prev_children);
        let to_be_removed = prev_children.difference(&new_children);

        debug!(
            "include sets diff'd";
            "for removal" => format!("{:?}", to_be_removed),
            "for addition" => format!("{:?}", to_be_added)
        );

        for removal in to_be_removed {
            let child = self.graph.find_node(&removal.0).unwrap();
            self.graph.remove_edge(idx, child, removal.1);
            if removal.0.exists() && self.graph.parents(child).count() == 0 {
                self.disconnected.insert(removal.0.clone());
            }
        }

        // TODO: remove entire subtree from disconnected
        for insertion in to_be_added {
            let (child, position) = includes.iter().find(|f| f.0 == insertion.0).unwrap().clone();
            let child = self.graph.add_node(&child);
            self.graph.add_edge(idx, child, position);
        }
    }
}

#[cfg(test)]
mod test {
    use crate::{TreeError, WorkspaceTree};

    #[test]
    fn test_trees() {
        let mut view = WorkspaceTree::new(&("/home/test/banana".into()));
        let parent = view.graph.add_node(&("/home/test/banana/test.fsh".into()));
        let child = view.graph.add_node(&("/home/test/banana/included.glsl".into()));
        view.graph.add_edge(parent, child, 2.into());

        let parent = "/home/test/banana/test.fsh".into();
        let trees = view.trees_for_entry(&parent);
        match trees {
            Ok(_) => panic!("unexpected Ok result"),
            Err(e) => match e {
                TreeError::NonTopLevel(_) => {}
                _ => panic!("unexpected error {:?}", e),
            },
        }
    }
}