Mirror of https://github.com/Strum355/mcshader-lsp.git, synced 2025-09-02 14:57:21 +00:00
fix include merging for when a file imports another file more than once directly
Commit fecc41168a (parent 1529460a5c)
10 changed files with 526 additions and 380 deletions
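The crux of the fix: the include graph can legitimately contain parallel edges (the same parent file including the same child on two different lines), and a lookup built on a single edge between two nodes only ever sees one of them. Below is a standalone Rust sketch of that idea using petgraph directly, not code from this commit; the file names and the usize edge weights are illustrative stand-ins for the crate's own node/IncludePosition types. The real change (get_edge_metas, test_double_import, testdata/06) appears in the hunks further down.

use petgraph::stable_graph::StableDiGraph;
use petgraph::visit::EdgeRef;

fn main() {
    // node weight = file name, edge weight = 0-indexed line of the #include
    let mut graph: StableDiGraph<&str, usize> = StableDiGraph::new();
    let root = graph.add_node("final.fsh");
    let child = graph.add_node("test.glsl");

    graph.add_edge(root, child, 2); // first `#include "test.glsl"`
    graph.add_edge(root, child, 4); // second `#include "test.glsl"`, further down the file

    // find_edge only ever surfaces a single edge between the two nodes,
    // which is why a lookup based on it loses the second include
    assert!(graph.find_edge(root, child).is_some());

    // walking all outgoing edges and filtering by target sees every parallel edge;
    // sorting by line gives them back in import order
    let mut lines: Vec<usize> = graph
        .edges(root)
        .filter(|edge| edge.target() == child)
        .map(|edge| *edge.weight())
        .collect();
    lines.sort_unstable();
    assert_eq!(lines, vec![2, 4]);
}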
@@ -51,7 +51,7 @@ impl VirtualMergedDocument {
 
         for node in nodes {
             let graph = self.graph.borrow();
-            let path = graph.get_node(node.0);
+            let path = graph.get_node(node.child);
 
             if sources.contains_key(&path) {
                 continue;
@@ -103,7 +103,7 @@ impl Invokeable for VirtualMergedDocument {
 
             let mut source_mapper = SourceMapper::new(all_sources.len());
             let graph = self.graph.borrow();
-            let view = merge_views::generate_merge_list(&tree, &all_sources, &graph, &mut source_mapper);
+            let view = merge_views::MergeViewBuilder::new(&tree, &all_sources, &graph, &mut source_mapper).build();
            return Ok(serde_json::value::Value::String(view));
        }
        return Err(format_err!(
@@ -56,21 +56,21 @@ impl<'a> Iterator for Dfs<'a> {
     fn next(&mut self) -> Option<Result<FilialTuple, error::CycleError>> {
         let parent = self.cycle.last().map(|p| p.node);
 
-        if let Some(node) = self.stack.pop() {
+        if let Some(child) = self.stack.pop() {
             self.cycle.push(VisitCount {
-                node,
-                children: self.graph.graph.edges(node).count(),
+                node: child,
+                children: self.graph.graph.edges(child).count(),
                 touch: 1,
             });
 
-            let mut children = self.graph.child_node_indexes(node);
+            let mut children: Vec<NodeIndex> = self.graph.child_node_indexes(child).collect();
 
             if !children.is_empty() {
                 // sort by line number in parent
                 children.sort_by(|x, y| {
                     let graph = &self.graph.graph;
-                    let edge1 = graph.edge_weight(graph.find_edge(node, *x).unwrap()).unwrap();
-                    let edge2 = graph.edge_weight(graph.find_edge(node, *y).unwrap()).unwrap();
+                    let edge1 = graph.edge_weight(graph.find_edge(child, *x).unwrap()).unwrap();
+                    let edge2 = graph.edge_weight(graph.find_edge(child, *y).unwrap()).unwrap();
 
                     edge2.line.cmp(&edge1.line)
                 });
@@ -87,7 +87,7 @@ impl<'a> Iterator for Dfs<'a> {
                 self.reset_path_to_branch();
             }
 
-            return Some(Ok((node, parent)));
+            return Some(Ok(FilialTuple { child, parent }));
         }
         None
     }
@@ -189,8 +189,8 @@ mod dfs_test {
             collection.push(i.unwrap());
         }
 
-        let nodes: Vec<NodeIndex> = collection.iter().map(|n| n.0).collect();
-        let parents: Vec<Option<NodeIndex>> = collection.iter().map(|n| n.1).collect();
+        let nodes: Vec<NodeIndex> = collection.iter().map(|n| n.child).collect();
+        let parents: Vec<Option<NodeIndex>> = collection.iter().map(|n| n.parent).collect();
         // 0
         // / \
         // 1 2
@@ -237,8 +237,8 @@ mod dfs_test {
             collection.push(i.unwrap());
         }
 
-        let nodes: Vec<NodeIndex> = collection.iter().map(|n| n.0).collect();
-        let parents: Vec<Option<NodeIndex>> = collection.iter().map(|n| n.1).collect();
+        let nodes: Vec<NodeIndex> = collection.iter().map(|n| n.child).collect();
+        let parents: Vec<Option<NodeIndex>> = collection.iter().map(|n| n.parent).collect();
         // 0
         // / \
         // 1 2
@@ -1,6 +1,7 @@
 use petgraph::stable_graph::EdgeIndex;
 use petgraph::stable_graph::NodeIndex;
 use petgraph::stable_graph::StableDiGraph;
+use petgraph::visit::EdgeRef;
 use petgraph::Direction;
 
 use std::{
@@ -53,20 +54,27 @@ impl CachedStableGraph {
             }
         }
     }
 
+    // Returns the `PathBuf` for a given `NodeIndex`
     pub fn get_node(&self, node: NodeIndex) -> PathBuf {
         PathBuf::from_str(&self.graph[node]).unwrap()
     }
 
-    pub fn get_edge_meta(&self, parent: NodeIndex, child: NodeIndex) -> &IncludePosition {
-        self.graph.edge_weight(self.graph.find_edge(parent, child).unwrap()).unwrap()
-    }
-
-    #[allow(dead_code)]
-    pub fn remove_node(&mut self, name: &Path) {
-        let idx = self.cache.remove(name);
-        if let Some(idx) = idx {
-            self.graph.remove_node(idx);
-        }
+    /// returns an iterator over all the `IncludePosition`'s between a parent and its child for all the positions
+    /// that the child may be imported into the parent, in order of import.
+    pub fn get_edge_metas(&self, parent: NodeIndex, child: NodeIndex) -> impl Iterator<Item = IncludePosition> + '_ {
+        let mut edges = self
+            .graph
+            .edges(parent)
+            .filter_map(move |edge| {
+                let target = self.graph.edge_endpoints(edge.id()).unwrap().1;
+                if target != child {
+                    return None;
+                }
+                Some(self.graph[edge.id()])
+            })
+            .collect::<Vec<IncludePosition>>();
+        edges.sort_by(|x, y| x.line.cmp(&y.line));
+        edges.into_iter()
     }
 
     pub fn add_node(&mut self, name: &Path) -> NodeIndex {
@@ -83,54 +91,24 @@ impl CachedStableGraph {
         self.graph.add_edge(parent, child, meta)
     }
 
-    pub fn remove_edge(&mut self, parent: NodeIndex, child: NodeIndex) {
-        let edge = self.graph.find_edge(parent, child).unwrap();
-        self.graph.remove_edge(edge);
-    }
-
-    #[allow(dead_code)]
-    pub fn edge_weights(&self, node: NodeIndex) -> Vec<IncludePosition> {
-        self.graph.edges(node).map(|e| e.weight().clone()).collect()
-    }
-
-    #[allow(dead_code)]
-    pub fn child_node_names(&self, node: NodeIndex) -> Vec<PathBuf> {
+    pub fn remove_edge(&mut self, parent: NodeIndex, child: NodeIndex, position: IncludePosition) {
         self.graph
-            .neighbors(node)
-            .map(|n| self.reverse_index.get(&n).unwrap().clone())
-            .collect()
+            .edges(parent)
+            .find(|edge| self.graph.edge_endpoints(edge.id()).unwrap().1 == child && *edge.weight() == position)
+            .map(|edge| edge.id())
+            .and_then(|edge| self.graph.remove_edge(edge));
     }
 
-    pub fn child_node_meta(&self, node: NodeIndex) -> Vec<(PathBuf, IncludePosition)> {
-        self.graph
-            .neighbors(node)
-            .map(|n| {
-                let edge = self.graph.find_edge(node, n).unwrap();
-                let edge_meta = self.graph.edge_weight(edge).unwrap();
-                return (self.reverse_index.get(&n).unwrap().clone(), edge_meta.clone());
-            })
-            .collect()
+    pub fn child_node_metas(&self, node: NodeIndex) -> impl Iterator<Item = (PathBuf, IncludePosition)> + '_ {
+        self.graph.neighbors(node).map(move |n| {
+            let edge = self.graph.find_edge(node, n).unwrap();
+            let edge_meta = self.graph.edge_weight(edge).unwrap();
+            return (self.reverse_index.get(&n).unwrap().clone(), *edge_meta);
+        })
     }
 
-    pub fn child_node_indexes(&self, node: NodeIndex) -> Vec<NodeIndex> {
-        self.graph.neighbors(node).collect()
-    }
-
-    #[allow(dead_code)]
-    pub fn parent_node_names(&self, node: NodeIndex) -> Vec<PathBuf> {
-        self.graph
-            .neighbors_directed(node, Direction::Incoming)
-            .map(|n| self.reverse_index.get(&n).unwrap().clone())
-            .collect()
-    }
-
-    pub fn parent_node_indexes(&self, node: NodeIndex) -> Vec<NodeIndex> {
-        self.graph.neighbors_directed(node, Direction::Incoming).collect()
-    }
-
-    #[allow(dead_code)]
-    pub fn get_include_meta(&self, node: NodeIndex) -> Vec<IncludePosition> {
-        self.graph.edges(node).map(|e| e.weight().clone()).collect()
+    pub fn child_node_indexes(&self, node: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
+        self.graph.neighbors(node)
     }
 
     pub fn collect_root_ancestors(&self, node: NodeIndex) -> Vec<NodeIndex> {
@@ -138,6 +116,11 @@ impl CachedStableGraph {
         self.get_root_ancestors(node, node, &mut visited)
     }
 
+    // TODO: impl Iterator
+    fn parent_node_indexes(&self, node: NodeIndex) -> Vec<NodeIndex> {
+        self.graph.neighbors_directed(node, Direction::Incoming).collect()
+    }
+
     fn get_root_ancestors(&self, initial: NodeIndex, node: NodeIndex, visited: &mut HashSet<NodeIndex>) -> Vec<NodeIndex> {
         if node == initial && !visited.is_empty() {
             return vec![];
@@ -163,10 +146,36 @@ impl CachedStableGraph {
         }
     }
 }
 
+#[cfg(test)]
+impl CachedStableGraph {
+    fn parent_node_names(&self, node: NodeIndex) -> Vec<PathBuf> {
+        self.graph
+            .neighbors_directed(node, Direction::Incoming)
+            .map(|n| self.reverse_index.get(&n).unwrap().clone())
+            .collect()
+    }
+
+    fn child_node_names(&self, node: NodeIndex) -> Vec<PathBuf> {
+        self.graph
+            .neighbors(node)
+            .map(|n| self.reverse_index.get(&n).unwrap().clone())
+            .collect()
+    }
+
+    fn remove_node(&mut self, name: &Path) {
+        let idx = self.cache.remove(name);
+        if let Some(idx) = idx {
+            self.graph.remove_node(idx);
+        }
+    }
+}
+
 #[cfg(test)]
 mod graph_test {
     use std::path::PathBuf;
 
+    use petgraph::graph::NodeIndex;
+
     use crate::{graph::CachedStableGraph, IncludePosition};
 
     #[test]
@@ -182,7 +191,7 @@ mod graph_test {
         assert_eq!(children.len(), 1);
         assert_eq!(children[0], Into::<PathBuf>::into("banana".to_string()));
 
-        let children = graph.child_node_indexes(idx1);
+        let children: Vec<NodeIndex> = graph.child_node_indexes(idx1).collect();
         assert_eq!(children.len(), 1);
         assert_eq!(children[0], idx2);
 
@@ -207,10 +216,33 @@ mod graph_test {
         graph.remove_node(&PathBuf::from("sample"));
         assert_eq!(graph.graph.node_count(), 1);
         assert!(graph.find_node(&PathBuf::from("sample")).is_none());
 
         let neighbors = graph.child_node_names(idx2);
         assert_eq!(neighbors.len(), 0);
     }
 
+    #[test]
+    #[logging_macro::log_scope]
+    fn test_double_import() {
+        let mut graph = CachedStableGraph::new();
+
+        let idx0 = graph.add_node(&PathBuf::from("0"));
+        let idx1 = graph.add_node(&PathBuf::from("1"));
+
+        graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 });
+        graph.add_edge(idx0, idx1, IncludePosition { line: 4, start: 0, end: 0 });
+
+        // 0
+        // / \
+        // 1 1
+
+        assert_eq!(2, graph.get_edge_metas(idx0, idx1).count());
+
+        let mut edge_metas = graph.get_edge_metas(idx0, idx1);
+        assert_eq!(Some(IncludePosition { line: 2, start: 0, end: 0 }), edge_metas.next());
+        assert_eq!(Some(IncludePosition { line: 4, start: 0, end: 0 }), edge_metas.next());
+    }
+
     #[test]
     #[logging_macro::log_scope]
     fn test_collect_root_ancestors() {
@@ -287,10 +319,8 @@ mod graph_test {
         graph.add_edge(idx1, idx3, IncludePosition { line: 5, start: 0, end: 0 });
 
         // 0
-        // |
-        // 1
-        // \
-        // 2 \
+        // \
+        // 2 1
         // \ /
         // 3
 
@@ -16,7 +16,6 @@ use url_norm::FromUrl;
 
 use walkdir::WalkDir;
 
-use std::collections::hash_map::RandomState;
 use std::collections::{HashMap, HashSet};
 use std::convert::TryFrom;
 use std::fmt::{Debug, Display, Formatter};
@@ -60,9 +59,7 @@ mod url_norm;
 mod test;
 
 lazy_static! {
-    static ref RE_VERSION: Regex = Regex::new(r#"#version [\d]{3}"#).unwrap();
     static ref RE_INCLUDE: Regex = Regex::new(r#"^(?:\s)*?(?:#include) "(.+)"\r?"#).unwrap();
-    static ref RE_INCLUDE_EXTENSION: Regex = Regex::new(r#"#extension GL_GOOGLE_include_directive ?: ?require"#).unwrap();
 }
 
 fn main() {
@@ -119,10 +116,13 @@ pub struct MinecraftShaderLanguageServer {
     log_guard: Option<slog_scope::GlobalLoggerGuard>,
 }
 
-#[derive(Clone, PartialEq, Eq, Hash)]
+#[derive(Clone, Copy, PartialEq, Eq, Hash)]
 pub struct IncludePosition {
+    // the 0-indexed line on which the include lives.
     line: usize,
+    // the 0-indexed char offset defining the start of the include path string.
     start: usize,
+    // the 0-indexed char offset defining the end of the include path string.
     end: usize,
 }
 
@@ -178,8 +178,8 @@ impl MinecraftShaderLanguageServer {
             None => return None,
         };
 
-        // TODO: include user added extensions
-        if ext != "vsh" && ext != "fsh" && ext != "glsl" && ext != "inc" {
+        // TODO: include user added extensions with a set
+        if ext != "vsh" && ext != "fsh" && ext != "csh" && ext != "gsh" && ext != "glsl" && ext != "inc" {
             return None;
         }
 
@@ -251,8 +251,8 @@ impl MinecraftShaderLanguageServer {
             Some(n) => n,
         };
 
-        let prev_children: HashSet<_, RandomState> = HashSet::from_iter(self.graph.borrow().child_node_meta(idx));
-        let new_children: HashSet<_, RandomState> = includes.iter().cloned().collect();
+        let prev_children: HashSet<_> = HashSet::from_iter(self.graph.borrow().child_node_metas(idx));
+        let new_children: HashSet<_> = includes.iter().cloned().collect();
 
         let to_be_added = new_children.difference(&prev_children);
         let to_be_removed = prev_children.difference(&new_children);
@@ -265,7 +265,7 @@ impl MinecraftShaderLanguageServer {
 
         for removal in to_be_removed {
             let child = self.graph.borrow_mut().find_node(&removal.0).unwrap();
-            self.graph.borrow_mut().remove_edge(idx, child);
+            self.graph.borrow_mut().remove_edge(idx, child, removal.1);
         }
 
         for insertion in to_be_added {
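Why the (path, position) pairs above can be diffed at all: IncludePosition is now Copy/Eq/Hash, so two includes of the same file on different lines stay distinct set members, and deleting one of them maps to exactly one remove_edge call. A standalone sketch of that set difference with simplified stand-in types (not the server's own types or function names):

use std::collections::HashSet;
use std::path::PathBuf;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Pos {
    line: usize,
    start: usize,
    end: usize,
}

fn main() {
    // what the graph currently knows: "test.glsl" is included on lines 3 and 5
    let prev: HashSet<(PathBuf, Pos)> = [
        (PathBuf::from("test.glsl"), Pos { line: 3, start: 0, end: 0 }),
        (PathBuf::from("test.glsl"), Pos { line: 5, start: 0, end: 0 }),
    ]
    .into_iter()
    .collect();

    // what the freshly parsed file contains: the include on line 5 was deleted
    let new: HashSet<(PathBuf, Pos)> = [(PathBuf::from("test.glsl"), Pos { line: 3, start: 0, end: 0 })]
        .into_iter()
        .collect();

    // only the pair for line 5 falls out of the difference, so only that edge is removed
    let to_be_removed: Vec<_> = prev.difference(&new).collect();
    assert_eq!(to_be_removed.len(), 1);
    assert_eq!(to_be_removed[0].1.line, 5);
}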
@@ -325,7 +325,10 @@ impl MinecraftShaderLanguageServer {
 
         let view = {
             let graph = self.graph.borrow();
-            merge_views::generate_merge_list(&tree, &all_sources, &graph, &mut source_mapper)
+            let merged_string = {
+                merge_views::MergeViewBuilder::new(&tree, &all_sources, &graph, &mut source_mapper).build()
+            };
+            merged_string
         };
 
         let root_path = self.graph.borrow().get_node(root);
@@ -365,7 +368,7 @@ impl MinecraftShaderLanguageServer {
 
             diagnostics.extend(diagnostics_parser.parse_diagnostics_output(stdout, uri, &source_mapper, &self.graph.borrow()));
         } else {
-            let mut all_trees: Vec<(TreeType, Vec<(NodeIndex, Option<_>)>)> = Vec::new();
+            let mut all_trees: Vec<(TreeType, Vec<FilialTuple>)> = Vec::new();
 
             for root in &file_ancestors {
                 let nodes = match self.get_dfs_for_node(*root) {
@@ -408,10 +411,13 @@ impl MinecraftShaderLanguageServer {
                 let mut source_mapper = source_mapper::SourceMapper::new(all_sources.len());
                 let view = {
                     let graph = self.graph.borrow();
-                    merge_views::generate_merge_list(&tree.1, &all_sources, &graph, &mut source_mapper)
+                    let merged_string = {
+                        merge_views::MergeViewBuilder::new(&tree.1, &all_sources, &graph, &mut source_mapper).build()
+                    };
+                    merged_string
                 };
 
-                let root_path = self.graph.borrow().get_node(tree.1[0].0);
+                let root_path = self.graph.borrow().get_node(tree.1.first().unwrap().child);
                 let stdout = match self.compile_shader_source(&view, tree.0, &root_path) {
                     Some(s) => s,
                     None => continue,
@@ -451,7 +457,7 @@ impl MinecraftShaderLanguageServer {
 
         for node in nodes {
             let graph = self.graph.borrow();
-            let path = graph.get_node(node.0);
+            let path = graph.get_node(node.child);
 
             if sources.contains_key(&path) {
                 continue;
@@ -817,29 +823,30 @@ impl LanguageServerHandling for MinecraftShaderLanguageServer {
             .graph
             .borrow()
             .child_node_indexes(node)
-            .into_iter()
-            .filter_map(|child| {
+            .filter_map::<Vec<DocumentLink>, _>(|child| {
                 let graph = self.graph.borrow();
-                let value = graph.get_edge_meta(node, child);
+                graph.get_edge_metas(node, child).map(|value| {
                 let path = graph.get_node(child);
                 let url = match Url::from_file_path(&path) {
                     Ok(url) => url,
                     Err(e) => {
                         error!("error converting into url"; "path" => path.to_str().unwrap(), "error" => format!("{:?}", e));
                         return None;
                     }
                 };
 
                 Some(DocumentLink {
                     range: Range::new(
                         Position::new(u32::try_from(value.line).unwrap(), u32::try_from(value.start).unwrap()),
                         Position::new(u32::try_from(value.line).unwrap(), u32::try_from(value.end).unwrap()),
                     ),
                     target: Some(url.clone()),
                     tooltip: Some(url.path().to_string()),
                     data: None,
                 })
+                }).collect()
             })
+            .flatten()
             .collect();
         debug!("document link results";
             "links" => format!("{:?}", edges.iter().map(|e| (e.range, e.target.as_ref().unwrap().path())).collect::<Vec<_>>()),
@@ -11,274 +11,335 @@ use petgraph::stable_graph::NodeIndex;
 
 use crate::graph::CachedStableGraph;
 use crate::source_mapper::SourceMapper;
+use crate::IncludePosition;
 
-/// FilialTuple represents a tuple with a child at index 0
-/// and a parent at index 1. Parent can be nullable in the case of
-/// the child being a top level node in the tree.
-pub type FilialTuple = (NodeIndex, Option<NodeIndex>);
+/// FilialTuple represents a tuple (not really) of a child and any legitimate
+/// parent. Parent can be nullable in the case of the child being a top level
+/// node in the tree.
+#[derive(Hash, PartialEq, Eq, Debug, Clone, Copy)]
+pub struct FilialTuple {
+    pub child: NodeIndex,
+    pub parent: Option<NodeIndex>,
+}
 
-pub fn generate_merge_list<'a>(
-    nodes: &'a [FilialTuple], sources: &'a HashMap<PathBuf, String>, graph: &'a CachedStableGraph, source_mapper: &mut SourceMapper,
-) -> String {
-    // contains additionally inserted lines such as #line and other directives, preamble defines etc
-    let mut extra_lines: Vec<String> = Vec::new();
-    extra_lines.reserve((nodes.len() * 2) + 2);
-
-    // list of source code views onto the below sources
-    let mut merge_list: LinkedList<&'a str> = LinkedList::new();
-
-    // holds the offset into the child which has been added to the merge list for a parent.
-    // A child can have multiple parents for a given tree, hence we have to track it for
-    // a (child, parent) tuple instead of just the child.
-    let mut last_offset_set: HashMap<FilialTuple, usize> = HashMap::new();
-
-    let mut nodes_iter = nodes.iter().peekable();
-
-    // invariant: nodes_iter always has _at least_ one element. Can't save a not-file :B
-    let first = nodes_iter.next().unwrap().0;
-    let first_path = graph.get_node(first);
-    let first_source = sources.get(&first_path).unwrap();
-
-    // seed source_mapper with top-level file
-    source_mapper.get_num(first);
-
-    let version_line_offset = find_version_offset(first_source);
-    let version_char_offsets = char_offset_for_line(version_line_offset, first_source);
-    // add_preamble(
-    //     version_line_offset,
-    //     version_char_offsets.1,
-    //     &first_path,
-    //     first,
-    //     first_source,
-    //     &mut merge_list,
-    //     &mut extra_lines,
-    //     source_mapper,
-    // );
-
-    // last_offset_set.insert((first, None), version_char_offsets.1);
-    last_offset_set.insert((first, None), 0);
-
-    // stack to keep track of the depth first traversal
-    let mut stack = VecDeque::<NodeIndex>::new();
-
-    create_merge_views(
-        &mut nodes_iter,
-        &mut merge_list,
-        &mut last_offset_set,
-        graph,
-        sources,
-        &mut extra_lines,
-        &mut stack,
-        source_mapper,
-    );
-
-    // now we add a view of the remainder of the root file
-    let offset = *last_offset_set.get(&(first, None)).unwrap();
-
-    let len = first_source.len();
-    merge_list.push_back(&first_source[min(offset, len)..]);
-
-    let total_len = merge_list.iter().fold(0, |a, b| a + b.len());
-
-    let mut merged = String::with_capacity(total_len);
-    merged.extend(merge_list);
-
-    merged
-}
-
-fn create_merge_views<'a>(
-    nodes: &mut Peekable<Iter<FilialTuple>>, merge_list: &mut LinkedList<&'a str>, last_offset_set: &mut HashMap<FilialTuple, usize>,
-    graph: &'a CachedStableGraph, sources: &'a HashMap<PathBuf, String>, extra_lines: &mut Vec<String>, stack: &mut VecDeque<NodeIndex>,
-    source_mapper: &mut SourceMapper,
-) {
-    loop {
-        let n = match nodes.next() {
-            Some(n) => n,
-            None => return,
-        };
-
-        // invariant: never None as only the first element in `nodes` should have a None, which is popped off in the calling function
-        let parent = n.1.unwrap();
-        let child = n.0;
-        let edge = graph.get_edge_meta(parent, child);
-        let parent_path = graph.get_node(parent).clone();
-        let child_path = graph.get_node(child).clone();
-
-        let parent_source = sources.get(&parent_path).unwrap();
-        let (char_for_line, char_following_line) = char_offset_for_line(edge.line, parent_source);
-
-        let offset = *last_offset_set
-            .insert((parent, stack.back().copied()), char_following_line)
-            .get_or_insert(0);
-        merge_list.push_back(&parent_source[offset..char_for_line]);
-        add_opening_line_directive(&child_path, child, merge_list, extra_lines, source_mapper);
-
-        match nodes.peek() {
-            Some(next) => {
-                let next = *next;
-                // if the next pair's parent is not a child of the current pair, we dump the rest of this childs source
-                if next.1.unwrap() != child {
-                    let child_source = sources.get(&child_path).unwrap();
-                    // if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad
-                    let offset = {
-                        match child_source.ends_with('\n') {
-                            true => child_source.len() - 1,
-                            false => child_source.len(),
-                        }
-                    };
-                    merge_list.push_back(&child_source[..offset]);
-                    last_offset_set.insert((child, Some(parent)), 0);
-                    // +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line
-                    add_closing_line_directive(edge.line + 2, &parent_path, parent, merge_list, extra_lines, source_mapper);
-                    // if the next pair's parent is not the current pair's parent, we need to bubble up
-                    if stack.contains(&next.1.unwrap()) {
-                        return;
-                    }
-                    continue;
-                }
-
-                stack.push_back(parent);
-                create_merge_views(
-                    nodes,
-                    merge_list,
-                    last_offset_set,
-                    graph,
-                    sources,
-                    extra_lines,
-                    stack,
-                    source_mapper,
-                );
-                stack.pop_back();
-
-                let offset = *last_offset_set.get(&(child, Some(parent))).unwrap();
-                let child_source = sources.get(&child_path).unwrap();
-                // this evaluates to false once the file contents have been exhausted aka offset = child_source.len() + 1
-                let end_offset = match child_source.ends_with('\n') {
-                    true => 1, /* child_source.len()-1 */
-                    false => 0, /* child_source.len() */
-                };
-                if offset < child_source.len() - end_offset {
-                    // if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad
-                    merge_list.push_back(&child_source[offset../* std::cmp::max( */child_source.len()-end_offset/* , offset) */]);
-                    last_offset_set.insert((child, Some(parent)), 0);
-                }
-
-                // +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line
-                add_closing_line_directive(edge.line + 2, &parent_path, parent, merge_list, extra_lines, source_mapper);
-
-                // we need to check the next item at the point of original return further down the callstack
-                if nodes.peek().is_some() && stack.contains(&nodes.peek().unwrap().1.unwrap()) {
-                    return;
-                }
-            }
-            None => {
-                let child_source = sources.get(&child_path).unwrap();
-                // if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad
-                let offset = match child_source.ends_with('\n') {
-                    true => child_source.len() - 1,
-                    false => child_source.len(),
-                };
-                merge_list.push_back(&child_source[..offset]);
-                last_offset_set.insert((child, Some(parent)), 0);
-                // +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line
-                add_closing_line_directive(edge.line + 2, &parent_path, parent, merge_list, extra_lines, source_mapper);
-            }
-        }
-    }
-}
+/// Merges the source strings according to the nodes comprising a tree of imports into a GLSL source string
+/// that can be handed off to the GLSL compiler.
+pub struct MergeViewBuilder<'a> {
+    nodes: &'a [FilialTuple],
+    nodes_peeker: Peekable<Iter<'a, FilialTuple>>,
+
+    sources: &'a HashMap<PathBuf, String>,
+    graph: &'a CachedStableGraph,
+    source_mapper: &'a mut SourceMapper,
+
+    // holds the offset into the child which has been added to the merge list for a parent.
+    // A child can have multiple parents for a given tree, and be included multiple times
+    // by the same parent, hence we have to track it for a ((child, parent), line) tuple
+    // instead of just the child or (child, parent).
+    last_offset_set: HashMap<FilialTuple, usize>,
+    // holds, for any given filial tuple, the iterator yielding all the positions at which the child
+    // is included into the parent in line-sorted order. This is necessary for files that are imported
+    // more than once into the same parent, so we can easily get the next include position.
+    parent_child_edge_iterator: HashMap<FilialTuple, Box<(dyn Iterator<Item = IncludePosition> + 'a)>>,
+}
+
+impl<'a> MergeViewBuilder<'a> {
+    pub fn new(
+        nodes: &'a [FilialTuple], sources: &'a HashMap<PathBuf, String>, graph: &'a CachedStableGraph, source_mapper: &'a mut SourceMapper,
+    ) -> Self {
+        MergeViewBuilder {
+            nodes,
+            nodes_peeker: nodes.iter().peekable(),
+            sources,
+            graph,
+            source_mapper,
+            last_offset_set: HashMap::new(),
+            parent_child_edge_iterator: HashMap::new(),
+        }
+    }
+
+    pub fn build(&mut self) -> String {
+        // contains additionally inserted lines such as #line and other directives, preamble defines etc
+        let mut extra_lines: Vec<String> = Vec::new();
+        extra_lines.reserve((self.nodes.len() * 2) + 2);
+
+        // list of source code views onto the below sources
+        let mut merge_list: LinkedList<&'a str> = LinkedList::new();
+
+        // invariant: nodes_iter always has _at least_ one element. Can't save a not-file :B
+        let first = self.nodes_peeker.next().unwrap().child;
+        let first_path = self.graph.get_node(first);
+        let first_source = self.sources.get(&first_path).unwrap();
+
+        // seed source_mapper with top-level file
+        self.source_mapper.get_num(first);
+
+        let version_line_offset = self.find_version_offset(first_source);
+        let _version_char_offsets = self.char_offset_for_line(version_line_offset, first_source);
+        // add_preamble(
+        //     version_line_offset,
+        //     version_char_offsets.1,
+        //     &first_path,
+        //     first,
+        //     first_source,
+        //     &mut merge_list,
+        //     &mut extra_lines,
+        //     source_mapper,
+        // );
+
+        // last_offset_set.insert((first, None), version_char_offsets.1);
+        self.last_offset_set.insert(
+            FilialTuple {
+                child: first,
+                parent: None,
+            },
+            0,
+        );
+
+        // stack to keep track of the depth first traversal
+        let mut stack = VecDeque::<NodeIndex>::new();
+
+        self.create_merge_views(&mut merge_list, &mut extra_lines, &mut stack);
+
+        // now we add a view of the remainder of the root file
+        let offset = *self
+            .last_offset_set
+            .get(&FilialTuple {
+                child: first,
+                parent: None,
+            })
+            .unwrap();
+
+        let len = first_source.len();
+        merge_list.push_back(&first_source[min(offset, len)..]);
+
+        let total_len = merge_list.iter().fold(0, |a, b| a + b.len());
+
+        let mut merged = String::with_capacity(total_len);
+        merged.extend(merge_list);
+
+        merged
+    }
+
+    fn create_merge_views(&mut self, merge_list: &mut LinkedList<&'a str>, extra_lines: &mut Vec<String>, stack: &mut VecDeque<NodeIndex>) {
+        loop {
+            let n = match self.nodes_peeker.next() {
+                Some(n) => n,
+                None => return,
+            };
+
+            // invariant: never None as only the first element in `nodes` should have a None, which is popped off in the calling function
+            let (parent, child) = (n.parent.unwrap(), n.child);
+            // gets the next include position for the filial tuple, seeding if this is the first time querying this tuple
+            let edge = self
+                .parent_child_edge_iterator
+                .entry(*n)
+                .or_insert_with(|| {
+                    let edge_metas = self.graph.get_edge_metas(parent, child);
+                    Box::new(edge_metas)
+                })
+                .next()
+                .unwrap();
+            let parent_path = self.graph.get_node(parent).clone();
+            let child_path = self.graph.get_node(child).clone();
+
+            let parent_source = self.sources.get(&parent_path).unwrap();
+            let (char_for_line, char_following_line) = self.char_offset_for_line(edge.line, parent_source);
+
+            let offset = *self
+                .last_offset_set
+                .insert(
+                    FilialTuple {
+                        child: parent,
+                        parent: stack.back().copied(),
+                    },
+                    char_following_line,
+                )
+                .get_or_insert(0);
+            merge_list.push_back(&parent_source[offset..char_for_line]);
+            self.add_opening_line_directive(&child_path, child, merge_list, extra_lines);
+
+            match self.nodes_peeker.peek() {
+                Some(next) => {
+                    let next = *next;
+                    // if the next pair's parent is not a child of the current pair, we dump the rest of this childs source
+                    if next.parent.unwrap() != child {
+                        let child_source = self.sources.get(&child_path).unwrap();
+                        // if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad
+                        let offset = {
+                            match child_source.ends_with('\n') {
+                                true => child_source.len() - 1,
+                                false => child_source.len(),
+                            }
+                        };
+                        merge_list.push_back(&child_source[..offset]);
+                        self.last_offset_set.insert(
+                            FilialTuple {
+                                child,
+                                parent: Some(parent),
+                            },
+                            0,
+                        );
+                        // +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line
+                        self.add_closing_line_directive(edge.line + 2, &parent_path, parent, merge_list, extra_lines);
+                        // if the next pair's parent is not the current pair's parent, we need to bubble up
+                        if stack.contains(&next.parent.unwrap()) {
+                            return;
+                        }
+                        continue;
+                    }
+
+                    stack.push_back(parent);
+                    self.create_merge_views(merge_list, extra_lines, stack);
+                    stack.pop_back();
+
+                    let offset = *self
+                        .last_offset_set
+                        .get(&FilialTuple {
+                            child,
+                            parent: Some(parent),
+                        })
+                        .unwrap();
+                    let child_source = self.sources.get(&child_path).unwrap();
+                    // this evaluates to false once the file contents have been exhausted aka offset = child_source.len() + 1
+                    let end_offset = match child_source.ends_with('\n') {
+                        true => 1, /* child_source.len()-1 */
+                        false => 0, /* child_source.len() */
+                    };
+                    if offset < child_source.len() - end_offset {
+                        // if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad
+                        merge_list.push_back(&child_source[offset../* std::cmp::max( */child_source.len()-end_offset/* , offset) */]);
+                        self.last_offset_set.insert(
+                            FilialTuple {
+                                child,
+                                parent: Some(parent),
+                            },
+                            0,
+                        );
+                    }
+
+                    // +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line
+                    self.add_closing_line_directive(edge.line + 2, &parent_path, parent, merge_list, extra_lines);
+
+                    // we need to check the next item at the point of original return further down the callstack
+                    if self.nodes_peeker.peek().is_some() && stack.contains(&self.nodes_peeker.peek().unwrap().parent.unwrap()) {
+                        return;
+                    }
+                }
+                None => {
+                    let child_source = self.sources.get(&child_path).unwrap();
+                    // if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad
+                    let offset = match child_source.ends_with('\n') {
+                        true => child_source.len() - 1,
+                        false => child_source.len(),
+                    };
+                    merge_list.push_back(&child_source[..offset]);
+                    self.last_offset_set.insert(
+                        FilialTuple {
+                            child,
+                            parent: Some(parent),
+                        },
+                        0,
+                    );
+                    // +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line
+                    self.add_closing_line_directive(edge.line + 2, &parent_path, parent, merge_list, extra_lines);
+                }
+            }
+        }
+    }
 
     // returns the character offset + 1 of the end of line number `line` and the character
     // offset + 1 for the end of the line after the previous one
-    fn char_offset_for_line(line_num: usize, source: &str) -> (usize, usize) {
+    fn char_offset_for_line(&self, line_num: usize, source: &str) -> (usize, usize) {
         let mut char_for_line: usize = 0;
         let mut char_following_line: usize = 0;
         for (n, line) in source.lines().enumerate() {
             if n == line_num {
                 char_following_line += line.len() + 1;
                 break;
             }
             char_for_line += line.len() + 1;
             char_following_line = char_for_line;
         }
         (char_for_line, char_following_line)
     }
 
-    fn find_version_offset(source: &str) -> usize {
+    fn find_version_offset(&self, source: &str) -> usize {
         source
             .lines()
             .enumerate()
             .find(|(_, line)| line.starts_with("#version "))
             .map_or(0, |(i, _)| i)
     }
 
     // fn add_preamble<'a>(
     //     version_line_offset: usize, version_char_offset: usize, path: &Path, node: NodeIndex, source: &'a str,
     //     merge_list: &mut LinkedList<&'a str>, extra_lines: &mut Vec<String>, source_mapper: &mut SourceMapper,
     // ) {
     //     // TODO: Optifine #define preabmle
     //     merge_list.push_back(&source[..version_char_offset]);
     //     let google_line_directive = format!(
     //         "#extension GL_GOOGLE_cpp_style_line_directive : enable\n#line {} {} // {}\n",
     //         // +2 because 0 indexed but #line is 1 indexed and references the *following* line
     //         version_line_offset + 2,
     //         source_mapper.get_num(node),
     //         path.to_str().unwrap().replace('\\', "\\\\"),
     //     );
     //     extra_lines.push(google_line_directive);
     //     unsafe_get_and_insert(merge_list, extra_lines);
     // }
 
     fn add_opening_line_directive(
-        path: &Path, node: NodeIndex, merge_list: &mut LinkedList<&str>, extra_lines: &mut Vec<String>, source_mapper: &mut SourceMapper,
+        &mut self, path: &Path, node: NodeIndex, merge_list: &mut LinkedList<&str>, extra_lines: &mut Vec<String>,
     ) {
         let line_directive = format!(
             "#line 1 {} // {}\n",
-            source_mapper.get_num(node),
+            self.source_mapper.get_num(node),
             path.to_str().unwrap().replace('\\', "\\\\")
         );
         extra_lines.push(line_directive);
-        unsafe_get_and_insert(merge_list, extra_lines);
+        self.unsafe_get_and_insert(merge_list, extra_lines);
     }
 
     fn add_closing_line_directive(
-        line: usize, path: &Path, node: NodeIndex, merge_list: &mut LinkedList<&str>, extra_lines: &mut Vec<String>,
-        source_mapper: &mut SourceMapper,
+        &mut self, line: usize, path: &Path, node: NodeIndex, merge_list: &mut LinkedList<&str>, extra_lines: &mut Vec<String>,
     ) {
         // Optifine doesn't seem to add a leading newline if the previous line was a #line directive
         let line_directive = if let Some(l) = merge_list.back() {
             if l.trim().starts_with("#line") {
                 format!(
                     "#line {} {} // {}\n",
                     line,
-                    source_mapper.get_num(node),
+                    self.source_mapper.get_num(node),
                     path.to_str().unwrap().replace('\\', "\\\\")
                 )
             } else {
                 format!(
                     "\n#line {} {} // {}\n",
                     line,
-                    source_mapper.get_num(node),
+                    self.source_mapper.get_num(node),
                     path.to_str().unwrap().replace('\\', "\\\\")
                 )
             }
         } else {
             format!(
                 "\n#line {} {} // {}\n",
                 line,
-                source_mapper.get_num(node),
+                self.source_mapper.get_num(node),
                 path.to_str().unwrap().replace('\\', "\\\\")
            )
        };
 
         extra_lines.push(line_directive);
-        unsafe_get_and_insert(merge_list, extra_lines);
+        self.unsafe_get_and_insert(merge_list, extra_lines);
     }
 
-fn unsafe_get_and_insert(merge_list: &mut LinkedList<&str>, extra_lines: &[String]) {
+    fn unsafe_get_and_insert(&self, merge_list: &mut LinkedList<&str>, extra_lines: &[String]) {
         // :^)
         unsafe {
             let vec_ptr_offset = extra_lines.as_ptr().add(extra_lines.len() - 1);
             merge_list.push_back(&vec_ptr_offset.as_ref().unwrap()[..]);
         }
     }
+}
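The bookkeeping idea above in a nutshell: offsets are tracked per FilialTuple, and each (child, parent) pair owns an iterator over its line-sorted include positions, so the second time the same child is visited under the same parent the traversal resumes at the next include position rather than reusing the first one. A standalone sketch of that mechanism with simplified stand-in types (not the server's actual code):

use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct FilialTuple {
    child: u32,
    parent: Option<u32>,
}

fn main() {
    // sorted lines on which the parent includes the same child, e.g. lines 3 and 5
    let include_lines = vec![3usize, 5];
    let mut per_tuple: HashMap<FilialTuple, std::vec::IntoIter<usize>> = HashMap::new();

    let tuple = FilialTuple { child: 1, parent: Some(0) };

    // first visit: seed the iterator for this (child, parent) pair and consume line 3
    let first = per_tuple
        .entry(tuple)
        .or_insert_with(|| include_lines.clone().into_iter())
        .next();
    assert_eq!(first, Some(3));

    // second visit of the same pair: resume the same iterator and consume line 5
    let second = per_tuple.get_mut(&tuple).unwrap().next();
    assert_eq!(second, Some(5));
}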
@@ -287,7 +348,7 @@ mod merge_view_test {
     use std::fs;
     use std::path::PathBuf;
 
-    use crate::merge_views::generate_merge_list;
+    use crate::merge_views::MergeViewBuilder;
     use crate::source_mapper::SourceMapper;
     use crate::test::{copy_to_and_set_root, new_temp_server};
     use crate::IncludePosition;
@@ -300,16 +361,8 @@ mod merge_view_test {
         let (_tmp_dir, tmp_path) = copy_to_and_set_root("./testdata/01", &mut server);
         server.endpoint.request_shutdown();
 
-        let final_idx = server
-            .graph
-            .borrow_mut()
-            //.add_node(&format!("{:?}/shaders/final.fsh", tmp_path).try_into().unwrap());
-            .add_node(&tmp_path.join("shaders").join("final.fsh"));
-        let common_idx = server
-            .graph
-            .borrow_mut()
-            //.add_node(&format!("{:?}/shaders/common.glsl", tmp_path).try_into().unwrap());
-            .add_node(&tmp_path.join("shaders").join("common.glsl"));
+        let final_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("final.fsh"));
+        let common_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("common.glsl"));
 
         server
             .graph
@@ -321,7 +374,7 @@ mod merge_view_test {
 
         let graph_borrow = server.graph.borrow();
         let mut source_mapper = SourceMapper::new(0);
-        let result = generate_merge_list(&nodes, &sources, &graph_borrow, &mut source_mapper);
+        let result = MergeViewBuilder::new(&nodes, &sources, &graph_borrow, &mut source_mapper).build();
 
         let merge_file = tmp_path.join("shaders").join("final.fsh.merge");
 
@@ -352,25 +405,18 @@ mod merge_view_test {
         let (_tmp_dir, tmp_path) = copy_to_and_set_root("./testdata/02", &mut server);
         server.endpoint.request_shutdown();
 
-        let final_idx = server
-            .graph
-            .borrow_mut()
-            //.add_node(&format!("{}/shaders/{}", tmp_path, "final.fsh").try_into().unwrap());
-            .add_node(&tmp_path.join("shaders").join("final.fsh"));
+        let final_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("final.fsh"));
         let test_idx = server
             .graph
             .borrow_mut()
-            //.add_node(&format!("{}/shaders/utils/{}", tmp_path, "test.glsl").try_into().unwrap());
             .add_node(&tmp_path.join("shaders").join("utils").join("test.glsl"));
         let burger_idx = server
             .graph
             .borrow_mut()
-            //.add_node(&format!("{}/shaders/utils/{}", tmp_path, "burger.glsl").try_into().unwrap());
             .add_node(&tmp_path.join("shaders").join("utils").join("burger.glsl"));
         let sample_idx = server
             .graph
             .borrow_mut()
-            //.add_node(&format!("{}/shaders/utils/{}", tmp_path, "sample.glsl").try_into().unwrap());
             .add_node(&tmp_path.join("shaders").join("utils").join("sample.glsl"));
 
         server
@@ -391,7 +437,7 @@ mod merge_view_test {
 
         let graph_borrow = server.graph.borrow();
         let mut source_mapper = SourceMapper::new(0);
-        let result = generate_merge_list(&nodes, &sources, &graph_borrow, &mut source_mapper);
+        let result = MergeViewBuilder::new(&nodes, &sources, &graph_borrow, &mut source_mapper).build();
 
         let merge_file = tmp_path.join("shaders").join("final.fsh.merge");
 
@@ -434,25 +480,18 @@ mod merge_view_test {
         let (_tmp_dir, tmp_path) = copy_to_and_set_root("./testdata/03", &mut server);
         server.endpoint.request_shutdown();
 
-        let final_idx = server
-            .graph
-            .borrow_mut()
-            //.add_node(&format!("{}/shaders/{}", tmp_path, "final.fsh").try_into().unwrap());
-            .add_node(&tmp_path.join("shaders").join("final.fsh"));
+        let final_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("final.fsh"));
         let test_idx = server
             .graph
             .borrow_mut()
-            //.add_node(&format!("{}/shaders/utils/{}", tmp_path, "test.glsl").try_into().unwrap());
             .add_node(&tmp_path.join("shaders").join("utils").join("test.glsl"));
         let burger_idx = server
             .graph
             .borrow_mut()
-            //.add_node(&format!("{}/shaders/utils/{}", tmp_path, "burger.glsl").try_into().unwrap());
             .add_node(&tmp_path.join("shaders").join("utils").join("burger.glsl"));
         let sample_idx = server
             .graph
             .borrow_mut()
-            //.add_node(&format!("{}/shaders/utils/{}", tmp_path, "sample.glsl").try_into().unwrap());
             .add_node(&tmp_path.join("shaders").join("utils").join("sample.glsl"));
 
         server
@@ -473,7 +512,7 @@ mod merge_view_test {
 
         let graph_borrow = server.graph.borrow();
         let mut source_mapper = SourceMapper::new(0);
-        let result = generate_merge_list(&nodes, &sources, &graph_borrow, &mut source_mapper);
+        let result = MergeViewBuilder::new(&nodes, &sources, &graph_borrow, &mut source_mapper).build();
 
         let merge_file = tmp_path.join("shaders").join("final.fsh.merge");
 
@@ -516,30 +555,22 @@ mod merge_view_test {
         let (_tmp_dir, tmp_path) = copy_to_and_set_root("./testdata/04", &mut server);
         server.endpoint.request_shutdown();
 
-        let final_idx = server
-            .graph
-            .borrow_mut()
-            //.add_node(&format!("{}/shaders/{}", tmp_path, "final.fsh").try_into().unwrap());
-            .add_node(&tmp_path.join("shaders").join("final.fsh"));
+        let final_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("final.fsh"));
         let utilities_idx = server
             .graph
             .borrow_mut()
-            //.add_node(&format!("{}/shaders/utils/{}", tmp_path, "utilities.glsl").try_into().unwrap());
             .add_node(&tmp_path.join("shaders").join("utils").join("utilities.glsl"));
         let stuff1_idx = server
             .graph
             .borrow_mut()
-            //.add_node(&format!("{}/shaders/utils/{}", tmp_path, "stuff1.glsl").try_into().unwrap());
             .add_node(&tmp_path.join("shaders").join("utils").join("stuff1.glsl"));
         let stuff2_idx = server
             .graph
             .borrow_mut()
-            //.add_node(&format!("{}/shaders/utils/{}", tmp_path, "stuff2.glsl").try_into().unwrap());
             .add_node(&tmp_path.join("shaders").join("utils").join("stuff2.glsl"));
         let matrices_idx = server
             .graph
             .borrow_mut()
-            //.add_node(&format!("{}/shaders/lib/{}", tmp_path, "matrices.glsl").try_into().unwrap());
             .add_node(&tmp_path.join("shaders").join("lib").join("matrices.glsl"));
 
         server
@@ -564,7 +595,7 @@ mod merge_view_test {
 
         let graph_borrow = server.graph.borrow();
         let mut source_mapper = SourceMapper::new(0);
-        let result = generate_merge_list(&nodes, &sources, &graph_borrow, &mut source_mapper);
+        let result = MergeViewBuilder::new(&nodes, &sources, &graph_borrow, &mut source_mapper).build();
 
         let merge_file = tmp_path.join("shaders").join("final.fsh.merge");
 
@@ -587,4 +618,49 @@ mod merge_view_test {
 
         assert_eq!(result, truth);
     }
+
+    #[test]
+    #[logging_macro::log_scope]
+    fn test_generate_merge_list_06() {
+        let mut server = new_temp_server(None);
+
+        let (_tmp_dir, tmp_path) = copy_to_and_set_root("./testdata/06", &mut server);
+        server.endpoint.request_shutdown();
+
+        let final_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("final.fsh"));
+        let test_idx = server.graph.borrow_mut().add_node(&tmp_path.join("shaders").join("test.glsl"));
+
+        server
+            .graph
+            .borrow_mut()
+            .add_edge(final_idx, test_idx, IncludePosition { line: 3, start: 0, end: 0 });
+        server
+            .graph
+            .borrow_mut()
+            .add_edge(final_idx, test_idx, IncludePosition { line: 5, start: 0, end: 0 });
+
+        let nodes = server.get_dfs_for_node(final_idx).unwrap();
+        let sources = server.load_sources(&nodes).unwrap();
+
+        let graph_borrow = server.graph.borrow();
+        let mut source_mapper = SourceMapper::new(0);
+        let result = MergeViewBuilder::new(&nodes, &sources, &graph_borrow, &mut source_mapper).build();
+
+        let merge_file = tmp_path.join("shaders").join("final.fsh.merge");
+
+        let mut truth = fs::read_to_string(merge_file).unwrap();
+
+        for file in &[
+            // PathBuf::new().join("final.fsh").to_str().unwrap(),
+            PathBuf::new().join("test.glsl").to_str().unwrap(),
+            PathBuf::new().join("final.fsh").to_str().unwrap(),
+            PathBuf::new().join("test.glsl").to_str().unwrap(),
+            PathBuf::new().join("final.fsh").to_str().unwrap(),
+        ] {
+            let path = tmp_path.clone();
+            truth = truth.replacen("!!", &path.join("shaders").join(file).to_str().unwrap().replace('\\', "\\\\"), 1);
+        }
+
+        assert_eq!(result, truth);
+    }
 }
@@ -28,7 +28,11 @@ impl SymbolName {
             }
             ("struct_specifier", "field_declaration_list") => {
                 let struct_ident = node.child_by_field_name("name").unwrap();
-                fqname.push(format!("{}[{}]", struct_ident.utf8_text(source.as_bytes()).unwrap(), struct_ident.id()));
+                fqname.push(format!(
+                    "{}[{}]",
+                    struct_ident.utf8_text(source.as_bytes()).unwrap(),
+                    struct_ident.id()
+                ));
             }
             _ => (),
         }
server/main/testdata/05/final.fsh.merge (vendored, 2 changes)
@@ -1,5 +1,5 @@
 #version 120
-#extension GL_GOOGLE_cpp_style_line_directive : enable
+
 #line 2 "!!"
 
 #line 1 "!!"
server/main/testdata/06/final.fsh (vendored, new file, 9 lines)
@@ -0,0 +1,9 @@
+#version 120
+
+#ifdef BANANA
+#include "test.glsl"
+#else
+#include "test.glsl"
+#endif
+
+void main() {}
server/main/testdata/06/final.fsh.merge
vendored
Normal file
17
server/main/testdata/06/final.fsh.merge
vendored
Normal file
|
@ -0,0 +1,17 @@
|
||||||
|
#version 120
|
||||||
|
|
||||||
|
#ifdef BANANA
|
||||||
|
#line 1 1 // !!
|
||||||
|
int test() {
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
#line 5 0 // !!
|
||||||
|
#else
|
||||||
|
#line 1 1 // !!
|
||||||
|
int test() {
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
#line 7 0 // !!
|
||||||
|
#endif
|
||||||
|
|
||||||
|
void main() {}
|
3
server/main/testdata/06/test.glsl
vendored
Normal file
3
server/main/testdata/06/test.glsl
vendored
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
int test() {
|
||||||
|
return 1;
|
||||||
|
}
|