Mirror of https://github.com/Strum355/mcshader-lsp.git, synced 2025-08-03 16:39:16 +00:00

Commit ebab8c899a (parent 7cf009ee61): formatting and adding a buncha log points

9 changed files with 711 additions and 860 deletions
|
@ -1,34 +1,37 @@
|
|||
use std::{collections::HashMap, path::{Path, PathBuf}};
|
||||
use std::rc::Rc;
|
||||
use std::cell::RefCell;
|
||||
use std::fs::OpenOptions;
|
||||
use std::io::prelude::*;
|
||||
use std::rc::Rc;
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use petgraph::dot::Config;
|
||||
use serde_json::Value;
|
||||
|
||||
use petgraph::{dot, graph::NodeIndex};
|
||||
|
||||
use anyhow::{Result, format_err};
|
||||
use anyhow::{format_err, Result};
|
||||
use slog_scope::info;
|
||||
|
||||
use std::fs;
|
||||
|
||||
use crate::{graph::CachedStableGraph, merge_views, url_norm::FromJson};
|
||||
use crate::dfs;
|
||||
use crate::{graph::CachedStableGraph, merge_views, url_norm::FromJson};
|
||||
|
||||
pub struct CustomCommandProvider {
|
||||
commands: HashMap<String, Box<dyn Invokeable>>
|
||||
commands: HashMap<String, Box<dyn Invokeable>>,
|
||||
}
|
||||
|
||||
impl CustomCommandProvider {
|
||||
pub fn new(commands: Vec<(&str, Box<dyn Invokeable>)>) -> CustomCommandProvider {
|
||||
CustomCommandProvider{
|
||||
commands: commands.into_iter().map(|tup| {
|
||||
(tup.0.into(), tup.1)
|
||||
}).collect(),
|
||||
CustomCommandProvider {
|
||||
commands: commands.into_iter().map(|tup| (tup.0.into(), tup.1)).collect(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn execute(&self, command: &str, args: Vec<Value>, root_path: &Path) -> Result<Value> {
|
||||
pub fn execute(&self, command: &str, args: &[Value], root_path: &Path) -> Result<Value> {
|
||||
if self.commands.contains_key(command) {
|
||||
return self.commands.get(command).unwrap().run_command(root_path, args);
|
||||
}
|
||||
|
@ -37,43 +40,42 @@ impl CustomCommandProvider {
|
|||
}
|
||||
|
||||
pub trait Invokeable {
|
||||
fn run_command(&self, root: &Path, arguments: Vec<Value>) -> Result<Value>;
|
||||
fn run_command(&self, root: &Path, arguments: &[Value]) -> Result<Value>;
|
||||
}
|
||||
|
||||
pub struct GraphDotCommand {
|
||||
pub graph: Rc<RefCell<CachedStableGraph>>
|
||||
pub graph: Rc<RefCell<CachedStableGraph>>,
|
||||
}
|
||||
|
||||
impl Invokeable for GraphDotCommand {
|
||||
fn run_command(&self, root: &Path, _: Vec<Value>) -> Result<Value> {
|
||||
fn run_command(&self, root: &Path, _: &[Value]) -> Result<Value> {
|
||||
let filepath = root.join("graph.dot");
|
||||
eprintln!("generating dot file at {:?}", filepath);
|
||||
let mut file = OpenOptions::new()
|
||||
.truncate(true)
|
||||
.write(true)
|
||||
.create(true)
|
||||
.open(filepath)
|
||||
.unwrap();
|
||||
|
||||
info!("generating dot file"; "path" => filepath.as_os_str().to_str());
|
||||
|
||||
let mut file = OpenOptions::new().truncate(true).write(true).create(true).open(filepath).unwrap();
|
||||
|
||||
let mut write_data_closure = || -> Result<(), std::io::Error> {
|
||||
let graph = self.graph.as_ref();
|
||||
|
||||
file.seek(std::io::SeekFrom::Start(0))?;
|
||||
file.write_all(dot::Dot::new(&graph.borrow().graph).to_string().as_bytes())?;
|
||||
file.write_all("digraph {\n\tgraph [splines=ortho]\n\tnode [shape=box]\n".as_bytes())?;
|
||||
file.write_all(dot::Dot::with_config(&graph.borrow().graph, &[Config::GraphContentOnly]).to_string().as_bytes())?;
|
||||
file.write_all("\n}".as_bytes())?;
|
||||
file.flush()?;
|
||||
file.seek(std::io::SeekFrom::Start(0))?;
|
||||
Ok(())
|
||||
};
|
||||
|
||||
match write_data_closure() {
|
||||
Err(err) => Err(format_err!("Error generating graphviz data: {}", err)),
|
||||
_ => Ok(Value::Null)
|
||||
Err(err) => Err(format_err!("error generating graphviz data: {}", err)),
|
||||
_ => Ok(Value::Null),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct VirtualMergedDocument {
|
||||
pub graph: Rc<RefCell<CachedStableGraph>>
|
||||
pub graph: Rc<RefCell<CachedStableGraph>>,
|
||||
}
|
||||
|
||||
impl VirtualMergedDocument {
|
||||
|
@ -111,7 +113,7 @@ impl VirtualMergedDocument {
|
|||
|
||||
let source = match fs::read_to_string(&path) {
|
||||
Ok(s) => s,
|
||||
Err(e) => return Err(format_err!("error reading {:?}: {}", path, e))
|
||||
Err(e) => return Err(format_err!("error reading {:?}: {}", path, e)),
|
||||
};
|
||||
let source = source.replace("\r\n", "\n");
|
||||
sources.insert(path.clone(), source);
|
||||
|
@ -122,7 +124,7 @@ impl VirtualMergedDocument {
|
|||
}
|
||||
|
||||
impl Invokeable for VirtualMergedDocument {
|
||||
fn run_command(&self, root: &Path, arguments: Vec<Value>) -> Result<Value> {
|
||||
fn run_command(&self, root: &Path, arguments: &[Value]) -> Result<Value> {
|
||||
let path = PathBuf::from_json(arguments.get(0).unwrap())?;
|
||||
|
||||
let file_ancestors = match self.get_file_toplevel_ancestors(&path) {
|
||||
|
@ -132,15 +134,15 @@ impl Invokeable for VirtualMergedDocument {
|
|||
},
|
||||
Err(e) => return Err(e),
|
||||
};
|
||||
|
||||
//eprintln!("ancestors for {}:\n\t{:?}", path, file_ancestors.iter().map(|e| self.graph.borrow().graph.node_weight(*e).unwrap().clone()).collect::<Vec<String>>());
|
||||
|
||||
//info!("ancestors for {}:\n\t{:?}", path, file_ancestors.iter().map(|e| self.graph.borrow().graph.node_weight(*e).unwrap().clone()).collect::<Vec<String>>());
|
||||
|
||||
// the set of all filepath->content. TODO: change to Url?
|
||||
let mut all_sources: HashMap<PathBuf, String> = HashMap::new();
|
||||
|
||||
// if we are a top-level file (this has to be one of the set defined by Optifine, right?)
|
||||
if file_ancestors.is_empty() {
|
||||
// gather the list of all descendants
|
||||
// gather the list of all descendants
|
||||
let root = self.graph.borrow_mut().find_node(&path).unwrap();
|
||||
let tree = match self.get_dfs_for_node(root) {
|
||||
Ok(tree) => tree,
|
||||
|
@ -149,7 +151,7 @@ impl Invokeable for VirtualMergedDocument {
|
|||
|
||||
let sources = match self.load_sources(&tree) {
|
||||
Ok(s) => s,
|
||||
Err(e) => return Err(e)
|
||||
Err(e) => return Err(e),
|
||||
};
|
||||
all_sources.extend(sources);
|
||||
|
||||
|
@ -157,6 +159,9 @@ impl Invokeable for VirtualMergedDocument {
|
|||
let view = merge_views::generate_merge_list(&tree, &all_sources, &graph);
|
||||
return Ok(serde_json::value::Value::String(view));
|
||||
}
|
||||
return Err(format_err!("{:?} is not a top-level file aka has ancestors", path.strip_prefix(root).unwrap()))
|
||||
return Err(format_err!(
|
||||
"{:?} is not a top-level file aka has ancestors",
|
||||
path.strip_prefix(root).unwrap()
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
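The reworked GraphDotCommand above now writes its own `digraph { ... }` wrapper and only asks petgraph for the graph body. A minimal, self-contained sketch of that pattern (not the extension's own code; node and edge values are invented for illustration):

```rust
use petgraph::dot::{Config, Dot};
use petgraph::stable_graph::StableDiGraph;

fn main() {
    let mut graph: StableDiGraph<&str, usize> = StableDiGraph::new();
    let parent = graph.add_node("final.fsh");
    let child = graph.add_node("common.glsl");
    graph.add_edge(parent, child, 2); // edge weight: line of the #include in the parent

    // GraphContentOnly emits only the node/edge statements, so the caller controls
    // the `digraph { ... }` wrapper and its attributes.
    let body = Dot::with_config(&graph, &[Config::GraphContentOnly]).to_string();
    println!("digraph {{\n\tgraph [splines=ortho]\n\tnode [shape=box]\n{}\n}}", body);
}
```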
|
@ -10,19 +10,19 @@ struct VisitCount {
|
|||
children: usize,
|
||||
}
|
||||
|
||||
/// Performs a depth-first search with duplicates
|
||||
/// Performs a depth-first search with duplicates
|
||||
pub struct Dfs<'a> {
|
||||
stack: Vec<NodeIndex>,
|
||||
graph: &'a CachedStableGraph,
|
||||
cycle: Vec<VisitCount>
|
||||
cycle: Vec<VisitCount>,
|
||||
}
|
||||
|
||||
impl <'a> Dfs<'a> {
|
||||
impl<'a> Dfs<'a> {
|
||||
pub fn new(graph: &'a CachedStableGraph, start: NodeIndex) -> Self {
|
||||
Dfs {
|
||||
stack: vec![start],
|
||||
graph,
|
||||
cycle: Vec::new()
|
||||
cycle: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -42,49 +42,44 @@ impl <'a> Dfs<'a> {
|
|||
for child in children {
|
||||
if prev.node == *child {
|
||||
let cycle_nodes: Vec<NodeIndex> = self.cycle.iter().map(|n| n.node).collect();
|
||||
return Err(
|
||||
error::CycleError::new(&cycle_nodes, *child, self.graph)
|
||||
);
|
||||
return Err(error::CycleError::new(&cycle_nodes, *child, self.graph));
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl <'a> Iterator for Dfs<'a> {
|
||||
impl<'a> Iterator for Dfs<'a> {
|
||||
type Item = Result<(NodeIndex, Option<NodeIndex>), error::CycleError>;
|
||||
|
||||
fn next(&mut self) -> Option<Result<(NodeIndex, Option<NodeIndex>), error::CycleError>> {
|
||||
let parent = match self.cycle.last() {
|
||||
Some(p) => Some(p.node),
|
||||
None => None,
|
||||
};
|
||||
let parent = self.cycle.last().map(|p| p.node);
|
||||
|
||||
if let Some(node) = self.stack.pop() {
|
||||
self.cycle.push(VisitCount{
|
||||
self.cycle.push(VisitCount {
|
||||
node,
|
||||
children: self.graph.graph.edges(node).count(),
|
||||
touch: 1,
|
||||
});
|
||||
|
||||
let mut children = self.graph.child_node_indexes(node);
|
||||
|
||||
|
||||
if !children.is_empty() {
|
||||
// sort by line number in parent
|
||||
children.sort_by(|x, y| {
|
||||
let graph = &self.graph.graph;
|
||||
let edge1 = graph.edge_weight(graph.find_edge(node, *x).unwrap()).unwrap();
|
||||
let edge2 = graph.edge_weight(graph.find_edge(node, *y).unwrap()).unwrap();
|
||||
|
||||
|
||||
edge2.line.cmp(&edge1.line)
|
||||
});
|
||||
|
||||
|
||||
match self.check_for_cycle(&children) {
|
||||
Ok(_) => {}
|
||||
Err(e) => return Some(Err(e)),
|
||||
};
|
||||
|
||||
|
||||
for child in children {
|
||||
self.stack.push(child);
|
||||
}
|
||||
|
@ -101,9 +96,13 @@ impl <'a> Iterator for Dfs<'a> {
|
|||
pub mod error {
|
||||
use petgraph::stable_graph::NodeIndex;
|
||||
|
||||
use std::{fmt::{Debug, Display}, path::PathBuf, error::Error as StdError};
|
||||
use std::{
|
||||
error::Error as StdError,
|
||||
fmt::{Debug, Display},
|
||||
path::PathBuf,
|
||||
};
|
||||
|
||||
use crate::{graph::CachedStableGraph, consts};
|
||||
use crate::{consts, graph::CachedStableGraph};
|
||||
|
||||
use rust_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, Position, Range};
|
||||
|
||||
|
@ -111,7 +110,7 @@ pub mod error {
|
|||
pub struct CycleError(Vec<PathBuf>);
|
||||
|
||||
impl StdError for CycleError {}
|
||||
|
||||
|
||||
impl CycleError {
|
||||
pub fn new(nodes: &[NodeIndex], current_node: NodeIndex, graph: &CachedStableGraph) -> Self {
|
||||
let mut resolved_nodes: Vec<PathBuf> = nodes.iter().map(|i| graph.get_node(*i)).collect();
|
||||
|
@ -119,22 +118,22 @@ pub mod error {
|
|||
CycleError(resolved_nodes)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
impl Display for CycleError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let mut disp = String::new();
|
||||
disp.push_str(format!("Include cycle detected:\n{:?} imports ", self.0[0]).as_str());
|
||||
for p in &self.0[1..self.0.len()-1] {
|
||||
for p in &self.0[1..self.0.len() - 1] {
|
||||
disp.push_str(format!("\n{:?}, which imports ", *p).as_str());
|
||||
}
|
||||
disp.push_str(format!("\n{:?}", self.0[self.0.len()-1]).as_str());
|
||||
disp.push_str(format!("\n{:?}", self.0[self.0.len() - 1]).as_str());
|
||||
f.write_str(disp.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CycleError> for Diagnostic {
|
||||
fn from(e: CycleError) -> Diagnostic {
|
||||
Diagnostic{
|
||||
Diagnostic {
|
||||
severity: Some(DiagnosticSeverity::Error),
|
||||
range: Range::new(Position::new(0, 0), Position::new(0, 500)),
|
||||
source: Some(consts::SOURCE.into()),
|
||||
|
@ -153,4 +152,4 @@ pub mod error {
|
|||
format!("{}", e)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
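The Dfs iterator above visits each node's children in include order (it sorts by the line number stored on the connecting edge, descending, so the earliest include ends up on top of the stack) and deliberately allows a file to be visited more than once. A self-contained sketch of that ordering idea, without the cycle-detection bookkeeping; the graph contents are invented:

```rust
use petgraph::stable_graph::{NodeIndex, StableDiGraph};

// Walk the include graph depth-first, keeping duplicates, visiting each node's
// children in ascending order of the include line stored on the edge.
fn dfs_in_include_order(graph: &StableDiGraph<&str, usize>, start: NodeIndex) -> Vec<NodeIndex> {
    let mut stack = vec![start];
    let mut order = Vec::new();
    while let Some(node) = stack.pop() {
        order.push(node);
        let mut children: Vec<NodeIndex> = graph.neighbors(node).collect();
        // sort descending by line number so the earliest include is popped first
        children.sort_by(|x, y| {
            let ex = graph.edge_weight(graph.find_edge(node, *x).unwrap()).unwrap();
            let ey = graph.edge_weight(graph.find_edge(node, *y).unwrap()).unwrap();
            ey.cmp(ex)
        });
        stack.extend(children);
    }
    order
}

fn main() {
    let mut graph: StableDiGraph<&str, usize> = StableDiGraph::new();
    let root = graph.add_node("final.fsh");
    let early = graph.add_node("common.glsl");
    let late = graph.add_node("util.glsl");
    graph.add_edge(root, early, 2); // included on line 2
    graph.add_edge(root, late, 7); // included on line 7
    // prints the root first, then the line-2 include, then the line-7 include
    println!("{:?}", dfs_in_include_order(&graph, root));
}
```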
|
@ -1,9 +1,13 @@
|
|||
use petgraph::stable_graph::StableDiGraph;
|
||||
use petgraph::stable_graph::NodeIndex;
|
||||
use petgraph::Direction;
|
||||
use petgraph::stable_graph::EdgeIndex;
|
||||
use petgraph::stable_graph::NodeIndex;
|
||||
use petgraph::stable_graph::StableDiGraph;
|
||||
use petgraph::Direction;
|
||||
|
||||
use std::{collections::{HashMap, HashSet}, path::{Path, PathBuf}, str::FromStr};
|
||||
use std::{
|
||||
collections::{HashMap, HashSet},
|
||||
path::{Path, PathBuf},
|
||||
str::FromStr,
|
||||
};
|
||||
|
||||
use super::IncludePosition;
|
||||
|
||||
|
@ -16,14 +20,14 @@ pub struct CachedStableGraph {
|
|||
pub graph: StableDiGraph<String, IncludePosition>,
|
||||
cache: HashMap<PathBuf, NodeIndex>,
|
||||
// Maps a node index to its abstracted string representation.
|
||||
// Mainly used as the graph is based on NodeIndex and
|
||||
// Mainly used as the graph is based on NodeIndex.
|
||||
reverse_index: HashMap<NodeIndex, PathBuf>,
|
||||
}
|
||||
|
||||
impl CachedStableGraph {
|
||||
#[allow(clippy::new_without_default)]
|
||||
pub fn new() -> CachedStableGraph {
|
||||
CachedStableGraph{
|
||||
CachedStableGraph {
|
||||
graph: StableDiGraph::new(),
|
||||
cache: HashMap::new(),
|
||||
reverse_index: HashMap::new(),
|
||||
|
@ -91,15 +95,21 @@ impl CachedStableGraph {
|
|||
|
||||
#[allow(dead_code)]
|
||||
pub fn child_node_names(&self, node: NodeIndex) -> Vec<PathBuf> {
|
||||
self.graph.neighbors(node).map(|n| self.reverse_index.get(&n).unwrap().clone()).collect()
|
||||
self.graph
|
||||
.neighbors(node)
|
||||
.map(|n| self.reverse_index.get(&n).unwrap().clone())
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn child_node_meta(&self, node: NodeIndex) -> Vec<(PathBuf, IncludePosition)> {
|
||||
self.graph.neighbors(node).map(|n| {
|
||||
let edge = self.graph.find_edge(node, n).unwrap();
|
||||
let edge_meta = self.graph.edge_weight(edge).unwrap();
|
||||
return (self.reverse_index.get(&n).unwrap().clone(), edge_meta.clone())
|
||||
}).collect()
|
||||
self.graph
|
||||
.neighbors(node)
|
||||
.map(|n| {
|
||||
let edge = self.graph.find_edge(node, n).unwrap();
|
||||
let edge_meta = self.graph.edge_weight(edge).unwrap();
|
||||
return (self.reverse_index.get(&n).unwrap().clone(), edge_meta.clone());
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn child_node_indexes(&self, node: NodeIndex) -> Vec<NodeIndex> {
|
||||
|
@ -108,7 +118,10 @@ impl CachedStableGraph {
|
|||
|
||||
#[allow(dead_code)]
|
||||
pub fn parent_node_names(&self, node: NodeIndex) -> Vec<PathBuf> {
|
||||
self.graph.neighbors_directed(node, Direction::Incoming).map(|n| self.reverse_index.get(&n).unwrap().clone()).collect()
|
||||
self.graph
|
||||
.neighbors_directed(node, Direction::Incoming)
|
||||
.map(|n| self.reverse_index.get(&n).unwrap().clone())
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn parent_node_indexes(&self, node: NodeIndex) -> Vec<NodeIndex> {
|
||||
|
@ -129,7 +142,7 @@ impl CachedStableGraph {
|
|||
if node == initial && !visited.is_empty() {
|
||||
return vec![];
|
||||
}
|
||||
|
||||
|
||||
let parents = self.parent_node_indexes(node);
|
||||
let mut collection = Vec::with_capacity(parents.len());
|
||||
|
||||
|
@ -148,4 +161,4 @@ impl CachedStableGraph {
|
|||
|
||||
collection
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
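CachedStableGraph above pairs the petgraph graph with a `cache` (path → NodeIndex) and a `reverse_index` (NodeIndex → path) so lookups in both directions stay cheap. A hedged sketch of that bookkeeping; `PathGraph` and the exact method bodies are assumptions, not the crate's code:

```rust
use petgraph::stable_graph::{NodeIndex, StableDiGraph};
use std::collections::HashMap;
use std::path::{Path, PathBuf};

struct PathGraph {
    graph: StableDiGraph<String, usize>,
    cache: HashMap<PathBuf, NodeIndex>,
    reverse_index: HashMap<NodeIndex, PathBuf>,
}

impl PathGraph {
    fn new() -> Self {
        PathGraph {
            graph: StableDiGraph::new(),
            cache: HashMap::new(),
            reverse_index: HashMap::new(),
        }
    }

    // Insert a node once per path; later calls reuse the cached index.
    fn add_node(&mut self, path: &Path) -> NodeIndex {
        if let Some(idx) = self.cache.get(path) {
            return *idx;
        }
        let idx = self.graph.add_node(path.to_string_lossy().into_owned());
        self.cache.insert(path.to_owned(), idx);
        self.reverse_index.insert(idx, path.to_owned());
        idx
    }

    fn find_node(&self, path: &Path) -> Option<NodeIndex> {
        self.cache.get(path).copied()
    }

    fn get_node(&self, idx: NodeIndex) -> &Path {
        &self.reverse_index[&idx]
    }
}

fn main() {
    let mut graph = PathGraph::new();
    let idx = graph.add_node(Path::new("shaders/final.fsh"));
    assert_eq!(graph.find_node(Path::new("shaders/final.fsh")), Some(idx));
    println!("{:?} -> {:?}", idx, graph.get_node(idx));
}
```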
|
@ -13,4 +13,4 @@ pub struct StatusParams {
|
|||
pub status: String,
|
||||
pub message: Option<String>,
|
||||
pub icon: Option<String>,
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,38 +1,48 @@
|
|||
use rust_lsp::jsonrpc::{*, method_types::*};
|
||||
use rust_lsp::jsonrpc::{method_types::*, *};
|
||||
use rust_lsp::lsp::*;
|
||||
use rust_lsp::lsp_types::{*, notification::*};
|
||||
use rust_lsp::lsp_types::{notification::*, *};
|
||||
|
||||
use petgraph::stable_graph::NodeIndex;
|
||||
|
||||
use serde_json::Value;
|
||||
use serde::Deserialize;
|
||||
use serde_json::{Value, from_value};
|
||||
|
||||
use url_norm::FromUrl;
|
||||
|
||||
use walkdir::WalkDir;
|
||||
|
||||
use std::{cell::RefCell, path::{Path, PathBuf}, str::FromStr};
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::collections::hash_map::RandomState;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::convert::TryFrom;
|
||||
use std::fmt::{Display, Formatter, Debug};
|
||||
use std::io::{stdin, stdout, BufRead, BufReader};
|
||||
use std::rc::Rc;
|
||||
use std::fmt::{Debug, Display, Formatter};
|
||||
use std::fs;
|
||||
use std::io::{stdin, stdout, BufRead, BufReader};
|
||||
use std::iter::{Extend, FromIterator};
|
||||
use std::rc::Rc;
|
||||
use std::str::FromStr;
|
||||
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use slog::Level;
|
||||
use slog_scope::{debug, error, info, warn};
|
||||
|
||||
use path_slash::PathBufExt;
|
||||
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{anyhow, Result};
|
||||
|
||||
use regex::Regex;
|
||||
|
||||
use lazy_static::lazy_static;
|
||||
|
||||
mod graph;
|
||||
mod commands;
|
||||
mod lsp_ext;
|
||||
mod dfs;
|
||||
mod merge_views;
|
||||
mod consts;
|
||||
mod dfs;
|
||||
mod graph;
|
||||
mod lsp_ext;
|
||||
mod merge_views;
|
||||
mod opengl;
|
||||
mod logging;
|
||||
mod url_norm;
|
||||
|
@ -73,10 +83,10 @@ fn main() {
|
|||
),
|
||||
(
|
||||
"virtualMerge",
|
||||
Box::new(commands::VirtualMergedDocument{
|
||||
graph: Rc::clone(&langserver.graph)
|
||||
})
|
||||
)
|
||||
Box::new(commands::VirtualMergedDocument {
|
||||
graph: Rc::clone(&langserver.graph),
|
||||
}),
|
||||
),
|
||||
]));
|
||||
|
||||
LSPEndpoint::run_server_from_input(&mut stdin.lock(), endpoint_output, langserver);
|
||||
|
@ -110,8 +120,12 @@ impl Display for IncludePosition {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum TreeType {
|
||||
Fragment, Vertex, Geometry, Compute
|
||||
Fragment,
|
||||
Vertex,
|
||||
Geometry,
|
||||
Compute,
|
||||
}
|
||||
|
||||
impl MinecraftShaderLanguageServer {
|
||||
|
@ -125,10 +139,12 @@ impl MinecraftShaderLanguageServer {
|
|||
}
|
||||
|
||||
pub fn gen_initial_graph(&self) {
|
||||
eprintln!("root of project is {:?}", self.root);
|
||||
info!("generating graph for current root"; "root" => self.root.to_str().unwrap());
|
||||
|
||||
// filter directories and files not ending in any of the 3 extensions
|
||||
WalkDir::new(&self.root).into_iter().filter_map(|entry| {
|
||||
WalkDir::new(&self.root)
|
||||
.into_iter()
|
||||
.filter_map(|entry| {
|
||||
if entry.is_err() {
|
||||
return None;
|
||||
}
|
||||
|
@ -141,29 +157,31 @@ impl MinecraftShaderLanguageServer {
|
|||
|
||||
let ext = match path.extension() {
|
||||
Some(e) => e,
|
||||
None => return None,
|
||||
None => return None,
|
||||
};
|
||||
|
||||
// TODO: include user added extensions
|
||||
if ext != "vsh" && ext != "fsh" && ext != "glsl" && ext != "inc" {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(entry.into_path())
|
||||
}).for_each(|path| {
|
||||
// iterate all valid found files, search for includes, add a node into the graph for each
|
||||
// file and add a file->includes KV into the map
|
||||
self.add_file_and_includes_to_graph(&path);
|
||||
});
|
||||
})
|
||||
.for_each(|path| {
|
||||
// iterate all valid found files, search for includes, add a node into the graph for each
|
||||
// file and add a file->includes KV into the map
|
||||
self.add_file_and_includes_to_graph(&path);
|
||||
});
|
||||
|
||||
eprintln!("finished building project include graph");
|
||||
info!("finished building project include graph");
|
||||
}
|
||||
|
||||
fn add_file_and_includes_to_graph(&self, path: &Path) {
|
||||
let includes = self.find_includes(path);
|
||||
|
||||
let idx = self.graph.borrow_mut().add_node(&path);
|
||||
let idx = self.graph.borrow_mut().add_node(path);
|
||||
|
||||
//eprintln!("adding {:?} with {:?}", path, includes);
|
||||
debug!("adding includes for new file"; "file" => path.to_str().unwrap(), "includes" => format!("{:?}", includes));
|
||||
for include in includes {
|
||||
self.add_include(include, idx);
|
||||
}
|
||||
|
@ -186,17 +204,12 @@ impl MinecraftShaderLanguageServer {
|
|||
})
|
||||
.filter(|line| RE_INCLUDE.is_match(line.1.as_str()))
|
||||
.for_each(|line| {
|
||||
let cap = RE_INCLUDE
|
||||
.captures(line.1.as_str())
|
||||
.unwrap()
|
||||
.get(1)
|
||||
.unwrap();
|
||||
let cap = RE_INCLUDE.captures(line.1.as_str()).unwrap().get(1).unwrap();
|
||||
|
||||
let start = cap.start();
|
||||
let end = cap.end();
|
||||
let mut path: String = cap.as_str().into();
|
||||
|
||||
// TODO: difference between / and not
|
||||
let full_include = if path.starts_with('/') {
|
||||
path = path.strip_prefix('/').unwrap().to_string();
|
||||
self.root.join("shaders").join(PathBuf::from_slash(&path))
|
||||
|
@ -204,14 +217,7 @@ impl MinecraftShaderLanguageServer {
|
|||
file.parent().unwrap().join(PathBuf::from_slash(&path))
|
||||
};
|
||||
|
||||
includes.push((
|
||||
full_include,
|
||||
IncludePosition {
|
||||
line: line.0,
|
||||
start,
|
||||
end,
|
||||
}
|
||||
));
|
||||
includes.push((full_include, IncludePosition { line: line.0, start, end }));
|
||||
});
|
||||
|
||||
includes
|
||||
|
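find_includes above keeps only the lines matching the lazily built RE_INCLUDE and records the capture's start/end columns along with the line number. A small sketch of that scanning step; the pattern used here is an assumption, not the crate's actual RE_INCLUDE:

```rust
use lazy_static::lazy_static;
use regex::Regex;

lazy_static! {
    // Assumed shape of an #include matcher; the real RE_INCLUDE may differ.
    static ref RE_INCLUDE: Regex = Regex::new(r#"^\s*#include\s+"(.+)""#).unwrap();
}

fn main() {
    let source = "#version 120\n#include \"/lib/common.glsl\"\nvoid main() {}\n";
    source
        .lines()
        .enumerate()
        .filter(|line| RE_INCLUDE.is_match(line.1))
        .for_each(|line| {
            let cap = RE_INCLUDE.captures(line.1).unwrap().get(1).unwrap();
            // line number plus the capture's byte offsets, mirroring IncludePosition
            println!("line {}: {:?} at {}..{}", line.0, cap.as_str(), cap.start(), cap.end());
        });
}
```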
@ -220,12 +226,10 @@ impl MinecraftShaderLanguageServer {
|
|||
fn update_includes(&self, file: &Path) {
|
||||
let includes = self.find_includes(file);
|
||||
|
||||
eprintln!("updating {:?} with {:?}", file, includes);
|
||||
info!("includes found for file"; "file" => file.to_str().unwrap(), "includes" => format!("{:?}", includes));
|
||||
|
||||
let idx = match self.graph.borrow_mut().find_node(&file) {
|
||||
None => {
|
||||
return
|
||||
},
|
||||
let idx = match self.graph.borrow_mut().find_node(file) {
|
||||
None => return,
|
||||
Some(n) => n,
|
||||
};
|
||||
|
||||
|
@ -235,7 +239,11 @@ impl MinecraftShaderLanguageServer {
|
|||
let to_be_added = new_children.difference(&prev_children);
|
||||
let to_be_removed = prev_children.difference(&new_children);
|
||||
|
||||
eprintln!("removing:\n\t{:?}\nadding:\n\t{:?}", to_be_removed, to_be_added);
|
||||
debug!(
|
||||
"include sets diff'd";
|
||||
"for removal" => format!("{:?}", to_be_removed),
|
||||
"for addition" => format!("{:?}", to_be_added)
|
||||
);
|
||||
|
||||
for removal in to_be_removed {
|
||||
let child = self.graph.borrow_mut().find_node(&removal.0).unwrap();
|
||||
|
@ -256,15 +264,25 @@ impl MinecraftShaderLanguageServer {
|
|||
},
|
||||
Err(e) => return Err(e),
|
||||
};
|
||||
|
||||
eprintln!("ancestors for {:?}:\n\t{:?}", uri, file_ancestors.iter().map(|e| PathBuf::from_str(&self.graph.borrow().graph.node_weight(*e).unwrap().clone()).unwrap()).collect::<Vec<PathBuf>>());
|
||||
|
||||
info!(
|
||||
"top-level file ancestors found";
|
||||
"uri" => uri.to_str().unwrap(),
|
||||
"ancestors" => format!("{:?}", file_ancestors
|
||||
.iter()
|
||||
.map(|e| PathBuf::from_str(
|
||||
&self.graph.borrow().graph[*e].clone()
|
||||
)
|
||||
.unwrap())
|
||||
.collect::<Vec<PathBuf>>())
|
||||
);
|
||||
|
||||
// the set of all filepath->content.
|
||||
let mut all_sources: HashMap<PathBuf, String> = HashMap::new();
|
||||
// the set of filepath->list of diagnostics to report
|
||||
let mut diagnostics: HashMap<Url, Vec<Diagnostic>> = HashMap::new();
|
||||
|
||||
// we want to backfill the diagnostics map with all linked sources
|
||||
// we want to backfill the diagnostics map with all linked sources
|
||||
let back_fill = |all_sources: &HashMap<PathBuf, String>, diagnostics: &mut HashMap<Url, Vec<Diagnostic>>| {
|
||||
for path in all_sources.keys() {
|
||||
diagnostics.entry(Url::from_file_path(path).unwrap()).or_default();
|
||||
|
@ -273,8 +291,8 @@ impl MinecraftShaderLanguageServer {
|
|||
|
||||
// if we are a top-level file (this has to be one of the set defined by Optifine, right?)
|
||||
if file_ancestors.is_empty() {
|
||||
// gather the list of all descendants
|
||||
let root = self.graph.borrow_mut().find_node(&uri).unwrap();
|
||||
// gather the list of all descendants
|
||||
let root = self.graph.borrow_mut().find_node(uri).unwrap();
|
||||
let tree = match self.get_dfs_for_node(root) {
|
||||
Ok(tree) => tree,
|
||||
Err(e) => {
|
||||
|
@ -283,20 +301,20 @@ impl MinecraftShaderLanguageServer {
|
|||
}
|
||||
};
|
||||
|
||||
all_sources.extend( self.load_sources(&tree)?);
|
||||
all_sources.extend(self.load_sources(&tree)?);
|
||||
|
||||
let view = {
|
||||
let graph = self.graph.borrow();
|
||||
let graph = self.graph.borrow();
|
||||
merge_views::generate_merge_list(&tree, &all_sources, &graph)
|
||||
};
|
||||
|
||||
let root_path = self.graph.borrow().get_node(root);
|
||||
let ext = match root_path.extension() {
|
||||
Some(ext) => ext,
|
||||
Some(ext) => ext.to_str().unwrap(),
|
||||
None => {
|
||||
back_fill(&all_sources, &mut diagnostics);
|
||||
return Ok(diagnostics)
|
||||
},
|
||||
return Ok(diagnostics);
|
||||
}
|
||||
};
|
||||
let tree_type = if ext == "fsh" {
|
||||
TreeType::Fragment
|
||||
|
@ -307,17 +325,20 @@ impl MinecraftShaderLanguageServer {
|
|||
} else if ext == "csh" {
|
||||
TreeType::Compute
|
||||
} else {
|
||||
eprintln!("got a non fsh|vsh|gsh|csh ({:?}) as a file root ancestor: {:?}", ext, root_path);
|
||||
warn!(
|
||||
"got a non fsh|vsh|gsh|csh as a file root ancestor, skipping lint";
|
||||
"extension" => ext, "root_ancestor" => root_path.to_str().unwrap()
|
||||
);
|
||||
back_fill(&all_sources, &mut diagnostics);
|
||||
return Ok(diagnostics)
|
||||
return Ok(diagnostics);
|
||||
};
|
||||
|
||||
let stdout = match self.opengl_context.clone().validate(tree_type, view) {
|
||||
Some(s) => s,
|
||||
None => {
|
||||
back_fill(&all_sources, &mut diagnostics);
|
||||
return Ok(diagnostics)
|
||||
},
|
||||
return Ok(diagnostics);
|
||||
}
|
||||
};
|
||||
diagnostics.extend(self.parse_validator_stdout(uri, stdout, ""));
|
||||
} else {
|
||||
|
@ -335,8 +356,8 @@ impl MinecraftShaderLanguageServer {
|
|||
|
||||
let root_path = self.graph.borrow().get_node(*root).clone();
|
||||
let ext = match root_path.extension() {
|
||||
Some(ext) => ext,
|
||||
None => continue
|
||||
Some(ext) => ext.to_str().unwrap(),
|
||||
None => continue,
|
||||
};
|
||||
let tree_type = if ext == "fsh" {
|
||||
TreeType::Fragment
|
||||
|
@ -347,7 +368,10 @@ impl MinecraftShaderLanguageServer {
|
|||
} else if ext == "csh" {
|
||||
TreeType::Compute
|
||||
} else {
|
||||
eprintln!("got a non fsh|vsh|gsh|csh ({:?}) as a file root ancestor: {:?}", ext, root_path);
|
||||
warn!(
|
||||
"got a non fsh|vsh|gsh|csh as a file root ancestor, skipping lint";
|
||||
"extension" => ext, "root_ancestor" => root_path.to_str().unwrap()
|
||||
);
|
||||
continue;
|
||||
};
|
||||
|
||||
|
@ -358,7 +382,7 @@ impl MinecraftShaderLanguageServer {
|
|||
|
||||
for tree in all_trees {
|
||||
let view = {
|
||||
let graph = self.graph.borrow();
|
||||
let graph = self.graph.borrow();
|
||||
merge_views::generate_merge_list(&tree.1, &all_sources, &graph)
|
||||
};
|
||||
|
||||
|
@ -451,7 +475,7 @@ impl MinecraftShaderLanguageServer {
|
|||
|
||||
let dfs = dfs::Dfs::new(&graph_ref, root);
|
||||
|
||||
dfs.collect::<Result<Vec<_>, _>>()
|
||||
dfs.collect::<Result<_, _>>()
|
||||
}
|
||||
|
||||
pub fn load_sources(&self, nodes: &[(NodeIndex, Option<NodeIndex>)]) -> Result<HashMap<PathBuf, String>> {
|
||||
|
@ -467,7 +491,7 @@ impl MinecraftShaderLanguageServer {
|
|||
|
||||
let source = match fs::read_to_string(&path) {
|
||||
Ok(s) => s,
|
||||
Err(e) => return Err(anyhow!("error reading {:?}: {}", path, e))
|
||||
Err(e) => return Err(anyhow!("error reading {:?}: {}", path, e)),
|
||||
};
|
||||
let source = source.replace("\r\n", "\n");
|
||||
sources.insert(path.clone(), source);
|
||||
|
@ -489,86 +513,88 @@ impl MinecraftShaderLanguageServer {
|
|||
}
|
||||
|
||||
pub fn publish_diagnostic(&self, diagnostics: HashMap<Url, Vec<Diagnostic>>, document_version: Option<i32>) {
|
||||
eprintln!("DIAGNOSTICS:\n{:?}", diagnostics);
|
||||
// info!("DIAGNOSTICS:\n{:?}", diagnostics);
|
||||
for (uri, diagnostics) in diagnostics {
|
||||
self.endpoint.send_notification(PublishDiagnostics::METHOD, PublishDiagnosticsParams {
|
||||
uri,
|
||||
diagnostics,
|
||||
version: document_version,
|
||||
}).expect("failed to publish diagnostics");
|
||||
self.endpoint
|
||||
.send_notification(
|
||||
PublishDiagnostics::METHOD,
|
||||
PublishDiagnosticsParams {
|
||||
uri,
|
||||
diagnostics,
|
||||
version: document_version,
|
||||
},
|
||||
)
|
||||
.expect("failed to publish diagnostics");
|
||||
}
|
||||
}
|
||||
|
||||
fn set_status(&self, status: impl Into<String>, message: impl Into<String>, icon: impl Into<String>) {
|
||||
self.endpoint.send_notification(lsp_ext::Status::METHOD, lsp_ext::StatusParams {
|
||||
status: status.into(),
|
||||
message: Some(message.into()),
|
||||
icon: Some(icon.into()),
|
||||
}).unwrap_or(());
|
||||
self.endpoint
|
||||
.send_notification(
|
||||
lsp_ext::Status::METHOD,
|
||||
lsp_ext::StatusParams {
|
||||
status: status.into(),
|
||||
message: Some(message.into()),
|
||||
icon: Some(icon.into()),
|
||||
},
|
||||
)
|
||||
.unwrap_or(());
|
||||
}
|
||||
}
|
||||
|
||||
impl LanguageServerHandling for MinecraftShaderLanguageServer {
|
||||
fn initialize(&mut self, params: InitializeParams, completable: MethodCompletable<InitializeResult, InitializeError>) {
|
||||
|
||||
logging::slog_with_trace_id(|| {
|
||||
info!("starting server...");
|
||||
|
||||
let capabilities = ServerCapabilities{
|
||||
document_link_provider: Some(DocumentLinkOptions {
|
||||
resolve_provider: None,
|
||||
work_done_progress_options: WorkDoneProgressOptions {
|
||||
work_done_progress: None,
|
||||
},
|
||||
}),
|
||||
execute_command_provider: Some(ExecuteCommandOptions {
|
||||
commands: vec!["graphDot".into()],
|
||||
work_done_progress_options: WorkDoneProgressOptions {
|
||||
work_done_progress: None,
|
||||
},
|
||||
}),
|
||||
text_document_sync: Some(TextDocumentSyncCapability::Options(
|
||||
TextDocumentSyncOptions {
|
||||
open_close: Some(true),
|
||||
will_save: None,
|
||||
will_save_wait_until: None,
|
||||
change: Some(TextDocumentSyncKind::Full),
|
||||
save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions {
|
||||
include_text: Some(true),
|
||||
}))
|
||||
},
|
||||
)),
|
||||
.. ServerCapabilities::default()
|
||||
};
|
||||
let capabilities = ServerCapabilities {
|
||||
document_link_provider: Some(DocumentLinkOptions {
|
||||
resolve_provider: None,
|
||||
work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
|
||||
}),
|
||||
execute_command_provider: Some(ExecuteCommandOptions {
|
||||
commands: vec!["graphDot".into()],
|
||||
work_done_progress_options: WorkDoneProgressOptions { work_done_progress: None },
|
||||
}),
|
||||
text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions {
|
||||
open_close: Some(true),
|
||||
will_save: None,
|
||||
will_save_wait_until: None,
|
||||
change: Some(TextDocumentSyncKind::Full),
|
||||
save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions { include_text: Some(true) })),
|
||||
})),
|
||||
..ServerCapabilities::default()
|
||||
};
|
||||
|
||||
let root = match params.root_uri {
|
||||
Some(uri) => PathBuf::from_url(uri),
|
||||
None => {
|
||||
completable.complete(Err(MethodError {
|
||||
code: 42069,
|
||||
message: "Must be in workspace".into(),
|
||||
data: InitializeError {
|
||||
retry: false,
|
||||
},
|
||||
}));
|
||||
return;
|
||||
}
|
||||
};
|
||||
let root = match params.root_uri {
|
||||
Some(uri) => PathBuf::from_url(uri),
|
||||
None => {
|
||||
completable.complete(Err(MethodError {
|
||||
code: 42069,
|
||||
message: "Must be in workspace".into(),
|
||||
data: InitializeError { retry: false },
|
||||
}));
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
completable.complete(Ok(InitializeResult {
|
||||
capabilities,
|
||||
server_info: None,
|
||||
}));
|
||||
completable.complete(Ok(InitializeResult {
|
||||
capabilities,
|
||||
server_info: None,
|
||||
}));
|
||||
|
||||
self.set_status("loading", "Building dependency graph...", "$(loading~spin)");
|
||||
self.set_status("loading", "Building dependency graph...", "$(loading~spin)");
|
||||
|
||||
self.root = root;
|
||||
self.root = root;
|
||||
|
||||
self.gen_initial_graph();
|
||||
self.gen_initial_graph();
|
||||
|
||||
self.set_status("ready", "Project initialized", "$(check)");
|
||||
self.set_status("ready", "Project initialized", "$(check)");
|
||||
});
|
||||
}
|
||||
|
||||
fn shutdown(&mut self, _: (), completable: LSCompletable<()>) {
|
||||
eprintln!("shutting down language server...");
|
||||
warn!("shutting down language server...");
|
||||
completable.complete(Ok(()));
|
||||
}
|
||||
|
||||
|
@ -577,22 +603,25 @@ impl LanguageServerHandling for MinecraftShaderLanguageServer {
|
|||
}
|
||||
|
||||
fn workspace_change_configuration(&mut self, params: DidChangeConfigurationParams) {
|
||||
//let config = params.settings.as_object().unwrap().get("mcglsl").unwrap();
|
||||
logging::slog_with_trace_id(|| {
|
||||
});
|
||||
}
|
||||
|
||||
fn did_open_text_document(&mut self, params: DidOpenTextDocumentParams) {
|
||||
//eprintln!("opened doc {}", params.text_document.uri);
|
||||
let path = PathBuf::from_url(params.text_document.uri);
|
||||
if !path.starts_with(&self.root) {
|
||||
return
|
||||
}
|
||||
if self.graph.borrow_mut().find_node(&path) == None {
|
||||
self.add_file_and_includes_to_graph(&path);
|
||||
}
|
||||
match self.lint(&path) {
|
||||
Ok(diagnostics) => self.publish_diagnostic(diagnostics, None),
|
||||
Err(e) => eprintln!("error linting: {}", e),
|
||||
}
|
||||
logging::slog_with_trace_id(|| {
|
||||
//info!("opened doc {}", params.text_document.uri);
|
||||
let path = PathBuf::from_url(params.text_document.uri);
|
||||
if !path.starts_with(&self.root) {
|
||||
return;
|
||||
}
|
||||
if self.graph.borrow_mut().find_node(&path) == None {
|
||||
self.add_file_and_includes_to_graph(&path);
|
||||
}
|
||||
match self.lint(&path) {
|
||||
Ok(diagnostics) => self.publish_diagnostic(diagnostics, None),
|
||||
Err(e) => error!("error linting"; "error" => format!("{:?}", e), "path" => path.to_str().unwrap()),
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn did_change_text_document(&mut self, _: DidChangeTextDocumentParams) {}
|
||||
|
@ -600,18 +629,18 @@ impl LanguageServerHandling for MinecraftShaderLanguageServer {
|
|||
fn did_close_text_document(&mut self, _: DidCloseTextDocumentParams) {}
|
||||
|
||||
fn did_save_text_document(&mut self, params: DidSaveTextDocumentParams) {
|
||||
//eprintln!("saved doc {}", params.text_document.uri);
|
||||
logging::slog_with_trace_id(|| {
|
||||
let path = PathBuf::from_url(params.text_document.uri);
|
||||
if !path.starts_with(&self.root) {
|
||||
return;
|
||||
}
|
||||
self.update_includes(&path);
|
||||
|
||||
let path = PathBuf::from_url(params.text_document.uri);
|
||||
if !path.starts_with(&self.root) {
|
||||
return
|
||||
}
|
||||
self.update_includes(&path);
|
||||
|
||||
match self.lint(&path) {
|
||||
Ok(diagnostics) => self.publish_diagnostic(diagnostics, None),
|
||||
Err(e) => eprintln!("error linting: {}", e),
|
||||
}
|
||||
match self.lint(&path) {
|
||||
Ok(diagnostics) => self.publish_diagnostic(diagnostics, None),
|
||||
Err(e) => error!("error linting"; "error" => format!("{:?}", e), "path" => path.to_str().unwrap()),
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn did_change_watched_files(&mut self, _: DidChangeWatchedFilesParams) {}
|
||||
|
@ -635,24 +664,41 @@ impl LanguageServerHandling for MinecraftShaderLanguageServer {
|
|||
}
|
||||
|
||||
fn execute_command(&mut self, params: ExecuteCommandParams, completable: LSCompletable<Option<Value>>) {
|
||||
match self.command_provider.as_ref().unwrap().execute(¶ms.command, params.arguments, &self.root) {
|
||||
Ok(resp) => {
|
||||
eprintln!("executed {} successfully", params.command);
|
||||
self.endpoint.send_notification(ShowMessage::METHOD, ShowMessageParams {
|
||||
typ: MessageType::Info,
|
||||
message: format!("Command {} executed successfully.", params.command),
|
||||
}).expect("failed to send popup/show message notification");
|
||||
completable.complete(Ok(Some(resp)))
|
||||
},
|
||||
Err(err) => {
|
||||
self.endpoint.send_notification(ShowMessage::METHOD, ShowMessageParams {
|
||||
typ: MessageType::Error,
|
||||
message: format!("Failed to execute `{}`. Reason: {}", params.command, err),
|
||||
}).expect("failed to send popup/show message notification");
|
||||
eprintln!("failed to execute {}: {}", params.command, err);
|
||||
completable.complete(Err(MethodError::new(32420, err.to_string(), ())))
|
||||
},
|
||||
}
|
||||
logging::slog_with_trace_id(|| {
|
||||
match self
|
||||
.command_provider
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.execute(¶ms.command, ¶ms.arguments, &self.root)
|
||||
{
|
||||
Ok(resp) => {
|
||||
info!("executed command successfully"; "command" => params.command.clone());
|
||||
self.endpoint
|
||||
.send_notification(
|
||||
ShowMessage::METHOD,
|
||||
ShowMessageParams {
|
||||
typ: MessageType::Info,
|
||||
message: format!("Command {} executed successfully.", params.command),
|
||||
},
|
||||
)
|
||||
.expect("failed to send popup/show message notification");
|
||||
completable.complete(Ok(Some(resp)))
|
||||
}
|
||||
Err(err) => {
|
||||
error!("failed to execute command"; "command" => params.command.clone(), "error" => format!("{:?}", err));
|
||||
self.endpoint
|
||||
.send_notification(
|
||||
ShowMessage::METHOD,
|
||||
ShowMessageParams {
|
||||
typ: MessageType::Error,
|
||||
message: format!("Failed to execute `{}`. Reason: {}", params.command, err),
|
||||
},
|
||||
)
|
||||
.expect("failed to send popup/show message notification");
|
||||
completable.complete(Err(MethodError::new(32420, err.to_string(), ())))
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn signature_help(&mut self, _: TextDocumentPositionParams, completable: LSCompletable<SignatureHelp>) {
|
||||
|
@ -692,55 +738,52 @@ impl LanguageServerHandling for MinecraftShaderLanguageServer {
|
|||
}
|
||||
|
||||
fn document_link(&mut self, params: DocumentLinkParams, completable: LSCompletable<Vec<DocumentLink>>) {
|
||||
eprintln!("document link file: {:?}", params.text_document.uri.to_file_path().unwrap());
|
||||
// node for current document
|
||||
let curr_doc = params
|
||||
.text_document
|
||||
.uri
|
||||
.to_file_path()
|
||||
.unwrap();
|
||||
let node = match self.graph.borrow_mut().find_node(&curr_doc) {
|
||||
Some(n) => n,
|
||||
None => {
|
||||
completable.complete(Ok(vec![]));
|
||||
return
|
||||
},
|
||||
};
|
||||
logging::slog_with_trace_id(|| {
|
||||
// node for current document
|
||||
let curr_doc = params.text_document.uri.to_file_path().unwrap();
|
||||
let node = match self.graph.borrow_mut().find_node(&curr_doc) {
|
||||
Some(n) => n,
|
||||
None => {
|
||||
warn!("document not found in graph"; "path" => curr_doc.to_str().unwrap());
|
||||
completable.complete(Ok(vec![]));
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
let edges: Vec<DocumentLink> = self
|
||||
.graph
|
||||
.borrow()
|
||||
.child_node_indexes(node)
|
||||
.into_iter()
|
||||
.filter_map(|child| {
|
||||
let graph = self.graph.borrow();
|
||||
let value = graph.get_edge_meta(node, child);
|
||||
let path = graph.get_node(child);
|
||||
let url = match Url::from_file_path(&path) {
|
||||
Ok(url) => url,
|
||||
Err(e) => {
|
||||
eprintln!("error converting {:?} into url: {:?}", path, e);
|
||||
return None;
|
||||
}
|
||||
};
|
||||
let edges: Vec<DocumentLink> = self
|
||||
.graph
|
||||
.borrow()
|
||||
.child_node_indexes(node)
|
||||
.into_iter()
|
||||
.filter_map(|child| {
|
||||
let graph = self.graph.borrow();
|
||||
let value = graph.get_edge_meta(node, child);
|
||||
let path = graph.get_node(child);
|
||||
let url = match Url::from_file_path(&path) {
|
||||
Ok(url) => url,
|
||||
Err(e) => {
|
||||
error!("error converting into url"; "path" => path.to_str().unwrap(), "error" => format!("{:?}", e));
|
||||
return None;
|
||||
}
|
||||
};
|
||||
|
||||
Some(DocumentLink {
|
||||
range: Range::new(
|
||||
Position::new(
|
||||
u32::try_from(value.line).unwrap(),
|
||||
u32::try_from(value.start).unwrap()),
|
||||
Position::new(
|
||||
u32::try_from(value.line).unwrap(),
|
||||
u32::try_from(value.end).unwrap()),
|
||||
),
|
||||
target: Some(url),
|
||||
//tooltip: Some(url.path().to_string().strip_prefix(self.root.clone().unwrap().as_str()).unwrap().to_string()),
|
||||
tooltip: None,
|
||||
data: None,
|
||||
Some(DocumentLink {
|
||||
range: Range::new(
|
||||
Position::new(u32::try_from(value.line).unwrap(), u32::try_from(value.start).unwrap()),
|
||||
Position::new(u32::try_from(value.line).unwrap(), u32::try_from(value.end).unwrap()),
|
||||
),
|
||||
target: Some(url.clone()),
|
||||
tooltip: Some(url.path().to_string()),
|
||||
data: None,
|
||||
})
|
||||
})
|
||||
}).collect();
|
||||
eprintln!("links: {:?}", edges);
|
||||
completable.complete(Ok(edges));
|
||||
.collect();
|
||||
debug!("document link results";
|
||||
"links" => format!("{:?}", edges.iter().map(|e| (e.range, e.target.as_ref().unwrap().path())).collect::<Vec<_>>()),
|
||||
"path" => curr_doc.to_str().unwrap(),
|
||||
);
|
||||
completable.complete(Ok(edges));
|
||||
});
|
||||
}
|
||||
|
||||
fn document_link_resolve(&mut self, _: DocumentLink, completable: LSCompletable<DocumentLink>) {
|
||||
|
|
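Most of the log points added in this commit use slog's structured form: a message, a semicolon, then key → value pairs, emitted through the free-standing slog_scope macros. A minimal sketch of how such a scoped logger is typically wired up; the plain stdout drain below is an assumption — the server presumably builds its own drain inside the new logging module:

```rust
use slog::{o, Drain};

fn main() {
    // Assumed drain for illustration only.
    let decorator = slog_term::PlainSyncDecorator::new(std::io::stdout());
    let logger = slog::Logger::root(slog_term::FullFormat::new(decorator).build().fuse(), o!());

    // Registering a global logger lets the bare info!/debug!/warn! macros from
    // slog_scope (as used throughout the diff above) find it.
    let _guard = slog_scope::set_global_logger(logger);
    slog_scope::info!("generating graph for current root"; "root" => "/tmp/shaderpack");
    slog_scope::debug!("adding includes for new file"; "file" => "final.fsh", "includes" => "[common.glsl]");
}
```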
|
@ -1,6 +1,9 @@
|
|||
use std::{collections::{HashMap, LinkedList, VecDeque}, path::{Path, PathBuf}};
|
||||
use std::iter::Peekable;
|
||||
use std::cmp::min;
|
||||
use std::iter::Peekable;
|
||||
use std::{
|
||||
collections::{HashMap, LinkedList, VecDeque},
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
use core::slice::Iter;
|
||||
|
||||
|
@ -8,7 +11,7 @@ use petgraph::stable_graph::NodeIndex;
|
|||
|
||||
use crate::graph::CachedStableGraph;
|
||||
|
||||
/// FilialTuple represents a tuple with a parent at index 0
|
||||
/// FilialTuple represents a tuple with a parent at index 0
|
||||
/// and a child at index 1. Parent can be nullable in the case of
|
||||
/// the child being a top level node in the tree.
|
||||
#[derive(PartialEq, Eq, Hash)]
|
||||
|
@ -21,9 +24,7 @@ impl From<(Option<&NodeIndex>, NodeIndex)> for FilialTuple {
|
|||
}
|
||||
|
||||
pub fn generate_merge_list<'a>(
|
||||
nodes: &'a [(NodeIndex, Option<NodeIndex>)],
|
||||
sources: &'a HashMap<PathBuf, String>,
|
||||
graph: &'a CachedStableGraph
|
||||
nodes: &'a [(NodeIndex, Option<NodeIndex>)], sources: &'a HashMap<PathBuf, String>, graph: &'a CachedStableGraph,
|
||||
) -> String {
|
||||
let mut line_directives: Vec<String> = Vec::new();
|
||||
|
||||
|
@ -45,17 +46,23 @@ pub fn generate_merge_list<'a>(
|
|||
// stack to keep track of the depth first traversal
|
||||
let mut stack = VecDeque::<NodeIndex>::new();
|
||||
|
||||
create_merge_views(&mut nodes_iter, &mut merge_list, &mut last_offset_set, graph, sources, &mut line_directives, &mut stack);
|
||||
create_merge_views(
|
||||
&mut nodes_iter,
|
||||
&mut merge_list,
|
||||
&mut last_offset_set,
|
||||
graph,
|
||||
sources,
|
||||
&mut line_directives,
|
||||
&mut stack,
|
||||
);
|
||||
|
||||
// now we add a view of the remainder of the root file
|
||||
let offset = *last_offset_set.get(&FilialTuple(None, first)).unwrap();
|
||||
|
||||
let len = sources.get(&first_path).unwrap().len();
|
||||
merge_list.push_back(&sources.get(&first_path).unwrap()[min(offset, len) ..]);
|
||||
merge_list.push_back(&sources.get(&first_path).unwrap()[min(offset, len)..]);
|
||||
|
||||
let total_len = merge_list.iter().fold(0, |a, b| {
|
||||
a + b.len()
|
||||
});
|
||||
let total_len = merge_list.iter().fold(0, |a, b| a + b.len());
|
||||
|
||||
let mut merged = String::with_capacity(total_len);
|
||||
for slice in merge_list {
|
||||
|
@ -66,21 +73,16 @@ pub fn generate_merge_list<'a>(
|
|||
}
|
||||
|
||||
fn create_merge_views<'a>(
|
||||
nodes: &mut Peekable<Iter<(NodeIndex, Option<NodeIndex>)>>,
|
||||
merge_list: &mut LinkedList<&'a str>,
|
||||
last_offset_set: &mut HashMap<FilialTuple, usize>,
|
||||
graph: &'a CachedStableGraph,
|
||||
sources: &'a HashMap<PathBuf, String>,
|
||||
line_directives: &mut Vec<String>,
|
||||
stack: &mut VecDeque<NodeIndex>,
|
||||
nodes: &mut Peekable<Iter<(NodeIndex, Option<NodeIndex>)>>, merge_list: &mut LinkedList<&'a str>,
|
||||
last_offset_set: &mut HashMap<FilialTuple, usize>, graph: &'a CachedStableGraph, sources: &'a HashMap<PathBuf, String>,
|
||||
line_directives: &mut Vec<String>, stack: &mut VecDeque<NodeIndex>,
|
||||
) {
|
||||
|
||||
loop {
|
||||
let n = match nodes.next() {
|
||||
Some(n) => n,
|
||||
None => return,
|
||||
};
|
||||
|
||||
|
||||
// invariant: never None as only the first element in `nodes` should have a None, which is popped off in the calling function
|
||||
let parent = n.1.unwrap();
|
||||
let child = n.0;
|
||||
|
@ -90,8 +92,10 @@ fn create_merge_views<'a>(
|
|||
|
||||
let parent_source = sources.get(&parent_path).unwrap();
|
||||
let (char_for_line, char_following_line) = char_offset_for_line(edge.line, parent_source);
|
||||
|
||||
let offset = *last_offset_set.insert((stack.back(), parent).into(), char_following_line).get_or_insert(0);
|
||||
|
||||
let offset = *last_offset_set
|
||||
.insert((stack.back(), parent).into(), char_following_line)
|
||||
.get_or_insert(0);
|
||||
merge_list.push_back(&parent_source[offset..char_for_line]);
|
||||
add_opening_line_directive(&child_path, merge_list, line_directives);
|
||||
|
||||
|
@ -104,21 +108,21 @@ fn create_merge_views<'a>(
|
|||
// if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad
|
||||
let offset = {
|
||||
match child_source.ends_with('\n') {
|
||||
true => child_source.len()-1,
|
||||
true => child_source.len() - 1,
|
||||
false => child_source.len(),
|
||||
}
|
||||
};
|
||||
merge_list.push_back(&child_source[..offset]);
|
||||
last_offset_set.insert(FilialTuple(Some(parent), child), 0);
|
||||
// +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line
|
||||
add_closing_line_directive(edge.line+2, &parent_path, merge_list, line_directives);
|
||||
add_closing_line_directive(edge.line + 2, &parent_path, merge_list, line_directives);
|
||||
// if the next pair's parent is not the current pair's parent, we need to bubble up
|
||||
if stack.contains(&next.1.unwrap()) {
|
||||
return;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
|
||||
stack.push_back(parent);
|
||||
create_merge_views(nodes, merge_list, last_offset_set, graph, sources, line_directives, stack);
|
||||
stack.pop_back();
|
||||
|
@ -128,37 +132,37 @@ fn create_merge_views<'a>(
|
|||
// this evaluates to false once the file contents have been exhausted aka offset = child_source.len() + 1
|
||||
let end_offset = {
|
||||
match child_source.ends_with('\n') {
|
||||
true => 1/* child_source.len()-1 */,
|
||||
false => 0/* child_source.len() */,
|
||||
true => 1, /* child_source.len()-1 */
|
||||
false => 0, /* child_source.len() */
|
||||
}
|
||||
};
|
||||
if offset < child_source.len()-end_offset {
|
||||
if offset < child_source.len() - end_offset {
|
||||
// if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad
|
||||
merge_list.push_back(&child_source[offset../* std::cmp::max( */child_source.len()-end_offset/* , offset) */]);
|
||||
last_offset_set.insert(FilialTuple(Some(parent), child), 0);
|
||||
}
|
||||
|
||||
// +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line
|
||||
add_closing_line_directive(edge.line+2, &parent_path, merge_list, line_directives);
|
||||
add_closing_line_directive(edge.line + 2, &parent_path, merge_list, line_directives);
|
||||
|
||||
// we need to check the next item at the point of original return further down the callstack
|
||||
if nodes.peek().is_some() && stack.contains(&nodes.peek().unwrap().1.unwrap()) {
|
||||
return;
|
||||
}
|
||||
},
|
||||
}
|
||||
None => {
|
||||
let child_source = sources.get(&child_path).unwrap();
|
||||
// if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad
|
||||
let offset = {
|
||||
match child_source.ends_with('\n') {
|
||||
true => child_source.len()-1,
|
||||
true => child_source.len() - 1,
|
||||
false => child_source.len(),
|
||||
}
|
||||
};
|
||||
merge_list.push_back(&child_source[..offset]);
|
||||
last_offset_set.insert(FilialTuple(Some(parent), child), 0);
|
||||
// +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line
|
||||
add_closing_line_directive(edge.line+2, &parent_path, merge_list, line_directives);
|
||||
add_closing_line_directive(edge.line + 2, &parent_path, merge_list, line_directives);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -171,17 +175,17 @@ fn char_offset_for_line(line_num: usize, source: &str) -> (usize, usize) {
|
|||
let mut char_following_line: usize = 0;
|
||||
for (n, line) in source.lines().enumerate() {
|
||||
if n == line_num {
|
||||
char_following_line += line.len()+1;
|
||||
char_following_line += line.len() + 1;
|
||||
break;
|
||||
}
|
||||
char_for_line += line.len()+1;
|
||||
char_for_line += line.len() + 1;
|
||||
char_following_line = char_for_line;
|
||||
}
|
||||
(char_for_line, char_following_line)
|
||||
}
|
||||
|
||||
fn add_opening_line_directive(path: &Path, merge_list: &mut LinkedList<&str>, line_directives: &mut Vec<String>) {
|
||||
let line_directive = format!("#line 1 \"{}\"\n", path.to_str().unwrap().replace("\\", "\\\\"));
|
||||
let line_directive = format!("#line 1 \"{}\"\n", path.to_str().unwrap().replace('\\', "\\\\"));
|
||||
line_directives.push(line_directive);
|
||||
unsafe_get_and_insert(merge_list, line_directives);
|
||||
}
|
||||
|
@ -190,14 +194,14 @@ fn add_closing_line_directive(line: usize, path: &Path, merge_list: &mut LinkedL
|
|||
// Optifine doesn't seem to add a leading newline if the previous line was a #line directive
|
||||
let line_directive = if let Some(l) = merge_list.back() {
|
||||
if l.trim().starts_with("#line") {
|
||||
format!("#line {} \"{}\"\n", line, path.to_str().unwrap().replace("\\", "\\\\"))
|
||||
format!("#line {} \"{}\"\n", line, path.to_str().unwrap().replace('\\', "\\\\"))
|
||||
} else {
|
||||
format!("\n#line {} \"{}\"\n", line, path.to_str().unwrap().replace("\\", "\\\\"))
|
||||
format!("\n#line {} \"{}\"\n", line, path.to_str().unwrap().replace('\\', "\\\\"))
|
||||
}
|
||||
} else {
|
||||
format!("\n#line {} \"{}\"\n", line, path.to_str().unwrap().replace("\\", "\\\\"))
|
||||
format!("\n#line {} \"{}\"\n", line, path.to_str().unwrap().replace('\\', "\\\\"))
|
||||
};
|
||||
|
||||
|
||||
line_directives.push(line_directive);
|
||||
unsafe_get_and_insert(merge_list, line_directives);
|
||||
}
|
||||
|
@ -205,7 +209,7 @@ fn add_closing_line_directive(line: usize, path: &Path, merge_list: &mut LinkedL
|
|||
fn unsafe_get_and_insert(merge_list: &mut LinkedList<&str>, line_directives: &[String]) {
|
||||
// :^)
|
||||
unsafe {
|
||||
let vec_ptr_offset = line_directives.as_ptr().add(line_directives.len()-1);
|
||||
let vec_ptr_offset = line_directives.as_ptr().add(line_directives.len() - 1);
|
||||
merge_list.push_back(&vec_ptr_offset.as_ref().unwrap()[..]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
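The repeated `edge.line + 2` in merge_views.rs above follows from the comment's rule: `edge.line` is the 0-indexed line of the `#include` in the parent, while `#line N` is 1-indexed and applies to the line *after* the directive. A tiny worked example (file name invented):

```rust
// Given a 0-indexed include line, build the #line directive that resumes the parent
// file immediately after the include.
fn closing_line_directive(include_line: usize, parent: &str) -> String {
    // +2: one to convert 0-indexed -> 1-indexed, one because #line names the *following* line
    format!("#line {} \"{}\"", include_line + 2, parent)
}

fn main() {
    // An #include sitting on line index 2 (the file's 3rd line) means the merged view
    // resumes the parent at its 4th source line.
    assert_eq!(closing_line_directive(2, "final.fsh"), "#line 4 \"final.fsh\"");
    println!("{}", closing_line_directive(2, "final.fsh"));
}
```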
|
@ -1,5 +1,7 @@
|
|||
use std::ffi::{CStr, CString};
|
||||
use std::ptr;
|
||||
use std::ffi::{CString, CStr};
|
||||
|
||||
use slog_scope::{debug, info};
|
||||
|
||||
#[cfg(test)]
|
||||
use mockall::automock;
|
||||
|
@ -10,31 +12,33 @@ pub trait ShaderValidator {
|
|||
}
|
||||
|
||||
pub struct OpenGlContext {
|
||||
_ctx: glutin::Context<glutin::PossiblyCurrent>
|
||||
_ctx: glutin::Context<glutin::PossiblyCurrent>,
|
||||
}
|
||||
|
||||
impl OpenGlContext {
|
||||
pub fn new() -> OpenGlContext {
|
||||
let events_loop = glutin::event_loop::EventLoop::new();
|
||||
let gl_window = glutin::ContextBuilder::new().build_headless(&*events_loop, glutin::dpi::PhysicalSize::new(1, 1)).unwrap();
|
||||
|
||||
let gl_window = glutin::ContextBuilder::new()
|
||||
.build_headless(&*events_loop, glutin::dpi::PhysicalSize::new(1, 1))
|
||||
.unwrap();
|
||||
|
||||
let gl_window = unsafe {
|
||||
let gl_window = gl_window.make_current().unwrap();
|
||||
gl::load_with(|symbol| gl_window.get_proc_address(symbol) as *const _);
|
||||
gl_window
|
||||
};
|
||||
|
||||
let gl_ctx = OpenGlContext { _ctx: gl_window };
|
||||
|
||||
unsafe {
|
||||
eprintln!(
|
||||
"Using OpenGL device {} {} {}",
|
||||
String::from_utf8(CStr::from_ptr(gl::GetString(gl::VENDOR) as *const _).to_bytes().to_vec()).unwrap(),
|
||||
String::from_utf8(CStr::from_ptr(gl::GetString(gl::VERSION) as *const _).to_bytes().to_vec()).unwrap(),
|
||||
String::from_utf8(CStr::from_ptr(gl::GetString(gl::RENDERER) as *const _).to_bytes().to_vec()).unwrap()
|
||||
debug!(
|
||||
"OpenGL device";
|
||||
"vendor" => gl_ctx.vendor(),
|
||||
"version" => String::from_utf8(CStr::from_ptr(gl::GetString(gl::VERSION) as *const _).to_bytes().to_vec()).unwrap(),
|
||||
"renderer" => String::from_utf8(CStr::from_ptr(gl::GetString(gl::RENDERER) as *const _).to_bytes().to_vec()).unwrap()
|
||||
);
|
||||
}
|
||||
OpenGlContext{
|
||||
_ctx: gl_window,
|
||||
}
|
||||
gl_ctx
|
||||
}
|
||||
|
||||
unsafe fn compile_and_get_shader_log(&self, shader: gl::types::GLuint, source: String) -> Option<String> {
|
||||
|
@ -49,7 +53,12 @@ impl OpenGlContext {
|
|||
let mut info_len: gl::types::GLint = 0;
|
||||
gl::GetShaderiv(shader, gl::INFO_LOG_LENGTH, &mut info_len);
|
||||
let mut info = vec![0u8; info_len as usize];
|
||||
gl::GetShaderInfoLog(shader, info_len as gl::types::GLsizei, ptr::null_mut(), info.as_mut_ptr() as *mut gl::types::GLchar);
|
||||
gl::GetShaderInfoLog(
|
||||
shader,
|
||||
info_len as gl::types::GLsizei,
|
||||
ptr::null_mut(),
|
||||
info.as_mut_ptr() as *mut gl::types::GLchar,
|
||||
);
|
||||
info.set_len((info_len - 1) as usize); // ignore null for str::from_utf8
|
||||
Some(String::from_utf8(info).unwrap())
|
||||
} else {
|
||||
|
@ -62,7 +71,7 @@ impl OpenGlContext {
|
|||
|
||||
impl ShaderValidator for OpenGlContext {
|
||||
fn validate(&self, tree_type: super::TreeType, source: String) -> Option<String> {
|
||||
unsafe {
|
||||
let result = unsafe {
|
||||
match tree_type {
|
||||
crate::TreeType::Fragment => {
|
||||
// Fragment shader
|
||||
|
@ -81,10 +90,19 @@ impl ShaderValidator for OpenGlContext {
|
|||
}
|
||||
crate::TreeType::Compute => {
|
||||
// Compute shader
|
||||
let compute_shader= gl::CreateShader(gl::COMPUTE_SHADER);
|
||||
let compute_shader = gl::CreateShader(gl::COMPUTE_SHADER);
|
||||
self.compile_and_get_shader_log(compute_shader, source)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
match &result {
|
||||
Some(output) => info!("compilation errors reported"; "errors" => format!("`{}`", output.replace('\n', "\\n"))),
|
||||
None => info!("compilation reported no errors"),
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
|
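opengl.rs above imports `mockall::automock` under `#[cfg(test)]`, which suggests the ShaderValidator trait gets an auto-generated mock for the test suite. A hedged sketch of that pattern with a simplified signature (the real `validate` also takes a TreeType); the test body is illustrative only and runs under `cargo test` with mockall as a dev-dependency:

```rust
#[cfg(test)]
use mockall::automock;

// Simplified trait for illustration; the real ShaderValidator::validate also takes a TreeType.
#[cfg_attr(test, automock)]
pub trait ShaderValidator {
    fn validate(&self, source: String) -> Option<String>;
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn mocked_validator_reports_no_errors() {
        // automock generates MockShaderValidator with an expect_* method per trait fn.
        let mut mock = MockShaderValidator::new();
        mock.expect_validate().returning(|_| None);
        assert_eq!(mock.validate("void main() {}".into()), None);
    }
}
```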
|
@ -62,10 +62,10 @@ fn copy_files(files: &str, dest: &TempDir) {
|
|||
copy_items(&files, dest.path().join("shaders"), opts).unwrap();
|
||||
}
|
||||
|
||||
fn copy_to_and_set_root(test_path: &str, server: &mut MinecraftShaderLanguageServer,) -> (Rc<TempDir>, PathBuf) {
|
||||
fn copy_to_and_set_root(test_path: &str, server: &mut MinecraftShaderLanguageServer) -> (Rc<TempDir>, PathBuf) {
|
||||
let (_tmp_dir, tmp_path) = copy_to_tmp_dir(test_path);
|
||||
|
||||
server.root = tmp_path.clone();//format!("{}{}", "file://", tmp_path);
|
||||
server.root = tmp_path.clone(); //format!("{}{}", "file://", tmp_path);
|
||||
|
||||
(_tmp_dir, tmp_path)
|
||||
}
|
||||
|
@ -119,10 +119,7 @@ fn test_empty_initialize() {
|
|||
}
|
||||
};
|
||||
|
||||
let completable = MethodCompletable::new(ResponseCompletable::new(
|
||||
Some(Id::Number(1)),
|
||||
Box::new(on_response),
|
||||
));
|
||||
let completable = MethodCompletable::new(ResponseCompletable::new(Some(Id::Number(1)), Box::new(on_response)));
|
||||
server.initialize(initialize_params, completable);
|
||||
|
||||
assert_eq!(server.root, tmp_path);
|
||||
|
@ -169,10 +166,7 @@ fn test_01_initialize() {
|
|||
}
|
||||
};
|
||||
|
||||
let completable = MethodCompletable::new(ResponseCompletable::new(
|
||||
Some(Id::Number(1)),
|
||||
Box::new(on_response),
|
||||
));
|
||||
let completable = MethodCompletable::new(ResponseCompletable::new(Some(Id::Number(1)), Box::new(on_response)));
|
||||
server.initialize(initialize_params, completable);
|
||||
server.endpoint.request_shutdown();
|
||||
|
||||
|
@ -186,7 +180,7 @@ fn test_01_initialize() {
|
|||
assert_eq!(
|
||||
server.graph.borrow().graph[node1],
|
||||
//format!("{:?}/{}/{}", tmp_path, "shaders", "final.fsh")
|
||||
tmp_path.join("shaders").join("final.fsh").to_str().unwrap().to_string()
|
||||
tmp_path.join("shaders").join("final.fsh").to_str().unwrap().to_string()
|
||||
);
|
||||
assert_eq!(
|
||||
server.graph.borrow().graph[node2],
|
||||
|
@ -194,10 +188,7 @@ fn test_01_initialize() {
|
|||
tmp_path.join("shaders").join("common.glsl").to_str().unwrap().to_string()
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
server.graph.borrow().graph.edge_weight(edge).unwrap().line,
|
||||
2
|
||||
);
|
||||
assert_eq!(server.graph.borrow().graph.edge_weight(edge).unwrap().line, 2);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -235,10 +226,7 @@ fn test_05_initialize() {
}
};

let completable = MethodCompletable::new(ResponseCompletable::new(
Some(Id::Number(1)),
Box::new(on_response),
));
let completable = MethodCompletable::new(ResponseCompletable::new(Some(Id::Number(1)), Box::new(on_response)));
server.initialize(initialize_params, completable);
server.endpoint.request_shutdown();

@ -250,17 +238,40 @@ fn test_05_initialize() {
let pairs: HashSet<(PathBuf, PathBuf)> = vec![
(
tmp_path.join("shaders").join("final.fsh").to_str().unwrap().to_string().into(),
tmp_path.join("shaders").join("common.glsl").to_str().unwrap().to_string().into()
tmp_path.join("shaders").join("common.glsl").to_str().unwrap().to_string().into(),
),
(
tmp_path.join("shaders").join("final.fsh").to_str().unwrap().to_string().into(),
tmp_path.join("shaders").join("test").join("banana.glsl").to_str().unwrap().to_string().into()
tmp_path
.join("shaders")
.join("test")
.join("banana.glsl")
.to_str()
.unwrap()
.to_string()
.into(),
),
(
tmp_path.join("shaders").join("test").join("banana.glsl").to_str().unwrap().to_string().into(),
tmp_path.join("shaders").join("test").join("burger.glsl").to_str().unwrap().to_string().into()
)
].into_iter().collect();
tmp_path
.join("shaders")
.join("test")
.join("banana.glsl")
.to_str()
.unwrap()
.to_string()
.into(),
tmp_path
.join("shaders")
.join("test")
.join("burger.glsl")
.to_str()
.unwrap()
.to_string()
.into(),
),
]
.into_iter()
.collect();

for edge in server.graph.borrow().graph.edge_indices() {
let endpoints = server.graph.borrow().graph.edge_endpoints(edge).unwrap();
@ -277,15 +288,7 @@ fn test_graph_two_connected_nodes() {

let idx1 = graph.add_node(&PathBuf::from("sample"));
let idx2 = graph.add_node(&PathBuf::from("banana"));
graph.add_edge(
idx1,
idx2,
IncludePosition {
line: 3,
start: 0,
end: 0,
},
);
graph.add_edge(idx1, idx2, IncludePosition { line: 3, start: 0, end: 0 });

let children = graph.child_node_names(idx1);
assert_eq!(children.len(), 1);
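For context, the tests in this file model shader #include relationships as a directed graph whose edge weight records where the include directive sits. A rough equivalent using plain petgraph (CachedStableGraph and child_node_names are the crate's own wrappers; this sketch only shows the underlying idea):

use petgraph::stable_graph::StableDiGraph;

#[derive(Debug, Clone, Copy)]
struct IncludePosition {
    line: usize,
    start: usize,
    end: usize,
}

fn main() {
    // nodes are file paths, edges point from the including file to the included file
    let mut graph: StableDiGraph<&str, IncludePosition> = StableDiGraph::new();
    let parent = graph.add_node("sample");
    let child = graph.add_node("banana");
    // "banana" is included from line 3 of "sample"
    graph.add_edge(parent, child, IncludePosition { line: 3, start: 0, end: 0 });
    assert_eq!(graph.neighbors(parent).count(), 1);
}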
@ -330,33 +333,9 @@ fn test_collect_root_ancestors() {
let idx2 = graph.add_node(&PathBuf::from("2"));
let idx3 = graph.add_node(&PathBuf::from("3"));

graph.add_edge(
idx0,
idx1,
IncludePosition {
line: 2,
start: 0,
end: 0,
},
);
graph.add_edge(
idx1,
idx2,
IncludePosition {
line: 3,
start: 0,
end: 0,
},
);
graph.add_edge(
idx3,
idx1,
IncludePosition {
line: 4,
start: 0,
end: 0,
},
);
graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 });
graph.add_edge(idx1, idx2, IncludePosition { line: 3, start: 0, end: 0 });
graph.add_edge(idx3, idx1, IncludePosition { line: 4, start: 0, end: 0 });

// 0 3
// |/
@ -384,33 +363,9 @@ fn test_collect_root_ancestors() {
let idx2 = graph.add_node(&PathBuf::from("2"));
let idx3 = graph.add_node(&PathBuf::from("3"));

graph.add_edge(
idx0,
idx1,
IncludePosition {
line: 2,
start: 0,
end: 0,
},
);
graph.add_edge(
idx0,
idx2,
IncludePosition {
line: 3,
start: 0,
end: 0,
},
);
graph.add_edge(
idx1,
idx3,
IncludePosition {
line: 5,
start: 0,
end: 0,
},
);
graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 });
graph.add_edge(idx0, idx2, IncludePosition { line: 3, start: 0, end: 0 });
graph.add_edge(idx1, idx3, IncludePosition { line: 5, start: 0, end: 0 });

// 0
// / \
@ -438,33 +393,9 @@ fn test_collect_root_ancestors() {
let idx2 = graph.add_node(&PathBuf::from("2"));
let idx3 = graph.add_node(&PathBuf::from("3"));

graph.add_edge(
idx0,
idx1,
IncludePosition {
line: 2,
start: 0,
end: 0,
},
);
graph.add_edge(
idx2,
idx3,
IncludePosition {
line: 3,
start: 0,
end: 0,
},
);
graph.add_edge(
idx1,
idx3,
IncludePosition {
line: 5,
start: 0,
end: 0,
},
);
graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 });
graph.add_edge(idx2, idx3, IncludePosition { line: 3, start: 0, end: 0 });
graph.add_edge(idx1, idx3, IncludePosition { line: 5, start: 0, end: 0 });

// 0
// |
@ -494,33 +425,9 @@ fn test_collect_root_ancestors() {
let idx2 = graph.add_node(&PathBuf::from("2"));
let idx3 = graph.add_node(&PathBuf::from("3"));

graph.add_edge(
idx0,
idx1,
IncludePosition {
line: 2,
start: 0,
end: 0,
},
);
graph.add_edge(
idx1,
idx2,
IncludePosition {
line: 4,
start: 0,
end: 0,
},
);
graph.add_edge(
idx1,
idx3,
IncludePosition {
line: 6,
start: 0,
end: 0,
},
);
graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 });
graph.add_edge(idx1, idx2, IncludePosition { line: 4, start: 0, end: 0 });
graph.add_edge(idx1, idx3, IncludePosition { line: 6, start: 0, end: 0 });

// 0
// |
@ -552,33 +459,9 @@ fn test_graph_dfs() {
let idx2 = graph.add_node(&PathBuf::from("2"));
let idx3 = graph.add_node(&PathBuf::from("3"));

graph.add_edge(
idx0,
idx1,
IncludePosition {
line: 2,
start: 0,
end: 0,
},
);
graph.add_edge(
idx0,
idx2,
IncludePosition {
line: 3,
start: 0,
end: 0,
},
);
graph.add_edge(
idx1,
idx3,
IncludePosition {
line: 5,
start: 0,
end: 0,
},
);
graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 });
graph.add_edge(idx0, idx2, IncludePosition { line: 3, start: 0, end: 0 });
graph.add_edge(idx1, idx3, IncludePosition { line: 5, start: 0, end: 0 });

let dfs = dfs::Dfs::new(&graph, idx0);
@ -618,87 +501,15 @@ fn test_graph_dfs() {
let idx6 = graph.add_node(&PathBuf::from("6"));
let idx7 = graph.add_node(&PathBuf::from("7"));

graph.add_edge(
idx0,
idx1,
IncludePosition {
line: 2,
start: 0,
end: 0,
},
);
graph.add_edge(
idx0,
idx2,
IncludePosition {
line: 3,
start: 0,
end: 0,
},
);
graph.add_edge(
idx1,
idx3,
IncludePosition {
line: 5,
start: 0,
end: 0,
},
);
graph.add_edge(
idx1,
idx4,
IncludePosition {
line: 6,
start: 0,
end: 0,
},
);
graph.add_edge(
idx2,
idx4,
IncludePosition {
line: 5,
start: 0,
end: 0,
},
);
graph.add_edge(
idx2,
idx5,
IncludePosition {
line: 4,
start: 0,
end: 0,
},
);
graph.add_edge(
idx3,
idx6,
IncludePosition {
line: 4,
start: 0,
end: 0,
},
);
graph.add_edge(
idx4,
idx6,
IncludePosition {
line: 4,
start: 0,
end: 0,
},
);
graph.add_edge(
idx6,
idx7,
IncludePosition {
line: 4,
start: 0,
end: 0,
},
);
graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 });
graph.add_edge(idx0, idx2, IncludePosition { line: 3, start: 0, end: 0 });
graph.add_edge(idx1, idx3, IncludePosition { line: 5, start: 0, end: 0 });
graph.add_edge(idx1, idx4, IncludePosition { line: 6, start: 0, end: 0 });
graph.add_edge(idx2, idx4, IncludePosition { line: 5, start: 0, end: 0 });
graph.add_edge(idx2, idx5, IncludePosition { line: 4, start: 0, end: 0 });
graph.add_edge(idx3, idx6, IncludePosition { line: 4, start: 0, end: 0 });
graph.add_edge(idx4, idx6, IncludePosition { line: 4, start: 0, end: 0 });
graph.add_edge(idx6, idx7, IncludePosition { line: 4, start: 0, end: 0 });

let dfs = dfs::Dfs::new(&graph, idx0);

@ -718,9 +529,7 @@ fn test_graph_dfs() {
// 3 4 5
// \ /
// 6 - 7
let expected_nodes = vec![
idx0, idx1, idx3, idx6, idx7, idx4, idx6, idx7, idx2, idx5, idx4, idx6, idx7,
];
let expected_nodes = vec![idx0, idx1, idx3, idx6, idx7, idx4, idx6, idx7, idx2, idx5, idx4, idx6, idx7];

assert_eq!(expected_nodes, nodes);
@ -760,96 +569,16 @@ fn test_graph_dfs_cycle() {
let idx6 = graph.add_node(&PathBuf::from("6"));
let idx7 = graph.add_node(&PathBuf::from("7"));

graph.add_edge(
idx0,
idx1,
IncludePosition {
line: 2,
start: 0,
end: 0,
},
);
graph.add_edge(
idx0,
idx2,
IncludePosition {
line: 3,
start: 0,
end: 0,
},
);
graph.add_edge(
idx1,
idx3,
IncludePosition {
line: 5,
start: 0,
end: 0,
},
);
graph.add_edge(
idx1,
idx4,
IncludePosition {
line: 6,
start: 0,
end: 0,
},
);
graph.add_edge(
idx2,
idx4,
IncludePosition {
line: 5,
start: 0,
end: 0,
},
);
graph.add_edge(
idx2,
idx5,
IncludePosition {
line: 4,
start: 0,
end: 0,
},
);
graph.add_edge(
idx3,
idx6,
IncludePosition {
line: 4,
start: 0,
end: 0,
},
);
graph.add_edge(
idx4,
idx6,
IncludePosition {
line: 4,
start: 0,
end: 0,
},
);
graph.add_edge(
idx6,
idx7,
IncludePosition {
line: 4,
start: 0,
end: 0,
},
);
graph.add_edge(
idx7,
idx4,
IncludePosition {
line: 4,
start: 0,
end: 0,
},
);
graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 });
graph.add_edge(idx0, idx2, IncludePosition { line: 3, start: 0, end: 0 });
graph.add_edge(idx1, idx3, IncludePosition { line: 5, start: 0, end: 0 });
graph.add_edge(idx1, idx4, IncludePosition { line: 6, start: 0, end: 0 });
graph.add_edge(idx2, idx4, IncludePosition { line: 5, start: 0, end: 0 });
graph.add_edge(idx2, idx5, IncludePosition { line: 4, start: 0, end: 0 });
graph.add_edge(idx3, idx6, IncludePosition { line: 4, start: 0, end: 0 });
graph.add_edge(idx4, idx6, IncludePosition { line: 4, start: 0, end: 0 });
graph.add_edge(idx6, idx7, IncludePosition { line: 4, start: 0, end: 0 });
graph.add_edge(idx7, idx4, IncludePosition { line: 4, start: 0, end: 0 });

let mut dfs = dfs::Dfs::new(&graph, idx0);
@ -878,24 +607,8 @@ fn test_graph_dfs_cycle() {
let idx0 = graph.add_node(&PathBuf::from("0"));
let idx1 = graph.add_node(&PathBuf::from("1"));

graph.add_edge(
idx0,
idx1,
IncludePosition {
line: 2,
start: 0,
end: 0,
},
);
graph.add_edge(
idx1,
idx0,
IncludePosition {
line: 2,
start: 0,
end: 0,
},
);
graph.add_edge(idx0, idx1, IncludePosition { line: 2, start: 0, end: 0 });
graph.add_edge(idx1, idx0, IncludePosition { line: 2, start: 0, end: 0 });

let mut dfs = dfs::Dfs::new(&graph, idx1);
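The cycle tests above rely on the crate's dfs::Dfs reporting an error when it re-enters a file that is already on the current include chain, while still revisiting shared includes reached along different paths (the repeated idx6/idx7 entries in expected_nodes show that). A generic sketch of that on-stack check over a petgraph DiGraph, written from the behaviour the tests pin down rather than from the crate's implementation:

use petgraph::graph::{DiGraph, NodeIndex};

// returns the DFS pre-order, or Err(node) for the first node found twice on the stack
fn dfs_detect_cycle(graph: &DiGraph<&str, ()>, root: NodeIndex) -> Result<Vec<NodeIndex>, NodeIndex> {
    fn visit(
        graph: &DiGraph<&str, ()>,
        node: NodeIndex,
        on_stack: &mut Vec<NodeIndex>,
        order: &mut Vec<NodeIndex>,
    ) -> Result<(), NodeIndex> {
        if on_stack.contains(&node) {
            return Err(node); // include cycle
        }
        on_stack.push(node);
        order.push(node);
        for child in graph.neighbors(node) {
            visit(graph, child, on_stack, order)?;
        }
        on_stack.pop();
        Ok(())
    }

    let mut order = Vec::new();
    visit(graph, root, &mut Vec::new(), &mut order).map(|_| order)
}

fn main() {
    let mut graph = DiGraph::new();
    let a = graph.add_node("0");
    let b = graph.add_node("1");
    graph.add_edge(a, b, ());
    graph.add_edge(b, a, ());
    assert!(dfs_detect_cycle(&graph, a).is_err());
}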
@ -912,22 +625,21 @@ fn test_generate_merge_list_01() {
let (_tmp_dir, tmp_path) = copy_to_and_set_root("./testdata/01", &mut server);
server.endpoint.request_shutdown();

let final_idx = server.graph.borrow_mut()
let final_idx = server
.graph
.borrow_mut()
//.add_node(&format!("{:?}/shaders/final.fsh", tmp_path).try_into().unwrap());
.add_node(&tmp_path.join("shaders").join("final.fsh"));
let common_idx = server.graph.borrow_mut()
let common_idx = server
.graph
.borrow_mut()
//.add_node(&format!("{:?}/shaders/common.glsl", tmp_path).try_into().unwrap());
.add_node(&tmp_path.join("shaders").join("common.glsl"));

server.graph.borrow_mut().add_edge(
final_idx,
common_idx,
IncludePosition {
line: 2,
start: 0,
end: 0,
},
);
server
.graph
.borrow_mut()
.add_edge(final_idx, common_idx, IncludePosition { line: 2, start: 0, end: 0 });

let nodes = server.get_dfs_for_node(final_idx).unwrap();
let sources = server.load_sources(&nodes).unwrap();
@ -935,11 +647,18 @@ fn test_generate_merge_list_01() {
let graph_borrow = server.graph.borrow();
let result = merge_views::generate_merge_list(&nodes, &sources, &graph_borrow);

let merge_file = tmp_path.clone().join( "shaders").join("final.fsh.merge");
let merge_file = tmp_path.join("shaders").join("final.fsh.merge");

let mut truth = fs::read_to_string(merge_file).unwrap();
truth = truth.replacen("!!", &tmp_path.clone().join("shaders").join("common.glsl").to_str().unwrap().replace("\\", "\\\\"), 1);
truth = truth.replace("!!", &tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace("\\", "\\\\"));
truth = truth.replacen(
"!!",
&tmp_path.join("shaders").join("common.glsl").to_str().unwrap().replace('\\', "\\\\"),
1,
);
truth = truth.replace(
"!!",
&tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"),
);

assert_eq!(result, truth);
}
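The .merge truth files used by these tests contain "!!" placeholders where absolute paths will appear; replacen with a count of 1 fills them one at a time, in the order the merged output visits the files. A small standalone illustration of that mechanism (the truth-file content here is made up for the example):

fn main() {
    let mut truth = String::from("#line 1 \"!!\"\nvoid common() {}\n#line 3 \"!!\"\nvoid main() {}\n");
    // first placeholder -> the included file, second -> the root shader
    truth = truth.replacen("!!", "/tmp/shaders/common.glsl", 1);
    truth = truth.replacen("!!", "/tmp/shaders/final.fsh", 1);
    assert!(truth.contains("/tmp/shaders/common.glsl"));
    assert!(truth.contains("/tmp/shaders/final.fsh"));
}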
@ -951,46 +670,39 @@ fn test_generate_merge_list_02() {
let (_tmp_dir, tmp_path) = copy_to_and_set_root("./testdata/02", &mut server);
server.endpoint.request_shutdown();

let final_idx = server.graph.borrow_mut()
let final_idx = server
.graph
.borrow_mut()
//.add_node(&format!("{}/shaders/{}", tmp_path, "final.fsh").try_into().unwrap());
.add_node(&tmp_path.join("shaders").join("final.fsh"));
let test_idx = server.graph.borrow_mut()
let test_idx = server
.graph
.borrow_mut()
//.add_node(&format!("{}/shaders/utils/{}", tmp_path, "test.glsl").try_into().unwrap());
.add_node(&tmp_path.join("shaders").join("utils").join("test.glsl"));
let burger_idx = server.graph.borrow_mut()
let burger_idx = server
.graph
.borrow_mut()
//.add_node(&format!("{}/shaders/utils/{}", tmp_path, "burger.glsl").try_into().unwrap());
.add_node(&tmp_path.join("shaders").join("utils").join("burger.glsl"));
let sample_idx = server.graph.borrow_mut()
let sample_idx = server
.graph
.borrow_mut()
//.add_node(&format!("{}/shaders/utils/{}", tmp_path, "sample.glsl").try_into().unwrap());
.add_node(&tmp_path.join("shaders").join("utils").join("sample.glsl"));

server.graph.borrow_mut().add_edge(
final_idx,
sample_idx,
IncludePosition {
line: 2,
start: 0,
end: 0,
},
);
server.graph.borrow_mut().add_edge(
sample_idx,
burger_idx,
IncludePosition {
line: 4,
start: 0,
end: 0,
},
);
server.graph.borrow_mut().add_edge(
sample_idx,
test_idx,
IncludePosition {
line: 6,
start: 0,
end: 0,
},
);
server
.graph
.borrow_mut()
.add_edge(final_idx, sample_idx, IncludePosition { line: 2, start: 0, end: 0 });
server
.graph
.borrow_mut()
.add_edge(sample_idx, burger_idx, IncludePosition { line: 4, start: 0, end: 0 });
server
.graph
.borrow_mut()
.add_edge(sample_idx, test_idx, IncludePosition { line: 6, start: 0, end: 0 });

let nodes = server.get_dfs_for_node(final_idx).unwrap();
let sources = server.load_sources(&nodes).unwrap();
@ -998,21 +710,29 @@ fn test_generate_merge_list_02() {
let graph_borrow = server.graph.borrow();
let result = merge_views::generate_merge_list(&nodes, &sources, &graph_borrow);

let merge_file = tmp_path.clone().join("shaders").join("final.fsh.merge");
let merge_file = tmp_path.join("shaders").join("final.fsh.merge");

let mut truth = fs::read_to_string(merge_file).unwrap();

for file in &[
"sample.glsl",
"burger.glsl",
"sample.glsl",
"test.glsl",
"sample.glsl",
] {
for file in &["sample.glsl", "burger.glsl", "sample.glsl", "test.glsl", "sample.glsl"] {
let path = tmp_path.clone();
truth = truth.replacen("!!", &&path.join("shaders").join("utils").join(file).to_str().unwrap().replace("\\", "\\\\"), 1);
truth = truth.replacen(
"!!",
&path
.join("shaders")
.join("utils")
.join(file)
.to_str()
.unwrap()
.replace('\\', "\\\\"),
1,
);
}
truth = truth.replacen("!!", &&tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace("\\", "\\\\"), 1);
truth = truth.replacen(
"!!",
&tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"),
1,
);

assert_eq!(result, truth);
}
@ -1024,46 +744,39 @@ fn test_generate_merge_list_03() {
let (_tmp_dir, tmp_path) = copy_to_and_set_root("./testdata/03", &mut server);
server.endpoint.request_shutdown();

let final_idx = server.graph.borrow_mut()
let final_idx = server
.graph
.borrow_mut()
//.add_node(&format!("{}/shaders/{}", tmp_path, "final.fsh").try_into().unwrap());
.add_node(&tmp_path.join("shaders").join("final.fsh"));
let test_idx = server.graph.borrow_mut()
let test_idx = server
.graph
.borrow_mut()
//.add_node(&format!("{}/shaders/utils/{}", tmp_path, "test.glsl").try_into().unwrap());
.add_node(&tmp_path.join("shaders").join("utils").join("test.glsl"));
let burger_idx = server.graph.borrow_mut()
let burger_idx = server
.graph
.borrow_mut()
//.add_node(&format!("{}/shaders/utils/{}", tmp_path, "burger.glsl").try_into().unwrap());
.add_node(&tmp_path.join("shaders").join("utils").join("burger.glsl"));
let sample_idx = server.graph.borrow_mut()
let sample_idx = server
.graph
.borrow_mut()
//.add_node(&format!("{}/shaders/utils/{}", tmp_path, "sample.glsl").try_into().unwrap());
.add_node(&tmp_path.join("shaders").join("utils").join("sample.glsl"));

server.graph.borrow_mut().add_edge(
final_idx,
sample_idx,
IncludePosition {
line: 2,
start: 0,
end: 0,
},
);
server.graph.borrow_mut().add_edge(
sample_idx,
burger_idx,
IncludePosition {
line: 4,
start: 0,
end: 0,
},
);
server.graph.borrow_mut().add_edge(
sample_idx,
test_idx,
IncludePosition {
line: 6,
start: 0,
end: 0,
},
);
server
.graph
.borrow_mut()
.add_edge(final_idx, sample_idx, IncludePosition { line: 2, start: 0, end: 0 });
server
.graph
.borrow_mut()
.add_edge(sample_idx, burger_idx, IncludePosition { line: 4, start: 0, end: 0 });
server
.graph
.borrow_mut()
.add_edge(sample_idx, test_idx, IncludePosition { line: 6, start: 0, end: 0 });

let nodes = server.get_dfs_for_node(final_idx).unwrap();
let sources = server.load_sources(&nodes).unwrap();
@ -1071,21 +784,29 @@ fn test_generate_merge_list_03() {
let graph_borrow = server.graph.borrow();
let result = merge_views::generate_merge_list(&nodes, &sources, &graph_borrow);

let merge_file = tmp_path.clone().join("shaders").join("final.fsh.merge");
let merge_file = tmp_path.join("shaders").join("final.fsh.merge");

let mut truth = fs::read_to_string(merge_file).unwrap();

for file in &[
"sample.glsl",
"burger.glsl",
"sample.glsl",
"test.glsl",
"sample.glsl",
] {
for file in &["sample.glsl", "burger.glsl", "sample.glsl", "test.glsl", "sample.glsl"] {
let path = tmp_path.clone();
truth = truth.replacen("!!", &&&path.join("shaders").join("utils").join(file).to_str().unwrap().replace("\\", "\\\\"), 1);
truth = truth.replacen(
"!!",
&path
.join("shaders")
.join("utils")
.join(file)
.to_str()
.unwrap()
.replace('\\', "\\\\"),
1,
);
}
truth = truth.replacen("!!", &&&tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace("\\", "\\\\"), 1);
truth = truth.replacen(
"!!",
&tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"),
1,
);

assert_eq!(result, truth);
}
@ -1097,58 +818,48 @@ fn test_generate_merge_list_04() {
let (_tmp_dir, tmp_path) = copy_to_and_set_root("./testdata/04", &mut server);
server.endpoint.request_shutdown();

let final_idx = server.graph.borrow_mut()
let final_idx = server
.graph
.borrow_mut()
//.add_node(&format!("{}/shaders/{}", tmp_path, "final.fsh").try_into().unwrap());
.add_node(&tmp_path.join("shaders").join("final.fsh"));
let utilities_idx = server.graph.borrow_mut()
let utilities_idx = server
.graph
.borrow_mut()
//.add_node(&format!("{}/shaders/utils/{}", tmp_path, "utilities.glsl").try_into().unwrap());
.add_node(&tmp_path.join("shaders").join("utils").join("utilities.glsl"));
let stuff1_idx = server.graph.borrow_mut()
let stuff1_idx = server
.graph
.borrow_mut()
//.add_node(&format!("{}/shaders/utils/{}", tmp_path, "stuff1.glsl").try_into().unwrap());
.add_node(&tmp_path.join("shaders").join("utils").join("stuff1.glsl"));
let stuff2_idx = server.graph.borrow_mut()
let stuff2_idx = server
.graph
.borrow_mut()
//.add_node(&format!("{}/shaders/utils/{}", tmp_path, "stuff2.glsl").try_into().unwrap());
.add_node(&tmp_path.join("shaders").join("utils").join("stuff2.glsl"));
let matrices_idx = server.graph.borrow_mut()
let matrices_idx = server
.graph
.borrow_mut()
//.add_node(&format!("{}/shaders/lib/{}", tmp_path, "matrices.glsl").try_into().unwrap());
.add_node(&tmp_path.join("shaders").join("lib").join("matrices.glsl"));

server.graph.borrow_mut().add_edge(
final_idx,
utilities_idx,
IncludePosition {
line: 2,
start: 0,
end: 0,
},
);
server.graph.borrow_mut().add_edge(
utilities_idx,
stuff1_idx,
IncludePosition {
line: 0,
start: 0,
end: 0,
},
);
server.graph.borrow_mut().add_edge(
utilities_idx,
stuff2_idx,
IncludePosition {
line: 1,
start: 0,
end: 0,
},
);
server.graph.borrow_mut().add_edge(
final_idx,
matrices_idx,
IncludePosition {
line: 3,
start: 0,
end: 0,
},
);
server
.graph
.borrow_mut()
.add_edge(final_idx, utilities_idx, IncludePosition { line: 2, start: 0, end: 0 });
server
.graph
.borrow_mut()
.add_edge(utilities_idx, stuff1_idx, IncludePosition { line: 0, start: 0, end: 0 });
server
.graph
.borrow_mut()
.add_edge(utilities_idx, stuff2_idx, IncludePosition { line: 1, start: 0, end: 0 });
server
.graph
.borrow_mut()
.add_edge(final_idx, matrices_idx, IncludePosition { line: 3, start: 0, end: 0 });

let nodes = server.get_dfs_for_node(final_idx).unwrap();
let sources = server.load_sources(&nodes).unwrap();
@ -1156,7 +867,7 @@ fn test_generate_merge_list_04() {
let graph_borrow = server.graph.borrow();
let result = merge_views::generate_merge_list(&nodes, &sources, &graph_borrow);

let merge_file = tmp_path.clone().join("shaders").join("final.fsh.merge");
let merge_file = tmp_path.join("shaders").join("final.fsh.merge");

let mut truth = fs::read_to_string(merge_file).unwrap();
@ -1168,12 +879,52 @@ fn test_generate_merge_list_04() {
PathBuf::new().join("utils").join("utilities.glsl").to_str().unwrap(),
PathBuf::new().join("final.fsh").to_str().unwrap(),
PathBuf::new().join("lib").join("matrices.glsl").to_str().unwrap(),
PathBuf::new().join("final.fsh").to_str().unwrap()
PathBuf::new().join("final.fsh").to_str().unwrap(),
] {
let path = tmp_path.clone();
//path.f
truth = truth.replacen("!!", &path.join("shaders").join(file).to_str().unwrap().replace("\\", "\\\\"), 1);
truth = truth.replacen("!!", &path.join("shaders").join(file).to_str().unwrap().replace('\\', "\\\\"), 1);
}

assert_eq!(result, truth);
}
#[test]
fn test_nvidia_diagnostics() {
let mut mockgl = opengl::MockShaderValidator::new();
mockgl.expect_vendor().returning(|| "NVIDIA".into());
let server = new_temp_server(Some(Box::new(mockgl)));

let output = "/home/noah/.minecraft/shaderpacks/test/shaders/final.fsh(9) : error C0000: syntax error, unexpected '}', expecting ',' or ';' at token \"}\"";

let results = server.parse_validator_stdout(
&PathBuf::from_str("/home/noah/.minecraft/shaderpacks/test").unwrap(),
output.to_string(),
"",
);
assert_eq!(results.len(), 1);
let first = results.into_iter().next().unwrap();
assert_eq!(
first.0,
url::Url::from_file_path("/home/noah/.minecraft/shaderpacks/test/shaders/final.fsh").unwrap()
);
server.endpoint.request_shutdown();
}

#[test]
fn test_amd_diagnostics() {
let mut mockgl = opengl::MockShaderValidator::new();
mockgl.expect_vendor().returning(|| "ATI Technologies".into());
let server = new_temp_server(Some(Box::new(mockgl)));

let output = "ERROR: 0:1: '' : syntax error: #line
ERROR: 0:10: '' : syntax error: #line
ERROR: 0:15: 'varying' : syntax error: syntax error
";

let results = server.parse_validator_stdout(&PathBuf::from_str("/home/test").unwrap(), output.to_string(), "");
assert_eq!(results.len(), 1);
let first = results.into_iter().next().unwrap();
assert_eq!(first.1.len(), 3);
server.endpoint.request_shutdown();
}
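The two tests above pin down the vendor-specific formats the server has to parse: NVIDIA drivers emit `path(line) : error C0000: message`, while AMD/ATI drivers emit `ERROR: 0:line: 'token' : message`. A hedged sketch of matching both shapes with the regex crate; these patterns are written from the sample strings in the tests, not copied from the server:

use regex::Regex;

fn main() {
    let nvidia = Regex::new(r"^(?P<file>.+)\((?P<line>\d+)\) : (?P<msg>.+)$").unwrap();
    let amd = Regex::new(r"^ERROR: \d+:(?P<line>\d+): (?P<msg>.+)$").unwrap();

    let n = "/home/noah/.minecraft/shaderpacks/test/shaders/final.fsh(9) : error C0000: syntax error";
    let a = "ERROR: 0:15: 'varying' : syntax error: syntax error";

    // pull the line number out of each vendor's format
    assert_eq!(nvidia.captures(n).unwrap().name("line").unwrap().as_str(), "9");
    assert_eq!(amd.captures(a).unwrap().name("line").unwrap().as_str(), "15");
}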
@ -1,27 +1,38 @@
use std::path::PathBuf;

use slog_scope::trace;
use anyhow::Result;
use path_slash::PathBufExt;
use url::Url;
use anyhow::Result;

pub trait FromUrl {
fn from_url(u: Url) -> Self;
}

pub trait FromJson {
fn from_json(v: &serde_json::value::Value) -> Result<Self> where Self: Sized;
fn from_json(v: &serde_json::value::Value) -> Result<Self>
where
Self: Sized;
}

impl FromUrl for PathBuf {
#[cfg(target_family = "windows")]
fn from_url(u: Url) -> Self {
let path = percent_encoding::percent_decode_str(u.path().strip_prefix("/").unwrap()).decode_utf8().unwrap();
let path = percent_encoding::percent_decode_str(u.path().strip_prefix('/').unwrap())
.decode_utf8()
.unwrap();

trace!("converted win path from url"; "old" => u.as_str(), "new" => path.to_string());

PathBuf::from_slash(path)
}

#[cfg(target_family = "unix")]
fn from_url(u: Url) -> Self {
let path = percent_encoding::percent_decode_str(u.path()).decode_utf8().unwrap();

trace!("converted unix path from url"; "old" => u.as_str(), "new" => path.to_string());

PathBuf::from_slash(path)
}
}
@ -29,27 +40,34 @@ impl FromUrl for PathBuf {
impl FromJson for PathBuf {
#[cfg(target_family = "windows")]
fn from_json(v: &serde_json::value::Value) -> Result<Self>
where Self: Sized {
where
Self: Sized,
{
if !v.is_string() {
return Err(anyhow::format_err!("cannot convert {:?} to PathBuf", v));
}
let path = v.to_string();
let path = percent_encoding::percent_decode_str(
path.trim_start_matches('"').trim_end_matches('"').strip_prefix("/").unwrap()
).decode_utf8()?;
let path = percent_encoding::percent_decode_str(path.trim_start_matches('"').trim_end_matches('"').strip_prefix('/').unwrap())
.decode_utf8()?;

trace!("converted win path from json"; "old" => v.to_string(), "new" => path.to_string());

Ok(PathBuf::from_slash(path))
}

#[cfg(target_family = "unix")]
fn from_json(v: &serde_json::value::Value) -> Result<Self>
where Self: Sized {
where
Self: Sized,
{
if !v.is_string() {
return Err(anyhow::format_err!("cannot convert {:?} to PathBuf", v));
}
let path = v.to_string();
let path = percent_encoding::percent_decode_str(
path.trim_start_matches('"').trim_end_matches('"')
).decode_utf8()?;
let path = percent_encoding::percent_decode_str(path.trim_start_matches('"').trim_end_matches('"')).decode_utf8()?;

trace!("converted unix path from json"; "old" => v.to_string(), "new" => path.to_string());

Ok(PathBuf::from_slash(path))
}
}
}
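The FromUrl/FromJson impls above exist because LSP clients hand the server percent-encoded file:// URLs and JSON strings (with a leading slash before the drive letter on Windows). A minimal unix-flavoured sketch using the same percent-encoding and url crates; the helper name is illustrative:

use std::path::PathBuf;
use url::Url;

fn path_from_url(u: &Url) -> PathBuf {
    // undo percent-encoding such as %20 before building the path
    let decoded = percent_encoding::percent_decode_str(u.path()).decode_utf8().unwrap();
    PathBuf::from(decoded.as_ref())
}

fn main() {
    let u = Url::parse("file:///home/test/shader%20packs/shaders/final.fsh").unwrap();
    assert_eq!(path_from_url(&u), PathBuf::from("/home/test/shader packs/shaders/final.fsh"));
}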