Mirror of https://github.com/Strum355/mcshader-lsp.git, synced 2025-07-23 19:25:51 +00:00
big heckin reworkerino to use the standard #line directive as per spec
This commit is contained in:
parent cb7c9b8b49
commit d3365c3bff
52 changed files with 574 additions and 398 deletions
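For orientation: the old merged view emitted directives of the form #line <n> "<path>", which relied on the GL_GOOGLE_cpp_style_line_directive extension, while the reworked code emits the spec-standard #line <n> <source-string-number> form and keeps the path only as a trailing comment. A minimal Rust sketch of the two directive strings (the line number and path are illustrative, not taken from the diff):

    // Old, extension-dependent form (as seen in the removed testdata merge files):
    let old = format!("#line {} \"{}\"\n", 1, "/shaders/common.glsl");
    assert_eq!(old, "#line 1 \"/shaders/common.glsl\"\n");

    // New, spec-standard form: the second integer is a SourceMapper source string
    // number; the path survives only as a comment for human readers.
    let new = format!("#line {} {} // {}\n", 1, 1, "/shaders/common.glsl");
    assert_eq!(new, "#line 1 1 // /shaders/common.glsl\n");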
63
server/Cargo.lock
generated
|
@ -61,21 +61,6 @@ version = "0.12.3"
|
|||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff"
|
||||
|
||||
[[package]]
|
||||
name = "bit-set"
|
||||
version = "0.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6e11e16035ea35e4e5997b393eacbf6f63983188f7a2ad25bfb13465f5ad59de"
|
||||
dependencies = [
|
||||
"bit-vec",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bit-vec"
|
||||
version = "0.6.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "1.2.1"
|
||||
|
@ -131,15 +116,6 @@ dependencies = [
|
|||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "chan"
|
||||
version = "0.1.23"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d14956a3dae065ffaa0d92ece848ab4ced88d32361e7fdfbfd653a5c454a1ed8"
|
||||
dependencies = [
|
||||
"rand 0.3.23",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cocoa"
|
||||
version = "0.24.0"
|
||||
|
@ -700,6 +676,26 @@ dependencies = [
|
|||
"cfg-if 1.0.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "logging"
|
||||
version = "0.9.5"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"rand 0.8.5",
|
||||
"slog",
|
||||
"slog-atomic",
|
||||
"slog-scope",
|
||||
"slog-term",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "logging_macro"
|
||||
version = "0.9.5"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lsp-types"
|
||||
version = "0.86.0"
|
||||
|
@ -739,30 +735,25 @@ name = "mcshader-lsp"
|
|||
version = "0.9.5"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"arc-swap",
|
||||
"bit-set",
|
||||
"chan",
|
||||
"ctor",
|
||||
"fs_extra",
|
||||
"gl",
|
||||
"glutin",
|
||||
"hamcrest2",
|
||||
"lazy_static",
|
||||
"logging",
|
||||
"logging_macro",
|
||||
"mockall",
|
||||
"once_cell",
|
||||
"path-slash",
|
||||
"percent-encoding",
|
||||
"petgraph",
|
||||
"pretty_assertions",
|
||||
"rand 0.8.5",
|
||||
"regex",
|
||||
"rust_lsp",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"slog",
|
||||
"slog-atomic",
|
||||
"slog-scope",
|
||||
"slog-term",
|
||||
"tempdir",
|
||||
"thiserror",
|
||||
"tree-sitter",
|
||||
|
@ -1205,16 +1196,6 @@ dependencies = [
|
|||
"proc-macro2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.3.23"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "64ac302d8f83c0c1974bf758f6b041c6c8ada916fbb44a609158ca8b064cc76c"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"rand 0.4.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.4.6"
|
||||
|
|
|
@ -1,40 +1,6 @@
|
|||
[package]
|
||||
name = "mcshader-lsp"
|
||||
version = "0.9.5"
|
||||
authors = ["Noah Santschi-Cooney <noah@santschi-cooney.ch>"]
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
rust_lsp = { git = "https://github.com/Strum355/RustLSP", branch = "master" }
|
||||
serde_json = "1.0"
|
||||
serde = "1.0"
|
||||
walkdir = "2.3"
|
||||
petgraph = "0.6"
|
||||
lazy_static = "1.4"
|
||||
regex = "1.4"
|
||||
chan = "0.1"
|
||||
url = "2.2"
|
||||
percent-encoding = "2.1"
|
||||
anyhow = "1.0"
|
||||
bit-set = "0.5"
|
||||
thiserror = "1.0"
|
||||
glutin = "0.28"
|
||||
gl = "0.14"
|
||||
ctor = "0.1"
|
||||
mockall = "0.11"
|
||||
path-slash = "0.1"
|
||||
slog = { version = "2.7", features = [ "max_level_trace", "release_max_level_trace" ] }
|
||||
slog-term = "2.9"
|
||||
slog-scope = "4.4"
|
||||
slog-atomic = "3.1"
|
||||
once_cell = "1.7"
|
||||
rand = "0.8"
|
||||
arc-swap = "1.5.0"
|
||||
tree-sitter = "0.20.6"
|
||||
tree-sitter-glsl = "0.1.2"
|
||||
|
||||
[dev-dependencies]
|
||||
tempdir = "0.3"
|
||||
fs_extra = "1.2"
|
||||
hamcrest2 = "*"
|
||||
pretty_assertions = "1.1"
|
||||
[workspace]
|
||||
members = [
|
||||
"main",
|
||||
"logging",
|
||||
"logging_macro"
|
||||
]
|
|
@ -4,7 +4,7 @@ watchtest:
|
|||
RUST_BACKTRACE=0 cargo watch -x test -i Makefile
|
||||
|
||||
test:
|
||||
RUST_LIB_BACKTRACE=0 RUST_BACKTRACE=0 cargo test
|
||||
RUST_LIB_BACKTRACE=0 RUST_BACKTRACE=0 cargo test -- --nocapture --color always
|
||||
|
||||
build:
|
||||
cargo build
|
||||
|
|
13
server/logging/Cargo.toml
Normal file
|
@ -0,0 +1,13 @@
|
|||
[package]
|
||||
name = "logging"
|
||||
version = "0.9.5"
|
||||
authors = ["Noah Santschi-Cooney <noah@santschi-cooney.ch>"]
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
slog = { version = "2.7", features = [ "max_level_trace", "release_max_level_trace" ] }
|
||||
slog-term = "2.9"
|
||||
slog-scope = "4.4"
|
||||
slog-atomic = "3.1"
|
||||
rand = "0.8"
|
||||
lazy_static = "1.4"
|
|
@ -2,7 +2,10 @@ use rand::{rngs, Rng};
|
|||
use slog::slog_o;
|
||||
use slog_scope::GlobalLoggerGuard;
|
||||
use slog_term::{FullFormat, PlainSyncDecorator};
|
||||
use std::{cell::RefCell, io::Stderr, sync::Arc};
|
||||
use std::{cell::RefCell, sync::Arc};
|
||||
|
||||
use std::io::Stderr;
|
||||
|
||||
use lazy_static::lazy_static;
|
||||
use slog::*;
|
||||
use slog_atomic::*;
|
||||
|
@ -13,9 +16,7 @@ fn new_trace_id() -> String {
|
|||
}
|
||||
|
||||
pub fn slog_with_trace_id<F: FnOnce()>(f: F) {
|
||||
slog_scope::scope(&slog_scope::logger().new(slog_o!("trace" => new_trace_id())), || {
|
||||
f()
|
||||
})
|
||||
slog_scope::scope(&slog_scope::logger().new(slog_o!("trace" => new_trace_id())), f)
|
||||
}
|
||||
|
||||
pub fn set_logger_with_level(level: Level) -> GlobalLoggerGuard {
|
12
server/logging_macro/Cargo.toml
Normal file
|
@ -0,0 +1,12 @@
|
|||
[package]
|
||||
name = "logging_macro"
|
||||
version = "0.9.5"
|
||||
authors = ["Noah Santschi-Cooney <noah@santschi-cooney.ch>"]
|
||||
edition = "2021"
|
||||
|
||||
[lib]
|
||||
proc-macro = true
|
||||
|
||||
[dependencies]
|
||||
quote = "1.0"
|
||||
syn = { version = "1.0", features = [ "full" ] }
|
24
server/logging_macro/src/lib.rs
Normal file
|
@ -0,0 +1,24 @@
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, parse_quote, ItemFn};

#[proc_macro_attribute]
pub fn log_scope(_args: TokenStream, function: TokenStream) -> TokenStream {
    let mut function = parse_macro_input!(function as ItemFn);

    let function_name = function.sig.ident.to_string();

    let stmts = function.block.stmts;

    function.block = Box::new(parse_quote!({
        use slog::{slog_o, FnValue, Level};
        use std::thread::current;

        let _guard = logging::set_logger_with_level(Level::Trace);
        slog_scope::scope(&slog_scope::logger().new(slog_o!("test_name" => #function_name, "thread_num" => FnValue(|_| format!("{:?}", current().id())))), || {
            #(#stmts)*
        });
    }));

    TokenStream::from(quote!(#function))
}
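Elsewhere in this diff the new attribute macro is attached to test functions; a minimal usage sketch (the test body below is illustrative, not part of the change):

    #[test]
    #[logging_macro::log_scope]
    fn some_test() {
        // the macro wraps this body in a slog scope tagged with the test name and
        // thread id, and installs a Trace-level logger for the duration of the test
        assert_eq!(1 + 1, 2);
    }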
|
35
server/main/Cargo.toml
Normal file
|
@ -0,0 +1,35 @@
|
|||
[package]
|
||||
name = "mcshader-lsp"
|
||||
version = "0.9.5"
|
||||
authors = ["Noah Santschi-Cooney <noah@santschi-cooney.ch>"]
|
||||
edition = "2021"
|
||||
|
||||
[dependencies]
|
||||
rust_lsp = { git = "https://github.com/Strum355/RustLSP", branch = "master" }
|
||||
serde_json = "1.0"
|
||||
serde = "1.0"
|
||||
walkdir = "2.3"
|
||||
petgraph = "0.6"
|
||||
lazy_static = "1.4"
|
||||
regex = "1.4"
|
||||
url = "2.2"
|
||||
percent-encoding = "2.1"
|
||||
anyhow = "1.0"
|
||||
thiserror = "1.0"
|
||||
glutin = "0.28"
|
||||
gl = "0.14"
|
||||
mockall = "0.11"
|
||||
path-slash = "0.1"
|
||||
slog = { version = "2.7", features = [ "max_level_trace", "release_max_level_trace" ] }
|
||||
slog-scope = "4.4"
|
||||
once_cell = "1.7"
|
||||
tree-sitter = "0.20.6"
|
||||
tree-sitter-glsl = "0.1.2"
|
||||
logging = { path = "../logging" }
|
||||
logging_macro = { path = "../logging_macro" }
|
||||
|
||||
[dev-dependencies]
|
||||
tempdir = "0.3"
|
||||
fs_extra = "1.2"
|
||||
hamcrest2 = "*"
|
||||
pretty_assertions = "1.1"
|
|
@ -15,6 +15,7 @@ use std::fs;
|
|||
|
||||
use crate::dfs;
|
||||
use crate::merge_views::FilialTuple;
|
||||
use crate::source_mapper::SourceMapper;
|
||||
use crate::{graph::CachedStableGraph, merge_views, url_norm::FromJson};
|
||||
|
||||
use super::Invokeable;
|
||||
|
@ -100,8 +101,9 @@ impl Invokeable for VirtualMergedDocument {
|
|||
};
|
||||
all_sources.extend(sources);
|
||||
|
||||
let mut source_mapper = SourceMapper::new(all_sources.len());
|
||||
let graph = self.graph.borrow();
|
||||
let view = merge_views::generate_merge_list(&tree, &all_sources, &graph);
|
||||
let view = merge_views::generate_merge_list(&tree, &all_sources, &graph, &mut source_mapper);
|
||||
return Ok(serde_json::value::Value::String(view));
|
||||
}
|
||||
return Err(format_err!(
|
|
@ -166,6 +166,7 @@ mod dfs_test {
|
|||
use crate::{dfs, IncludePosition};
|
||||
|
||||
#[test]
|
||||
#[logging_macro::log_scope]
|
||||
fn test_graph_dfs() {
|
||||
{
|
||||
let mut graph = CachedStableGraph::new();
|
||||
|
@ -272,6 +273,7 @@ mod dfs_test {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[logging_macro::log_scope]
|
||||
fn test_graph_dfs_cycle() {
|
||||
{
|
||||
let mut graph = CachedStableGraph::new();
|
194
server/main/src/diagnostics_parser.rs
Normal file
|
@ -0,0 +1,194 @@
|
|||
use std::{collections::HashMap, lazy::OnceCell, path::Path};
|
||||
|
||||
use regex::Regex;
|
||||
use rust_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, Position, Range};
|
||||
use slog_scope::debug;
|
||||
use url::Url;
|
||||
|
||||
use crate::{
|
||||
consts,
|
||||
graph::CachedStableGraph,
|
||||
opengl,
|
||||
source_mapper::{SourceMapper, SourceNum},
|
||||
};
|
||||
|
||||
pub struct DiagnosticsParser<'a, T: opengl::ShaderValidator + ?Sized> {
|
||||
line_offset: OnceCell<u32>,
|
||||
line_regex: OnceCell<Regex>,
|
||||
vendor_querier: &'a T,
|
||||
}
|
||||
|
||||
impl<'a, T: opengl::ShaderValidator + ?Sized> DiagnosticsParser<'a, T> {
|
||||
pub fn new(vendor_querier: &'a T) -> Self {
|
||||
DiagnosticsParser {
|
||||
line_offset: OnceCell::new(),
|
||||
line_regex: OnceCell::new(),
|
||||
vendor_querier,
|
||||
}
|
||||
}
|
||||
|
||||
fn get_line_regex(&self) -> &Regex {
|
||||
self.line_regex.get_or_init(|| match self.vendor_querier.vendor().as_str() {
|
||||
"NVIDIA Corporation" => {
|
||||
Regex::new(r#"^(?P<filepath>\d+)\((?P<linenum>\d+)\) : (?P<severity>error|warning) [A-C]\d+: (?P<output>.+)"#).unwrap()
|
||||
}
|
||||
_ => Regex::new(r#"^(?P<severity>ERROR|WARNING): (?P<filepath>[^?<>*|"\n]+):(?P<linenum>\d+): '[a-z]*' : (?P<output>.+)$"#)
|
||||
.unwrap(),
|
||||
})
|
||||
}
|
||||
|
||||
fn get_line_offset(&self) -> u32 {
|
||||
*self.line_offset.get_or_init(|| match self.vendor_querier.vendor().as_str() {
|
||||
"ATI Technologies" => 0,
|
||||
_ => 2,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn parse_diagnostics_output(
|
||||
&self, output: String, uri: &Path, source_mapper: &SourceMapper, graph: &CachedStableGraph,
|
||||
) -> HashMap<Url, Vec<Diagnostic>> {
|
||||
let output_lines = output.split('\n').collect::<Vec<&str>>();
|
||||
let mut diagnostics: HashMap<Url, Vec<Diagnostic>> = HashMap::with_capacity(output_lines.len());
|
||||
|
||||
debug!("diagnostics regex selected"; "regex" => self.get_line_regex() .as_str());
|
||||
|
||||
for line in output_lines {
|
||||
let diagnostic_capture = match self.get_line_regex().captures(line) {
|
||||
Some(d) => d,
|
||||
None => continue,
|
||||
};
|
||||
|
||||
debug!("found match for output line"; "line" => line, "capture" => format!("{:?}", diagnostic_capture));
|
||||
|
||||
let msg = diagnostic_capture.name("output").unwrap().as_str();
|
||||
|
||||
let line = match diagnostic_capture.name("linenum") {
|
||||
Some(c) => c.as_str().parse::<u32>().unwrap_or(0),
|
||||
None => 0,
|
||||
} - self.get_line_offset();
|
||||
|
||||
// TODO: line matching maybe
|
||||
/* let line_text = source_lines[line as usize];
|
||||
let leading_whitespace = line_text.len() - line_text.trim_start().len(); */
|
||||
|
||||
let severity = match diagnostic_capture.name("severity") {
|
||||
Some(c) => match c.as_str().to_lowercase().as_str() {
|
||||
"error" => DiagnosticSeverity::Error,
|
||||
"warning" => DiagnosticSeverity::Warning,
|
||||
_ => DiagnosticSeverity::Information,
|
||||
},
|
||||
_ => DiagnosticSeverity::Information,
|
||||
};
|
||||
|
||||
let origin = match diagnostic_capture.name("filepath") {
|
||||
Some(o) => {
|
||||
let source_num: SourceNum = o.as_str().parse::<usize>().unwrap().into();
|
||||
let graph_node = source_mapper.get_node(source_num);
|
||||
graph.get_node(graph_node).to_str().unwrap().to_string()
|
||||
}
|
||||
None => uri.to_str().unwrap().to_string(),
|
||||
};
|
||||
|
||||
let diagnostic = Diagnostic {
|
||||
range: Range::new(
|
||||
/* Position::new(line, leading_whitespace as u64),
|
||||
Position::new(line, line_text.len() as u64) */
|
||||
Position::new(line, 0),
|
||||
Position::new(line, 1000),
|
||||
),
|
||||
code: None,
|
||||
severity: Some(severity),
|
||||
source: Some(consts::SOURCE.into()),
|
||||
message: msg.trim().into(),
|
||||
related_information: None,
|
||||
tags: None,
|
||||
code_description: Option::None,
|
||||
data: Option::None,
|
||||
};
|
||||
|
||||
let origin_url = Url::from_file_path(origin).unwrap();
|
||||
match diagnostics.get_mut(&origin_url) {
|
||||
Some(d) => d.push(diagnostic),
|
||||
None => {
|
||||
diagnostics.insert(origin_url, vec![diagnostic]);
|
||||
}
|
||||
};
|
||||
}
|
||||
diagnostics
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod diagnostics_test {
|
||||
use std::path::PathBuf;
|
||||
|
||||
use slog::{slog_o, Level};
|
||||
use url::Url;
|
||||
|
||||
use crate::{
|
||||
diagnostics_parser::DiagnosticsParser, opengl::MockShaderValidator, source_mapper::SourceMapper, test::new_temp_server,
|
||||
};
|
||||
|
||||
#[test]
|
||||
#[logging_macro::log_scope]
|
||||
fn test_nvidia_diagnostics() {
|
||||
slog_scope::scope(&slog_scope::logger().new(slog_o!("driver" => "nvidia")), || {
|
||||
let mut mockgl = MockShaderValidator::new();
|
||||
mockgl.expect_vendor().returning(|| "NVIDIA Corporation".into());
|
||||
let server = new_temp_server(Some(Box::new(mockgl)));
|
||||
|
||||
let output = "0(9) : error C0000: syntax error, unexpected '}', expecting ',' or ';' at token \"}\"";
|
||||
|
||||
#[cfg(target_family = "unix")]
|
||||
let path: PathBuf = "/home/noah/.minecraft/shaderpacks/test/shaders/final.fsh".into();
|
||||
#[cfg(target_family = "windows")]
|
||||
let path: PathBuf = "c:\\home\\noah\\.minecraft\\shaderpacks\\test\\shaders\\final.fsh".into();
|
||||
|
||||
let mut source_mapper = SourceMapper::new(0);
|
||||
source_mapper.get_num(server.graph.borrow_mut().add_node(&path));
|
||||
|
||||
let parser = DiagnosticsParser::new(server.opengl_context.as_ref());
|
||||
|
||||
let results =
|
||||
parser.parse_diagnostics_output(output.to_string(), path.parent().unwrap(), &source_mapper, &server.graph.borrow());
|
||||
|
||||
assert_eq!(results.len(), 1);
|
||||
let first = results.into_iter().next().unwrap();
|
||||
assert_eq!(first.0, Url::from_file_path(path).unwrap());
|
||||
server.endpoint.request_shutdown();
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[logging_macro::log_scope]
|
||||
fn test_amd_diagnostics() {
|
||||
slog_scope::scope(&slog_scope::logger().new(slog_o!("driver" => "amd")), || {
|
||||
let mut mockgl = MockShaderValidator::new();
|
||||
mockgl.expect_vendor().returning(|| "ATI Technologies".into());
|
||||
let server = new_temp_server(Some(Box::new(mockgl)));
|
||||
|
||||
let output = "ERROR: 0:1: '' : syntax error: #line
|
||||
ERROR: 0:10: '' : syntax error: #line
|
||||
ERROR: 0:15: 'varying' : syntax error: syntax error
|
||||
";
|
||||
|
||||
#[cfg(target_family = "unix")]
|
||||
let path: PathBuf = "/home/noah/.minecraft/shaderpacks/test/shaders/final.fsh".into();
|
||||
#[cfg(target_family = "windows")]
|
||||
let path: PathBuf = "c:\\home\\noah\\.minecraft\\shaderpacks\\test\\shaders\\final.fsh".into();
|
||||
|
||||
let mut source_mapper = SourceMapper::new(0);
|
||||
source_mapper.get_num(server.graph.borrow_mut().add_node(&path));
|
||||
|
||||
let parser = DiagnosticsParser::new(server.opengl_context.as_ref());
|
||||
|
||||
let results =
|
||||
parser.parse_diagnostics_output(output.to_string(), path.parent().unwrap(), &source_mapper, &server.graph.borrow());
|
||||
|
||||
assert_eq!(results.len(), 1);
|
||||
let first = results.into_iter().next().unwrap();
|
||||
assert_eq!(first.1.len(), 3);
|
||||
server.endpoint.request_shutdown();
|
||||
});
|
||||
}
|
||||
}
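As a rough sketch of what the NVIDIA branch of the regex extracts from the sample line used in test_nvidia_diagnostics above (illustrative only; note that the filepath group now captures a source string number rather than a path):

    use regex::Regex;

    let re = Regex::new(
        r#"^(?P<filepath>\d+)\((?P<linenum>\d+)\) : (?P<severity>error|warning) [A-C]\d+: (?P<output>.+)"#,
    )
    .unwrap();
    let line = "0(9) : error C0000: syntax error, unexpected '}', expecting ',' or ';' at token \"}\"";
    let caps = re.captures(line).unwrap();
    assert_eq!(&caps["filepath"], "0");  // source string number, resolved back to a path via SourceMapper::get_node
    assert_eq!(&caps["linenum"], "9");   // later adjusted by the vendor-specific line offset
    assert_eq!(&caps["severity"], "error");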
|
|
@ -170,6 +170,7 @@ mod graph_test {
|
|||
use crate::{graph::CachedStableGraph, IncludePosition};
|
||||
|
||||
#[test]
|
||||
#[logging_macro::log_scope]
|
||||
fn test_graph_two_connected_nodes() {
|
||||
let mut graph = CachedStableGraph::new();
|
||||
|
||||
|
@ -211,6 +212,7 @@ mod graph_test {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[logging_macro::log_scope]
|
||||
fn test_collect_root_ancestors() {
|
||||
{
|
||||
let mut graph = CachedStableGraph::new();
|
|
@ -1,3 +1,5 @@
|
|||
#![feature(once_cell)]
|
||||
|
||||
use merge_views::FilialTuple;
|
||||
use rust_lsp::jsonrpc::{method_types::*, *};
|
||||
use rust_lsp::lsp::*;
|
||||
|
@ -45,11 +47,11 @@ mod consts;
|
|||
mod dfs;
|
||||
mod diagnostics_parser;
|
||||
mod graph;
|
||||
mod logging;
|
||||
mod lsp_ext;
|
||||
mod merge_views;
|
||||
mod navigation;
|
||||
mod opengl;
|
||||
mod source_mapper;
|
||||
mod url_norm;
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -317,9 +319,11 @@ impl MinecraftShaderLanguageServer {
|
|||
|
||||
all_sources.extend(self.load_sources(&tree)?);
|
||||
|
||||
let mut source_mapper = source_mapper::SourceMapper::new(all_sources.len());
|
||||
|
||||
let view = {
|
||||
let graph = self.graph.borrow();
|
||||
merge_views::generate_merge_list(&tree, &all_sources, &graph)
|
||||
merge_views::generate_merge_list(&tree, &all_sources, &graph, &mut source_mapper)
|
||||
};
|
||||
|
||||
let root_path = self.graph.borrow().get_node(root);
|
||||
|
@ -347,18 +351,17 @@ impl MinecraftShaderLanguageServer {
|
|||
return Ok(diagnostics);
|
||||
};
|
||||
|
||||
let stdout = match self.opengl_context.clone().validate(tree_type, view) {
|
||||
let stdout = match self.compile_shader_source(&view, tree_type, &root_path) {
|
||||
Some(s) => s,
|
||||
None => {
|
||||
back_fill(&all_sources, &mut diagnostics);
|
||||
return Ok(diagnostics);
|
||||
}
|
||||
};
|
||||
diagnostics.extend(diagnostics_parser::parse_diagnostics_output(
|
||||
stdout,
|
||||
uri,
|
||||
self.opengl_context.as_ref(),
|
||||
));
|
||||
|
||||
let diagnostics_parser = diagnostics_parser::DiagnosticsParser::new(self.opengl_context.as_ref());
|
||||
|
||||
diagnostics.extend(diagnostics_parser.parse_diagnostics_output(stdout, uri, &source_mapper, &self.graph.borrow()));
|
||||
} else {
|
||||
let mut all_trees: Vec<(TreeType, Vec<(NodeIndex, Option<_>)>)> = Vec::new();
|
||||
|
||||
|
@ -399,20 +402,22 @@ impl MinecraftShaderLanguageServer {
|
|||
}
|
||||
|
||||
for tree in all_trees {
|
||||
// bit over-zealous in allocation but better than having to resize
|
||||
let mut source_mapper = source_mapper::SourceMapper::new(all_sources.len());
|
||||
let view = {
|
||||
let graph = self.graph.borrow();
|
||||
merge_views::generate_merge_list(&tree.1, &all_sources, &graph)
|
||||
merge_views::generate_merge_list(&tree.1, &all_sources, &graph, &mut source_mapper)
|
||||
};
|
||||
|
||||
let stdout = match self.opengl_context.clone().validate(tree.0, view) {
|
||||
let root_path = self.graph.borrow().get_node(tree.1[0].0);
|
||||
let stdout = match self.compile_shader_source(&view, tree.0, &root_path) {
|
||||
Some(s) => s,
|
||||
None => continue,
|
||||
};
|
||||
diagnostics.extend(diagnostics_parser::parse_diagnostics_output(
|
||||
stdout,
|
||||
uri,
|
||||
self.opengl_context.as_ref(),
|
||||
));
|
||||
|
||||
let diagnostics_parser = diagnostics_parser::DiagnosticsParser::new(self.opengl_context.as_ref());
|
||||
|
||||
diagnostics.extend(diagnostics_parser.parse_diagnostics_output(stdout, uri, &source_mapper, &self.graph.borrow()));
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -420,6 +425,17 @@ impl MinecraftShaderLanguageServer {
|
|||
Ok(diagnostics)
|
||||
}
|
||||
|
||||
fn compile_shader_source(&self, source: &str, tree_type: TreeType, path: &Path) -> Option<String> {
|
||||
let result = self.opengl_context.clone().validate(tree_type, source);
|
||||
match &result {
|
||||
Some(output) => {
|
||||
info!("compilation errors reported"; "errors" => format!("`{}`", output.replace('\n', "\\n")), "tree_root" => path.to_str().unwrap())
|
||||
}
|
||||
None => info!("compilation reported no errors"; "tree_root" => path.to_str().unwrap()),
|
||||
};
|
||||
result
|
||||
}
|
||||
|
||||
pub fn get_dfs_for_node(&self, root: NodeIndex) -> Result<Vec<FilialTuple>, dfs::error::CycleError> {
|
||||
let graph_ref = self.graph.borrow();
|
||||
|
||||
|
@ -674,22 +690,22 @@ impl LanguageServerHandling for MinecraftShaderLanguageServer {
|
|||
fn goto_definition(&mut self, params: TextDocumentPositionParams, completable: LSCompletable<Vec<Location>>) {
|
||||
logging::slog_with_trace_id(|| {
|
||||
let parser = &mut self.tree_sitter.borrow_mut();
|
||||
let parser_ctx = match navigation::ParserContext::new(parser, params.text_document.uri.clone()) {
|
||||
let parser_ctx = match navigation::ParserContext::new(parser, ¶ms.text_document.uri) {
|
||||
Ok(ctx) => ctx,
|
||||
Err(e) => {
|
||||
return completable.complete(Err(MethodError {
|
||||
code: 42069,
|
||||
message: format!("error building parser context: {}", e),
|
||||
message: format!("error building parser context: {}", e.context(params.text_document.uri)),
|
||||
data: (),
|
||||
}))
|
||||
}
|
||||
};
|
||||
|
||||
match parser_ctx.find_definitions(params.text_document.uri, params.position) {
|
||||
match parser_ctx.find_definitions(¶ms.text_document.uri, params.position) {
|
||||
Ok(locations) => completable.complete(Ok(locations)),
|
||||
Err(e) => completable.complete(Err(MethodError {
|
||||
code: 42069,
|
||||
message: format!("error finding definitions: {}", e),
|
||||
message: format!("error finding definitions: {}", e.context(params.text_document.uri)),
|
||||
data: (),
|
||||
})),
|
||||
}
|
|
@ -10,13 +10,16 @@ use core::slice::Iter;
|
|||
use petgraph::stable_graph::NodeIndex;
|
||||
|
||||
use crate::graph::CachedStableGraph;
|
||||
use crate::source_mapper::SourceMapper;
|
||||
|
||||
/// FilialTuple represents a tuple with a child at index 0
|
||||
/// and a parent at index 1. Parent can be nullable in the case of
|
||||
/// the child being a top level node in the tree.
|
||||
pub type FilialTuple = (NodeIndex, Option<NodeIndex>);
|
||||
|
||||
pub fn generate_merge_list<'a>(nodes: &'a [FilialTuple], sources: &'a HashMap<PathBuf, String>, graph: &'a CachedStableGraph) -> String {
|
||||
pub fn generate_merge_list<'a>(
|
||||
nodes: &'a [FilialTuple], sources: &'a HashMap<PathBuf, String>, graph: &'a CachedStableGraph, source_mapper: &mut SourceMapper,
|
||||
) -> String {
|
||||
// contains additionally inserted lines such as #line and other directives, preamble defines etc
|
||||
let mut extra_lines: Vec<String> = Vec::new();
|
||||
extra_lines.reserve((nodes.len() * 2) + 2);
|
||||
|
@ -36,18 +39,24 @@ pub fn generate_merge_list<'a>(nodes: &'a [FilialTuple], sources: &'a HashMap<Pa
|
|||
let first_path = graph.get_node(first);
|
||||
let first_source = sources.get(&first_path).unwrap();
|
||||
|
||||
// seed source_mapper with top-level file
|
||||
source_mapper.get_num(first);
|
||||
|
||||
let version_line_offset = find_version_offset(first_source);
|
||||
let version_char_offsets = char_offset_for_line(version_line_offset, first_source);
|
||||
add_preamble(
|
||||
version_line_offset,
|
||||
version_char_offsets.1,
|
||||
first_path.to_str().unwrap(),
|
||||
first_source,
|
||||
&mut merge_list,
|
||||
&mut extra_lines,
|
||||
);
|
||||
// add_preamble(
|
||||
// version_line_offset,
|
||||
// version_char_offsets.1,
|
||||
// &first_path,
|
||||
// first,
|
||||
// first_source,
|
||||
// &mut merge_list,
|
||||
// &mut extra_lines,
|
||||
// source_mapper,
|
||||
// );
|
||||
|
||||
last_offset_set.insert((first, None), version_char_offsets.1);
|
||||
// last_offset_set.insert((first, None), version_char_offsets.1);
|
||||
last_offset_set.insert((first, None), 0);
|
||||
|
||||
// stack to keep track of the depth first traversal
|
||||
let mut stack = VecDeque::<NodeIndex>::new();
|
||||
|
@ -60,6 +69,7 @@ pub fn generate_merge_list<'a>(nodes: &'a [FilialTuple], sources: &'a HashMap<Pa
|
|||
sources,
|
||||
&mut extra_lines,
|
||||
&mut stack,
|
||||
source_mapper,
|
||||
);
|
||||
|
||||
// now we add a view of the remainder of the root file
|
||||
|
@ -79,6 +89,7 @@ pub fn generate_merge_list<'a>(nodes: &'a [FilialTuple], sources: &'a HashMap<Pa
|
|||
fn create_merge_views<'a>(
|
||||
nodes: &mut Peekable<Iter<FilialTuple>>, merge_list: &mut LinkedList<&'a str>, last_offset_set: &mut HashMap<FilialTuple, usize>,
|
||||
graph: &'a CachedStableGraph, sources: &'a HashMap<PathBuf, String>, extra_lines: &mut Vec<String>, stack: &mut VecDeque<NodeIndex>,
|
||||
source_mapper: &mut SourceMapper,
|
||||
) {
|
||||
loop {
|
||||
let n = match nodes.next() {
|
||||
|
@ -100,7 +111,7 @@ fn create_merge_views<'a>(
|
|||
.insert((parent, stack.back().copied()), char_following_line)
|
||||
.get_or_insert(0);
|
||||
merge_list.push_back(&parent_source[offset..char_for_line]);
|
||||
add_opening_line_directive(&child_path, merge_list, extra_lines);
|
||||
add_opening_line_directive(&child_path, child, merge_list, extra_lines, source_mapper);
|
||||
|
||||
match nodes.peek() {
|
||||
Some(next) => {
|
||||
|
@ -118,7 +129,7 @@ fn create_merge_views<'a>(
|
|||
merge_list.push_back(&child_source[..offset]);
|
||||
last_offset_set.insert((child, Some(parent)), 0);
|
||||
// +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line
|
||||
add_closing_line_directive(edge.line + 2, &parent_path, merge_list, extra_lines);
|
||||
add_closing_line_directive(edge.line + 2, &parent_path, parent, merge_list, extra_lines, source_mapper);
|
||||
// if the next pair's parent is not the current pair's parent, we need to bubble up
|
||||
if stack.contains(&next.1.unwrap()) {
|
||||
return;
|
||||
|
@ -127,7 +138,16 @@ fn create_merge_views<'a>(
|
|||
}
|
||||
|
||||
stack.push_back(parent);
|
||||
create_merge_views(nodes, merge_list, last_offset_set, graph, sources, extra_lines, stack);
|
||||
create_merge_views(
|
||||
nodes,
|
||||
merge_list,
|
||||
last_offset_set,
|
||||
graph,
|
||||
sources,
|
||||
extra_lines,
|
||||
stack,
|
||||
source_mapper,
|
||||
);
|
||||
stack.pop_back();
|
||||
|
||||
let offset = *last_offset_set.get(&(child, Some(parent))).unwrap();
|
||||
|
@ -144,7 +164,7 @@ fn create_merge_views<'a>(
|
|||
}
|
||||
|
||||
// +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line
|
||||
add_closing_line_directive(edge.line + 2, &parent_path, merge_list, extra_lines);
|
||||
add_closing_line_directive(edge.line + 2, &parent_path, parent, merge_list, extra_lines, source_mapper);
|
||||
|
||||
// we need to check the next item at the point of original return further down the callstack
|
||||
if nodes.peek().is_some() && stack.contains(&nodes.peek().unwrap().1.unwrap()) {
|
||||
|
@ -161,7 +181,7 @@ fn create_merge_views<'a>(
|
|||
merge_list.push_back(&child_source[..offset]);
|
||||
last_offset_set.insert((child, Some(parent)), 0);
|
||||
// +2 because edge.line is 0 indexed but #line is 1 indexed and references the *following* line
|
||||
add_closing_line_directive(edge.line + 2, &parent_path, merge_list, extra_lines);
|
||||
add_closing_line_directive(edge.line + 2, &parent_path, parent, merge_list, extra_lines, source_mapper);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -191,38 +211,63 @@ fn find_version_offset(source: &str) -> usize {
|
|||
.map_or(0, |(i, _)| i)
|
||||
}
|
||||
|
||||
fn add_preamble<'a>(
|
||||
version_line_offset: usize, version_char_offset: usize, path: &str, source: &'a str, merge_list: &mut LinkedList<&'a str>,
|
||||
extra_lines: &mut Vec<String>,
|
||||
) {
|
||||
// TODO: Optifine #define preamble
|
||||
merge_list.push_back(&source[..version_char_offset]);
|
||||
let google_line_directive = format!(
|
||||
"#extension GL_GOOGLE_cpp_style_line_directive : enable\n#line {} \"{}\"\n",
|
||||
// +2 because 0 indexed but #line is 1 indexed and references the *following* line
|
||||
version_line_offset + 2,
|
||||
path,
|
||||
);
|
||||
extra_lines.push(google_line_directive);
|
||||
unsafe_get_and_insert(merge_list, extra_lines);
|
||||
}
|
||||
// fn add_preamble<'a>(
|
||||
// version_line_offset: usize, version_char_offset: usize, path: &Path, node: NodeIndex, source: &'a str,
|
||||
// merge_list: &mut LinkedList<&'a str>, extra_lines: &mut Vec<String>, source_mapper: &mut SourceMapper,
|
||||
// ) {
|
||||
// // TODO: Optifine #define preamble
|
||||
// merge_list.push_back(&source[..version_char_offset]);
|
||||
// let google_line_directive = format!(
|
||||
// "#extension GL_GOOGLE_cpp_style_line_directive : enable\n#line {} {} // {}\n",
|
||||
// // +2 because 0 indexed but #line is 1 indexed and references the *following* line
|
||||
// version_line_offset + 2,
|
||||
// source_mapper.get_num(node),
|
||||
// path.to_str().unwrap().replace('\\', "\\\\"),
|
||||
// );
|
||||
// extra_lines.push(google_line_directive);
|
||||
// unsafe_get_and_insert(merge_list, extra_lines);
|
||||
// }
|
||||
|
||||
fn add_opening_line_directive(path: &Path, merge_list: &mut LinkedList<&str>, extra_lines: &mut Vec<String>) {
|
||||
let line_directive = format!("#line 1 \"{}\"\n", path.to_str().unwrap().replace('\\', "\\\\"));
|
||||
fn add_opening_line_directive(
|
||||
path: &Path, node: NodeIndex, merge_list: &mut LinkedList<&str>, extra_lines: &mut Vec<String>, source_mapper: &mut SourceMapper,
|
||||
) {
|
||||
let line_directive = format!(
|
||||
"#line 1 {} // {}\n",
|
||||
source_mapper.get_num(node),
|
||||
path.to_str().unwrap().replace('\\', "\\\\")
|
||||
);
|
||||
extra_lines.push(line_directive);
|
||||
unsafe_get_and_insert(merge_list, extra_lines);
|
||||
}
|
||||
|
||||
fn add_closing_line_directive(line: usize, path: &Path, merge_list: &mut LinkedList<&str>, extra_lines: &mut Vec<String>) {
|
||||
fn add_closing_line_directive(
|
||||
line: usize, path: &Path, node: NodeIndex, merge_list: &mut LinkedList<&str>, extra_lines: &mut Vec<String>,
|
||||
source_mapper: &mut SourceMapper,
|
||||
) {
|
||||
// Optifine doesn't seem to add a leading newline if the previous line was a #line directive
|
||||
let line_directive = if let Some(l) = merge_list.back() {
|
||||
if l.trim().starts_with("#line") {
|
||||
format!("#line {} \"{}\"\n", line, path.to_str().unwrap().replace('\\', "\\\\"))
|
||||
format!(
|
||||
"#line {} {} // {}\n",
|
||||
line,
|
||||
source_mapper.get_num(node),
|
||||
path.to_str().unwrap().replace('\\', "\\\\")
|
||||
)
|
||||
} else {
|
||||
format!("\n#line {} \"{}\"\n", line, path.to_str().unwrap().replace('\\', "\\\\"))
|
||||
format!(
|
||||
"\n#line {} {} // {}\n",
|
||||
line,
|
||||
source_mapper.get_num(node),
|
||||
path.to_str().unwrap().replace('\\', "\\\\")
|
||||
)
|
||||
}
|
||||
} else {
|
||||
format!("\n#line {} \"{}\"\n", line, path.to_str().unwrap().replace('\\', "\\\\"))
|
||||
format!(
|
||||
"\n#line {} {} // {}\n",
|
||||
line,
|
||||
source_mapper.get_num(node),
|
||||
path.to_str().unwrap().replace('\\', "\\\\")
|
||||
)
|
||||
};
|
||||
|
||||
extra_lines.push(line_directive);
|
||||
|
@ -243,10 +288,12 @@ mod merge_view_test {
|
|||
use std::path::PathBuf;
|
||||
|
||||
use crate::merge_views::generate_merge_list;
|
||||
use crate::source_mapper::SourceMapper;
|
||||
use crate::test::{copy_to_and_set_root, new_temp_server};
|
||||
use crate::IncludePosition;
|
||||
|
||||
#[test]
|
||||
#[logging_macro::log_scope]
|
||||
fn test_generate_merge_list_01() {
|
||||
let mut server = new_temp_server(None);
|
||||
|
||||
|
@ -273,16 +320,17 @@ mod merge_view_test {
|
|||
let sources = server.load_sources(&nodes).unwrap();
|
||||
|
||||
let graph_borrow = server.graph.borrow();
|
||||
let result = generate_merge_list(&nodes, &sources, &graph_borrow);
|
||||
let mut source_mapper = SourceMapper::new(0);
|
||||
let result = generate_merge_list(&nodes, &sources, &graph_borrow, &mut source_mapper);
|
||||
|
||||
let merge_file = tmp_path.join("shaders").join("final.fsh.merge");
|
||||
|
||||
let mut truth = fs::read_to_string(merge_file).unwrap();
|
||||
truth = truth.replacen(
|
||||
"!!",
|
||||
&tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"),
|
||||
1,
|
||||
);
|
||||
// truth = truth.replacen(
|
||||
// "!!",
|
||||
// &tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"),
|
||||
// 1,
|
||||
// );
|
||||
truth = truth.replacen(
|
||||
"!!",
|
||||
&tmp_path.join("shaders").join("common.glsl").to_str().unwrap().replace('\\', "\\\\"),
|
||||
|
@ -297,6 +345,7 @@ mod merge_view_test {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[logging_macro::log_scope]
|
||||
fn test_generate_merge_list_02() {
|
||||
let mut server = new_temp_server(None);
|
||||
|
||||
|
@ -341,17 +390,18 @@ mod merge_view_test {
|
|||
let sources = server.load_sources(&nodes).unwrap();
|
||||
|
||||
let graph_borrow = server.graph.borrow();
|
||||
let result = generate_merge_list(&nodes, &sources, &graph_borrow);
|
||||
let mut source_mapper = SourceMapper::new(0);
|
||||
let result = generate_merge_list(&nodes, &sources, &graph_borrow, &mut source_mapper);
|
||||
|
||||
let merge_file = tmp_path.join("shaders").join("final.fsh.merge");
|
||||
|
||||
let mut truth = fs::read_to_string(merge_file).unwrap();
|
||||
|
||||
truth = truth.replacen(
|
||||
"!!",
|
||||
&tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"),
|
||||
1,
|
||||
);
|
||||
// truth = truth.replacen(
|
||||
// "!!",
|
||||
// &tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"),
|
||||
// 1,
|
||||
// );
|
||||
|
||||
for file in &["sample.glsl", "burger.glsl", "sample.glsl", "test.glsl", "sample.glsl"] {
|
||||
let path = tmp_path.clone();
|
||||
|
@ -377,6 +427,7 @@ mod merge_view_test {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[logging_macro::log_scope]
|
||||
fn test_generate_merge_list_03() {
|
||||
let mut server = new_temp_server(None);
|
||||
|
||||
|
@ -421,17 +472,18 @@ mod merge_view_test {
|
|||
let sources = server.load_sources(&nodes).unwrap();
|
||||
|
||||
let graph_borrow = server.graph.borrow();
|
||||
let result = generate_merge_list(&nodes, &sources, &graph_borrow);
|
||||
let mut source_mapper = SourceMapper::new(0);
|
||||
let result = generate_merge_list(&nodes, &sources, &graph_borrow, &mut source_mapper);
|
||||
|
||||
let merge_file = tmp_path.join("shaders").join("final.fsh.merge");
|
||||
|
||||
let mut truth = fs::read_to_string(merge_file).unwrap();
|
||||
|
||||
truth = truth.replacen(
|
||||
"!!",
|
||||
&tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"),
|
||||
1,
|
||||
);
|
||||
// truth = truth.replacen(
|
||||
// "!!",
|
||||
// &tmp_path.join("shaders").join("final.fsh").to_str().unwrap().replace('\\', "\\\\"),
|
||||
// 1,
|
||||
// );
|
||||
|
||||
for file in &["sample.glsl", "burger.glsl", "sample.glsl", "test.glsl", "sample.glsl"] {
|
||||
let path = tmp_path.clone();
|
||||
|
@ -457,6 +509,7 @@ mod merge_view_test {
|
|||
}
|
||||
|
||||
#[test]
|
||||
#[logging_macro::log_scope]
|
||||
fn test_generate_merge_list_04() {
|
||||
let mut server = new_temp_server(None);
|
||||
|
||||
|
@ -510,14 +563,15 @@ mod merge_view_test {
|
|||
let sources = server.load_sources(&nodes).unwrap();
|
||||
|
||||
let graph_borrow = server.graph.borrow();
|
||||
let result = generate_merge_list(&nodes, &sources, &graph_borrow);
|
||||
let mut source_mapper = SourceMapper::new(0);
|
||||
let result = generate_merge_list(&nodes, &sources, &graph_borrow, &mut source_mapper);
|
||||
|
||||
let merge_file = tmp_path.join("shaders").join("final.fsh.merge");
|
||||
|
||||
let mut truth = fs::read_to_string(merge_file).unwrap();
|
||||
|
||||
for file in &[
|
||||
PathBuf::new().join("final.fsh").to_str().unwrap(),
|
||||
// PathBuf::new().join("final.fsh").to_str().unwrap(),
|
||||
PathBuf::new().join("utils").join("utilities.glsl").to_str().unwrap(),
|
||||
PathBuf::new().join("utils").join("stuff1.glsl").to_str().unwrap(),
|
||||
PathBuf::new().join("utils").join("utilities.glsl").to_str().unwrap(),
|
||||
|
@ -528,7 +582,6 @@ mod merge_view_test {
|
|||
PathBuf::new().join("final.fsh").to_str().unwrap(),
|
||||
] {
|
||||
let path = tmp_path.clone();
|
||||
//path.f
|
||||
truth = truth.replacen("!!", &path.join("shaders").join(file).to_str().unwrap().replace('\\', "\\\\"), 1);
|
||||
}
|
||||
|
|
@ -24,7 +24,7 @@ pub struct ParserContext<'a> {
|
|||
}
|
||||
|
||||
impl<'a> ParserContext<'a> {
|
||||
pub fn new(parser: &'a mut Parser, document_uri: Url) -> Result<Self> {
|
||||
pub fn new(parser: &'a mut Parser, document_uri: &Url) -> Result<Self> {
|
||||
let source = read_to_string(document_uri.path())?;
|
||||
|
||||
let tree = parser.parse(&source, None).unwrap();
|
||||
|
@ -32,7 +32,7 @@ impl<'a> ParserContext<'a> {
|
|||
Ok(ParserContext { source, tree, parser })
|
||||
}
|
||||
|
||||
pub fn find_definitions(&self, document_uri: Url, point: Position) -> Result<Vec<Location>> {
|
||||
pub fn find_definitions(&self, document_uri: &Url, point: Position) -> Result<Vec<Location>> {
|
||||
let current_node = match self.find_node_at_point(point) {
|
||||
Some(node) => node,
|
||||
None => return Ok(vec![]),
|
|
@ -8,7 +8,7 @@ use mockall::automock;
|
|||
|
||||
#[cfg_attr(test, automock)]
|
||||
pub trait ShaderValidator {
|
||||
fn validate(&self, tree_type: super::TreeType, source: String) -> Option<String>;
|
||||
fn validate(&self, tree_type: super::TreeType, source: &str) -> Option<String>;
|
||||
fn vendor(&self) -> String;
|
||||
}
|
||||
|
||||
|
@ -42,7 +42,7 @@ impl OpenGlContext {
|
|||
gl_ctx
|
||||
}
|
||||
|
||||
unsafe fn compile_and_get_shader_log(&self, shader: gl::types::GLuint, source: String) -> Option<String> {
|
||||
unsafe fn compile_and_get_shader_log(&self, shader: gl::types::GLuint, source: &str) -> Option<String> {
|
||||
let mut success = i32::from(gl::FALSE);
|
||||
let c_str_frag = CString::new(source).unwrap();
|
||||
gl::ShaderSource(shader, 1, &c_str_frag.as_ptr(), ptr::null());
|
||||
|
@ -71,8 +71,8 @@ impl OpenGlContext {
|
|||
}
|
||||
|
||||
impl ShaderValidator for OpenGlContext {
|
||||
fn validate(&self, tree_type: super::TreeType, source: String) -> Option<String> {
|
||||
let result = unsafe {
|
||||
fn validate(&self, tree_type: super::TreeType, source: &str) -> Option<String> {
|
||||
unsafe {
|
||||
match tree_type {
|
||||
crate::TreeType::Fragment => {
|
||||
// Fragment shader
|
||||
|
@ -95,14 +95,7 @@ impl ShaderValidator for OpenGlContext {
|
|||
self.compile_and_get_shader_log(compute_shader, source)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
match &result {
|
||||
Some(output) => info!("compilation errors reported"; "errors" => format!("`{}`", output.replace('\n', "\\n"))),
|
||||
None => info!("compilation reported no errors"),
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
fn vendor(&self) -> String {
|
52
server/main/src/source_mapper.rs
Normal file
|
@ -0,0 +1,52 @@
use std::{collections::HashMap, fmt::Display};

use petgraph::graph::NodeIndex;

#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct SourceNum(usize);

impl Display for SourceNum {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(format!("{}", self.0).as_str())
    }
}

impl From<usize> for SourceNum {
    fn from(val: usize) -> Self {
        SourceNum(val)
    }
}

// Maps from a graph node index to a virtual OpenGL
// source number (for when building the merged source view),
// and in reverse (for when mapping from GLSL error source numbers to their source path).
// What is a source number: https://community.khronos.org/t/what-is-source-string-number/70976
pub struct SourceMapper {
    next: SourceNum,
    mapping: HashMap<NodeIndex, SourceNum>,
    reverse_mapping: Vec<NodeIndex>,
}

impl SourceMapper {
    pub fn new(capacity: usize) -> Self {
        SourceMapper {
            next: SourceNum(0),
            mapping: HashMap::with_capacity(capacity),
            reverse_mapping: Vec::with_capacity(capacity),
        }
    }

    pub fn get_num(&mut self, node: NodeIndex) -> SourceNum {
        let num = &*self.mapping.entry(node).or_insert_with(|| {
            let next = self.next;
            self.next.0 += 1;
            self.reverse_mapping.push(node);
            next
        });
        *num
    }

    pub fn get_node(&self, num: SourceNum) -> NodeIndex {
        self.reverse_mapping[num.0]
    }
}
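A minimal sketch of the SourceMapper round trip (node indices and file names are illustrative): numbers are handed out in first-seen order while the merged view is built, and the diagnostics parser later maps an error's source string number back to a graph node and from there to a path.

    use petgraph::graph::NodeIndex;
    use crate::source_mapper::{SourceMapper, SourceNum};

    fn source_mapper_round_trip_sketch() {
        let mut mapper = SourceMapper::new(2);
        let root = NodeIndex::new(0);    // e.g. final.fsh
        let include = NodeIndex::new(1); // e.g. common.glsl

        assert_eq!(format!("{}", mapper.get_num(root)), "0"); // the root is seeded first, so it gets 0
        assert_eq!(format!("{}", mapper.get_num(include)), "1");
        assert_eq!(format!("{}", mapper.get_num(root)), "0"); // repeated lookups are stable

        // reverse direction, as used when parsing driver output such as "0(9) : error ..."
        assert_eq!(mapper.get_node(SourceNum::from(0)), root);
    }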
|
|
@ -5,8 +5,6 @@ use std::io::Result;
|
|||
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use slog::o;
|
||||
use slog::Logger;
|
||||
use tempdir::TempDir;
|
||||
|
||||
use fs_extra::{copy_items, dir};
|
||||
|
@ -37,8 +35,6 @@ pub fn new_temp_server(opengl_context: Option<Box<dyn opengl::ShaderValidator>>)
|
|||
let endpoint = LSPEndpoint::create_lsp_output_with_output_stream(|| StdoutNewline { s: Box::new(io::sink()) });
|
||||
|
||||
let context = opengl_context.unwrap_or_else(|| Box::new(opengl::MockShaderValidator::new()));
|
||||
let logger = Logger::root(slog::Discard, o!());
|
||||
let guard = slog_scope::set_global_logger(logger);
|
||||
|
||||
MinecraftShaderLanguageServer {
|
||||
endpoint,
|
||||
|
@ -46,7 +42,7 @@ pub fn new_temp_server(opengl_context: Option<Box<dyn opengl::ShaderValidator>>)
|
|||
root: "".into(),
|
||||
command_provider: None,
|
||||
opengl_context: context.into(),
|
||||
log_guard: Some(guard),
|
||||
log_guard: None,
|
||||
tree_sitter: Rc::new(RefCell::new(Parser::new())),
|
||||
}
|
||||
}
|
||||
|
@ -82,6 +78,7 @@ fn copy_to_tmp_dir(test_path: &str) -> (Rc<TempDir>, PathBuf) {
|
|||
|
||||
#[allow(deprecated)]
|
||||
#[test]
|
||||
#[logging_macro::log_scope]
|
||||
fn test_empty_initialize() {
|
||||
let mut server = new_temp_server(None);
|
||||
|
||||
|
@ -130,6 +127,7 @@ fn test_empty_initialize() {
|
|||
|
||||
#[allow(deprecated)]
|
||||
#[test]
|
||||
#[logging_macro::log_scope]
|
||||
fn test_01_initialize() {
|
||||
let mut server = new_temp_server(None);
|
||||
|
||||
|
@ -189,7 +187,9 @@ fn test_01_initialize() {
|
|||
assert_eq!(server.graph.borrow().graph.edge_weight(edge).unwrap().line, 2);
|
||||
}
|
||||
|
||||
#[allow(deprecated)]
|
||||
#[test]
|
||||
#[logging_macro::log_scope]
|
||||
fn test_05_initialize() {
|
||||
let mut server = new_temp_server(None);
|
||||
|
11
server/main/testdata/01/final.fsh.merge
vendored
Normal file
|
@ -0,0 +1,11 @@
#version 120

#line 1 1 // !!
float test() {
    return 0.5;
}
#line 4 0 // !!

void main() {
    gl_FragColor[0] = vec4(0.0);
}
|
|
@ -1,28 +1,26 @@
|
|||
#version 120
|
||||
#extension GL_GOOGLE_cpp_style_line_directive : enable
|
||||
#line 2 "!!"
|
||||
|
||||
#line 1 "!!"
|
||||
#line 1 1 // !!
|
||||
int sample() {
|
||||
return 5;
|
||||
}
|
||||
|
||||
#line 1 "!!"
|
||||
#line 1 2 // !!
|
||||
void burger() {
|
||||
// sample text
|
||||
}
|
||||
#line 6 "!!"
|
||||
#line 6 1 // !!
|
||||
|
||||
#line 1 "!!"
|
||||
#line 1 3 // !!
|
||||
float test() {
|
||||
return 3.0;
|
||||
}
|
||||
#line 8 "!!"
|
||||
#line 8 1 // !!
|
||||
|
||||
int sample_more() {
|
||||
return 5;
|
||||
}
|
||||
#line 4 "!!"
|
||||
#line 4 0 // !!
|
||||
|
||||
void main() {
|
||||
gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0);
|
|
@ -1,24 +1,22 @@
|
|||
#version 120
|
||||
#extension GL_GOOGLE_cpp_style_line_directive : enable
|
||||
#line 2 "!!"
|
||||
|
||||
#line 1 "!!"
|
||||
#line 1 1 // !!
|
||||
int sample() {
|
||||
return 5;
|
||||
}
|
||||
|
||||
#line 1 "!!"
|
||||
#line 1 2 // !!
|
||||
void burger() {
|
||||
// sample text
|
||||
}
|
||||
#line 6 "!!"
|
||||
#line 6 1 // !!
|
||||
|
||||
#line 1 "!!"
|
||||
#line 1 3 // !!
|
||||
float test() {
|
||||
return 3.0;
|
||||
}
|
||||
#line 8 "!!"
|
||||
#line 4 "!!"
|
||||
#line 8 1 // !!
|
||||
#line 4 0 // !!
|
||||
|
||||
void main() {
|
||||
gl_FragColor = vec4(1.0, 1.0, 1.0, 1.0);
|
23
server/main/testdata/04/final.fsh.merge
vendored
Normal file
|
@ -0,0 +1,23 @@
#version 120

#line 1 1 // !!
#line 1 2 // !!
void stuff1() {

}
#line 2 1 // !!
#line 1 3 // !!
void stuff2() {

}
#line 3 1 // !!
#line 4 0 // !!
#line 1 4 // !!
void matrix() {

}
#line 5 0 // !!

void main() {

}
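Reading the fixture above: in a directive like #line 4 0 // !!, the first integer is the 1-indexed line to resume at, the second is the SourceMapper number of the file being resumed (the root shader is seeded first, so it gets 0), and !! is a placeholder that the tests later replace with the file's absolute path. A small Rust sketch of how such a string is assembled, mirroring the closing-directive format used in this diff (values copied from the fixture):

    let line = 4;        // 1-indexed line of the parent file to continue from
    let source_num = 0;  // SourceMapper number of the parent (the root file here)
    let path = "!!";     // placeholder; the tests substitute the absolute path
    let directive = format!("#line {} {} // {}\n", line, source_num, path);
    assert_eq!(directive, "#line 4 0 // !!\n");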
|
|
@ -1,162 +0,0 @@
|
|||
use std::{collections::HashMap, path::Path};
|
||||
|
||||
use once_cell::sync::OnceCell;
|
||||
|
||||
use regex::Regex;
|
||||
use rust_lsp::lsp_types::{Diagnostic, DiagnosticSeverity, Position, Range};
|
||||
use url::Url;
|
||||
|
||||
use crate::{consts, opengl};
|
||||
|
||||
static RE_DIAGNOSTIC: OnceCell<Regex> = OnceCell::new();
|
||||
fn diagnostics_regex<T>(vendor: &T) -> &'static Regex
|
||||
where
|
||||
T: opengl::ShaderValidator + ?Sized,
|
||||
{
|
||||
RE_DIAGNOSTIC.get_or_init(|| match vendor.vendor().as_str() {
|
||||
"NVIDIA Corporation" => {
|
||||
Regex::new(r#"^(?P<filepath>[^?<>*|"]+)\((?P<linenum>\d+)\) : (?P<severity>error|warning) [A-C]\d+: (?P<output>.+)"#).unwrap()
|
||||
}
|
||||
_ => {
|
||||
Regex::new(r#"^(?P<severity>ERROR|WARNING): (?P<filepath>[^?<>*|"\n]+):(?P<linenum>\d+): '[a-z]*' : (?P<output>.+)$"#).unwrap()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
static LINE_NUM_OFFSET: OnceCell<u32> = OnceCell::new();
|
||||
fn line_number_offset<T>(vendor: &T) -> &'static u32
|
||||
where
|
||||
T: opengl::ShaderValidator + ?Sized,
|
||||
{
|
||||
LINE_NUM_OFFSET.get_or_init(|| match vendor.vendor().as_str() {
|
||||
"ATI Technologies" => 0,
|
||||
_ => 2,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn parse_diagnostics_output<T>(output: String, uri: &Path, vendor_querier: &T) -> HashMap<Url, Vec<Diagnostic>>
|
||||
where
|
||||
T: opengl::ShaderValidator + ?Sized,
|
||||
{
|
||||
let output_lines = output.split('\n');
|
||||
let mut diagnostics: HashMap<Url, Vec<Diagnostic>> = HashMap::with_capacity(output_lines.count());
|
||||
let output_lines = output.split('\n');
|
||||
|
||||
for line in output_lines {
|
||||
let diagnostic_capture = match diagnostics_regex(vendor_querier).captures(line) {
|
||||
Some(d) => d,
|
||||
None => continue,
|
||||
};
|
||||
|
||||
// info!("match {:?}", diagnostic_capture);
|
||||
|
||||
let msg = diagnostic_capture.name("output").unwrap().as_str();
|
||||
|
||||
let line = match diagnostic_capture.name("linenum") {
|
||||
Some(c) => c.as_str().parse::<u32>().unwrap_or(0),
|
||||
None => 0,
|
||||
} - line_number_offset(vendor_querier);
|
||||
|
||||
// TODO: line matching maybe
|
||||
/* let line_text = source_lines[line as usize];
|
||||
let leading_whitespace = line_text.len() - line_text.trim_start().len(); */
|
||||
|
||||
let severity = match diagnostic_capture.name("severity") {
|
||||
Some(c) => match c.as_str().to_lowercase().as_str() {
|
||||
"error" => DiagnosticSeverity::Error,
|
||||
"warning" => DiagnosticSeverity::Warning,
|
||||
_ => DiagnosticSeverity::Information,
|
||||
},
|
||||
_ => DiagnosticSeverity::Information,
|
||||
};
|
||||
|
||||
let origin = match diagnostic_capture.name("filepath") {
|
||||
Some(o) => {
|
||||
if o.as_str() == "0" {
|
||||
uri.to_str().unwrap().to_string()
|
||||
} else {
|
||||
o.as_str().to_string()
|
||||
}
|
||||
}
|
||||
None => uri.to_str().unwrap().to_string(),
|
||||
};
|
||||
|
||||
let diagnostic = Diagnostic {
|
||||
range: Range::new(
|
||||
/* Position::new(line, leading_whitespace as u64),
|
||||
Position::new(line, line_text.len() as u64) */
|
||||
Position::new(line, 0),
|
||||
Position::new(line, 1000),
|
||||
),
|
||||
code: None,
|
||||
severity: Some(severity),
|
||||
source: Some(consts::SOURCE.into()),
|
||||
message: msg.trim().into(),
|
||||
related_information: None,
|
||||
tags: None,
|
||||
code_description: Option::None,
|
||||
data: Option::None,
|
||||
};
|
||||
|
||||
let origin_url = Url::from_file_path(origin).unwrap();
|
||||
match diagnostics.get_mut(&origin_url) {
|
||||
Some(d) => d.push(diagnostic),
|
||||
None => {
|
||||
diagnostics.insert(origin_url, vec![diagnostic]);
|
||||
}
|
||||
};
|
||||
}
|
||||
diagnostics
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod diagnostics_test {
|
||||
use std::{path::PathBuf, str::FromStr};
|
||||
|
||||
use crate::{diagnostics_parser::parse_diagnostics_output, opengl::MockShaderValidator, test::new_temp_server};
|
||||
|
||||
#[test]
|
||||
fn test_nvidia_diagnostics() {
|
||||
let mut mockgl = MockShaderValidator::new();
|
||||
mockgl.expect_vendor().returning(|| "NVIDIA Corporation".into());
|
||||
let server = new_temp_server(Some(Box::new(mockgl)));
|
||||
|
||||
let output = "/home/noah/.minecraft/shaderpacks/test/shaders/final.fsh(9) : error C0000: syntax error, unexpected '}', expecting ',' or ';' at token \"}\"";
|
||||
|
||||
let results = parse_diagnostics_output(
|
||||
output.to_string(),
|
||||
&PathBuf::from_str("/home/noah/.minecraft/shaderpacks/test").unwrap(),
|
||||
server.opengl_context.as_ref(),
|
||||
);
|
||||
|
||||
assert_eq!(results.len(), 1);
|
||||
let first = results.into_iter().next().unwrap();
|
||||
assert_eq!(
|
||||
first.0,
|
||||
url::Url::from_file_path("/home/noah/.minecraft/shaderpacks/test/shaders/final.fsh").unwrap()
|
||||
);
|
||||
server.endpoint.request_shutdown();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_amd_diagnostics() {
|
||||
let mut mockgl = MockShaderValidator::new();
|
||||
mockgl.expect_vendor().returning(|| "ATI Technologies".into());
|
||||
let server = new_temp_server(Some(Box::new(mockgl)));
|
||||
|
||||
let output = "ERROR: 0:1: '' : syntax error: #line
|
||||
ERROR: 0:10: '' : syntax error: #line
|
||||
ERROR: 0:15: 'varying' : syntax error: syntax error
|
||||
";
|
||||
|
||||
let results = parse_diagnostics_output(
|
||||
output.to_string(),
|
||||
&PathBuf::from_str("/home/test").unwrap(),
|
||||
server.opengl_context.as_ref(),
|
||||
);
|
||||
assert_eq!(results.len(), 1);
|
||||
let first = results.into_iter().next().unwrap();
|
||||
assert_eq!(first.1.len(), 3);
|
||||
server.endpoint.request_shutdown();
|
||||
}
|
||||
}
|
13
server/testdata/01/final.fsh.merge
vendored
|
@ -1,13 +0,0 @@
|
|||
#version 120
|
||||
#extension GL_GOOGLE_cpp_style_line_directive : enable
|
||||
#line 2 "!!"
|
||||
|
||||
#line 1 "!!"
|
||||
float test() {
|
||||
return 0.5;
|
||||
}
|
||||
#line 4 "!!"
|
||||
|
||||
void main() {
|
||||
gl_FragColor[0] = vec4(0.0);
|
||||
}
|
25
server/testdata/04/final.fsh.merge
vendored
|
@ -1,25 +0,0 @@
|
|||
#version 120
|
||||
#extension GL_GOOGLE_cpp_style_line_directive : enable
|
||||
#line 2 "!!"
|
||||
|
||||
#line 1 "!!"
|
||||
#line 1 "!!"
|
||||
void stuff1() {
|
||||
|
||||
}
|
||||
#line 2 "!!"
|
||||
#line 1 "!!"
|
||||
void stuff2() {
|
||||
|
||||
}
|
||||
#line 3 "!!"
|
||||
#line 4 "!!"
|
||||
#line 1 "!!"
|
||||
void matrix() {
|
||||
|
||||
}
|
||||
#line 5 "!!"
|
||||
|
||||
void main() {
|
||||
|
||||
}
|