Mirror of https://github.com/Strum355/mcshader-lsp.git (synced 2025-09-09 18:20:34 +00:00)

Commit c7d8b02ee3 (parent db5e5afb26): fixed all lint warnings

8 changed files with 66 additions and 67 deletions
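
Most of the signature changes below swap &PathBuf parameters for &Path (and, further down, &Vec<String> for &[String]), which is presumably what clippy's ptr_arg lint flagged: a borrowed PathBuf adds nothing over a borrowed Path, and callers holding a PathBuf can still pass &buf through deref coercion. A minimal sketch of the pattern, with a hypothetical graph_dot_path helper purely for illustration:

use std::path::{Path, PathBuf};

// Flagged by clippy::ptr_arg: a &PathBuf parameter forces callers to hold
// an owned PathBuf even though the function only reads it.
// fn graph_dot_path(root: &PathBuf) -> PathBuf { root.join("graph.dot") }

// Preferred: &Path accepts anything path-like.
fn graph_dot_path(root: &Path) -> PathBuf {
    root.join("graph.dot")
}

fn main() {
    let owned = PathBuf::from("/tmp/shaderpack");
    // &PathBuf coerces to &Path, so existing call sites keep compiling.
    println!("{}", graph_dot_path(&owned).display());
    println!("{}", graph_dot_path(Path::new("/tmp/other")).display());
}
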
@@ -1,4 +1,4 @@
-use std::{collections::HashMap, path::PathBuf};
+use std::{collections::HashMap, path::{Path, PathBuf}};
 use std::rc::Rc;
 use std::cell::RefCell;
 use std::fs::OpenOptions;
@@ -12,7 +12,7 @@ use anyhow::{Result, format_err};
 
 use std::fs;
 
-use crate::{graph::CachedStableGraph, merge_views, url_norm::FromJSON};
+use crate::{graph::CachedStableGraph, merge_views, url_norm::FromJson};
 use crate::dfs;
 
 pub struct CustomCommandProvider {
@@ -28,7 +28,7 @@ impl CustomCommandProvider {
 }
 }
 
-pub fn execute(&self, command: &str, args: Vec<Value>, root_path: &PathBuf) -> Result<Value> {
+pub fn execute(&self, command: &str, args: Vec<Value>, root_path: &Path) -> Result<Value> {
 if self.commands.contains_key(command) {
 return self.commands.get(command).unwrap().run_command(root_path, args);
 }
@@ -37,7 +37,7 @@ impl CustomCommandProvider {
 }
 
 pub trait Invokeable {
-fn run_command(&self, root: &PathBuf, arguments: Vec<Value>) -> Result<Value>;
+fn run_command(&self, root: &Path, arguments: Vec<Value>) -> Result<Value>;
 }
 
 pub struct GraphDotCommand {
@@ -45,7 +45,7 @@ pub struct GraphDotCommand {
 }
 
 impl Invokeable for GraphDotCommand {
-fn run_command(&self, root: &PathBuf, _: Vec<Value>) -> Result<Value> {
+fn run_command(&self, root: &Path, _: Vec<Value>) -> Result<Value> {
 let filepath = root.join("graph.dot");
 eprintln!("generating dot file at {:?}", filepath);
 let mut file = OpenOptions::new()
@@ -78,7 +78,7 @@ pub struct VirtualMergedDocument {
 
 impl VirtualMergedDocument {
 // TODO: DUPLICATE CODE
-fn get_file_toplevel_ancestors(&self, uri: &PathBuf) -> Result<Option<Vec<petgraph::stable_graph::NodeIndex>>> {
+fn get_file_toplevel_ancestors(&self, uri: &Path) -> Result<Option<Vec<petgraph::stable_graph::NodeIndex>>> {
 let curr_node = match self.graph.borrow_mut().find_node(uri) {
 Some(n) => n,
 None => return Err(format_err!("node not found {:?}", uri)),
@@ -113,7 +113,7 @@ impl VirtualMergedDocument {
 Ok(s) => s,
 Err(e) => return Err(format_err!("error reading {:?}: {}", path, e))
 };
-let source = crate::RE_CRLF.replace_all(&source, "\n").to_string();
+let source = source.replace("\r\n", "\n");
 sources.insert(path.clone(), source);
 }
 
@@ -122,7 +122,7 @@ impl VirtualMergedDocument {
 }
 
 impl Invokeable for VirtualMergedDocument {
-fn run_command(&self, root: &PathBuf, arguments: Vec<Value>) -> Result<Value> {
+fn run_command(&self, root: &Path, arguments: Vec<Value>) -> Result<Value> {
 let path = PathBuf::from_json(arguments.get(0).unwrap())?;
 
 let file_ancestors = match self.get_file_toplevel_ancestors(&path) {
@@ -114,8 +114,8 @@ pub mod error {
 
 impl CycleError {
 pub fn new(nodes: &[NodeIndex], current_node: NodeIndex, graph: &CachedStableGraph) -> Self {
-let mut resolved_nodes: Vec<PathBuf> = nodes.iter().map(|i| graph.get_node(*i).clone()).collect();
-resolved_nodes.push(graph.get_node(current_node).clone());
+let mut resolved_nodes: Vec<PathBuf> = nodes.iter().map(|i| graph.get_node(*i)).collect();
+resolved_nodes.push(graph.get_node(current_node));
 CycleError(resolved_nodes)
 }
 }
@@ -132,13 +132,13 @@ pub mod error {
 }
 }
 
-impl Into<Diagnostic> for CycleError {
-fn into(self) -> Diagnostic {
+impl From<CycleError> for Diagnostic {
+fn from(e: CycleError) -> Diagnostic {
 Diagnostic{
 severity: Some(DiagnosticSeverity::Error),
 range: Range::new(Position::new(0, 0), Position::new(0, 500)),
 source: Some(consts::SOURCE.into()),
-message: self.into(),
+message: e.into(),
 code: None,
 tags: None,
 related_information: None,
@@ -147,10 +147,10 @@ pub mod error {
 }
 }
 }
 
-impl Into<String> for CycleError {
-fn into(self) -> String {
-format!("{}", self)
+impl From<CycleError> for String {
+fn from(e: CycleError) -> String {
+format!("{}", e)
 }
 }
 }
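
The two Into impls for CycleError above are rewritten as From impls, the direction clippy's from_over_into lint asks for: implementing From<CycleError> for a target type also provides the matching Into through the standard library's blanket impl, so existing .into() call sites keep working. A sketch with a simplified stand-in error type (the field and message here are illustrative only):

// Stand-in for the real CycleError, which wraps the resolved node paths.
struct CycleError(Vec<String>);

// From is implemented directly; Into<String> for CycleError comes for free
// through the blanket impl<T, U> Into<U> for T where U: From<T>.
impl From<CycleError> for String {
    fn from(e: CycleError) -> String {
        format!("include cycle detected: {:?}", e.0)
    }
}

fn main() {
    let err = CycleError(vec!["a.glsl".to_string(), "b.glsl".to_string()]);
    let msg: String = err.into();
    println!("{}", msg);
}
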
@@ -3,7 +3,7 @@ use petgraph::stable_graph::NodeIndex;
 use petgraph::Direction;
 use petgraph::stable_graph::EdgeIndex;
 
-use std::{collections::{HashMap, HashSet}, path::PathBuf, str::FromStr};
+use std::{collections::{HashMap, HashSet}, path::{Path, PathBuf}, str::FromStr};
 
 use super::IncludePosition;
 
@@ -21,6 +21,7 @@ pub struct CachedStableGraph {
 }
 
 impl CachedStableGraph {
+#[allow(clippy::new_without_default)]
 pub fn new() -> CachedStableGraph {
 CachedStableGraph{
 graph: StableDiGraph::new(),
@@ -33,13 +34,13 @@ impl CachedStableGraph {
 /// and caches the result in the `HashMap`. Complexity is **O(1)** if the value
 /// is cached (which should always be the case), else **O(n)** where **n** is
 /// the number of node indices, as an exhaustive search must be done.
-pub fn find_node(&mut self, name: &PathBuf) -> Option<NodeIndex> {
+pub fn find_node(&mut self, name: &Path) -> Option<NodeIndex> {
 match self.cache.get(name) {
 Some(n) => Some(*n),
 None => {
 // If the string is not in cache, O(n) search the graph (i know...) and then cache the NodeIndex
 // for later
-let n = self.graph.node_indices().find(|n| self.graph[*n] == name.to_str().unwrap().to_string());
+let n = self.graph.node_indices().find(|n| self.graph[*n] == name.to_str().unwrap());
 if let Some(n) = n {
 self.cache.insert(name.into(), n);
 }
@@ -57,20 +58,20 @@ impl CachedStableGraph {
 }
 
 #[allow(dead_code)]
-pub fn remove_node(&mut self, name: &PathBuf) {
+pub fn remove_node(&mut self, name: &Path) {
 let idx = self.cache.remove(name);
 if let Some(idx) = idx {
 self.graph.remove_node(idx);
 }
 }
 
-pub fn add_node(&mut self, name: &PathBuf) -> NodeIndex {
+pub fn add_node(&mut self, name: &Path) -> NodeIndex {
 if let Some(idx) = self.cache.get(name) {
 return *idx;
 }
 let idx = self.graph.add_node(name.to_str().unwrap().to_string());
-self.cache.insert(name.clone(), idx);
-self.reverse_index.insert(idx, name.clone());
+self.cache.insert(name.to_owned(), idx);
+self.reverse_index.insert(idx, name.to_owned());
 idx
 }
 
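
find_node, remove_node and add_node now take &Path, yet the cache lookups (self.cache.get(name), self.cache.remove(name)) are untouched. That works because HashMap lookups accept any borrowed form of the key: PathBuf implements Borrow<Path>, so a &Path can query a map keyed by PathBuf, and only insertion needs an owned copy, hence the name.to_owned() calls. A minimal sketch, with an illustrative map standing in for the real cache:

use std::collections::HashMap;
use std::path::{Path, PathBuf};

fn main() {
    // Stand-in for the NodeIndex cache keyed by file path.
    let mut cache: HashMap<PathBuf, usize> = HashMap::new();

    let name: &Path = Path::new("/shaders/composite.fsh");

    // Insertion needs an owned key, hence name.to_owned() in the diff.
    cache.insert(name.to_owned(), 0);

    // get/remove accept &Path because PathBuf: Borrow<Path>,
    // so loosening the parameters to &Path needs no other changes.
    assert_eq!(cache.get(name), Some(&0));
    assert!(cache.remove(name).is_some());
}
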
@@ -8,7 +8,7 @@ use serde_json::Value;
 use url_norm::FromUrl;
 use walkdir::WalkDir;
 
-use std::{cell::RefCell, path::PathBuf, str::FromStr};
+use std::{cell::RefCell, path::{Path, PathBuf}, str::FromStr};
 use std::collections::{HashMap, HashSet};
 use std::collections::hash_map::RandomState;
 use std::convert::TryFrom;
@@ -45,7 +45,6 @@ lazy_static! {
 static ref RE_VERSION: Regex = Regex::new(r#"#version [\d]{3}"#).unwrap();
 static ref RE_INCLUDE: Regex = Regex::new(r#"^(?:\s)*?(?:#include) "(.+)"\r?"#).unwrap();
 static ref RE_INCLUDE_EXTENSION: Regex = Regex::new(r#"#extension GL_GOOGLE_include_directive ?: ?require"#).unwrap();
-pub static ref RE_CRLF: Regex = Regex::new(r#"\r\n"#).unwrap();
 }
 
 fn main() {
@@ -61,7 +60,7 @@ fn main() {
 wait: WaitGroup::new(),
 root: "".into(),
 command_provider: None,
-opengl_context: Rc::new(opengl::OpenGLContext::new())
+opengl_context: Rc::new(opengl::OpenGlContext::new())
 };
 
 langserver.command_provider = Some(commands::CustomCommandProvider::new(vec![
@@ -158,7 +157,7 @@ impl MinecraftShaderLanguageServer {
 eprintln!("finished building project include graph");
 }
 
-fn add_file_and_includes_to_graph(&self, path: &PathBuf) {
+fn add_file_and_includes_to_graph(&self, path: &Path) {
 let includes = self.find_includes(path);
 
 let idx = self.graph.borrow_mut().add_node(&path);
@@ -174,7 +173,7 @@ impl MinecraftShaderLanguageServer {
 self.graph.borrow_mut().add_edge(node, child, include.1);
 }
 
-pub fn find_includes(&self, file: &PathBuf) -> Vec<(PathBuf, IncludePosition)> {
+pub fn find_includes(&self, file: &Path) -> Vec<(PathBuf, IncludePosition)> {
 let mut includes = Vec::default();
 
 let buf = BufReader::new(std::fs::File::open(file).unwrap());
@@ -217,7 +216,7 @@ impl MinecraftShaderLanguageServer {
 includes
 }
 
-fn update_includes(&self, file: &PathBuf) {
+fn update_includes(&self, file: &Path) {
 let includes = self.find_includes(file);
 
 eprintln!("updating {:?} with {:?}", file, includes);
@@ -230,7 +229,7 @@ impl MinecraftShaderLanguageServer {
 };
 
 let prev_children: HashSet<_, RandomState> = HashSet::from_iter(self.graph.borrow().child_node_meta(idx));
-let new_children: HashSet<_, RandomState> = HashSet::from_iter(includes.iter().map(|e| e.clone()));
+let new_children: HashSet<_, RandomState> = includes.iter().cloned().collect();
 
 let to_be_added = new_children.difference(&prev_children);
 let to_be_removed = prev_children.difference(&new_children);
@@ -247,7 +246,7 @@ impl MinecraftShaderLanguageServer {
 }
 }
 
-pub fn lint(&self, uri: &PathBuf) -> Result<HashMap<Url, Vec<Diagnostic>>> {
+pub fn lint(&self, uri: &Path) -> Result<HashMap<Url, Vec<Diagnostic>>> {
 // get all top level ancestors of this file
 let file_ancestors = match self.get_file_toplevel_ancestors(uri) {
 Ok(opt) => match opt {
@@ -259,14 +258,14 @@ impl MinecraftShaderLanguageServer {
 
 eprintln!("ancestors for {:?}:\n\t{:?}", uri, file_ancestors.iter().map(|e| PathBuf::from_str(&self.graph.borrow().graph.node_weight(*e).unwrap().clone()).unwrap()).collect::<Vec<PathBuf>>());
 
-// the set of all filepath->content. TODO: change to Url?
+// the set of all filepath->content.
 let mut all_sources: HashMap<PathBuf, String> = HashMap::new();
 // the set of filepath->list of diagnostics to report
 let mut diagnostics: HashMap<Url, Vec<Diagnostic>> = HashMap::new();
 
 // we want to backfill the diagnostics map with all linked sources
-let back_fill = |all_sources, diagnostics: &mut HashMap<Url, Vec<Diagnostic>>| {
-for (path, _) in all_sources {
+let back_fill = |all_sources: &HashMap<PathBuf, String>, diagnostics: &mut HashMap<Url, Vec<Diagnostic>>| {
+for path in all_sources.keys() {
 diagnostics.entry(Url::from_file_path(path).unwrap()).or_default();
 }
 };
@@ -374,7 +373,7 @@ impl MinecraftShaderLanguageServer {
 Ok(diagnostics)
 }
 
-fn parse_validator_stdout(&self, uri: &PathBuf, stdout: String, _source: &str) -> HashMap<Url, Vec<Diagnostic>> {
+fn parse_validator_stdout(&self, uri: &Path, stdout: String, _source: &str) -> HashMap<Url, Vec<Diagnostic>> {
 let stdout_lines = stdout.split('\n');
 let mut diagnostics: HashMap<Url, Vec<Diagnostic>> = HashMap::with_capacity(stdout_lines.count());
 let stdout_lines = stdout.split('\n');
@@ -390,10 +389,7 @@ impl MinecraftShaderLanguageServer {
 let msg = diagnostic_capture.name("output").unwrap().as_str();
 
 let line = match diagnostic_capture.name("linenum") {
-Some(c) => match c.as_str().parse::<u32>() {
-Ok(i) => i,
-Err(_) => 0,
-},
+Some(c) => c.as_str().parse::<u32>().unwrap_or(0),
 None => 0,
 } - 2;
 
@@ -412,7 +408,7 @@ impl MinecraftShaderLanguageServer {
 
 let origin = match diagnostic_capture.name("filepath") {
 Some(o) => {
-if o.as_str().to_string() == "0" {
+if o.as_str() == "0" {
 uri.to_str().unwrap().to_string()
 } else {
 o.as_str().to_string()
@@ -472,14 +468,14 @@ impl MinecraftShaderLanguageServer {
 Ok(s) => s,
 Err(e) => return Err(anyhow!("error reading {:?}: {}", path, e))
 };
-let source = RE_CRLF.replace_all(&source, "\n").to_string();
+let source = source.replace("\r\n", "\n");
 sources.insert(path.clone(), source);
 }
 
 Ok(sources)
 }
 
-fn get_file_toplevel_ancestors(&self, uri: &PathBuf) -> Result<Option<Vec<petgraph::stable_graph::NodeIndex>>> {
+fn get_file_toplevel_ancestors(&self, uri: &Path) -> Result<Option<Vec<petgraph::stable_graph::NodeIndex>>> {
 let curr_node = match self.graph.borrow_mut().find_node(uri) {
 Some(n) => n,
 None => return Err(anyhow!("node not found {:?}", uri)),
@@ -515,21 +511,20 @@ impl LanguageServerHandling for MinecraftShaderLanguageServer {
 fn initialize(&mut self, params: InitializeParams, completable: MethodCompletable<InitializeResult, InitializeError>) {
 self.wait.add(1);
 
-let mut capabilities = ServerCapabilities::default();
-capabilities.hover_provider = None;
-capabilities.document_link_provider = Some(DocumentLinkOptions {
+let capabilities = ServerCapabilities{
+document_link_provider: Some(DocumentLinkOptions {
 resolve_provider: None,
 work_done_progress_options: WorkDoneProgressOptions {
 work_done_progress: None,
 },
-});
-capabilities.execute_command_provider = Some(ExecuteCommandOptions {
+}),
+execute_command_provider: Some(ExecuteCommandOptions {
 commands: vec!["graphDot".into()],
 work_done_progress_options: WorkDoneProgressOptions {
 work_done_progress: None,
 },
-});
-capabilities.text_document_sync = Some(TextDocumentSyncCapability::Options(
+}),
+text_document_sync: Some(TextDocumentSyncCapability::Options(
 TextDocumentSyncOptions {
 open_close: Some(true),
 will_save: None,
@@ -539,7 +534,9 @@ impl LanguageServerHandling for MinecraftShaderLanguageServer {
 include_text: Some(true),
 }))
 },
-));
+)),
+.. ServerCapabilities::default()
+};
 
 let root = match params.root_uri {
 Some(uri) => PathBuf::from_url(uri),
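
The initialize hunk above stops building ServerCapabilities by mutating a default value field by field and instead writes one struct literal that ends with .. ServerCapabilities::default(), presumably the shape clippy's field_reassign_with_default lint asks for. A sketch with a made-up stand-in struct:

#[derive(Default, Debug)]
struct Capabilities {
    hover: Option<bool>,
    links: Option<bool>,
    commands: Vec<String>,
}

fn main() {
    // Flagged style: create a default, then reassign fields one by one.
    //   let mut caps = Capabilities::default();
    //   caps.links = Some(true);

    // Preferred: a single literal; unspecified fields come from Default.
    let caps = Capabilities {
        links: Some(true),
        commands: vec!["graphDot".to_string()],
        ..Capabilities::default()
    };
    println!("{:?}", caps);
}
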
@@ -1,4 +1,4 @@
-use std::{collections::{HashMap, LinkedList, VecDeque}, path::PathBuf};
+use std::{collections::{HashMap, LinkedList, VecDeque}, path::{Path, PathBuf}};
 use std::iter::Peekable;
 use std::cmp::min;
 
@@ -16,7 +16,7 @@ struct FilialTuple(Option<NodeIndex>, NodeIndex);
 
 impl From<(Option<&NodeIndex>, NodeIndex)> for FilialTuple {
 fn from(tuple: (Option<&NodeIndex>, NodeIndex)) -> Self {
-FilialTuple(tuple.0.and_then(|n| Some(*n)), tuple.1)
+FilialTuple(tuple.0.copied(), tuple.1)
 }
 }
 
@@ -103,7 +103,7 @@ fn create_merge_views<'a>(
 let child_source = sources.get(&child_path).unwrap();
 // if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad
 let offset = {
-match child_source.ends_with("\n") {
+match child_source.ends_with('\n') {
 true => child_source.len()-1,
 false => child_source.len(),
 }
@@ -127,7 +127,7 @@ fn create_merge_views<'a>(
 let child_source = sources.get(&child_path).unwrap();
 // this evaluates to false once the file contents have been exhausted aka offset = child_source.len() + 1
 let end_offset = {
-match child_source.ends_with("\n") {
+match child_source.ends_with('\n') {
 true => 1/* child_source.len()-1 */,
 false => 0/* child_source.len() */,
 }
@@ -150,7 +150,7 @@ fn create_merge_views<'a>(
 let child_source = sources.get(&child_path).unwrap();
 // if ends in \n\n, we want to exclude the last \n for some reason. Ask optilad
 let offset = {
-match child_source.ends_with("\n") {
+match child_source.ends_with('\n') {
 true => child_source.len()-1,
 false => child_source.len(),
 }
@@ -180,13 +180,13 @@ fn char_offset_for_line(line_num: usize, source: &str) -> (usize, usize) {
 (char_for_line, char_following_line)
 }
 
-fn add_opening_line_directive(path: &PathBuf, merge_list: &mut LinkedList<&str>, line_directives: &mut Vec<String>) {
+fn add_opening_line_directive(path: &Path, merge_list: &mut LinkedList<&str>, line_directives: &mut Vec<String>) {
 let line_directive = format!("#line 1 \"{}\"\n", path.to_str().unwrap().replace("\\", "\\\\"));
 line_directives.push(line_directive);
 unsafe_get_and_insert(merge_list, line_directives);
 }
 
-fn add_closing_line_directive(line: usize, path: &PathBuf, merge_list: &mut LinkedList<&str>, line_directives: &mut Vec<String>) {
+fn add_closing_line_directive(line: usize, path: &Path, merge_list: &mut LinkedList<&str>, line_directives: &mut Vec<String>) {
 // Optifine doesn't seem to add a leading newline if the previous line was a #line directive
 let line_directive = if let Some(l) = merge_list.back() {
 if l.trim().starts_with("#line") {
@@ -202,7 +202,7 @@ fn add_closing_line_directive(line: usize, path: &PathBuf, merge_list: &mut Link
 unsafe_get_and_insert(merge_list, line_directives);
 }
 
-fn unsafe_get_and_insert(merge_list: &mut LinkedList<&str>, line_directives: &Vec<String>) {
+fn unsafe_get_and_insert(merge_list: &mut LinkedList<&str>, line_directives: &[String]) {
 // :^)
 unsafe {
 let vec_ptr_offset = line_directives.as_ptr().add(line_directives.len()-1);
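
Two smaller recurring fixes in this file: ends_with("\n") becomes ends_with('\n') (clippy's single_char_pattern prefers a char pattern for a one-character literal), and &Vec<String> becomes &[String] (ptr_arg again, so callers can pass any slice). A short sketch of both, with hypothetical helper names:

// Char pattern instead of a one-character string pattern.
fn trailing_newline_offset(source: &str) -> usize {
    if source.ends_with('\n') { source.len() - 1 } else { source.len() }
}

// &[String] instead of &Vec<String>: slices, arrays and vectors all work.
fn last_directive(line_directives: &[String]) -> Option<&String> {
    line_directives.last()
}

fn main() {
    assert_eq!(trailing_newline_offset("void main() {}\n"), 14);
    let dirs = vec![String::from("#line 1 \"world0/composite.fsh\"\n")];
    assert_eq!(last_directive(&dirs), Some(&dirs[0]));
}
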
@@ -3,17 +3,18 @@ use std::ffi::{CString, CStr};
 
 #[cfg(test)]
 use mockall::automock;
 
 #[cfg_attr(test, automock)]
 pub trait ShaderValidator {
 fn validate(&self, tree_type: super::TreeType, source: String) -> Option<String>;
 }
 
-pub struct OpenGLContext {
+pub struct OpenGlContext {
 _ctx: glutin::Context<glutin::PossiblyCurrent>
 }
 
-impl OpenGLContext {
-pub fn new() -> OpenGLContext {
+impl OpenGlContext {
+pub fn new() -> OpenGlContext {
 let events_loop = glutin::event_loop::EventLoop::new();
 let gl_window = glutin::ContextBuilder::new().build_headless(&*events_loop, glutin::dpi::PhysicalSize::new(1, 1)).unwrap();
 
@@ -31,7 +32,7 @@ impl OpenGLContext {
 String::from_utf8(CStr::from_ptr(gl::GetString(gl::RENDERER) as *const _).to_bytes().to_vec()).unwrap()
 );
 }
-OpenGLContext{
+OpenGlContext{
 _ctx: gl_window,
 }
 }
@@ -59,7 +60,7 @@ impl OpenGLContext {
 }
 }
 
-impl ShaderValidator for OpenGLContext {
+impl ShaderValidator for OpenGlContext {
 fn validate(&self, tree_type: super::TreeType, source: String) -> Option<String> {
 unsafe {
 match tree_type {
@@ -89,7 +89,7 @@ fn test_empty_initialize()
 let initialize_params = InitializeParams {
 process_id: None,
 root_path: None,
-root_uri: Some(Url::from_directory_path(tmp_path.clone()).unwrap()),
+root_uri: Some(Url::from_directory_path(tmp_path).unwrap()),
 client_info: None,
 initialization_options: None,
 capabilities: ClientCapabilities {
@@ -8,7 +8,7 @@ pub trait FromUrl {
 fn from_url(u: Url) -> Self;
 }
 
-pub trait FromJSON {
+pub trait FromJson {
 fn from_json(v: &serde_json::value::Value) -> Result<Self> where Self: Sized;
 }
 
@@ -26,7 +26,7 @@ impl FromUrl for PathBuf {
 }
 }
 
-impl FromJSON for PathBuf {
+impl FromJson for PathBuf {
 #[cfg(target_family = "windows")]
 fn from_json(v: &serde_json::value::Value) -> Result<Self>
 where Self: Sized {