mirror of https://github.com/latex-lsp/texlab.git
synced 2025-08-04 10:49:55 +00:00

Refactor diagnostics module

This commit is contained in:
parent f36086c4d3
commit f1b6388a83

9 changed files with 353 additions and 15 deletions

crates/base-db/src/diagnostics.rs (new file, 25 lines)

@@ -0,0 +1,25 @@
pub mod bib;
pub mod log;
pub mod tex;

use rowan::TextRange;
use syntax::BuildError;

#[derive(Debug, Clone)]
pub struct Diagnostic {
    pub range: TextRange,
    pub code: ErrorCode,
}

#[derive(Debug, Clone)]
pub enum ErrorCode {
    UnexpectedRCurly,
    RCurlyInserted,
    MismatchedEnvironment,
    ExpectingLCurly,
    ExpectingKey,
    ExpectingRCurly,
    ExpectingEq,
    ExpectingFieldValue,
    Build(BuildError),
}
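
The ErrorCode variants are raw syntax-level codes; turning them into user-facing messages happens elsewhere in texlab and is not part of this diff. A hypothetical sketch of such a mapping, only to show what each variant stands for (the message strings and the access to a BuildError message field are assumptions, not taken from this commit):

// Illustrative only: one possible rendering of each ErrorCode as a message.
fn describe(code: &ErrorCode) -> String {
    match code {
        ErrorCode::UnexpectedRCurly => "Unexpected \"}\"".into(),
        ErrorCode::RCurlyInserted => "Missing \"}\" inserted".into(),
        ErrorCode::MismatchedEnvironment => "Mismatched environment".into(),
        ErrorCode::ExpectingLCurly => "Expecting a curly bracket: \"{\"".into(),
        ErrorCode::ExpectingKey => "Expecting a key".into(),
        ErrorCode::ExpectingRCurly => "Expecting a curly bracket: \"}\"".into(),
        ErrorCode::ExpectingEq => "Expecting an equality sign: \"=\"".into(),
        ErrorCode::ExpectingFieldValue => "Expecting a field value".into(),
        ErrorCode::Build(error) => error.message.clone(), // assumed field name
    }
}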

crates/base-db/src/diagnostics/bib.rs (new file, 69 lines)

@@ -0,0 +1,69 @@
use rowan::{ast::AstNode, TextRange};
use syntax::bibtex::{self, HasDelims, HasEq, HasName, HasType, HasValue};

use crate::{Document, DocumentData};

use super::{Diagnostic, ErrorCode};

pub fn analyze(document: &mut Document) {
    let DocumentData::Bib(data) = &document.data else { return };

    for node in bibtex::SyntaxNode::new_root(data.green.clone()).descendants() {
        if let Some(entry) = bibtex::Entry::cast(node.clone()) {
            analyze_entry(document, entry);
        } else if let Some(field) = bibtex::Field::cast(node.clone()) {
            analyze_field(document, field);
        }
    }
}

fn analyze_entry(document: &mut Document, entry: bibtex::Entry) {
    if entry.left_delim_token().is_none() {
        document.diagnostics.push(Diagnostic {
            range: entry.type_token().unwrap().text_range(),
            code: ErrorCode::ExpectingLCurly,
        });

        return;
    }

    if entry.name_token().is_none() {
        document.diagnostics.push(Diagnostic {
            range: entry.left_delim_token().unwrap().text_range(),
            code: ErrorCode::ExpectingKey,
        });

        return;
    }

    if entry.right_delim_token().is_none() {
        document.diagnostics.push(Diagnostic {
            range: TextRange::empty(entry.syntax().text_range().end()),
            code: ErrorCode::ExpectingRCurly,
        });

        return;
    }
}

fn analyze_field(document: &mut Document, field: bibtex::Field) {
    if field.eq_token().is_none() {
        let code = ErrorCode::ExpectingEq;
        document.diagnostics.push(Diagnostic {
            range: field.name_token().unwrap().text_range(),
            code,
        });

        return;
    }

    if field.value().is_none() {
        let code = ErrorCode::ExpectingFieldValue;
        document.diagnostics.push(Diagnostic {
            range: field.name_token().unwrap().text_range(),
            code,
        });

        return;
    }
}
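
For orientation, the kinds of malformed entries the branches above flag, shown as illustrative input/diagnostic pairs (example fragments, not texlab test fixtures; the exact parse tree produced for them is assumed):

// Illustrative only: malformed BibTeX fragments and the ErrorCode each
// analyzer branch is expected to report for them.
const BIB_EXAMPLES: &[(&str, &str)] = &[
    ("@article",                 "ExpectingLCurly"),     // no `{` after the entry type
    ("@article{",                "ExpectingKey"),        // citation key is missing
    ("@article{foo,",            "ExpectingRCurly"),     // entry is never closed
    ("@article{foo, author}",    "ExpectingEq"),         // field without `=`
    ("@article{foo, author = }", "ExpectingFieldValue"), // `=` without a value
];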

crates/base-db/src/diagnostics/log.rs (new file, 68 lines)

@@ -0,0 +1,68 @@
use rowan::{TextLen, TextRange, TextSize};
use rustc_hash::FxHashMap;
use syntax::BuildError;
use url::Url;

use crate::{Document, DocumentData, Workspace};

use super::{Diagnostic, ErrorCode};

pub fn analyze(workspace: &Workspace, log_document: &Document) -> FxHashMap<Url, Vec<Diagnostic>> {
    let mut results = FxHashMap::default();

    let DocumentData::Log(data) = &log_document.data else { return results };

    let parents = workspace.parents(log_document);
    let Some(root_document) = parents.iter().next() else { return results };

    let Some(base_path) = root_document.path
        .as_deref()
        .and_then(|path| path.parent()) else { return results };

    for error in &data.errors {
        let full_path = base_path.join(&error.relative_path);
        let Ok(full_path_uri) = Url::from_file_path(&full_path) else { continue };
        let tex_document = workspace.lookup(&full_path_uri).unwrap_or(root_document);

        let range = find_range_of_hint(tex_document, error).unwrap_or_else(|| {
            let line = error.line.unwrap_or(0);
            let offset = *tex_document
                .line_index
                .newlines
                .get(line as usize)
                .unwrap_or(&TextSize::from(0));

            TextRange::empty(offset)
        });

        let diagnostic = Diagnostic {
            range,
            code: ErrorCode::Build(error.clone()),
        };

        results
            .entry(tex_document.uri.clone())
            .or_default()
            .push(diagnostic);
    }

    results
}

fn find_range_of_hint(document: &Document, error: &BuildError) -> Option<TextRange> {
    let line = error.line? as usize;
    let hint = error.hint.as_deref()?;
    let line_index = &document.line_index;

    let line_start = line_index.newlines.get(line).copied()?;
    let line_end = line_index
        .newlines
        .get(line + 1)
        .copied()
        .unwrap_or((&document.text).text_len());

    let line_text = &document.text[line_start.into()..line_end.into()];
    let hint_start = line_start + TextSize::try_from(line_text.find(hint)?).unwrap();
    let hint_end = hint_start + hint.text_len();
    Some(TextRange::new(hint_start, hint_end))
}
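
analyze returns a map keyed by the URI of the .tex file each build error points at, rather than attaching anything to the log document itself, because build errors belong to the sources, not to the log. A minimal sketch of how a caller might fan that map out (the publish callback is a placeholder, not texlab API; the types are the ones imported in log.rs above):

// Sketch: forward each per-file bucket of build diagnostics to some consumer.
fn forward<F>(results: FxHashMap<Url, Vec<Diagnostic>>, mut publish: F)
where
    F: FnMut(&Url, &[Diagnostic]),
{
    for (uri, diagnostics) in &results {
        publish(uri, diagnostics);
    }
}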

crates/base-db/src/diagnostics/tex.rs (new file, 116 lines)

@@ -0,0 +1,116 @@
use rowan::{ast::AstNode, NodeOrToken, TextRange};
use syntax::latex;

use crate::{Config, Document, DocumentData};

use super::{Diagnostic, ErrorCode};

pub fn analyze(document: &mut Document, config: &Config) {
    if !document.uri.as_str().ends_with(".tex") {
        return;
    }

    let DocumentData::Tex(data) = &document.data else { return };

    let mut traversal = latex::SyntaxNode::new_root(data.green.clone()).preorder();
    while let Some(event) = traversal.next() {
        match event {
            rowan::WalkEvent::Enter(node) => {
                if let Some(environment) = latex::Environment::cast(node.clone()) {
                    if environment
                        .begin()
                        .and_then(|begin| begin.name())
                        .and_then(|name| name.key())
                        .map_or(false, |name| {
                            config
                                .syntax
                                .verbatim_environments
                                .contains(&name.to_string())
                        })
                    {
                        traversal.skip_subtree();
                        continue;
                    }
                }

                analyze_environment(document, node.clone())
                    .or_else(|| analyze_curly_group(document, node.clone(), config))
                    .or_else(|| analyze_curly_braces(document, node));
            }
            rowan::WalkEvent::Leave(_) => {
                continue;
            }
        };
    }
}

fn analyze_environment(document: &mut Document, node: latex::SyntaxNode) -> Option<()> {
    let environment = latex::Environment::cast(node)?;
    let begin = environment.begin()?.name()?.key()?;
    let end = environment.end()?.name()?.key()?;
    if begin != end {
        document.diagnostics.push(Diagnostic {
            range: latex::small_range(&begin),
            code: ErrorCode::MismatchedEnvironment,
        });
    }

    Some(())
}

fn analyze_curly_group(
    document: &mut Document,
    node: latex::SyntaxNode,
    config: &Config,
) -> Option<()> {
    if !matches!(
        node.kind(),
        latex::CURLY_GROUP
            | latex::CURLY_GROUP_COMMAND
            | latex::CURLY_GROUP_KEY_VALUE
            | latex::CURLY_GROUP_WORD
            | latex::CURLY_GROUP_WORD_LIST
    ) {
        return None;
    }

    let is_inside_verbatim_environment = node
        .ancestors()
        .filter_map(latex::Environment::cast)
        .filter_map(|env| env.begin())
        .filter_map(|begin| begin.name())
        .filter_map(|name| name.key())
        .any(|name| {
            config
                .syntax
                .verbatim_environments
                .contains(&name.to_string())
        });

    if !is_inside_verbatim_environment
        && !node
            .children_with_tokens()
            .filter_map(NodeOrToken::into_token)
            .any(|token| token.kind() == latex::R_CURLY)
    {
        document.diagnostics.push(Diagnostic {
            range: TextRange::empty(node.text_range().end()),
            code: ErrorCode::RCurlyInserted,
        });
    }

    Some(())
}

fn analyze_curly_braces(document: &mut Document, node: latex::SyntaxNode) -> Option<()> {
    if node.kind() == latex::ERROR && node.first_token()?.text() == "}" {
        document.diagnostics.push(Diagnostic {
            range: node.text_range(),
            code: ErrorCode::UnexpectedRCurly,
        });

        Some(())
    } else {
        None
    }
}
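
As with the BibTeX analyzer, a few illustrative LaTeX fragments and the code each check above reports (examples only; the analyzer also requires the document URI to end in ".tex", and the exact parse tree for each fragment is assumed):

// Illustrative only: LaTeX fragments and the ErrorCode the analyzer is
// expected to produce for them.
const TEX_EXAMPLES: &[(&str, &str)] = &[
    ("\\begin{foo}\\end{bar}", "MismatchedEnvironment"), // begin/end names differ
    ("\\section{Intro",        "RCurlyInserted"),        // curly group never closed
    ("}",                      "UnexpectedRCurly"),      // stray closing brace
];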

@@ -5,7 +5,11 @@ use rowan::TextSize;
use syntax::{latex, BuildError};
use url::Url;

-use crate::{line_index::LineIndex, semantics};
+use crate::{
+    diagnostics::{self, Diagnostic},
+    line_index::LineIndex,
+    semantics, Config,
+};

#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
pub enum Owner {

@@ -22,13 +26,19 @@ pub struct Document {
    pub line_index: LineIndex,
    pub owner: Owner,
    pub cursor: TextSize,
    pub chktex: Vec<()>,
    pub language: Language,
    pub data: DocumentData,
+    pub diagnostics: Vec<Diagnostic>,
}

impl Document {
-    pub fn parse(uri: Url, text: String, language: Language, owner: Owner) -> Self {
+    pub fn parse(
+        uri: Url,
+        text: String,
+        language: Language,
+        owner: Owner,
+        config: &Config,
+    ) -> Self {
        let dir = uri.join(".").unwrap();

        let path = if uri.scheme() == "file" {

@@ -40,7 +50,7 @@ impl Document {
        let line_index = LineIndex::new(&text);

        let cursor = TextSize::from(0);
        let chktex = Vec::new();
+        let diagnostics = Vec::new();
        let data = match language {
            Language::Tex => {
                let green = parser::parse_latex(&text);

@@ -60,7 +70,7 @@ impl Document {
            Language::Tectonic => DocumentData::Tectonic,
        };

-        Self {
+        let mut document = Self {
            uri,
            dir,
            path,

@@ -68,10 +78,18 @@ impl Document {
            line_index,
            owner,
            cursor,
            chktex,
            language,
            data,
-        }
+            diagnostics,
+        };
+
+        match language {
+            Language::Tex => diagnostics::tex::analyze(&mut document, config),
+            Language::Bib => diagnostics::bib::analyze(&mut document),
+            Language::Log | Language::Root | Language::Tectonic => (),
+        };
+
+        document
    }
}
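
With the extra config parameter, a document carries its syntax diagnostics as soon as it is parsed. A sketch of a call site (not part of the commit; the URI, text, and owner are placeholder values, and Config implementing Default is an assumption):

// Hypothetical call site for the new signature.
fn parse_example() -> Document {
    let config = Config::default();
    Document::parse(
        Url::parse("file:///tmp/main.tex").unwrap(),
        "\\begin{foo}\n\\end{bar}\n".to_string(),
        Language::Tex,
        Owner::Client,
        &config,
    )
    // The returned document already has its diagnostics filled in,
    // e.g. MismatchedEnvironment for the snippet above.
}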

@@ -90,7 +90,7 @@ impl<'a> Graph<'a> {
            .map(|ext| format!("{stem}.{ext}"))
            .for_each(|name| file_names.push(name));

-        let file_name_db = self.workspace.distro();
+        let file_name_db = &self.workspace.distro().file_name_db;
        let distro_files = file_names
            .iter()
            .filter_map(|name| file_name_db.get(name))

@@ -1,4 +1,5 @@
mod config;
+pub mod diagnostics;
mod document;
pub mod graph;
mod line_index;

@@ -129,8 +129,15 @@ impl Semantics {
                .and_then(|group| group.key())
                .map(|key| key.to_string()) else { continue };

+            let caption = environment
+                .syntax()
+                .children()
+                .filter_map(latex::Caption::cast)
+                .find_map(|node| node.long())
+                .and_then(|node| node.content_text());
+
            let range = latex::small_range(&environment);
-            let kind = LabelObject::Environment { name };
+            let kind = LabelObject::Environment { name, caption };
            objects.push(LabelTarget {
                object: kind,
                range,

@@ -253,9 +260,15 @@ pub struct LabelTarget {

#[derive(Debug)]
pub enum LabelObject {
-    Section { prefix: String, text: String },
+    Section {
+        prefix: String,
+        text: String,
+    },
    EnumItem,
-    Environment { name: String },
+    Environment {
+        name: String,
+        caption: Option<String>,
+    },
}

#[derive(Debug)]

@@ -3,7 +3,7 @@ use std::{
    path::{Path, PathBuf},
};

-use distro::{FileNameDB, Language};
+use distro::{Distro, Language};
use itertools::Itertools;
use rustc_hash::FxHashSet;
use url::Url;

@@ -14,7 +14,7 @@ use crate::{graph, Config, Document, DocumentData, Owner};
pub struct Workspace {
    documents: FxHashSet<Document>,
    config: Config,
-    distro: FileNameDB,
+    distro: Distro,
    folders: Vec<PathBuf>,
}

@@ -40,7 +40,7 @@ impl Workspace {
        &self.config
    }

-    pub fn distro(&self) -> &FileNameDB {
+    pub fn distro(&self) -> &Distro {
        &self.distro
    }

@@ -48,7 +48,7 @@ impl Workspace {
        log::debug!("Opening document {uri}...");
        self.documents.remove(&uri);
        self.documents
-            .insert(Document::parse(uri, text, language, owner));
+            .insert(Document::parse(uri, text, language, owner, &self.config));
    }

    pub fn load(&mut self, path: &Path, language: Language, owner: Owner) -> std::io::Result<()> {

@@ -139,6 +139,34 @@ impl Workspace {
            .collect()
    }

+    pub fn set_config(&mut self, config: Config) {
+        self.config = config;
+        self.reload();
+    }
+
+    pub fn set_distro(&mut self, distro: Distro) {
+        self.distro = distro;
+        self.reload();
+    }
+
+    pub fn reload(&mut self) {
+        let uris = self
+            .documents
+            .iter()
+            .map(|document| document.uri.clone())
+            .collect::<Vec<Url>>();
+
+        for uri in uris {
+            let document = self.lookup(&uri).unwrap();
+            self.open(
+                uri,
+                document.text.clone(),
+                document.language,
+                document.owner,
+            );
+        }
+    }
+
    pub fn discover(&mut self) {
        loop {
            let mut changed = false;
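
Because diagnostics are computed inside Document::parse, a configuration or distro change has to re-run the analyzers; set_config and set_distro therefore call reload, which re-opens every document with its existing text. A minimal usage sketch (the new_config value is hypothetical):

// Hypothetical: applying new settings re-parses every open document, so the
// stored diagnostics reflect e.g. an updated verbatim_environments list.
fn apply_settings(workspace: &mut Workspace, new_config: Config) {
    workspace.set_config(new_config); // internally calls reload()
}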