Replace salsa crate to avoid memory leak

Patrick Förster 2023-04-09 11:44:18 +02:00
parent 672e820f51
commit 0d7b21440d
94 changed files with 1558 additions and 3840 deletions
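
Note: the sketch below is illustrative and not part of the commit; it uses only APIs visible in this diff (`Workspace::default`, `Workspace::open`, `Workspace::remove`) to show why the plain-data model fixes the leak. Documents are now owned values that are dropped on removal or re-open, whereas salsa interned/tracked entities live as long as the database.

use base_db::{Owner, Workspace};
use distro::Language;
use rowan::TextSize;
use url::Url;

fn demo() {
    let uri = Url::parse("file:///main.tex").unwrap();
    let mut workspace = Workspace::default();
    // Parsing happens eagerly inside `open`; the results are plain fields
    // on the stored `Document`, not entries in a salsa database.
    workspace.open(
        uri.clone(),
        String::from("\\documentclass{article}"),
        Language::Tex,
        Owner::Client,
        TextSize::default(),
    );
    // Dropping the document frees its green tree and semantics as well.
    workspace.remove(&uri);
}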

View file

@ -7,7 +7,10 @@ use crate::{Document, DocumentData, Workspace};
use super::{Diagnostic, ErrorCode};
pub fn analyze(workspace: &Workspace, log_document: &Document) -> FxHashMap<Url, Vec<Diagnostic>> {
pub fn analyze<'a>(
workspace: &'a Workspace,
log_document: &'a Document,
) -> FxHashMap<&'a Document, Vec<Diagnostic>> {
let mut results = FxHashMap::default();
let DocumentData::Log(data) = &log_document.data else { return results };
@ -40,10 +43,7 @@ pub fn analyze(workspace: &Workspace, log_document: &Document) -> FxHashMap<Url,
code: ErrorCode::Build(error.clone()),
};
results
.entry(tex_document.uri.clone())
.or_default()
.push(diagnostic);
results.entry(tex_document).or_default().push(diagnostic);
}
results
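
Illustrative caller sketch (not from the commit, `workspace` and `log_document` assumed in scope): with the result map keyed by borrowed `Document`s instead of owned `Url`s, consumers can report per-document diagnostics without cloning URIs.

let results = analyze(&workspace, log_document);
for (document, diagnostics) in &results {
    // `uri` is a public field on the new `Document` struct.
    eprintln!("{}: {} build diagnostic(s)", document.uri, diagnostics.len());
}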

View file

@ -2,7 +2,7 @@ use std::path::PathBuf;
use distro::Language;
use rowan::TextSize;
use syntax::{latex, BuildError};
use syntax::{bibtex, latex, BuildError};
use url::Url;
use crate::{
@ -17,7 +17,7 @@ pub enum Owner {
Server,
}
#[derive(Debug)]
#[derive(Debug, Clone)]
pub struct Document {
pub uri: Url,
pub dir: Url,
@ -37,6 +37,7 @@ impl Document {
text: String,
language: Language,
owner: Owner,
cursor: TextSize,
config: &Config,
) -> Self {
let dir = uri.join(".").unwrap();
@ -49,7 +50,6 @@ impl Document {
let line_index = LineIndex::new(&text);
let cursor = TextSize::from(0);
let diagnostics = Vec::new();
let data = match language {
Language::Tex => {
@ -62,6 +62,12 @@ impl Document {
let green = parser::parse_bibtex(&text);
DocumentData::Bib(BibDocumentData { green })
}
Language::Aux => {
let green = parser::parse_latex(&text);
let mut semantics = semantics::aux::Semantics::default();
semantics.process_root(&latex::SyntaxNode::new_root(green.clone()));
DocumentData::Aux(AuxDocumentData { green, semantics })
}
Language::Log => {
let errors = parser::parse_build_log(&text).errors;
DocumentData::Log(LogDocumentData { errors })
@ -86,7 +92,7 @@ impl Document {
match language {
Language::Tex => diagnostics::tex::analyze(&mut document, config),
Language::Bib => diagnostics::bib::analyze(&mut document),
Language::Log | Language::Root | Language::Tectonic => (),
Language::Aux | Language::Log | Language::Root | Language::Tectonic => (),
};
document
@ -119,7 +125,7 @@ impl std::hash::Hash for Document {
}
}
#[derive(Debug)]
#[derive(Debug, Clone)]
pub enum DocumentData {
Tex(TexDocumentData),
Bib(BibDocumentData),
@ -129,23 +135,61 @@ pub enum DocumentData {
Tectonic,
}
#[derive(Debug)]
impl DocumentData {
pub fn as_tex(&self) -> Option<&TexDocumentData> {
if let DocumentData::Tex(data) = self {
Some(data)
} else {
None
}
}
pub fn as_bib(&self) -> Option<&BibDocumentData> {
if let DocumentData::Bib(data) = self {
Some(data)
} else {
None
}
}
pub fn as_aux(&self) -> Option<&AuxDocumentData> {
if let DocumentData::Aux(data) = self {
Some(data)
} else {
None
}
}
}
#[derive(Debug, Clone)]
pub struct TexDocumentData {
pub green: rowan::GreenNode,
pub semantics: semantics::tex::Semantics,
}
#[derive(Debug)]
impl TexDocumentData {
pub fn root_node(&self) -> latex::SyntaxNode {
latex::SyntaxNode::new_root(self.green.clone())
}
}
#[derive(Debug, Clone)]
pub struct BibDocumentData {
pub green: rowan::GreenNode,
}
#[derive(Debug)]
impl BibDocumentData {
pub fn root_node(&self) -> bibtex::SyntaxNode {
bibtex::SyntaxNode::new_root(self.green.clone())
}
}
#[derive(Debug, Clone)]
pub struct LogDocumentData {
pub errors: Vec<BuildError>,
}
#[derive(Debug)]
#[derive(Debug, Clone)]
pub struct AuxDocumentData {
pub green: rowan::GreenNode,
pub semantics: semantics::aux::Semantics,
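
Illustrative sketch of the new accessors: `as_tex` and `root_node` are from this diff, while the label lookup is a hypothetical use (the `LabelDefinition` AST calls follow the code removed later in this commit). The accessors replace the salsa-tracked parse queries with plain method calls on owned data.

use rowan::ast::AstNode;
use syntax::latex;

fn first_label_name(document: &base_db::Document) -> Option<String> {
    // `as_tex` returns the parsed data only for TeX documents.
    let data = document.data.as_tex()?;
    // `root_node` rebuilds the red tree from the stored green node on demand.
    data.root_node()
        .descendants()
        .filter_map(latex::LabelDefinition::cast)
        .find_map(|label| Some(label.name()?.key()?.to_string()))
}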

View file

@ -10,14 +10,14 @@ use crate::{semantics, Document, DocumentData, Workspace};
pub static HOME_DIR: Lazy<Option<PathBuf>> = Lazy::new(dirs::home_dir);
#[derive(Debug)]
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub struct Edge<'a> {
pub source: &'a Document,
pub target: &'a Document,
pub weight: Option<EdgeWeight<'a>>,
}
#[derive(Debug)]
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub struct EdgeWeight<'a> {
pub link: &'a semantics::tex::Link,
pub old_base_dir: Url,
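
Illustrative sketch, partly inferred: `graph::Graph::new` and `preorder()` appear in the workspace hunk further down. Because `Edge` now borrows documents instead of holding salsa ids, building a project graph is a cheap, pure function of the `Workspace`.

// `workspace` and a starting `&Document` are assumed in scope.
let graph = graph::Graph::new(&workspace, start);
for document in graph.preorder() {
    println!("{}", document.uri);
}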

View file

@ -6,4 +6,4 @@ mod line_index;
pub mod semantics;
mod workspace;
pub use self::{config::*, document::*, workspace::*};
pub use self::{config::*, document::*, line_index::*, workspace::*};

View file

@ -2,7 +2,7 @@ use rowan::ast::AstNode;
use rustc_hash::FxHashMap;
use syntax::latex;
#[derive(Debug)]
#[derive(Debug, Clone, Default)]
pub struct Semantics {
pub label_numbers: FxHashMap<String, String>,
}
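
Illustrative sketch mirroring the `Language::Aux` branch of `Document::parse` above; the label name `sec:intro` is hypothetical. The aux semantics collect resolved label numbers so references can be annotated.

use syntax::latex;

// `text` is assumed to hold the contents of a .aux file.
let green = parser::parse_latex(text);
let mut semantics = semantics::aux::Semantics::default();
semantics.process_root(&latex::SyntaxNode::new_root(green.clone()));
if let Some(number) = semantics.label_numbers.get("sec:intro") {
    println!("resolved label number: {number}");
}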

View file

@ -1,15 +1,15 @@
use rowan::ast::AstNode;
use rowan::{ast::AstNode, TextLen};
use rustc_hash::FxHashSet;
use syntax::latex::{self, HasCurly};
use syntax::latex::{self, HasBrack, HasCurly};
use text_size::TextRange;
use super::Span;
#[derive(Debug, Default)]
#[derive(Debug, Clone, Default)]
pub struct Semantics {
pub links: Vec<Link>,
pub labels: Vec<Label>,
pub commands: FxHashSet<String>,
pub commands: Vec<(TextRange, String)>,
pub environments: FxHashSet<String>,
pub theorem_definitions: Vec<TheoremDefinition>,
pub graphics_paths: FxHashSet<String>,
@ -26,19 +26,22 @@ impl Semantics {
}
latex::SyntaxElement::Token(token) => {
if token.kind() == latex::COMMAND_NAME {
self.commands.insert(String::from(token.text()));
let range = token.text_range();
let range = TextRange::new(range.start() + "\\".text_len(), range.end());
let text = String::from(&token.text()[1..]);
self.commands.push((range, text));
}
}
};
}
self.can_be_compiled = self.environments.contains("document");
self.can_be_root = self.can_be_compiled
&& self
.links
.iter()
.filter(|link| link.kind == LinkKind::Cls)
.any(|link| link.path.text == "subfiles");
self.can_be_root = self
.links
.iter()
.filter(|link| link.kind == LinkKind::Cls)
.any(|link| link.path.text != "subfiles");
self.can_be_compiled = self.can_be_root || self.environments.contains("document");
}
fn process_node(&mut self, node: &latex::SyntaxNode) {
@ -136,8 +139,18 @@ impl Semantics {
.find_map(|node| node.long())
.and_then(|node| node.content_text());
let options = environment
.begin()
.and_then(|begin| begin.options())
.and_then(|options| options.content_text());
let range = latex::small_range(&environment);
let kind = LabelObject::Environment { name, caption };
let kind = LabelObject::Environment {
name,
options,
caption,
};
objects.push(LabelTarget {
object: kind,
range,
@ -231,7 +244,7 @@ impl LinkKind {
}
}
#[derive(Debug)]
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub struct Link {
pub kind: LinkKind,
pub path: Span,
@ -245,20 +258,20 @@ pub enum LabelKind {
ReferenceRange,
}
#[derive(Debug)]
#[derive(Debug, Clone)]
pub struct Label {
pub kind: LabelKind,
pub name: Span,
pub targets: Vec<LabelTarget>,
}
#[derive(Debug)]
#[derive(Debug, Clone)]
pub struct LabelTarget {
pub object: LabelObject,
pub range: TextRange,
}
#[derive(Debug)]
#[derive(Debug, Clone)]
pub enum LabelObject {
Section {
prefix: String,
@ -267,11 +280,12 @@ pub enum LabelObject {
EnumItem,
Environment {
name: String,
options: Option<String>,
caption: Option<String>,
},
}
#[derive(Debug)]
#[derive(Debug, Clone)]
pub struct TheoremDefinition {
pub name: Span,
pub description: String,
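
Illustrative sketch (the `usepackage` check is hypothetical, `semantics` assumed in scope): `commands` now records `(range, name)` pairs with the leading backslash stripped in the `COMMAND_NAME` branch above, so each occurrence can be located instead of being deduplicated into a set.

for (range, name) in &semantics.commands {
    if name.as_str() == "usepackage" {
        // The range excludes the backslash, per the `TextRange::new` above.
        println!("\\usepackage found at {range:?}");
    }
}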

View file

@ -5,6 +5,7 @@ use std::{
use distro::{Distro, Language};
use itertools::Itertools;
use rowan::{TextRange, TextSize};
use rustc_hash::FxHashSet;
use url::Url;
@ -44,11 +45,24 @@ impl Workspace {
&self.distro
}
pub fn open(&mut self, uri: Url, text: String, language: Language, owner: Owner) {
pub fn open(
&mut self,
uri: Url,
text: String,
language: Language,
owner: Owner,
cursor: TextSize,
) {
log::debug!("Opening document {uri}...");
self.documents.remove(&uri);
self.documents
.insert(Document::parse(uri, text, language, owner, &self.config));
self.documents.insert(Document::parse(
uri,
text,
language,
owner,
cursor,
&self.config,
));
}
pub fn load(&mut self, path: &Path, language: Language, owner: Owner) -> std::io::Result<()> {
@ -60,7 +74,22 @@ impl Workspace {
Cow::Owned(text) => text,
};
Ok(self.open(uri, text, language, owner))
Ok(self.open(uri, text, language, owner, TextSize::default()))
}
pub fn edit(&mut self, uri: &Url, delete: TextRange, insert: &str) -> Option<()> {
let document = self.lookup(uri)?;
let mut text = document.text.clone();
text.replace_range(std::ops::Range::<usize>::from(delete), insert);
self.open(
document.uri.clone(),
text,
document.language,
Owner::Client,
delete.start(),
);
Some(())
}
pub fn watch(
@ -119,12 +148,7 @@ impl Workspace {
for graph in self
.iter()
.map(|start| graph::Graph::new(self, start))
.filter(|graph| {
graph
.edges
.iter()
.any(|edge| edge.source == child || edge.target == child)
})
.filter(|graph| graph.preorder().contains(&child))
{
results.extend(graph.preorder());
}
@ -160,6 +184,13 @@ impl Workspace {
self.folders = folders;
}
pub fn set_cursor(&mut self, uri: &Url, cursor: TextSize) -> Option<()> {
let mut document = self.lookup(uri)?.clone();
document.cursor = cursor;
self.documents.insert(document);
Some(())
}
pub fn reload(&mut self) {
let uris = self
.documents
@ -174,10 +205,22 @@ impl Workspace {
document.text.clone(),
document.language,
document.owner,
document.cursor,
);
}
}
pub fn remove(&mut self, uri: &Url) {
self.documents.remove(uri);
}
pub fn close(&mut self, uri: &Url) -> Option<()> {
let mut document = self.lookup(uri)?.clone();
document.owner = Owner::Server;
self.documents.insert(document);
Some(())
}
pub fn discover(&mut self) {
loop {
let mut changed = false;
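
Illustrative sketch of the new mutation helpers from this hunk, given a `workspace` and an open `uri` as above: `edit` re-opens the document with the changed text and the edit's start offset as the cursor estimate, `set_cursor` updates the estimate in place, and `close` merely transfers ownership to the server instead of dropping the document.

use rowan::{TextRange, TextSize};

workspace.edit(&uri, TextRange::empty(TextSize::from(0)), "% note\n");
workspace.set_cursor(&uri, TextSize::from(7));
workspace.close(&uri); // keeps the document, marks it as server-owned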

View file

@ -4,6 +4,7 @@ use std::path::Path;
pub enum Language {
Tex,
Bib,
Aux,
Log,
Root,
Tectonic,
@ -22,8 +23,9 @@ impl Language {
let extname = path.extension()?.to_str()?;
match extname.to_lowercase().as_str() {
"tex" | "sty" | "cls" | "def" | "lco" | "aux" | "rnw" => Some(Self::Tex),
"tex" | "sty" | "cls" | "def" | "lco" | "rnw" => Some(Self::Tex),
"bib" | "bibtex" => Some(Self::Bib),
"aux" => Some(Self::Aux),
"log" => Some(Self::Log),
_ => None,
}
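
Illustrative check (hypothetical file names): `.aux` files now map to their own `Language::Aux` instead of being lumped in with TeX, so they go through the aux-specific parsing branch shown earlier.

use std::path::Path;
use distro::Language;

assert!(matches!(Language::from_path(Path::new("main.aux")), Some(Language::Aux)));
assert!(matches!(Language::from_path(Path::new("main.tex")), Some(Language::Tex)));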

View file

@ -37,7 +37,7 @@ citeproc = { path = "../citeproc" }
clap = { version = "4.1.6", features = ["derive"] }
crossbeam-channel = "0.5.6"
dashmap = "5.4.0"
dirs = "4.0.0"
dirs = "5.0.0"
distro = { path = "../distro" }
encoding_rs = "0.8.32"
encoding_rs_io = "0.1.7"
@ -52,6 +52,7 @@ lsp-server = "0.7.0"
lsp-types = "0.94.0"
notify = "5.1.0"
once_cell = "1.17.1"
parking_lot = "0.12.1"
parser = { path = "../parser" }
regex = "1.7.1"
rowan = "0.15.10"
@ -67,11 +68,6 @@ thiserror = "1.0.38"
threadpool = "1.8.1"
titlecase = "2.2.1"
[dependencies.salsa]
git = "https://github.com/salsa-rs/salsa"
rev = "ef7c0f12c8159e7025316e959c26f6278a576fa5"
package = "salsa-2022"
[dev-dependencies]
assert_unordered = "0.3.5"
criterion = { version = "0.4.0" }

View file

@ -1,11 +1,9 @@
use base_db::{Owner, Workspace};
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use distro::Language;
use lsp_types::{Position, Url};
use lsp_types::{ClientCapabilities, Position, Url};
use parser::parse_latex;
use texlab::{
db::{Owner, Workspace},
Database,
};
use rowan::TextSize;
const CODE: &str = include_str!("../../../texlab.tex");
@ -17,9 +15,26 @@ fn criterion_benchmark(c: &mut Criterion) {
c.bench_function("LaTeX/Completion/Command", |b| {
let uri = Url::parse("http://example.com/texlab.tex").unwrap();
let text = CODE.to_string();
let mut db = Database::default();
Workspace::get(&db).open(&mut db, uri.clone(), text, Language::Tex, Owner::Client);
b.iter(|| texlab::features::completion::complete(&db, &uri, Position::new(0, 1)));
let mut workspace = Workspace::default();
workspace.open(
uri.clone(),
text,
Language::Tex,
Owner::Client,
TextSize::default(),
);
let client_capabilities = ClientCapabilities::default();
b.iter(|| {
texlab::features::completion::complete(
&workspace,
&uri,
Position::new(0, 1),
&client_capabilities,
None,
)
});
});
}

View file

@ -1,230 +0,0 @@
use std::time::Duration;
use regex::Regex;
use rustc_hash::FxHashSet;
#[derive(Debug)]
pub struct Config {
pub root_dir: Option<String>,
pub build: BuildConfig,
pub diagnostics: DiagnosticsConfig,
pub formatting: FormattingConfig,
pub synctex: Option<SynctexConfig>,
pub symbols: SymbolConfig,
pub syntax: SyntaxConfig,
}
#[derive(Debug)]
pub struct BuildConfig {
pub program: String,
pub args: Vec<String>,
pub on_save: bool,
pub forward_search_after: bool,
pub output_dir: String,
}
#[derive(Debug)]
pub struct DiagnosticsConfig {
pub allowed_patterns: Vec<Regex>,
pub ignored_patterns: Vec<Regex>,
pub chktex: ChktexConfig,
pub delay: Duration,
}
#[derive(Debug)]
pub struct ChktexConfig {
pub on_open: bool,
pub on_save: bool,
pub on_edit: bool,
}
#[derive(Debug)]
pub struct SynctexConfig {
pub program: String,
pub args: Vec<String>,
}
#[derive(Debug)]
pub struct FormattingConfig {
pub tex_formatter: Formatter,
pub bib_formatter: Formatter,
pub latex_indent: LatexIndentConfig,
pub line_length: usize,
}
#[derive(Debug)]
pub enum Formatter {
Null,
Server,
LatexIndent,
}
#[derive(Debug)]
pub struct LatexIndentConfig {
pub local: Option<String>,
pub modify_line_breaks: bool,
}
#[derive(Debug)]
pub struct SymbolConfig {
pub allowed_patterns: Vec<Regex>,
pub ignored_patterns: Vec<Regex>,
}
#[derive(Debug)]
pub struct SyntaxConfig {
pub math_environments: FxHashSet<String>,
pub enum_environments: FxHashSet<String>,
pub verbatim_environments: FxHashSet<String>,
}
impl Default for Config {
fn default() -> Self {
Self {
root_dir: None,
build: BuildConfig::default(),
diagnostics: DiagnosticsConfig::default(),
formatting: FormattingConfig::default(),
synctex: None,
symbols: SymbolConfig::default(),
syntax: SyntaxConfig::default(),
}
}
}
impl Default for BuildConfig {
fn default() -> Self {
Self {
program: String::from("latexmk"),
args: ["-pdf", "-interaction=nonstopmode", "-synctex=1", "%f"]
.into_iter()
.map(String::from)
.collect(),
on_save: false,
forward_search_after: false,
output_dir: String::from("."),
}
}
}
impl Default for DiagnosticsConfig {
fn default() -> Self {
Self {
allowed_patterns: Vec::new(),
ignored_patterns: Vec::new(),
delay: Duration::from_millis(300),
chktex: ChktexConfig::default(),
}
}
}
impl Default for ChktexConfig {
fn default() -> Self {
Self {
on_open: false,
on_save: false,
on_edit: false,
}
}
}
impl Default for FormattingConfig {
fn default() -> Self {
Self {
tex_formatter: Formatter::LatexIndent,
bib_formatter: Formatter::Server,
line_length: 80,
latex_indent: LatexIndentConfig::default(),
}
}
}
impl Default for LatexIndentConfig {
fn default() -> Self {
Self {
local: None,
modify_line_breaks: false,
}
}
}
impl Default for SymbolConfig {
fn default() -> Self {
Self {
allowed_patterns: Vec::new(),
ignored_patterns: Vec::new(),
}
}
}
impl Default for SyntaxConfig {
fn default() -> Self {
let math_environments = [
"align",
"align*",
"alignat",
"alignat*",
"aligned",
"aligned*",
"alignedat",
"alignedat*",
"array",
"array*",
"Bmatrix",
"Bmatrix*",
"bmatrix",
"bmatrix*",
"cases",
"cases*",
"CD",
"CD*",
"eqnarray",
"eqnarray*",
"equation",
"equation*",
"IEEEeqnarray",
"IEEEeqnarray*",
"subequations",
"subequations*",
"gather",
"gather*",
"gathered",
"gathered*",
"matrix",
"matrix*",
"multline",
"multline*",
"pmatrix",
"pmatrix*",
"smallmatrix",
"smallmatrix*",
"split",
"split*",
"subarray",
"subarray*",
"Vmatrix",
"Vmatrix*",
"vmatrix",
"vmatrix*",
]
.into_iter()
.map(String::from)
.collect();
let enum_environments = ["enumerate", "itemize", "description"]
.into_iter()
.map(String::from)
.collect();
let verbatim_environments = ["pycode", "minted", "asy", "lstlisting", "verbatim"]
.into_iter()
.map(String::from)
.collect();
Self {
math_environments,
enum_environments,
verbatim_environments,
}
}
}

View file

@ -1,15 +0,0 @@
pub mod analysis;
mod context;
pub mod diagnostics;
mod discovery;
mod document;
pub mod parse;
mod workspace;
pub use {context::*, discovery::*, document::*, workspace::*};
#[salsa::interned]
pub struct Word {
#[return_ref]
pub text: String,
}

View file

@ -1,219 +0,0 @@
pub mod label;
use rowan::{ast::AstNode, NodeOrToken, TextRange};
use syntax::latex::{self, HasCurly};
use crate::Db;
use super::Word;
#[salsa::tracked]
pub struct TexLink {
pub kind: TexLinkKind,
pub path: Word,
pub range: TextRange,
pub base_dir: Option<Word>,
}
impl TexLink {
fn of_include(db: &dyn Db, node: latex::SyntaxNode, results: &mut Vec<Self>) -> Option<()> {
let include = latex::Include::cast(node)?;
let kind = match include.syntax().kind() {
latex::LATEX_INCLUDE => TexLinkKind::Tex,
latex::BIBLATEX_INCLUDE | latex::BIBTEX_INCLUDE => TexLinkKind::Bib,
latex::PACKAGE_INCLUDE => TexLinkKind::Sty,
latex::CLASS_INCLUDE => TexLinkKind::Cls,
_ => return None,
};
for path in include.path_list()?.keys() {
results.push(Self::new(
db,
kind,
Word::new(db, path.to_string()),
latex::small_range(&path),
None,
));
}
Some(())
}
fn of_import(db: &dyn Db, node: latex::SyntaxNode, results: &mut Vec<Self>) -> Option<()> {
let import = latex::Import::cast(node)?;
let mut base_dir = import.directory()?.key()?.to_string();
if !base_dir.ends_with('/') {
base_dir.push('/');
}
let path = import.file()?.key()?;
results.push(Self::new(
db,
TexLinkKind::Tex,
Word::new(db, path.to_string()),
latex::small_range(&path),
Some(Word::new(db, base_dir)),
));
Some(())
}
}
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
pub enum TexLinkKind {
Sty,
Cls,
Tex,
Bib,
}
impl TexLinkKind {
pub fn extensions(self) -> &'static [&'static str] {
match self {
Self::Sty => &["sty"],
Self::Cls => &["cls"],
Self::Tex => &["tex"],
Self::Bib => &["bib"],
}
}
}
#[salsa::tracked]
pub struct TheoremEnvironment {
pub name: Word,
pub description: Word,
}
impl TheoremEnvironment {
fn of_definition(db: &dyn Db, node: latex::SyntaxNode, results: &mut Vec<Self>) -> Option<()> {
let theorem = latex::TheoremDefinition::cast(node)?;
let name = theorem.name()?.key()?.to_string();
let description = theorem.description()?;
let description = description.content_text()?;
results.push(Self::new(
db,
Word::new(db, name),
Word::new(db, description),
));
Some(())
}
}
#[salsa::tracked]
pub struct GraphicsPath {
#[return_ref]
pub path: String,
}
impl GraphicsPath {
pub fn of_command(db: &dyn Db, node: latex::SyntaxNode, results: &mut Vec<Self>) -> Option<()> {
let definition = latex::GraphicsPath::cast(node)?;
for path in definition
.path_list()
.filter_map(|group| group.key())
.map(|path| path.to_string())
{
results.push(GraphicsPath::new(db, path));
}
Some(())
}
}
#[salsa::tracked]
pub struct TexAnalysis {
#[return_ref]
pub links: Vec<TexLink>,
#[return_ref]
pub labels: Vec<label::Name>,
#[return_ref]
pub label_numbers: Vec<label::Number>,
#[return_ref]
pub theorem_environments: Vec<TheoremEnvironment>,
#[return_ref]
pub graphics_paths: Vec<GraphicsPath>,
#[return_ref]
pub command_name_ranges: Vec<TextRange>,
#[return_ref]
pub environment_names: Vec<String>,
}
#[salsa::tracked]
impl TexAnalysis {
#[salsa::tracked]
pub fn has_document_environment(self, db: &dyn Db) -> bool {
self.environment_names(db)
.iter()
.any(|name| name == "document")
}
}
impl TexAnalysis {
pub(super) fn analyze(db: &dyn Db, root: &latex::SyntaxNode) -> Self {
let mut links = Vec::new();
let mut labels = Vec::new();
let mut label_numbers = Vec::new();
let mut theorem_environments = Vec::new();
let mut graphics_paths = Vec::new();
let mut command_name_ranges = Vec::new();
let mut environment_names = Vec::new();
for element in root.descendants_with_tokens() {
match element {
NodeOrToken::Token(token) if token.kind() == latex::COMMAND_NAME => {
command_name_ranges.push(token.text_range());
}
NodeOrToken::Token(_) => {}
NodeOrToken::Node(node) => {
TexLink::of_include(db, node.clone(), &mut links)
.or_else(|| TexLink::of_import(db, node.clone(), &mut links))
.or_else(|| label::Name::of_definition(db, node.clone(), &mut labels))
.or_else(|| label::Name::of_reference(db, node.clone(), &mut labels))
.or_else(|| label::Name::of_reference_range(db, node.clone(), &mut labels))
.or_else(|| label::Number::of_number(db, node.clone(), &mut label_numbers))
.or_else(|| {
TheoremEnvironment::of_definition(
db,
node.clone(),
&mut theorem_environments,
)
})
.or_else(|| GraphicsPath::of_command(db, node.clone(), &mut graphics_paths))
.or_else(|| {
let range = latex::GenericCommand::cast(node.clone())?
.name()?
.text_range();
command_name_ranges.push(range);
Some(())
})
.or_else(|| {
let begin = latex::Begin::cast(node.clone())?;
environment_names.push(begin.name()?.key()?.to_string());
Some(())
});
}
};
}
Self::new(
db,
links,
labels,
label_numbers,
theorem_environments,
graphics_paths,
command_name_ranges,
environment_names,
)
}
}

View file

@ -1,123 +0,0 @@
use rowan::{
ast::{AstNode, AstPtr},
TextRange,
};
use syntax::latex;
use crate::{db::Word, Db};
#[salsa::tracked]
pub struct Number {
pub name: Word,
pub text: Word,
}
impl Number {
pub(super) fn of_number(
db: &dyn Db,
node: latex::SyntaxNode,
results: &mut Vec<Self>,
) -> Option<()> {
let number = latex::LabelNumber::cast(node)?;
let name = number.name()?.key()?.to_string();
let text = number
.text()?
.syntax()
.descendants_with_tokens()
.filter_map(|element| element.into_node())
.find(|node| node.kind() == latex::TEXT || node.kind() == latex::MIXED_GROUP)?
.text()
.to_string();
results.push(Self::new(db, Word::new(db, name), Word::new(db, text)));
Some(())
}
}
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub enum Origin {
Definition(AstPtr<latex::LabelDefinition>),
Reference(AstPtr<latex::LabelReference>),
ReferenceRange(AstPtr<latex::LabelReferenceRange>),
}
impl Origin {
pub fn as_definition(&self) -> Option<&AstPtr<latex::LabelDefinition>> {
match self {
Self::Definition(ptr) => Some(ptr),
_ => None,
}
}
}
#[salsa::tracked]
pub struct Name {
pub origin: Origin,
pub name: Word,
pub range: TextRange,
}
impl Name {
pub(super) fn of_definition(
db: &dyn Db,
node: latex::SyntaxNode,
results: &mut Vec<Self>,
) -> Option<()> {
let label = latex::LabelDefinition::cast(node)?;
let name = label.name()?.key()?;
results.push(Self::new(
db,
Origin::Definition(AstPtr::new(&label)),
Word::new(db, name.to_string()),
latex::small_range(&name),
));
Some(())
}
pub(super) fn of_reference(
db: &dyn Db,
node: latex::SyntaxNode,
results: &mut Vec<Self>,
) -> Option<()> {
let label = latex::LabelReference::cast(node)?;
for name in label.name_list()?.keys() {
results.push(Self::new(
db,
Origin::Reference(AstPtr::new(&label)),
Word::new(db, name.to_string()),
latex::small_range(&name),
));
}
Some(())
}
pub(super) fn of_reference_range(
db: &dyn Db,
node: latex::SyntaxNode,
results: &mut Vec<Self>,
) -> Option<()> {
let label = latex::LabelReferenceRange::cast(node)?;
if let Some(name) = label.from().and_then(|name| name.key()) {
results.push(Self::new(
db,
Origin::ReferenceRange(AstPtr::new(&label)),
Word::new(db, name.to_string()),
latex::small_range(&name),
));
}
if let Some(name) = label.to().and_then(|name| name.key()) {
results.push(Self::new(
db,
Origin::ReferenceRange(AstPtr::new(&label)),
Word::new(db, name.to_string()),
latex::small_range(&name),
));
}
Some(())
}
}

View file

@ -1,18 +0,0 @@
use crate::Config;
/// Contains the global context of the server throughout the application.
#[salsa::input(singleton)]
pub struct ServerContext {
/// The server configuration which is extracted from either
/// the `workspace/configuration` or `workspace/didChangeConfiguration` messages.
#[return_ref]
pub config: Config,
/// Disable usage of `isIncomplete = false` in completion lists.
///
/// Due to the large number of completion results,
/// the server can only send a subset of the items most of the time.
/// When the filtered list is small enough, `CompletionList.isIncomplete` can be set to `false`.
/// On VSCode, this optimization should not be done so this flag is needed.
pub always_incomplete_completion_list: bool,
}

View file

@ -1,178 +0,0 @@
pub mod bib;
pub mod log;
pub mod tex;
use distro::Language;
use lsp_types::{DiagnosticSeverity, NumberOrString, Range};
use rustc_hash::FxHashMap;
use crate::{db::workspace::Workspace, util, Db};
use super::document::Document;
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Diagnostic {
pub severity: DiagnosticSeverity,
pub range: Range,
pub code: DiagnosticCode,
pub message: String,
}
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Hash)]
pub enum DiagnosticCode {
Tex(TexCode),
Bib(BibCode),
Log(Document),
Chktex(String),
}
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
pub enum TexCode {
UnexpectedRCurly,
RCurlyInserted,
MismatchedEnvironment,
}
impl From<TexCode> for String {
fn from(code: TexCode) -> Self {
match code {
TexCode::UnexpectedRCurly => "Unexpected \"}\"".to_string(),
TexCode::RCurlyInserted => "Missing \"}\" inserted".to_string(),
TexCode::MismatchedEnvironment => "Mismatched environment".to_string(),
}
}
}
impl From<TexCode> for NumberOrString {
fn from(code: TexCode) -> Self {
match code {
TexCode::UnexpectedRCurly => NumberOrString::Number(1),
TexCode::RCurlyInserted => NumberOrString::Number(2),
TexCode::MismatchedEnvironment => NumberOrString::Number(3),
}
}
}
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
#[allow(clippy::enum_variant_names)]
pub enum BibCode {
ExpectingLCurly,
ExpectingKey,
ExpectingRCurly,
ExpectingEq,
ExpectingFieldValue,
}
impl From<BibCode> for String {
fn from(code: BibCode) -> Self {
match code {
BibCode::ExpectingLCurly => "Expecting a curly bracket: \"{\"".to_string(),
BibCode::ExpectingKey => "Expecting a key".to_string(),
BibCode::ExpectingRCurly => "Expecting a curly bracket: \"}\"".to_string(),
BibCode::ExpectingEq => "Expecting an equality sign: \"=\"".to_string(),
BibCode::ExpectingFieldValue => "Expecting a field value".to_string(),
}
}
}
impl From<BibCode> for NumberOrString {
fn from(code: BibCode) -> Self {
match code {
BibCode::ExpectingLCurly => NumberOrString::Number(4),
BibCode::ExpectingKey => NumberOrString::Number(5),
BibCode::ExpectingRCurly => NumberOrString::Number(6),
BibCode::ExpectingEq => NumberOrString::Number(7),
BibCode::ExpectingFieldValue => NumberOrString::Number(8),
}
}
}
#[salsa::tracked(return_ref)]
pub fn collect(db: &dyn Db, workspace: Workspace) -> FxHashMap<Document, Vec<Diagnostic>> {
let mut results: FxHashMap<Document, Vec<Diagnostic>> = FxHashMap::default();
for document in workspace.documents(db).iter().copied() {
match document.language(db) {
Language::Tex => {
results.entry(document).or_default().extend(
tex::collect(db, document)
.iter()
.chain(document.linter(db).chktex(db))
.cloned(),
);
}
Language::Bib => {
results
.entry(document)
.or_default()
.extend(bib::collect(db, document).iter().cloned());
}
Language::Log => {
log::collect(db, workspace, document)
.iter()
.for_each(|(document, diagnostics)| {
results
.entry(*document)
.or_default()
.extend(diagnostics.clone());
});
}
Language::Root | Language::Tectonic => {}
}
}
results
}
#[salsa::tracked]
pub fn collect_filtered(
db: &dyn Db,
workspace: Workspace,
) -> FxHashMap<Document, Vec<lsp_types::Diagnostic>> {
let all_diagnostics = collect(db, workspace);
let mut all_filtered: FxHashMap<Document, Vec<lsp_types::Diagnostic>> = FxHashMap::default();
let config = &db.config().diagnostics;
for document in workspace.documents(db) {
let mut filtered = Vec::new();
if !matches!(document.language(db), Language::Tex | Language::Bib) {
continue;
}
if let Some(diagnostics) = all_diagnostics.get(document) {
for diagnostic in diagnostics.iter().filter(|diag| {
util::regex_filter::filter(
&diag.message,
&config.allowed_patterns,
&config.ignored_patterns,
)
}) {
let source = match diagnostic.code {
DiagnosticCode::Tex(_) | DiagnosticCode::Bib(_) => "texlab",
DiagnosticCode::Log(_) => "latex-build",
DiagnosticCode::Chktex(_) => "chktex",
};
let code = match diagnostic.code.clone() {
DiagnosticCode::Tex(code) => Some(code.into()),
DiagnosticCode::Bib(code) => Some(code.into()),
DiagnosticCode::Chktex(code) => Some(NumberOrString::String(code)),
DiagnosticCode::Log(_) => None,
};
filtered.push(lsp_types::Diagnostic {
range: diagnostic.range,
code,
severity: Some(diagnostic.severity),
message: diagnostic.message.clone(),
source: Some(source.to_string()),
..Default::default()
});
}
}
all_filtered.insert(*document, filtered);
}
all_filtered
}

View file

@ -1,108 +0,0 @@
use lsp_types::DiagnosticSeverity;
use rowan::{ast::AstNode, TextRange};
use syntax::bibtex::{self, HasDelims, HasEq, HasName, HasType, HasValue};
use crate::{db::document::Document, util::line_index_ext::LineIndexExt, Db};
use super::{BibCode, Diagnostic, DiagnosticCode};
#[salsa::tracked(return_ref)]
pub fn collect(db: &dyn Db, document: Document) -> Vec<Diagnostic> {
let mut results = Vec::new();
let data = match document.parse(db).as_bib() {
Some(data) => data,
None => return results,
};
for node in data.root(db).descendants() {
analyze_entry(db, document, node.clone(), &mut results)
.or_else(|| analyze_field(db, document, node, &mut results));
}
results
}
fn analyze_entry(
db: &dyn Db,
document: Document,
node: bibtex::SyntaxNode,
results: &mut Vec<Diagnostic>,
) -> Option<()> {
let line_index = document.line_index(db);
let entry = bibtex::Entry::cast(node)?;
if entry.left_delim_token().is_none() {
let code = BibCode::ExpectingLCurly;
results.push(Diagnostic {
severity: DiagnosticSeverity::ERROR,
range: line_index.line_col_lsp_range(entry.type_token()?.text_range()),
code: DiagnosticCode::Bib(code),
message: String::from(code),
});
return Some(());
}
if entry.name_token().is_none() {
let code = BibCode::ExpectingKey;
results.push(Diagnostic {
severity: DiagnosticSeverity::ERROR,
range: line_index.line_col_lsp_range(entry.left_delim_token()?.text_range()),
code: DiagnosticCode::Bib(code),
message: String::from(code),
});
return Some(());
}
if entry.right_delim_token().is_none() {
let code = BibCode::ExpectingRCurly;
results.push(Diagnostic {
severity: DiagnosticSeverity::ERROR,
range: line_index
.line_col_lsp_range(TextRange::empty(entry.syntax().text_range().end())),
code: DiagnosticCode::Bib(code),
message: String::from(code),
});
return Some(());
}
Some(())
}
fn analyze_field(
db: &dyn Db,
document: Document,
node: bibtex::SyntaxNode,
results: &mut Vec<Diagnostic>,
) -> Option<()> {
let line_index = document.line_index(db);
let field = bibtex::Field::cast(node)?;
if field.eq_token().is_none() {
let code = BibCode::ExpectingEq;
results.push(Diagnostic {
severity: DiagnosticSeverity::ERROR,
range: line_index.line_col_lsp_range(field.name_token()?.text_range()),
code: DiagnosticCode::Bib(code),
message: String::from(code),
});
return Some(());
}
if field.value().is_none() {
let code = BibCode::ExpectingFieldValue;
results.push(Diagnostic {
severity: DiagnosticSeverity::ERROR,
range: line_index.line_col_lsp_range(field.name_token()?.text_range()),
code: DiagnosticCode::Bib(code),
message: String::from(code),
});
return Some(());
}
Some(())
}

View file

@ -1,100 +0,0 @@
use lsp_types::{DiagnosticSeverity, Position, Range, Url};
use rowan::{TextLen, TextRange, TextSize};
use rustc_hash::FxHashMap;
use syntax::{BuildError, BuildErrorLevel};
use crate::{
db::{document::Document, workspace::Workspace},
util::line_index_ext::LineIndexExt,
Db,
};
use super::{Diagnostic, DiagnosticCode};
#[salsa::tracked(return_ref)]
pub fn collect(
db: &dyn Db,
workspace: Workspace,
log_document: Document,
) -> FxHashMap<Document, Vec<Diagnostic>> {
let mut results = FxHashMap::default();
let log = match log_document.parse(db).as_log() {
Some(data) => data.log(db),
None => return results,
};
let root_document = match workspace.parents(db, log_document).iter().next().copied() {
Some(document) => document,
None => return results,
};
let base_path = match root_document
.location(db)
.path(db)
.as_deref()
.and_then(|path| path.parent())
{
Some(path) => path,
None => return results,
};
for error in &log.errors {
let full_path = base_path.join(&error.relative_path);
let full_path_uri = if let Ok(uri) = Url::from_file_path(&full_path) {
uri
} else {
continue;
};
let severity = match error.level {
BuildErrorLevel::Error => DiagnosticSeverity::ERROR,
BuildErrorLevel::Warning => DiagnosticSeverity::WARNING,
};
let range = find_range_of_hint(db, workspace, &full_path_uri, error).unwrap_or_else(|| {
let line = error.line.unwrap_or(0);
Range::new(Position::new(line, 0), Position::new(line, 0))
});
let diagnostic = Diagnostic {
severity,
range,
code: DiagnosticCode::Log(log_document),
message: error.message.clone(),
};
let tex_document = workspace
.lookup_uri(db, &full_path_uri)
.unwrap_or(root_document);
results.entry(tex_document).or_default().push(diagnostic);
}
results
}
fn find_range_of_hint(
db: &dyn Db,
workspace: Workspace,
uri: &Url,
error: &BuildError,
) -> Option<Range> {
let document = workspace.lookup_uri(db, uri)?;
let text = document.text(db);
let line = error.line? as usize;
let hint = error.hint.as_deref()?;
let line_index = document.line_index(db);
let line_start = line_index.newlines.get(line).copied()?;
let line_end = line_index
.newlines
.get(line + 1)
.copied()
.unwrap_or(text.text_len());
let line_text = &text[line_start.into()..line_end.into()];
let hint_start = line_start + TextSize::try_from(line_text.find(hint)?).unwrap();
let hint_end = hint_start + hint.text_len();
Some(line_index.line_col_lsp_range(TextRange::new(hint_start, hint_end)))
}

View file

@ -1,148 +0,0 @@
use lsp_types::DiagnosticSeverity;
use rowan::{ast::AstNode, NodeOrToken, TextRange};
use syntax::latex;
use crate::{db::document::Document, util::line_index_ext::LineIndexExt, Db};
use super::{Diagnostic, DiagnosticCode, TexCode};
#[salsa::tracked]
pub fn collect(db: &dyn Db, document: Document) -> Vec<Diagnostic> {
let mut results = Vec::new();
if !document.location(db).uri(db).as_str().ends_with(".tex") {
return results;
}
let data = match document.parse(db).as_tex() {
Some(data) => data,
None => return results,
};
let mut traversal = data.root(db).preorder();
while let Some(event) = traversal.next() {
match event {
rowan::WalkEvent::Enter(node) => {
if let Some(environment) = latex::Environment::cast(node.clone()) {
if environment
.begin()
.and_then(|begin| begin.name())
.and_then(|name| name.key())
.map_or(false, |name| {
db.config()
.syntax
.verbatim_environments
.contains(&name.to_string())
})
{
traversal.skip_subtree();
continue;
}
}
analyze_environment(db, document, node.clone(), &mut results)
.or_else(|| analyze_curly_group(db, document, node.clone(), &mut results))
.or_else(|| analyze_curly_braces(document, db, node, &mut results));
}
rowan::WalkEvent::Leave(_) => {
continue;
}
};
}
results
}
fn analyze_environment(
db: &dyn Db,
document: Document,
node: latex::SyntaxNode,
results: &mut Vec<Diagnostic>,
) -> Option<()> {
let environment = latex::Environment::cast(node)?;
let name1 = environment.begin()?.name()?.key()?;
let name2 = environment.end()?.name()?.key()?;
if name1 != name2 {
let code = TexCode::MismatchedEnvironment;
results.push(Diagnostic {
severity: DiagnosticSeverity::ERROR,
range: document
.line_index(db)
.line_col_lsp_range(latex::small_range(&name1)),
code: DiagnosticCode::Tex(code),
message: String::from(code),
});
}
Some(())
}
fn analyze_curly_group(
db: &dyn Db,
document: Document,
node: latex::SyntaxNode,
results: &mut Vec<Diagnostic>,
) -> Option<()> {
if !matches!(
node.kind(),
latex::CURLY_GROUP
| latex::CURLY_GROUP_COMMAND
| latex::CURLY_GROUP_KEY_VALUE
| latex::CURLY_GROUP_WORD
| latex::CURLY_GROUP_WORD_LIST
) {
return None;
}
let is_inside_verbatim_environment = node
.ancestors()
.filter_map(latex::Environment::cast)
.filter_map(|env| env.begin())
.filter_map(|begin| begin.name())
.filter_map(|name| name.key())
.any(|name| {
["asy", "lstlisting", "minted", "verbatim"].contains(&name.to_string().as_str())
});
if !is_inside_verbatim_environment
&& !node
.children_with_tokens()
.filter_map(NodeOrToken::into_token)
.any(|token| token.kind() == latex::R_CURLY)
{
let code = TexCode::RCurlyInserted;
results.push(Diagnostic {
severity: DiagnosticSeverity::ERROR,
range: document
.line_index(db)
.line_col_lsp_range(TextRange::empty(node.text_range().end())),
code: DiagnosticCode::Tex(code),
message: String::from(code),
});
}
Some(())
}
fn analyze_curly_braces(
document: Document,
db: &dyn Db,
node: rowan::SyntaxNode<latex::LatexLanguage>,
results: &mut Vec<Diagnostic>,
) -> Option<()> {
if node.kind() == latex::ERROR && node.first_token()?.text() == "}" {
let code = TexCode::UnexpectedRCurly;
results.push(Diagnostic {
severity: DiagnosticSeverity::ERROR,
range: document
.line_index(db)
.line_col_lsp_range(node.text_range()),
code: DiagnosticCode::Tex(code),
message: String::from(code),
});
Some(())
} else {
None
}
}

View file

@ -1,259 +0,0 @@
use std::path::Path;
use distro::Language;
use itertools::Itertools;
use lsp_types::Url;
use rustc_hash::FxHashSet;
use crate::{util::HOME_DIR, Db};
use super::{
analysis::TexLink,
document::{Document, Location, Owner},
workspace::Workspace,
};
#[salsa::accumulator]
pub struct MissingDependencies(MissingDependency);
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
pub struct MissingDependency {
pub location: Location,
}
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
pub struct Dependency {
pub source: Document,
pub target: Document,
pub origin: Option<DependencyOrigin>,
}
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
pub struct DependencyOrigin {
pub link: TexLink,
pub old_base_dir: Location,
pub new_base_dir: Location,
}
pub fn hidden_dependencies(
db: &dyn Db,
document: Document,
base_dir: Location,
dependencies: &mut Vec<Dependency>,
) {
let uri = document.location(db).uri(db).as_str();
if document.language(db) == Language::Tex && !uri.ends_with(".aux") {
dependencies.extend(hidden_dependency(db, document, base_dir, "log"));
dependencies.extend(hidden_dependency(db, document, base_dir, "aux"));
}
}
#[salsa::tracked]
pub fn hidden_dependency(
db: &dyn Db,
source: Document,
base_dir: Location,
extension: &'static str,
) -> Option<Dependency> {
let workspace = Workspace::get(db);
let stem = source.location(db).stem(db)?;
let name = format!("{stem}.{extension}");
let location = workspace.output_dir(db, base_dir).join(db, &name)?;
match workspace.lookup(db, location) {
Some(target) => Some(Dependency {
source,
target,
origin: None,
}),
None => {
MissingDependencies::push(db, MissingDependency { location });
None
}
}
}
pub fn source_dependencies<'db>(
db: &'db dyn Db,
source: Document,
base_dir: Location,
) -> impl Iterator<Item = Dependency> + 'db {
source
.parse(db)
.as_tex()
.into_iter()
.flat_map(|data| data.analyze(db).links(db))
.filter_map(move |link| source_dependency(db, source, base_dir, *link))
}
#[salsa::tracked]
pub fn source_dependency(
db: &dyn Db,
source: Document,
base_dir: Location,
link: TexLink,
) -> Option<Dependency> {
let workspace = Workspace::get(db);
let stem = link.path(db).text(db);
let mut file_names = vec![stem.clone()];
link.kind(db)
.extensions()
.iter()
.map(|ext| format!("{stem}.{ext}"))
.for_each(|name| file_names.push(name));
let file_name_db = workspace.file_name_db(db);
let distro_files = file_names
.iter()
.filter_map(|name| file_name_db.get(name))
.filter(|path| {
HOME_DIR
.as_deref()
.map_or(false, |dir| path.starts_with(dir))
})
.flat_map(Url::from_file_path)
.map(|uri| Location::new(db, uri));
for location in file_names
.iter()
.filter_map(|file_name| base_dir.join(db, file_name))
.chain(distro_files)
{
match workspace.lookup(db, location) {
Some(target) => {
let origin = Some(DependencyOrigin {
link,
old_base_dir: base_dir,
new_base_dir: link
.base_dir(db)
.and_then(|path| base_dir.join(db, path.text(db)))
.unwrap_or(base_dir),
});
return Some(Dependency {
source,
target,
origin,
});
}
None => {
MissingDependencies::push(db, MissingDependency { location });
}
};
}
None
}
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
pub struct DependencyGraph {
pub start: Document,
pub edges: Vec<Dependency>,
}
impl DependencyGraph {
pub fn preorder(&self) -> impl DoubleEndedIterator<Item = Document> + '_ {
std::iter::once(self.start)
.chain(self.edges.iter().map(|group| group.target))
.unique()
}
}
#[salsa::tracked(return_ref)]
pub fn dependency_graph(db: &dyn Db, start: Document) -> DependencyGraph {
let workspace = Workspace::get(db);
let base_dir = workspace.working_dir(db, start.directory(db));
let mut edges = Vec::new();
let mut stack = vec![(start, base_dir)];
let mut visited = FxHashSet::default();
while let Some((source, base_dir)) = stack.pop() {
for edge in source_dependencies(db, source, base_dir) {
edges.push(edge);
if visited.insert(edge.target) {
stack.push((edge.target, edge.origin.unwrap().new_base_dir));
}
}
hidden_dependencies(db, source, base_dir, &mut edges);
}
DependencyGraph { start, edges }
}
pub fn discover_dependencies(db: &mut dyn Db) {
let workspace = Workspace::get(db);
loop {
let mut changed = discover_parents(db, workspace);
let paths: FxHashSet<_> = workspace
.documents(db)
.iter()
.flat_map(|&start| dependency_graph::accumulated::<MissingDependencies>(db, start))
.filter_map(|link| link.location.path(db).as_deref())
.filter(|path| path.is_file())
.map(Path::to_path_buf)
.collect();
for path in paths {
if workspace.lookup_path(db, &path).is_none() {
let language = Language::from_path(&path).unwrap_or(Language::Tex);
changed |= workspace.load(db, &path, language, Owner::Server).is_some();
}
}
if !changed {
break;
}
}
}
fn discover_parents(db: &mut dyn Db, workspace: Workspace) -> bool {
let mut changed = false;
let dirs: FxHashSet<_> = workspace
.documents(db)
.iter()
.flat_map(|document| document.ancestor_dirs(db))
.filter(|path| is_part_of_workspace(db, workspace, path))
.map(Path::to_path_buf)
.collect();
for path in dirs
.iter()
.flat_map(std::fs::read_dir)
.flatten()
.flatten()
.filter(|entry| entry.file_type().map_or(false, |ty| ty.is_file()))
.map(|entry| entry.path())
{
if let Some(language) = Language::from_path(&path) {
let can_be_parent = matches!(
language,
Language::Tex | Language::Root | Language::Tectonic
);
if can_be_parent && workspace.lookup_path(db, &path).is_none() {
changed |= workspace.load(db, &path, language, Owner::Server).is_some();
}
}
}
changed
}
fn is_part_of_workspace(db: &dyn Db, workspace: Workspace, path: &Path) -> bool {
let root_dirs = workspace.root_dirs(db);
if root_dirs.is_empty() {
return true;
}
root_dirs
.iter()
.filter_map(|dir| dir.path(db).as_deref())
.any(|dir| path.starts_with(dir))
}

View file

@ -1,161 +0,0 @@
use std::path::{Path, PathBuf};
use distro::Language;
use lsp_types::Url;
use parser::{parse_bibtex, parse_build_log, parse_latex};
use rowan::{TextRange, TextSize};
use crate::{
db::{
diagnostics::Diagnostic,
parse::{BibDocumentData, LogDocumentData, TectonicData, TexDocumentData, TexlabRootData},
},
util::line_index::LineIndex,
Db,
};
use super::{
analysis::TexLinkKind,
parse::{self, DocumentData},
};
#[salsa::interned]
pub struct Location {
#[return_ref]
pub uri: Url,
}
#[salsa::tracked]
impl Location {
#[salsa::tracked(return_ref)]
pub fn path(self, db: &dyn Db) -> Option<PathBuf> {
let uri = self.uri(db);
if uri.scheme() == "file" {
uri.to_file_path().ok()
} else {
None
}
}
pub fn stem(self, db: &dyn Db) -> Option<String> {
let path = self.uri(db).to_file_path().ok()?;
let stem = path.file_stem()?.to_str()?;
Some(String::from(stem))
}
pub fn join(self, db: &dyn Db, path: &str) -> Option<Location> {
let uri = self.uri(db).join(path).ok()?;
Some(Location::new(db, uri))
}
}
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
pub enum Owner {
Client,
Server,
}
#[salsa::input]
pub struct LinterData {
#[return_ref]
pub chktex: Vec<Diagnostic>,
}
#[salsa::input]
pub struct Document {
/// An object containing the URI of the document.
pub location: Location,
/// The source code.
#[return_ref]
pub text: String,
/// The programming language.
pub language: Language,
/// The program (either server or client) which opened the document.
pub owner: Owner,
/// An estimate of the current cursor position.
pub cursor: TextSize,
/// The diagnostics reported from external linters such as ChkTeX.
pub linter: LinterData,
}
impl Document {
pub fn edit(self, db: &mut dyn Db, range: TextRange, replace_with: &str) {
let mut text = self.set_text(db).to(String::new());
text.replace_range(std::ops::Range::<usize>::from(range), replace_with);
self.set_text(db).to(text);
self.set_cursor(db).to(range.start());
}
pub fn directory(self, db: &dyn Db) -> Location {
self.location(db).join(db, ".").unwrap()
}
pub fn ancestor_dirs<'db>(self, db: &'db dyn Db) -> impl Iterator<Item = &'db Path> + 'db {
self.location(db)
.path(db)
.as_deref()
.into_iter()
.flat_map(|path| path.ancestors())
.skip(1)
}
}
#[salsa::tracked]
impl Document {
#[salsa::tracked]
pub fn parse(self, db: &dyn Db) -> DocumentData {
let text = self.text(db);
match self.language(db) {
Language::Tex => {
let data = TexDocumentData::new(db, parse_latex(text));
parse::DocumentData::Tex(data)
}
Language::Bib => {
let data = BibDocumentData::new(db, parse_bibtex(text));
DocumentData::Bib(data)
}
Language::Log => {
let data = LogDocumentData::new(db, parse_build_log(text));
DocumentData::Log(data)
}
Language::Root => {
let data = TexlabRootData;
DocumentData::TexlabRoot(data)
}
Language::Tectonic => {
let data = TectonicData;
DocumentData::Tectonic(data)
}
}
}
#[salsa::tracked]
pub fn can_be_root(self, db: &dyn Db) -> bool {
self.parse(db).as_tex().map_or(false, |data| {
let analysis = data.analyze(db);
analysis.has_document_environment(db)
&& !analysis
.links(db)
.iter()
.filter(|link| link.kind(db) == TexLinkKind::Cls)
.any(|link| link.path(db).text(db) == "subfiles")
})
}
#[salsa::tracked]
pub fn can_be_built(self, db: &dyn Db) -> bool {
self.parse(db)
.as_tex()
.map_or(false, |data| data.analyze(db).has_document_environment(db))
}
#[salsa::tracked(return_ref)]
pub fn line_index(self, db: &dyn Db) -> LineIndex {
LineIndex::new(self.text(db))
}
}

View file

@ -1,77 +0,0 @@
use syntax::{bibtex, latex, BuildLog};
use crate::{db::analysis::TexAnalysis, Db};
#[salsa::interned]
pub struct TexDocumentData {
pub green: rowan::GreenNode,
}
impl TexDocumentData {
pub fn root(self, db: &dyn Db) -> latex::SyntaxNode {
latex::SyntaxNode::new_root(self.green(db))
}
}
#[salsa::tracked]
impl TexDocumentData {
#[salsa::tracked]
pub fn analyze(self, db: &dyn Db) -> TexAnalysis {
let root = latex::SyntaxNode::new_root(self.green(db));
TexAnalysis::analyze(db, &root)
}
}
#[salsa::interned]
pub struct BibDocumentData {
pub green: rowan::GreenNode,
}
impl BibDocumentData {
pub fn root(self, db: &dyn Db) -> bibtex::SyntaxNode {
bibtex::SyntaxNode::new_root(self.green(db))
}
}
#[salsa::interned]
pub struct LogDocumentData {
pub log: BuildLog,
}
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
pub struct TexlabRootData;
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
pub struct TectonicData;
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
pub enum DocumentData {
Tex(TexDocumentData),
Bib(BibDocumentData),
Log(LogDocumentData),
TexlabRoot(TexlabRootData),
Tectonic(TectonicData),
}
impl DocumentData {
pub fn as_tex(self) -> Option<TexDocumentData> {
match self {
Self::Tex(data) => Some(data),
_ => None,
}
}
pub fn as_bib(self) -> Option<BibDocumentData> {
match self {
Self::Bib(data) => Some(data),
_ => None,
}
}
pub fn as_log(self) -> Option<LogDocumentData> {
match self {
Self::Log(data) => Some(data),
_ => None,
}
}
}

View file

@ -1,211 +0,0 @@
use std::{
borrow::Cow,
path::{Path, PathBuf},
};
use distro::{FileNameDB, Language};
use itertools::Itertools;
use lsp_types::{ClientCapabilities, Url};
use rowan::TextSize;
use rustc_hash::FxHashSet;
use crate::{
db::document::{Document, Location},
Db,
};
use super::{
dependency_graph,
document::{LinterData, Owner},
Word,
};
#[salsa::input(singleton)]
pub struct Workspace {
#[return_ref]
pub documents: FxHashSet<Document>,
#[return_ref]
pub client_capabilities: ClientCapabilities,
#[return_ref]
pub root_dirs: Vec<Location>,
#[return_ref]
pub file_name_db: FileNameDB,
}
impl Workspace {
pub fn lookup(self, db: &dyn Db, location: Location) -> Option<Document> {
self.documents(db)
.iter()
.find(|document| document.location(db) == location)
.copied()
}
pub fn lookup_uri(self, db: &dyn Db, uri: &Url) -> Option<Document> {
self.documents(db)
.iter()
.find(|document| document.location(db).uri(db) == uri)
.copied()
}
pub fn lookup_path(self, db: &dyn Db, path: &Path) -> Option<Document> {
self.documents(db)
.iter()
.find(|document| document.location(db).path(db).as_deref() == Some(path))
.copied()
}
pub fn index_files<'db>(self, db: &'db dyn Db) -> impl Iterator<Item = Document> + 'db {
self.documents(db)
.iter()
.copied()
.filter(|&document| document.can_be_root(db))
}
pub fn open(
self,
db: &mut dyn Db,
uri: Url,
text: String,
language: Language,
owner: Owner,
) -> Document {
let location = Location::new(db, uri);
let cursor = TextSize::from(0);
match self.lookup(db, location) {
Some(document) => {
document.set_text(db).to(text);
document.set_language(db).to(language);
document.set_owner(db).to(owner);
document.set_cursor(db).to(cursor);
document
}
None => {
let document = Document::new(
db,
location,
text,
language,
owner,
cursor,
LinterData::new(db, Vec::new()),
);
let mut documents = self.set_documents(db).to(FxHashSet::default());
documents.insert(document);
self.set_documents(db).to(documents);
document
}
}
}
pub fn load(
self,
db: &mut dyn Db,
path: &Path,
language: Language,
owner: Owner,
) -> Option<Document> {
log::debug!("Loading document {} from disk...", path.display());
let uri = Url::from_file_path(path).ok()?;
let data = std::fs::read(path).ok()?;
let text = match String::from_utf8_lossy(&data) {
Cow::Borrowed(_) => unsafe { String::from_utf8_unchecked(data) },
Cow::Owned(text) => text,
};
Some(self.open(db, uri, text, language, owner))
}
pub fn watch(
self,
db: &dyn Db,
watcher: &mut dyn notify::Watcher,
watched_dirs: &mut FxHashSet<PathBuf>,
) {
let output_dirs = self
.documents(db)
.iter()
.map(|document| self.working_dir(db, document.directory(db)))
.map(|base_dir| self.output_dir(db, base_dir))
.filter_map(|location| location.path(db).as_deref());
self.documents(db)
.iter()
.map(|document| document.location(db))
.filter_map(|location| location.path(db).as_deref())
.filter_map(|path| path.parent())
.chain(output_dirs)
.filter(|path| watched_dirs.insert(path.to_path_buf()))
.for_each(|path| {
let _ = watcher.watch(path, notify::RecursiveMode::NonRecursive);
});
}
}
#[salsa::tracked]
impl Workspace {
#[salsa::tracked]
pub fn working_dir(self, db: &dyn Db, base_dir: Location) -> Location {
if let Some(dir) = db
.config()
.root_dir
.as_ref()
.and_then(|path| base_dir.join(db, path))
{
return dir;
}
self.documents(db)
.iter()
.filter(|doc| matches!(doc.language(db), Language::Root | Language::Tectonic))
.filter_map(|doc| doc.location(db).join(db, "."))
.find(|root_dir| {
base_dir
.uri(db)
.as_str()
.starts_with(root_dir.uri(db).as_str())
})
.unwrap_or(base_dir)
}
#[salsa::tracked]
pub fn output_dir(self, db: &dyn Db, base_dir: Location) -> Location {
let mut path = db.config().build.output_dir.clone();
if !path.ends_with('/') {
path.push('/');
}
base_dir.join(db, &path).unwrap_or(base_dir)
}
#[salsa::tracked(return_ref)]
pub fn parents(self, db: &dyn Db, child: Document) -> Vec<Document> {
self.index_files(db)
.filter(|&parent| dependency_graph(db, parent).preorder().contains(&child))
.collect()
}
#[salsa::tracked(return_ref)]
pub fn related(self, db: &dyn Db, child: Document) -> FxHashSet<Document> {
self.index_files(db)
.chain(self.documents(db).iter().copied())
.map(|start| dependency_graph(db, start).preorder().collect_vec())
.filter(|project| project.contains(&child))
.flatten()
.collect()
}
#[salsa::tracked]
pub fn number_of_label(self, db: &dyn Db, child: Document, name: Word) -> Option<Word> {
self.related(db, child)
.iter()
.filter_map(|document| document.parse(db).as_tex())
.flat_map(|data| data.analyze(db).label_numbers(db))
.find(|number| number.name(db) == name)
.map(|number| number.text(db))
}
}

View file

@ -7,12 +7,15 @@ use std::{
thread::{self, JoinHandle},
};
use base_db::Workspace;
use encoding_rs_io::DecodeReaderBytesBuilder;
use lsp_types::{notification::LogMessage, LogMessageParams, TextDocumentIdentifier, Url};
use lsp_types::{
notification::LogMessage, ClientCapabilities, LogMessageParams, TextDocumentIdentifier, Url,
};
use serde::{Deserialize, Serialize};
use serde_repr::{Deserialize_repr, Serialize_repr};
use crate::{client::LspClient, db::Workspace, util::capabilities::ClientCapabilitiesExt, Db};
use crate::{client::LspClient, util::capabilities::ClientCapabilitiesExt};
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
@ -46,42 +49,34 @@ pub struct Command {
}
impl Command {
pub fn new(db: &dyn Db, uri: Url, client: LspClient) -> Option<Self> {
let workspace = Workspace::get(db);
let document = match workspace.lookup_uri(db, &uri) {
Some(child) => workspace
.parents(db, child)
.iter()
.next()
.copied()
.unwrap_or(child),
None => return None,
};
pub fn new(
workspace: &Workspace,
uri: Url,
client: LspClient,
client_capabilities: &ClientCapabilities,
) -> Option<Self> {
let Some(document) = workspace
.lookup(&uri)
.map(|child| workspace.parents(child).into_iter().next().unwrap_or(child)) else { return None };
if document.location(db).path(db).is_none() {
let Some(path) = document.path.as_deref() else {
log::warn!("Document {uri} cannot be compiled; skipping...");
return None;
}
};
let config = &db.config().build;
let config = &workspace.config().build;
let program = config.program.clone();
let path = document.location(db).path(db).as_deref().unwrap();
let args = config
.args
.iter()
.map(|arg| replace_placeholder(arg, path))
.collect();
let working_dir = workspace
.working_dir(db, document.directory(db))
.path(db)
.clone()?;
let working_dir = workspace.current_dir(&document.dir).to_file_path().ok()?;
Some(Self {
uri: document.location(db).uri(db).clone(),
progress: workspace
.client_capabilities(db)
.has_work_done_progress_support(),
uri: document.uri.clone(),
progress: client_capabilities.has_work_done_progress_support(),
program,
args,
working_dir,

View file

@ -18,15 +18,22 @@ mod tikz_library;
mod user_command;
mod user_environment;
use lsp_types::{CompletionList, Position, Url};
use base_db::Workspace;
use lsp_types::{ClientCapabilities, ClientInfo, CompletionList, Position, Url};
use crate::{features::completion::builder::CompletionBuilder, util::cursor::CursorContext, Db};
use crate::{features::completion::builder::CompletionBuilder, util::cursor::CursorContext};
pub const COMPLETION_LIMIT: usize = 50;
pub fn complete(db: &dyn Db, uri: &Url, position: Position) -> Option<CompletionList> {
let context = CursorContext::new(db, uri, position, ())?;
let mut builder = CompletionBuilder::new(&context);
pub fn complete(
workspace: &Workspace,
uri: &Url,
position: Position,
client_capabilities: &ClientCapabilities,
client_info: Option<&ClientInfo>,
) -> Option<CompletionList> {
let context = CursorContext::new(workspace, uri, position, ())?;
let mut builder = CompletionBuilder::new(&context, client_capabilities, client_info);
log::debug!("[Completion] Cursor: {:?}", context.cursor);
entry_type::complete(&context, &mut builder);
field::complete(&context, &mut builder);

View file

@ -1,3 +1,4 @@
use base_db::DocumentData;
use rowan::ast::AstNode;
use syntax::latex;
@ -12,17 +13,17 @@ pub fn complete<'db>(
let (_, range, group) = context.find_curly_group_word()?;
latex::AcronymReference::cast(group.syntax().parent()?)?;
for document in context.related() {
if let Some(data) = document.parse(context.db).as_tex() {
for name in data
.root(context.db)
.descendants()
.filter_map(latex::AcronymDefinition::cast)
.filter_map(|node| node.name())
.filter_map(|name| name.key())
{
builder.glossary_entry(range, name.to_string());
}
for document in &context.related {
let DocumentData::Tex(data) = &document.data else { continue };
for name in data
.root_node()
.descendants()
.filter_map(latex::AcronymDefinition::cast)
.filter_map(|node| node.name())
.filter_map(|name| name.key())
{
builder.glossary_entry(range, name.to_string());
}
}

View file

@ -5,10 +5,7 @@ use crate::util::{components::COMPONENT_DATABASE, cursor::CursorContext};
use super::builder::CompletionBuilder;
pub fn complete<'db>(
context: &'db CursorContext,
builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
pub fn complete<'a>(context: &'a CursorContext, builder: &mut CompletionBuilder<'a>) -> Option<()> {
let token = context.cursor.as_tex()?;
let range = if token.kind() == latex::WORD {
@ -37,7 +34,7 @@ pub fn complete<'db>(
let command_name = command.name()?;
let command_name = &command_name.text()[1..];
for component in COMPONENT_DATABASE.linked_components(context.db, context.document) {
for component in COMPONENT_DATABASE.linked_components(&context.related) {
for component_command in component
.commands
.iter()

View file

@ -1,8 +1,9 @@
use base_db::Document;
use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
use itertools::Itertools;
use lsp_types::{
CompletionItem, CompletionItemKind, CompletionList, CompletionTextEdit, Documentation,
InsertTextFormat, MarkupContent, MarkupKind, TextEdit, Url,
ClientCapabilities, ClientInfo, CompletionItem, CompletionItemKind, CompletionList,
CompletionTextEdit, Documentation, InsertTextFormat, MarkupContent, MarkupKind, TextEdit, Url,
};
use once_cell::sync::Lazy;
use regex::Regex;
@ -14,33 +15,35 @@ use syntax::{
latex,
};
use crate::{
db::{Document, ServerContext, Workspace},
util::{
capabilities::ClientCapabilitiesExt,
cursor::{Cursor, CursorContext},
lang_data::{BibtexEntryTypeCategory, BibtexEntryTypeDoc, BibtexFieldDoc, LANGUAGE_DATA},
line_index_ext::LineIndexExt,
lsp_enums::Structure,
},
use crate::util::{
capabilities::ClientCapabilitiesExt,
cursor::{Cursor, CursorContext},
lang_data::{BibtexEntryTypeCategory, BibtexEntryTypeDoc, BibtexFieldDoc, LANGUAGE_DATA},
line_index_ext::LineIndexExt,
lsp_enums::Structure,
};
use super::COMPLETION_LIMIT;
pub struct CompletionBuilder<'db> {
context: &'db CursorContext<'db>,
items: Vec<Item<'db>>,
pub struct CompletionBuilder<'a> {
context: &'a CursorContext<'a>,
items: Vec<Item<'a>>,
matcher: SkimMatcherV2,
text_pattern: String,
file_pattern: String,
preselect: Option<String>,
snippets: bool,
markdown: bool,
item_kinds: &'db [CompletionItemKind],
item_kinds: &'a [CompletionItemKind],
always_incomplete: bool,
}
impl<'db> CompletionBuilder<'db> {
pub fn new(context: &'db CursorContext) -> Self {
impl<'a> CompletionBuilder<'a> {
pub fn new(
context: &'a CursorContext,
client_capabilities: &'a ClientCapabilities,
client_info: Option<&'a ClientInfo>,
) -> Self {
let items = Vec::new();
let matcher = SkimMatcherV2::default().ignore_case();
let text_pattern = match &context.cursor {
@ -94,7 +97,6 @@ impl<'db> CompletionBuilder<'db> {
.and_then(|name| name.key())
.map(|name| name.to_string());
let client_capabilities = context.workspace.client_capabilities(context.db);
let snippets = client_capabilities.has_snippet_support();
let markdown = client_capabilities.has_completion_markdown_support();
let item_kinds = client_capabilities
@ -105,6 +107,8 @@ impl<'db> CompletionBuilder<'db> {
.and_then(|cap| cap.value_set.as_deref())
.unwrap_or_default();
let always_incomplete = client_info.map_or(false, |info| info.name == "Visual Studio Code");
Self {
context,
items,
@ -115,6 +119,7 @@ impl<'db> CompletionBuilder<'db> {
snippets,
markdown,
item_kinds,
always_incomplete,
}
}
@ -133,8 +138,8 @@ impl<'db> CompletionBuilder<'db> {
pub fn generic_argument(
&mut self,
range: TextRange,
name: &'db str,
image: Option<&'db str>,
name: &'a str,
image: Option<&'a str>,
) -> Option<()> {
let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
self.items.push(Item {
@ -148,8 +153,7 @@ impl<'db> CompletionBuilder<'db> {
}
pub fn begin_snippet(&mut self, range: TextRange) -> Option<()> {
let capabilities = Workspace::get(self.context.db).client_capabilities(self.context.db);
if capabilities.has_snippet_support() {
if self.snippets {
let score = self.matcher.fuzzy_match("begin", &self.text_pattern[1..])?;
self.items.push(Item {
range,
@ -165,7 +169,7 @@ impl<'db> CompletionBuilder<'db> {
pub fn citation(
&mut self,
range: TextRange,
document: Document,
document: &'a Document,
entry: &bibtex::Entry,
) -> Option<()> {
let key = entry.name_token()?.to_string();
@ -202,7 +206,7 @@ impl<'db> CompletionBuilder<'db> {
Some(())
}
pub fn color_model(&mut self, range: TextRange, name: &'db str) -> Option<()> {
pub fn color_model(&mut self, range: TextRange, name: &'a str) -> Option<()> {
let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
self.items.push(Item {
range,
@ -214,7 +218,7 @@ impl<'db> CompletionBuilder<'db> {
Some(())
}
pub fn color(&mut self, range: TextRange, name: &'db str) -> Option<()> {
pub fn color(&mut self, range: TextRange, name: &'a str) -> Option<()> {
let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
self.items.push(Item {
range,
@ -229,10 +233,10 @@ impl<'db> CompletionBuilder<'db> {
pub fn component_command(
&mut self,
range: TextRange,
name: &'db str,
image: Option<&'db str>,
glyph: Option<&'db str>,
file_names: &'db [SmolStr],
name: &'a str,
image: Option<&'a str>,
glyph: Option<&'a str>,
file_names: &'a [SmolStr],
) -> Option<()> {
let score = self.matcher.fuzzy_match(name, &self.text_pattern[1..])?;
let data = Data::ComponentCommand {
@ -255,8 +259,8 @@ impl<'db> CompletionBuilder<'db> {
pub fn component_environment(
&mut self,
range: TextRange,
name: &'db str,
file_names: &'db [SmolStr],
name: &'a str,
file_names: &'a [SmolStr],
) -> Option<()> {
let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
self.items.push(Item {
@ -272,7 +276,7 @@ impl<'db> CompletionBuilder<'db> {
pub fn entry_type(
&mut self,
range: TextRange,
entry_type: &'db BibtexEntryTypeDoc,
entry_type: &'a BibtexEntryTypeDoc,
) -> Option<()> {
let score = self
.matcher
@ -288,7 +292,7 @@ impl<'db> CompletionBuilder<'db> {
Some(())
}
pub fn field(&mut self, range: TextRange, field: &'db BibtexFieldDoc) -> Option<()> {
pub fn field(&mut self, range: TextRange, field: &'a BibtexFieldDoc) -> Option<()> {
let score = self.matcher.fuzzy_match(&field.name, &self.text_pattern)?;
self.items.push(Item {
range,
@ -300,7 +304,7 @@ impl<'db> CompletionBuilder<'db> {
Some(())
}
pub fn class(&mut self, range: TextRange, name: &'db str) -> Option<()> {
pub fn class(&mut self, range: TextRange, name: &'a str) -> Option<()> {
let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
self.items.push(Item {
range,
@ -312,7 +316,7 @@ impl<'db> CompletionBuilder<'db> {
Some(())
}
pub fn package(&mut self, range: TextRange, name: &'db str) -> Option<()> {
pub fn package(&mut self, range: TextRange, name: &'a str) -> Option<()> {
let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
self.items.push(Item {
range,
@ -351,10 +355,10 @@ impl<'db> CompletionBuilder<'db> {
pub fn label(
&mut self,
range: TextRange,
name: &'db str,
name: &'a str,
kind: Structure,
header: Option<String>,
footer: Option<String>,
footer: Option<&'a str>,
text: String,
) -> Option<()> {
let score = self.matcher.fuzzy_match(&text, &self.text_pattern)?;
@ -374,7 +378,7 @@ impl<'db> CompletionBuilder<'db> {
Some(())
}
pub fn tikz_library(&mut self, range: TextRange, name: &'db str) -> Option<()> {
pub fn tikz_library(&mut self, range: TextRange, name: &'a str) -> Option<()> {
let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
self.items.push(Item {
range,
@ -386,7 +390,7 @@ impl<'db> CompletionBuilder<'db> {
Some(())
}
pub fn user_command(&mut self, range: TextRange, name: &'db str) -> Option<()> {
pub fn user_command(&mut self, range: TextRange, name: &'a str) -> Option<()> {
let score = self.matcher.fuzzy_match(name, &self.text_pattern[1..])?;
self.items.push(Item {
range,
@ -398,7 +402,7 @@ impl<'db> CompletionBuilder<'db> {
Some(())
}
pub fn user_environment(&mut self, range: TextRange, name: &'db str) -> Option<()> {
pub fn user_environment(&mut self, range: TextRange, name: &'a str) -> Option<()> {
let score = self.matcher.fuzzy_match(name, &self.text_pattern)?;
self.items.push(Item {
range,
@ -426,14 +430,17 @@ impl<'db> CompletionBuilder<'db> {
.map(|(i, item)| self.convert_item(item, i))
.collect();
let db = self.context.db;
let always_incomplete = ServerContext::get(db).always_incomplete_completion_list(db);
list.is_incomplete = always_incomplete || list.items.len() >= COMPLETION_LIMIT;
list.is_incomplete = self.always_incomplete || list.items.len() >= COMPLETION_LIMIT;
list
}
fn convert_item(&self, item: Item, index: usize) -> CompletionItem {
let range = self.context.line_index.line_col_lsp_range(item.range);
let range = self
.context
.document
.line_index
.line_col_lsp_range(item.range);
let preselect = item.preselect;
let mut item = match item.data {
Data::EntryType { entry_type } => CompletionItem {
@ -496,10 +503,7 @@ impl<'db> CompletionBuilder<'db> {
sort_text: Some(filter_text),
data: Some(
serde_json::to_value(CompletionItemData::Citation {
uri: document
.location(self.context.db)
.uri(self.context.db)
.clone(),
uri: document.uri.clone(),
key: key.clone(),
})
.unwrap(),
@ -582,7 +586,7 @@ impl<'db> CompletionBuilder<'db> {
label: name.into(),
kind: Some(kind.completion_kind()),
detail: header,
documentation: footer.map(Documentation::String),
documentation: footer.map(|footer| Documentation::String(footer.into())),
sort_text: Some(text.clone()),
filter_text: Some(text),
text_edit: Some(TextEdit::new(range, name.into()).into()),
@ -590,7 +594,6 @@ impl<'db> CompletionBuilder<'db> {
},
Data::UserCommand { name } => {
let detail = "user-defined".into();
let name = &name[1..];
CompletionItem {
kind: Some(Structure::Command.completion_kind()),
text_edit: Some(TextEdit::new(range, name.into()).into()),
@ -656,53 +659,53 @@ impl<'db> CompletionBuilder<'db> {
}
#[derive(Debug, Clone)]
struct Item<'db> {
struct Item<'a> {
range: TextRange,
data: Data<'db>,
data: Data<'a>,
preselect: bool,
score: i32,
}
#[derive(Debug, Clone)]
enum Data<'db> {
enum Data<'a> {
EntryType {
entry_type: &'db BibtexEntryTypeDoc,
entry_type: &'a BibtexEntryTypeDoc,
},
Field {
field: &'db BibtexFieldDoc,
field: &'a BibtexFieldDoc,
},
Argument {
name: &'db str,
image: Option<&'db str>,
name: &'a str,
image: Option<&'a str>,
},
BeginSnippet,
Citation {
document: Document,
document: &'a Document,
key: String,
filter_text: String,
category: BibtexEntryTypeCategory,
},
ComponentCommand {
name: &'db str,
image: Option<&'db str>,
glyph: Option<&'db str>,
file_names: &'db [SmolStr],
name: &'a str,
image: Option<&'a str>,
glyph: Option<&'a str>,
file_names: &'a [SmolStr],
},
ComponentEnvironment {
name: &'db str,
file_names: &'db [SmolStr],
name: &'a str,
file_names: &'a [SmolStr],
},
Class {
name: &'db str,
name: &'a str,
},
Package {
name: &'db str,
name: &'a str,
},
Color {
name: &'db str,
name: &'a str,
},
ColorModel {
name: &'db str,
name: &'a str,
},
GlossaryEntry {
name: String,
@ -714,20 +717,20 @@ enum Data<'db> {
name: String,
},
Label {
name: &'db str,
name: &'a str,
kind: Structure,
header: Option<String>,
footer: Option<String>,
footer: Option<&'a str>,
text: String,
},
UserCommand {
name: &'db str,
name: &'a str,
},
UserEnvironment {
name: &'db str,
name: &'a str,
},
TikzLibrary {
name: &'db str,
name: &'a str,
},
}
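
The builder now snapshots client capabilities once at construction rather than querying the database on every call, and it derives `always_incomplete` from the client name to work around VS Code's client-side filtering. A sketch of the nested-`Option` probing a helper like `has_snippet_support` has to perform; the field nesting follows `lsp_types`, but the helper itself is an assumption about the `ClientCapabilitiesExt` trait used above:

    use lsp_types::ClientCapabilities;

    // Walk the optional capability tree and default to `false` at any gap.
    fn has_snippet_support(caps: &ClientCapabilities) -> bool {
        caps.text_document
            .as_ref()
            .and_then(|td| td.completion.as_ref())
            .and_then(|completion| completion.completion_item.as_ref())
            .and_then(|item| item.snippet_support)
            .unwrap_or(false)
    }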

View file

@ -1,3 +1,4 @@
use base_db::DocumentData;
use rowan::{ast::AstNode, TextRange};
use syntax::{bibtex, latex};
@ -25,15 +26,11 @@ pub fn complete<'db>(
};
check_citation(context).or_else(|| check_acronym(context))?;
for document in context.related() {
if let Some(data) = document.parse(context.db).as_bib() {
for entry in data
.root(context.db)
.children()
.filter_map(bibtex::Entry::cast)
{
builder.citation(range, document, &entry);
}
for document in &context.related {
let DocumentData::Bib(data) = &document.data else { continue };
for entry in data.root_node().children().filter_map(bibtex::Entry::cast) {
builder.citation(range, document, &entry);
}
}

View file

@ -8,7 +8,7 @@ pub fn complete<'db>(
) -> Option<()> {
let range = context.cursor.command_range(context.offset)?;
for component in COMPONENT_DATABASE.linked_components(context.db, context.document) {
for component in COMPONENT_DATABASE.linked_components(&context.related) {
for command in &component.commands {
builder.component_command(
range,

View file

@ -8,7 +8,7 @@ pub fn complete<'db>(
) -> Option<()> {
let (_, range) = context.find_environment_name()?;
for component in COMPONENT_DATABASE.linked_components(context.db, context.document) {
for component in COMPONENT_DATABASE.linked_components(&context.related) {
for name in &component.environments {
builder.component_environment(range, name, &component.file_names);
}

View file

@ -1,3 +1,4 @@
use base_db::DocumentData;
use rowan::ast::AstNode;
use syntax::latex;
@ -12,22 +13,22 @@ pub fn complete<'db>(
let (_, range, group) = context.find_curly_group_word()?;
latex::GlossaryEntryReference::cast(group.syntax().parent()?)?;
for document in context.related() {
if let Some(data) = document.parse(context.db).as_tex() {
for node in data.root(context.db).descendants() {
if let Some(name) = latex::GlossaryEntryDefinition::cast(node.clone())
.and_then(|entry| entry.name())
.and_then(|name| name.key())
.map(|name| name.to_string())
{
builder.glossary_entry(range, name);
} else if let Some(name) = latex::AcronymDefinition::cast(node)
.and_then(|entry| entry.name())
.and_then(|name| name.key())
.map(|name| name.to_string())
{
builder.glossary_entry(range, name);
}
for document in &context.related {
let DocumentData::Tex(data) = &document.data else { continue };
for node in data.root_node().descendants() {
if let Some(name) = latex::GlossaryEntryDefinition::cast(node.clone())
.and_then(|entry| entry.name())
.and_then(|name| name.key())
.map(|name| name.to_string())
{
builder.glossary_entry(range, name);
} else if let Some(name) = latex::AcronymDefinition::cast(node)
.and_then(|entry| entry.name())
.and_then(|name| name.key())
.map(|name| name.to_string())
{
builder.glossary_entry(range, name);
}
}
}

View file

@ -35,7 +35,7 @@ pub fn complete<'db>(
}
}
let file_name_db = context.workspace.file_name_db(context.db);
let file_name_db = &context.workspace.distro().file_name_db;
for file_name in file_name_db
.iter()
.map(|(file_name, _)| file_name)

View file

@ -4,6 +4,7 @@ use std::{
path::{Path, PathBuf},
};
use base_db::DocumentData;
use rowan::{ast::AstNode, TextRange, TextSize};
use syntax::latex;
@ -15,12 +16,7 @@ pub fn complete<'db>(
context: &'db CursorContext,
builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
if context
.document
.location(context.db)
.path(context.db)
.is_none()
{
if context.document.path.is_none() {
return None;
}
@ -56,16 +52,10 @@ pub fn complete<'db>(
let mut dirs = vec![current_dir(context, &path_text, None)];
if include.kind() == latex::GRAPHICS_INCLUDE {
for document in context.related() {
if let Some(data) = document.parse(context.db).as_tex() {
for path in data
.analyze(context.db)
.graphics_paths(context.db)
.iter()
.map(|node| node.path(context.db))
{
dirs.push(current_dir(context, &path_text, Some(path)));
}
for document in &context.related {
let DocumentData::Tex(data) = &document.data else { continue };
for graphics_path in &data.semantics.graphics_paths {
dirs.push(current_dir(context, &path_text, Some(graphics_path)));
}
}
}
@ -103,16 +93,16 @@ fn current_dir(
) -> Option<PathBuf> {
let parent = context
.workspace
.parents(context.db, context.document)
.parents(context.document)
.iter()
.next()
.map_or(context.document, Clone::clone);
let path = context
.workspace
.working_dir(context.db, parent.directory(context.db))
.path(context.db)
.as_deref()?;
.current_dir(&parent.dir)
.to_file_path()
.ok()?;
let mut path = PathBuf::from(path.to_str()?.replace('\\', "/"));
if !path_text.is_empty() {
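
The rewritten `current_dir` resolves completion paths against the parent document's directory, folding in `\graphicspath` entries from the semantics and normalizing Windows separators. A std-only sketch of the same idea; this is a simplification, not the exact texlab logic:

    use std::path::PathBuf;

    // `parent_dir` plays the role of `workspace.current_dir(&parent.dir)`.
    fn current_dir(parent_dir: &str, path_text: &str, graphics_path: Option<&str>) -> Option<PathBuf> {
        // Normalize Windows separators so Path component logic behaves uniformly.
        let mut path = PathBuf::from(parent_dir.replace('\\', "/"));
        if let Some(graphics_path) = graphics_path {
            path.push(graphics_path);
        }
        if !path_text.is_empty() {
            // Complete inside the directory the user has typed so far.
            path.push(path_text);
            if !path_text.ends_with('/') {
                path.pop();
            }
        }
        Some(path)
    }

    fn main() {
        let dir = current_dir("/home/user/thesis", "figures/ch1/", Some("images"));
        println!("{dir:?}"); // e.g. /home/user/thesis/images/figures/ch1
    }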

View file

@ -1,3 +1,4 @@
use base_db::{semantics::tex::LabelKind, DocumentData};
use rowan::{ast::AstNode, TextRange};
use syntax::latex;
@ -11,50 +12,44 @@ pub fn complete<'db>(
) -> Option<()> {
let (range, is_math) = find_reference(context).or_else(|| find_reference_range(context))?;
let db = context.db;
for document in context.related() {
if let Some(data) = document.parse(db).as_tex() {
for label in data
.analyze(db)
.labels(db)
.iter()
.filter(|label| label.origin(db).as_definition().is_some())
{
match util::label::render(db, document, *label) {
Some(rendered_label) => {
let kind = match &rendered_label.object {
LabeledObject::Section { .. } => Structure::Section,
LabeledObject::Float { .. } => Structure::Float,
LabeledObject::Theorem { .. } => Structure::Theorem,
LabeledObject::Equation => Structure::Equation,
LabeledObject::EnumItem => Structure::Item,
};
for document in &context.related {
let DocumentData::Tex(data) = &document.data else { continue };
for label in data
.semantics
.labels
.iter()
.filter(|label| label.kind == LabelKind::Definition)
{
match util::label::render(context.workspace, &context.related, label) {
Some(rendered_label) => {
let kind = match &rendered_label.object {
LabeledObject::Section { .. } => Structure::Section,
LabeledObject::Float { .. } => Structure::Float,
LabeledObject::Theorem { .. } => Structure::Theorem,
LabeledObject::Equation => Structure::Equation,
LabeledObject::EnumItem => Structure::Item,
};
if is_math && kind != Structure::Equation {
continue;
}
let header = rendered_label.detail(db);
let footer = match &rendered_label.object {
LabeledObject::Float { caption, .. } => Some(caption.clone()),
_ => None,
};
let text = format!(
"{} {}",
label.name(db).text(db),
rendered_label.reference(db)
);
builder.label(range, label.name(db).text(db), kind, header, footer, text);
}
None => {
let kind = Structure::Label;
let header = None;
let footer = None;
let text = label.name(db).text(db).clone();
builder.label(range, label.name(db).text(db), kind, header, footer, text);
if is_math && kind != Structure::Equation {
continue;
}
let header = rendered_label.detail();
let footer = match &rendered_label.object {
LabeledObject::Float { caption, .. } => Some(caption.clone()),
_ => None,
};
let text = format!("{} {}", label.name.text, rendered_label.reference());
builder.label(range, &label.name.text, kind, header, footer, text);
}
None => {
let kind = Structure::Label;
let header = None;
let footer = None;
let text = label.name.text.clone();
builder.label(range, &label.name.text, kind, header, footer, text);
}
}
}

View file

@ -1,3 +1,5 @@
use base_db::DocumentData;
use crate::util::cursor::CursorContext;
use super::builder::CompletionBuilder;
@ -8,12 +10,10 @@ pub fn complete<'db>(
) -> Option<()> {
let (_, range) = context.find_environment_name()?;
let db = context.db;
for document in context.related() {
if let Some(data) = document.parse(db).as_tex() {
for environment in data.analyze(db).theorem_environments(db) {
builder.user_environment(range, environment.name(db).text(db));
}
for document in &context.related {
let DocumentData::Tex(data) = &document.data else { continue };
for theorem in &data.semantics.theorem_definitions {
builder.user_environment(range, &theorem.name.text);
}
}

View file

@ -1,3 +1,5 @@
use base_db::DocumentData;
use crate::util::cursor::CursorContext;
use super::builder::CompletionBuilder;
@ -7,22 +9,11 @@ pub fn complete<'db>(
builder: &mut CompletionBuilder<'db>,
) -> Option<()> {
let range = context.cursor.command_range(context.offset)?;
let token = context.cursor.as_tex()?;
let db = context.db;
for document in context.related() {
if let Some(data) = document.parse(db).as_tex() {
let text = document.text(db);
for name in data
.analyze(db)
.command_name_ranges(db)
.iter()
.copied()
.filter(|range| *range != token.text_range())
.map(|range| &text[std::ops::Range::<usize>::from(range)])
{
builder.user_command(range, name);
}
for document in &context.related {
let DocumentData::Tex(data) = &document.data else { continue };
for (_, name) in data.semantics.commands.iter().filter(|(r, _)| *r != range) {
builder.user_command(range, name);
}
}

View file

@ -1,3 +1,5 @@
use base_db::DocumentData;
use crate::util::cursor::CursorContext;
use super::builder::CompletionBuilder;
@ -8,16 +10,15 @@ pub fn complete<'db>(
) -> Option<()> {
let (name, range) = context.find_environment_name()?;
for document in context.related() {
if let Some(data) = document.parse(context.db).as_tex() {
for name in data
.analyze(context.db)
.environment_names(context.db)
.iter()
.filter(|n| n.as_str() != name)
{
builder.user_environment(range, name);
}
for document in &context.related {
let DocumentData::Tex(data) = &document.data else { continue };
for name in data
.semantics
.environments
.iter()
.filter(|n| n.as_str() != name)
{
builder.user_environment(range, name);
}
}

View file

@ -4,21 +4,18 @@ mod entry;
mod label;
mod string;
use base_db::{Document, Workspace};
use lsp_types::{GotoDefinitionResponse, LocationLink, Position, Url};
use rowan::TextRange;
use crate::{
db::Document,
util::{cursor::CursorContext, line_index_ext::LineIndexExt},
Db,
};
use crate::util::{cursor::CursorContext, line_index_ext::LineIndexExt};
pub fn goto_definition(
db: &dyn Db,
workspace: &Workspace,
uri: &Url,
position: Position,
) -> Option<GotoDefinitionResponse> {
let context = CursorContext::new(db, uri, position, ())?;
let context = CursorContext::new(workspace, uri, position, ())?;
log::debug!("[Definition] Cursor: {:?}", context.cursor);
let links: Vec<_> = command::goto_definition(&context)
@ -31,12 +28,12 @@ pub fn goto_definition(
let origin_selection_range = Some(
context
.document
.line_index(db)
.line_index
.line_col_lsp_range(result.origin_selection_range),
);
let target_line_index = result.target.line_index(db);
let target_uri = result.target.location(context.db).uri(context.db).clone();
let target_line_index = &result.target.line_index;
let target_uri = result.target.uri.clone();
let target_range = target_line_index.line_col_lsp_range(result.target_range);
let target_selection_range =
@ -55,9 +52,9 @@ pub fn goto_definition(
}
#[derive(Debug, Clone)]
struct DefinitionResult {
struct DefinitionResult<'a> {
origin_selection_range: TextRange,
target: Document,
target: &'a Document,
target_range: TextRange,
target_selection_range: TextRange,
}
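
Every feature still converts rowan's byte-based `TextRange`s into LSP line/column ranges; only the lookup changed, from a salsa query to the plain `line_index` field. A minimal sketch of what such a line index does (bytes as columns; the real one also deals in UTF-16 code units for the LSP side):

    // Map byte offsets to (line, column) pairs via precomputed line starts.
    struct LineIndex {
        line_starts: Vec<usize>,
    }

    impl LineIndex {
        fn new(text: &str) -> Self {
            let mut line_starts = vec![0];
            line_starts.extend(
                text.char_indices()
                    .filter(|&(_, c)| c == '\n')
                    .map(|(i, _)| i + 1),
            );
            Self { line_starts }
        }

        fn line_col(&self, offset: usize) -> (usize, usize) {
            // partition_point counts the line starts that are <= offset.
            let line = self.line_starts.partition_point(|&start| start <= offset) - 1;
            (line, offset - self.line_starts[line])
        }
    }

    fn main() {
        let index = LineIndex::new("\\documentclass{article}\n\\begin{document}\nHi\n");
        assert_eq!(index.line_col(0), (0, 0));
        assert_eq!(index.line_col(26), (1, 2));
        println!("ok");
    }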

View file

@ -1,3 +1,4 @@
use base_db::DocumentData;
use rowan::ast::AstNode;
use syntax::latex;
@ -5,7 +6,9 @@ use crate::util::cursor::CursorContext;
use super::DefinitionResult;
pub(super) fn goto_definition(context: &CursorContext) -> Option<Vec<DefinitionResult>> {
pub(super) fn goto_definition<'a>(
context: &CursorContext<'a>,
) -> Option<Vec<DefinitionResult<'a>>> {
let name = context
.cursor
.as_tex()
@ -13,28 +16,28 @@ pub(super) fn goto_definition(context: &CursorContext) -> Option<Vec<DefinitionR
let origin_selection_range = name.text_range();
for document in context.related() {
if let Some(data) = document.parse(context.db).as_tex() {
let root = data.root(context.db);
if let Some(result) = root
.descendants()
.filter_map(latex::CommandDefinition::cast)
.filter(|def| {
def.name()
.and_then(|name| name.command())
.map_or(false, |node| node.text() == name.text())
})
.find_map(|def| {
Some(DefinitionResult {
origin_selection_range,
target: document,
target_range: latex::small_range(&def),
target_selection_range: def.name()?.command()?.text_range(),
})
})
{
return Some(vec![result]);
}
}
}
for document in &context.related {
let DocumentData::Tex(data) = &document.data else { continue };
let root = data.root_node();
if let Some(result) = root
.descendants()
.filter_map(latex::CommandDefinition::cast)
.filter(|def| {
def.name()
.and_then(|name| name.command())
.map_or(false, |node| node.text() == name.text())
})
.find_map(|def| {
Some(DefinitionResult {
origin_selection_range,
target: document,
target_range: latex::small_range(&def),
target_selection_range: def.name()?.command()?.text_range(),
})
})
{
return Some(vec![result]);
}
}

View file

@ -1,21 +1,22 @@
use rowan::TextRange;
use crate::{db::dependency_graph, util::cursor::CursorContext};
use crate::util::cursor::CursorContext;
use super::DefinitionResult;
pub(super) fn goto_definition(context: &CursorContext) -> Option<Vec<DefinitionResult>> {
let db = context.db;
pub(super) fn goto_definition<'a>(
context: &CursorContext<'a>,
) -> Option<Vec<DefinitionResult<'a>>> {
context
.workspace
.parents(db, context.document)
.parents(context.document)
.iter()
.copied()
.chain(std::iter::once(context.document))
.flat_map(|parent| dependency_graph(db, parent).edges.iter())
.flat_map(|parent| base_db::graph::Graph::new(context.workspace, parent).edges)
.filter(|edge| edge.source == context.document)
.find_map(|edge| {
let range = edge.origin?.link.range(db);
let range = edge.weight?.link.path.range;
if range.contains_inclusive(context.offset) {
Some(vec![DefinitionResult {
origin_selection_range: range,

View file

@ -1,3 +1,4 @@
use base_db::DocumentData;
use rowan::ast::AstNode;
use syntax::{
bibtex::{self, HasName},
@ -8,9 +9,9 @@ use crate::util::cursor::CursorContext;
use super::DefinitionResult;
pub(super) fn goto_definition(context: &CursorContext) -> Option<Vec<DefinitionResult>> {
let db = context.db;
pub(super) fn goto_definition<'a>(
context: &CursorContext<'a>,
) -> Option<Vec<DefinitionResult<'a>>> {
let word = context
.cursor
.as_tex()
@ -22,17 +23,17 @@ pub(super) fn goto_definition(context: &CursorContext) -> Option<Vec<DefinitionR
let origin_selection_range = latex::small_range(&key);
for document in context.related() {
if let Some(data) = document.parse(db).as_bib() {
for entry in data.root(db).children().filter_map(bibtex::Entry::cast) {
if let Some(key) = entry.name_token().filter(|k| k.text() == word.text()) {
return Some(vec![DefinitionResult {
origin_selection_range,
target: document,
target_selection_range: key.text_range(),
target_range: entry.syntax().text_range(),
}]);
}
for document in &context.related {
let DocumentData::Bib(data) = &document.data else { continue };
for entry in data.root_node().children().filter_map(bibtex::Entry::cast) {
if let Some(key) = entry.name_token().filter(|k| k.text() == word.text()) {
return Some(vec![DefinitionResult {
origin_selection_range,
target: document,
target_selection_range: key.text_range(),
target_range: entry.syntax().text_range(),
}]);
}
}
}

View file

@ -1,37 +1,36 @@
use crate::{
db::analysis::label,
util::{self, cursor::CursorContext},
};
use base_db::{semantics::tex::LabelKind, DocumentData};
use crate::util::{self, cursor::CursorContext};
use super::DefinitionResult;
pub(super) fn goto_definition(context: &CursorContext) -> Option<Vec<DefinitionResult>> {
let db = context.db;
pub(super) fn goto_definition<'a>(
context: &CursorContext<'a>,
) -> Option<Vec<DefinitionResult<'a>>> {
let (name_text, origin_selection_range) = context
.find_label_name_key()
.or_else(|| context.find_label_name_command())?;
for document in context.related() {
if let Some(data) = document.parse(db).as_tex() {
if let Some(label) = data
.analyze(db)
.labels(db)
.iter()
.filter(|label| matches!(label.origin(db), label::Origin::Definition(_)))
.find(|label| label.name(db).text(db) == name_text.as_str())
{
let target_selection_range = label.range(db);
let target_range = util::label::render(db, document, *label)
.map_or(target_selection_range, |label| label.range);
return Some(vec![DefinitionResult {
origin_selection_range,
target: document,
target_range,
target_selection_range,
}]);
}
}
}
for document in &context.related {
let DocumentData::Tex(data) = &document.data else { continue };
let Some(label) = data
.semantics
.labels
.iter()
.filter(|label| label.kind == LabelKind::Definition)
.find(|label| label.name.text == name_text) else { continue };
let target_selection_range = label.name.range;
let target_range = util::label::render(context.workspace, &context.related, label)
.map_or(target_selection_range, |label| label.range);
return Some(vec![DefinitionResult {
origin_selection_range,
target: document,
target_range,
target_selection_range,
}]);
}
None

View file

@ -1,3 +1,4 @@
use base_db::DocumentData;
use rowan::ast::AstNode;
use syntax::bibtex::{self, HasName};
@ -5,9 +6,11 @@ use crate::util::cursor::CursorContext;
use super::DefinitionResult;
pub(super) fn goto_definition(context: &CursorContext) -> Option<Vec<DefinitionResult>> {
let db = context.db;
let data = context.document.parse(db).as_bib()?;
pub(super) fn goto_definition<'a>(
context: &CursorContext<'a>,
) -> Option<Vec<DefinitionResult<'a>>> {
let DocumentData::Bib(data) = &context.document.data else { return None };
let key = context
.cursor
.as_bib()
@ -17,7 +20,7 @@ pub(super) fn goto_definition(context: &CursorContext) -> Option<Vec<DefinitionR
let origin_selection_range = key.text_range();
data.root(db)
data.root_node()
.children()
.filter_map(bibtex::StringDef::cast)
.find_map(|string| {

View file

@ -1,21 +1,17 @@
use base_db::{DocumentData, Workspace};
use lsp_types::{FoldingRange, FoldingRangeKind, Range, Url};
use rowan::ast::AstNode;
use syntax::{bibtex, latex};
use crate::{
db::{parse::DocumentData, Workspace},
util::line_index_ext::LineIndexExt,
Db,
};
use crate::util::line_index_ext::LineIndexExt;
pub fn find_all(db: &dyn Db, uri: &Url) -> Option<Vec<FoldingRange>> {
let document = Workspace::get(db).lookup_uri(db, uri)?;
let line_index = document.line_index(db);
let foldings = match document.parse(db) {
pub fn find_all(workspace: &Workspace, uri: &Url) -> Option<Vec<FoldingRange>> {
let document = workspace.lookup(uri)?;
let line_index = &document.line_index;
let foldings = match &document.data {
DocumentData::Tex(data) => {
let mut results = Vec::new();
let root = data.root(db);
for node in root.descendants() {
for node in data.root_node().descendants() {
if let Some(folding) = latex::Environment::cast(node.clone())
.map(|node| latex::small_range(&node))
.or_else(|| {
@ -32,7 +28,7 @@ pub fn find_all(db: &dyn Db, uri: &Url) -> Option<Vec<FoldingRange>> {
results
}
DocumentData::Bib(data) => {
let root = data.root(db);
let root = data.root_node();
root.descendants()
.filter(|node| {
matches!(
@ -43,7 +39,10 @@ pub fn find_all(db: &dyn Db, uri: &Url) -> Option<Vec<FoldingRange>> {
.map(|node| create_range(line_index.line_col_lsp_range(node.text_range())))
.collect()
}
DocumentData::Log(_) | DocumentData::TexlabRoot(_) | DocumentData::Tectonic(_) => {
DocumentData::Aux(_)
| DocumentData::Log(_)
| DocumentData::Root
| DocumentData::Tectonic => {
return None;
}
};

View file

@ -1,31 +1,29 @@
mod bibtex_internal;
mod latexindent;
use base_db::{Formatter, Workspace};
use distro::Language;
use lsp_types::{FormattingOptions, TextEdit, Url};
use crate::{db::Workspace, Db, Formatter};
use self::{bibtex_internal::format_bibtex_internal, latexindent::format_with_latexindent};
pub fn format_source_code(
db: &dyn Db,
workspace: &Workspace,
uri: &Url,
options: &FormattingOptions,
) -> Option<Vec<TextEdit>> {
let workspace = Workspace::get(db);
let document = workspace.lookup_uri(db, uri)?;
match document.language(db) {
Language::Tex => match db.config().formatting.tex_formatter {
let document = workspace.lookup(uri)?;
match document.language {
Language::Tex => match workspace.config().formatting.tex_formatter {
Formatter::Null => None,
Formatter::Server => None,
Formatter::LatexIndent => format_with_latexindent(db, document),
Formatter::LatexIndent => format_with_latexindent(workspace, document),
},
Language::Bib => match db.config().formatting.bib_formatter {
Language::Bib => match workspace.config().formatting.bib_formatter {
Formatter::Null => None,
Formatter::Server => format_bibtex_internal(db, document, options),
Formatter::LatexIndent => format_with_latexindent(db, document),
Formatter::Server => format_bibtex_internal(workspace, document, options),
Formatter::LatexIndent => format_with_latexindent(workspace, document),
},
Language::Log | Language::Root | Language::Tectonic => None,
Language::Aux | Language::Log | Language::Root | Language::Tectonic => None,
}
}

View file

@ -1,16 +1,13 @@
use base_db::{Document, LineIndex, Workspace};
use lsp_types::{FormattingOptions, TextEdit};
use rowan::{ast::AstNode, NodeOrToken};
use syntax::bibtex::{self, HasName, HasType, HasValue};
use crate::{
db::Document,
util::{line_index::LineIndex, line_index_ext::LineIndexExt},
Db,
};
use crate::util::line_index_ext::LineIndexExt;
pub fn format_bibtex_internal(
db: &dyn Db,
document: Document,
workspace: &Workspace,
document: &Document,
options: &FormattingOptions,
) -> Option<Vec<TextEdit>> {
let mut indent = String::new();
@ -23,13 +20,14 @@ pub fn format_bibtex_internal(
indent.push('\t');
}
let line_length = db.config().formatting.line_length;
let line_length = workspace.config().formatting.line_length;
let line_index = document.line_index(db);
let data = document.parse(db).as_bib()?;
let line_index = &document.line_index;
let data = document.data.as_bib()?;
let mut edits = Vec::new();
for node in data.root(db).children().filter(|node| {
for node in data.root_node().children().filter(|node| {
matches!(
node.kind(),
bibtex::PREAMBLE | bibtex::STRING | bibtex::ENTRY

View file

@ -3,34 +3,30 @@ use std::{
process::{Command, Stdio},
};
use base_db::{Document, LatexIndentConfig, Workspace};
use distro::Language;
use lsp_types::TextEdit;
use rowan::{TextLen, TextRange};
use tempfile::tempdir;
use crate::{
db::{Document, Workspace},
util::line_index_ext::LineIndexExt,
Db, LatexIndentConfig,
};
use crate::util::line_index_ext::LineIndexExt;
pub fn format_with_latexindent(db: &dyn Db, document: Document) -> Option<Vec<TextEdit>> {
let workspace = Workspace::get(db);
let config = db.config();
pub fn format_with_latexindent(
workspace: &Workspace,
document: &Document,
) -> Option<Vec<TextEdit>> {
let config = workspace.config();
let target_dir = tempdir().ok()?;
let source_dir = workspace
.working_dir(db, document.directory(db))
.path(db)
.as_deref()?;
let source_dir = workspace.current_dir(&document.dir).to_file_path().ok()?;
let target_file = target_dir
.path()
.join(if document.language(db) == Language::Bib {
.join(if document.language == Language::Bib {
"file.bib"
} else {
"file.tex"
});
std::fs::write(&target_file, document.text(db)).ok()?;
std::fs::write(&target_file, &document.text).ok()?;
let args = build_arguments(&config.formatting.latex_indent, &target_file);
@ -49,12 +45,12 @@ pub fn format_with_latexindent(db: &dyn Db, document: Document) -> Option<Vec<Te
.output()
.ok()?;
let old_text = document.text(db);
let old_text = &document.text;
let new_text = String::from_utf8_lossy(&output.stdout).into_owned();
if new_text.is_empty() {
None
} else {
let line_index = document.line_index(db);
let line_index = &document.line_index;
Some(vec![TextEdit {
range: line_index.line_col_lsp_range(TextRange::new(0.into(), old_text.text_len())),
new_text,
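
The latexindent path still shells out: snapshot the buffer into a temp file, run the binary, and replace the whole document if stdout is non-empty. A std-only sketch of that flow; it assumes `latexindent` is on PATH and prints the result to stdout, which is its default mode:

    use std::{fs, process::{Command, Stdio}};

    // Format a snapshot of the buffer, not the file on disk, so unsaved
    // changes are included in what the formatter sees.
    fn format_with_latexindent(text: &str) -> Option<String> {
        let dir = std::env::temp_dir().join("texlab-fmt");
        fs::create_dir_all(&dir).ok()?;
        let file = dir.join("file.tex");
        fs::write(&file, text).ok()?;

        let output = Command::new("latexindent")
            .arg("file.tex")
            .current_dir(&dir)
            .stdin(Stdio::null())
            .stderr(Stdio::null())
            .output()
            .ok()?;

        let new_text = String::from_utf8_lossy(&output.stdout).into_owned();
        // An empty result usually means the tool failed; keep the old text.
        if new_text.is_empty() { None } else { Some(new_text) }
    }

    fn main() {
        match format_with_latexindent("\\begin{document}hi\\end{document}\n") {
            Some(formatted) => print!("{formatted}"),
            None => eprintln!("latexindent unavailable or produced no output"),
        }
    }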

View file

@ -4,11 +4,12 @@ use std::{
process::Stdio,
};
use base_db::Workspace;
use log::error;
use lsp_types::{Position, Url};
use thiserror::Error;
use crate::{db::Workspace, util::line_index_ext::LineIndexExt, Db};
use crate::util::line_index_ext::LineIndexExt;
#[derive(Debug, Error)]
pub enum Error {
@ -37,49 +38,48 @@ pub struct Command {
}
impl Command {
pub fn configure(db: &dyn Db, uri: &Url, position: Option<Position>) -> Result<Self, Error> {
let workspace = Workspace::get(db);
pub fn configure(
workspace: &Workspace,
uri: &Url,
position: Option<Position>,
) -> Result<Self, Error> {
let child = workspace
.lookup_uri(db, uri)
.lookup(uri)
.ok_or_else(|| Error::TexNotFound(uri.clone()))?;
let parent = workspace
.parents(db, child)
.iter()
.copied()
.next()
.unwrap_or(child);
let parent = *workspace.parents(child).iter().next().unwrap_or(&child);
if parent.uri.scheme() != "file" {
return Err(Error::NoLocalFile(parent.uri.clone()));
}
let output_dir = workspace
.output_dir(db, workspace.working_dir(db, parent.directory(db)))
.path(db)
.as_deref()
.ok_or_else(|| Error::NoLocalFile(uri.clone()))?;
.output_dir(&workspace.current_dir(&parent.dir))
.to_file_path()
.unwrap();
let tex_path = child
.location(db)
.path(db)
.path
.as_deref()
.ok_or_else(|| Error::NoLocalFile(uri.clone()))?;
let pdf_path = match parent.location(db).stem(db) {
Some(stem) => {
let pdf_name = format!("{}.pdf", stem);
output_dir.join(pdf_name)
}
None => {
return Err(Error::InvalidTexFile(uri.clone()));
}
let pdf_path = match parent
.path
.as_deref()
.unwrap()
.file_stem()
.and_then(|stem| stem.to_str())
{
Some(stem) => output_dir.join(format!("{}.pdf", stem)),
None => return Err(Error::InvalidTexFile(uri.clone())),
};
if !pdf_path.exists() {
return Err(Error::PdfNotFound(pdf_path));
}
let position =
position.unwrap_or_else(|| child.line_index(db).line_col_lsp(child.cursor(db)));
let position = position.unwrap_or_else(|| child.line_index.line_col_lsp(child.cursor));
let Some(config) = &db.config().synctex else {
let Some(config) = &workspace.config().synctex else {
return Err(Error::Unconfigured);
};
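
The forward-search setup now derives the PDF path directly via `file_stem` instead of going through a salsa-interned `stem`. A std-only sketch of that derivation:

    use std::path::{Path, PathBuf};

    // Build `<output_dir>/<stem>.pdf` from the root TeX file, mirroring the
    // file_stem-based logic above (simplified stand-in).
    fn pdf_path(output_dir: &Path, tex_path: &Path) -> Option<PathBuf> {
        let stem = tex_path.file_stem()?.to_str()?;
        Some(output_dir.join(format!("{stem}.pdf")))
    }

    fn main() {
        let pdf = pdf_path(Path::new("/tmp/out"), Path::new("/home/user/main.tex"));
        assert_eq!(pdf, Some(PathBuf::from("/tmp/out/main.pdf")));
        println!("ok");
    }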

View file

@ -1,10 +1,15 @@
mod label;
use base_db::Workspace;
use lsp_types::{DocumentHighlight, Position, Url};
use crate::{util::cursor::CursorContext, Db};
use crate::util::cursor::CursorContext;
pub fn find_all(db: &dyn Db, uri: &Url, position: Position) -> Option<Vec<DocumentHighlight>> {
let context = CursorContext::new(db, uri, position, ())?;
pub fn find_all(
workspace: &Workspace,
uri: &Url,
position: Position,
) -> Option<Vec<DocumentHighlight>> {
let context = CursorContext::new(workspace, uri, position, ())?;
label::find_highlights(&context)
}

View file

@ -1,28 +1,26 @@
use base_db::{semantics::tex::LabelKind, DocumentData};
use lsp_types::{DocumentHighlight, DocumentHighlightKind};
use crate::{
db::analysis::label,
util::{cursor::CursorContext, line_index_ext::LineIndexExt},
};
use crate::util::{cursor::CursorContext, line_index_ext::LineIndexExt};
pub fn find_highlights(context: &CursorContext) -> Option<Vec<DocumentHighlight>> {
let db = context.db;
let (name_text, _) = context.find_label_name_key()?;
let data = context.document.parse(db).as_tex()?;
let DocumentData::Tex(data) = &context.document.data else { return None };
let mut highlights = Vec::new();
let line_index = context.document.line_index(db);
let line_index = &context.document.line_index;
for label in data
.analyze(db)
.labels(db)
.semantics
.labels
.iter()
.filter(|label| label.name(db).text(db) == &name_text)
.filter(|label| label.name.text == name_text)
{
let range = line_index.line_col_lsp_range(label.range(db));
let kind = Some(match label.origin(db) {
label::Origin::Definition(_) => DocumentHighlightKind::WRITE,
label::Origin::Reference(_) => DocumentHighlightKind::READ,
label::Origin::ReferenceRange(_) => DocumentHighlightKind::READ,
let range = line_index.line_col_lsp_range(label.name.range);
let kind = Some(match label.kind {
LabelKind::Definition => DocumentHighlightKind::WRITE,
LabelKind::Reference => DocumentHighlightKind::READ,
LabelKind::ReferenceRange => DocumentHighlightKind::READ,
});
highlights.push(DocumentHighlight { range, kind });

View file

@ -5,16 +5,14 @@ mod field;
mod label;
mod string_ref;
use base_db::Workspace;
use lsp_types::{Hover, HoverContents, MarkupContent, MarkupKind, Position, Url};
use rowan::TextRange;
use crate::{
util::{cursor::CursorContext, line_index_ext::LineIndexExt},
Db,
};
use crate::util::{cursor::CursorContext, line_index_ext::LineIndexExt};
pub fn find(db: &dyn Db, uri: &Url, position: Position) -> Option<Hover> {
let context = CursorContext::new(db, uri, position, ())?;
pub fn find(workspace: &Workspace, uri: &Url, position: Position) -> Option<Hover> {
let context = CursorContext::new(workspace, uri, position, ())?;
log::debug!("[Hover] Cursor: {:?}", context.cursor);
let result = label::find_hover(&context)
@ -24,13 +22,12 @@ pub fn find(db: &dyn Db, uri: &Url, position: Position) -> Option<Hover> {
.or_else(|| field::find_hover(&context))
.or_else(|| entry_type::find_hover(&context))?;
let line_index = context.document.line_index(db);
Some(Hover {
contents: HoverContents::Markup(MarkupContent {
kind: result.value_kind,
value: result.value,
}),
range: Some(line_index.line_col_lsp_range(result.range)),
range: Some(context.document.line_index.line_col_lsp_range(result.range)),
})
}

View file

@ -12,10 +12,9 @@ pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
.or_else(|| context.find_citation_key_command())
.or_else(|| context.find_entry_key())?;
let value = context.related().find_map(|document| {
let data = document.parse(context.db).as_bib()?;
let root = data.root(context.db);
let root = bibtex::Root::cast(root)?;
let value = context.related.iter().find_map(|document| {
let data = document.data.as_bib()?;
let root = bibtex::Root::cast(data.root_node())?;
let entry = root.find_entry(&key)?;
citeproc::render(&entry)
})?;

View file

@ -1,28 +1,23 @@
use base_db::{semantics::tex::LinkKind, DocumentData};
use lsp_types::MarkupKind;
use crate::{
db::analysis::TexLinkKind,
util::{components::COMPONENT_DATABASE, cursor::CursorContext},
};
use crate::util::{components::COMPONENT_DATABASE, cursor::CursorContext};
use super::HoverResult;
pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
let db = context.db;
let links = context.document.parse(db).as_tex()?.analyze(db).links(db);
links
let DocumentData::Tex(data) = &context.document.data else { return None };
data.semantics
.links
.iter()
.filter(|link| matches!(link.kind(db), TexLinkKind::Sty | TexLinkKind::Cls))
.filter(|link| link.range(db).contains_inclusive(context.offset))
.filter(|link| matches!(link.kind, LinkKind::Sty | LinkKind::Cls))
.filter(|link| link.path.range.contains_inclusive(context.offset))
.find_map(|link| {
let value = COMPONENT_DATABASE
.documentation(link.path(db).text(db))?
.value;
let value = COMPONENT_DATABASE.documentation(&link.path.text)?.value;
Some(HoverResult {
value,
value_kind: MarkupKind::PlainText,
range: link.range(db),
range: link.path.range,
})
})
}

View file

@ -1,9 +1,7 @@
use base_db::semantics::tex::LabelKind;
use lsp_types::MarkupKind;
use crate::{
db::Word,
util::{self, cursor::CursorContext},
};
use crate::util::{self, cursor::CursorContext};
use super::HoverResult;
@ -12,12 +10,16 @@ pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
.find_label_name_key()
.or_else(|| context.find_label_name_command())?;
let db = context.db;
util::label::find_label_definition(db, context.document, Word::new(db, name_text))
.and_then(|(document, label)| util::label::render(db, document, label))
context
.related
.iter()
.filter_map(|document| document.data.as_tex())
.flat_map(|data| data.semantics.labels.iter())
.find(|label| label.kind == LabelKind::Definition && label.name.text == name_text)
.and_then(|label| util::label::render(context.workspace, &context.related, label))
.map(|label| HoverResult {
range,
value: label.reference(db),
value: label.reference(),
value_kind: MarkupKind::PlainText,
})
}

View file

@ -1,3 +1,4 @@
use base_db::DocumentData;
use citeproc::field::text::TextFieldData;
use lsp_types::MarkupKind;
use rowan::ast::AstNode;
@ -8,7 +9,7 @@ use crate::util::cursor::CursorContext;
use super::HoverResult;
pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
let data = context.document.parse(context.db).as_bib()?;
let DocumentData::Bib(data) = &context.document.data else { return None };
let name = context
.cursor
@ -20,7 +21,7 @@ pub(super) fn find_hover(context: &CursorContext) -> Option<HoverResult> {
})?;
for string in data
.root(context.db)
.root_node()
.children()
.filter_map(bibtex::StringDef::cast)
{

View file

@ -1,36 +1,40 @@
mod label;
use base_db::{Document, Workspace};
use lsp_types::{InlayHint, InlayHintLabel, Range, Url};
use rowan::TextSize;
use rowan::{TextRange, TextSize};
use rustc_hash::FxHashSet;
use crate::{
db::Workspace,
util::{line_index::LineIndex, line_index_ext::LineIndexExt},
Db,
};
use crate::util::line_index_ext::LineIndexExt;
pub fn find_all(db: &dyn Db, uri: &Url, range: Range) -> Option<Vec<InlayHint>> {
let document = Workspace::get(db).lookup_uri(db, uri)?;
let line_index = document.line_index(db);
pub fn find_all(workspace: &Workspace, uri: &Url, range: Range) -> Option<Vec<InlayHint>> {
let document = workspace.lookup(uri)?;
let range = document.line_index.offset_lsp_range(range);
let related = workspace.related(document);
let mut builder = InlayHintBuilder {
line_index,
workspace,
document,
related,
range,
hints: Vec::new(),
};
let range = line_index.offset_lsp_range(range);
label::find_hints(db, document, range, &mut builder);
label::find_hints(&mut builder);
Some(builder.hints)
}
struct InlayHintBuilder<'db> {
line_index: &'db LineIndex,
struct InlayHintBuilder<'a> {
workspace: &'a Workspace,
document: &'a Document,
related: FxHashSet<&'a Document>,
range: TextRange,
hints: Vec<InlayHint>,
}
impl<'db> InlayHintBuilder<'db> {
pub fn push(&mut self, offset: TextSize, text: String) {
let position = self.line_index.line_col_lsp(offset);
let position = self.document.line_index.line_col_lsp(offset);
self.hints.push(InlayHint {
position,
label: InlayHintLabel::String(text),

View file

@ -1,47 +1,38 @@
use rowan::TextRange;
use base_db::{semantics::tex::LabelKind, DocumentData};
use crate::{
db::{analysis::label, Document},
util::{self, label::LabeledObject},
Db,
};
use crate::util::{self, label::LabeledObject};
use super::InlayHintBuilder;
pub(super) fn find_hints(
db: &dyn Db,
document: Document,
range: TextRange,
builder: &mut InlayHintBuilder,
) -> Option<()> {
let data = document.parse(db).as_tex()?;
for label in data
.analyze(db)
.labels(db)
.iter()
.copied()
.filter(|label| matches!(label.origin(db), label::Origin::Definition(_)))
.filter(|label| label.range(db).intersect(range).is_some())
{
if let Some(rendered) = util::label::render(db, document, label) {
if let Some(number) = &rendered.number {
let text = match &rendered.object {
LabeledObject::Section { prefix, .. } => {
format!("{} {}", prefix, number.text(db))
}
LabeledObject::Float { kind, .. } => {
format!("{} {}", kind.as_str(), number.text(db))
}
LabeledObject::Theorem { kind, .. } => {
format!("{} {}", kind.text(db), number.text(db))
}
LabeledObject::Equation => format!("Equation ({})", number.text(db)),
LabeledObject::EnumItem => format!("Item {}", number.text(db)),
};
builder.push(label.range(db).end(), text);
}
}
}
pub(super) fn find_hints(builder: &mut InlayHintBuilder) -> Option<()> {
let DocumentData::Tex(data) = &builder.document.data else { return None };
let range = builder.range;
for label in data
.semantics
.labels
.iter()
.filter(|label| label.kind == LabelKind::Definition)
.filter(|label| label.name.range.intersect(range).is_some())
{
let Some(rendered) = util::label::render(builder.workspace, &builder.related, label) else { continue };
let Some(number) = &rendered.number else { continue };
let text = match &rendered.object {
LabeledObject::Section { prefix, .. } => {
format!("{} {}", prefix, number)
}
LabeledObject::Float { kind, .. } => {
format!("{} {}", kind.as_str(), number)
}
LabeledObject::Theorem { kind, .. } => {
format!("{} {}", kind, number)
}
LabeledObject::Equation => format!("Equation ({})", number),
LabeledObject::EnumItem => format!("Item {}", number),
};
builder.push(label.name.range.end(), text);
}
Some(())

View file

@ -1,36 +1,33 @@
mod include;
use base_db::{Document, Workspace};
use lsp_types::{DocumentLink, Url};
use rowan::TextRange;
use crate::{
db::{Document, Workspace},
util::{line_index::LineIndex, line_index_ext::LineIndexExt},
Db,
};
use crate::util::line_index_ext::LineIndexExt;
pub fn find_all(db: &dyn Db, uri: &Url) -> Option<Vec<DocumentLink>> {
let document = Workspace::get(db).lookup_uri(db, uri)?;
pub fn find_all(workspace: &Workspace, uri: &Url) -> Option<Vec<DocumentLink>> {
let document = workspace.lookup(uri)?;
let mut builder = LinkBuilder {
db,
line_index: document.line_index(db),
workspace,
document,
links: Vec::new(),
};
include::find_links(db, document, &mut builder);
include::find_links(&mut builder);
Some(builder.links)
}
struct LinkBuilder<'db> {
db: &'db dyn Db,
line_index: &'db LineIndex,
struct LinkBuilder<'a> {
workspace: &'a Workspace,
document: &'a Document,
links: Vec<DocumentLink>,
}
impl<'db> LinkBuilder<'db> {
pub fn push(&mut self, range: TextRange, target: Document) {
let range = self.line_index.line_col_lsp_range(range);
let target = Some(target.location(self.db).uri(self.db).clone());
impl<'a> LinkBuilder<'a> {
pub fn push(&mut self, range: TextRange, target: &Document) {
let range = self.document.line_index.line_col_lsp_range(range);
let target = Some(target.uri.clone());
self.links.push(DocumentLink {
range,
target,

View file

@ -1,23 +1,20 @@
use crate::{
db::{dependency_graph, Document, Workspace},
Db,
};
use super::LinkBuilder;
pub(super) fn find_links(db: &dyn Db, document: Document, builder: &mut LinkBuilder) -> Option<()> {
let workspace = Workspace::get(db);
let parent = workspace
.parents(db, document)
pub(super) fn find_links(builder: &mut LinkBuilder) -> Option<()> {
let parent = *builder
.workspace
.parents(builder.document)
.iter()
.next()
.copied()
.unwrap_or(document);
.unwrap_or(&builder.document);
let graph = dependency_graph(db, parent);
for edge in graph.edges.iter().filter(|edge| edge.source == document) {
if let Some(origin) = edge.origin {
builder.push(origin.link.range(db), edge.target);
let graph = base_db::graph::Graph::new(builder.workspace, parent);
for edge in &graph.edges {
if edge.source == builder.document {
if let Some(weight) = &edge.weight {
builder.push(weight.link.path.range, edge.target);
}
}
}

View file

@ -2,23 +2,20 @@ mod entry;
mod label;
mod string;
use base_db::{Document, Workspace};
use lsp_types::{Location, Position, ReferenceContext, Url};
use rowan::TextRange;
use crate::{
db::Document,
util::{cursor::CursorContext, line_index_ext::LineIndexExt},
Db,
};
use crate::util::{cursor::CursorContext, line_index_ext::LineIndexExt};
pub fn find_all(
db: &dyn Db,
workspace: &Workspace,
uri: &Url,
position: Position,
params: &ReferenceContext,
) -> Option<Vec<Location>> {
let mut results = Vec::new();
let context = CursorContext::new(db, uri, position, params)?;
let context = CursorContext::new(workspace, uri, position, params)?;
log::debug!("[References] Cursor: {:?}", context.cursor);
label::find_all_references(&context, &mut results);
entry::find_all_references(&context, &mut results);
@ -27,19 +24,16 @@ pub fn find_all(
let locations = results
.into_iter()
.map(|result| Location {
uri: result.document.location(db).uri(db).clone(),
range: result
.document
.line_index(db)
.line_col_lsp_range(result.range),
uri: result.document.uri.clone(),
range: result.document.line_index.line_col_lsp_range(result.range),
})
.collect();
Some(locations)
}
#[derive(Debug, Clone)]
struct ReferenceResult {
document: Document,
#[derive(Debug)]
struct ReferenceResult<'a> {
document: &'a Document,
range: TextRange,
}
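
With salsa's interned IDs gone, result types borrow the `Document` they point into, so they pick up a lifetime tied to the workspace snapshot they came from. A minimal illustration of the pattern with simplified stand-in types:

    // A result borrows its document, so it cannot outlive the snapshot it
    // came from; the borrow checker enforces this through 'a.
    struct Document { uri: String, text: String }

    struct ReferenceResult<'a> {
        document: &'a Document,
        range: (usize, usize),
    }

    fn find_all<'a>(documents: &'a [Document], needle: &str) -> Vec<ReferenceResult<'a>> {
        documents
            .iter()
            .filter_map(|document| {
                let start = document.text.find(needle)?;
                Some(ReferenceResult { document, range: (start, start + needle.len()) })
            })
            .collect()
    }

    fn main() {
        let docs = vec![Document { uri: "file:///a.tex".into(), text: "\\ref{fig:a}".into() }];
        for result in find_all(&docs, "fig:a") {
            println!("{} {:?}", result.document.uri, result.range);
        }
    }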

View file

@ -1,3 +1,4 @@
use base_db::DocumentData;
use lsp_types::ReferenceContext;
use rowan::ast::AstNode;
use syntax::{
@ -5,24 +6,23 @@ use syntax::{
latex,
};
use crate::{db::parse::DocumentData, util::cursor::CursorContext};
use crate::util::cursor::CursorContext;
use super::ReferenceResult;
pub(super) fn find_all_references(
context: &CursorContext<&ReferenceContext>,
results: &mut Vec<ReferenceResult>,
pub(super) fn find_all_references<'a>(
context: &CursorContext<'a, &ReferenceContext>,
results: &mut Vec<ReferenceResult<'a>>,
) -> Option<()> {
let db = context.db;
let (key_text, _) = context
.find_citation_key_word()
.or_else(|| context.find_citation_key_command())
.or_else(|| context.find_entry_key())?;
for document in context.related() {
match document.parse(db) {
for document in &context.related {
match &document.data {
DocumentData::Tex(data) => {
data.root(db)
data.root_node()
.descendants()
.filter_map(latex::Citation::cast)
.filter_map(|citation| citation.key_list())
@ -34,7 +34,7 @@ pub(super) fn find_all_references(
});
}
DocumentData::Bib(data) if context.params.include_declaration => {
data.root(db)
data.root_node()
.children()
.filter_map(bibtex::Entry::cast)
.filter_map(|entry| entry.name_token())
@ -45,9 +45,10 @@ pub(super) fn find_all_references(
});
}
DocumentData::Bib(_)
| DocumentData::Aux(_)
| DocumentData::Log(_)
| DocumentData::TexlabRoot(_)
| DocumentData::Tectonic(_) => {}
| DocumentData::Root
| DocumentData::Tectonic => {}
};
}

View file

@ -1,34 +1,34 @@
use base_db::{semantics::tex::LabelKind, DocumentData};
use lsp_types::ReferenceContext;
use crate::util::cursor::CursorContext;
use super::ReferenceResult;
pub(super) fn find_all_references(
context: &CursorContext<&ReferenceContext>,
results: &mut Vec<ReferenceResult>,
pub(super) fn find_all_references<'a>(
context: &CursorContext<'a, &ReferenceContext>,
results: &mut Vec<ReferenceResult<'a>>,
) -> Option<()> {
let db = context.db;
let (name_text, _) = context
.find_label_name_key()
.or_else(|| context.find_label_name_command())?;
for document in context.related() {
if let Some(data) = document.parse(db).as_tex() {
for label in data
.analyze(db)
.labels(db)
.iter()
.filter(|label| label.name(db).text(db) == &name_text)
.filter(|label| {
label.origin(db).as_definition().is_none() || context.params.include_declaration
})
{
results.push(ReferenceResult {
document,
range: label.range(db),
});
}
for document in &context.related {
let DocumentData::Tex(data) = &document.data else { continue };
for label in data
.semantics
.labels
.iter()
.filter(|label| label.name.text == name_text)
.filter(|label| {
label.kind != LabelKind::Definition || context.params.include_declaration
})
{
results.push(ReferenceResult {
document,
range: label.name.range,
});
}
}

View file

@ -1,3 +1,4 @@
use base_db::DocumentData;
use lsp_types::ReferenceContext;
use rowan::ast::AstNode;
use syntax::bibtex::{self, HasName};
@ -6,11 +7,10 @@ use crate::util::cursor::CursorContext;
use super::ReferenceResult;
pub(super) fn find_all_references(
context: &CursorContext<&ReferenceContext>,
results: &mut Vec<ReferenceResult>,
pub(super) fn find_all_references<'a>(
context: &CursorContext<'a, &ReferenceContext>,
results: &mut Vec<ReferenceResult<'a>>,
) -> Option<()> {
let db = context.db;
let name_text = context
.cursor
.as_bib()
@ -21,8 +21,9 @@ pub(super) fn find_all_references(
})?
.text();
let data = context.document.parse(db).as_bib()?;
for node in data.root(db).descendants() {
let DocumentData::Bib(data) = &context.document.data else { return None };
for node in data.root_node().descendants() {
if let Some(name) = bibtex::StringDef::cast(node.clone())
.and_then(|string| string.name_token())
.filter(|name| context.params.include_declaration && name.text() == name_text)

View file

@ -2,33 +2,29 @@ mod command;
mod entry;
mod label;
use base_db::{Document, Workspace};
use lsp_types::{Position, Range, TextEdit, Url, WorkspaceEdit};
use rowan::TextRange;
use rustc_hash::FxHashMap;
use crate::{
db::Document,
util::{cursor::CursorContext, line_index_ext::LineIndexExt},
Db,
};
use crate::util::{cursor::CursorContext, line_index_ext::LineIndexExt};
pub fn prepare_rename_all(db: &dyn Db, uri: &Url, position: Position) -> Option<Range> {
let context = CursorContext::new(db, uri, position, ())?;
pub fn prepare_rename_all(workspace: &Workspace, uri: &Url, position: Position) -> Option<Range> {
let context = CursorContext::new(workspace, uri, position, ())?;
let range = entry::prepare_rename(&context)
.or_else(|| label::prepare_rename(&context))
.or_else(|| command::prepare_rename(&context))?;
let line_index = context.document.line_index(db);
Some(line_index.line_col_lsp_range(range))
Some(context.document.line_index.line_col_lsp_range(range))
}
pub fn rename_all(
db: &dyn Db,
workspace: &Workspace,
uri: &Url,
position: Position,
new_name: String,
) -> Option<WorkspaceEdit> {
let context = CursorContext::new(db, uri, position, Params { new_name })?;
let context = CursorContext::new(workspace, uri, position, Params { new_name })?;
let result = entry::rename(&context)
.or_else(|| label::rename(&context))
.or_else(|| command::rename(&context))?;
@ -37,15 +33,14 @@ pub fn rename_all(
.changes
.into_iter()
.map(|(document, old_edits)| {
let line_index = document.line_index(db);
let new_edits = old_edits
.into_iter()
.map(|Indel { delete, insert }| {
TextEdit::new(line_index.line_col_lsp_range(delete), insert)
TextEdit::new(document.line_index.line_col_lsp_range(delete), insert)
})
.collect();
(document.location(db).uri(db).clone(), new_edits)
(document.uri.clone(), new_edits)
})
.collect();
@ -63,7 +58,7 @@ struct Indel {
insert: String,
}
#[derive(Debug, PartialEq, Eq, Clone)]
struct RenameResult {
changes: FxHashMap<Document, Vec<Indel>>,
#[derive(Debug)]
struct RenameResult<'a> {
changes: FxHashMap<&'a Document, Vec<Indel>>,
}
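
The rename result is now keyed by `&Document` (the real `Document` implements `Hash`, as set up earlier in this commit, presumably over its URI) and is converted to per-URI edits only at the end. A simplified sketch of that grouping with stand-in types:

    use std::collections::HashMap;

    #[derive(PartialEq, Eq, Hash)]
    struct Document { uri: String }

    struct Indel { delete: (usize, usize), insert: String }

    // Collect edits per borrowed document, then key the final map by URI,
    // mirroring the WorkspaceEdit conversion above.
    fn to_workspace_edit<'a>(
        changes: HashMap<&'a Document, Vec<Indel>>,
    ) -> HashMap<String, Vec<(usize, usize, String)>> {
        changes
            .into_iter()
            .map(|(document, edits)| {
                let edits = edits
                    .into_iter()
                    .map(|Indel { delete, insert }| (delete.0, delete.1, insert))
                    .collect();
                (document.uri.clone(), edits)
            })
            .collect()
    }

    fn main() {
        let doc = Document { uri: "file:///main.tex".into() };
        let mut changes = HashMap::new();
        changes.insert(&doc, vec![Indel { delete: (5, 10), insert: "sec:new".into() }]);
        println!("{:?}", to_workspace_edit(changes));
    }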

View file

@ -1,3 +1,4 @@
use base_db::DocumentData;
use rowan::{TextRange, TextSize};
use rustc_hash::FxHashMap;
use syntax::latex;
@ -10,28 +11,28 @@ pub(super) fn prepare_rename<T>(context: &CursorContext<T>) -> Option<TextRange>
context.cursor.command_range(context.offset)
}
pub(super) fn rename(context: &CursorContext<Params>) -> Option<RenameResult> {
pub(super) fn rename<'a>(context: &CursorContext<'a, Params>) -> Option<RenameResult<'a>> {
prepare_rename(context)?;
let name = context.cursor.as_tex()?.text();
let mut changes = FxHashMap::default();
for document in context.related() {
if let Some(data) = document.parse(context.db).as_tex() {
let root = data.root(context.db);
let edits = root
.descendants_with_tokens()
.filter_map(|element| element.into_token())
.filter(|token| token.kind() == latex::COMMAND_NAME && token.text() == name)
.map(|token| {
let range = token.text_range();
Indel {
delete: TextRange::new(range.start() + TextSize::from(1), range.end()),
insert: context.params.new_name.clone(),
}
})
.collect();
changes.insert(document, edits);
}
}
for document in &context.related {
let DocumentData::Tex(data) = &document.data else { continue };
let root = data.root_node();
let edits = root
.descendants_with_tokens()
.filter_map(|element| element.into_token())
.filter(|token| token.kind() == latex::COMMAND_NAME && token.text() == name)
.map(|token| {
let range = token.text_range();
Indel {
delete: TextRange::new(range.start() + TextSize::from(1), range.end()),
insert: context.params.new_name.clone(),
}
})
.collect();
changes.insert(*document, edits);
}
Some(RenameResult { changes })
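
The `Indel` above starts one byte past the token range so the leading backslash of a command survives the rename. The same offset logic over a plain string (byte offsets assumed, not the real rowan ranges):

```rust
// Replace only the name of a command token, keeping its leading backslash.
fn rename_command(text: &str, start: usize, end: usize, new_name: &str) -> String {
    let mut result = String::with_capacity(text.len());
    result.push_str(&text[..start + 1]); // keep the `\`
    result.push_str(new_name);
    result.push_str(&text[end..]);
    result
}

fn main() {
    // `\foo` spans bytes 0..4; only `foo` is replaced.
    assert_eq!(rename_command(r"\foo{bar}", 0, 4, "baz"), r"\baz{bar}");
}
```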

View file

@@ -1,4 +1,4 @@
use crate::{db::parse::DocumentData, util::cursor::CursorContext};
use base_db::DocumentData;
use rowan::{ast::AstNode, TextRange};
use rustc_hash::FxHashMap;
use syntax::{
@@ -6,6 +6,8 @@ use syntax::{
latex,
};
use crate::util::cursor::CursorContext;
use super::{Indel, Params, RenameResult};
pub(super) fn prepare_rename<T>(context: &CursorContext<T>) -> Option<TextRange> {
@@ -16,17 +18,18 @@ pub(super) fn prepare_rename<T>(context: &CursorContext<T>) -> Option<TextRange>
Some(range)
}
pub(super) fn rename(context: &CursorContext<Params>) -> Option<RenameResult> {
pub(super) fn rename<'a>(context: &CursorContext<'a, Params>) -> Option<RenameResult<'a>> {
prepare_rename(context)?;
let (key_text, _) = context
.find_citation_key_word()
.or_else(|| context.find_entry_key())?;
let mut changes = FxHashMap::default();
for document in context.related() {
match document.parse(context.db) {
for document in &context.related {
match &document.data {
DocumentData::Tex(data) => {
let root = data.root(context.db);
let root = data.root_node();
let edits: Vec<_> = root
.descendants()
.filter_map(latex::Citation::cast)
@@ -38,10 +41,11 @@ pub(super) fn rename(context: &CursorContext<Params>) -> Option<RenameResult> {
insert: context.params.new_name.clone(),
})
.collect();
changes.insert(document, edits);
changes.insert(*document, edits);
}
DocumentData::Bib(data) => {
let root = data.root(context.db);
let root = data.root_node();
let edits: Vec<_> = root
.descendants()
.filter_map(bibtex::Entry::cast)
@@ -52,10 +56,14 @@ pub(super) fn rename(context: &CursorContext<Params>) -> Option<RenameResult> {
insert: context.params.new_name.clone(),
})
.collect();
changes.insert(document, edits);
changes.insert(*document, edits);
}
DocumentData::Log(_) | DocumentData::TexlabRoot(_) | DocumentData::Tectonic(_) => {}
}
DocumentData::Aux(_)
| DocumentData::Log(_)
| DocumentData::Root
| DocumentData::Tectonic => {}
};
}
Some(RenameResult { changes })
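
A citation key lives in two syntaxes at once, so the rename above matches on each related document's data and edits both sides. A reduced sketch of that dispatch (the enum mirrors `DocumentData`, but the payloads are simplified to plain key lists):

```rust
// Simplified document payloads: just the citation/entry keys they contain.
enum DocumentData {
    Tex(Vec<String>),
    Bib(Vec<String>),
    Other,
}

// Count how many edits a rename of `key` would produce per document.
fn edit_count(data: &DocumentData, key: &str) -> usize {
    match data {
        DocumentData::Tex(keys) | DocumentData::Bib(keys) => {
            keys.iter().filter(|candidate| candidate.as_str() == key).count()
        }
        DocumentData::Other => 0,
    }
}

fn main() {
    let tex = DocumentData::Tex(vec!["knuth84".into(), "lamport94".into()]);
    let bib = DocumentData::Bib(vec!["knuth84".into()]);
    assert_eq!(edit_count(&tex, "knuth84") + edit_count(&bib, "knuth84"), 2);
}
```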

View file

@@ -1,6 +1,6 @@
use rowan::{ast::AstNode, TextRange};
use base_db::DocumentData;
use rowan::TextRange;
use rustc_hash::FxHashMap;
use syntax::latex;
use crate::util::cursor::CursorContext;
@@ -11,66 +11,26 @@ pub(super) fn prepare_rename<T>(context: &CursorContext<T>) -> Option<TextRange>
Some(range)
}
pub(super) fn rename(context: &CursorContext<Params>) -> Option<RenameResult> {
pub(super) fn rename<'a>(context: &CursorContext<'a, Params>) -> Option<RenameResult<'a>> {
prepare_rename(context)?;
let (name_text, _) = context.find_label_name_key()?;
let mut changes = FxHashMap::default();
for document in context.related() {
if let Some(data) = document.parse(context.db).as_tex() {
let mut edits = Vec::new();
for node in data.root(context.db).descendants() {
if let Some(range) = latex::LabelDefinition::cast(node.clone())
.and_then(|label| label.name())
.and_then(|name| name.key())
.filter(|name| name.to_string() == name_text)
.map(|name| latex::small_range(&name))
{
edits.push(Indel {
delete: range,
insert: context.params.new_name.clone(),
});
}
for document in &context.related {
let DocumentData::Tex(data) = &document.data else { continue };
latex::LabelReference::cast(node.clone())
.and_then(|label| label.name_list())
.into_iter()
.flat_map(|label| label.keys())
.filter(|name| name.to_string() == name_text)
.for_each(|name| {
edits.push(Indel {
delete: latex::small_range(&name),
insert: context.params.new_name.clone(),
});
});
let edits = data
.semantics
.labels
.iter()
.filter(|label| label.name.text == name_text)
.map(|label| Indel {
delete: label.name.range,
insert: context.params.new_name.clone(),
})
.collect();
if let Some(label) = latex::LabelReferenceRange::cast(node.clone()) {
if let Some(name_from) = label
.from()
.and_then(|name| name.key())
.filter(|name| name.to_string() == name_text)
{
edits.push(Indel {
delete: latex::small_range(&name_from),
insert: context.params.new_name.clone(),
});
}
if let Some(name_to) = label
.to()
.and_then(|name| name.key())
.filter(|name| name.to_string() == name_text)
{
edits.push(Indel {
delete: latex::small_range(&name_to),
insert: context.params.new_name.clone(),
});
}
}
}
changes.insert(document, edits);
}
changes.insert(*document, edits);
}
Some(RenameResult { changes })
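
Label renaming no longer walks definitions, references, and reference ranges separately; the precomputed `semantics.labels` table already records every occurrence. A minimal sketch of the new lookup (field names follow the diff; the types are stand-ins):

```rust
// Stand-ins for the semantics model built at parse time.
struct Span {
    text: String,
    range: (u32, u32),
}

struct Label {
    name: Span,
}

// One pass over the precomputed labels replaces the old syntax-tree walk.
fn label_edits(labels: &[Label], target: &str, new_name: &str) -> Vec<((u32, u32), String)> {
    labels
        .iter()
        .filter(|label| label.name.text == target)
        .map(|label| (label.name.range, new_name.to_string()))
        .collect()
}

fn main() {
    let labels = vec![
        Label { name: Span { text: "sec:intro".into(), range: (7, 16) } },
        Label { name: Span { text: "sec:body".into(), range: (42, 50) } },
    ];
    let edits = label_edits(&labels, "sec:intro", "sec:start");
    assert_eq!(edits, vec![((7, 16), "sec:start".to_string())]);
}
```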

View file

@@ -5,31 +5,35 @@ mod types;
use std::cmp::Reverse;
use lsp_types::{DocumentSymbolResponse, SymbolInformation, Url, WorkspaceSymbolParams};
use base_db::Workspace;
use lsp_types::{
ClientCapabilities, DocumentSymbolResponse, SymbolInformation, Url, WorkspaceSymbolParams,
};
use crate::{db::Workspace, util::capabilities::ClientCapabilitiesExt, Db};
use crate::util::capabilities::ClientCapabilitiesExt;
use self::{project_order::ProjectOrdering, types::InternalSymbol};
pub fn find_document_symbols(db: &dyn Db, uri: &Url) -> Option<DocumentSymbolResponse> {
let workspace = Workspace::get(db);
let document = workspace.lookup_uri(db, uri)?;
pub fn find_document_symbols(
workspace: &Workspace,
uri: &Url,
client_capabilities: &ClientCapabilities,
) -> Option<DocumentSymbolResponse> {
let document = workspace.lookup(uri)?;
let related = workspace.related(document);
let mut buf = Vec::new();
latex::find_symbols(db, document, &mut buf);
bibtex::find_symbols(db, document, &mut buf);
latex::find_symbols(workspace, &related, document, &mut buf);
bibtex::find_symbols(document, &mut buf);
let config = &db.config().symbols;
let config = &workspace.config().symbols;
InternalSymbol::filter(&mut buf, &config);
InternalSymbol::filter(&mut buf, config);
if workspace
.client_capabilities(db)
.has_hierarchical_document_symbol_support()
{
if client_capabilities.has_hierarchical_document_symbol_support() {
let symbols = buf
.into_iter()
.map(|symbol| symbol.into_document_symbol(db))
.map(|symbol| symbol.into_document_symbol())
.collect();
Some(DocumentSymbolResponse::Nested(symbols))
@@ -44,7 +48,7 @@ pub fn find_document_symbols(db: &dyn Db, uri: &Url) -> Option<DocumentSymbolRes
.map(|symbol| symbol.into_symbol_info(uri.clone()))
.collect();
sort_symbols(db, &mut new_buf);
sort_symbols(workspace, &mut new_buf);
Some(DocumentSymbolResponse::Flat(new_buf))
}
}
@@ -57,16 +61,17 @@ struct WorkspaceSymbol {
#[must_use]
pub fn find_workspace_symbols(
db: &dyn Db,
workspace: &Workspace,
params: &WorkspaceSymbolParams,
) -> Vec<SymbolInformation> {
let mut symbols = Vec::new();
let workspace = Workspace::get(db);
for document in workspace.documents(db).iter().copied() {
for document in workspace.iter() {
let related = workspace.related(document);
let mut buf = Vec::new();
latex::find_symbols(db, document, &mut buf);
bibtex::find_symbols(db, document, &mut buf);
latex::find_symbols(workspace, &related, document, &mut buf);
bibtex::find_symbols(document, &mut buf);
let mut new_buf = Vec::new();
for symbol in buf {
@@ -76,7 +81,7 @@ pub fn find_workspace_symbols(
for symbol in new_buf {
symbols.push(WorkspaceSymbol {
search_text: symbol.search_text(),
info: symbol.into_symbol_info(document.location(db).uri(db).clone()),
info: symbol.into_symbol_info(document.uri.clone()),
});
}
}
@@ -102,20 +107,20 @@ pub fn find_workspace_symbols(
}
}
sort_symbols(db, &mut filtered);
sort_symbols(workspace, &mut filtered);
filtered
}
fn sort_symbols(db: &dyn Db, symbols: &mut [SymbolInformation]) {
let ordering = ProjectOrdering::new(db);
fn sort_symbols(workspace: &Workspace, symbols: &mut [SymbolInformation]) {
let ordering = ProjectOrdering::new(workspace);
symbols.sort_by(|left, right| {
let left_key = (
ordering.get(db, &left.location.uri),
ordering.get(&left.location.uri),
left.location.range.start,
Reverse(left.location.range.end),
);
let right_key = (
ordering.get(db, &right.location.uri),
ordering.get(&right.location.uri),
right.location.range.start,
Reverse(right.location.range.end),
);
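
The tuple key above sorts symbols by project order, then start position, and wraps the end position in `Reverse` so an enclosing symbol sorts before the symbols nested inside it. The effect, reduced to plain tuples:

```rust
use std::cmp::Reverse;

fn main() {
    // (project_order, start, end) triples standing in for SymbolInformation.
    let mut symbols = vec![(0, 10, 12), (0, 10, 20), (0, 5, 8)];
    symbols.sort_by_key(|&(project, start, end)| (project, start, Reverse(end)));
    // The outer 10..20 symbol now precedes the nested 10..12 one.
    assert_eq!(symbols, vec![(0, 5, 8), (0, 10, 20), (0, 10, 12)]);
}
```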

View file

@@ -1,27 +1,22 @@
use base_db::{Document, DocumentData, LineIndex};
use rowan::ast::AstNode;
use syntax::bibtex::{self, HasName, HasType};
use crate::{
db::Document,
util::{
lang_data::{BibtexEntryTypeCategory, LANGUAGE_DATA},
line_index::LineIndex,
line_index_ext::LineIndexExt,
},
Db,
use crate::util::{
lang_data::{BibtexEntryTypeCategory, LANGUAGE_DATA},
line_index_ext::LineIndexExt,
};
use super::types::{InternalSymbol, InternalSymbolKind};
pub fn find_symbols(db: &dyn Db, document: Document, buf: &mut Vec<InternalSymbol>) -> Option<()> {
let data = document.parse(db).as_bib()?;
let line_index = document.line_index(db);
for node in data.root(db).children() {
pub fn find_symbols(document: &Document, buf: &mut Vec<InternalSymbol>) {
let DocumentData::Bib(data) = &document.data else { return };
let line_index = &document.line_index;
for node in data.root_node().children() {
process_string(node.clone(), line_index, buf)
.or_else(|| process_entry(node, line_index, buf));
}
Some(())
}
fn process_string(
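
The collectors now return `()` and bail out with `let`-`else` on the wrong document kind instead of threading `Option<()>` through every caller. The shape of that pattern in isolation:

```rust
enum DocumentData {
    Bib(Vec<String>),
    Other,
}

// Early return on the wrong document kind; no Option<()> plumbing needed.
fn find_symbols(data: &DocumentData, buf: &mut Vec<String>) {
    let DocumentData::Bib(entries) = data else { return };
    buf.extend(entries.iter().cloned());
}

fn main() {
    let mut buf = Vec::new();
    find_symbols(&DocumentData::Bib(vec!["@article{knuth84}".into()]), &mut buf);
    find_symbols(&DocumentData::Other, &mut buf);
    assert_eq!(buf.len(), 1);
}
```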

View file

@@ -1,29 +1,37 @@
use std::str::FromStr;
use base_db::{semantics::Span, Document, DocumentData, Workspace};
use lsp_types::Range;
use rowan::ast::AstNode;
use rowan::{ast::AstNode, TextRange};
use rustc_hash::FxHashSet;
use syntax::latex::{self, HasBrack, HasCurly};
use titlecase::titlecase;
use crate::{
db::{Document, Word, Workspace},
util::{
label::{find_caption_by_parent, LabeledFloatKind},
line_index_ext::LineIndexExt,
},
Db,
use crate::util::{
label::{find_caption_by_parent, LabeledFloatKind},
line_index_ext::LineIndexExt,
};
use super::types::{InternalSymbol, InternalSymbolKind};
pub fn find_symbols(db: &dyn Db, document: Document, buf: &mut Vec<InternalSymbol>) -> Option<()> {
let data = document.parse(db).as_tex()?;
let mut symbols = visit(db, document, data.root(db));
pub fn find_symbols(
workspace: &Workspace,
related: &FxHashSet<&Document>,
document: &Document,
buf: &mut Vec<InternalSymbol>,
) {
let DocumentData::Tex(data) = &document.data else { return };
let mut symbols = visit(workspace, related, document, data.root_node());
buf.append(&mut symbols);
Some(())
}
fn visit(db: &dyn Db, document: Document, node: latex::SyntaxNode) -> Vec<InternalSymbol> {
fn visit(
workspace: &Workspace,
related: &FxHashSet<&Document>,
document: &Document,
node: latex::SyntaxNode,
) -> Vec<InternalSymbol> {
let symbol = match node.kind() {
latex::PART
| latex::CHAPTER
@@ -31,23 +39,25 @@ fn visit(db: &dyn Db, document: Document, node: latex::SyntaxNode) -> Vec<Intern
| latex::SUBSECTION
| latex::SUBSUBSECTION
| latex::PARAGRAPH
| latex::SUBPARAGRAPH => visit_section(db, document, node.clone()),
latex::ENUM_ITEM => visit_enum_item(db, document, node.clone()),
latex::EQUATION => visit_equation(db, document, node.clone()),
| latex::SUBPARAGRAPH => visit_section(related, document, node.clone()),
latex::ENUM_ITEM => visit_enum_item(workspace, related, document, node.clone()),
latex::EQUATION => visit_equation(related, document, node.clone()),
latex::ENVIRONMENT => latex::Environment::cast(node.clone())
.and_then(|env| env.begin())
.and_then(|begin| begin.name())
.and_then(|name| name.key())
.map(|name| name.to_string())
.and_then(|name| {
if db.config().syntax.math_environments.contains(&name) {
visit_equation_environment(db, document, node.clone())
} else if db.config().syntax.enum_environments.contains(&name) {
visit_enumeration(db, document, node.clone(), &name)
let config = &workspace.config().syntax;
if config.math_environments.contains(&name) {
visit_equation_environment(related, document, node.clone())
} else if config.enum_environments.contains(&name) {
visit_enumeration(related, document, node.clone(), &name)
} else if let Ok(float_kind) = LabeledFloatKind::from_str(&name) {
visit_float(db, document, node.clone(), float_kind)
visit_float(related, document, node.clone(), float_kind)
} else {
visit_theorem(db, document, node.clone(), &name)
visit_theorem(related, document, node.clone(), &name)
}
}),
_ => None,
@@ -56,51 +66,53 @@ fn visit(db: &dyn Db, document: Document, node: latex::SyntaxNode) -> Vec<Intern
match symbol {
Some(mut parent) => {
for child in node.children() {
parent.children.append(&mut visit(db, document, child));
parent
.children
.append(&mut visit(workspace, related, document, child));
}
vec![parent]
}
None => {
let mut symbols = Vec::new();
for child in node.children() {
symbols.append(&mut visit(db, document, child));
symbols.append(&mut visit(workspace, related, document, child));
}
symbols
}
}
}
fn visit_section(
db: &dyn Db,
document: Document,
related: &FxHashSet<&Document>,
document: &Document,
node: latex::SyntaxNode,
) -> Option<InternalSymbol> {
let section = latex::Section::cast(node)?;
let full_range = document
.line_index(db)
.line_index
.line_col_lsp_range(latex::small_range(&section));
let group = section.name()?;
let group_text = group.content_text()?;
let symbol = match find_label_by_parent(db, document, section.syntax()) {
Some(NumberedLabel {
name: label,
range: selection_range,
number,
}) => {
let name = match number {
Some(number) => format!("{} {}", number.text(db), group_text),
let label = NumberedLabel::find(related, section.syntax());
let symbol = match label {
Some(label) => {
let name = match label.number {
Some(number) => format!("{} {}", number, group_text),
None => group_text,
};
InternalSymbol {
name,
label: Some(label),
label: Some(label.name.text),
kind: InternalSymbolKind::Section,
deprecated: false,
full_range,
selection_range,
selection_range: document.line_index.line_col_lsp_range(label.range),
children: Vec::new(),
}
}
@@ -119,11 +131,12 @@ fn visit_section(
}
fn visit_enum_item(
db: &dyn Db,
document: Document,
workspace: &Workspace,
related: &FxHashSet<&Document>,
document: &Document,
node: latex::SyntaxNode,
) -> Option<InternalSymbol> {
let enum_envs = &db.config().syntax.enum_environments;
let enum_envs = &workspace.config().syntax.enum_environments;
let enum_item = latex::EnumItem::cast(node.clone())?;
if !enum_item
.syntax()
@@ -138,7 +151,7 @@ fn visit_enum_item(
}
let full_range = document
.line_index(db)
.line_index
.line_col_lsp_range(latex::small_range(&enum_item));
let name = enum_item
@@ -146,20 +159,14 @@ fn visit_enum_item(
.and_then(|label| label.content_text())
.unwrap_or_else(|| "Item".to_string());
let symbol = match find_label_by_parent(db, document, &node) {
Some(NumberedLabel {
name: label,
range: selection_range,
number,
}) => InternalSymbol {
name: number
.map(|num| num.text(db).clone())
.unwrap_or_else(|| name.clone()),
label: Some(label),
let symbol = match NumberedLabel::find(related, &node) {
Some(label) => InternalSymbol {
name: label.number.map_or_else(|| name.clone(), String::from),
label: Some(label.name.text),
kind: InternalSymbolKind::EnumerationItem,
deprecated: false,
full_range,
selection_range,
selection_range: document.line_index.line_col_lsp_range(label.range),
children: Vec::new(),
},
None => InternalSymbol {
@@ -172,61 +179,58 @@ fn visit_enum_item(
children: Vec::new(),
},
};
Some(symbol)
}
fn visit_equation(
db: &dyn Db,
document: Document,
related: &FxHashSet<&Document>,
document: &Document,
node: latex::SyntaxNode,
) -> Option<InternalSymbol> {
let equation = latex::Equation::cast(node)?;
let full_range = document
.line_index(db)
.line_index
.line_col_lsp_range(latex::small_range(&equation));
make_equation_symbol(db, document, equation.syntax(), full_range)
make_equation_symbol(related, document, equation.syntax(), full_range)
}
fn visit_equation_environment(
db: &dyn Db,
document: Document,
related: &FxHashSet<&Document>,
document: &Document,
node: latex::SyntaxNode,
) -> Option<InternalSymbol> {
let environment = latex::Environment::cast(node)?;
let full_range = document
.line_index(db)
.line_index
.line_col_lsp_range(latex::small_range(&environment));
make_equation_symbol(db, document, environment.syntax(), full_range)
make_equation_symbol(related, document, environment.syntax(), full_range)
}
fn make_equation_symbol(
db: &dyn Db,
document: Document,
related: &FxHashSet<&Document>,
document: &Document,
node: &latex::SyntaxNode,
full_range: Range,
) -> Option<InternalSymbol> {
let symbol = match find_label_by_parent(db, document, node) {
Some(NumberedLabel {
name: label,
range: selection_range,
number,
}) => {
let name = match number {
Some(number) => format!("Equation ({})", number.text(db)),
let symbol = match NumberedLabel::find(related, node) {
Some(label) => {
let name = match label.number {
Some(number) => format!("Equation ({})", number),
None => "Equation".to_string(),
};
InternalSymbol {
name,
label: Some(label),
label: Some(label.name.text),
kind: InternalSymbolKind::Equation,
deprecated: false,
full_range,
selection_range,
selection_range: document.line_index.line_col_lsp_range(label.range),
children: Vec::new(),
}
}
@@ -240,39 +244,36 @@ fn make_equation_symbol(
children: Vec::new(),
},
};
Some(symbol)
}
fn visit_enumeration(
db: &dyn Db,
document: Document,
related: &FxHashSet<&Document>,
document: &Document,
node: latex::SyntaxNode,
env_name: &str,
) -> Option<InternalSymbol> {
let environment = latex::Environment::cast(node)?;
let full_range = document
.line_index(db)
.line_index
.line_col_lsp_range(latex::small_range(&environment));
let name = titlecase(env_name);
let symbol = match find_label_by_parent(db, document, environment.syntax()) {
Some(NumberedLabel {
name: label,
range: selection_range,
number,
}) => {
let name = match number {
Some(number) => format!("{} {}", name, number.text(db)),
let symbol = match NumberedLabel::find(related, environment.syntax()) {
Some(label) => {
let name = match label.number {
Some(number) => format!("{} {}", name, number),
None => name,
};
InternalSymbol {
name,
label: Some(label),
label: Some(label.name.text),
kind: InternalSymbolKind::Enumeration,
deprecated: false,
full_range,
selection_range,
selection_range: document.line_index.line_col_lsp_range(label.range),
children: Vec::new(),
}
}
@@ -290,14 +291,14 @@ fn visit_enumeration(
}
fn visit_float(
db: &dyn Db,
document: Document,
related: &FxHashSet<&Document>,
document: &Document,
node: latex::SyntaxNode,
float_kind: LabeledFloatKind,
) -> Option<InternalSymbol> {
let environment = latex::Environment::cast(node)?;
let full_range = document
.line_index(db)
.line_index
.line_col_lsp_range(latex::small_range(&environment));
let (float_kind, symbol_kind) = match float_kind {
@@ -308,24 +309,20 @@ fn visit_float(
};
let caption = find_caption_by_parent(environment.syntax())?;
let symbol = match find_label_by_parent(db, document, environment.syntax()) {
Some(NumberedLabel {
name: label,
range: selection_range,
number,
}) => {
let name = match number {
Some(number) => format!("{} {}: {}", float_kind, number.text(db), caption),
let symbol = match NumberedLabel::find(related, environment.syntax()) {
Some(label) => {
let name = match label.number {
Some(number) => format!("{} {}: {}", float_kind, number, caption),
None => format!("{}: {}", float_kind, caption),
};
InternalSymbol {
name,
label: Some(label),
label: Some(label.name.text),
kind: symbol_kind,
deprecated: false,
full_range,
selection_range,
selection_range: document.line_index.line_col_lsp_range(label.range),
children: Vec::new(),
}
}
@@ -344,17 +341,16 @@ fn visit_float(
}
fn visit_theorem(
db: &dyn Db,
document: Document,
related: &FxHashSet<&Document>,
document: &Document,
node: latex::SyntaxNode,
environment_name: &str,
) -> Option<InternalSymbol> {
let definition = Workspace::get(db)
.related(db, document)
let definition = related
.iter()
.filter_map(|document| document.parse(db).as_tex())
.flat_map(|data| data.analyze(db).theorem_environments(db))
.find(|env| env.name(db).text(db) == environment_name)?;
.filter_map(|document| document.data.as_tex())
.flat_map(|data| data.semantics.theorem_definitions.iter())
.find(|theorem| theorem.name.text == environment_name)?;
let node = latex::Environment::cast(node)?;
let theorem_description = node
@@ -363,48 +359,36 @@ fn visit_theorem(
.and_then(|option| option.content_text());
let full_range = document
.line_index(db)
.line_index
.line_col_lsp_range(latex::small_range(&node));
let symbol = match find_label_by_parent(db, document, node.syntax()) {
Some(NumberedLabel {
name: label,
range: selection_range,
number,
}) => {
let name = match (number, theorem_description) {
let symbol = match NumberedLabel::find(related, node.syntax()) {
Some(label) => {
let name = match (label.number, theorem_description) {
(Some(number), Some(desc)) => {
format!(
"{} {} ({})",
definition.description(db).text(db),
number.text(db),
desc
)
format!("{} {} ({})", definition.description, number, desc)
}
(Some(number), None) => format!(
"{} {}",
definition.description(db).text(db),
number.text(db)
),
(None, Some(desc)) => format!("{} ({})", definition.description(db).text(db), desc),
(None, None) => definition.description(db).text(db).clone(),
(Some(number), None) => format!("{} {}", definition.description, number),
(None, Some(desc)) => format!("{} ({})", definition.description, desc),
(None, None) => definition.description.clone(),
};
InternalSymbol {
name,
label: Some(label),
label: Some(label.name.text),
kind: InternalSymbolKind::Theorem,
deprecated: false,
full_range,
selection_range,
selection_range: document.line_index.line_col_lsp_range(label.range),
children: Vec::new(),
}
}
None => {
let name = match theorem_description {
Some(desc) => format!("{} ({})", definition.description(db).text(db), desc),
None => definition.description(db).text(db).clone(),
Some(desc) => format!("{} ({})", definition.description, desc),
None => definition.description.clone(),
};
InternalSymbol {
name,
label: None,
@@ -416,31 +400,31 @@ fn visit_theorem(
}
}
};
Some(symbol)
}
#[derive(Debug, PartialEq, Eq, Clone)]
struct NumberedLabel {
name: Word,
range: Range,
number: Option<Word>,
#[derive(Debug)]
struct NumberedLabel<'a> {
name: Span,
range: TextRange,
number: Option<&'a str>,
}
fn find_label_by_parent(
db: &dyn Db,
document: Document,
parent: &latex::SyntaxNode,
) -> Option<NumberedLabel> {
let node = parent.children().find_map(latex::LabelDefinition::cast)?;
let name = Word::new(db, node.name()?.key()?.to_string());
let range = document
.line_index(db)
.line_col_lsp_range(latex::small_range(&node));
impl<'a> NumberedLabel<'a> {
fn find(related: &FxHashSet<&'a Document>, parent: &latex::SyntaxNode) -> Option<Self> {
let label = parent.children().find_map(latex::LabelDefinition::cast)?;
let name = Span::from(&label.name()?.key()?);
let number = related
.iter()
.filter_map(|document| document.data.as_aux())
.find_map(|data| data.semantics.label_numbers.get(&name.text))
.map(|number| number.as_str());
let number = Workspace::get(db).number_of_label(db, document, name);
Some(NumberedLabel {
name,
range,
number,
})
Some(NumberedLabel {
name,
range: latex::small_range(&label),
number,
})
}
}
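
`NumberedLabel::find` now resolves a label's printed number from the `.aux` semantics of related documents instead of a salsa query. The lookup reduced to its core (the `label_numbers` map stands in for what the aux-file semantics pass collects):

```rust
use std::collections::HashMap;

// Stand-in for the aux-file semantics of one related document.
struct AuxSemantics {
    label_numbers: HashMap<String, String>,
}

// The first related aux file that knows the label wins, as in the diff above.
fn find_number<'a>(related: &[&'a AuxSemantics], name: &str) -> Option<&'a str> {
    related
        .iter()
        .find_map(|aux| aux.label_numbers.get(name))
        .map(String::as_str)
}

fn main() {
    let mut label_numbers = HashMap::new();
    label_numbers.insert("sec:intro".to_string(), "1.1".to_string());
    let aux = AuxSemantics { label_numbers };
    assert_eq!(find_number(&[&aux], "sec:intro"), Some("1.1"));
}
```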

View file

@@ -1,25 +1,25 @@
use base_db::{graph, Document, Workspace};
use itertools::Itertools;
use lsp_types::Url;
use crate::{
db::{dependency_graph, Document, Workspace},
Db,
};
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct ProjectOrdering {
ordering: Vec<Document>,
pub struct ProjectOrdering<'a> {
ordering: Vec<&'a Document>,
}
impl ProjectOrdering {
pub fn new(db: &dyn Db) -> Self {
let workspace = Workspace::get(db);
impl<'a> ProjectOrdering<'a> {
pub fn new(workspace: &'a Workspace) -> Self {
let ordering: Vec<_> = workspace
.index_files(db)
.chain(workspace.documents(db).iter().copied())
.iter()
.filter(|document| {
document
.data
.as_tex()
.map_or(false, |data| data.semantics.can_be_root)
})
.chain(workspace.iter())
.flat_map(|document| {
dependency_graph(db, document)
graph::Graph::new(workspace, document)
.preorder()
.rev()
.collect_vec()
@@ -30,172 +30,174 @@ impl ProjectOrdering {
Self { ordering }
}
pub fn get(&self, db: &dyn Db, uri: &Url) -> usize {
pub fn get(&self, uri: &Url) -> usize {
self.ordering
.iter()
.position(|doc| doc.location(db).uri(db) == uri)
.position(|doc| doc.uri == *uri)
.unwrap_or(std::usize::MAX)
}
}
#[cfg(test)]
mod tests {
use base_db::Owner;
use distro::Language;
use crate::{db::Owner, Database};
use rowan::TextSize;
use super::*;
#[test]
fn test_no_cycles() {
let mut db = Database::default();
let workspace = Workspace::get(&db);
let mut workspace = Workspace::default();
let a = workspace.open(
&mut db,
Url::parse("http://example.com/a.tex").unwrap(),
let a = Url::parse("http://example.com/a.tex").unwrap();
let b = Url::parse("http://example.com/b.tex").unwrap();
let c = Url::parse("http://example.com/c.tex").unwrap();
workspace.open(
a.clone(),
String::new(),
Language::Tex,
Owner::Client,
TextSize::default(),
);
let b = workspace.open(
&mut db,
Url::parse("http://example.com/b.tex").unwrap(),
workspace.open(
b.clone(),
String::new(),
Language::Tex,
Owner::Client,
TextSize::default(),
);
let c = workspace.open(
&mut db,
Url::parse("http://example.com/c.tex").unwrap(),
workspace.open(
c.clone(),
r#"\documentclass{article}\include{b}\include{a}"#.to_string(),
Language::Tex,
Owner::Client,
TextSize::default(),
);
let ordering = ProjectOrdering::new(&db);
assert_eq!(ordering.get(&db, a.location(&db).uri(&db)), 0);
assert_eq!(ordering.get(&db, b.location(&db).uri(&db)), 1);
assert_eq!(ordering.get(&db, c.location(&db).uri(&db)), 2);
let ordering = ProjectOrdering::new(&workspace);
assert_eq!(ordering.get(&a), 0);
assert_eq!(ordering.get(&b), 1);
assert_eq!(ordering.get(&c), 2);
}
#[test]
fn test_two_layers() {
let mut db = Database::default();
let workspace = Workspace::get(&db);
let mut workspace = Workspace::default();
let a = workspace.open(
&mut db,
Url::parse("http://example.com/a.tex").unwrap(),
let a = Url::parse("http://example.com/a.tex").unwrap();
let b = Url::parse("http://example.com/b.tex").unwrap();
let c = Url::parse("http://example.com/c.tex").unwrap();
workspace.open(
a.clone(),
String::new(),
Language::Tex,
Owner::Client,
TextSize::default(),
);
let b = workspace.open(
&mut db,
Url::parse("http://example.com/b.tex").unwrap(),
workspace.open(
b.clone(),
r#"\include{a}"#.to_string(),
Language::Tex,
Owner::Client,
TextSize::default(),
);
let c = workspace.open(
&mut db,
Url::parse("http://example.com/c.tex").unwrap(),
workspace.open(
c.clone(),
r#"\documentclass{article}\include{b}"#.to_string(),
Language::Tex,
Owner::Client,
TextSize::default(),
);
let ordering = ProjectOrdering::new(&db);
assert_eq!(ordering.get(&db, a.location(&db).uri(&db)), 0);
assert_eq!(ordering.get(&db, b.location(&db).uri(&db)), 1);
assert_eq!(ordering.get(&db, c.location(&db).uri(&db)), 2);
let ordering = ProjectOrdering::new(&workspace);
assert_eq!(ordering.get(&a), 0);
assert_eq!(ordering.get(&b), 1);
assert_eq!(ordering.get(&c), 2);
}
#[test]
fn test_cycles() {
let mut db = Database::default();
let workspace = Workspace::get(&db);
let mut workspace = Workspace::default();
let a = workspace.open(
&mut db,
Url::parse("http://example.com/a.tex").unwrap(),
let a = Url::parse("http://example.com/a.tex").unwrap();
let b = Url::parse("http://example.com/b.tex").unwrap();
let c = Url::parse("http://example.com/c.tex").unwrap();
workspace.open(
a.clone(),
r#"\documentclass{article}\include{b}"#.to_string(),
Language::Tex,
Owner::Client,
TextSize::default(),
);
workspace.open(
&mut db,
Url::parse("http://example.com/b.tex").unwrap(),
b.clone(),
r#"\include{a}"#.to_string(),
Language::Tex,
Owner::Client,
TextSize::default(),
);
workspace.open(
&mut db,
Url::parse("http://example.com/c.tex").unwrap(),
c.clone(),
r#"\include{a}"#.to_string(),
Language::Tex,
Owner::Client,
TextSize::default(),
);
let ordering = ProjectOrdering::new(&db);
assert_ne!(ordering.get(&db, a.location(&db).uri(&db)), 0);
let ordering = ProjectOrdering::new(&workspace);
assert_ne!(ordering.get(&a), 0);
}
#[test]
fn test_multiple_roots() {
let mut db = Database::default();
let workspace = Workspace::get(&db);
let mut workspace = Workspace::default();
let a = workspace.open(
&mut db,
Url::parse("http://example.com/a.tex").unwrap(),
let a = Url::parse("http://example.com/a.tex").unwrap();
let b = Url::parse("http://example.com/b.tex").unwrap();
let c = Url::parse("http://example.com/c.tex").unwrap();
let d = Url::parse("http://example.com/d.tex").unwrap();
workspace.open(
a.clone(),
r#"\documentclass{article}\include{b}"#.to_string(),
Language::Tex,
Owner::Client,
TextSize::default(),
);
let b = workspace.open(
&mut db,
Url::parse("http://example.com/b.tex").unwrap(),
workspace.open(
b.clone(),
String::new(),
Language::Tex,
Owner::Client,
TextSize::default(),
);
let c = workspace.open(
&mut db,
Url::parse("http://example.com/c.tex").unwrap(),
workspace.open(
c.clone(),
String::new(),
Language::Tex,
Owner::Client,
TextSize::default(),
);
let d = workspace.open(
&mut db,
Url::parse("http://example.com/d.tex").unwrap(),
workspace.open(
d.clone(),
r#"\documentclass{article}\include{c}"#.to_string(),
Language::Tex,
Owner::Client,
TextSize::default(),
);
let ordering = ProjectOrdering::new(&db);
assert!(
ordering.get(&db, b.location(&db).uri(&db))
< ordering.get(&db, a.location(&db).uri(&db))
);
assert!(
ordering.get(&db, c.location(&db).uri(&db))
< ordering.get(&db, d.location(&db).uri(&db))
);
let ordering = ProjectOrdering::new(&workspace);
assert!(ordering.get(&b) < ordering.get(&a));
assert!(ordering.get(&c) < ordering.get(&d));
}
}
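
`ProjectOrdering::get` falls back to `usize::MAX` for URIs outside the ordering, so documents that belong to no known project always sort after the ones that do. The fallback in isolation:

```rust
fn position(ordering: &[&str], uri: &str) -> usize {
    ordering
        .iter()
        .position(|candidate| *candidate == uri)
        .unwrap_or(usize::MAX)
}

fn main() {
    let ordering = ["file:///a.tex", "file:///b.tex"];
    assert_eq!(position(&ordering, "file:///b.tex"), 1);
    // Unknown documents sort last.
    assert_eq!(position(&ordering, "file:///c.tex"), usize::MAX);
}
```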

View file

@@ -1,10 +1,7 @@
use base_db::SymbolConfig;
use lsp_types::{DocumentSymbol, Location, Range, SymbolInformation, SymbolKind, Url};
use crate::{
db::Word,
util::{self, lang_data::BibtexEntryTypeCategory, lsp_enums::Structure},
Db, SymbolConfig,
};
use crate::util::{self, lang_data::BibtexEntryTypeCategory, lsp_enums::Structure};
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum InternalSymbolKind {
@@ -43,7 +40,7 @@ impl InternalSymbolKind {
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct InternalSymbol {
pub name: String,
pub label: Option<Word>,
pub label: Option<String>,
pub kind: InternalSymbolKind,
pub deprecated: bool,
pub full_range: Range,
@@ -102,17 +99,17 @@ impl InternalSymbol {
}
}
pub fn into_document_symbol(self, db: &dyn Db) -> DocumentSymbol {
pub fn into_document_symbol(self) -> DocumentSymbol {
let children = self
.children
.into_iter()
.map(|child| child.into_document_symbol(db))
.map(|child| child.into_document_symbol())
.collect();
#[allow(deprecated)]
DocumentSymbol {
name: self.name,
detail: self.label.map(|word| word.text(db).clone()),
detail: self.label,
kind: self.kind.into_symbol_kind(),
deprecated: Some(self.deprecated),
range: self.full_range,

View file

@@ -1,6 +1,7 @@
use std::collections::hash_map::HashMap;
use anyhow::Result;
use base_db::Workspace;
use lsp_types::{ApplyWorkspaceEditParams, TextDocumentPositionParams, TextEdit, WorkspaceEdit};
use rowan::ast::AstNode;
use serde::{Deserialize, Serialize};
@@ -9,11 +10,10 @@ use thiserror::Error;
use crate::{
normalize_uri,
util::{cursor::CursorContext, line_index_ext::LineIndexExt},
Db,
};
fn change_environment_context(
db: &dyn Db,
workspace: &Workspace,
args: Vec<serde_json::Value>,
) -> Result<CursorContext<Params>> {
let params: ChangeEnvironmentParams = serde_json::from_value(
@@ -28,7 +28,7 @@ fn change_environment_context(
let position = params.text_document_position.position;
CursorContext::new(
db,
workspace,
&uri,
position,
Params {
@@ -39,10 +39,10 @@ fn change_environment_context(
}
pub fn change_environment(
db: &dyn Db,
workspace: &Workspace,
args: Vec<serde_json::Value>,
) -> Option<((), ApplyWorkspaceEditParams)> {
let context = change_environment_context(db, args).ok()?;
let context = change_environment_context(workspace, args).ok()?;
let (beg, end) = context.find_environment()?;
let beg_name = beg.to_string();
@@ -53,10 +53,10 @@ pub fn change_environment(
}
let new_name = &context.params.new_name;
let line_index = context.document.line_index(db);
let line_index = &context.document.line_index;
let mut changes = HashMap::default();
changes.insert(
context.document.location(db).uri(db).clone(),
context.document.uri.clone(),
vec![
TextEdit::new(
line_index.line_col_lsp_range(beg.syntax().text_range()),

View file

@@ -1,10 +1,11 @@
use std::process::Stdio;
use anyhow::Result;
use base_db::Workspace;
use lsp_types::{TextDocumentIdentifier, Url};
use thiserror::Error;
use crate::{db::Workspace, normalize_uri, Db};
use crate::normalize_uri;
#[derive(Debug, Error)]
pub enum CleanError {
@@ -31,7 +32,11 @@ pub struct CleanCommand {
}
impl CleanCommand {
pub fn new(db: &dyn Db, options: CleanOptions, args: Vec<serde_json::Value>) -> Result<Self> {
pub fn new(
workspace: &Workspace,
options: CleanOptions,
args: Vec<serde_json::Value>,
) -> Result<Self> {
let params: TextDocumentIdentifier =
serde_json::from_value(args.into_iter().next().ok_or(CleanError::MissingArg)?)
.map_err(CleanError::InvalidArg)?;
@@ -39,26 +44,19 @@ impl CleanCommand {
let mut uri = params.uri;
normalize_uri(&mut uri);
let workspace = Workspace::get(db);
let document = workspace
.lookup_uri(db, &uri)
.lookup(&uri)
.ok_or_else(|| CleanError::DocumentNotFound(uri.clone()))?;
let working_dir = workspace.working_dir(db, document.directory(db));
let output_dir = workspace
.output_dir(db, working_dir)
.path(db)
.as_deref()
.ok_or_else(|| CleanError::NoLocalFile(uri.clone()))?;
let path = document
.location(db)
.path(db)
.path
.as_deref()
.ok_or_else(|| CleanError::NoLocalFile(uri.clone()))?;
let current_dir = workspace.current_dir(&document.dir);
let output_dir = workspace.output_dir(&current_dir).to_file_path().unwrap();
let flag = match options {
CleanOptions::Auxiliary => "-c",
CleanOptions::Artifacts => "-C",
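
The `-c`/`-C` flags are latexmk's: `-c` deletes auxiliary files only, while `-C` also deletes the final artifacts such as the PDF. A sketch of the resulting invocation (paths and file names are illustrative):

```rust
use std::process::Command;

fn clean(artifacts: bool, output_dir: &str, tex_file: &str) -> std::io::Result<()> {
    let flag = if artifacts { "-C" } else { "-c" };
    Command::new("latexmk")
        .arg(flag)
        .arg(format!("-outdir={output_dir}"))
        .arg(tex_file)
        .status()?;
    Ok(())
}

fn main() -> std::io::Result<()> {
    // Remove auxiliary files but keep the PDF.
    clean(false, "/tmp/out", "main.tex")
}
```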

View file

@@ -1,33 +1,34 @@
use anyhow::Result;
use base_db::{graph, Document, Workspace};
use itertools::Itertools;
use std::io::Write;
use rustc_hash::FxHashMap;
use crate::{
db::{dependency_graph, Document, Workspace},
Db,
};
pub fn show_dependency_graph(db: &dyn Db) -> Result<String> {
let workspace = Workspace::get(db);
pub fn show_dependency_graph(workspace: &Workspace) -> Result<String> {
let documents = workspace
.documents(db)
.iter()
.enumerate()
.map(|(i, doc)| (*doc, format!("v{i:0>5}")))
.collect::<FxHashMap<Document, String>>();
.map(|(i, doc)| (doc, format!("v{i:0>5}")))
.collect::<FxHashMap<&Document, String>>();
let mut writer = Vec::new();
writeln!(&mut writer, "digraph G {{")?;
writeln!(&mut writer, "rankdir = LR;")?;
for (document, node) in &documents {
let label = document.location(db).uri(db).as_str();
let shape = if document.can_be_root(db) {
let label = document.uri.as_str();
let shape = if document
.data
.as_tex()
.map_or(false, |data| data.semantics.can_be_root)
{
"tripleoctagon"
} else if document.can_be_built(db) {
} else if document
.data
.as_tex()
.map_or(false, |data| data.semantics.can_be_compiled)
{
"doubleoctagon"
} else {
"octagon"
@@ -37,17 +38,16 @@ pub fn show_dependency_graph(db: &dyn Db) -> Result<String> {
}
for edge in workspace
.documents(db)
.iter()
.flat_map(|start| dependency_graph(db, *start).edges.iter())
.flat_map(|start| graph::Graph::new(workspace, start).edges)
.unique()
{
let source = &documents[&edge.source];
let target = &documents[&edge.target];
let source = &documents[edge.source];
let target = &documents[edge.target];
let label = edge
.origin
.weight
.as_ref()
.map_or("<artifact>", |origin| &origin.link.path(db).text(db));
.map_or("<artifact>", |weight| &weight.link.path.text);
writeln!(&mut writer, "\t{source} -> {target} [label=\"{label}\"];")?;
}
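
The generated output is plain Graphviz DOT. A self-contained sketch of a writer with the same shape (node names and labels are illustrative):

```rust
use std::io::Write;

fn render_graph(edges: &[(&str, &str, &str)]) -> std::io::Result<String> {
    let mut writer = Vec::new();
    writeln!(&mut writer, "digraph G {{")?;
    writeln!(&mut writer, "rankdir = LR;")?;
    for (source, target, label) in edges {
        writeln!(&mut writer, "\t{source} -> {target} [label=\"{label}\"];")?;
    }
    writeln!(&mut writer, "}}")?;
    Ok(String::from_utf8(writer).expect("writeln! produced valid UTF-8"))
}

fn main() -> std::io::Result<()> {
    print!("{}", render_graph(&[("v00000", "v00001", "main.tex")])?);
    Ok(())
}
```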

View file

@@ -1,97 +1,9 @@
#![allow(clippy::needless_lifetimes)]
mod client;
mod config;
pub mod db;
pub mod features;
mod server;
pub mod util;
pub use self::{client::LspClient, config::*, server::Server};
#[salsa::jar(db = Db)]
pub struct Jar(
db::Word,
db::ServerContext,
db::Location,
db::Location_path,
db::LinterData,
db::Document,
db::Document_parse,
db::Document_can_be_root,
db::Document_can_be_built,
db::Document_line_index,
db::parse::TexDocumentData,
db::parse::TexDocumentData_analyze,
db::parse::BibDocumentData,
db::parse::LogDocumentData,
db::analysis::TexLink,
db::analysis::label::Number,
db::analysis::label::Name,
db::analysis::TheoremEnvironment,
db::analysis::GraphicsPath,
db::analysis::TexAnalysis,
db::analysis::TexAnalysis_has_document_environment,
db::MissingDependencies,
db::hidden_dependency,
db::source_dependency,
db::dependency_graph,
db::Workspace,
db::Workspace_working_dir,
db::Workspace_output_dir,
db::Workspace_parents,
db::Workspace_related,
db::Workspace_number_of_label,
db::diagnostics::tex::collect,
db::diagnostics::bib::collect,
db::diagnostics::log::collect,
db::diagnostics::collect,
db::diagnostics::collect_filtered,
);
pub trait Db: salsa::DbWithJar<Jar> {
fn config(&self) -> &Config;
}
#[salsa::db(crate::Jar)]
pub struct Database {
storage: salsa::Storage<Self>,
}
impl Db for Database {
fn config(&self) -> &Config {
db::ServerContext::get(self).config(self)
}
}
impl Default for Database {
fn default() -> Self {
let storage = salsa::Storage::default();
let db = Self { storage };
db::ServerContext::new(&db, Default::default(), Default::default());
db::Workspace::new(
&db,
Default::default(),
Default::default(),
Default::default(),
Default::default(),
);
db
}
}
impl salsa::Database for Database {}
impl salsa::ParallelDatabase for Database {
fn snapshot(&self) -> salsa::Snapshot<Self> {
salsa::Snapshot::new(Self {
storage: self.storage.snapshot(),
})
}
}
pub use self::{client::LspClient, server::Server};
pub(crate) fn normalize_uri(uri: &mut lsp_types::Url) {
fn fix_drive_letter(text: &str) -> Option<String> {
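
With the salsa `Jar`, `Database`, and snapshot machinery deleted, the server's shared state becomes a plain `Workspace` behind a lock. A minimal sketch of that concurrency model (the real server uses `parking_lot::RwLock`; this uses `std::sync` to stay self-contained):

```rust
use std::sync::{Arc, RwLock};
use std::thread;

#[derive(Default)]
struct Workspace {
    documents: Vec<String>,
}

fn main() {
    let workspace = Arc::new(RwLock::new(Workspace::default()));

    // Mutations take the write lock on the main loop...
    workspace.write().unwrap().documents.push("main.tex".into());

    // ...while queries clone the Arc and read from a worker thread,
    // replacing the old salsa snapshot-and-fork dance.
    let shared = Arc::clone(&workspace);
    let worker = thread::spawn(move || shared.read().unwrap().documents.len());
    assert_eq!(worker.join().unwrap(), 1);
}
```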

View file

@@ -1,6 +1,5 @@
mod dispatch;
pub mod options;
mod query;
use std::{
path::PathBuf,
@@ -8,14 +7,15 @@ use std::{
};
use anyhow::Result;
use base_db::{Config, Owner, Workspace};
use crossbeam_channel::{Receiver, Sender};
use distro::{Distro, Language};
use log::{error, info};
use lsp_server::{Connection, ErrorCode, Message, RequestId};
use lsp_types::{notification::*, request::*, *};
use once_cell::sync::Lazy;
use parking_lot::RwLock;
use rowan::{ast::AstNode, TextSize};
use rustc_hash::FxHashSet;
use rustc_hash::{FxHashMap, FxHashSet};
use serde::{Deserialize, Serialize};
use serde_repr::{Deserialize_repr, Serialize_repr};
use syntax::bibtex;
@@ -23,7 +23,6 @@ use threadpool::ThreadPool;
use crate::{
client::LspClient,
db::{self, discover_dependencies, Document, Owner, Workspace},
features::{
build::{self, BuildParams, BuildResult, BuildStatus},
completion::{self, builder::CompletionItemData},
@@ -36,7 +35,6 @@ use crate::{
self, capabilities::ClientCapabilitiesExt, components::COMPONENT_DATABASE,
line_index_ext::LineIndexExt,
},
Config, Db,
};
use self::options::{Options, StartupOptions};
@@ -48,15 +46,18 @@ enum InternalMessage {
FileEvent(notify::Event),
ForwardSearch(Url),
Diagnostics,
ChktexResult(Url, Vec<db::diagnostics::Diagnostic>),
ChktexResult(Url, Vec<lsp_types::Diagnostic>),
}
pub struct Server {
connection: Arc<Connection>,
internal_tx: Sender<InternalMessage>,
internal_rx: Receiver<InternalMessage>,
workspace: Arc<RwLock<Workspace>>,
client: LspClient,
engine: query::Engine,
client_capabilities: Arc<ClientCapabilities>,
client_info: Option<Arc<ClientInfo>>,
chktex_diagnostics: FxHashMap<Url, Vec<Diagnostic>>,
watcher: FileWatcher,
pool: ThreadPool,
}
@@ -70,33 +71,38 @@ impl Server {
connection: Arc::new(connection),
internal_tx,
internal_rx,
workspace: Default::default(),
client,
engine: query::Engine::default(),
client_capabilities: Default::default(),
client_info: Default::default(),
chktex_diagnostics: Default::default(),
watcher,
pool: threadpool::Builder::new().build(),
}
}
fn run_with_db<R, Q>(&self, id: RequestId, query: Q)
fn run_query<R, Q>(&self, id: RequestId, query: Q)
where
R: Serialize,
Q: FnOnce(&dyn Db) -> R + Send + 'static,
Q: FnOnce(&Workspace) -> R + Send + 'static,
{
let client = self.client.clone();
self.engine.fork(move |db| {
let response = lsp_server::Response::new_ok(id, query(db));
let workspace = Arc::clone(&self.workspace);
self.pool.execute(move || {
let response = lsp_server::Response::new_ok(id, query(&workspace.read()));
client.send_response(response).unwrap();
});
}
fn run_and_request_with_db<R, S, Q>(&self, id: RequestId, query: Q)
fn run_query_and_request<R, S, Q>(&self, id: RequestId, query: Q)
where
R: Request,
S: Serialize,
Q: FnOnce(&dyn Db) -> Option<(S, R::Params)> + Send + 'static,
Q: FnOnce(&Workspace) -> Option<(S, R::Params)> + Send + 'static,
{
let client = self.client.clone();
self.engine.fork(move |db| match query(db) {
let workspace = Arc::clone(&self.workspace);
self.pool.execute(move || match query(&workspace.read()) {
Some((result, request_params)) => {
let response = lsp_server::Response::new_ok(id, result);
client.send_response(response).unwrap();
@@ -109,7 +115,7 @@ impl Server {
});
}
fn run_errorable<R, Q>(&self, id: RequestId, query: Q)
fn run_fallible<R, Q>(&self, id: RequestId, query: Q)
where
R: Serialize,
Q: FnOnce() -> Result<R> + Send + 'static,
@@ -186,31 +192,18 @@ impl Server {
let (id, params) = self.connection.initialize_start()?;
let params: InitializeParams = serde_json::from_value(params)?;
let db = self.engine.write();
let workspace = Workspace::get(db);
workspace
.set_client_capabilities(db)
.with_durability(salsa::Durability::HIGH)
.to(params.capabilities);
self.client_capabilities = Arc::new(params.capabilities);
self.client_info = params.client_info.map(Arc::new);
db::ServerContext::get(db)
.set_always_incomplete_completion_list(db)
.with_durability(salsa::Durability::HIGH)
.to(params
.client_info
.map_or(false, |client| &client.name == "Visual Studio Code"));
let root_dirs = params
let workspace_folders = params
.workspace_folders
.unwrap_or_default()
.into_iter()
.map(|folder| db::Location::new(db, folder.uri))
.filter(|folder| folder.uri.scheme() == "file")
.flat_map(|folder| folder.uri.to_file_path())
.collect();
workspace
.set_root_dirs(db)
.with_durability(salsa::Durability::HIGH)
.to(root_dirs);
self.workspace.write().set_folders(workspace_folders);
let result = InitializeResult {
capabilities: self.capabilities(),
@@ -234,7 +227,7 @@ impl Server {
Distro::default()
});
info!("Detected distribution: {:?}", distro.kind);
log::info!("Detected distribution: {:?}", distro.kind);
sender.send(InternalMessage::SetDistro(distro)).unwrap();
});
}
@@ -245,12 +238,7 @@ impl Server {
}
fn register_configuration(&mut self) {
let db = self.engine.read();
if Workspace::get(db)
.client_capabilities(db)
.has_push_configuration_support()
{
if self.client_capabilities.has_push_configuration_support() {
let registration = Registration {
id: "pull-config".to_string(),
method: DidChangeConfiguration::METHOD.to_string(),
@@ -275,19 +263,27 @@ impl Server {
}
fn update_workspace(&mut self) {
let db = self.engine.write();
discover_dependencies(db);
self.watcher.watch(db);
let mut workspace = self.workspace.write();
workspace.discover();
self.watcher.watch(&mut workspace);
drop(workspace);
self.publish_diagnostics_with_delay();
}
fn publish_diagnostics(&mut self) -> Result<()> {
let db = self.engine.read();
let workspace = self.workspace.read();
let mut all_diagnostics = util::diagnostics::collect(&workspace);
let all_diagnostics = db::diagnostics::collect_filtered(db, Workspace::get(db));
for (uri, diagnostics) in &self.chktex_diagnostics {
let Some(document) = workspace.lookup(uri) else { continue };
let Some(existing) = all_diagnostics.get_mut(document) else { continue };
existing.extend(diagnostics.iter().cloned());
}
util::diagnostics::filter(&mut all_diagnostics, &workspace);
for (document, diagnostics) in all_diagnostics {
let uri = document.location(db).uri(db).clone();
let uri = document.uri.clone();
let version = None;
let params = PublishDiagnosticsParams {
uri,
@@ -303,9 +299,8 @@ impl Server {
}
fn publish_diagnostics_with_delay(&mut self) {
let db = self.engine.read();
let sender = self.internal_tx.clone();
let delay = db.config().diagnostics.delay;
let delay = self.workspace.read().config().diagnostics.delay;
self.pool.execute(move || {
std::thread::sleep(delay);
sender.send(InternalMessage::Diagnostics).unwrap();
@@ -313,12 +308,7 @@ impl Server {
}
fn pull_options(&mut self) {
let db = self.engine.read();
let workspace = Workspace::get(db);
if !workspace
.client_capabilities(db)
.has_pull_configuration_support()
{
if !self.client_capabilities.has_pull_configuration_support() {
return;
}
@@ -341,20 +331,16 @@ impl Server {
sender.send(InternalMessage::SetOptions(options)).unwrap();
}
Err(why) => {
error!("Retrieving configuration failed: {}", why);
log::error!("Retrieving configuration failed: {}", why);
}
};
});
}
fn update_options(&mut self, options: Options) {
let db = self.engine.write();
db::ServerContext::get(db)
.set_config(db)
.with_durability(salsa::Durability::MEDIUM)
.to(Config::from(options));
self.watcher.watch(db);
let mut workspace = self.workspace.write();
workspace.set_config(Config::from(options));
self.watcher.watch(&mut workspace);
}
fn cancel(&self, _params: CancelParams) -> Result<()> {
@@ -366,12 +352,7 @@ impl Server {
}
fn did_change_configuration(&mut self, params: DidChangeConfigurationParams) -> Result<()> {
let db = self.engine.read();
let workspace = Workspace::get(db);
if workspace
.client_capabilities(db)
.has_pull_configuration_support()
{
if self.client_capabilities.has_pull_configuration_support() {
self.pull_options();
} else {
let options = self.client.parse_options(params.settings)?;
@@ -381,25 +362,24 @@ impl Server {
Ok(())
}
fn did_open(&mut self, mut params: DidOpenTextDocumentParams) -> Result<()> {
normalize_uri(&mut params.text_document.uri);
fn did_open(&mut self, params: DidOpenTextDocumentParams) -> Result<()> {
let mut uri = params.text_document.uri;
normalize_uri(&mut uri);
let db = self.engine.write();
let workspace = Workspace::get(db);
let language_id = &params.text_document.language_id;
let language = Language::from_id(language_id).unwrap_or(Language::Tex);
let document = workspace.open(
db,
params.text_document.uri,
self.workspace.write().open(
uri.clone(),
params.text_document.text,
language,
Owner::Client,
TextSize::default(),
);
self.update_workspace();
if self.engine.read().config().diagnostics.chktex.on_open {
self.run_chktex(document);
if self.workspace.read().config().diagnostics.chktex.on_open {
self.run_chktex(&uri);
}
Ok(())
@@ -409,37 +389,35 @@ impl Server {
let mut uri = params.text_document.uri;
normalize_uri(&mut uri);
let db = self.engine.write();
let workspace = Workspace::get(db);
let document = match workspace.lookup_uri(db, &uri) {
Some(document) => document,
None => return Ok(()),
};
let mut workspace = self.workspace.write();
for change in params.content_changes {
let Some(document) = workspace.lookup(&uri) else { return Ok(()) };
match change.range {
Some(range) => {
let range = document.line_index(db).offset_lsp_range(range);
document.edit(db, range, &change.text);
let range = document.line_index.offset_lsp_range(range);
drop(document);
workspace.edit(&uri, range, &change.text);
}
None => {
document
.set_text(db)
.with_durability(salsa::Durability::LOW)
.to(change.text);
document
.set_cursor(db)
.with_durability(salsa::Durability::LOW)
.to(TextSize::from(0));
let language = document.language;
drop(document);
workspace.open(
uri.clone(),
change.text,
language,
Owner::Client,
TextSize::default(),
);
}
};
}
drop(workspace);
self.update_workspace();
if self.engine.read().config().diagnostics.chktex.on_edit {
self.run_chktex(document);
if self.workspace.read().config().diagnostics.chktex.on_edit {
self.run_chktex(&uri);
}
Ok(())
@@ -449,19 +427,14 @@ impl Server {
let mut uri = params.text_document.uri;
normalize_uri(&mut uri);
let db = self.engine.read();
let workspace = Workspace::get(db);
if db.config().build.on_save {
if self.workspace.read().config().build.on_save {
self.build_internal(uri.clone(), |_| ())?;
}
self.publish_diagnostics_with_delay();
let db = self.engine.read();
if let Some(document) = workspace.lookup_uri(db, &uri) {
if db.config().diagnostics.chktex.on_save {
self.run_chktex(document);
}
if self.workspace.read().config().diagnostics.chktex.on_save {
self.run_chktex(&uri);
}
Ok(())
@@ -470,49 +443,47 @@ impl Server {
fn did_close(&mut self, params: DidCloseTextDocumentParams) -> Result<()> {
let mut uri = params.text_document.uri;
normalize_uri(&mut uri);
let db = self.engine.write();
if let Some(document) = Workspace::get(db).lookup_uri(db, &uri) {
document
.set_owner(db)
.with_durability(salsa::Durability::LOW)
.to(Owner::Server);
}
self.workspace.write().close(&uri);
self.publish_diagnostics_with_delay();
Ok(())
}
fn run_chktex(&mut self, document: Document) {
let db = self.engine.read();
if let Some(command) = util::chktex::Command::new(db, document) {
let sender = self.internal_tx.clone();
let uri = document.location(db).uri(db).clone();
self.pool.execute(move || {
let diagnostics = command.run().unwrap_or_default();
sender
.send(InternalMessage::ChktexResult(uri, diagnostics))
.unwrap();
});
}
fn run_chktex(&mut self, uri: &Url) {
let workspace = self.workspace.read();
let Some(document) = workspace.lookup(uri) else { return };
let Some(command) = util::chktex::Command::new(&workspace, document) else { return };
let sender = self.internal_tx.clone();
let uri = document.uri.clone();
self.pool.execute(move || {
let diagnostics = command.run().unwrap_or_default();
sender
.send(InternalMessage::ChktexResult(uri, diagnostics))
.unwrap();
});
}
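
The reworked `run_chktex` is fire-and-forget: the worker thread sends its diagnostics back through the internal channel and the main loop stores them in `chktex_diagnostics`. The message-passing shape in isolation (types are reduced stand-ins):

```rust
use std::sync::mpsc;
use std::thread;

enum InternalMessage {
    ChktexResult(String, Vec<String>),
}

fn main() {
    let (sender, receiver) = mpsc::channel();
    let uri = "file:///main.tex".to_string();

    // Worker thread: run the linter and report back, never blocking the loop.
    thread::spawn(move || {
        let diagnostics = vec!["Command terminated with space.".to_string()];
        sender.send(InternalMessage::ChktexResult(uri, diagnostics)).unwrap();
    });

    // Main loop: stash the result and republish diagnostics.
    let InternalMessage::ChktexResult(uri, diagnostics) = receiver.recv().unwrap();
    println!("{uri}: {} diagnostic(s)", diagnostics.len());
}
```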
fn document_link(&self, id: RequestId, params: DocumentLinkParams) -> Result<()> {
let mut uri = params.text_document.uri;
normalize_uri(&mut uri);
self.run_with_db(id, move |db| link::find_all(db, &uri).unwrap_or_default());
self.run_query(id, move |workspace| {
link::find_all(workspace, &uri).unwrap_or_default()
});
Ok(())
}
fn document_symbols(&self, id: RequestId, params: DocumentSymbolParams) -> Result<()> {
let mut uri = params.text_document.uri;
normalize_uri(&mut uri);
self.run_with_db(id, move |db| symbol::find_document_symbols(db, &uri));
let client_capabilities = Arc::clone(&self.client_capabilities);
self.run_query(id, move |workspace| {
symbol::find_document_symbols(workspace, &uri, &client_capabilities)
});
Ok(())
}
fn workspace_symbols(&self, id: RequestId, params: WorkspaceSymbolParams) -> Result<()> {
self.run_with_db(id, move |db| symbol::find_workspace_symbols(db, &params));
self.run_query(id, move |db| symbol::find_workspace_symbols(db, &params));
Ok(())
}
@@ -520,12 +491,22 @@ impl Server {
let mut uri = params.text_document_position.text_document.uri;
normalize_uri(&mut uri);
let position = params.text_document_position.position;
self.run_with_db(id, move |db| completion::complete(db, &uri, position));
let client_capabilities = Arc::clone(&self.client_capabilities);
let client_info = self.client_info.clone();
self.run_query(id, move |db| {
completion::complete(
db,
&uri,
position,
&client_capabilities,
client_info.as_deref(),
)
});
Ok(())
}
fn completion_resolve(&self, id: RequestId, mut item: CompletionItem) -> Result<()> {
self.run_with_db(id, move |db| {
self.run_query(id, move |workspace| {
match item
.data
.clone()
@@ -537,11 +518,11 @@ impl Server {
.map(Documentation::MarkupContent);
}
Some(CompletionItemData::Citation { uri, key }) => {
if let Some(root) = Workspace::get(db)
.lookup_uri(db, &uri)
.and_then(|document| document.parse(db).as_bib().map(|data| data.root(db)))
if let Some(data) = workspace
.lookup(&uri)
.and_then(|document| document.data.as_bib())
{
item.documentation = bibtex::Root::cast(root)
item.documentation = bibtex::Root::cast(data.root_node())
.and_then(|root| root.find_entry(&key))
.and_then(|entry| citeproc::render(&entry))
.map(|value| {
@@ -564,7 +545,7 @@ impl Server {
fn folding_range(&self, id: RequestId, params: FoldingRangeParams) -> Result<()> {
let mut uri = params.text_document.uri;
normalize_uri(&mut uri);
self.run_with_db(id, move |db| {
self.run_query(id, move |db| {
folding::find_all(db, &uri).unwrap_or_default()
});
Ok(())
@@ -574,7 +555,7 @@ impl Server {
let mut uri = params.text_document_position.text_document.uri;
normalize_uri(&mut uri);
let position = params.text_document_position.position;
self.run_with_db(id, move |db| {
self.run_query(id, move |db| {
reference::find_all(db, &uri, position, &params.context).unwrap_or_default()
});
@@ -585,21 +566,18 @@ impl Server {
let mut uri = params.text_document_position_params.text_document.uri;
normalize_uri(&mut uri);
let db = self.engine.write();
let workspace = Workspace::get(db);
if let Some(document) = workspace.lookup_uri(db, &uri) {
let workspace = self.workspace.read();
if let Some(document) = workspace.lookup(&uri) {
let position = document
.line_index(db)
.line_index
.offset_lsp(params.text_document_position_params.position);
document
.set_cursor(db)
.with_durability(salsa::Durability::LOW)
.to(position);
drop(workspace);
self.workspace.write().set_cursor(&uri, position);
}
let position = params.text_document_position_params.position;
self.run_with_db(id, move |db| hover::find(db, &uri, position));
self.run_query(id, move |db| hover::find(db, &uri, position));
Ok(())
}
@@ -607,7 +585,7 @@ impl Server {
let mut uri = params.text_document_position_params.text_document.uri;
normalize_uri(&mut uri);
let position = params.text_document_position_params.position;
self.run_with_db(id, move |db| {
self.run_query(id, move |db| {
definition::goto_definition(db, &uri, position)
});
@@ -617,7 +595,7 @@ impl Server {
fn prepare_rename(&self, id: RequestId, params: TextDocumentPositionParams) -> Result<()> {
let mut uri = params.text_document.uri;
normalize_uri(&mut uri);
self.run_with_db(id, move |db| {
self.run_query(id, move |db| {
rename::prepare_rename_all(db, &uri, params.position)
});
@@ -628,7 +606,7 @@ impl Server {
let mut uri = params.text_document_position.text_document.uri;
normalize_uri(&mut uri);
let position = params.text_document_position.position;
self.run_with_db(id, move |db| {
self.run_query(id, move |db| {
rename::rename_all(db, &uri, position, params.new_name)
});
@@ -639,7 +617,7 @@ impl Server {
let mut uri = params.text_document_position_params.text_document.uri;
normalize_uri(&mut uri);
let position = params.text_document_position_params.position;
self.run_with_db(id, move |db| {
self.run_query(id, move |db| {
highlight::find_all(db, &uri, position).unwrap_or_default()
});
Ok(())
@@ -648,7 +626,7 @@ impl Server {
fn formatting(&self, id: RequestId, params: DocumentFormattingParams) -> Result<()> {
let mut uri = params.text_document.uri;
normalize_uri(&mut uri);
self.run_with_db(id, move |db| {
self.run_query(id, move |db| {
formatting::format_source_code(db, &uri, &params.options)
});
@@ -658,24 +636,26 @@ impl Server {
fn execute_command(&mut self, id: RequestId, params: ExecuteCommandParams) -> Result<()> {
match params.command.as_str() {
"texlab.cleanAuxiliary" => {
let db = self.engine.read();
let workspace = self.workspace.read();
let opt = clean::CleanOptions::Auxiliary;
let command = clean::CleanCommand::new(db, opt, params.arguments);
self.run_errorable(id, || command?.run());
let command = clean::CleanCommand::new(&workspace, opt, params.arguments);
self.run_fallible(id, || command?.run());
}
"texlab.cleanArtifacts" => {
let db = self.engine.read();
let workspace = self.workspace.read();
let opt = clean::CleanOptions::Artifacts;
let command = clean::CleanCommand::new(db, opt, params.arguments);
self.run_errorable(id, || command?.run());
let command = clean::CleanCommand::new(&workspace, opt, params.arguments);
self.run_fallible(id, || command?.run());
}
"texlab.changeEnvironment" => {
self.run_and_request_with_db::<ApplyWorkspaceEdit, _, _>(id, move |db| {
change_environment::change_environment(db, params.arguments)
self.run_query_and_request::<ApplyWorkspaceEdit, _, _>(id, move |workspace| {
change_environment::change_environment(workspace, params.arguments)
});
}
"texlab.showDependencyGraph" => {
self.run_with_db(id, move |db| dep_graph::show_dependency_graph(db).unwrap());
self.run_query(id, move |workspace| {
dep_graph::show_dependency_graph(workspace).unwrap()
});
}
_ => {
self.client
@@ -694,7 +674,7 @@ impl Server {
fn inlay_hints(&self, id: RequestId, params: InlayHintParams) -> Result<()> {
let mut uri = params.text_document.uri;
normalize_uri(&mut uri);
self.run_with_db(id, move |db| {
self.run_query(id, move |db| {
inlay_hint::find_all(db, &uri, params.range).unwrap_or_default()
});
Ok(())
@@ -734,16 +714,14 @@ impl Server {
) -> Result<()> {
static LOCK: Lazy<Mutex<()>> = Lazy::new(|| Mutex::new(()));
let db = self.engine.read();
let compiler = match build::Command::new(db, uri.clone(), self.client.clone()) {
Some(compiler) => compiler,
None => {
callback(BuildStatus::FAILURE);
return Ok(());
}
let workspace = self.workspace.read();
let client = self.client.clone();
let Some(compiler) = build::Command::new(&workspace, uri.clone(), client, &self.client_capabilities) else {
callback(BuildStatus::FAILURE);
return Ok(());
};
let forward_search_after = db.config().build.forward_search_after;
let forward_search_after = workspace.config().build.forward_search_after;
let sender = self.internal_tx.clone();
self.pool.execute(move || {
@ -780,8 +758,9 @@ impl Server {
position: Option<Position>,
callback: impl FnOnce(ForwardSearchStatus) + Send + 'static,
) -> Result<()> {
let db = self.engine.read();
let command = match forward_search::Command::configure(db, &uri, position) {
let workspace = self.workspace.read();
let command = match forward_search::Command::configure(&workspace, &uri, position) {
Ok(command) => command,
Err(why) => {
log::error!("Forward search failed: {}", why);
@ -816,37 +795,27 @@ impl Server {
fn handle_file_event(&mut self, event: notify::Event) {
let mut changed = false;
let db = self.engine.write();
let workspace = Workspace::get(db);
let mut workspace = self.workspace.write();
match event.kind {
notify::EventKind::Create(_) | notify::EventKind::Modify(_) => {
for path in event.paths {
if workspace
.lookup_path(db, &path)
.map_or(true, |document| document.owner(db) == Owner::Server)
.lookup_path(&path)
.map_or(true, |document| document.owner == Owner::Server)
{
if let Some(language) = Language::from_path(&path) {
workspace.load(db, &path, language, Owner::Server);
changed = true;
changed |= workspace.load(&path, language, Owner::Server).is_ok();
}
}
}
}
notify::EventKind::Remove(_) => {
for path in event.paths {
if let Some(document) = workspace.lookup_path(db, &path) {
if document.owner(db) == Owner::Server {
let mut documents = workspace
.set_documents(db)
.with_durability(salsa::Durability::LOW)
.to(FxHashSet::default());
documents.remove(&document);
workspace
.set_documents(db)
.with_durability(salsa::Durability::MEDIUM)
.to(documents);
if let Some(document) = workspace.lookup_path(&path) {
if document.owner == Owner::Server {
let uri = document.uri.clone();
drop(document);
workspace.remove(&uri);
changed = true;
}
}
@ -855,6 +824,7 @@ impl Server {
notify::EventKind::Any | notify::EventKind::Access(_) | notify::EventKind::Other => {}
};
drop(workspace);
if changed {
self.publish_diagnostics_with_delay();
}
@ -944,11 +914,7 @@ impl Server {
recv(&self.internal_rx) -> msg => {
match msg? {
InternalMessage::SetDistro(distro) => {
let db = self.engine.write();
Workspace::get(db)
.set_file_name_db(db)
.with_durability(salsa::Durability::HIGH)
.to(distro.file_name_db);
self.workspace.write().set_distro(distro);
}
InternalMessage::SetOptions(options) => {
self.update_options(options);
@ -963,12 +929,7 @@ impl Server {
self.publish_diagnostics()?;
}
InternalMessage::ChktexResult(uri, diagnostics) => {
let db = self.engine.write();
let workspace = Workspace::get(db);
if let Some(document) = workspace.lookup_uri(db, &uri) {
document.linter(db).set_chktex(db).to(diagnostics);
}
self.chktex_diagnostics.insert(uri, diagnostics);
self.publish_diagnostics()?;
}
};
@ -981,7 +942,6 @@ impl Server {
self.initialize()?;
self.process_messages()?;
self.pool.join();
self.engine.finish();
Ok(())
}
}
@ -1005,9 +965,8 @@ impl FileWatcher {
})
}
pub fn watch(&mut self, db: &dyn Db) {
let workspace = Workspace::get(db);
workspace.watch(db, &mut self.watcher, &mut self.watched_dirs);
pub fn watch(&mut self, workspace: &mut Workspace) {
workspace.watch(&mut self.watcher, &mut self.watched_dirs);
}
}
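Side note on the watcher plumbing: with the salsa Engine gone, the watcher only has to hand filesystem events to the server. A minimal sketch of how such a notify 5 watcher can be constructed (the crossbeam channel wiring is an assumption, not necessarily texlab's exact setup):

use notify::RecommendedWatcher;

// Hypothetical constructor: forward filesystem events into the server's
// channel; notify invokes the closure on its own thread.
fn create_watcher(
    sender: crossbeam_channel::Sender<notify::Event>,
) -> notify::Result<RecommendedWatcher> {
    notify::recommended_watcher(move |event: notify::Result<notify::Event>| {
        if let Ok(event) = event {
            let _ = sender.send(event);
        }
    })
}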

View file

@ -1,5 +1,4 @@
use anyhow::Result;
use log::warn;
use lsp_server::{ErrorCode, Notification, Request, RequestId, Response};
use serde::de::DeserializeOwned;
@ -37,7 +36,7 @@ impl NotificationDispatcher {
pub fn default(self) {
if let Some(not) = &self.not {
warn!("Unknown notification: {}", not.method);
log::warn!("Unknown notification: {}", not.method);
}
}
}
@ -76,7 +75,7 @@ impl RequestDispatcher {
pub fn default(self) -> Option<Response> {
self.req.map(|req| {
warn!("Unknown request: {}", req.method);
log::warn!("Unknown request: {}", req.method);
Response::new_err(
req.id,
ErrorCode::MethodNotFound as i32,

View file

@ -1,10 +1,9 @@
use std::time::Duration;
use base_db::{Config, Formatter, SynctexConfig};
use regex::Regex;
use serde::{Deserialize, Serialize};
use crate::{Config, Formatter, SynctexConfig};
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
#[serde(default)]

View file

@ -1,32 +0,0 @@
use salsa::{DbWithJar, ParallelDatabase};
use threadpool::ThreadPool;
use crate::{Database, Db};
#[derive(Default)]
pub struct Engine {
db: Database,
pool: ThreadPool,
}
impl Engine {
pub fn read(&self) -> &dyn Db {
&self.db
}
pub fn write(&mut self) -> &mut dyn Db {
self.pool.join();
&mut self.db
}
pub fn fork<F: FnOnce(&dyn Db) + Send + 'static>(&self, action: F) {
let snapshot = self.db.snapshot();
self.pool.execute(move || {
action(snapshot.as_jar_db());
});
}
pub fn finish(self) {
self.pool.join();
}
}
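This deleted Engine was the last salsa touchpoint: write() had to join the pool before mutating the database, and fork() shipped snapshots to workers. A minimal sketch of the lock-based pattern that replaces it (type names and the std RwLock are assumptions; the server's run_query above is the real entry point):

use std::sync::{Arc, RwLock};

use threadpool::ThreadPool;

// Hypothetical stand-in for base_db::Workspace, just to keep the sketch self-contained.
#[derive(Default)]
struct Workspace;

struct Server {
    workspace: Arc<RwLock<Workspace>>,
    pool: ThreadPool,
}

impl Server {
    fn new() -> Self {
        Self {
            workspace: Arc::default(),
            pool: ThreadPool::new(4),
        }
    }

    // Unlike Engine::fork, no snapshot is taken: workers share the workspace
    // behind the lock and hold a read guard only while the query runs.
    fn run_query(&self, query: impl FnOnce(&Workspace) + Send + 'static) {
        let workspace = Arc::clone(&self.workspace);
        self.pool.execute(move || {
            let workspace = workspace.read().unwrap();
            query(&workspace);
        });
    }
}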

View file

@ -2,9 +2,9 @@ pub mod capabilities;
pub mod chktex;
pub mod components;
pub mod cursor;
pub mod diagnostics;
pub mod label;
pub mod lang_data;
pub mod line_index;
pub mod line_index_ext;
pub mod lsp_enums;
pub mod regex_filter;

View file

@ -4,19 +4,14 @@ use std::{
process::Stdio,
};
use base_db::{Document, Workspace};
use distro::Language;
use encoding_rs_io::DecodeReaderBytesBuilder;
use lsp_types::{Diagnostic, NumberOrString};
use lsp_types::{DiagnosticSeverity, Position, Range};
use once_cell::sync::Lazy;
use regex::Regex;
use crate::{
db::{
diagnostics::{Diagnostic, DiagnosticCode},
Document, Workspace,
},
Db,
};
#[derive(Debug)]
pub struct Command {
text: String,
@ -24,25 +19,26 @@ pub struct Command {
}
impl Command {
pub fn new(db: &dyn Db, document: Document) -> Option<Self> {
document.parse(db).as_tex()?;
pub fn new(workspace: &Workspace, document: &Document) -> Option<Self> {
if document.language != Language::Tex {
return None;
}
let workspace = Workspace::get(db);
let parent = workspace
.parents(db, document)
.iter()
.parents(document)
.into_iter()
.next()
.map_or(document, Clone::clone);
.unwrap_or(document);
let working_dir = workspace
.working_dir(db, parent.directory(db))
.path(db)
.as_deref()?
.to_owned();
if parent.uri.scheme() != "file" {
log::warn!("Calling ChkTeX on non-local files is not supported yet.");
return None;
}
let working_dir = workspace.current_dir(&parent.dir).to_file_path().ok()?;
log::debug!("Calling ChkTeX from directory: {}", working_dir.display());
let text = document.text(db).clone();
let text = document.text.clone();
Some(Self { text, working_dir })
}
@ -88,9 +84,14 @@ impl Command {
diagnostics.push(Diagnostic {
range,
severity,
code: DiagnosticCode::Chktex(code.into()),
severity: Some(severity),
code: Some(NumberOrString::String(code.into())),
message,
code_description: None,
source: Some(String::from("ChkTeX")),
related_information: None,
tags: None,
data: None,
});
}
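For context, ChkTeX is spawned with a machine-readable format string (something like -f%l:%c:%d:%k:%n:%m, though the exact flag is an assumption) and each output record is split into the fields consumed above. A hedged sketch of that parsing step:

use once_cell::sync::Lazy;
use regex::Regex;

// One record per warning, in the shape produced by a colon-separated
// line:column:length:kind:code:message format string.
static RECORD: Lazy<Regex> =
    Lazy::new(|| Regex::new(r"^(\d+):(\d+):(\d+):(\w+):(\w+):(.*)$").unwrap());

struct ChktexRecord<'a> {
    line: u32,
    column: u32,
    length: u32,
    kind: &'a str,
    code: &'a str,
    message: &'a str,
}

fn parse_record(text: &str) -> Option<ChktexRecord<'_>> {
    let captures = RECORD.captures(text)?;
    Some(ChktexRecord {
        line: captures[1].parse().ok()?,
        column: captures[2].parse().ok()?,
        length: captures[3].parse().ok()?,
        kind: captures.get(4)?.as_str(),
        code: captures.get(5)?.as_str(),
        message: captures.get(6)?.as_str(),
    })
}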

View file

@ -1,17 +1,14 @@
use std::io::Read;
use base_db::{semantics::tex::LinkKind, Document};
use flate2::read::GzDecoder;
use itertools::Itertools;
use lsp_types::{MarkupContent, MarkupKind};
use once_cell::sync::Lazy;
use rustc_hash::FxHashSet;
use serde::Deserialize;
use smol_str::SmolStr;
use crate::{
db::{analysis::TexLinkKind, Document, Workspace},
Db,
};
#[derive(Debug, PartialEq, Eq, Clone, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ComponentDatabase {
@ -29,15 +26,14 @@ impl ComponentDatabase {
})
}
pub fn linked_components(&self, db: &dyn Db, child: Document) -> Vec<&Component> {
Workspace::get(db)
.related(db, child)
pub fn linked_components(&self, related: &FxHashSet<&Document>) -> Vec<&Component> {
related
.iter()
.filter_map(|document| document.parse(db).as_tex())
.flat_map(|data| data.analyze(db).links(db))
.filter_map(|link| match link.kind(db) {
TexLinkKind::Sty => Some(format!("{}.sty", link.path(db).text(db))),
TexLinkKind::Cls => Some(format!("{}.cls", link.path(db).text(db))),
.filter_map(|document| document.data.as_tex())
.flat_map(|data| data.semantics.links.iter())
.filter_map(|link| match link.kind {
LinkKind::Sty => Some(format!("{}.sty", link.path.text)),
LinkKind::Cls => Some(format!("{}.cls", link.path.text)),
_ => None,
})
.filter_map(|name| self.find(&name))

View file

@ -1,13 +1,10 @@
use base_db::{Document, DocumentData, Workspace};
use lsp_types::{Position, Url};
use rowan::{ast::AstNode, TextRange, TextSize};
use rustc_hash::FxHashSet;
use syntax::{bibtex, latex};
use crate::{
db::{parse::DocumentData, Document, Workspace},
Db,
};
use super::{line_index::LineIndex, line_index_ext::LineIndexExt};
use super::line_index_ext::LineIndexExt;
#[derive(Debug)]
pub enum Cursor {
@ -121,57 +118,49 @@ impl Cursor {
}
}
pub struct CursorContext<'db, T = ()> {
pub db: &'db dyn Db,
pub document: Document,
pub line_index: &'db LineIndex,
pub workspace: Workspace,
pub struct CursorContext<'a, T = ()> {
pub workspace: &'a Workspace,
pub document: &'a Document,
pub related: FxHashSet<&'a Document>,
pub cursor: Cursor,
pub offset: TextSize,
pub params: T,
}
impl<'db, T> CursorContext<'db, T> {
pub fn new(db: &'db dyn Db, uri: &Url, position: Position, params: T) -> Option<Self> {
let workspace = Workspace::get(db);
let document = workspace.lookup_uri(db, uri)?;
let line_index = document.line_index(db);
let offset = line_index.offset_lsp(position);
impl<'a, T> CursorContext<'a, T> {
pub fn new(workspace: &'a Workspace, uri: &Url, position: Position, params: T) -> Option<Self> {
let document = workspace.lookup(uri)?;
let offset = document.line_index.offset_lsp(position);
let cursor = match document.parse(db) {
let cursor = match &document.data {
DocumentData::Tex(data) => {
let root = data.root(db);
let root = data.root_node();
let left = root.token_at_offset(offset).left_biased();
let right = root.token_at_offset(offset).right_biased();
Cursor::new_tex(left, right)
}
DocumentData::Bib(data) => {
let root = data.root(db);
let root = data.root_node();
let left = root.token_at_offset(offset).left_biased();
let right = root.token_at_offset(offset).right_biased();
Cursor::new_bib(left, right)
}
DocumentData::Log(_) | DocumentData::TexlabRoot(_) | DocumentData::Tectonic(_) => None,
DocumentData::Aux(_)
| DocumentData::Log(_)
| DocumentData::Root
| DocumentData::Tectonic => None,
};
Some(Self {
db,
document,
line_index,
workspace,
document,
related: workspace.related(document),
cursor: cursor.unwrap_or(Cursor::Nothing),
offset,
params,
})
}
pub fn related(&self) -> impl Iterator<Item = Document> + '_ {
self.workspace
.related(self.db, self.document)
.iter()
.copied()
}
pub fn is_inside_latex_curly(&self, group: &impl latex::HasCurly) -> bool {
latex::small_range(group).contains(self.offset) || group.right_curly().is_none()
}
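The two biased lookups matter because an offset can sit exactly between two tokens, for instance right after a closing brace. A minimal illustration of rowan's three cases, independent of texlab's Cursor type:

use rowan::{TextSize, TokenAtOffset};
use syntax::latex;

// A cursor offset touches zero, one, or two tokens; Cursor::new_tex receives
// the left- and right-biased candidates and decides which one the request means.
fn describe(root: &latex::SyntaxNode, offset: TextSize) -> &'static str {
    match root.token_at_offset(offset) {
        TokenAtOffset::None => "offset outside the tree",
        TokenAtOffset::Single(_) => "inside a single token",
        TokenAtOffset::Between(_, _) => "exactly between two tokens",
    }
}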

View file

@ -0,0 +1,120 @@
use base_db::{diagnostics::ErrorCode, Document, Workspace};
use distro::Language;
use lsp_types::{DiagnosticSeverity, NumberOrString};
use rustc_hash::FxHashMap;
use syntax::BuildErrorLevel;
use crate::util;
use super::line_index_ext::LineIndexExt;
pub fn collect(workspace: &Workspace) -> FxHashMap<&Document, Vec<lsp_types::Diagnostic>> {
let mut results = FxHashMap::default();
for document in workspace.iter() {
let lsp_diagnostics = document
.diagnostics
.iter()
.map(|diagnostic| create_diagnostic(document, diagnostic))
.collect::<Vec<_>>();
results.insert(document, lsp_diagnostics);
}
for document in workspace
.iter()
.filter(|document| document.language == Language::Log)
{
for (document, diagnostics) in base_db::diagnostics::log::analyze(workspace, document) {
let lsp_diagnostics = diagnostics
.iter()
.map(|diagnostic| create_diagnostic(document, diagnostic))
.collect::<Vec<_>>();
results.get_mut(document).unwrap().extend(lsp_diagnostics);
}
}
results
}
fn create_diagnostic(
document: &Document,
diagnostic: &base_db::diagnostics::Diagnostic,
) -> lsp_types::Diagnostic {
let range = document.line_index.line_col_lsp_range(diagnostic.range);
let severity = match &diagnostic.code {
ErrorCode::UnexpectedRCurly
| ErrorCode::RCurlyInserted
| ErrorCode::MismatchedEnvironment
| ErrorCode::ExpectingLCurly
| ErrorCode::ExpectingKey
| ErrorCode::ExpectingRCurly
| ErrorCode::ExpectingEq
| ErrorCode::ExpectingFieldValue => DiagnosticSeverity::ERROR,
ErrorCode::Build(error) => match error.level {
BuildErrorLevel::Error => DiagnosticSeverity::ERROR,
BuildErrorLevel::Warning => DiagnosticSeverity::WARNING,
},
};
let code = match &diagnostic.code {
ErrorCode::UnexpectedRCurly => Some(1),
ErrorCode::RCurlyInserted => Some(2),
ErrorCode::MismatchedEnvironment => Some(3),
ErrorCode::ExpectingLCurly => Some(4),
ErrorCode::ExpectingKey => Some(5),
ErrorCode::ExpectingRCurly => Some(6),
ErrorCode::ExpectingEq => Some(7),
ErrorCode::ExpectingFieldValue => Some(8),
ErrorCode::Build(_) => None,
};
let source = match &diagnostic.code {
ErrorCode::UnexpectedRCurly
| ErrorCode::RCurlyInserted
| ErrorCode::MismatchedEnvironment
| ErrorCode::ExpectingLCurly
| ErrorCode::ExpectingKey
| ErrorCode::ExpectingRCurly
| ErrorCode::ExpectingEq
| ErrorCode::ExpectingFieldValue => "texlab",
ErrorCode::Build(_) => "latex",
};
let message = String::from(match &diagnostic.code {
ErrorCode::UnexpectedRCurly => "Unexpected \"}\"",
ErrorCode::RCurlyInserted => "Missing \"}\" inserted",
ErrorCode::MismatchedEnvironment => "Mismatched environment",
ErrorCode::ExpectingLCurly => "Expecting a curly bracket: \"{\"",
ErrorCode::ExpectingKey => "Expecting a key",
ErrorCode::ExpectingRCurly => "Expecting a curly bracket: \"}\"",
ErrorCode::ExpectingEq => "Expecting an equality sign: \"=\"",
ErrorCode::ExpectingFieldValue => "Expecting a field value",
ErrorCode::Build(error) => &error.message,
});
lsp_types::Diagnostic {
severity: Some(severity),
code: code.map(NumberOrString::Number),
source: Some(String::from(source)),
..lsp_types::Diagnostic::new_simple(range, message)
}
}
pub fn filter(
all_diagnostics: &mut FxHashMap<&Document, Vec<lsp_types::Diagnostic>>,
workspace: &Workspace,
) {
let config = &workspace.config().diagnostics;
for diagnostics in all_diagnostics.values_mut() {
diagnostics.retain(|diagnostic| {
util::regex_filter::filter(
&diagnostic.message,
&config.allowed_patterns,
&config.ignored_patterns,
)
});
}
}
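util::regex_filter::filter is used here as a keep-predicate. Its assumed semantics (the implementation is not part of this diff): keep a message when the allow list is empty or matched, and no ignore pattern matches. A sketch under that assumption:

use regex::Regex;

// Assumed semantics of util::regex_filter::filter.
fn filter(text: &str, allowed: &[Regex], ignored: &[Regex]) -> bool {
    let allowed_ok = allowed.is_empty() || allowed.iter().any(|re| re.is_match(text));
    let ignored_ok = !ignored.iter().any(|re| re.is_match(text));
    allowed_ok && ignored_ok
}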

View file

@ -1,12 +1,12 @@
use std::str::FromStr;
use rowan::{ast::AstNode, TextRange};
use syntax::latex::{self, HasBrack, HasCurly};
use crate::{
db::{analysis::label, Document, Word, Workspace},
Db,
use base_db::{
semantics::tex::{Label, LabelObject},
Document, Workspace,
};
use rowan::{ast::AstNode, TextRange};
use rustc_hash::FxHashSet;
use syntax::latex::{self, HasCurly};
use self::LabeledObject::*;
@ -44,48 +44,48 @@ impl FromStr for LabeledFloatKind {
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum LabeledObject {
pub enum LabeledObject<'a> {
Section {
prefix: &'static str,
text: String,
prefix: &'a str,
text: &'a str,
},
Float {
kind: LabeledFloatKind,
caption: String,
caption: &'a str,
},
Theorem {
kind: Word,
description: Option<String>,
kind: &'a str,
description: Option<&'a str>,
},
Equation,
EnumItem,
}
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct RenderedLabel {
pub struct RenderedLabel<'a> {
pub range: TextRange,
pub number: Option<Word>,
pub object: LabeledObject,
pub number: Option<&'a str>,
pub object: LabeledObject<'a>,
}
impl RenderedLabel {
pub fn reference(&self, db: &dyn Db) -> String {
impl<'a> RenderedLabel<'a> {
pub fn reference(&self) -> String {
match &self.number {
Some(number) => match &self.object {
Section { prefix, text } => format!("{} {} ({})", prefix, number.text(db), text),
Section { prefix, text } => format!("{} {} ({})", prefix, number, text),
Float { kind, caption } => {
format!("{} {}: {}", kind.as_str(), number.text(db), caption)
format!("{} {}: {}", kind.as_str(), number, caption)
}
Theorem {
kind,
description: None,
} => format!("{} {}", kind.text(db), number.text(db)),
} => format!("{} {}", kind, number),
Theorem {
kind,
description: Some(description),
} => format!("{} {} ({})", kind.text(db), number.text(db), description),
Equation => format!("Equation ({})", number.text(db)),
EnumItem => format!("Item {}", number.text(db)),
} => format!("{} {} ({})", kind, number, description),
Equation => format!("Equation ({})", number),
EnumItem => format!("Item {}", number),
},
None => match &self.object {
Section { prefix, text } => format!("{} ({})", prefix, text),
@ -93,23 +93,23 @@ impl RenderedLabel {
Theorem {
kind,
description: None,
} => kind.text(db).into(),
} => String::from(*kind),
Theorem {
kind,
description: Some(description),
} => format!("{} ({})", kind.text(db), description),
} => format!("{} ({})", kind, description),
Equation => "Equation".into(),
EnumItem => "Item".into(),
},
}
}
pub fn detail(&self, db: &dyn Db) -> Option<String> {
pub fn detail(&self) -> Option<String> {
match &self.object {
Section { .. } | Theorem { .. } | Equation | EnumItem => Some(self.reference(db)),
Section { .. } | Theorem { .. } | Equation | EnumItem => Some(self.reference()),
Float { kind, .. } => {
let result = match &self.number {
Some(number) => format!("{} {}", kind.as_str(), number.text(db)),
Some(number) => format!("{} {}", kind.as_str(), number),
None => kind.as_str().to_owned(),
};
Some(result)
@ -118,147 +118,81 @@ impl RenderedLabel {
}
}
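As a worked example of reference() and detail() above (all field values invented for illustration):

use rowan::{TextRange, TextSize};

fn demo() {
    let label = RenderedLabel {
        range: TextRange::empty(TextSize::from(0)),
        number: Some("4.2"),
        object: LabeledObject::Section { prefix: "Section", text: "Results" },
    };
    assert_eq!(label.reference(), "Section 4.2 (Results)");
    // Sections render their detail via reference(), so the two agree.
    assert_eq!(label.detail().as_deref(), Some("Section 4.2 (Results)"));
}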
pub fn render(db: &dyn Db, document: Document, label_def: label::Name) -> Option<RenderedLabel> {
let workspace = Workspace::get(db);
let label_num = workspace.number_of_label(db, document, label_def.name(db));
let root = document.parse(db).as_tex()?.root(db);
label_def
.origin(db)
.as_definition()?
.to_node(&root)
.syntax()
.ancestors()
.find_map(|parent| {
render_label_float(parent.clone(), label_num)
.or_else(|| render_label_section(parent.clone(), label_num))
.or_else(|| render_label_enum_item(db, parent.clone(), label_num))
.or_else(|| render_label_equation(db, parent.clone(), label_num))
.or_else(|| render_label_theorem(db, document, parent, label_num))
})
}
pub fn find_label_definition(
db: &dyn Db,
child: Document,
name: Word,
) -> Option<(Document, label::Name)> {
Workspace::get(db)
.related(db, child)
pub fn render<'a>(
workspace: &'a Workspace,
related: &FxHashSet<&'a Document>,
label: &'a Label,
) -> Option<RenderedLabel<'a>> {
let number = related
.iter()
.find_map(|document| {
let data = document.parse(db).as_tex()?;
let label = data
.analyze(db)
.labels(db)
.iter()
.filter(|label| label.origin(db).as_definition().is_some())
.find(|label| label.name(db) == name)?;
.filter_map(|document| document.data.as_aux())
.find_map(|data| data.semantics.label_numbers.get(&label.name.text))
.map(|number| number.as_str());
Some((*document, *label))
})
}
for target in &label.targets {
match &target.object {
LabelObject::Section { prefix, text } => {
return Some(RenderedLabel {
range: target.range,
number,
object: LabeledObject::Section { prefix, text },
});
}
LabelObject::EnumItem => {
return Some(RenderedLabel {
range: target.range,
number,
object: LabeledObject::EnumItem,
});
}
LabelObject::Environment {
name,
options,
caption,
} => {
let config = &workspace.config().syntax;
if config.math_environments.contains(name.as_str()) {
return Some(RenderedLabel {
range: target.range,
number,
object: LabeledObject::Equation,
});
}
fn render_label_float(parent: latex::SyntaxNode, number: Option<Word>) -> Option<RenderedLabel> {
let environment = latex::Environment::cast(parent.clone())?;
let environment_name = environment.begin()?.name()?.key()?.to_string();
let kind = LabeledFloatKind::from_str(&environment_name).ok()?;
let caption = find_caption_by_parent(&parent)?;
Some(RenderedLabel {
range: latex::small_range(&environment),
number,
object: LabeledObject::Float { caption, kind },
})
}
if let Ok(kind) = LabeledFloatKind::from_str(name) {
return Some(RenderedLabel {
range: target.range,
number,
object: LabeledObject::Float {
kind,
caption: caption.as_deref()?,
},
});
}
fn render_label_section(parent: latex::SyntaxNode, number: Option<Word>) -> Option<RenderedLabel> {
let section = latex::Section::cast(parent)?;
let text_group = section.name()?;
let text = text_group.content_text()?;
Some(RenderedLabel {
range: latex::small_range(&section),
number,
object: LabeledObject::Section {
prefix: match section.syntax().kind() {
latex::PART => "Part",
latex::CHAPTER => "Chapter",
latex::SECTION => "Section",
latex::SUBSECTION => "Subsection",
latex::SUBSUBSECTION => "Subsubsection",
latex::PARAGRAPH => "Paragraph",
latex::SUBPARAGRAPH => "Subparagraph",
_ => unreachable!(),
},
text,
},
})
}
fn render_label_enum_item(
db: &dyn Db,
parent: latex::SyntaxNode,
number: Option<Word>,
) -> Option<RenderedLabel> {
let enum_item = latex::EnumItem::cast(parent)?;
Some(RenderedLabel {
range: latex::small_range(&enum_item),
number: enum_item
.label()
.and_then(|label| label.content_text())
.map(|text| Word::new(db, text))
.or(number),
object: LabeledObject::EnumItem,
})
}
fn render_label_equation(
db: &dyn Db,
parent: latex::SyntaxNode,
number: Option<Word>,
) -> Option<RenderedLabel> {
let env = latex::Environment::cast(parent)?;
let env_name = env.begin()?.name()?.key()?.to_string();
if !db.config().syntax.math_environments.contains(&env_name) {
return None;
if let Some(theorem) = related
.iter()
.filter_map(|document| document.data.as_tex())
.flat_map(|data| data.semantics.theorem_definitions.iter())
.find(|theorem| theorem.name.text == *name)
{
return Some(RenderedLabel {
range: target.range,
number,
object: LabeledObject::Theorem {
kind: &theorem.description,
description: options.as_deref(),
},
});
}
}
};
}
Some(RenderedLabel {
range: latex::small_range(&env),
number,
object: LabeledObject::Equation,
})
None
}
fn render_label_theorem(
db: &dyn Db,
document: Document,
parent: latex::SyntaxNode,
number: Option<Word>,
) -> Option<RenderedLabel> {
let environment = latex::Environment::cast(parent)?;
let begin = environment.begin()?;
let description = begin.options().and_then(|options| options.content_text());
let environment_name = begin.name()?.key()?.to_string();
let kind = Workspace::get(db)
.related(db, document)
.iter()
.filter_map(|document| document.parse(db).as_tex())
.flat_map(|data| data.analyze(db).theorem_environments(db))
.find(|env| env.name(db).text(db) == &environment_name)
.map(|env| env.description(db))?;
Some(RenderedLabel {
range: latex::small_range(&environment),
number,
object: LabeledObject::Theorem { kind, description },
})
}
pub fn find_caption_by_parent(parent: &latex::SyntaxNode) -> Option<String> {
pub(crate) fn find_caption_by_parent(parent: &latex::SyntaxNode) -> Option<String> {
parent
.children()
.filter_map(latex::Caption::cast)

View file

@ -1,217 +0,0 @@
// The following code has been copied from rust-analyzer.
//! `LineIndex` maps flat `TextSize` offsets into `(Line, Column)`
//! representation.
use std::iter;
use rowan::{TextRange, TextSize};
use rustc_hash::FxHashMap;
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct LineIndex {
/// Offset of the beginning of each line, zero-based
pub(crate) newlines: Vec<TextSize>,
/// List of non-ASCII characters on each line
pub(crate) utf16_lines: FxHashMap<u32, Vec<Utf16Char>>,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct LineColUtf16 {
/// Zero-based
pub line: u32,
/// Zero-based
pub col: u32,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct LineCol {
/// Zero-based
pub line: u32,
/// Zero-based utf8 offset
pub col: u32,
}
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub(crate) struct Utf16Char {
/// Start offset of a character inside a line, zero-based
pub(crate) start: TextSize,
/// End offset of a character inside a line, zero-based
pub(crate) end: TextSize,
}
impl Utf16Char {
/// Returns the length in 8-bit UTF-8 code units.
fn len(&self) -> TextSize {
self.end - self.start
}
/// Returns the length in 16-bit UTF-16 code units.
fn len_utf16(&self) -> usize {
if self.len() == TextSize::from(4) {
2
} else {
1
}
}
}
impl LineIndex {
pub fn new(text: &str) -> LineIndex {
let mut utf16_lines = FxHashMap::default();
let mut utf16_chars = Vec::new();
let mut newlines = vec![0.into()];
let mut curr_row = 0.into();
let mut curr_col = 0.into();
let mut line = 0;
for c in text.chars() {
let c_len = TextSize::of(c);
curr_row += c_len;
if c == '\n' {
newlines.push(curr_row);
// Save any utf-16 characters seen in the previous line
if !utf16_chars.is_empty() {
utf16_lines.insert(line, utf16_chars);
utf16_chars = Vec::new();
}
// Prepare for processing the next line
curr_col = 0.into();
line += 1;
continue;
}
if !c.is_ascii() {
utf16_chars.push(Utf16Char {
start: curr_col,
end: curr_col + c_len,
});
}
curr_col += c_len;
}
// Save any utf-16 characters seen in the last line
if !utf16_chars.is_empty() {
utf16_lines.insert(line, utf16_chars);
}
LineIndex {
newlines,
utf16_lines,
}
}
pub fn line_col(&self, offset: TextSize) -> LineCol {
let line = partition_point(&self.newlines, |&it| it <= offset) - 1;
let line_start_offset = self.newlines[line];
let col = offset - line_start_offset;
LineCol {
line: line as u32,
col: col.into(),
}
}
pub fn offset(&self, line_col: LineCol) -> TextSize {
self.newlines[line_col.line as usize] + TextSize::from(line_col.col)
}
pub fn to_utf16(&self, line_col: LineCol) -> LineColUtf16 {
let col = self.utf8_to_utf16_col(line_col.line, line_col.col.into());
LineColUtf16 {
line: line_col.line,
col: col as u32,
}
}
pub fn to_utf8(&self, line_col: LineColUtf16) -> LineCol {
let col = self.utf16_to_utf8_col(line_col.line, line_col.col);
LineCol {
line: line_col.line,
col: col.into(),
}
}
pub fn lines(&self, range: TextRange) -> impl Iterator<Item = TextRange> + '_ {
let lo = partition_point(&self.newlines, |&it| it < range.start());
let hi = partition_point(&self.newlines, |&it| it <= range.end());
let all = iter::once(range.start())
.chain(self.newlines[lo..hi].iter().copied())
.chain(iter::once(range.end()));
all.clone()
.zip(all.skip(1))
.map(|(lo, hi)| TextRange::new(lo, hi))
.filter(|it| !it.is_empty())
}
fn utf8_to_utf16_col(&self, line: u32, col: TextSize) -> usize {
let mut res: usize = col.into();
if let Some(utf16_chars) = self.utf16_lines.get(&line) {
for c in utf16_chars {
if c.end <= col {
res -= usize::from(c.len()) - c.len_utf16();
} else {
// From here on, all utf16 characters come *after* the character we are mapping,
// so we don't need to take them into account
break;
}
}
}
res
}
fn utf16_to_utf8_col(&self, line: u32, mut col: u32) -> TextSize {
if let Some(utf16_chars) = self.utf16_lines.get(&line) {
for c in utf16_chars {
if col > u32::from(c.start) {
col += u32::from(c.len()) - c.len_utf16() as u32;
} else {
// From here on, all utf16 characters come *after* the character we are mapping,
// so we don't need to take them into account
break;
}
}
}
col.into()
}
}
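A worked example of the column conversion, assuming the replacement base_db::LineIndex keeps this API (the imports elsewhere in this commit suggest it does):

use base_db::{LineCol, LineIndex};

// In "aé!", 'é' is two UTF-8 bytes but one UTF-16 unit, so the '!' sits at
// UTF-8 col 3 and UTF-16 col 2.
fn demo() {
    let index = LineIndex::new("aé!");
    let utf16 = index.to_utf16(LineCol { line: 0, col: 3 });
    assert_eq!((utf16.line, utf16.col), (0, 2));
}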
/// Returns `idx` such that:
///
/// ```text
/// ∀ x in slice[..idx]: pred(x)
/// && ∀ x in slice[idx..]: !pred(x)
/// ```
///
/// https://github.com/rust-lang/rust/issues/73831
fn partition_point<T, P>(slice: &[T], mut pred: P) -> usize
where
P: FnMut(&T) -> bool,
{
let mut left = 0;
let mut right = slice.len();
while left != right {
let mid = left + (right - left) / 2;
// SAFETY:
// When left < right, left <= mid < right.
// Therefore left always increases and right always decreases,
// and either of them is selected.
// In both cases left <= right is satisfied.
// Therefore if left < right in a step,
// left <= right is satisfied in the next step.
// Therefore as long as left != right, 0 <= left < right <= len is satisfied
// and in this case 0 <= mid < len is satisfied too.
let value = unsafe { slice.get_unchecked(mid) };
if pred(value) {
left = mid + 1;
} else {
right = mid;
}
}
left
}
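This backport predates Rust 1.52, which stabilized slice::partition_point; the same line lookup can now lean on std, as in this sketch:

// Equivalent to LineIndex::line_col's line computation, using the std helper.
fn line_of_offset(newlines: &[u32], offset: u32) -> usize {
    newlines.partition_point(|&start| start <= offset) - 1
}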

View file

@ -1,8 +1,7 @@
use base_db::{LineCol, LineColUtf16, LineIndex};
use lsp_types::{Position, Range};
use rowan::{TextRange, TextSize};
use super::line_index::{LineColUtf16, LineIndex};
pub trait LineIndexExt {
fn offset_lsp(&self, line_col: Position) -> TextSize;
@ -36,7 +35,17 @@ impl LineIndexExt for LineIndex {
fn line_col_lsp_range(&self, offset: TextRange) -> Range {
let start = self.line_col_lsp(offset.start());
let end = self.line_col_lsp(offset.end());
let mut end = self.line_col_lsp(offset.end());
if end.line != start.line && end.character == 0 {
// If a multi-line range ends at the very start of a line, clamp the end
// back to the previous line so the range stays compact
let line_end = self.offset(LineCol {
line: end.line,
col: 0,
});
end = self.line_col_lsp(line_end - TextSize::from(1));
}
Range::new(start, end)
}
}
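A hypothetical check of the new clamp, assuming base_db::LineIndex mirrors the deleted implementation: the range below covers "foo\n", whose end offset used to map to position (1, 0).

use base_db::LineIndex;
use lsp_types::Position;
use rowan::{TextRange, TextSize};
use texlab::util::line_index_ext::LineIndexExt;

fn demo() {
    let index = LineIndex::new("foo\nbar");
    let range = TextRange::new(TextSize::from(0), TextSize::from(4));
    let lsp_range = index.line_col_lsp_range(range);
    assert_eq!(lsp_range.start, Position::new(0, 0));
    // Without the clamp the end would be (1, 0); with it, the range stays on line 0.
    assert_eq!(lsp_range.end, Position::new(0, 3));
}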

View file

@ -51,7 +51,7 @@ impl Fixture {
let language_id = String::from(match language {
Language::Tex => "latex",
Language::Bib => "bibtex",
Language::Log | Language::Root | Language::Tectonic => continue,
Language::Aux | Language::Log | Language::Root | Language::Tectonic => continue,
});
client.send_notification::<DidOpenTextDocument>(DidOpenTextDocumentParams {

View file

@ -1,8 +1,9 @@
use base_db::LineIndex;
use insta::assert_snapshot;
use lsp_types::{
request::Formatting, ClientCapabilities, DocumentFormattingParams, FormattingOptions,
};
use texlab::util::{line_index::LineIndex, line_index_ext::LineIndexExt};
use texlab::util::line_index_ext::LineIndexExt;
use crate::fixture::TestBed;

View file

@ -1,5 +1,5 @@
---
source: tests/lsp/text_document/completion.rs
source: crates/texlab/tests/lsp/text_document/completion.rs
expression: "complete(r#\"\n%! bug.tex\n\\documentclass{article}\n\\def\\あいうえお{}\n\\begin{document}\n\\あ\n |\n ^\n\\end{document}\"#)"
---
[

View file

@ -1,5 +1,5 @@
---
source: tests/lsp/text_document/completion.rs
source: crates/texlab/tests/lsp/text_document/completion.rs
expression: "complete(r#\"\n%! main.tex\n\\foobar\n\\fooba\n |\n ^^^^^\n\\begin{foo}\n\\end{foo}\n\\begin{fo}\"#)"
---
[