Mirror of https://github.com/latex-lsp/texlab.git, synced 2025-08-03 18:29:03 +00:00
Load kpsewhich database on startup
commit 04f2ba792d (parent 5ac87b1bde)
7 changed files with 315 additions and 12 deletions
crates/tex/src/kpsewhich.rs (new file, 66 lines)
@@ -0,0 +1,66 @@
use super::language::Language;
use super::{LoadError, Resolver};
use std::collections::HashMap;
use std::env;
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use tokio::process::Command;

pub async fn parse_database<R>(reader: R) -> Result<Resolver, LoadError>
where
    R: Fn(&Path) -> Result<Vec<PathBuf>, LoadError>,
{
    let root_directories = root_directories().await?;
    let mut files_by_name = HashMap::new();
    for directory in &root_directories {
        let database = reader(directory)?
            .into_iter()
            .filter(|path| {
                path.extension()
                    .and_then(OsStr::to_str)
                    .and_then(Language::by_extension)
                    .is_some()
            })
            .filter_map(|path| {
                root_directories
                    .iter()
                    .rev()
                    .find_map(move |dir| dir.join(&path).canonicalize().ok())
            })
            .map(|path| (path.file_name().unwrap().to_str().unwrap().to_owned(), path));

        files_by_name.extend(database);
    }
    Ok(Resolver::new(files_by_name))
}

async fn root_directories() -> Result<Vec<PathBuf>, LoadError> {
    let texmf = run(&["-var-value", "TEXMF"]).await?;
    let expand_arg = format!("--expand-braces={}", texmf);
    let expanded = run(&[&expand_arg]).await?;
    let directories = env::split_paths(&expanded.replace("!", ""))
        .filter(|path| path.exists())
        .collect();
    Ok(directories)
}

async fn run<I, S>(args: I) -> Result<String, LoadError>
where
    I: IntoIterator<Item = S>,
    S: AsRef<OsStr>,
{
    let output = Command::new("kpsewhich")
        .args(args)
        .output()
        .await
        .map_err(|_| LoadError::KpsewhichNotFound)?;

    let result = String::from_utf8(output.stdout)
        .expect("Could not decode output from kpsewhich")
        .lines()
        .next()
        .expect("Invalid output from kpsewhich")
        .into();

    Ok(result)
}
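
Note: root_directories above shells out to kpsewhich twice, first for the raw TEXMF variable and then to expand its brace groups, and the result is treated as a PATH-style list. A minimal sketch of the final splitting step (not part of the commit; the value below is an assumed Unix-style expansion, and the real code additionally drops directories that do not exist):

use std::env;
use std::path::PathBuf;

fn main() {
    // Assumed example value; a real expansion comes from
    // `kpsewhich --expand-braces=$(kpsewhich -var-value TEXMF)`.
    let expanded = "/usr/local/texlive/2019/texmf-config:!!/usr/local/texlive/2019/texmf-dist";
    // The "!!" markers (kpathsea's "search the ls-R database only" flag) are
    // stripped before the string is split like a PATH variable.
    let roots: Vec<PathBuf> = env::split_paths(&expanded.replace("!", "")).collect();
    assert_eq!(roots[1], PathBuf::from("/usr/local/texlive/2019/texmf-dist"));
}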

@@ -1,4 +1,5 @@
mod compile;
mod kpsewhich;
mod language;
mod miktex;
mod tectonic;

@@ -11,7 +12,10 @@ use self::miktex::Miktex;
use self::tectonic::Tectonic;
use self::texlive::Texlive;
use futures_boxed::boxed;
use std::collections::HashMap;
use std::future::Future;
use std::path::PathBuf;
use std::sync::Arc;
use tokio::process::Command;

#[derive(Debug, PartialEq, Eq, Clone, Copy)]

@@ -49,6 +53,23 @@ impl DistributionKind {
    }
}

#[derive(Debug, PartialEq, Eq, Clone, Default)]
pub struct Resolver {
    pub files_by_name: HashMap<String, PathBuf>,
}

impl Resolver {
    pub fn new(files_by_name: HashMap<String, PathBuf>) -> Self {
        Self { files_by_name }
    }
}

#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum LoadError {
    KpsewhichNotFound,
    CorruptFileDatabase,
}

pub trait Distribution: Send + Sync {
    fn kind(&self) -> DistributionKind;

@@ -70,16 +91,22 @@ pub trait Distribution: Send + Sync {
        let args = &["--interaction=batchmode", "-shell-escape", params.file_name];
        compile(executable, args, params).await
    }

    #[boxed]
    async fn load(&self) -> Result<(), LoadError>;

    #[boxed]
    async fn resolver(&self) -> Arc<Resolver>;
}

impl dyn Distribution {
    pub async fn detect() -> Box<Self> {
        let kind = DistributionKind::detect().await;
        let distro: Box<Self> = match kind {
            DistributionKind::Texlive => Box::new(Texlive),
            DistributionKind::Miktex => Box::new(Miktex),
            DistributionKind::Tectonic => Box::new(Tectonic),
            DistributionKind::Unknown => Box::new(Unknown),
            DistributionKind::Texlive => Box::new(Texlive::new()),
            DistributionKind::Miktex => Box::new(Miktex::new()),
            DistributionKind::Tectonic => Box::new(Tectonic::new()),
            DistributionKind::Unknown => Box::new(Unknown::new()),
        };
        distro
    }

@@ -88,6 +115,12 @@ impl dyn Distribution {
#[derive(Debug, Default)]
pub struct Unknown;

impl Unknown {
    pub fn new() -> Self {
        Self
    }
}

impl Distribution for Unknown {
    fn kind(&self) -> DistributionKind {
        DistributionKind::Unknown

@@ -104,6 +137,16 @@ impl Distribution for Unknown {
    ) -> Result<CompileResult, CompileError> {
        Err(CompileError::NotInstalled)
    }

    #[boxed]
    async fn load(&self) -> Result<(), LoadError> {
        Ok(())
    }

    #[boxed]
    async fn resolver(&self) -> Arc<Resolver> {
        Arc::new(Resolver::default())
    }
}

pub async fn with_distro<T, A, F>(supported_kinds: &[DistributionKind], action: A) -> Option<T>
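
The hunks above add two entry points to the Distribution trait, load and resolver. A minimal usage sketch, not taken from the commit and assuming a tokio runtime plus the crate layout suggested by the crates/tex file paths: detect the installed distribution once on startup, load its file name database, then resolve files by name.

use tex::Distribution; // crate/module path assumed; adjust to where the trait above lives

async fn startup() {
    let distro = <dyn Distribution>::detect().await;
    // Loading parses the kpsewhich/fndb/ls-R database; on an Unknown or
    // Tectonic distribution this is a no-op that leaves the resolver empty.
    if let Err(why) = distro.load().await {
        eprintln!("failed to load the file name database: {:?}", why);
    }
    let resolver = distro.resolver().await;
    // files_by_name maps bare file names to resolved paths.
    if let Some(path) = resolver.files_by_name.get("amsmath.sty") {
        println!("amsmath.sty -> {}", path.display());
    }
}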

@@ -1,8 +1,26 @@
use super::compile::*;
use super::{Distribution, DistributionKind};
use super::kpsewhich;
use super::{Distribution, DistributionKind, LoadError, Resolver};
use byteorder::{LittleEndian, ReadBytesExt};
use futures::lock::Mutex;
use futures_boxed::boxed;
use std::ffi::OsStr;
use std::fs;
use std::io::{self, Cursor};
use std::mem;
use std::path::{Path, PathBuf};
use std::sync::Arc;

#[derive(Debug, Default)]
pub struct Miktex;
pub struct Miktex {
    resolver: Mutex<Arc<Resolver>>,
}

impl Miktex {
    pub fn new() -> Self {
        Self::default()
    }
}

impl Distribution for Miktex {
    fn kind(&self) -> DistributionKind {

@@ -15,4 +33,85 @@ impl Distribution for Miktex {
            Format::Xelatex | Format::Lualatex => true,
        }
    }

    #[boxed]
    async fn load(&self) -> Result<(), LoadError> {
        let resolver = Arc::new(kpsewhich::parse_database(read_database).await?);
        let mut resolver_guard = self.resolver.lock().await;
        mem::replace(&mut *resolver_guard, resolver);
        Ok(())
    }

    #[boxed]
    async fn resolver(&self) -> Arc<Resolver> {
        let resolver = self.resolver.lock().await;
        Arc::clone(&resolver)
    }
}

const DATABASE_PATH: &str = "miktex/data/le";
const FNDB_SIGNATURE: u32 = 0x42444e46;
const FNDB_WORD_SIZE: usize = 4;
const FNDB_TABLE_POINTER_OFFSET: usize = 4 * FNDB_WORD_SIZE;
const FNDB_TABLE_SIZE_OFFSET: usize = 6 * FNDB_WORD_SIZE;
const FNDB_ENTRY_SIZE: usize = 4 * FNDB_WORD_SIZE;

fn read_database(directory: &Path) -> Result<Vec<PathBuf>, LoadError> {
    let database_directory = directory.join(DATABASE_PATH);
    if !database_directory.exists() {
        return Ok(Vec::new());
    }

    let mut database = Vec::new();
    let files = fs::read_dir(database_directory)
        .expect("Could not traverse database directory")
        .filter_map(|x| x.ok())
        .filter(|x| x.path().extension().and_then(OsStr::to_str) == Some("fndb-5"));

    for file in files {
        let bytes = fs::read(file.path()).expect("Could not read fndb file");
        database.extend(parse_database(&bytes).map_err(|_| LoadError::CorruptFileDatabase)?);
    }

    Ok(database)
}

fn parse_database(bytes: &[u8]) -> io::Result<Vec<PathBuf>> {
    let mut reader = Cursor::new(bytes);
    if reader.read_u32::<LittleEndian>()? != FNDB_SIGNATURE {
        return Err(io::ErrorKind::InvalidData.into());
    }

    reader.set_position(FNDB_TABLE_POINTER_OFFSET as u64);
    let table_address = reader.read_u32::<LittleEndian>()?;

    reader.set_position(FNDB_TABLE_SIZE_OFFSET as u64);
    let table_size = reader.read_u32::<LittleEndian>()?;

    let mut files = Vec::new();
    for i in 0..table_size {
        let offset = table_address + i * FNDB_ENTRY_SIZE as u32;
        reader.set_position(offset as u64);
        let file_name_offset = reader.read_u32::<LittleEndian>()? as usize;
        let directory_offset = reader.read_u32::<LittleEndian>()? as usize;
        let file_name = read_string(bytes, file_name_offset)?;
        let directory = read_string(bytes, directory_offset)?;

        let file = PathBuf::from(directory).join(file_name);
        files.push(file);
    }

    Ok(files)
}

fn read_string(bytes: &[u8], offset: usize) -> io::Result<&str> {
    let mut byte = bytes[offset];
    let mut length = 0;
    while byte != 0x00 {
        length += 1;
        byte = bytes[offset + length];
    }

    std::str::from_utf8(&bytes[offset..offset + length])
        .map_err(|_| io::ErrorKind::InvalidData.into())
}
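
The FNDB_* constants above describe MiKTeX's binary fndb-5 layout: a 4-byte word size, an "FNDB" signature word, the entry-table pointer at word 4, the entry count at word 6, and 4-word entries whose first two words are string offsets. A hand-built buffer in that layout, purely as illustration (the offsets and names are made up for this sketch, and real fndb-5 files carry additional header and entry words that the parser skips):

fn main() {
    let mut buf = vec![0u8; 64];
    buf[0..4].copy_from_slice(b"FNDB");                // FNDB_SIGNATURE read as little-endian u32
    buf[16..20].copy_from_slice(&28u32.to_le_bytes()); // word 4: pointer to the entry table
    buf[24..28].copy_from_slice(&1u32.to_le_bytes());  // word 6: number of table entries
    buf[28..32].copy_from_slice(&44u32.to_le_bytes()); // entry 0, word 0: file name offset
    buf[32..36].copy_from_slice(&50u32.to_le_bytes()); // entry 0, word 1: directory offset
    buf[44..49].copy_from_slice(b"a.sty");             // NUL-terminated file name
    buf[50..59].copy_from_slice(b"tex/latex");         // NUL-terminated directory
    // Feeding this buffer to the parse_database function above would yield a
    // single entry, "tex/latex/a.sty".
}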

@@ -1,10 +1,17 @@
use super::compile::*;
use super::{Distribution, DistributionKind};
use super::{Distribution, DistributionKind, LoadError, Resolver};
use futures_boxed::boxed;
use std::sync::Arc;

#[derive(Debug, Default)]
pub struct Tectonic;

impl Tectonic {
    pub fn new() -> Self {
        Self
    }
}

impl Distribution for Tectonic {
    fn kind(&self) -> DistributionKind {
        DistributionKind::Tectonic

@@ -29,4 +36,14 @@ impl Distribution for Tectonic {
        let args = [params.file_name];
        compile("tectonic", &args, params).await
    }

    #[boxed]
    async fn load(&self) -> Result<(), LoadError> {
        Ok(())
    }

    #[boxed]
    async fn resolver(&self) -> Arc<Resolver> {
        Arc::new(Resolver::default())
    }
}

@@ -1,8 +1,25 @@
use super::compile::*;
use super::{Distribution, DistributionKind};
use super::kpsewhich;
use super::{Distribution, DistributionKind, LoadError, Resolver};
use futures::lock::Mutex;
use futures_boxed::boxed;
use std::fs;
use std::io;
use std::mem;
use std::path::{Path, PathBuf};
use std::str::Lines;
use std::sync::Arc;

#[derive(Debug, Default)]
pub struct Texlive;
pub struct Texlive {
    resolver: Mutex<Arc<Resolver>>,
}

impl Texlive {
    pub fn new() -> Self {
        Self::default()
    }
}

impl Distribution for Texlive {
    fn kind(&self) -> DistributionKind {

@@ -15,4 +32,46 @@ impl Distribution for Texlive {
            Format::Xelatex | Format::Lualatex => true,
        }
    }

    #[boxed]
    async fn load(&self) -> Result<(), LoadError> {
        let resolver = Arc::new(kpsewhich::parse_database(read_database).await?);
        let mut resolver_guard = self.resolver.lock().await;
        mem::replace(&mut *resolver_guard, resolver);
        Ok(())
    }

    #[boxed]
    async fn resolver(&self) -> Arc<Resolver> {
        let resolver = self.resolver.lock().await;
        Arc::clone(&resolver)
    }
}

const DATABASE_PATH: &'static str = "ls-R";

fn read_database(directory: &Path) -> Result<Vec<PathBuf>, LoadError> {
    let file = directory.join(DATABASE_PATH);
    if !file.is_file() {
        return Ok(Vec::new());
    }

    let text = fs::read_to_string(file).expect("Could not read ls-R file");
    parse_database(text.lines()).map_err(|_| LoadError::CorruptFileDatabase)
}

fn parse_database(lines: Lines) -> io::Result<Vec<PathBuf>> {
    let mut paths = Vec::new();
    let mut directory = "";

    for line in lines.filter(|x| !x.trim().is_empty() && !x.starts_with('%')) {
        if line.ends_with(':') {
            directory = &line[..line.len() - 1];
        } else {
            let path = PathBuf::from(directory).join(line);
            paths.push(path);
        }
    }

    Ok(paths)
}
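
For reference, the ls-R file consumed above is plain text: lines starting with '%' are comments, a line ending in ':' names a directory, and the lines that follow are entries inside it. A minimal sketch of the same walk over an assumed ls-R fragment (not part of the commit):

use std::path::PathBuf;

fn main() {
    let ls_r = "\
% ls-R -- filename database for kpathsea; do not edit this file.
./tex/latex/amsmath:
amsmath.sty
amstext.sty

./tex/latex/graphics:
graphicx.sty
";
    let mut directory = "";
    let mut paths = Vec::new();
    // Same loop as texlive::parse_database above, inlined for illustration.
    for line in ls_r.lines().filter(|x| !x.trim().is_empty() && !x.starts_with('%')) {
        if line.ends_with(':') {
            directory = &line[..line.len() - 1];
        } else {
            paths.push(PathBuf::from(directory).join(line));
        }
    }
    assert_eq!(paths.len(), 3);
    assert_eq!(paths[0], PathBuf::from("./tex/latex/amsmath/amsmath.sty"));
}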