Revert splitting the main crate

This commit is contained in:
Patrick Förster 2019-08-01 16:02:45 +02:00
parent bbb8a44541
commit 4b92707068
101 changed files with 162 additions and 471 deletions

View file

@ -1,20 +0,0 @@
[package]
name = "citeproc"
version = "0.1.0"
authors = [
"Eric Förster <efoerster@users.noreply.github.com>",
"Patrick Förster <pfoerster@users.noreply.github.com>"]
edition = "2018"
[dependencies]
futures-preview = { version = "0.3.0-alpha.17", features = ["compat"] }
html2md = "0.2.9"
lsp-types = { git = "https://github.com/latex-lsp/lsp-types", rev = "9fcc5d9b9d3013ce84e20ef566267754d594b268", features = ["proposed"] }
runtime = "0.3.0-alpha.4"
runtime-tokio = "0.3.0-alpha.4"
serde = { version = "1.0.97", features = ["derive", "rc"] }
tempfile = "3"
texlab-syntax = { path = "../texlab_syntax" }
tokio = "0.1"
tokio-process = "0.2.4"

View file

@ -1,93 +0,0 @@
# Created by https://www.gitignore.io/api/node
# Edit at https://www.gitignore.io/?templates=node
### Node ###
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# TypeScript v1 declaration files
typings/
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variables file
.env
.env.test
# parcel-bundler cache (https://parceljs.org/)
.cache
# next.js build output
.next
# nuxt.js build output
.nuxt
# vuepress build output
.vuepress/dist
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# End of https://www.gitignore.io/api/node
# TypeScript output
out/
# dist
dist/

View file

@ -1,5 +0,0 @@
{
"printWidth": 80,
"singleQuote": true,
"trailingComma": "all"
}

File diff suppressed because it is too large Load diff

View file

@ -1,31 +0,0 @@
{
"name": "texlab-citeproc",
"version": "0.1.0",
"description": "LaTeX Language Server",
"repository": "https://github.com/latex-lsp/texlab.git",
"author": "Eric Förster <efoerster@users.noreply.github.com>",
"license": "MIT",
"scripts": {
"dist": "webpack",
"build": "tsc -p .",
"watch": "tsc -p . --watch",
"lint": "tslint --project .",
"format": "prettier --write \"src/**/*.{ts,json}\" \"*.{ts,json,yml,md}\" \".vscode/**/*.{json}\""
},
"devDependencies": {
"@citation-js/core": "^0.4.8",
"@citation-js/plugin-bibtex": "^0.4.8",
"@citation-js/plugin-csl": "^0.4.8",
"@types/node": "^11.13.17",
"@types/webpack": "^4.4.35",
"null-loader": "^0.1.1",
"prettier": "^1.18.2",
"ts-loader": "^5.4.5",
"ts-node": "^8.3.0",
"tslint": "^5.18.0",
"tslint-config-prettier": "^1.15.0",
"typescript": "^3.5.3",
"webpack": "^4.35.3",
"webpack-cli": "^3.3.6"
}
}

View file

@ -1,7 +0,0 @@
// Minimal ambient type declaration for `@citation-js/core`, which ships
// without its own TypeScript typings. Only the surface used by this
// script is declared.
declare module '@citation-js/core' {
  export class Cite {
    // Parses the given citation source text (e.g. BibTeX).
    constructor(text: string);
    // Renders the parsed citation, e.g. format('bibliography', { format: 'html', ... }).
    public format(type: string, options: any): string;
  }
}

View file

@ -1,13 +0,0 @@
// Entry point of the citeproc helper script: reads a BibTeX entry from
// `entry.bib` in the current working directory, formats it as an HTML
// bibliography via citation-js and prints the result to stdout.
import { Cite } from '@citation-js/core';
// Side-effect imports that register the BibTeX input plugin and the CSL
// output plugin with citation-js.
import '@citation-js/plugin-bibtex';
import '@citation-js/plugin-csl';
import fs from 'fs';

const code = fs.readFileSync('entry.bib').toString();
const cite = new Cite(code);
// APA citation style, English locale.
const html = cite.format('bibliography', {
  format: 'html',
  template: 'apa',
  lang: 'en-US',
});
console.log(html);

View file

@ -1,15 +0,0 @@
{
"compilerOptions": {
"module": "commonjs",
"target": "es6",
"lib": ["es6"],
"rootDir": "src",
"outDir": "dist",
"sourceMap": true,
"strict": true,
"resolveJsonModule": true,
"esModuleInterop": true,
"skipLibCheck": true
},
"include": ["src/**/*"]
}

View file

@ -1,20 +0,0 @@
{
"extends": ["tslint:latest", "tslint-config-prettier"],
"rules": {
"no-implicit-dependencies": false,
"interface-name": false,
"max-classes-per-file": false,
"object-literal-sort-keys": false,
"variable-name": [
true,
"ban-keywords",
"check-format",
"allow-leading-underscore",
"allow-pascal-case"
],
"no-empty": false,
"no-console": false,
"no-conditional-assignment": false,
"no-bitwise": false
}
}

View file

@ -1,37 +0,0 @@
import path from 'path';
import { Configuration } from 'webpack';

// Webpack configuration that bundles the citeproc script (together with
// its citation-js dependencies) into a single Node.js file,
// `dist/citeproc.js`, which the Rust side embeds via `include_str!`.
const config: Configuration = {
  target: 'node',
  entry: './src/main.ts',
  mode: 'production',
  output: {
    path: path.resolve(__dirname, 'dist'),
    filename: 'citeproc.js',
    libraryTarget: 'commonjs2',
    devtoolModuleFilenameTemplate: '../[resource-path]',
  },
  resolve: {
    extensions: ['.ts', '.js', '.json'],
  },
  module: {
    rules: [
      {
        // Compile TypeScript sources with ts-loader.
        test: /\.ts$/,
        exclude: /node_modules/,
        use: [
          {
            loader: 'ts-loader',
          },
        ],
      },
      {
        // Map browser dependencies to an empty module
        test: /node_modules[/\\](sync-request|isomorphic-fetch|ws)[/\\]/,
        use: 'null-loader',
      },
    ],
  },
};

export default config;

View file

@ -1,89 +0,0 @@
#![feature(async_await)]
use futures::compat::*;
use lsp_types::*;
use std::process::{Command, Stdio};
use tempfile::tempdir;
use texlab_syntax::*;
use tokio_process::CommandExt;
/// The ways in which rendering a citation can fail.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum RenderCitationError {
    /// The temporary working directory could not be created or written.
    InitializationFailed,
    /// The BibTeX entry has no fields or was rejected by the script.
    InvalidEntry,
    /// The `node` executable could not be spawned.
    NodeNotInstalled,
    /// The citeproc script could not be written to `node`'s stdin or the
    /// process did not run to completion.
    ScriptFaulty,
    /// The script produced output that is not valid UTF-8.
    InvalidOutput,
}
/// Renders the given BibTeX entry source to Markdown by piping the
/// bundled citeproc Node.js script into a spawned `node` process.
///
/// The entry is written to `entry.bib` inside a fresh temporary
/// directory (the script reads it from its working directory); the
/// script's HTML output is then converted to Markdown.
///
/// # Errors
///
/// Returns a [`RenderCitationError`] describing which stage failed; see
/// the enum variants for details.
pub async fn render_citation(entry_code: &str) -> Result<MarkupContent, RenderCitationError> {
    let tree = BibtexSyntaxTree::from(entry_code);
    // An entry without any fields cannot produce a citation, so bail out
    // before spawning a process. (`is_empty` instead of `len() == 0`.)
    if tree.entries().iter().any(|entry| entry.fields.is_empty()) {
        return Err(RenderCitationError::InvalidEntry);
    }

    let directory = tempdir().map_err(|_| RenderCitationError::InitializationFailed)?;
    let entry_path = directory.path().join("entry.bib");
    tokio::fs::write(entry_path, &entry_code)
        .compat()
        .await
        .map_err(|_| RenderCitationError::InitializationFailed)?;

    let mut process = Command::new("node")
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .current_dir(directory.path())
        .spawn_async()
        .map_err(|_| RenderCitationError::NodeNotInstalled)?;

    // Feed the bundled script to node via stdin instead of a file.
    tokio::io::write_all(process.stdin().as_mut().unwrap(), SCRIPT)
        .compat()
        .await
        .map_err(|_| RenderCitationError::ScriptFaulty)?;

    let output = process
        .wait_with_output()
        .compat()
        .await
        .map_err(|_| RenderCitationError::ScriptFaulty)?;

    if output.status.success() {
        let html =
            String::from_utf8(output.stdout).map_err(|_| RenderCitationError::InvalidOutput)?;
        let markdown = html2md::parse_html(&html);
        Ok(MarkupContent {
            kind: MarkupKind::Markdown,
            value: markdown.trim().to_owned().into(),
        })
    } else {
        // A non-zero exit status means citation-js rejected the entry.
        Err(RenderCitationError::InvalidEntry)
    }
}
/// The bundled citeproc Node.js script (built by webpack from the
/// `script` directory) that is piped into the spawned `node` process.
const SCRIPT: &str = include_str!("../script/dist/citeproc.js");
#[cfg(test)]
mod tests {
    use super::*;

    // NOTE: these tests spawn a real `node` process and therefore
    // require Node.js to be installed on the test machine.

    /// A well-formed entry renders to an APA-style Markdown string.
    #[runtime::test(runtime_tokio::Tokio)]
    async fn test_valid() {
        let markdown =
            render_citation("@article{foo, author = {Foo Bar}, title = {Baz Qux}, year = 1337}")
                .await;
        assert_eq!(markdown.unwrap().value, "Bar, F. (1337). Baz Qux.");
    }

    /// An entry without key and fields is rejected.
    #[runtime::test(runtime_tokio::Tokio)]
    async fn test_invalid() {
        let markdown = render_citation("@article{}").await;
        assert_eq!(markdown, Err(RenderCitationError::InvalidEntry));
    }

    /// An entry with a key but no fields is rejected as well.
    #[runtime::test(runtime_tokio::Tokio)]
    async fn test_empty() {
        let markdown = render_citation("@article{foo,}").await;
        assert_eq!(markdown, Err(RenderCitationError::InvalidEntry));
    }
}

View file

@ -1,23 +0,0 @@
[package]
name = "texlab-completion"
version = "0.1.0"
authors = [
"Eric Förster <efoerster@users.noreply.github.com>",
"Patrick Förster <pfoerster@users.noreply.github.com>"]
edition = "2018"
[dependencies]
futures-boxed = { path = "../futures_boxed" }
futures-preview = { version = "0.3.0-alpha.17", features = ["compat"] }
itertools = "0.8.0"
log = "0.4.6"
lsp-types = { git = "https://github.com/latex-lsp/lsp-types", rev = "9fcc5d9b9d3013ce84e20ef566267754d594b268", features = ["proposed"] }
once_cell = "0.2.2"
regex = "1.2.0"
serde = { version = "1.0.97", features = ["derive", "rc"] }
serde_json = "1.0.40"
texlab-completion-data = { path = "../texlab_completion_data" }
texlab-formatting = { path = "../texlab_formatting" }
texlab-syntax = { path = "../texlab_syntax" }
texlab-workspace = { path = "../texlab_workspace" }
walkdir = "2"

View file

@ -1,110 +0,0 @@
use crate::factory::{self, LatexComponentId};
use futures_boxed::boxed;
use lsp_types::*;
use texlab_completion_data::DATABASE;
use texlab_syntax::*;
use texlab_workspace::*;
/// Completes BibTeX command names with the commands of the LaTeX kernel.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct BibtexCommandCompletionProvider;

impl FeatureProvider for BibtexCommandCompletionProvider {
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;

    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        let mut items = Vec::new();
        if let SyntaxTree::Bibtex(tree) = &request.document().tree {
            let position = request.params.position;
            // Only complete when the cursor is inside a command token,
            // but not directly on its leading backslash.
            if let Some(BibtexNode::Command(command)) = tree.find(position).last() {
                if command.token.range().contains(position)
                    && command.token.start().character != position.character
                {
                    let mut range = command.range();
                    // Keep the backslash; the edit replaces only the name.
                    range.start.character += 1;
                    let component = LatexComponentId::kernel();
                    for command in &DATABASE.kernel().commands {
                        let text_edit = TextEdit::new(range, (&command.name).into());
                        let item = factory::command(
                            request,
                            (&command.name).into(),
                            command.image.as_ref().map(AsRef::as_ref),
                            text_edit,
                            &component,
                        );
                        items.push(item);
                    }
                }
            }
        }
        items
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use lsp_types::{Position, Range};

    /// Cursor inside a command token: items are offered and the edit
    /// range excludes the leading backslash.
    #[test]
    fn test_inside_command() {
        let items = test_feature(
            BibtexCommandCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.bib", "@article{foo, bar=\n\\}")],
                main_file: "foo.bib",
                position: Position::new(1, 1),
                ..FeatureSpec::default()
            },
        );
        assert!(!items.is_empty());
        assert_eq!(
            items[0].text_edit.as_ref().map(|edit| edit.range),
            Some(Range::new_simple(1, 1, 1, 2))
        );
    }

    /// Cursor directly on the backslash: no items.
    #[test]
    fn test_start_of_command() {
        let items = test_feature(
            BibtexCommandCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.bib", "@article{foo, bar=\n\\}")],
                main_file: "foo.bib",
                position: Position::new(1, 0),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }

    /// Cursor in ordinary text (no command token): no items.
    #[test]
    fn test_inside_text() {
        let items = test_feature(
            BibtexCommandCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.bib", "@article{foo, bar=\n}")],
                main_file: "foo.bib",
                position: Position::new(1, 0),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }

    /// The provider only applies to BibTeX documents, not LaTeX ones.
    #[test]
    fn test_latex() {
        let items = test_feature(
            BibtexCommandCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\")],
                main_file: "foo.tex",
                position: Position::new(0, 1),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }
}

View file

@ -1,154 +0,0 @@
use crate::factory;
use futures_boxed::boxed;
use lsp_types::*;
use texlab_syntax::*;
use texlab_workspace::*;
/// Completes BibTeX declaration types (`@article`, `@string`, ...) when
/// the cursor is placed on a declaration's type token.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct BibtexEntryTypeCompletionProvider;

impl FeatureProvider for BibtexEntryTypeCompletionProvider {
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;

    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        if let SyntaxTree::Bibtex(tree) = &request.document().tree {
            let position = request.params.position;
            for declaration in &tree.root.children {
                match declaration {
                    BibtexDeclaration::Preamble(preamble) => {
                        if contains(&preamble.ty, position) {
                            return make_items(request, preamble.ty.range());
                        }
                    }
                    BibtexDeclaration::String(string) => {
                        if contains(&string.ty, position) {
                            return make_items(request, string.ty.range());
                        }
                    }
                    BibtexDeclaration::Entry(entry) => {
                        if contains(&entry.ty, position) {
                            return make_items(request, entry.ty.range());
                        }
                    }
                    // Comments have no type token to complete.
                    BibtexDeclaration::Comment(_) => {}
                }
            }
        }
        Vec::new()
    }
}
/// Tells whether `position` lies inside the type token `ty`, excluding
/// the token's very first character (the `@` sign column).
fn contains(ty: &BibtexToken, position: Position) -> bool {
    let inside_token = ty.range().contains(position);
    let past_first_char = ty.start().character != position.character;
    inside_token && past_first_char
}
/// Builds one completion item per known BibTeX entry type, replacing the
/// given token range without its leading `@` sign.
fn make_items(request: &FeatureRequest<CompletionParams>, mut range: Range) -> Vec<CompletionItem> {
    // Keep the `@` sign in place; the edit replaces only the type name.
    range.start.character += 1;
    LANGUAGE_DATA
        .entry_types
        .iter()
        .map(|ty| {
            let text_edit = TextEdit::new(range, (&ty.name).into());
            factory::entry_type(request, ty, text_edit)
        })
        .collect()
}
#[cfg(test)]
mod tests {
    use super::*;
    use lsp_types::Position;

    /// Cursor before the `@` sign: no items.
    #[test]
    fn test_before_at_sign() {
        let items = test_feature(
            BibtexEntryTypeCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.bib", "@")],
                main_file: "foo.bib",
                position: Position::new(0, 0),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }

    /// Cursor right after the `@` sign: items with an empty edit range.
    #[test]
    fn test_after_at_sign() {
        let items = test_feature(
            BibtexEntryTypeCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.bib", "@")],
                main_file: "foo.bib",
                position: Position::new(0, 1),
                ..FeatureSpec::default()
            },
        );
        assert!(!items.is_empty());
        assert_eq!(
            items[0].text_edit.as_ref().map(|edit| edit.range),
            Some(Range::new_simple(0, 1, 0, 1))
        );
    }

    /// Cursor inside a partially typed type: the whole name is replaced.
    #[test]
    fn test_inside_entry_type() {
        let items = test_feature(
            BibtexEntryTypeCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.bib", "@foo")],
                main_file: "foo.bib",
                position: Position::new(0, 2),
                ..FeatureSpec::default()
            },
        );
        assert!(!items.is_empty());
        assert_eq!(
            items[0].text_edit.as_ref().map(|edit| edit.range),
            Some(Range::new_simple(0, 1, 0, 4))
        );
    }

    /// Cursor on the entry key: no type completion.
    #[test]
    fn test_inside_entry_key() {
        let items = test_feature(
            BibtexEntryTypeCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.bib", "@article{foo,}")],
                main_file: "foo.bib",
                position: Position::new(0, 11),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }

    /// Cursor in junk/comment text: no items.
    #[test]
    fn test_inside_comments() {
        let items = test_feature(
            BibtexEntryTypeCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.bib", "foo")],
                main_file: "foo.bib",
                position: Position::new(0, 2),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }

    /// The provider only applies to BibTeX documents.
    #[test]
    fn test_latex() {
        let items = test_feature(
            BibtexEntryTypeCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "@")],
                main_file: "foo.tex",
                position: Position::new(0, 1),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }
}

View file

@ -1,175 +0,0 @@
use crate::factory;
use futures_boxed::boxed;
use lsp_types::{CompletionItem, CompletionParams, Range, TextEdit};
use texlab_syntax::*;
use texlab_workspace::*;
/// Completes BibTeX field names inside an entry body.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct BibtexFieldNameCompletionProvider;

impl FeatureProvider for BibtexFieldNameCompletionProvider {
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;

    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        if let SyntaxTree::Bibtex(tree) = &request.document().tree {
            let position = request.params.position;
            match tree.find(position).last() {
                Some(BibtexNode::Field(field)) => {
                    // Cursor on an existing field name: replace it.
                    if field.name.range().contains(position) {
                        return make_items(request, field.name.range());
                    }
                }
                Some(BibtexNode::Entry(entry)) => {
                    // Cursor inside the entry body but not on the type or
                    // the key: insert a new field name at the cursor.
                    if !entry.is_comment() && !entry.ty.range().contains(position) {
                        let edit_range = Range::new(position, position);
                        if let Some(key) = &entry.key {
                            if !key.range().contains(position) {
                                return make_items(request, edit_range);
                            }
                        } else {
                            // Entry without a key: any body position is fine.
                            return make_items(request, edit_range);
                        }
                    }
                }
                _ => {}
            }
        }
        Vec::new()
    }
}
/// Creates one completion item per documented BibTeX field name,
/// replacing the given edit range.
fn make_items(
    request: &FeatureRequest<CompletionParams>,
    edit_range: Range,
) -> Vec<CompletionItem> {
    LANGUAGE_DATA
        .fields
        .iter()
        .map(|field| {
            let text_edit = TextEdit::new(edit_range, (&field.name).into());
            factory::field_name(request, field, text_edit)
        })
        .collect()
}
#[cfg(test)]
mod tests {
    use super::*;
    use lsp_types::Position;

    /// Cursor inside the first field name: the name is replaced.
    #[test]
    fn test_inside_first_field() {
        let items = test_feature(
            BibtexFieldNameCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.bib", "@article{foo,\nbar}")],
                main_file: "foo.bib",
                position: Position::new(1, 1),
                ..FeatureSpec::default()
            },
        );
        assert!(!items.is_empty());
        assert_eq!(
            items[0].text_edit.as_ref().map(|edit| edit.range),
            Some(Range::new_simple(1, 0, 1, 3))
        );
    }

    /// Cursor inside a later field name: only that name is replaced.
    #[test]
    fn test_inside_second_field() {
        let items = test_feature(
            BibtexFieldNameCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file(
                    "foo.bib",
                    "@article{foo, bar = {baz}, qux}",
                )],
                main_file: "foo.bib",
                position: Position::new(0, 27),
                ..FeatureSpec::default()
            },
        );
        assert!(!items.is_empty());
        assert_eq!(
            items[0].text_edit.as_ref().map(|edit| edit.range),
            Some(Range::new_simple(0, 27, 0, 30))
        );
    }

    /// Cursor in the entry body after the key: a new name is inserted.
    #[test]
    fn test_inside_entry() {
        let items = test_feature(
            BibtexFieldNameCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.bib", "@article{foo, \n}")],
                main_file: "foo.bib",
                position: Position::new(1, 0),
                ..FeatureSpec::default()
            },
        );
        assert!(!items.is_empty());
        assert_eq!(
            items[0].text_edit.as_ref().map(|edit| edit.range),
            Some(Range::new_simple(1, 0, 1, 0))
        );
    }

    /// Cursor inside a field *value*: no field-name completion.
    #[test]
    fn test_inside_content() {
        let items = test_feature(
            BibtexFieldNameCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.bib", "@article{foo,\nbar = {baz}}")],
                main_file: "foo.bib",
                position: Position::new(1, 7),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }

    /// Cursor on the entry type: no field-name completion.
    #[test]
    fn test_inside_entry_type() {
        let items = test_feature(
            BibtexFieldNameCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.bib", "@article{foo,}")],
                main_file: "foo.bib",
                position: Position::new(0, 3),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }

    // TODO: Improve behavior of this provider
    //
    // #[test]
    // fn test_after_equals_sign() {
    //     let items = test_feature(
    //         BibtexFieldNameCompletionProvider,
    //         FeatureSpec {
    //             files: vec![FeatureSpec::file("foo.bib", "@article{foo, bar = \n}")],
    //             main_file: "foo.bib",
    //             position: Position::new(1, 0),
    //             ..FeatureSpec::default()
    //         },
    //     );
    //     assert!(items.is_empty());
    // }

    /// The provider only applies to BibTeX documents.
    #[test]
    fn test_inside_latex() {
        let items = test_feature(
            BibtexFieldNameCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "@article{foo,}")],
                main_file: "foo.tex",
                position: Position::new(0, 3),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }
}

View file

@ -1,3 +0,0 @@
//! Completion providers for BibTeX documents.
pub mod command;
pub mod entry_type;
pub mod field_name;

View file

@ -1,376 +0,0 @@
use lsp_types::*;
use once_cell::sync::Lazy;
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use std::path::Path;
use texlab_formatting::bibtex::{self, BibtexFormattingParams};
use texlab_syntax::*;
use texlab_workspace::*;
/// Matches any run of whitespace; used to collapse BibTeX entry source
/// into a single-line filter text for citation completion items.
static WHITESPACE_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new("\\s+").unwrap());
/// Tags the kind of a completion item; attached to each item as JSON
/// `data` by the factory functions below.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub enum CompletionItemData {
    Command,
    CommandSnippet,
    Environment,
    Label,
    Folder,
    File,
    PgfLibrary,
    TikzLibrary,
    Color,
    ColorModel,
    Package,
    Class,
    EntryType,
    FieldName,
    // Citation items carry the full entry source code with them.
    Citation { entry_code: String },
    Argument,
}
// `From` is preferred over a hand-written `Into` (clippy::from_over_into);
// the blanket impl still gives callers `CompletionItemData::…into()`.
impl From<CompletionItemData> for serde_json::Value {
    /// Serializes the completion item metadata to a JSON value.
    ///
    /// The `unwrap` cannot fail: the enum contains only plain,
    /// always-serializable data.
    fn from(data: CompletionItemData) -> Self {
        serde_json::to_value(data).unwrap()
    }
}
/// Identifies the origin of a completed LaTeX item: either user-defined
/// or provided by a set of component files (packages/classes).
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum LatexComponentId<'a> {
    User,
    Component(Vec<&'a str>),
}

impl<'a> LatexComponentId<'a> {
    /// The id of the LaTeX kernel: a component with no associated files.
    pub fn kernel() -> Self {
        LatexComponentId::Component(Vec::new())
    }

    /// Human-readable origin description, shown as the item detail.
    pub fn detail(&self) -> Cow<'static, str> {
        match self {
            LatexComponentId::User => Cow::from("user-defined"),
            LatexComponentId::Component(files) if files.is_empty() => Cow::from("built-in"),
            LatexComponentId::Component(files) => Cow::from(files.join(", ")),
        }
    }
}
/// Creates a completion item for a LaTeX/BibTeX command.
pub fn command(
    request: &FeatureRequest<CompletionParams>,
    name: Cow<'static, str>,
    image: Option<&str>,
    text_edit: TextEdit,
    component: &LatexComponentId,
) -> CompletionItem {
    CompletionItem {
        kind: Some(adjust_kind(request, CompletionItemKind::Function)),
        data: Some(CompletionItemData::Command.into()),
        // Commands may ship a base64 PNG preview of their rendering.
        documentation: image.map(|image| image_documentation(&name, image)),
        text_edit: Some(text_edit),
        ..CompletionItem::new_simple(name, component.detail())
    }
}

/// Creates a snippet completion item for a command (e.g. `\begin`).
pub fn command_snippet(
    request: &FeatureRequest<CompletionParams>,
    name: &'static str,
    image: Option<&str>,
    template: &'static str,
    component: &LatexComponentId,
) -> CompletionItem {
    CompletionItem {
        kind: Some(adjust_kind(request, CompletionItemKind::Snippet)),
        data: Some(CompletionItemData::CommandSnippet.into()),
        documentation: image.map(|image| image_documentation(&name, image)),
        insert_text: Some(template.into()),
        insert_text_format: Some(InsertTextFormat::Snippet),
        ..CompletionItem::new_simple(name.into(), component.detail())
    }
}

/// Creates a completion item for an environment name.
pub fn environment(
    request: &FeatureRequest<CompletionParams>,
    name: Cow<'static, str>,
    text_edit: TextEdit,
    component: &LatexComponentId,
) -> CompletionItem {
    CompletionItem {
        kind: Some(adjust_kind(request, CompletionItemKind::EnumMember)),
        data: Some(CompletionItemData::Environment.into()),
        text_edit: Some(text_edit),
        ..CompletionItem::new_simple(name, component.detail())
    }
}

/// Creates a completion item for a label reference.
pub fn label(
    request: &FeatureRequest<CompletionParams>,
    name: Cow<'static, str>,
    text_edit: TextEdit,
    context: &OutlineContext,
) -> CompletionItem {
    // Helper: render an optional string as "" when absent.
    fn to_str(value: &Option<String>) -> &str {
        value.as_ref().map(String::as_str).unwrap_or_default()
    }

    // Include caption and section in the filter text so the client can
    // match labels by their surrounding context, not just their name.
    let filter_text = format!(
        "{} {} {}",
        &name,
        to_str(&context.caption),
        to_str(&context.section)
    );
    CompletionItem {
        label: name,
        kind: Some(adjust_kind(request, CompletionItemKind::Field)),
        data: Some(CompletionItemData::Label.into()),
        text_edit: Some(text_edit),
        filter_text: Some(filter_text.into()),
        documentation: context.documentation().map(Documentation::MarkupContent),
        ..CompletionItem::default()
    }
}

/// Creates a completion item for a directory (path completion).
pub fn folder(
    request: &FeatureRequest<CompletionParams>,
    path: &Path,
    text_edit: TextEdit,
) -> CompletionItem {
    CompletionItem {
        label: path
            .file_name()
            .unwrap()
            .to_string_lossy()
            .into_owned()
            .into(),
        kind: Some(adjust_kind(request, CompletionItemKind::Folder)),
        data: Some(CompletionItemData::Folder.into()),
        text_edit: Some(text_edit),
        ..CompletionItem::default()
    }
}

/// Creates a completion item for a file (path completion).
pub fn file(
    request: &FeatureRequest<CompletionParams>,
    path: &Path,
    text_edit: TextEdit,
) -> CompletionItem {
    CompletionItem {
        label: path
            .file_name()
            .unwrap()
            .to_string_lossy()
            .into_owned()
            .into(),
        kind: Some(adjust_kind(request, CompletionItemKind::File)),
        data: Some(CompletionItemData::File.into()),
        text_edit: Some(text_edit),
        ..CompletionItem::default()
    }
}

/// Creates a completion item for a PGF library name.
pub fn pgf_library(
    request: &FeatureRequest<CompletionParams>,
    name: &'static str,
    text_edit: TextEdit,
) -> CompletionItem {
    CompletionItem {
        label: name.into(),
        kind: Some(adjust_kind(request, CompletionItemKind::Module)),
        data: Some(CompletionItemData::PgfLibrary.into()),
        text_edit: Some(text_edit),
        ..CompletionItem::default()
    }
}

/// Creates a completion item for a TikZ library name.
pub fn tikz_library(
    request: &FeatureRequest<CompletionParams>,
    name: &'static str,
    text_edit: TextEdit,
) -> CompletionItem {
    CompletionItem {
        label: name.into(),
        kind: Some(adjust_kind(request, CompletionItemKind::Module)),
        data: Some(CompletionItemData::TikzLibrary.into()),
        text_edit: Some(text_edit),
        ..CompletionItem::default()
    }
}

/// Creates a completion item for a color name.
pub fn color(
    request: &FeatureRequest<CompletionParams>,
    name: &'static str,
    text_edit: TextEdit,
) -> CompletionItem {
    CompletionItem {
        label: name.into(),
        kind: Some(adjust_kind(request, CompletionItemKind::Color)),
        data: Some(CompletionItemData::Color.into()),
        text_edit: Some(text_edit),
        ..CompletionItem::default()
    }
}

/// Creates a completion item for a color model name.
pub fn color_model(
    request: &FeatureRequest<CompletionParams>,
    name: &'static str,
    text_edit: TextEdit,
) -> CompletionItem {
    CompletionItem {
        label: name.into(),
        kind: Some(adjust_kind(request, CompletionItemKind::Color)),
        data: Some(CompletionItemData::ColorModel.into()),
        text_edit: Some(text_edit),
        ..CompletionItem::default()
    }
}

/// Creates a completion item for a LaTeX package name.
pub fn package(
    request: &FeatureRequest<CompletionParams>,
    name: &'static str,
    text_edit: TextEdit,
) -> CompletionItem {
    CompletionItem {
        label: name.into(),
        kind: Some(adjust_kind(request, CompletionItemKind::Class)),
        data: Some(CompletionItemData::Package.into()),
        text_edit: Some(text_edit),
        ..CompletionItem::default()
    }
}

/// Creates a completion item for a document class name.
pub fn class(
    request: &FeatureRequest<CompletionParams>,
    name: &'static str,
    text_edit: TextEdit,
) -> CompletionItem {
    CompletionItem {
        label: name.into(),
        kind: Some(adjust_kind(request, CompletionItemKind::Class)),
        data: Some(CompletionItemData::Class.into()),
        text_edit: Some(text_edit),
        ..CompletionItem::default()
    }
}
/// Creates a completion item for a BibTeX citation key.
///
/// The whole formatted entry source is folded into the filter text so
/// clients can match the item by author, title, etc., and is attached
/// as item data (`Citation { entry_code }`) for later use.
pub fn citation(
    request: &FeatureRequest<CompletionParams>,
    entry: &BibtexEntry,
    key: String,
    text_edit: TextEdit,
) -> CompletionItem {
    let params = BibtexFormattingParams::default();
    let entry_code = bibtex::format_entry(&entry, &params);
    // Strip BibTeX punctuation and collapse whitespace so the filter
    // text is a flat list of words.
    let filter_text = format!(
        "{} {}",
        &key,
        WHITESPACE_REGEX
            .replace_all(
                &entry_code
                    .replace('{', " ")
                    .replace('}', " ")
                    .replace(',', " ")
                    .replace('=', " "),
                " ",
            )
            .trim()
    );
    // NOTE: a leftover `log::info!("FilterText = …")` debug statement was
    // removed here; it logged every completion item at info level.
    CompletionItem {
        label: key.into(),
        kind: Some(adjust_kind(request, CompletionItemKind::Field)),
        filter_text: Some(filter_text.into()),
        data: Some(CompletionItemData::Citation { entry_code }.into()),
        text_edit: Some(text_edit),
        ..CompletionItem::default()
    }
}
/// Creates a completion item for a BibTeX entry type, attaching its
/// Markdown documentation when available.
pub fn entry_type(
    request: &FeatureRequest<CompletionParams>,
    ty: &'static BibtexEntryTypeDoc,
    text_edit: TextEdit,
) -> CompletionItem {
    CompletionItem {
        label: (&ty.name).into(),
        kind: Some(adjust_kind(request, CompletionItemKind::Interface)),
        data: Some(CompletionItemData::EntryType.into()),
        text_edit: Some(text_edit),
        documentation: ty.documentation.as_ref().map(|doc| {
            Documentation::MarkupContent(MarkupContent {
                kind: MarkupKind::Markdown,
                value: doc.into(),
            })
        }),
        ..CompletionItem::default()
    }
}

/// Creates a completion item for a BibTeX field name with its
/// Markdown documentation.
pub fn field_name(
    request: &FeatureRequest<CompletionParams>,
    field: &'static BibtexFieldDoc,
    text_edit: TextEdit,
) -> CompletionItem {
    CompletionItem {
        label: (&field.name).into(),
        kind: Some(adjust_kind(request, CompletionItemKind::Field)),
        data: Some(CompletionItemData::FieldName.into()),
        text_edit: Some(text_edit),
        documentation: Some(Documentation::MarkupContent(MarkupContent {
            kind: MarkupKind::Markdown,
            value: (&field.documentation).into(),
        })),
        ..CompletionItem::default()
    }
}
/// Creates a completion item for a command argument value.
pub fn argument(
    request: &FeatureRequest<CompletionParams>,
    name: &'static str,
    text_edit: TextEdit,
    image: Option<&str>,
) -> CompletionItem {
    CompletionItem {
        label: name.into(),
        kind: Some(adjust_kind(request, CompletionItemKind::Field)),
        data: Some(CompletionItemData::Argument.into()),
        text_edit: Some(text_edit),
        documentation: image.map(|image| image_documentation(&name, image)),
        ..CompletionItem::default()
    }
}

/// Wraps a base64-encoded PNG preview into Markdown documentation.
/// The `|width=48,height=48` suffix is a client-side rendering hint.
fn image_documentation(name: &str, image: &str) -> Documentation {
    Documentation::MarkupContent(MarkupContent {
        kind: MarkupKind::Markdown,
        value: format!(
            "![{}](data:image/png;base64,{}|width=48,height=48)",
            name, image
        )
        .into(),
    })
}
/// Downgrades `kind` to `CompletionItemKind::Text` when the client did
/// not advertise support for it in its completion capabilities.
fn adjust_kind(
    request: &FeatureRequest<CompletionParams>,
    kind: CompletionItemKind,
) -> CompletionItemKind {
    // Walk the optional capability chain down to the supported kinds.
    let value_set = request
        .client_capabilities
        .text_document
        .as_ref()
        .and_then(|cap| cap.completion.as_ref())
        .and_then(|cap| cap.completion_item_kind.as_ref())
        .and_then(|cap| cap.value_set.as_ref());

    match value_set {
        Some(supported) if supported.contains(&kind) => kind,
        // Plain text is the only kind every client must support.
        _ => CompletionItemKind::Text,
    }
}

View file

@ -1,114 +0,0 @@
use super::combinators::{self, Parameter};
use crate::factory;
use futures_boxed::boxed;
use lsp_types::*;
use std::iter;
use texlab_completion_data::DATABASE;
use texlab_workspace::*;
/// Completes the known values of LaTeX command arguments (e.g. the
/// symbol names accepted by `\mathbb{}` from amsfonts).
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct LatexArgumentCompletionProvider;

impl FeatureProvider for LatexArgumentCompletionProvider {
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;

    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        let mut all_items = Vec::new();
        // Only consider components actually referenced by the documents
        // related to the current one.
        for component in DATABASE.related_components(request.related_documents()) {
            for command in &component.commands {
                let name = format!("\\{}", command.name);
                for (i, parameter) in command.parameters.iter().enumerate() {
                    // Delegate cursor/context detection to the argument
                    // combinator; the closure builds the items for the
                    // i-th parameter of this command.
                    let mut items = combinators::argument(
                        request,
                        iter::once(Parameter::new(&name, i)),
                        async move |context| {
                            let mut items = Vec::new();
                            for argument in &parameter.0 {
                                let text_edit =
                                    TextEdit::new(context.range, (&argument.name).into());
                                let item = factory::argument(
                                    request,
                                    &argument.name,
                                    text_edit,
                                    argument.image.as_ref().map(AsRef::as_ref),
                                );
                                items.push(item);
                            }
                            items
                        },
                    )
                    .await;
                    all_items.append(&mut items);
                }
            }
        }
        all_items
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use lsp_types::{Position, Range};

    /// Cursor inside empty `\mathbb{}`: items inserted at the cursor.
    #[test]
    fn test_inside_mathbb_empty() {
        let items = test_feature(
            LatexArgumentCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file(
                    "foo.tex",
                    "\\usepackage{amsfonts}\n\\mathbb{}",
                )],
                main_file: "foo.tex",
                position: Position::new(1, 8),
                ..FeatureSpec::default()
            },
        );
        assert!(!items.is_empty());
        assert_eq!(
            items[0].text_edit.as_ref().map(|edit| edit.range),
            Some(Range::new_simple(1, 8, 1, 8))
        );
    }

    /// Cursor inside `\mathbb{foo}`: the existing argument is replaced.
    #[test]
    fn test_inside_mathbb_non_empty() {
        let items = test_feature(
            LatexArgumentCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file(
                    "foo.tex",
                    "\\usepackage{amsfonts}\n\\mathbb{foo}",
                )],
                main_file: "foo.tex",
                position: Position::new(1, 8),
                ..FeatureSpec::default()
            },
        );
        assert!(!items.is_empty());
        assert_eq!(
            items[0].text_edit.as_ref().map(|edit| edit.range),
            Some(Range::new_simple(1, 8, 1, 11))
        );
    }

    /// Cursor after the closing brace: no items.
    #[test]
    fn test_outside_mathbb() {
        let items = test_feature(
            LatexArgumentCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file(
                    "foo.tex",
                    "\\usepackage{amsfonts}\n\\mathbb{}",
                )],
                main_file: "foo.tex",
                position: Position::new(1, 9),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }
}

View file

@ -1,28 +0,0 @@
use super::combinators;
use crate::factory::{self, LatexComponentId};
use futures_boxed::boxed;
use lsp_types::{CompletionItem, CompletionParams};
use texlab_workspace::*;
/// Completes `\begin` as a snippet that also inserts the matching
/// `\end` with linked placeholders.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct LatexBeginCommandCompletionProvider;

impl FeatureProvider for LatexBeginCommandCompletionProvider {
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;

    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        combinators::command(request, async move |_| {
            // NOTE(review): the template starts with `begin`, not `\begin`
            // — presumably the typed backslash is kept by the client;
            // confirm against combinators::command's edit semantics.
            let snippet = factory::command_snippet(
                request,
                "begin",
                None,
                "begin{$1}\n\t$0\n\\end{$1}",
                &LatexComponentId::kernel(),
            );
            vec![snippet]
        })
        .await
    }
}

View file

@ -1,135 +0,0 @@
use super::combinators::{self, Parameter};
use crate::factory;
use futures_boxed::boxed;
use lsp_types::{CompletionItem, CompletionParams, TextEdit};
use texlab_syntax::*;
use texlab_workspace::*;
/// Completes citation keys inside citation commands (`\cite{…}` etc.)
/// from all related BibTeX documents.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct LatexCitationCompletionProvider;

impl FeatureProvider for LatexCitationCompletionProvider {
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;

    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        // One Parameter per known citation command and key-argument index.
        let parameters = LANGUAGE_DATA
            .citation_commands
            .iter()
            .map(|cmd| Parameter::new(&cmd.name, cmd.index));
        combinators::argument(request, parameters, async move |context| {
            let mut items = Vec::new();
            for document in request.related_documents() {
                if let SyntaxTree::Bibtex(tree) = &document.tree {
                    for entry in &tree.entries() {
                        // Skip @comment pseudo-entries and entries
                        // without a key.
                        if !entry.is_comment() {
                            if let Some(key) = &entry.key {
                                let key = key.text().to_owned();
                                let text_edit = TextEdit::new(context.range, key.clone().into());
                                let item = factory::citation(request, entry, key, text_edit);
                                items.push(item);
                            }
                        }
                    }
                }
            }
            items
        })
        .await
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use lsp_types::{Position, Range};

    // Only keys from bibliographies referenced via \addbibresource are
    // offered; `baz.bib` is never included in these fixtures.

    /// Empty `\cite{}`: the key is inserted at the cursor.
    #[test]
    fn test_empty() {
        let items = test_feature(
            LatexCitationCompletionProvider,
            FeatureSpec {
                files: vec![
                    FeatureSpec::file("foo.tex", "\\addbibresource{bar.bib}\n\\cite{}"),
                    FeatureSpec::file("bar.bib", "@article{foo,}"),
                    FeatureSpec::file("baz.bib", "@article{bar,}"),
                ],
                main_file: "foo.tex",
                position: Position::new(1, 6),
                ..FeatureSpec::default()
            },
        );
        assert_eq!(items.len(), 1);
        assert_eq!(items[0].label, "foo");
        assert_eq!(
            items[0].text_edit.as_ref().map(|edit| edit.range),
            Some(Range::new_simple(1, 6, 1, 6))
        );
    }

    /// Cursor on an existing key: the whole key is replaced.
    #[test]
    fn test_single_key() {
        let items = test_feature(
            LatexCitationCompletionProvider,
            FeatureSpec {
                files: vec![
                    FeatureSpec::file("foo.tex", "\\addbibresource{bar.bib}\n\\cite{foo}"),
                    FeatureSpec::file("bar.bib", "@article{foo,}"),
                    FeatureSpec::file("baz.bib", "@article{bar,}"),
                ],
                main_file: "foo.tex",
                position: Position::new(1, 6),
                ..FeatureSpec::default()
            },
        );
        assert_eq!(items.len(), 1);
        assert_eq!(items[0].label, "foo");
        assert_eq!(
            items[0].text_edit.as_ref().map(|edit| edit.range),
            Some(Range::new_simple(1, 6, 1, 9))
        );
    }

    /// Cursor after a comma in a multi-key list: insertion at cursor.
    #[test]
    fn test_second_key() {
        let items = test_feature(
            LatexCitationCompletionProvider,
            FeatureSpec {
                files: vec![
                    FeatureSpec::file("foo.tex", "\\addbibresource{bar.bib}\n\\cite{foo,}"),
                    FeatureSpec::file("bar.bib", "@article{foo,}"),
                    FeatureSpec::file("baz.bib", "@article{bar,}"),
                ],
                main_file: "foo.tex",
                position: Position::new(1, 10),
                ..FeatureSpec::default()
            },
        );
        assert_eq!(items.len(), 1);
        assert_eq!(items[0].label, "foo");
        assert_eq!(
            items[0].text_edit.as_ref().map(|edit| edit.range),
            Some(Range::new_simple(1, 10, 1, 10))
        );
    }

    /// Cursor outside the braces of `\cite{}`: no items.
    #[test]
    fn test_outside_cite() {
        let items = test_feature(
            LatexCitationCompletionProvider,
            FeatureSpec {
                files: vec![
                    FeatureSpec::file("foo.tex", "\\addbibresource{bar.bib}\n\\cite{}"),
                    FeatureSpec::file("bar.bib", "@article{foo,}"),
                    FeatureSpec::file("baz.bib", "@article{bar,}"),
                ],
                main_file: "foo.tex",
                position: Position::new(1, 7),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }
}

View file

@ -1,71 +0,0 @@
use super::combinators::{self, Parameter};
use crate::factory;
use futures_boxed::boxed;
use lsp_types::{CompletionItem, CompletionParams, TextEdit};
use texlab_syntax::*;
use texlab_workspace::*;
/// Completes color names inside color commands (e.g. `\color{…}`) from
/// the static list in the language data.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct LatexColorCompletionProvider;
impl FeatureProvider for LatexColorCompletionProvider {
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;
    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        let parameters = LANGUAGE_DATA
            .color_commands
            .iter()
            .map(|cmd| Parameter::new(&cmd.name, cmd.index));
        combinators::argument(request, parameters, async move |context| {
            // Every known color is offered; the client filters by prefix.
            let mut items = Vec::new();
            for name in &LANGUAGE_DATA.colors {
                let text_edit = TextEdit::new(context.range, name.into());
                let item = factory::color(request, name, text_edit);
                items.push(item);
            }
            items
        })
        .await
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use lsp_types::{Position, Range};
    // Inside the braces of `\color{}` the full color list is offered.
    #[test]
    fn test_inside_color() {
        let items = test_feature(
            LatexColorCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\color{}")],
                main_file: "foo.tex",
                position: Position::new(0, 7),
                ..FeatureSpec::default()
            },
        );
        assert!(!items.is_empty());
        assert_eq!(
            items[0].text_edit.as_ref().map(|edit| edit.range),
            Some(Range::new_simple(0, 7, 0, 7))
        );
    }
    // Past the closing brace: no completion.
    #[test]
    fn test_outside_color() {
        let items = test_feature(
            LatexColorCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\color{}")],
                main_file: "foo.tex",
                position: Position::new(0, 8),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }
}

View file

@ -1,91 +0,0 @@
use super::combinators::{self, Parameter};
use crate::factory;
use futures_boxed::boxed;
use lsp_types::{CompletionItem, CompletionParams, TextEdit};
use texlab_syntax::LANGUAGE_DATA;
use texlab_workspace::*;
/// Completes color model names (e.g. in `\definecolor{name}{…}`) from
/// the fixed [`MODEL_NAMES`] list.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct LatexColorModelCompletionProvider;

impl FeatureProvider for LatexColorModelCompletionProvider {
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;

    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        let parameters = LANGUAGE_DATA
            .color_model_commands
            .iter()
            .map(|cmd| Parameter::new(&cmd.name, cmd.index));
        // `request` is already a reference — pass it directly (previously
        // `&request` created a double reference that only worked through
        // deref coercion; every sibling provider passes `request`).
        combinators::argument(request, parameters, async move |context| {
            let mut items = Vec::new();
            for name in MODEL_NAMES {
                let text_edit = TextEdit::new(context.range, (*name).into());
                let item = factory::color_model(request, name, text_edit);
                items.push(item);
            }
            items
        })
        .await
    }
}

/// Color model names offered as completions.
const MODEL_NAMES: &[&str] = &["gray", "rgb", "RGB", "HTML", "cmyk"];
#[cfg(test)]
mod tests {
    use super::*;
    use lsp_types::{Position, Range};

    // Inside the model argument of `\definecolor`: models are offered.
    #[test]
    fn test_inside_define_color() {
        let items = test_feature(
            LatexColorModelCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\definecolor{name}{}")],
                main_file: "foo.tex",
                position: Position::new(0, 19),
                ..FeatureSpec::default()
            },
        );
        assert!(!items.is_empty());
        assert_eq!(
            items[0].text_edit.as_ref().map(|edit| edit.range),
            Some(Range::new_simple(0, 19, 0, 19))
        );
    }

    // Between the two argument groups: no completion.
    #[test]
    fn test_outside_define_color() {
        let items = test_feature(
            LatexColorModelCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\definecolor{name}{}")],
                main_file: "foo.tex",
                position: Position::new(0, 18),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }

    // `\definecolorset` also triggers model completion.
    // Renamed from `tet_inside_define_color_set` (typo) to follow the
    // `test_` naming convention of the sibling tests.
    #[test]
    fn test_inside_define_color_set() {
        let items = test_feature(
            LatexColorModelCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\definecolorset{}")],
                main_file: "foo.tex",
                position: Position::new(0, 16),
                ..FeatureSpec::default()
            },
        );
        assert!(!items.is_empty());
        assert_eq!(
            items[0].text_edit.as_ref().map(|edit| edit.range),
            Some(Range::new_simple(0, 16, 0, 16))
        );
    }
}

View file

@ -1,149 +0,0 @@
use lsp_types::*;
use std::future::Future;
use std::sync::Arc;
use texlab_syntax::*;
use texlab_workspace::*;
/// A command argument that a completion combinator should match:
/// the command name (including the backslash) and the zero-based index
/// of the argument group.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct Parameter<'a> {
    pub name: &'a str,
    pub index: usize,
}
impl<'a> Parameter<'a> {
    pub fn new(name: &'a str, index: usize) -> Self {
        Self { name, index }
    }
}
/// Runs `execute` with the LaTeX command under the cursor, if any.
///
/// Yields no items when the document is not a LaTeX file or when the
/// cursor is not on a command name.
pub async fn command<E, F>(
    request: &FeatureRequest<CompletionParams>,
    execute: E,
) -> Vec<CompletionItem>
where
    E: FnOnce(Arc<LatexCommand>) -> F,
    F: Future<Output = Vec<CompletionItem>>,
{
    match &request.document().tree {
        SyntaxTree::Latex(tree) => match tree.find_command_by_name(request.params.position) {
            Some(cmd) => execute(cmd).await,
            None => Vec::new(),
        },
        _ => Vec::new(),
    }
}
/// Context handed to an argument-completion callback: which parameter
/// matched, the command it belongs to, and the range the completion
/// edit should replace (the word under the cursor, or an empty range at
/// the cursor).
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct ArgumentContext<'a> {
    pub parameter: Parameter<'a>,
    pub command: Arc<LatexCommand>,
    pub range: Range,
}
/// Runs `execute` when the cursor is inside one of the given command
/// arguments, passing an [`ArgumentContext`] for the first parameter
/// that matches. Returns no items otherwise.
pub async fn argument<'a, I, E, F>(
    request: &'a FeatureRequest<CompletionParams>,
    mut parameters: I,
    execute: E,
) -> Vec<CompletionItem>
where
    I: Iterator<Item = Parameter<'a>>,
    E: FnOnce(ArgumentContext<'a>) -> F,
    F: Future<Output = Vec<CompletionItem>>,
{
    if let SyntaxTree::Latex(tree) = &request.document().tree {
        let position = request.params.position;
        if let Some(command) = find_command(tree, position) {
            for parameter in parameters.by_ref() {
                if command.name.text() != parameter.name {
                    continue;
                }
                if let Some(args) = command.args.get(parameter.index) {
                    // A closed group (`}` present) only matches when the
                    // cursor is strictly inside the braces; an unclosed
                    // group matches regardless so typing still completes.
                    if args.right.is_some() && !args.range().contains_exclusive(position) {
                        continue;
                    }
                    // Find the word under the cursor so the completion
                    // replaces it instead of inserting next to it.
                    let mut range = None;
                    for child in &args.children {
                        if let LatexContent::Text(text) = &child {
                            for word in &text.words {
                                if word.range().contains(position) {
                                    range = Some(word.range());
                                    break;
                                }
                            }
                        }
                    }
                    // No word under the cursor: use an empty range at the
                    // cursor position (pure insertion).
                    let text_range = range.unwrap_or_else(|| Range::new(position, position));
                    let context = ArgumentContext {
                        parameter,
                        command: Arc::clone(&command),
                        range: text_range,
                    };
                    return execute(context).await;
                }
            }
        }
    }
    Vec::new()
}
/// Runs `execute` with the command and matched argument index when the
/// cursor is inside one of the given arguments and that argument holds
/// at most a single word (used e.g. for file-path arguments).
pub async fn argument_word<'a, I, E, F>(
    request: &'a FeatureRequest<CompletionParams>,
    mut parameters: I,
    execute: E,
) -> Vec<CompletionItem>
where
    I: Iterator<Item = Parameter<'a>>,
    E: FnOnce(Arc<LatexCommand>, usize) -> F,
    F: Future<Output = Vec<CompletionItem>>,
{
    if let SyntaxTree::Latex(tree) = &request.document().tree {
        let position = request.params.position;
        if let Some(command) = find_command(tree, position) {
            for parameter in parameters.by_ref() {
                if command.name.text() != parameter.name {
                    continue;
                }
                if let Some(args) = command.args.get(parameter.index) {
                    // A closed group only matches when the cursor is
                    // strictly inside its braces.
                    if args.right.is_some() && !args.range().contains_exclusive(position) {
                        continue;
                    }
                    // Skip arguments that contain anything other than a
                    // single word. (`!is_empty()` replaces the
                    // non-idiomatic `len() != 0`.)
                    if !args.children.is_empty() && !command.has_word(parameter.index) {
                        continue;
                    }
                    return execute(Arc::clone(&command), parameter.index).await;
                }
            }
        }
    }
    Vec::new()
}
/// Completion combinator for environment-name arguments: delegates to
/// [`argument`] with every environment command from the language data
/// (e.g. `\begin` and `\end`).
pub async fn environment<'a, E, F>(
    request: &'a FeatureRequest<CompletionParams>,
    execute: E,
) -> Vec<CompletionItem>
where
    E: FnOnce(ArgumentContext<'a>) -> F,
    F: Future<Output = Vec<CompletionItem>>,
{
    argument(
        request,
        LANGUAGE_DATA
            .environment_commands
            .iter()
            .map(|cmd| Parameter::new(&cmd.name, cmd.index)),
        execute,
    )
    .await
}
/// Returns the innermost LaTeX command node containing `position`,
/// if any.
fn find_command(tree: &LatexSyntaxTree, position: Position) -> Option<Arc<LatexCommand>> {
    // `find` yields nodes from outermost to innermost, so scan the list
    // back to front to prefer the innermost command.
    tree.find(position)
        .into_iter()
        .rev()
        .find_map(|node| match node {
            LatexNode::Command(command) => Some(command),
            _ => None,
        })
}

View file

@ -1,271 +0,0 @@
use super::combinators;
use crate::factory::{self, LatexComponentId};
use futures_boxed::boxed;
use lsp_types::*;
use texlab_completion_data::DATABASE;
use texlab_workspace::*;
/// Completes command names from components (packages/classes) related
/// to the current document set, via `DATABASE.related_components`.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct LatexComponentCommandCompletionProvider;
impl FeatureProvider for LatexComponentCommandCompletionProvider {
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;
    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        combinators::command(request, async move |command| {
            // Replace only the name part after the backslash.
            let range = command.short_name_range();
            let mut items = Vec::new();
            for component in DATABASE.related_components(request.related_documents()) {
                let file_names = component.file_names.iter().map(AsRef::as_ref).collect();
                let id = LatexComponentId::Component(file_names);
                for command in &component.commands {
                    let text_edit = TextEdit::new(range, (&command.name).into());
                    let item = factory::command(
                        request,
                        (&command.name).into(),
                        command.image.as_ref().map(AsRef::as_ref),
                        text_edit,
                        &id,
                    );
                    items.push(item);
                }
            }
            items
        })
        .await
    }
}
/// Completes environment names from components related to the current
/// document set.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct LatexComponentEnvironmentCompletionProvider;
impl FeatureProvider for LatexComponentEnvironmentCompletionProvider {
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;
    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        combinators::environment(request, async move |context| {
            let mut items = Vec::new();
            for component in DATABASE.related_components(request.related_documents()) {
                let file_names = component.file_names.iter().map(AsRef::as_ref).collect();
                let id = LatexComponentId::Component(file_names);
                for environment in &component.environments {
                    let text_edit = TextEdit::new(context.range, environment.into());
                    let item = factory::environment(request, environment.into(), text_edit, &id);
                    items.push(item);
                }
            }
            items
        })
        .await
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use lsp_types::{Position, Range};
    // Cursor on the backslash itself (not the name): no completion.
    #[test]
    fn test_command_start() {
        let items = test_feature(
            LatexComponentCommandCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\use")],
                main_file: "foo.tex",
                position: Position::new(0, 0),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }
    // Cursor at the end of the name: edit range covers the name only,
    // excluding the backslash.
    #[test]
    fn test_command_end() {
        let items = test_feature(
            LatexComponentCommandCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\use")],
                main_file: "foo.tex",
                position: Position::new(0, 4),
                ..FeatureSpec::default()
            },
        );
        assert!(!items.is_empty());
        assert_eq!(
            items[0].text_edit.as_ref().map(|edit| edit.range),
            Some(Range::new_simple(0, 1, 0, 4))
        );
    }
    // Plain text without a backslash is not a command.
    #[test]
    fn test_command_word() {
        let items = test_feature(
            LatexComponentCommandCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "use")],
                main_file: "foo.tex",
                position: Position::new(0, 2),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }
    // Commands of a loaded package become available.
    #[test]
    fn test_command_package() {
        let items = test_feature(
            LatexComponentCommandCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\usepackage{lipsum}\n\\lips")],
                main_file: "foo.tex",
                position: Position::new(1, 2),
                ..FeatureSpec::default()
            },
        );
        assert!(items.iter().any(|item| item.label == "lipsum"));
    }
    // Comma-separated package lists are handled too.
    #[test]
    fn test_command_package_comma_separated() {
        let items = test_feature(
            LatexComponentCommandCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file(
                    "foo.tex",
                    "\\usepackage{geometry, lipsum}\n\\lips",
                )],
                main_file: "foo.tex",
                position: Position::new(1, 2),
                ..FeatureSpec::default()
            },
        );
        assert!(items.iter().any(|item| item.label == "lipsum"));
    }
    // Commands of the document class become available.
    #[test]
    fn test_command_class() {
        let items = test_feature(
            LatexComponentCommandCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file(
                    "foo.tex",
                    "\\documentclass{book}\n\\chap",
                )],
                main_file: "foo.tex",
                position: Position::new(1, 2),
                ..FeatureSpec::default()
            },
        );
        assert!(items.iter().any(|item| item.label == "chapter"));
    }
    #[test]
    fn test_environment_inside_of_empty_begin() {
        let items = test_feature(
            LatexComponentEnvironmentCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\begin{}")],
                main_file: "foo.tex",
                position: Position::new(0, 7),
                ..FeatureSpec::default()
            },
        );
        assert!(!items.is_empty());
        assert_eq!(
            items[0].text_edit.as_ref().map(|edit| edit.range),
            Some(Range::new_simple(0, 7, 0, 7))
        );
    }
    // Inside `\end{foo}` the existing name is replaced.
    #[test]
    fn test_environment_inside_of_non_empty_end() {
        let items = test_feature(
            LatexComponentEnvironmentCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\end{foo}")],
                main_file: "foo.tex",
                position: Position::new(0, 6),
                ..FeatureSpec::default()
            },
        );
        assert!(!items.is_empty());
        assert_eq!(
            items[0].text_edit.as_ref().map(|edit| edit.range),
            Some(Range::new_simple(0, 5, 0, 8))
        );
    }
    #[test]
    fn test_environment_outside_of_empty_begin() {
        let items = test_feature(
            LatexComponentEnvironmentCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\begin{}")],
                main_file: "foo.tex",
                position: Position::new(0, 6),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }
    #[test]
    fn test_environment_outside_of_empty_end() {
        let items = test_feature(
            LatexComponentEnvironmentCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\end{}")],
                main_file: "foo.tex",
                position: Position::new(0, 6),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }
    // Arbitrary commands do not trigger environment completion.
    #[test]
    fn test_environment_inside_of_other_command() {
        let items = test_feature(
            LatexComponentEnvironmentCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\foo{bar}")],
                main_file: "foo.tex",
                position: Position::new(0, 6),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }
    // Only the first argument of `\begin` is the environment name.
    #[test]
    fn test_environment_inside_second_argument() {
        let items = test_feature(
            LatexComponentEnvironmentCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\begin{foo}{bar}")],
                main_file: "foo.tex",
                position: Position::new(0, 14),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }
    // An unterminated `\begin{` still completes while typing.
    #[test]
    fn test_environment_unterminated() {
        let items = test_feature(
            LatexComponentEnvironmentCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\begin{ foo")],
                main_file: "foo.tex",
                position: Position::new(0, 7),
                ..FeatureSpec::default()
            },
        );
        assert!(!items.is_empty());
    }
}

View file

@ -1,108 +0,0 @@
use super::combinators::{self, Parameter};
use crate::factory;
use futures_boxed::boxed;
use lsp_types::{CompletionItem, CompletionParams, TextEdit};
use texlab_completion_data::DATABASE;
use texlab_syntax::*;
use texlab_workspace::*;
/// Completes document class names inside `\documentclass{…}`.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexClassImportProvider;
impl FeatureProvider for LatexClassImportProvider {
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;
    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        // Thin wrapper: the shared `import` helper does the work.
        import(request, LatexIncludeKind::Class, factory::class).await
    }
}
/// Completes package names inside `\usepackage{…}`.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexPackageImportProvider;
impl FeatureProvider for LatexPackageImportProvider {
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;
    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        import(request, LatexIncludeKind::Package, factory::package).await
    }
}
/// Shared implementation for class/package name completion.
///
/// `kind` selects which include commands trigger the completion and
/// which file extension (`cls` or `sty`) candidate files must carry;
/// `factory` builds the completion item for a matching file stem.
async fn import<F>(
    request: &FeatureRequest<CompletionParams>,
    kind: LatexIncludeKind,
    factory: F,
) -> Vec<CompletionItem>
where
    F: Fn(&FeatureRequest<CompletionParams>, &'static str, TextEdit) -> CompletionItem,
{
    let extension = if kind == LatexIncludeKind::Package {
        "sty"
    } else {
        "cls"
    };
    let parameters = LANGUAGE_DATA
        .include_commands
        .iter()
        .filter(|cmd| cmd.kind == kind)
        .map(|cmd| Parameter::new(&cmd.name, cmd.index));
    combinators::argument(request, parameters, async move |context| {
        let mut items = Vec::new();
        for component in &DATABASE.components {
            for file_name in &component.file_names {
                if file_name.ends_with(extension) {
                    // Strip ".<extension>" by its actual length instead of
                    // the previous hard-coded 4, so the slice stays correct
                    // should an extension of a different length ever be used.
                    let stem = &file_name[..file_name.len() - extension.len() - 1];
                    let text_edit = TextEdit::new(context.range, stem.into());
                    let item = factory(request, stem, text_edit);
                    items.push(item);
                }
            }
        }
        items
    })
    .await
}
#[cfg(test)]
mod tests {
    use super::*;
    use lsp_types::Position;
    // `\documentclass{}` offers classes (beamer) but not packages.
    #[test]
    fn test_class() {
        let items = test_feature(
            LatexClassImportProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\documentclass{}")],
                main_file: "foo.tex",
                position: Position::new(0, 15),
                ..FeatureSpec::default()
            },
        );
        assert!(items.iter().any(|item| item.label == "beamer"));
        assert!(items.iter().all(|item| item.label != "amsmath"));
    }
    // `\usepackage{}` offers packages (amsmath) but not classes.
    #[test]
    fn test_package() {
        let items = test_feature(
            LatexPackageImportProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\usepackage{}")],
                main_file: "foo.tex",
                position: Position::new(0, 12),
                ..FeatureSpec::default()
            },
        );
        assert!(items.iter().all(|item| item.label != "beamer"));
        assert!(items.iter().any(|item| item.label == "amsmath"));
    }
}

View file

@ -1,131 +0,0 @@
use super::combinators::{self, Parameter};
use crate::factory;
use futures_boxed::boxed;
use lsp_types::{CompletionItem, CompletionParams, Range, TextEdit};
use std::path::{Path, PathBuf};
use texlab_syntax::*;
use texlab_workspace::*;
use walkdir::WalkDir;
/// Completes file and directory paths inside include commands
/// (`\include`, `\input`, `\addbibresource`, …) by listing the resolved
/// directory on disk.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct LatexIncludeCompletionProvider;

impl FeatureProvider for LatexIncludeCompletionProvider {
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;

    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        let parameters = LANGUAGE_DATA
            .include_commands
            .iter()
            .map(|cmd| Parameter::new(&cmd.name, cmd.index));
        combinators::argument_word(request, parameters, async move |command, index| {
            // Paths can only be resolved for documents backed by a file.
            if !request.document().is_file() {
                return Vec::new();
            }
            let position = request.params.position;
            let mut items = Vec::new();
            let path_word = command.extract_word(index);
            // Replace only the last path segment of what was typed so far.
            let name_range = match path_word {
                Some(path_word) => Range::new_simple(
                    path_word.start().line,
                    path_word.end().character
                        - path_word.text().split('/').last().unwrap().chars().count() as u64,
                    path_word.end().line,
                    path_word.end().character,
                ),
                None => Range::new(position, position),
            };
            // `request` is already a reference — pass it directly instead
            // of the previous `&request` double reference.
            let directory = current_directory(request, &command);
            for entry in WalkDir::new(directory)
                .min_depth(1)
                .max_depth(1)
                .follow_links(false)
                .into_iter()
                .filter_map(std::result::Result::ok)
            {
                if entry.file_type().is_file() && is_included(&command, &entry.path()) {
                    let mut path = entry.into_path();
                    // Some commands (e.g. `\include`) expect the path
                    // without its extension.
                    let include_extension = LANGUAGE_DATA
                        .include_commands
                        .iter()
                        .find(|cmd| command.name.text() == cmd.name)
                        .unwrap()
                        .include_extension;
                    if !include_extension {
                        remove_extension(&mut path);
                    }
                    let text_edit = make_text_edit(name_range, &path);
                    items.push(factory::file(request, &path, text_edit));
                } else if entry.file_type().is_dir() {
                    let path = entry.into_path();
                    let text_edit = make_text_edit(name_range, &path);
                    items.push(factory::folder(request, &path, text_edit));
                }
            }
            items
        })
        .await
    }
}
/// Determines the directory that completion candidates are listed from:
/// the document's own directory, adjusted by any path prefix already
/// typed in the command's first argument.
fn current_directory(
    request: &FeatureRequest<CompletionParams>,
    command: &LatexCommand,
) -> PathBuf {
    let file_path = request.document().uri.to_file_path().unwrap();
    // Normalize Windows-style separators so segment push/pop below
    // behaves uniformly across platforms.
    let normalized = file_path.to_string_lossy().into_owned().replace('\\', "/");
    let mut dir = PathBuf::from(normalized);
    dir.pop();
    if let Some(include) = command.extract_word(0) {
        let text = include.text();
        dir.push(text);
        // A trailing slash means the typed prefix is a complete directory;
        // otherwise the last segment is a partial name and is dropped.
        if !text.ends_with('/') {
            dir.pop();
        }
    }
    dir
}
/// Checks whether `file` may be offered as a completion for `command`,
/// based on the extensions the include command accepts. `None` from
/// `extensions()` means any file is allowed.
fn is_included(command: &LatexCommand, file: &Path) -> bool {
    let include_command = LANGUAGE_DATA
        .include_commands
        .iter()
        .find(|cmd| command.name.text() == cmd.name)
        .unwrap();
    match include_command.kind.extensions() {
        None => true,
        Some(allowed) => file
            .extension()
            .map(|extension| extension.to_string_lossy().to_lowercase())
            .map_or(false, |extension| allowed.contains(&extension.as_str())),
    }
}
/// Strips the file extension from `path` in place, e.g. `dir/foo.tex`
/// becomes `dir/foo`. Paths without an extension (or without a file
/// name) are left unchanged.
fn remove_extension(path: &mut PathBuf) {
    // `set_extension("")` removes an existing extension and is a no-op
    // otherwise, replacing the previous manual stem/pop/push dance.
    path.set_extension("");
}
/// Builds the edit inserting the final segment of `path` over `range`.
/// Panics if `path` has no file name component (callers only pass
/// directory entries, which always do).
fn make_text_edit(range: Range, path: &Path) -> TextEdit {
    let name = path.file_name().unwrap();
    TextEdit::new(range, name.to_string_lossy().into_owned().into())
}

View file

@ -1,143 +0,0 @@
use super::combinators::{self, ArgumentContext, Parameter};
use crate::factory;
use futures_boxed::boxed;
use lsp_types::*;
use texlab_syntax::*;
use texlab_workspace::*;
/// Completes label names inside reference commands (`\ref`, `\eqref`, …)
/// from label definitions in all related documents, filtered by the
/// reference kind (e.g. `\eqref` only sees labels in math environments).
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct LatexLabelCompletionProvider;
impl FeatureProvider for LatexLabelCompletionProvider {
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;
    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        // Only reference-kind label commands trigger this completion;
        // definition commands (`\label`) do not.
        let parameters = LANGUAGE_DATA
            .label_commands
            .iter()
            .filter(|cmd| cmd.kind.is_reference())
            .map(|cmd| Parameter::new(&cmd.name, cmd.index));
        combinators::argument(request, parameters, async move |context| {
            let outline = Outline::from(&request.view);
            let source = Self::find_source(&context);
            let mut items = Vec::new();
            for document in request.related_documents() {
                if let SyntaxTree::Latex(tree) = &document.tree {
                    for label in tree
                        .labels
                        .iter()
                        .filter(|label| label.kind == LatexLabelKind::Definition)
                        .filter(|label| Self::is_included(tree, label, source))
                    {
                        // Outline context supplies section/caption info for
                        // the item's documentation.
                        let outline_ctx = OutlineContext::find(&outline, &document, label.start());
                        for name in label.names() {
                            let text = name.text().to_owned();
                            let text_edit = TextEdit::new(context.range, text.clone().into());
                            let item =
                                factory::label(request, text.into(), text_edit, &outline_ctx);
                            items.push(item);
                        }
                    }
                }
            }
            items
        })
        .await
    }
}
impl LatexLabelCompletionProvider {
    /// Looks up which label source (everything vs. math-only) the matched
    /// reference command accepts.
    fn find_source(context: &ArgumentContext) -> LatexLabelReferenceSource {
        match LANGUAGE_DATA
            .label_commands
            .iter()
            .find(|cmd| cmd.name == context.parameter.name && cmd.index == context.parameter.index)
            .map(|cmd| cmd.kind)
            .unwrap()
        {
            // Definition commands were filtered out in `execute`, so this
            // arm cannot be reached.
            LatexLabelKind::Definition => unreachable!(),
            LatexLabelKind::Reference(source) => source,
        }
    }
    /// Decides whether `label` is visible to a reference of the given
    /// `source` kind; math-only references require the label to lie inside
    /// a math environment.
    fn is_included(
        tree: &LatexSyntaxTree,
        label: &LatexLabel,
        source: LatexLabelReferenceSource,
    ) -> bool {
        match source {
            LatexLabelReferenceSource::Everything => true,
            LatexLabelReferenceSource::Math => tree
                .environments
                .iter()
                .filter(|env| env.left.is_math())
                .any(|env| env.range().contains_exclusive(label.start())),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use lsp_types::Position;
    // Labels from included files are offered, in definition order.
    #[test]
    fn test_inside_of_ref() {
        let items = test_feature(
            LatexLabelCompletionProvider,
            FeatureSpec {
                files: vec![
                    FeatureSpec::file(
                        "foo.tex",
                        "\\addbibresource{bar.bib}\\include{baz}\n\\ref{}",
                    ),
                    FeatureSpec::file("bar.bib", ""),
                    FeatureSpec::file("baz.tex", "\\label{foo}\\label{bar}\\ref{baz}"),
                ],
                main_file: "foo.tex",
                position: Position::new(1, 5),
                ..FeatureSpec::default()
            },
        );
        let labels: Vec<&str> = items.iter().map(|item| item.label.as_ref()).collect();
        assert_eq!(labels, vec!["foo", "bar"]);
    }
    // Past the closing brace: no completion.
    #[test]
    fn test_outside_of_ref() {
        let items = test_feature(
            LatexLabelCompletionProvider,
            FeatureSpec {
                files: vec![
                    FeatureSpec::file("foo.tex", "\\include{bar}\\ref{}"),
                    FeatureSpec::file("bar.tex", "\\label{foo}\\label{bar}"),
                ],
                main_file: "foo.tex",
                position: Position::new(1, 6),
                ..FeatureSpec::default()
            },
        );
        assert!(items.is_empty());
    }
    // `\eqref` only offers labels defined inside math environments.
    #[test]
    fn test_eqref() {
        let items = test_feature(
            LatexLabelCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file(
                    "foo.tex",
                    "\\begin{align}\\label{foo}\\end{align}\\label{bar}\n\\eqref{}",
                )],
                main_file: "foo.tex",
                position: Position::new(1, 7),
                ..FeatureSpec::default()
            },
        );
        let labels: Vec<&str> = items.iter().map(|item| item.label.as_ref()).collect();
        assert_eq!(labels, vec!["foo"]);
    }
}

View file

@ -1,13 +0,0 @@
// LaTeX completion providers, one module per completion source.
pub mod argument;
pub mod begin_command;
pub mod citation;
pub mod color;
pub mod color_model;
pub mod combinators;
pub mod component;
pub mod import;
pub mod include;
pub mod label;
pub mod theorem;
pub mod tikz;
pub mod user;

View file

@ -1,67 +0,0 @@
use super::combinators;
use crate::factory::{self, LatexComponentId};
use futures_boxed::boxed;
use lsp_types::{CompletionItem, CompletionParams, TextEdit};
use texlab_syntax::*;
use texlab_workspace::*;
/// Completes environment names declared with `\newtheorem` in any
/// related document.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct LatexTheoremEnvironmentCompletionProvider;
impl FeatureProvider for LatexTheoremEnvironmentCompletionProvider {
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;
    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        combinators::environment(request, async move |context| {
            let mut items = Vec::new();
            for document in request.related_documents() {
                if let SyntaxTree::Latex(tree) = &document.tree {
                    for theorem in &tree.theorem_definitions {
                        let name = theorem.name().text().to_owned();
                        let text_edit = TextEdit::new(context.range, name.clone().into());
                        // User-defined theorems are attributed to the
                        // `User` component, like other local definitions.
                        let item = factory::environment(
                            request,
                            name.into(),
                            text_edit,
                            &LatexComponentId::User,
                        );
                        items.push(item);
                    }
                }
            }
            items
        })
        .await
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use lsp_types::{Position, Range};
    use std::borrow::Cow;
    // A `\newtheorem{theorem}` definition is offered inside `\begin{th}`,
    // replacing the partial name.
    #[test]
    fn test() {
        let items = test_feature(
            LatexTheoremEnvironmentCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file(
                    "foo.tex",
                    "\\newtheorem{theorem}{Theorem}\n\\begin{th}",
                )],
                main_file: "foo.tex",
                position: Position::new(1, 8),
                ..FeatureSpec::default()
            },
        );
        assert_eq!(items.len(), 1);
        assert_eq!(items[0].label, Cow::from("theorem"));
        assert_eq!(
            items[0].text_edit.as_ref().map(|edit| edit.range),
            Some(Range::new_simple(1, 7, 1, 9))
        );
    }
}

View file

@ -1,86 +0,0 @@
use super::combinators::{self, Parameter};
use crate::factory;
use futures_boxed::boxed;
use lsp_types::{CompletionItem, CompletionParams, TextEdit};
use texlab_syntax::LANGUAGE_DATA;
use texlab_workspace::*;
/// Completes library names inside `\usepgflibrary{…}` from the static
/// list in the language data.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct LatexPgfLibraryCompletionProvider;
impl FeatureProvider for LatexPgfLibraryCompletionProvider {
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;
    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        // Exactly one command triggers this provider, hence `once`.
        let parameter = Parameter::new("\\usepgflibrary", 0);
        combinators::argument(request, std::iter::once(parameter), async move |context| {
            let mut items = Vec::new();
            for name in &LANGUAGE_DATA.pgf_libraries {
                let text_edit = TextEdit::new(context.range, name.into());
                let item = factory::pgf_library(request, name, text_edit);
                items.push(item);
            }
            items
        })
        .await
    }
}
/// Completes library names inside `\usetikzlibrary{…}` from the static
/// list in the language data.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct LatexTikzLibraryCompletionProvider;
impl FeatureProvider for LatexTikzLibraryCompletionProvider {
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;
    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        let parameter = Parameter::new("\\usetikzlibrary", 0);
        combinators::argument(request, std::iter::once(parameter), async move |context| {
            let mut items = Vec::new();
            for name in &LANGUAGE_DATA.tikz_libraries {
                let text_edit = TextEdit::new(context.range, name.into());
                let item = factory::tikz_library(request, name, text_edit);
                items.push(item);
            }
            items
        })
        .await
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use lsp_types::Position;
    #[test]
    fn test_pgf_library() {
        let items = test_feature(
            LatexPgfLibraryCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\usepgflibrary{}")],
                main_file: "foo.tex",
                position: Position::new(0, 15),
                ..FeatureSpec::default()
            },
        );
        assert!(!items.is_empty());
    }
    #[test]
    fn test_tikz_library() {
        let items = test_feature(
            LatexTikzLibraryCompletionProvider,
            FeatureSpec {
                files: vec![FeatureSpec::file("foo.tex", "\\usetikzlibrary{}")],
                main_file: "foo.tex",
                position: Position::new(0, 16),
                ..FeatureSpec::default()
            },
        );
        assert!(!items.is_empty());
    }
}

View file

@ -1,152 +0,0 @@
use super::combinators;
use crate::factory::{self, LatexComponentId};
use futures_boxed::boxed;
use itertools::Itertools;
use lsp_types::*;
use texlab_syntax::*;
use texlab_workspace::*;
/// Completes command names defined or used anywhere in the related
/// documents (deduplicated), excluding the command currently being typed.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct LatexUserCommandCompletionProvider;
impl FeatureProvider for LatexUserCommandCompletionProvider {
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;
    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        combinators::command(request, async move |current_command| {
            let mut items = Vec::new();
            for document in request.related_documents() {
                if let SyntaxTree::Latex(tree) = &document.tree {
                    tree.commands
                        .iter()
                        // Exclude the occurrence under the cursor itself.
                        .filter(|command| command.range() != current_command.range())
                        // Drop the leading backslash from the label text.
                        .map(|command| &command.name.text()[1..])
                        .unique()
                        .map(|command| {
                            let text_edit = TextEdit::new(
                                current_command.short_name_range(),
                                command.to_owned().into(),
                            );
                            factory::command(
                                request,
                                command.to_owned().into(),
                                None,
                                text_edit,
                                &LatexComponentId::User,
                            )
                        })
                        .for_each(|item| items.push(item));
                }
            }
            items
        })
        .await
    }
}
/// Completes environment names from `\begin`/`\end` pairs appearing in
/// the related documents, excluding the pair currently being edited.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub struct LatexUserEnvironmentCompletionProvider;
impl FeatureProvider for LatexUserEnvironmentCompletionProvider {
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;
    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        combinators::environment(request, async move |context| {
            let mut items = Vec::new();
            for document in request.related_documents() {
                if let SyntaxTree::Latex(tree) = &document.tree {
                    for environment in &tree.environments {
                        // Skip the environment whose delimiter is being
                        // completed right now.
                        if environment.left.command == context.command
                            || environment.right.command == context.command
                        {
                            continue;
                        }
                        // Offer both delimiters; either may carry the name.
                        if let Some(item) =
                            Self::make_item(request, &environment.left, context.range)
                        {
                            items.push(item);
                        }
                        if let Some(item) =
                            Self::make_item(request, &environment.right, context.range)
                        {
                            items.push(item);
                        }
                    }
                }
            }
            items
        })
        .await
    }
}
impl LatexUserEnvironmentCompletionProvider {
    /// Builds a completion item for one `\begin`/`\end` delimiter, or
    /// `None` when the delimiter carries no environment name
    /// (e.g. `\begin{}`).
    fn make_item(
        request: &FeatureRequest<CompletionParams>,
        delimiter: &LatexEnvironmentDelimiter,
        name_range: Range,
    ) -> Option<CompletionItem> {
        delimiter.name().map(|name| {
            let text = name.text().to_owned();
            let text_edit = TextEdit::new(name_range, text.clone().into());
            factory::environment(request, text.into(), text_edit, &LatexComponentId::User)
        })
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use lsp_types::Position;
    // Commands from the current file and included files are offered;
    // unrelated files (baz.tex) are not.
    #[test]
    fn test_command() {
        let items = test_feature(
            LatexUserCommandCompletionProvider,
            FeatureSpec {
                files: vec![
                    FeatureSpec::file("foo.tex", "\\include{bar.tex}\n\\foo"),
                    FeatureSpec::file("bar.tex", "\\bar"),
                    FeatureSpec::file("baz.tex", "\\baz"),
                ],
                main_file: "foo.tex",
                position: Position::new(1, 2),
                ..FeatureSpec::default()
            },
        );
        let labels: Vec<&str> = items.iter().map(|item| item.label.as_ref()).collect();
        assert_eq!(labels, vec!["include", "bar"]);
    }
    // Environments from included files are offered; the one being typed
    // and unrelated files are not.
    #[test]
    fn test_environment() {
        let items = test_feature(
            LatexUserEnvironmentCompletionProvider,
            FeatureSpec {
                files: vec![
                    FeatureSpec::file("foo.tex", "\\include{bar.tex}\n\\begin{foo}"),
                    FeatureSpec::file("bar.tex", "\\begin{bar}\\end{bar}"),
                    FeatureSpec::file("baz.tex", "\\begin{baz}\\end{baz}"),
                ],
                main_file: "foo.tex",
                position: Position::new(1, 9),
                ..FeatureSpec::default()
            },
        );
        let labels: Vec<&str> = items
            .iter()
            .map(|item| item.label.as_ref())
            .unique()
            .collect();
        assert_eq!(labels, vec!["bar"]);
    }
}

View file

@ -1,104 +0,0 @@
#![feature(async_await, async_closure)]
mod bibtex;
mod factory;
mod latex;
mod preselect;
mod quality;
use self::bibtex::command::BibtexCommandCompletionProvider;
use self::bibtex::entry_type::BibtexEntryTypeCompletionProvider;
use self::bibtex::field_name::BibtexFieldNameCompletionProvider;
pub use self::factory::CompletionItemData;
use self::latex::argument::LatexArgumentCompletionProvider;
use self::latex::begin_command::LatexBeginCommandCompletionProvider;
use self::latex::citation::LatexCitationCompletionProvider;
use self::latex::color::LatexColorCompletionProvider;
use self::latex::color_model::LatexColorModelCompletionProvider;
use self::latex::component::*;
use self::latex::import::{LatexClassImportProvider, LatexPackageImportProvider};
use self::latex::include::LatexIncludeCompletionProvider;
use self::latex::label::LatexLabelCompletionProvider;
use self::latex::theorem::LatexTheoremEnvironmentCompletionProvider;
use self::latex::tikz::*;
use self::latex::user::*;
use self::preselect::PreselectCompletionProvider;
use self::quality::OrderByQualityCompletionProvider;
use futures_boxed::boxed;
use itertools::Itertools;
use lsp_types::{CompletionItem, CompletionParams};
use std::hash::{Hash, Hasher};
use texlab_workspace::*;
/// Maximum number of completion items returned to the client.
pub const COMPLETION_LIMIT: usize = 50;
/// Merges the results of all registered completion providers into one list.
type MergeProvider = ConcatProvider<CompletionParams, CompletionItem>;
/// Top-level completion entry point: merged providers, wrapped with
/// preselection and quality ordering.
pub struct CompletionProvider {
    provider: OrderByQualityCompletionProvider<PreselectCompletionProvider<MergeProvider>>,
}
impl CompletionProvider {
    /// Assembles the full completion pipeline.
    ///
    /// The providers are concatenated (list order determines result order),
    /// then wrapped with preselection and quality-based ordering.
    pub fn new() -> Self {
        let merged = ConcatProvider::new(vec![
            Box::new(BibtexEntryTypeCompletionProvider),
            Box::new(BibtexFieldNameCompletionProvider),
            Box::new(BibtexCommandCompletionProvider),
            Box::new(LatexPgfLibraryCompletionProvider),
            Box::new(LatexTikzLibraryCompletionProvider),
            Box::new(LatexColorCompletionProvider),
            Box::new(LatexColorModelCompletionProvider),
            Box::new(LatexArgumentCompletionProvider),
            Box::new(LatexComponentEnvironmentCompletionProvider),
            Box::new(LatexTheoremEnvironmentCompletionProvider),
            Box::new(LatexLabelCompletionProvider),
            Box::new(LatexCitationCompletionProvider),
            Box::new(LatexIncludeCompletionProvider),
            Box::new(LatexClassImportProvider),
            Box::new(LatexPackageImportProvider),
            Box::new(LatexBeginCommandCompletionProvider),
            Box::new(LatexComponentCommandCompletionProvider),
            Box::new(LatexUserCommandCompletionProvider),
            Box::new(LatexUserEnvironmentCompletionProvider),
        ]);
        let preselect = PreselectCompletionProvider::new(merged);
        Self {
            provider: OrderByQualityCompletionProvider::new(preselect),
        }
    }
}
impl FeatureProvider for CompletionProvider {
type Params = CompletionParams;
type Output = Vec<CompletionItem>;
#[boxed]
async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
self.provider
.execute(request)
.await
.into_iter()
.map(LabeledCompletionItem)
.unique()
.map(|item| item.0)
.take(COMPLETION_LIMIT)
.collect()
}
}
/// Wrapper that compares and hashes completion items by label only,
/// so `unique()` can deduplicate them.
#[derive(Debug, Clone)]
struct LabeledCompletionItem(CompletionItem);
impl PartialEq for LabeledCompletionItem {
    /// Two items are considered equal when their labels match.
    fn eq(&self, other: &Self) -> bool {
        self.0.label.eq(&other.0.label)
    }
}
// Sound: equality is a pure, reflexive label comparison.
impl Eq for LabeledCompletionItem {}
impl Hash for LabeledCompletionItem {
    /// Hashes only the label, consistent with the `PartialEq` implementation.
    fn hash<H: Hasher>(&self, state: &mut H) {
        Hash::hash(&self.0.label, state);
    }
}

View file

@ -1,52 +0,0 @@
use futures_boxed::boxed;
use lsp_types::{CompletionItem, CompletionParams};
use texlab_syntax::*;
use texlab_workspace::*;
/// Decorator that marks the matching environment name as preselected
/// when completing inside an `\end{...}` argument.
#[derive(Debug)]
pub struct PreselectCompletionProvider<F> {
    provider: F,
}
impl<F> PreselectCompletionProvider<F> {
pub fn new(provider: F) -> Self {
Self { provider }
}
}
impl<F> FeatureProvider for PreselectCompletionProvider<F>
where
    F: FeatureProvider<Params = CompletionParams, Output = Vec<CompletionItem>> + Send + Sync,
{
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;

    /// Runs the inner provider; when the cursor sits inside an `\end{...}`
    /// argument, the item matching the enclosing `\begin{...}` name is
    /// preselected and all others are explicitly deselected.
    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        let mut items = self.provider.execute(request).await;
        if let SyntaxTree::Latex(tree) = &request.document().tree {
            for environment in &tree.environments {
                let name = match environment.left.name() {
                    Some(name) => name,
                    None => continue,
                };
                // NOTE(review): the original indexed `args[0]` directly; use
                // `first()` so a malformed `\end` without an argument group
                // cannot panic the server.
                let right_args = match environment.right.command.args.first() {
                    Some(args) => args,
                    None => continue,
                };
                // Cursor is strictly inside the argument group, or at the end
                // of a group that is still missing its closing brace.
                let cond1 = right_args
                    .range()
                    .contains_exclusive(request.params.position);
                let cond2 = right_args.right.is_none()
                    && right_args.range().contains(request.params.position);
                if cond1 || cond2 {
                    for item in &mut items {
                        item.preselect = Some(false);
                        if item.label == name.text() {
                            item.preselect = Some(true);
                            break;
                        }
                    }
                }
            }
        }
        items
    }
}

View file

@ -1,121 +0,0 @@
use futures_boxed::boxed;
use lsp_types::{CompletionItem, CompletionParams, Position};
use std::borrow::Cow;
use texlab_syntax::*;
use texlab_workspace::*;
/// Decorator that sorts completion items so the best matches for the text
/// around the cursor come first.
pub struct OrderByQualityCompletionProvider<F> {
    pub provider: F,
}
impl<F> OrderByQualityCompletionProvider<F> {
pub fn new(provider: F) -> Self {
Self { provider }
}
}
impl<F> FeatureProvider for OrderByQualityCompletionProvider<F>
where
    F: FeatureProvider<Params = CompletionParams, Output = Vec<CompletionItem>> + Send + Sync,
{
    type Params = CompletionParams;
    type Output = Vec<CompletionItem>;

    /// Fetches the items from the inner provider and sorts them by descending
    /// match quality against the text at the cursor.
    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output {
        let query = Self::get_query(request.document(), request.params.position);
        let mut items = self.provider.execute(request).await;
        items.sort_by_key(|item| std::cmp::Reverse(Self::get_quality(&query, item)));
        items
    }
}
impl<F> OrderByQualityCompletionProvider<F> {
    /// Extracts the text the user is currently typing at `position`.
    ///
    /// Returns `None` when no syntax node covers the position; an empty
    /// string means "match everything".
    fn get_query(document: &Document, position: Position) -> Option<Cow<str>> {
        match &document.tree {
            SyntaxTree::Latex(tree) => {
                // Prefer the command under the cursor, otherwise the innermost node.
                let node = tree
                    .find_command_by_name(position)
                    .map(LatexNode::Command)
                    .or_else(|| tree.find(position).into_iter().last())?;
                match node {
                    LatexNode::Root(_) | LatexNode::Group(_) => Some("".into()),
                    // Strip the leading backslash from command names.
                    LatexNode::Command(command) => Some(command.name.text()[1..].to_owned().into()),
                    LatexNode::Text(text) => text
                        .words
                        .iter()
                        .find(|word| word.range().contains(position))
                        .map(|word| word.text().to_owned().into()),
                    LatexNode::Comma(_) => Some(",".into()),
                    LatexNode::Math(math) => Some(math.token.text().to_owned().into()),
                }
            }
            SyntaxTree::Bibtex(tree) => {
                // Declaration types (e.g. `@article`) match without the leading '@',
                // but only when the cursor is actually on the type token.
                fn get_type_query(ty: &BibtexToken, position: Position) -> Option<Cow<str>> {
                    if ty.range().contains(position) {
                        Some((&ty.text()[1..]).into())
                    } else {
                        Some("".into())
                    }
                }
                match tree.find(position).pop()? {
                    BibtexNode::Root(_) => Some("".into()),
                    BibtexNode::Preamble(preamble) => get_type_query(&preamble.ty, position),
                    BibtexNode::String(string) => get_type_query(&string.ty, position),
                    BibtexNode::Entry(entry) => get_type_query(&entry.ty, position),
                    BibtexNode::Comment(comment) => Some(comment.token.text().into()),
                    BibtexNode::Field(field) => {
                        if field.name.range().contains(position) {
                            Some(field.name.text().into())
                        } else {
                            Some("".into())
                        }
                    }
                    BibtexNode::Word(word) => Some(word.token.text().into()),
                    BibtexNode::Command(command) => Some((&command.token.text()[1..]).into()),
                    BibtexNode::QuotedContent(_)
                    | BibtexNode::BracedContent(_)
                    | BibtexNode::Concat(_) => Some("".into()),
                }
            }
        }
    }

    /// Scores how well `item` matches `query`; larger is better:
    /// 8 = preselected, 7 = exact, 6 = exact (case-insensitive), 5 = prefix,
    /// 4 = prefix (case-insensitive), 3 = substring, 2 = substring
    /// (case-insensitive), 1 = no match, 0 = no query available.
    fn get_quality(query: &Option<Cow<str>>, item: &CompletionItem) -> i32 {
        if item.preselect == Some(true) {
            return 8;
        }
        let query: &str = match query {
            Some(query) => query.as_ref(),
            None => return 0,
        };
        let label = &item.label;
        if label == query {
            return 7;
        }
        // Case-fold once instead of once per comparison.
        let label_lower = label.to_lowercase();
        let query_lower = query.to_lowercase();
        if label_lower == query_lower {
            return 6;
        }
        if label.starts_with(query) {
            return 5;
        }
        if label_lower.starts_with(&query_lower) {
            return 4;
        }
        if label.contains(query) {
            return 3;
        }
        if label_lower.contains(&query_lower) {
            return 2;
        }
        1
    }
}

View file

@ -1,16 +0,0 @@
[package]
name = "texlab-completion-data"
version = "0.1.0"
authors = [
"Eric Förster <efoerster@users.noreply.github.com>",
"Patrick Förster <pfoerster@users.noreply.github.com>"]
edition = "2018"
[dependencies]
itertools = "0.8.0"
lsp-types = { git = "https://github.com/latex-lsp/lsp-types", rev = "9fcc5d9b9d3013ce84e20ef566267754d594b268", features = ["proposed"] }
once_cell = "0.2.2"
serde = { version = "1.0.97", features = ["derive", "rc"] }
serde_json = "1.0.40"
texlab-syntax = { path = "../texlab_syntax" }
texlab-workspace = { path = "../texlab_workspace" }

File diff suppressed because one or more lines are too long

View file

@ -1,119 +0,0 @@
use itertools::Itertools;
use lsp_types::{MarkupContent, MarkupKind};
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
use std::sync::Arc;
use texlab_syntax::*;
use texlab_workspace::Document;
/// The embedded completion database: known LaTeX components plus
/// documentation metadata for primitives.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Database {
    pub components: Vec<Component>,
    pub metadata: Vec<Metadata>,
}
impl Database {
    /// Finds the component that provides the given file name
    /// (e.g. `"amsmath.sty"`).
    pub fn find(&self, name: &str) -> Option<&Component> {
        self.components.iter().find(|component| {
            component
                .file_names
                .iter()
                .any(|file_name| file_name == name)
        })
    }

    /// The kernel component, identified by having no file names.
    ///
    /// Panics if the embedded database lacks it (broken `completion.json`).
    pub fn kernel(&self) -> &Component {
        self.components
            .iter()
            .find(|component| component.file_names.is_empty())
            .expect("completion database must contain a kernel component")
    }

    /// All components referenced by the given documents, always including the
    /// kernel, deduplicated by file names.
    pub fn related_components(&self, documents: &[Arc<Document>]) -> Vec<&Component> {
        let mut start_components = vec![self.kernel()];
        for document in documents {
            if let SyntaxTree::Latex(tree) = &document.tree {
                tree.components
                    .iter()
                    .flat_map(|file| self.find(file))
                    .for_each(|component| start_components.push(component))
            }
        }
        // NOTE(review): only one level of references is resolved here;
        // references of references are not followed.
        let mut all_components = Vec::new();
        for component in start_components {
            all_components.push(component);
            component
                .references
                .iter()
                .flat_map(|file| self.find(file))
                .for_each(|component| all_components.push(component))
        }
        all_components
            .into_iter()
            .unique_by(|component| &component.file_names)
            .collect()
    }

    /// Whether any component provides the given file name.
    pub fn exists(&self, file_name: &str) -> bool {
        self.components
            .iter()
            .any(|component| component.file_names.iter().any(|f| f == file_name))
    }

    /// Plain-text documentation for the given primitive, if metadata with a
    /// description exists.
    pub fn documentation(&self, name: &str) -> Option<MarkupContent> {
        let metadata = self
            .metadata
            .iter()
            .find(|metadata| metadata.name == name)?;
        let desc = metadata.description.clone()?;
        Some(MarkupContent {
            kind: MarkupKind::PlainText,
            value: desc,
        })
    }
}
/// A LaTeX package/class: the files that provide it, the components it
/// references, and the commands and environments it defines.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Component {
    pub file_names: Vec<String>,
    pub references: Vec<String>,
    pub commands: Vec<Command>,
    pub environments: Vec<String>,
}
/// A command defined by a component, with optional rendered image and
/// its parameter lists.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Command {
    pub name: String,
    pub image: Option<String>,
    pub parameters: Vec<Parameter>,
}
/// One parameter of a command: the arguments that can fill it.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Parameter(pub Vec<Argument>);
/// A possible argument value, with an optional rendered image.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Argument {
    pub name: String,
    pub image: Option<String>,
}
/// Documentation metadata for a primitive: caption and description text.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Metadata {
    pub name: String,
    pub caption: Option<String>,
    pub description: Option<String>,
}
// Raw embedded completion database, generated at build time.
const JSON: &str = include_str!("../completion.json");
/// Lazily parsed database; panics on first access if the embedded JSON is invalid.
pub static DATABASE: Lazy<Database> = Lazy::new(|| serde_json::from_str(JSON).unwrap());

View file

@ -1,14 +0,0 @@
[package]
name = "texlab-formatting"
version = "0.1.0"
authors = [
"Eric Förster <efoerster@users.noreply.github.com>",
"Patrick Förster <pfoerster@users.noreply.github.com>"]
edition = "2018"
[dependencies]
serde = { version = "1.0.97", features = ["derive", "rc"] }
texlab-syntax = { path = "../texlab_syntax" }
[dev-dependencies]
indoc = "0.3.1"

View file

@ -1,370 +0,0 @@
use serde::{Deserialize, Serialize};
use texlab_syntax::*;
/// User-configurable BibTeX formatting options.
#[derive(Debug, PartialEq, Eq, Clone, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct BibtexFormattingOptions {
    // Maximum line length; `None` or non-positive disables wrapping.
    pub line_length: Option<i32>,
}
/// Complete formatting configuration: editor settings plus user options.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct BibtexFormattingParams {
    pub tab_size: usize,
    pub insert_spaces: bool,
    pub options: BibtexFormattingOptions,
}
impl BibtexFormattingParams {
    /// Effective maximum line length (default 120); non-positive values
    /// disable wrapping entirely.
    pub fn line_length(&self) -> i32 {
        match self.options.line_length.unwrap_or(120) {
            length if length <= 0 => std::i32::MAX,
            length => length,
        }
    }
}
impl Default for BibtexFormattingParams {
    /// Four-space indentation with default options.
    fn default() -> Self {
        Self {
            tab_size: 4,
            insert_spaces: true,
            options: BibtexFormattingOptions::default(),
        }
    }
}
/// Stateful formatter that accumulates the formatted text in `output`.
struct BibtexFormatter<'a> {
    params: &'a BibtexFormattingParams,
    // One indentation unit (spaces or a tab), precomputed from `params`.
    indent: String,
    output: String,
}
impl<'a> BibtexFormatter<'a> {
    /// Creates a formatter; the indentation unit is derived from the params.
    fn new(params: &'a BibtexFormattingParams) -> Self {
        // `str::repeat` instead of a manual character-push loop.
        let indent = if params.insert_spaces {
            " ".repeat(params.tab_size)
        } else {
            "\t".into()
        };
        Self {
            params,
            indent,
            output: String::new(),
        }
    }

    /// Comments are emitted verbatim.
    fn format_comment(&mut self, comment: &BibtexComment) {
        self.output.push_str(comment.token.text());
    }

    /// Formats `@preamble{...}`; the closing brace is only emitted when
    /// content is present (mirrors parser error recovery).
    fn format_preamble(&mut self, preamble: &BibtexPreamble) {
        self.format_token(&preamble.ty);
        self.output.push('{');
        if let Some(ref content) = preamble.content {
            self.format_content(content, self.output.chars().count());
            self.output.push('}');
        }
    }

    /// Formats `@string{name = value}`.
    fn format_string(&mut self, string: &BibtexString) {
        self.format_token(&string.ty);
        self.output.push('{');
        if let Some(ref name) = string.name {
            self.output.push_str(name.text());
            self.output.push_str(" = ");
            if let Some(ref value) = string.value {
                self.format_content(value, self.output.chars().count());
                self.output.push('}');
            }
        }
    }

    /// Formats an entry with one field per line and a trailing comma each.
    fn format_entry(&mut self, entry: &BibtexEntry) {
        self.format_token(&entry.ty);
        self.output.push('{');
        if let Some(ref key) = entry.key {
            self.output.push_str(key.text());
            self.output.push(',');
            self.output.push('\n');
            for field in &entry.fields {
                self.format_field(field);
            }
            self.output.push('}');
        }
    }

    /// Formats `    name = content,` indented by one unit.
    fn format_field(&mut self, field: &BibtexField) {
        self.output.push_str(self.indent.as_ref());
        self.format_token(&field.name);
        self.output.push_str(" = ");
        let count = field.name.text().chars().count();
        // Continuation lines align with the first character of the content.
        let align = self.params.tab_size as usize + count + 3;
        if let Some(ref content) = field.content {
            self.format_content(content, align);
            self.output.push(',');
            self.output.push('\n');
        }
    }

    /// Re-flows the content tokens, wrapping at the configured line length.
    fn format_content(&mut self, content: &BibtexContent, align: usize) {
        let mut analyzer = BibtexContentAnalyzer::new();
        content.accept(&mut analyzer);
        // Every content variant pushes at least one token into the analyzer
        // (see `BibtexContentAnalyzer`), so indexing `tokens[0]` is safe.
        let tokens = analyzer.tokens;
        self.output.push_str(tokens[0].text());
        let mut length = align + tokens[0].text().chars().count();
        for i in 1..tokens.len() {
            let previous = tokens[i - 1];
            let current = tokens[i];
            let current_length = current.text().chars().count();
            let insert_space = Self::should_insert_space(previous, current);
            let space_length = if insert_space { 1 } else { 0 };
            // Wrap before the token would exceed the configured line length.
            if length + current_length + space_length > self.params.line_length() as usize {
                self.output.push('\n');
                self.output.push_str(self.indent.as_ref());
                for _ in 0..=align - self.params.tab_size {
                    self.output.push(' ');
                }
                length = align;
            } else if insert_space {
                self.output.push(' ');
                length += 1;
            }
            self.output.push_str(current.text());
            length += current_length;
        }
    }

    /// Declaration types and field names are normalized to lower case.
    fn format_token(&mut self, token: &BibtexToken) {
        self.output.push_str(token.text().to_lowercase().as_ref());
    }

    /// A separating space is preserved when the tokens were separated
    /// (different lines or a gap) in the source.
    fn should_insert_space(previous: &BibtexToken, current: &BibtexToken) -> bool {
        previous.start().line != current.start().line
            || previous.end().character < current.start().character
    }
}
/// Visitor that flattens a content tree into its tokens in source order.
struct BibtexContentAnalyzer<'a> {
    pub tokens: Vec<&'a BibtexToken>,
}
impl<'a> BibtexContentAnalyzer<'a> {
pub fn new() -> Self {
BibtexContentAnalyzer { tokens: Vec::new() }
}
}
/// Collects tokens in source order; declaration-level nodes contribute
/// nothing because only content is analyzed.
impl<'a> BibtexVisitor<'a> for BibtexContentAnalyzer<'a> {
    fn visit_root(&mut self, _root: &'a BibtexRoot) {}
    fn visit_comment(&mut self, _comment: &'a BibtexComment) {}
    fn visit_preamble(&mut self, _preamble: &'a BibtexPreamble) {}
    fn visit_string(&mut self, _string: &'a BibtexString) {}
    fn visit_entry(&mut self, _entry: &'a BibtexEntry) {}
    fn visit_field(&mut self, _field: &'a BibtexField) {}
    fn visit_word(&mut self, word: &'a BibtexWord) {
        self.tokens.push(&word.token);
    }
    fn visit_command(&mut self, command: &'a BibtexCommand) {
        self.tokens.push(&command.token);
    }
    // Delimited content: left quote/brace, children, then the closing
    // token if the source actually contains it.
    fn visit_quoted_content(&mut self, content: &'a BibtexQuotedContent) {
        self.tokens.push(&content.left);
        BibtexWalker::walk_quoted_content(self, content);
        if let Some(ref right) = content.right {
            self.tokens.push(right);
        }
    }
    fn visit_braced_content(&mut self, content: &'a BibtexBracedContent) {
        self.tokens.push(&content.left);
        BibtexWalker::walk_braced_content(self, content);
        if let Some(ref right) = content.right {
            self.tokens.push(right);
        }
    }
    // Concatenation: left operand, `#` operator, then right operand if present.
    fn visit_concat(&mut self, concat: &'a BibtexConcat) {
        concat.left.accept(self);
        self.tokens.push(&concat.operator);
        if let Some(ref right) = concat.right {
            right.accept(self);
        }
    }
}
/// Formats a single top-level declaration by dispatching on its variant.
pub fn format_declaration(
    declaration: &BibtexDeclaration,
    params: &BibtexFormattingParams,
) -> String {
    match declaration {
        BibtexDeclaration::Comment(comment) => format_comment(comment, params),
        BibtexDeclaration::Preamble(preamble) => format_preamble(preamble, params),
        BibtexDeclaration::String(string) => format_string(string, params),
        BibtexDeclaration::Entry(entry) => format_entry(entry, params),
    }
}
/// Formats a comment declaration with the given parameters.
pub fn format_comment(comment: &BibtexComment, params: &BibtexFormattingParams) -> String {
    let mut formatter = BibtexFormatter::new(params);
    // Pass the reference directly instead of re-borrowing it (`&comment`).
    formatter.format_comment(comment);
    formatter.output
}
/// Formats a `@preamble` declaration with the given parameters.
pub fn format_preamble(preamble: &BibtexPreamble, params: &BibtexFormattingParams) -> String {
    let mut formatter = BibtexFormatter::new(params);
    // Pass the reference directly instead of re-borrowing it (`&preamble`).
    formatter.format_preamble(preamble);
    formatter.output
}
/// Formats a `@string` declaration with the given parameters.
pub fn format_string(string: &BibtexString, params: &BibtexFormattingParams) -> String {
    let mut formatter = BibtexFormatter::new(params);
    // Pass the reference directly instead of re-borrowing it (`&string`).
    formatter.format_string(string);
    formatter.output
}
/// Formats an entry declaration with the given parameters.
pub fn format_entry(entry: &BibtexEntry, params: &BibtexFormattingParams) -> String {
    let mut formatter = BibtexFormatter::new(params);
    // Pass the reference directly instead of re-borrowing it (`&entry`).
    formatter.format_entry(entry);
    formatter.output
}
#[cfg(test)]
mod tests {
    use super::*;
    use indoc::indoc;

    /// Parses `source`, formats its first declaration with the given line
    /// length (tab size 4, spaces) and compares against `expected`.
    fn verify(source: &str, expected: &str, line_length: i32) {
        let tree = BibtexSyntaxTree::from(source);
        let params = BibtexFormattingParams {
            tab_size: 4,
            insert_spaces: true,
            options: BibtexFormattingOptions {
                line_length: Some(line_length),
            },
        };
        assert_eq!(
            expected,
            format_declaration(&tree.root.children[0], &params)
        );
    }

    // Long field content is wrapped and aligned with the content start.
    #[test]
    fn test_wrap_long_lines() {
        let source =
            "@article{foo, bar = {Lorem ipsum dolor sit amet, consectetur adipiscing elit.},}";
        let expected = indoc!(
            "
            @article{foo,
                bar = {Lorem ipsum dolor
                       sit amet,
                       consectetur
                       adipiscing elit.},
            }"
        );
        verify(source, expected, 30);
    }

    // A non-positive line length disables wrapping entirely.
    #[test]
    fn test_line_length_zero() {
        let source =
            "@article{foo, bar = {Lorem ipsum dolor sit amet, consectetur adipiscing elit.},}";
        let expected = indoc!(
            "
            @article{foo,
                bar = {Lorem ipsum dolor sit amet, consectetur adipiscing elit.},
            }"
        );
        verify(source, expected, 0);
    }

    #[test]
    fn test_trailing_commas() {
        let source = "@article{foo, bar = baz}";
        let expected = indoc!(
            "
            @article{foo,
                bar = baz,
            }"
        );
        verify(source, expected, 30);
    }

    // Missing closing braces are inserted during formatting.
    #[test]
    fn test_insert_braces() {
        let source = "@article{foo, bar = baz,";
        let expected = indoc!(
            "
            @article{foo,
                bar = baz,
            }"
        );
        verify(source, expected, 30);
    }

    #[test]
    fn test_commands() {
        let source = "@article{foo, bar = \"\\baz\",}";
        let expected = indoc!(
            "@article{foo,
                bar = \"\\baz\",
            }"
        );
        verify(source, expected, 30);
    }

    #[test]
    fn test_concatenation() {
        let source = "@article{foo, bar = \"baz\" # \"qux\"}";
        let expected = indoc!(
            "
            @article{foo,
                bar = \"baz\" # \"qux\",
            }"
        );
        verify(source, expected, 30);
    }

    // Parenthesized entries are normalized to braces.
    #[test]
    fn test_parentheses() {
        let source = "@article(foo,)";
        let expected = indoc!(
            "
            @article{foo,
            }"
        );
        verify(source, expected, 30);
    }

    #[test]
    fn test_string() {
        let source = "@string{foo=\"bar\"}";
        let expected = "@string{foo = \"bar\"}";
        verify(source, expected, 30);
    }

    #[test]
    fn test_preamble() {
        let source = "@preamble{\n\"foo bar baz\"}";
        let expected = "@preamble{\"foo bar baz\"}";
        verify(source, expected, 30);
    }
}

View file

@ -1 +0,0 @@
pub mod bibtex;

View file

@ -1,15 +0,0 @@
[package]
name = "texlab-syntax"
version = "0.1.0"
authors = [
"Eric Förster <efoerster@users.noreply.github.com>",
"Patrick Förster <pfoerster@users.noreply.github.com>"]
edition = "2018"
[dependencies]
itertools = "0.8.0"
lsp-types = { git = "https://github.com/latex-lsp/lsp-types", rev = "9fcc5d9b9d3013ce84e20ef566267754d594b268", features = ["proposed"] }
once_cell = "0.2.2"
path-clean = "0.1.0"
serde = { version = "1.0.97", features = ["derive", "rc"] }
serde_json = "1.0.40"

View file

@ -1,570 +0,0 @@
use crate::text::{Span, SyntaxNode};
use lsp_types::Range;
/// The kind of a single BibTeX token produced by the lexer.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum BibtexTokenKind {
    PreambleKind,
    StringKind,
    EntryKind,
    Word,
    Command,
    Assign,
    Comma,
    Concat,
    Quote,
    BeginBrace,
    EndBrace,
    BeginParen,
    EndParen,
}
/// A lexed token: a span of source text together with its kind.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct BibtexToken {
    pub span: Span,
    pub kind: BibtexTokenKind,
}
impl BibtexToken {
pub fn new(span: Span, kind: BibtexTokenKind) -> Self {
BibtexToken { span, kind }
}
pub fn text(&self) -> &str {
&self.span.text
}
}
impl SyntaxNode for BibtexToken {
    /// The source range of the underlying span.
    fn range(&self) -> Range {
        self.span.range
    }
}
/// The root of a parsed BibTeX file: its top-level declarations.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct BibtexRoot {
    pub children: Vec<BibtexDeclaration>,
}
impl BibtexRoot {
pub fn new(children: Vec<BibtexDeclaration>) -> Self {
BibtexRoot { children }
}
}
impl SyntaxNode for BibtexRoot {
    /// Spans from the first to the last declaration; an empty file yields
    /// the zero range.
    fn range(&self) -> Range {
        match (self.children.first(), self.children.last()) {
            (Some(first), Some(last)) => Range::new(first.start(), last.end()),
            _ => Range::new_simple(0, 0, 0, 0),
        }
    }
}
/// A top-level declaration in a BibTeX file.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum BibtexDeclaration {
    Comment(Box<BibtexComment>),
    Preamble(Box<BibtexPreamble>),
    String(Box<BibtexString>),
    Entry(Box<BibtexEntry>),
}
impl BibtexDeclaration {
    /// Dispatches the matching `visit_*` method on the visitor.
    pub fn accept<'a>(&'a self, visitor: &mut BibtexVisitor<'a>) {
        match self {
            BibtexDeclaration::Comment(comment) => visitor.visit_comment(comment),
            BibtexDeclaration::Preamble(preamble) => visitor.visit_preamble(preamble),
            BibtexDeclaration::String(string) => visitor.visit_string(string),
            BibtexDeclaration::Entry(entry) => visitor.visit_entry(entry),
        }
    }
}
impl SyntaxNode for BibtexDeclaration {
    /// The precomputed range of the wrapped declaration node.
    fn range(&self) -> Range {
        match self {
            BibtexDeclaration::Comment(comment) => comment.range,
            BibtexDeclaration::Preamble(preamble) => preamble.range,
            BibtexDeclaration::String(string) => string.range,
            BibtexDeclaration::Entry(entry) => entry.range,
        }
    }
}
/// Free-standing text between declarations, kept as a single token.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct BibtexComment {
    pub range: Range,
    pub token: BibtexToken,
}
impl BibtexComment {
pub fn new(token: BibtexToken) -> Self {
BibtexComment {
range: token.range(),
token,
}
}
}
impl SyntaxNode for BibtexComment {
    // Delegates to the range computed at construction time.
    fn range(&self) -> Range {
        self.range
    }
}
/// A `@preamble{...}` declaration; trailing parts are optional to allow
/// error recovery on incomplete input.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct BibtexPreamble {
    pub range: Range,
    pub ty: BibtexToken,
    pub left: Option<BibtexToken>,
    pub content: Option<BibtexContent>,
    pub right: Option<BibtexToken>,
}
impl BibtexPreamble {
    /// Creates a preamble node; the range ends at the last token present.
    pub fn new(
        ty: BibtexToken,
        left: Option<BibtexToken>,
        content: Option<BibtexContent>,
        right: Option<BibtexToken>,
    ) -> Self {
        let end = right
            .as_ref()
            .map(|token| token.end())
            .or_else(|| content.as_ref().map(|content| content.end()))
            .or_else(|| left.as_ref().map(|token| token.end()))
            .unwrap_or_else(|| ty.end());
        Self {
            range: Range::new(ty.start(), end),
            ty,
            left,
            content,
            right,
        }
    }
}
impl SyntaxNode for BibtexPreamble {
    // Delegates to the range computed at construction time.
    fn range(&self) -> Range {
        self.range
    }
}
/// A `@string{name = value}` declaration; trailing parts are optional to
/// allow error recovery on incomplete input.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct BibtexString {
    pub range: Range,
    pub ty: BibtexToken,
    pub left: Option<BibtexToken>,
    pub name: Option<BibtexToken>,
    pub assign: Option<BibtexToken>,
    pub value: Option<BibtexContent>,
    pub right: Option<BibtexToken>,
}
impl BibtexString {
    /// Creates a string node; the range ends at the last token present.
    pub fn new(
        ty: BibtexToken,
        left: Option<BibtexToken>,
        name: Option<BibtexToken>,
        assign: Option<BibtexToken>,
        value: Option<BibtexContent>,
        right: Option<BibtexToken>,
    ) -> Self {
        let end = right
            .as_ref()
            .map(|token| token.end())
            .or_else(|| value.as_ref().map(|value| value.end()))
            .or_else(|| assign.as_ref().map(|token| token.end()))
            .or_else(|| name.as_ref().map(|token| token.end()))
            .or_else(|| left.as_ref().map(|token| token.end()))
            .unwrap_or_else(|| ty.end());
        Self {
            range: Range::new(ty.start(), end),
            ty,
            left,
            name,
            assign,
            value,
            right,
        }
    }
}
impl SyntaxNode for BibtexString {
    // Delegates to the range computed at construction time.
    fn range(&self) -> Range {
        self.range
    }
}
/// An entry like `@article{key, ...}`; trailing parts are optional to
/// allow error recovery on incomplete input.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct BibtexEntry {
    pub range: Range,
    pub ty: BibtexToken,
    pub left: Option<BibtexToken>,
    pub key: Option<BibtexToken>,
    pub comma: Option<BibtexToken>,
    pub fields: Vec<BibtexField>,
    pub right: Option<BibtexToken>,
}
impl BibtexEntry {
    /// Creates an entry node; the range ends at the last token present.
    pub fn new(
        ty: BibtexToken,
        left: Option<BibtexToken>,
        key: Option<BibtexToken>,
        comma: Option<BibtexToken>,
        fields: Vec<BibtexField>,
        right: Option<BibtexToken>,
    ) -> Self {
        let end = right
            .as_ref()
            .map(|token| token.end())
            .or_else(|| fields.last().map(|field| field.range.end))
            .or_else(|| comma.as_ref().map(|token| token.end()))
            .or_else(|| key.as_ref().map(|token| token.end()))
            .or_else(|| left.as_ref().map(|token| token.end()))
            .unwrap_or_else(|| ty.end());
        Self {
            range: Range::new(ty.start(), end),
            ty,
            left,
            key,
            comma,
            fields,
            right,
        }
    }

    /// Whether this entry is actually an `@comment` declaration.
    pub fn is_comment(&self) -> bool {
        self.ty.text().to_lowercase() == "@comment"
    }
}
impl SyntaxNode for BibtexEntry {
    // Delegates to the range computed at construction time.
    fn range(&self) -> Range {
        self.range
    }
}
/// A `name = content,` field inside an entry; trailing parts are optional
/// to allow error recovery on incomplete input.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct BibtexField {
    pub range: Range,
    pub name: BibtexToken,
    pub assign: Option<BibtexToken>,
    pub content: Option<BibtexContent>,
    pub comma: Option<BibtexToken>,
}
impl BibtexField {
    /// Creates a field node; the range ends at the last token present.
    pub fn new(
        name: BibtexToken,
        assign: Option<BibtexToken>,
        content: Option<BibtexContent>,
        comma: Option<BibtexToken>,
    ) -> Self {
        let end = comma
            .as_ref()
            .map(|token| token.end())
            .or_else(|| content.as_ref().map(|content| content.end()))
            .or_else(|| assign.as_ref().map(|token| token.end()))
            .unwrap_or_else(|| name.end());
        Self {
            range: Range::new(name.start(), end),
            name,
            assign,
            content,
            comma,
        }
    }
}
impl SyntaxNode for BibtexField {
    // Delegates to the range computed at construction time.
    fn range(&self) -> Range {
        self.range
    }
}
/// The value side of a field or string: words, commands, delimited
/// groups, or a `#` concatenation of further content.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum BibtexContent {
    Word(BibtexWord),
    Command(BibtexCommand),
    QuotedContent(BibtexQuotedContent),
    BracedContent(BibtexBracedContent),
    Concat(Box<BibtexConcat>),
}
impl BibtexContent {
    /// Dispatches the matching `visit_*` method on the visitor.
    pub fn accept<'a>(&'a self, visitor: &mut BibtexVisitor<'a>) {
        match self {
            BibtexContent::Word(word) => visitor.visit_word(word),
            BibtexContent::Command(command) => visitor.visit_command(command),
            BibtexContent::QuotedContent(content) => visitor.visit_quoted_content(content),
            BibtexContent::BracedContent(content) => visitor.visit_braced_content(content),
            BibtexContent::Concat(concat) => visitor.visit_concat(concat),
        }
    }
}
impl SyntaxNode for BibtexContent {
    /// The range of the wrapped content node.
    fn range(&self) -> Range {
        match self {
            BibtexContent::Word(word) => word.range(),
            BibtexContent::Command(command) => command.range(),
            BibtexContent::QuotedContent(content) => content.range(),
            BibtexContent::BracedContent(content) => content.range(),
            BibtexContent::Concat(concat) => concat.range(),
        }
    }
}
/// A plain word token inside content.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct BibtexWord {
    pub range: Range,
    pub token: BibtexToken,
}
impl BibtexWord {
pub fn new(token: BibtexToken) -> Self {
BibtexWord {
range: token.range(),
token,
}
}
}
impl SyntaxNode for BibtexWord {
    // Delegates to the range computed at construction time.
    fn range(&self) -> Range {
        self.range
    }
}
/// A command token (e.g. `\LaTeX`) inside content.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct BibtexCommand {
    pub range: Range,
    pub token: BibtexToken,
}
impl BibtexCommand {
pub fn new(token: BibtexToken) -> Self {
BibtexCommand {
range: token.range(),
token,
}
}
}
impl SyntaxNode for BibtexCommand {
    // Delegates to the range computed at construction time.
    fn range(&self) -> Range {
        self.range
    }
}
/// Content delimited by quotes; the closing quote may be missing on
/// incomplete input.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct BibtexQuotedContent {
    pub range: Range,
    pub left: BibtexToken,
    pub children: Vec<BibtexContent>,
    pub right: Option<BibtexToken>,
}
impl BibtexQuotedContent {
    /// Creates a quoted-content node; the range ends at the closing quote,
    /// the last child, or the opening quote — whichever exists last.
    pub fn new(
        left: BibtexToken,
        children: Vec<BibtexContent>,
        right: Option<BibtexToken>,
    ) -> Self {
        let end = right
            .as_ref()
            .map(|token| token.end())
            .or_else(|| children.last().map(|child| child.end()))
            .unwrap_or_else(|| left.end());
        Self {
            range: Range::new(left.start(), end),
            left,
            children,
            right,
        }
    }
}
impl SyntaxNode for BibtexQuotedContent {
    // Delegates to the range computed at construction time.
    fn range(&self) -> Range {
        self.range
    }
}
/// Content delimited by braces; the closing brace may be missing on
/// incomplete input.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct BibtexBracedContent {
    pub range: Range,
    pub left: BibtexToken,
    pub children: Vec<BibtexContent>,
    pub right: Option<BibtexToken>,
}
impl BibtexBracedContent {
    /// Creates a braced-content node; the range ends at the closing brace,
    /// the last child, or the opening brace — whichever exists last.
    pub fn new(
        left: BibtexToken,
        children: Vec<BibtexContent>,
        right: Option<BibtexToken>,
    ) -> Self {
        let end = right
            .as_ref()
            .map(|token| token.end())
            .or_else(|| children.last().map(|child| child.end()))
            .unwrap_or_else(|| left.end());
        Self {
            range: Range::new(left.start(), end),
            left,
            children,
            right,
        }
    }
}
impl SyntaxNode for BibtexBracedContent {
    // Delegates to the range computed at construction time.
    fn range(&self) -> Range {
        self.range
    }
}
/// A `left # right` concatenation; the right operand may be missing on
/// incomplete input.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct BibtexConcat {
    pub range: Range,
    pub left: BibtexContent,
    pub operator: BibtexToken,
    pub right: Option<BibtexContent>,
}
impl BibtexConcat {
    /// Creates a concat node; the range ends at the right operand, or the
    /// operator when the right operand is missing.
    pub fn new(left: BibtexContent, operator: BibtexToken, right: Option<BibtexContent>) -> Self {
        let end = right
            .as_ref()
            .map_or_else(|| operator.end(), |right| right.end());
        Self {
            range: Range::new(left.start(), end),
            left,
            operator,
            right,
        }
    }
}
impl SyntaxNode for BibtexConcat {
    // Delegates to the range computed at construction time.
    fn range(&self) -> Range {
        self.range
    }
}
/// Visitor over the BibTeX AST. Implementations decide whether to recurse
/// into children, typically via the `BibtexWalker` helpers.
pub trait BibtexVisitor<'a> {
    fn visit_root(&mut self, root: &'a BibtexRoot);
    fn visit_comment(&mut self, comment: &'a BibtexComment);
    fn visit_preamble(&mut self, preamble: &'a BibtexPreamble);
    fn visit_string(&mut self, string: &'a BibtexString);
    fn visit_entry(&mut self, entry: &'a BibtexEntry);
    fn visit_field(&mut self, field: &'a BibtexField);
    fn visit_word(&mut self, word: &'a BibtexWord);
    fn visit_command(&mut self, command: &'a BibtexCommand);
    fn visit_quoted_content(&mut self, content: &'a BibtexQuotedContent);
    fn visit_braced_content(&mut self, content: &'a BibtexBracedContent);
    fn visit_concat(&mut self, concat: &'a BibtexConcat);
}
/// Namespace for the default recursion helpers used by visitors.
pub struct BibtexWalker;
/// Each `walk_*` visits the direct children of a node; visitors call these
/// from their `visit_*` methods to recurse.
impl BibtexWalker {
    pub fn walk_root<'a>(visitor: &mut BibtexVisitor<'a>, root: &'a BibtexRoot) {
        for declaration in &root.children {
            declaration.accept(visitor);
        }
    }
    pub fn walk_preamble<'a>(visitor: &mut BibtexVisitor<'a>, preamble: &'a BibtexPreamble) {
        if let Some(ref content) = preamble.content {
            content.accept(visitor);
        }
    }
    pub fn walk_string<'a>(visitor: &mut BibtexVisitor<'a>, string: &'a BibtexString) {
        if let Some(ref value) = string.value {
            value.accept(visitor);
        }
    }
    pub fn walk_entry<'a>(visitor: &mut BibtexVisitor<'a>, entry: &'a BibtexEntry) {
        for field in &entry.fields {
            visitor.visit_field(field);
        }
    }
    pub fn walk_field<'a>(visitor: &mut BibtexVisitor<'a>, field: &'a BibtexField) {
        if let Some(ref content) = field.content {
            content.accept(visitor);
        }
    }
    pub fn walk_quoted_content<'a>(
        visitor: &mut BibtexVisitor<'a>,
        content: &'a BibtexQuotedContent,
    ) {
        for child in &content.children {
            child.accept(visitor);
        }
    }
    pub fn walk_braced_content<'a>(
        visitor: &mut BibtexVisitor<'a>,
        content: &'a BibtexBracedContent,
    ) {
        for child in &content.children {
            child.accept(visitor);
        }
    }
    pub fn walk_concat<'a>(visitor: &mut BibtexVisitor<'a>, concat: &'a BibtexConcat) {
        concat.left.accept(visitor);
        if let Some(ref right) = concat.right {
            right.accept(visitor);
        }
    }
}

View file

@ -1,109 +0,0 @@
use crate::bibtex::ast::*;
use crate::text::SyntaxNode;
use lsp_types::Position;
/// A borrowed reference to any node kind of the BibTeX AST, as collected
/// by `BibtexFinder`.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum BibtexNode<'a> {
    Root(&'a BibtexRoot),
    Preamble(&'a BibtexPreamble),
    String(&'a BibtexString),
    Entry(&'a BibtexEntry),
    Comment(&'a BibtexComment),
    Field(&'a BibtexField),
    Word(&'a BibtexWord),
    Command(&'a BibtexCommand),
    QuotedContent(&'a BibtexQuotedContent),
    BracedContent(&'a BibtexBracedContent),
    Concat(&'a BibtexConcat),
}
/// Visitor that collects every node containing `position`, from the root
/// down to the innermost node.
#[derive(Debug)]
pub struct BibtexFinder<'a> {
    pub position: Position,
    pub results: Vec<BibtexNode<'a>>,
}
impl<'a> BibtexFinder<'a> {
pub fn new(position: Position) -> Self {
BibtexFinder {
position,
results: Vec::new(),
}
}
}
impl<'a> BibtexVisitor<'a> for BibtexFinder<'a> {
fn visit_root(&mut self, root: &'a BibtexRoot) {
if root.range().contains(self.position) {
self.results.push(BibtexNode::Root(root));
BibtexWalker::walk_root(self, root);
}
}
fn visit_comment(&mut self, comment: &'a BibtexComment) {
if comment.range.contains(self.position) {
self.results.push(BibtexNode::Comment(comment));
}
}
fn visit_preamble(&mut self, preamble: &'a BibtexPreamble) {
if preamble.range.contains(self.position) {
self.results.push(BibtexNode::Preamble(preamble));
BibtexWalker::walk_preamble(self, preamble);
}
}
fn visit_string(&mut self, string: &'a BibtexString) {
if string.range.contains(self.position) {
self.results.push(BibtexNode::String(string));
BibtexWalker::walk_string(self, string);
}
}
fn visit_entry(&mut self, entry: &'a BibtexEntry) {
if entry.range.contains(self.position) {
self.results.push(BibtexNode::Entry(entry));
BibtexWalker::walk_entry(self, entry);
}
}
fn visit_field(&mut self, field: &'a BibtexField) {
if field.range.contains(self.position) {
self.results.push(BibtexNode::Field(field));
BibtexWalker::walk_field(self, field);
}
}
fn visit_word(&mut self, word: &'a BibtexWord) {
if word.range.contains(self.position) {
self.results.push(BibtexNode::Word(word));
}
}
fn visit_command(&mut self, command: &'a BibtexCommand) {
if command.range.contains(self.position) {
self.results.push(BibtexNode::Command(command));
}
}
fn visit_quoted_content(&mut self, content: &'a BibtexQuotedContent) {
if content.range.contains(self.position) {
self.results.push(BibtexNode::QuotedContent(content));
BibtexWalker::walk_quoted_content(self, content);
}
}
fn visit_braced_content(&mut self, content: &'a BibtexBracedContent) {
if content.range.contains(self.position) {
self.results.push(BibtexNode::BracedContent(content));
BibtexWalker::walk_braced_content(self, content);
}
}
fn visit_concat(&mut self, concat: &'a BibtexConcat) {
if concat.range.contains(self.position) {
self.results.push(BibtexNode::Concat(concat));
BibtexWalker::walk_concat(self, concat);
}
}
}

View file

@ -1,176 +0,0 @@
use super::ast::{BibtexToken, BibtexTokenKind};
use crate::text::CharStream;
/// Tokenizer that turns BibTeX source text into a stream of [`BibtexToken`]s.
pub struct BibtexLexer<'a> {
/// Underlying character stream with line/column tracking.
stream: CharStream<'a>,
}
impl<'a> BibtexLexer<'a> {
    /// Creates a lexer over `text`.
    pub fn new(text: &'a str) -> Self {
        BibtexLexer {
            stream: CharStream::new(text),
        }
    }

    /// Lexes a declaration kind starting at `@`, e.g. `@article`.
    ///
    /// The caller guarantees that the next character is `@`. The kind is
    /// classified case-insensitively; anything that is neither `@preamble`
    /// nor `@string` is treated as an entry kind.
    fn kind(&mut self) -> BibtexToken {
        self.stream.start_span();
        self.stream.next().unwrap();
        // `is_ascii_alphabetic` replaces the hand-written
        // `'a'..='z' || 'A'..='Z'` range check (same character set).
        // NOTE(review): `satifies` is the (misspelled) name of the
        // CharStream API; renaming it would touch every lexer.
        while self.stream.satifies(|c| c.is_ascii_alphabetic()) {
            self.stream.next();
        }
        let span = self.stream.end_span();
        let kind = match span.text.to_lowercase().as_ref() {
            "@preamble" => BibtexTokenKind::PreambleKind,
            "@string" => BibtexTokenKind::StringKind,
            _ => BibtexTokenKind::EntryKind,
        };
        BibtexToken::new(span, kind)
    }

    /// Consumes exactly one character and emits it as a token of `kind`.
    fn single_character(&mut self, kind: BibtexTokenKind) -> BibtexToken {
        self.stream.start_span();
        self.stream.next();
        let span = self.stream.end_span();
        BibtexToken::new(span, kind)
    }

    /// Lexes a `\command`; the actual scanning is done by the char stream.
    fn command(&mut self) -> BibtexToken {
        let span = self.stream.command();
        BibtexToken::new(span, BibtexTokenKind::Command)
    }

    /// Lexes a word: a maximal run of characters that are neither
    /// whitespace nor one of the BibTeX delimiters.
    fn word(&mut self) -> BibtexToken {
        fn is_word_char(c: char) -> bool {
            // Single match instead of the original chain of `!=` tests.
            match c {
                '@' | '=' | ',' | '#' | '"' | '{' | '}' | '(' | ')' => false,
                _ => !c.is_whitespace(),
            }
        }
        self.stream.start_span();
        while self.stream.satifies(|c| is_word_char(*c)) {
            self.stream.next();
        }
        let span = self.stream.end_span();
        BibtexToken::new(span, BibtexTokenKind::Word)
    }
}
impl<'a> Iterator for BibtexLexer<'a> {
type Item = BibtexToken;
/// Produces the next token, skipping whitespace; returns `None` at the
/// end of the input.
fn next(&mut self) -> Option<BibtexToken> {
loop {
// Dispatch on the next character; single-character delimiters map
// 1:1 to token kinds, '@' starts a declaration kind, '\\' a command.
match self.stream.peek() {
Some('@') => return Some(self.kind()),
Some('=') => return Some(self.single_character(BibtexTokenKind::Assign)),
Some(',') => return Some(self.single_character(BibtexTokenKind::Comma)),
Some('#') => return Some(self.single_character(BibtexTokenKind::Concat)),
Some('"') => return Some(self.single_character(BibtexTokenKind::Quote)),
Some('{') => return Some(self.single_character(BibtexTokenKind::BeginBrace)),
Some('}') => return Some(self.single_character(BibtexTokenKind::EndBrace)),
Some('(') => return Some(self.single_character(BibtexTokenKind::BeginParen)),
Some(')') => return Some(self.single_character(BibtexTokenKind::EndParen)),
Some('\\') => return Some(self.command()),
Some(c) => {
// Whitespace is skipped; everything else starts a word.
if c.is_whitespace() {
self.stream.next();
} else {
return Some(self.word());
}
}
None => {
return None;
}
}
}
}
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::text::Span;
    use lsp_types::{Position, Range};

    /// Asserts that the next token of `lexer` has the given start position,
    /// text and kind. The explicit `<'a>` lifetime of the original
    /// signature was redundant and has been elided
    /// (clippy::needless_lifetimes).
    fn verify(
        lexer: &mut BibtexLexer<'_>,
        line: u64,
        character: u64,
        text: &str,
        kind: BibtexTokenKind,
    ) {
        let start = Position::new(line, character);
        let end = Position::new(line, character + text.chars().count() as u64);
        let range = Range::new(start, end);
        let span = Span::new(range, text.to_owned());
        let token = BibtexToken::new(span, kind);
        assert_eq!(Some(token), lexer.next());
    }

    #[test]
    fn test_word() {
        let mut lexer = BibtexLexer::new("foo bar baz");
        verify(&mut lexer, 0, 0, "foo", BibtexTokenKind::Word);
        verify(&mut lexer, 0, 4, "bar", BibtexTokenKind::Word);
        verify(&mut lexer, 0, 8, "baz", BibtexTokenKind::Word);
        assert_eq!(None, lexer.next());
    }

    #[test]
    fn test_command() {
        let mut lexer = BibtexLexer::new("\\foo\\bar@baz");
        verify(&mut lexer, 0, 0, "\\foo", BibtexTokenKind::Command);
        verify(&mut lexer, 0, 4, "\\bar@baz", BibtexTokenKind::Command);
        assert_eq!(None, lexer.next());
    }

    #[test]
    fn test_escape_sequence() {
        let mut lexer = BibtexLexer::new("\\foo*\n\\%\\**");
        verify(&mut lexer, 0, 0, "\\foo*", BibtexTokenKind::Command);
        verify(&mut lexer, 1, 0, "\\%", BibtexTokenKind::Command);
        verify(&mut lexer, 1, 2, "\\*", BibtexTokenKind::Command);
        verify(&mut lexer, 1, 4, "*", BibtexTokenKind::Word);
        assert_eq!(None, lexer.next());
    }

    #[test]
    fn test_delimiter() {
        let mut lexer = BibtexLexer::new("{}()\"");
        verify(&mut lexer, 0, 0, "{", BibtexTokenKind::BeginBrace);
        verify(&mut lexer, 0, 1, "}", BibtexTokenKind::EndBrace);
        verify(&mut lexer, 0, 2, "(", BibtexTokenKind::BeginParen);
        verify(&mut lexer, 0, 3, ")", BibtexTokenKind::EndParen);
        verify(&mut lexer, 0, 4, "\"", BibtexTokenKind::Quote);
        assert_eq!(None, lexer.next());
    }

    #[test]
    fn test_kind() {
        let mut lexer = BibtexLexer::new("@pReAmBlE\n@article\n@string");
        verify(&mut lexer, 0, 0, "@pReAmBlE", BibtexTokenKind::PreambleKind);
        verify(&mut lexer, 1, 0, "@article", BibtexTokenKind::EntryKind);
        verify(&mut lexer, 2, 0, "@string", BibtexTokenKind::StringKind);
        assert_eq!(None, lexer.next());
    }

    #[test]
    fn test_operator() {
        let mut lexer = BibtexLexer::new("=,#");
        verify(&mut lexer, 0, 0, "=", BibtexTokenKind::Assign);
        verify(&mut lexer, 0, 1, ",", BibtexTokenKind::Comma);
        verify(&mut lexer, 0, 2, "#", BibtexTokenKind::Concat);
        assert_eq!(None, lexer.next());
    }
}

View file

@ -1,48 +0,0 @@
mod ast;
mod finder;
mod lexer;
mod parser;
use crate::bibtex::lexer::BibtexLexer;
use crate::bibtex::parser::BibtexParser;
pub use crate::bibtex::ast::*;
pub use crate::bibtex::finder::*;
use lsp_types::Position;
/// The parsed syntax tree of a single BibTeX document.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct BibtexSyntaxTree {
pub root: BibtexRoot,
}
impl BibtexSyntaxTree {
    /// Returns all `@entry` declarations in document order.
    ///
    /// Iterator form of the original push-loop over `root.children`;
    /// preamble, string and comment declarations are skipped.
    pub fn entries(&self) -> Vec<&BibtexEntry> {
        self.root
            .children
            .iter()
            .filter_map(|declaration| match declaration {
                BibtexDeclaration::Entry(entry) => Some(entry.as_ref()),
                _ => None,
            })
            .collect()
    }

    /// Returns the chain of nodes containing `position`, outermost first.
    pub fn find(&self, position: Position) -> Vec<BibtexNode> {
        let mut finder = BibtexFinder::new(position);
        finder.visit_root(&self.root);
        finder.results
    }
}
/// Wraps an already-parsed root node into a syntax tree.
impl From<BibtexRoot> for BibtexSyntaxTree {
fn from(root: BibtexRoot) -> Self {
BibtexSyntaxTree { root }
}
}
/// Lexes and parses `text` into a syntax tree; parsing never fails,
/// malformed input produces nodes with missing (`None`) parts.
impl From<&str> for BibtexSyntaxTree {
fn from(text: &str) -> Self {
let lexer = BibtexLexer::new(text);
let mut parser = BibtexParser::new(lexer);
parser.root().into()
}
}

View file

@ -1,219 +0,0 @@
use crate::bibtex::ast::*;
use std::iter::Peekable;
/// Error-tolerant recursive-descent parser over a stream of BibTeX tokens.
///
/// Missing or unexpected tokens never abort parsing; the corresponding AST
/// slots are simply left as `None`.
pub struct BibtexParser<I: Iterator<Item = BibtexToken>> {
tokens: Peekable<I>,
}
impl<I: Iterator<Item = BibtexToken>> BibtexParser<I> {
/// Creates a parser over `tokens`.
pub fn new(tokens: I) -> Self {
BibtexParser {
tokens: tokens.peekable(),
}
}
/// Parses the whole token stream into a root node.
///
/// Tokens outside any declaration are preserved as comment nodes.
pub fn root(&mut self) -> BibtexRoot {
let mut children = Vec::new();
while let Some(ref token) = self.tokens.peek() {
match token.kind {
BibtexTokenKind::PreambleKind => {
let preamble = Box::new(self.preamble());
children.push(BibtexDeclaration::Preamble(preamble));
}
BibtexTokenKind::StringKind => {
let string = Box::new(self.string());
children.push(BibtexDeclaration::String(string));
}
BibtexTokenKind::EntryKind => {
let entry = Box::new(self.entry());
children.push(BibtexDeclaration::Entry(entry));
}
_ => {
let comment = BibtexComment::new(self.tokens.next().unwrap());
children.push(BibtexDeclaration::Comment(Box::new(comment)));
}
}
}
BibtexRoot::new(children)
}
/// Parses `@preamble { <content> }` (or the parenthesized form), bailing
/// out early with partially-filled nodes on malformed input.
fn preamble(&mut self) -> BibtexPreamble {
let ty = self.tokens.next().unwrap();
let left = self.expect2(BibtexTokenKind::BeginBrace, BibtexTokenKind::BeginParen);
if left.is_none() {
return BibtexPreamble::new(ty, None, None, None);
}
if !self.can_match_content() {
return BibtexPreamble::new(ty, left, None, None);
}
let content = self.content();
let right = self.expect2(BibtexTokenKind::EndBrace, BibtexTokenKind::EndParen);
BibtexPreamble::new(ty, left, Some(content), right)
}
/// Parses `@string { name = <value> }` (or the parenthesized form).
fn string(&mut self) -> BibtexString {
let ty = self.tokens.next().unwrap();
let left = self.expect2(BibtexTokenKind::BeginBrace, BibtexTokenKind::BeginParen);
if left.is_none() {
return BibtexString::new(ty, None, None, None, None, None);
}
let name = self.expect1(BibtexTokenKind::Word);
if name.is_none() {
return BibtexString::new(ty, left, None, None, None, None);
}
let assign = self.expect1(BibtexTokenKind::Assign);
if assign.is_none() {
return BibtexString::new(ty, left, name, None, None, None);
}
if !self.can_match_content() {
return BibtexString::new(ty, left, name, assign, None, None);
}
let value = self.content();
let right = self.expect2(BibtexTokenKind::EndBrace, BibtexTokenKind::EndParen);
BibtexString::new(ty, left, name, assign, Some(value), right)
}
/// Parses `@<kind> { name, field*, }` (or the parenthesized form).
fn entry(&mut self) -> BibtexEntry {
let ty = self.tokens.next().unwrap();
let left = self.expect2(BibtexTokenKind::BeginBrace, BibtexTokenKind::BeginParen);
if left.is_none() {
return BibtexEntry::new(ty, None, None, None, Vec::new(), None);
}
let name = self.expect1(BibtexTokenKind::Word);
if name.is_none() {
return BibtexEntry::new(ty, left, None, None, Vec::new(), None);
}
let comma = self.expect1(BibtexTokenKind::Comma);
if comma.is_none() {
return BibtexEntry::new(ty, left, name, None, Vec::new(), None);
}
let mut fields = Vec::new();
while self.next_of_kind(BibtexTokenKind::Word) {
fields.push(self.field());
}
let right = self.expect2(BibtexTokenKind::EndBrace, BibtexTokenKind::EndParen);
BibtexEntry::new(ty, left, name, comma, fields, right)
}
/// Parses `name = <content> ,` inside an entry; the caller guarantees the
/// next token is a word.
fn field(&mut self) -> BibtexField {
let name = self.tokens.next().unwrap();
let assign = self.expect1(BibtexTokenKind::Assign);
if assign.is_none() {
return BibtexField::new(name, None, None, None);
}
if !self.can_match_content() {
return BibtexField::new(name, assign, None, None);
}
let content = self.content();
let comma = self.expect1(BibtexTokenKind::Comma);
BibtexField::new(name, assign, Some(content), comma)
}
/// Parses one content value: a word, command, quoted or braced group —
/// optionally followed by a `#` concatenation with further content.
/// The caller must have checked `can_match_content` first.
fn content(&mut self) -> BibtexContent {
let token = self.tokens.next().unwrap();
let left = match token.kind {
// Tokens that would start a declaration or delimit an entry are
// degraded to plain words inside a content position.
BibtexTokenKind::PreambleKind
| BibtexTokenKind::StringKind
| BibtexTokenKind::EntryKind
| BibtexTokenKind::Word
| BibtexTokenKind::Assign
| BibtexTokenKind::Comma
| BibtexTokenKind::BeginParen
| BibtexTokenKind::EndParen => BibtexContent::Word(BibtexWord::new(token)),
BibtexTokenKind::Command => BibtexContent::Command(BibtexCommand::new(token)),
BibtexTokenKind::Quote => {
let mut children = Vec::new();
while self.can_match_content() {
if self.next_of_kind(BibtexTokenKind::Quote) {
break;
}
children.push(self.content());
}
let right = self.expect1(BibtexTokenKind::Quote);
BibtexContent::QuotedContent(BibtexQuotedContent::new(token, children, right))
}
BibtexTokenKind::BeginBrace => {
let mut children = Vec::new();
while self.can_match_content() {
children.push(self.content());
}
let right = self.expect1(BibtexTokenKind::EndBrace);
BibtexContent::BracedContent(BibtexBracedContent::new(token, children, right))
}
// `Concat` and `EndBrace` are excluded by `can_match_content`.
_ => unreachable!(),
};
if let Some(operator) = self.expect1(BibtexTokenKind::Concat) {
let right = if self.can_match_content() {
Some(self.content())
} else {
None
};
BibtexContent::Concat(Box::new(BibtexConcat::new(left, operator, right)))
} else {
left
}
}
/// Returns `true` if the next token may start a content value.
fn can_match_content(&mut self) -> bool {
if let Some(ref token) = self.tokens.peek() {
match token.kind {
BibtexTokenKind::PreambleKind
| BibtexTokenKind::StringKind
| BibtexTokenKind::EntryKind
| BibtexTokenKind::Word
| BibtexTokenKind::Command
| BibtexTokenKind::Assign
| BibtexTokenKind::Comma
| BibtexTokenKind::Quote
| BibtexTokenKind::BeginBrace
| BibtexTokenKind::BeginParen
| BibtexTokenKind::EndParen => true,
BibtexTokenKind::Concat | BibtexTokenKind::EndBrace => false,
}
} else {
false
}
}
/// Consumes and returns the next token if it has kind `kind`.
fn expect1(&mut self, kind: BibtexTokenKind) -> Option<BibtexToken> {
if let Some(ref token) = self.tokens.peek() {
if token.kind == kind {
return self.tokens.next();
}
}
None
}
/// Consumes and returns the next token if it has kind `kind1` or `kind2`.
fn expect2(&mut self, kind1: BibtexTokenKind, kind2: BibtexTokenKind) -> Option<BibtexToken> {
if let Some(ref token) = self.tokens.peek() {
if token.kind == kind1 || token.kind == kind2 {
return self.tokens.next();
}
}
None
}
/// Returns `true` if the next token has kind `kind`, without consuming it.
fn next_of_kind(&mut self, kind: BibtexTokenKind) -> bool {
if let Some(token) = self.tokens.peek() {
token.kind == kind
} else {
false
}
}
}

File diff suppressed because it is too large Load diff

View file

@ -1,189 +0,0 @@
use once_cell::sync::Lazy;
use serde::{Deserialize, Serialize};
/// A command that opens/closes an environment (e.g. `\begin`); `index` is the
/// argument position of the environment name.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LatexEnvironmentCommand {
pub name: String,
pub index: usize,
}
/// A citation command (e.g. `\cite`); `index` is the argument position of
/// the citation key.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LatexCitationCommand {
pub name: String,
pub index: usize,
}
/// Context in which a label reference is valid.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum LatexLabelReferenceSource {
Everything,
Math,
}
/// Whether a label command defines a label or references one.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum LatexLabelKind {
Definition,
Reference(LatexLabelReferenceSource),
}
impl LatexLabelKind {
/// Returns `true` for the `Reference` variant.
pub fn is_reference(&self) -> bool {
match self {
LatexLabelKind::Definition => false,
LatexLabelKind::Reference(_) => true,
}
}
}
/// A label command (e.g. `\label`, `\ref`) with the argument position of the
/// label name and its kind.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LatexLabelCommand {
pub name: String,
pub index: usize,
pub kind: LatexLabelKind,
}
/// A sectioning command (e.g. `\section`); `level` orders the hierarchy.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LatexSectionCommand {
pub name: String,
pub index: usize,
pub level: i32,
}
/// The kind of resource an include command pulls in.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum LatexIncludeKind {
Package,
Class,
Latex,
Bibliography,
Image,
Svg,
Pdf,
Everything,
}
impl LatexIncludeKind {
/// Returns the file extensions associated with this kind, or `None` for
/// `Everything` (no restriction).
pub fn extensions(&self) -> Option<&'static [&'static str]> {
match self {
LatexIncludeKind::Package => Some(&["sty"]),
LatexIncludeKind::Class => Some(&["cls"]),
LatexIncludeKind::Latex => Some(&["tex"]),
LatexIncludeKind::Bibliography => Some(&["bib"]),
LatexIncludeKind::Image => Some(&["pdf", "png", "jpg", "jpeg", "bmp"]),
LatexIncludeKind::Svg => Some(&["svg"]),
LatexIncludeKind::Pdf => Some(&["pdf"]),
LatexIncludeKind::Everything => None,
}
}
}
/// An include command (e.g. `\usepackage`, `\input`); `include_extension`
/// tells whether the written path keeps its file extension.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LatexIncludeCommand {
pub name: String,
pub index: usize,
pub kind: LatexIncludeKind,
pub include_extension: bool,
}
/// A command that defines other commands (e.g. `\newcommand`), with the
/// argument positions of the definition, arity and implementation.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LatexCommandDefinitionCommand {
pub name: String,
pub definition_index: usize,
pub argument_count_index: usize,
pub implementation_index: usize,
}
/// A command defining a math operator (e.g. `\DeclareMathOperator`).
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LatexMathOperatorCommand {
pub name: String,
pub definition_index: usize,
pub implementation_index: usize,
}
/// A command defining a theorem environment (e.g. `\newtheorem`).
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LatexTheoremDefinitionCommand {
pub name: String,
pub index: usize,
}
/// A command taking a color name argument (e.g. `\color`).
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LatexColorCommand {
pub name: String,
pub index: usize,
}
/// A command taking a color model argument (e.g. `\definecolor`).
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LatexColorModelCommand {
pub name: String,
pub index: usize,
}
/// Documentation entry for a BibTeX entry type (e.g. `article`).
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct BibtexEntryTypeDoc {
pub name: String,
pub documentation: Option<String>,
}
/// Documentation entry for a BibTeX field (e.g. `author`).
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
pub struct BibtexFieldDoc {
pub name: String,
pub documentation: String,
}
/// The full static language database, deserialized from `language.json`.
#[derive(Debug, PartialEq, Eq, Clone, Default, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct LanguageData {
pub environment_commands: Vec<LatexEnvironmentCommand>,
pub citation_commands: Vec<LatexCitationCommand>,
pub label_commands: Vec<LatexLabelCommand>,
pub section_commands: Vec<LatexSectionCommand>,
pub include_commands: Vec<LatexIncludeCommand>,
pub command_definition_commands: Vec<LatexCommandDefinitionCommand>,
pub math_operator_commands: Vec<LatexMathOperatorCommand>,
pub theorem_definition_commands: Vec<LatexTheoremDefinitionCommand>,
pub colors: Vec<String>,
pub color_commands: Vec<LatexColorCommand>,
pub color_model_commands: Vec<LatexColorModelCommand>,
pub entry_types: Vec<BibtexEntryTypeDoc>,
pub fields: Vec<BibtexFieldDoc>,
pub pgf_libraries: Vec<String>,
pub tikz_libraries: Vec<String>,
pub math_environments: Vec<String>,
}
impl LanguageData {
pub fn entry_type_documentation(&self, name: &str) -> Option<&str> {
for ty in self.entry_types.iter() {
if ty.name.to_lowercase() == name.to_lowercase() {
if let Some(documentation) = &ty.documentation {
return Some(&documentation);
}
}
}
None
}
pub fn field_documentation(&self, name: &str) -> Option<&str> {
self.fields
.iter()
.find(|field| field.name.to_lowercase() == name.to_lowercase())
.map(|field| field.documentation.as_ref())
}
}
/// Lazily parsed contents of the bundled `language.json` database.
/// Panics on first access if the embedded JSON does not match `LanguageData`.
pub static LANGUAGE_DATA: Lazy<LanguageData> = Lazy::new(|| {
const JSON: &str = include_str!("language.json");
serde_json::from_str(JSON).expect("Failed to deserialize language.json")
});

View file

@ -1,344 +0,0 @@
use crate::text::{Span, SyntaxNode};
use itertools::Itertools;
use lsp_types::Range;
use std::sync::Arc;
/// Classification of a single LaTeX token.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum LatexTokenKind {
Word,
Command,
Math,
Comma,
BeginGroup,
EndGroup,
BeginOptions,
EndOptions,
}
/// A token with its source span and kind.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexToken {
pub span: Span,
pub kind: LatexTokenKind,
}
impl LatexToken {
/// Creates a token from a span and its kind.
pub fn new(span: Span, kind: LatexTokenKind) -> Self {
Self { span, kind }
}
/// Returns the source text of the token.
pub fn text(&self) -> &str {
&self.span.text
}
}
impl SyntaxNode for LatexToken {
fn range(&self) -> Range {
self.span.range()
}
}
/// The root node of a LaTeX syntax tree.
#[derive(Debug, PartialEq, Eq, Clone, Default)]
pub struct LatexRoot {
pub children: Vec<LatexContent>,
}
impl LatexRoot {
/// Creates a root node over `children`.
pub fn new(children: Vec<LatexContent>) -> Self {
Self { children }
}
}
impl SyntaxNode for LatexRoot {
/// The range spans all children; an empty document has a zero range.
fn range(&self) -> Range {
if self.children.is_empty() {
Range::new_simple(0, 0, 0, 0)
} else {
Range::new(
self.children[0].start(),
self.children[self.children.len() - 1].end(),
)
}
}
}
/// Any child node of the LaTeX tree; nodes are shared via `Arc`.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum LatexContent {
Group(Arc<LatexGroup>),
Command(Arc<LatexCommand>),
Text(Arc<LatexText>),
Comma(Arc<LatexComma>),
Math(Arc<LatexMath>),
}
impl LatexContent {
/// Dispatches to the matching `visit_*` method of `visitor`.
pub fn accept(&self, visitor: &mut LatexVisitor) {
match self {
LatexContent::Group(group) => visitor.visit_group(Arc::clone(&group)),
LatexContent::Command(command) => visitor.visit_command(Arc::clone(&command)),
LatexContent::Text(text) => visitor.visit_text(Arc::clone(&text)),
LatexContent::Comma(comma) => visitor.visit_comma(Arc::clone(&comma)),
LatexContent::Math(math) => visitor.visit_math(Arc::clone(&math)),
}
}
}
impl SyntaxNode for LatexContent {
fn range(&self) -> Range {
match self {
LatexContent::Group(group) => group.range(),
LatexContent::Command(command) => command.range(),
LatexContent::Text(text) => text.range(),
LatexContent::Comma(comma) => comma.range(),
LatexContent::Math(math) => math.range(),
}
}
}
/// Distinguishes `{...}` groups from `[...]` option lists.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum LatexGroupKind {
Group,
Options,
}
/// A `{...}` or `[...]` group; `right` is `None` when the closing
/// delimiter is missing in the source.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexGroup {
pub range: Range,
pub left: LatexToken,
pub children: Vec<LatexContent>,
pub right: Option<LatexToken>,
pub kind: LatexGroupKind,
}
impl LatexGroup {
    /// Builds a group node and precomputes its range.
    ///
    /// The range ends at the closing delimiter when present, otherwise at
    /// the last child, otherwise at the opening delimiter itself.
    pub fn new(
        left: LatexToken,
        children: Vec<LatexContent>,
        right: Option<LatexToken>,
        kind: LatexGroupKind,
    ) -> Self {
        let end = match (&right, children.last()) {
            (Some(token), _) => token.end(),
            (None, Some(last)) => last.end(),
            (None, None) => left.end(),
        };
        Self {
            range: Range::new(left.start(), end),
            left,
            children,
            right,
            kind,
        }
    }
}

impl SyntaxNode for LatexGroup {
    /// Returns the range precomputed by `new`.
    fn range(&self) -> Range {
        self.range
    }
}
/// A `\command` together with its option lists and arguments; `groups`
/// holds both, ordered by source position.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexCommand {
pub range: Range,
pub name: LatexToken,
pub options: Vec<Arc<LatexGroup>>,
pub args: Vec<Arc<LatexGroup>>,
pub groups: Vec<Arc<LatexGroup>>,
}
impl LatexCommand {
/// Builds a command node; `groups` is the merge of `args` and `options`
/// sorted by start position, and the range extends to the last group.
pub fn new(
name: LatexToken,
options: Vec<Arc<LatexGroup>>,
args: Vec<Arc<LatexGroup>>,
) -> Self {
let groups: Vec<Arc<LatexGroup>> = args
.iter()
.chain(options.iter())
.sorted_by_key(|group| group.range.start)
.map(Arc::clone)
.collect();
let end = if let Some(group) = groups.last() {
group.end()
} else {
name.end()
};
Self {
range: Range::new(name.start(), end),
name,
options,
args,
groups,
}
}
/// Returns the range of the command name without the leading backslash.
pub fn short_name_range(&self) -> Range {
Range::new_simple(
self.name.start().line,
self.name.start().character + 1,
self.name.end().line,
self.name.end().character,
)
}
/// Returns the text node of argument `index` if that argument exists and
/// contains exactly one text child.
pub fn extract_text(&self, index: usize) -> Option<&LatexText> {
if self.args.len() > index && self.args[index].children.len() == 1 {
if let LatexContent::Text(ref text) = self.args[index].children[0] {
Some(text)
} else {
None
}
} else {
None
}
}
/// Returns the single word of argument `index`, if it is exactly one word.
pub fn extract_word(&self, index: usize) -> Option<&LatexToken> {
let text = self.extract_text(index)?;
if text.words.len() == 1 {
Some(&text.words[0])
} else {
None
}
}
/// Returns `true` if argument `index` consists of exactly one word.
pub fn has_word(&self, index: usize) -> bool {
self.extract_word(index).is_some()
}
/// Collects all words of argument `index`, skipping commas.
/// NOTE(review): unlike `has_comma_separated_words`, this indexes
/// `self.args[index]` without a bounds check and panics when the
/// argument is missing — callers are expected to check first.
pub fn extract_comma_separated_words(&self, index: usize) -> Vec<&LatexToken> {
let mut words = Vec::new();
for child in &self.args[index].children {
if let LatexContent::Text(text) = child {
for word in &text.words {
words.push(word);
}
}
}
words
}
/// Returns `true` if argument `index` exists and contains only text and
/// comma nodes (i.e. is a valid comma-separated word list).
pub fn has_comma_separated_words(&self, index: usize) -> bool {
if self.args.len() <= index {
return false;
}
for node in &self.args[index].children {
match node {
LatexContent::Text(_) | LatexContent::Comma(_) => (),
LatexContent::Command(_) | LatexContent::Group(_) | LatexContent::Math(_) => {
return false;
}
}
}
true
}
}
impl SyntaxNode for LatexCommand {
fn range(&self) -> Range {
self.range
}
}
/// A run of consecutive word tokens.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexText {
pub range: Range,
pub words: Vec<LatexToken>,
}
impl LatexText {
/// Builds a text node; `words` must be non-empty (the range is taken
/// from the first and last word).
pub fn new(words: Vec<LatexToken>) -> Self {
Self {
range: Range::new(words[0].start(), words[words.len() - 1].end()),
words,
}
}
}
impl SyntaxNode for LatexText {
fn range(&self) -> Range {
self.range
}
}
/// A single `,` token.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexComma {
pub token: LatexToken,
}
impl LatexComma {
pub fn new(token: LatexToken) -> Self {
Self { token }
}
}
impl SyntaxNode for LatexComma {
fn range(&self) -> Range {
self.token.range()
}
}
/// A `$` or `$$` math shift token.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexMath {
pub token: LatexToken,
}
impl LatexMath {
pub fn new(token: LatexToken) -> Self {
Self { token }
}
}
impl SyntaxNode for LatexMath {
fn range(&self) -> Range {
self.token.range()
}
}
/// Visitor over the LaTeX tree; implementors decide whether to recurse by
/// calling the matching `LatexWalker::walk_*` function.
pub trait LatexVisitor {
fn visit_root(&mut self, root: Arc<LatexRoot>);
fn visit_group(&mut self, group: Arc<LatexGroup>);
fn visit_command(&mut self, command: Arc<LatexCommand>);
fn visit_text(&mut self, text: Arc<LatexText>);
fn visit_comma(&mut self, comma: Arc<LatexComma>);
fn visit_math(&mut self, math: Arc<LatexMath>);
}
/// Namespace for the default recursion helpers used by `LatexVisitor`s.
pub struct LatexWalker;
impl LatexWalker {
/// Visits every child of the root.
pub fn walk_root(visitor: &mut LatexVisitor, root: Arc<LatexRoot>) {
for child in &root.children {
child.accept(visitor);
}
}
/// Visits every child of a group.
pub fn walk_group(visitor: &mut LatexVisitor, group: Arc<LatexGroup>) {
for child in &group.children {
child.accept(visitor);
}
}
/// Visits every argument/option group of a command in source order.
pub fn walk_command(visitor: &mut LatexVisitor, command: Arc<LatexCommand>) {
for arg in &command.groups {
visitor.visit_group(Arc::clone(&arg));
}
}
// Leaf nodes: nothing to recurse into.
pub fn walk_text(_visitor: &mut LatexVisitor, _text: Arc<LatexText>) {}
pub fn walk_comma(_visitor: &mut LatexVisitor, _comma: Arc<LatexComma>) {}
pub fn walk_math(_visitor: &mut LatexVisitor, _math: Arc<LatexMath>) {}
}

View file

@ -1,73 +0,0 @@
use crate::latex::ast::*;
use crate::text::SyntaxNode;
use lsp_types::Position;
use std::sync::Arc;
/// A shared reference to a single node of a LaTeX syntax tree.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum LatexNode {
Root(Arc<LatexRoot>),
Group(Arc<LatexGroup>),
Command(Arc<LatexCommand>),
Text(Arc<LatexText>),
Comma(Arc<LatexComma>),
Math(Arc<LatexMath>),
}
/// Visitor that collects the chain of nodes containing a given position.
#[derive(Debug)]
pub struct LatexFinder {
/// The position to search for.
pub position: Position,
/// Nodes containing `position`, ordered from outermost to innermost.
pub results: Vec<LatexNode>,
}
impl LatexFinder {
/// Creates a finder for `position` with an empty result list.
pub fn new(position: Position) -> Self {
Self {
position,
results: Vec::new(),
}
}
}
// Each `visit_*` method records the node if its range contains the target
// position and only then descends, so `results` is ordered outermost first.
impl LatexVisitor for LatexFinder {
fn visit_root(&mut self, root: Arc<LatexRoot>) {
if root.range().contains(self.position) {
self.results.push(LatexNode::Root(Arc::clone(&root)));
LatexWalker::walk_root(self, root);
}
}
fn visit_group(&mut self, group: Arc<LatexGroup>) {
if group.range.contains(self.position) {
self.results.push(LatexNode::Group(Arc::clone(&group)));
LatexWalker::walk_group(self, group);
}
}
fn visit_command(&mut self, command: Arc<LatexCommand>) {
if command.range.contains(self.position) {
self.results.push(LatexNode::Command(Arc::clone(&command)));
LatexWalker::walk_command(self, command);
}
}
fn visit_text(&mut self, text: Arc<LatexText>) {
if text.range.contains(self.position) {
self.results.push(LatexNode::Text(Arc::clone(&text)));
LatexWalker::walk_text(self, text);
}
}
fn visit_comma(&mut self, comma: Arc<LatexComma>) {
if comma.range().contains(self.position) {
self.results.push(LatexNode::Comma(Arc::clone(&comma)));
LatexWalker::walk_comma(self, comma);
}
}
fn visit_math(&mut self, math: Arc<LatexMath>) {
if math.range().contains(self.position) {
self.results.push(LatexNode::Math(Arc::clone(&math)));
LatexWalker::walk_math(self, math);
}
}
}

View file

@ -1,179 +0,0 @@
use super::ast::{LatexToken, LatexTokenKind};
use crate::text::CharStream;
/// Tokenizer that turns LaTeX source text into a stream of [`LatexToken`]s.
pub struct LatexLexer<'a> {
/// Underlying character stream with line/column tracking.
stream: CharStream<'a>,
}
impl<'a> LatexLexer<'a> {
/// Creates a lexer over `text`.
pub fn new(text: &'a str) -> Self {
LatexLexer {
stream: CharStream::new(text),
}
}
/// Consumes exactly one character and emits it as a token of `kind`.
fn single_char(&mut self, kind: LatexTokenKind) -> LatexToken {
self.stream.start_span();
self.stream.next();
let span = self.stream.end_span();
LatexToken::new(span, kind)
}
/// Lexes a math shift: a single `$` or a `$$` pair.
fn math(&mut self) -> LatexToken {
self.stream.start_span();
self.stream.next();
if self.stream.satifies(|c| *c == '$') {
self.stream.next();
}
let span = self.stream.end_span();
LatexToken::new(span, LatexTokenKind::Math)
}
/// Lexes a `\command`; the actual scanning is done by the char stream.
fn command(&mut self) -> LatexToken {
let span = self.stream.command();
LatexToken::new(span, LatexTokenKind::Command)
}
/// Lexes a word: a maximal run of characters accepted by `is_word_char`.
fn word(&mut self) -> LatexToken {
self.stream.start_span();
self.stream.next();
while self.stream.satifies(|c| is_word_char(*c)) {
self.stream.next();
}
let span = self.stream.end_span();
LatexToken::new(span, LatexTokenKind::Word)
}
}
impl<'a> Iterator for LatexLexer<'a> {
type Item = LatexToken;
/// Produces the next token, skipping whitespace and `%` line comments;
/// returns `None` at the end of the input.
fn next(&mut self) -> Option<LatexToken> {
loop {
match self.stream.peek() {
Some('%') => {
// Line comment: discard everything up to the newline.
self.stream.skip_rest_of_line();
}
Some('{') => {
return Some(self.single_char(LatexTokenKind::BeginGroup));
}
Some('}') => {
return Some(self.single_char(LatexTokenKind::EndGroup));
}
Some('[') => {
return Some(self.single_char(LatexTokenKind::BeginOptions));
}
Some(']') => {
return Some(self.single_char(LatexTokenKind::EndOptions));
}
Some('$') => {
return Some(self.math());
}
Some(',') => {
return Some(self.single_char(LatexTokenKind::Comma));
}
Some('\\') => {
return Some(self.command());
}
Some(c) => {
// Whitespace is skipped; everything else starts a word.
if c.is_whitespace() {
self.stream.next();
} else {
return Some(self.word());
}
}
None => {
return None;
}
}
}
}
}
/// Returns `true` if `c` may appear inside a LaTeX word token.
///
/// A word character is any non-whitespace character that is not a comment
/// starter, group/options delimiter, command starter, math shift or comma.
fn is_word_char(c: char) -> bool {
    // Same predicate as the original chain of `!=` comparisons, written as
    // a single match so the delimiter set is visible at a glance.
    match c {
        '%' | '{' | '}' | '[' | ']' | '\\' | '$' | ',' => false,
        _ => !c.is_whitespace(),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::text::Span;
    use lsp_types::{Position, Range};

    /// Asserts that the next token of `lexer` has the given start position,
    /// text and kind. The explicit `<'a>` lifetime of the original
    /// signature was redundant and has been elided
    /// (clippy::needless_lifetimes).
    fn verify(
        lexer: &mut LatexLexer<'_>,
        line: u64,
        character: u64,
        text: &str,
        kind: LatexTokenKind,
    ) {
        let start = Position::new(line, character);
        let end = Position::new(line, character + text.chars().count() as u64);
        let range = Range::new(start, end);
        let span = Span::new(range, text.to_owned());
        let token = LatexToken::new(span, kind);
        assert_eq!(Some(token), lexer.next());
    }

    #[test]
    fn test_word() {
        let mut lexer = LatexLexer::new("foo bar baz");
        verify(&mut lexer, 0, 0, "foo", LatexTokenKind::Word);
        verify(&mut lexer, 0, 4, "bar", LatexTokenKind::Word);
        verify(&mut lexer, 0, 8, "baz", LatexTokenKind::Word);
        assert_eq!(None, lexer.next());
    }

    #[test]
    fn test_command() {
        let mut lexer = LatexLexer::new("\\foo\\bar@baz\n\\foo*");
        verify(&mut lexer, 0, 0, "\\foo", LatexTokenKind::Command);
        verify(&mut lexer, 0, 4, "\\bar@baz", LatexTokenKind::Command);
        verify(&mut lexer, 1, 0, "\\foo*", LatexTokenKind::Command);
        assert_eq!(None, lexer.next());
    }

    #[test]
    fn test_escape_sequence() {
        let mut lexer = LatexLexer::new("\\%\\**");
        verify(&mut lexer, 0, 0, "\\%", LatexTokenKind::Command);
        verify(&mut lexer, 0, 2, "\\*", LatexTokenKind::Command);
        verify(&mut lexer, 0, 4, "*", LatexTokenKind::Word);
        assert_eq!(None, lexer.next());
    }

    #[test]
    fn test_group_delimiter() {
        let mut lexer = LatexLexer::new("{}[]");
        verify(&mut lexer, 0, 0, "{", LatexTokenKind::BeginGroup);
        verify(&mut lexer, 0, 1, "}", LatexTokenKind::EndGroup);
        verify(&mut lexer, 0, 2, "[", LatexTokenKind::BeginOptions);
        verify(&mut lexer, 0, 3, "]", LatexTokenKind::EndOptions);
        assert_eq!(None, lexer.next());
    }

    #[test]
    fn test_math() {
        let mut lexer = LatexLexer::new("$$ $ $");
        verify(&mut lexer, 0, 0, "$$", LatexTokenKind::Math);
        verify(&mut lexer, 0, 3, "$", LatexTokenKind::Math);
        verify(&mut lexer, 0, 5, "$", LatexTokenKind::Math);
        assert_eq!(None, lexer.next());
    }

    #[test]
    fn test_line_comment() {
        let mut lexer = LatexLexer::new(" %foo \nfoo");
        verify(&mut lexer, 1, 0, "foo", LatexTokenKind::Word);
        assert_eq!(None, lexer.next());
    }
}

View file

@ -1,666 +0,0 @@
mod ast;
mod finder;
mod lexer;
mod parser;
pub use self::ast::*;
use self::finder::LatexFinder;
pub use self::finder::LatexNode;
use self::lexer::LatexLexer;
use self::parser::LatexParser;
use super::language::*;
use super::text::SyntaxNode;
use lsp_types::{Position, Range, Uri};
use path_clean::PathClean;
use std::path::PathBuf;
use std::sync::Arc;
#[derive(Debug, Default)]
struct LatexCommandAnalyzer {
commands: Vec<Arc<LatexCommand>>,
}
impl LatexCommandAnalyzer {
fn parse(root: Arc<LatexRoot>) -> Vec<Arc<LatexCommand>> {
let mut analyzer = Self::default();
analyzer.visit_root(root);
analyzer.commands
}
}
// Only `visit_command` records anything; the remaining methods just keep
// walking so that commands nested in groups, text runs, etc. are reached.
impl LatexVisitor for LatexCommandAnalyzer {
    fn visit_root(&mut self, root: Arc<LatexRoot>) {
        LatexWalker::walk_root(self, root);
    }

    fn visit_group(&mut self, group: Arc<LatexGroup>) {
        LatexWalker::walk_group(self, group);
    }

    fn visit_command(&mut self, command: Arc<LatexCommand>) {
        // Record the command, then descend into its arguments for nested ones.
        self.commands.push(Arc::clone(&command));
        LatexWalker::walk_command(self, command);
    }

    fn visit_text(&mut self, text: Arc<LatexText>) {
        LatexWalker::walk_text(self, text);
    }

    fn visit_comma(&mut self, comma: Arc<LatexComma>) {
        LatexWalker::walk_comma(self, comma);
    }

    fn visit_math(&mut self, math: Arc<LatexMath>) {
        LatexWalker::walk_math(self, math);
    }
}
/// One half of an environment: a `\begin{...}` or `\end{...}` command.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexEnvironmentDelimiter {
    pub command: Arc<LatexCommand>,
}

impl LatexEnvironmentDelimiter {
    /// The environment name token (the word in the first argument), if any.
    pub fn name(&self) -> Option<&LatexToken> {
        self.command.extract_word(0)
    }

    /// Whether this delimiter names one of the known math environments.
    pub fn is_math(&self) -> bool {
        self.name().map_or(false, |name| {
            LANGUAGE_DATA
                .math_environments
                .iter()
                .any(|env| env == name.text())
        })
    }
}

impl SyntaxNode for LatexEnvironmentDelimiter {
    fn range(&self) -> Range {
        self.command.range()
    }
}
/// A matched `\begin{...}`/`\end{...}` pair.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexEnvironment {
    pub left: LatexEnvironmentDelimiter,
    pub right: LatexEnvironmentDelimiter,
}

impl LatexEnvironment {
    /// Whether either delimiter names the top-level `document` environment.
    pub fn is_root(&self) -> bool {
        self.left
            .name()
            .iter()
            .chain(self.right.name().iter())
            .any(|name| name.text() == "document")
    }

    /// Pairs `\begin`/`\end` commands using a stack so nested environments
    /// match innermost-first. An unmatched `\end` is dropped; an unmatched
    /// `\begin` simply stays on the stack.
    fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
        let mut stack = Vec::new();
        let mut environments = Vec::new();
        for command in commands {
            if let Some(delimiter) = Self::parse_delimiter(command) {
                if delimiter.command.name.text() == "\\begin" {
                    stack.push(delimiter);
                } else if let Some(begin) = stack.pop() {
                    environments.push(Self {
                        left: begin,
                        right: delimiter,
                    });
                }
            }
        }
        environments
    }

    /// Returns a delimiter if `command` is a `\begin`/`\end` with at least
    /// one argument; `None` otherwise.
    fn parse_delimiter(command: &Arc<LatexCommand>) -> Option<LatexEnvironmentDelimiter> {
        if command.name.text() != "\\begin" && command.name.text() != "\\end" {
            return None;
        }

        // `is_empty()` instead of `len() == 0` (idiomatic; clippy::len_zero).
        if command.args.is_empty() {
            return None;
        }

        // Accept a plain word name, or an argument that is empty / not yet
        // closed — presumably to tolerate documents that are still being
        // typed; confirm against callers.
        if command.has_word(0)
            || command.args[0].children.is_empty()
            || command.args[0].right.is_none()
        {
            Some(LatexEnvironmentDelimiter {
                command: Arc::clone(&command),
            })
        } else {
            None
        }
    }
}

impl SyntaxNode for LatexEnvironment {
    fn range(&self) -> Range {
        Range::new(self.left.start(), self.right.end())
    }
}
/// A citation command (`\cite`-like) referencing one or more BibTeX keys.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexCitation {
    pub command: Arc<LatexCommand>,
    pub index: usize,
}

impl LatexCitation {
    /// The cited keys (comma separated in the source).
    pub fn keys(&self) -> Vec<&LatexToken> {
        self.command.extract_comma_separated_words(0)
    }

    /// Collects every command that matches a known citation command and
    /// carries a key list at the expected argument index.
    fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
        let mut citations = Vec::new();
        for command in commands {
            for LatexCitationCommand { name, index } in &LANGUAGE_DATA.citation_commands {
                if command.name.text() != name || !command.has_comma_separated_words(*index) {
                    continue;
                }
                citations.push(Self {
                    command: Arc::clone(command),
                    index: *index,
                });
            }
        }
        citations
    }
}

impl SyntaxNode for LatexCitation {
    fn range(&self) -> Range {
        self.command.range()
    }
}
/// A label definition or reference (`\label`, `\ref`, ...).
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexLabel {
    pub command: Arc<LatexCommand>,
    index: usize,
    pub kind: LatexLabelKind,
}

impl LatexLabel {
    /// The label name tokens (comma separated in the source).
    pub fn names(&self) -> Vec<&LatexToken> {
        self.command.extract_comma_separated_words(self.index)
    }

    /// Collects every command that matches a known label command and carries
    /// names at the expected argument index.
    fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
        let mut labels = Vec::new();
        for command in commands {
            for LatexLabelCommand { name, index, kind } in &LANGUAGE_DATA.label_commands {
                if command.name.text() != name || !command.has_comma_separated_words(*index) {
                    continue;
                }
                labels.push(Self {
                    command: Arc::clone(command),
                    index: *index,
                    kind: *kind,
                });
            }
        }
        labels
    }
}
impl SyntaxNode for LatexLabel {
    /// Delegates to the accessor for consistency: every other `SyntaxNode`
    /// impl in this module calls `self.command.range()`, while this one
    /// read the field directly.
    fn range(&self) -> Range {
        self.command.range()
    }
}
/// A sectioning command (`\section`, `\subsection`, ...) with its nesting level.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexSection {
    pub command: Arc<LatexCommand>,
    pub index: usize,
    pub level: i32,
}

impl LatexSection {
    /// Collects every command matching a known section command that has the
    /// title argument present.
    fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
        let mut sections = Vec::new();
        for command in commands {
            for LatexSectionCommand { name, index, level } in &LANGUAGE_DATA.section_commands {
                if command.name.text() != name || command.args.len() <= *index {
                    continue;
                }
                sections.push(Self {
                    command: Arc::clone(command),
                    index: *index,
                    level: *level,
                })
            }
        }
        sections
    }
}

impl SyntaxNode for LatexSection {
    fn range(&self) -> Range {
        self.command.range()
    }
}
/// An include-like command (`\usepackage`, `\input`, `\addbibresource`, ...)
/// together with the candidate URIs of the files it may refer to.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexInclude {
    pub command: Arc<LatexCommand>,
    // Index of the argument that holds the comma-separated path list.
    pub index: usize,
    pub kind: LatexIncludeKind,
    // One inner Vec per path token: the bare path plus one URI per known
    // extension of the include kind.
    pub all_targets: Vec<Vec<Uri>>,
    pub include_extension: bool,
}

impl LatexInclude {
    /// The raw path tokens of this include.
    pub fn paths(&self) -> Vec<&LatexToken> {
        self.command.extract_comma_separated_words(self.index)
    }

    /// File names of the LaTeX components (packages/classes) this include
    /// pulls in; empty for all other include kinds.
    pub fn components(&self) -> Vec<String> {
        let mut components = Vec::new();
        for path in self.paths() {
            match self.kind {
                LatexIncludeKind::Package => components.push(format!("{}.sty", path.text())),
                LatexIncludeKind::Class => components.push(format!("{}.cls", path.text())),
                LatexIncludeKind::Latex
                | LatexIncludeKind::Bibliography
                | LatexIncludeKind::Image
                | LatexIncludeKind::Svg
                | LatexIncludeKind::Pdf
                | LatexIncludeKind::Everything => (),
            }
        }
        components
    }

    /// Collects all includes, resolving target paths relative to `uri`.
    fn parse(uri: &Uri, commands: &[Arc<LatexCommand>]) -> Vec<Self> {
        let mut includes = Vec::new();
        for command in commands {
            for description in &LANGUAGE_DATA.include_commands {
                if let Some(include) = Self::parse_single(uri, &command, &description) {
                    includes.push(include);
                }
            }
        }
        includes
    }

    /// Builds an include for `command` if it matches `description`.
    /// Returns `None` when the command does not match, when `uri` is not a
    /// file URI, or when a path cannot be represented.
    fn parse_single(
        uri: &Uri,
        command: &Arc<LatexCommand>,
        description: &LatexIncludeCommand,
    ) -> Option<Self> {
        if command.name.text() != description.name {
            return None;
        }
        if command.args.len() <= description.index {
            return None;
        }
        let mut all_targets = Vec::new();
        for relative_path in command.extract_comma_separated_words(description.index) {
            // Resolve relative to the directory of the including document.
            let mut path = uri.to_file_path().ok()?;
            path.pop();
            path.push(relative_path.text());
            // Normalize to forward slashes before cleaning `.`/`..` segments.
            // NOTE(review): `to_string_lossy` mangles non-UTF-8 paths —
            // presumably acceptable for workspace files; confirm.
            path = PathBuf::from(path.to_string_lossy().into_owned().replace('\\', "/"));
            path = path.clean();
            let path = path.to_str()?.to_owned();
            let mut targets = Vec::new();
            // The literal path first, then one candidate per known extension.
            targets.push(Uri::from_file_path(&path).ok()?);
            if let Some(extensions) = description.kind.extensions() {
                for extension in extensions {
                    let path = format!("{}.{}", &path, extension);
                    targets.push(Uri::from_file_path(&path).ok()?);
                }
            }
            all_targets.push(targets);
        }
        let include = Self {
            command: Arc::clone(command),
            index: description.index,
            kind: description.kind,
            all_targets,
            include_extension: description.include_extension,
        };
        Some(include)
    }
}

impl SyntaxNode for LatexInclude {
    fn range(&self) -> Range {
        self.command.range()
    }
}
/// A display-math span delimited by `\[`/`\]` (or `\(`/`\)`).
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexEquation {
    pub left: Arc<LatexCommand>,
    pub right: Arc<LatexCommand>,
}

impl LatexEquation {
    /// Pairs each closing delimiter with the most recent unmatched opening
    /// delimiter; a closer without an opener is ignored.
    fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
        let mut equations = Vec::new();
        let mut open = None;
        for command in commands {
            match command.name.text() {
                "\\[" | "\\(" => open = Some(command),
                "\\]" | "\\)" => {
                    if let Some(begin) = open.take() {
                        equations.push(Self {
                            left: Arc::clone(begin),
                            right: Arc::clone(command),
                        });
                    }
                }
                _ => {}
            }
        }
        equations
    }
}

impl SyntaxNode for LatexEquation {
    fn range(&self) -> Range {
        Range::new(self.left.start(), self.right.end())
    }
}
/// An inline-math span delimited by a pair of math tokens.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexInline {
    pub left: Arc<LatexMath>,
    pub right: Arc<LatexMath>,
}

impl LatexInline {
    /// Collects inline-math pairs by walking the whole tree.
    fn parse(root: Arc<LatexRoot>) -> Vec<Self> {
        let mut pairer = LatexInlineAnalyzer::default();
        pairer.visit_root(root);
        pairer.inlines
    }
}

impl SyntaxNode for LatexInline {
    fn range(&self) -> Range {
        Range::new(self.left.start(), self.right.end())
    }
}
/// Visitor that pairs consecutive math tokens into `LatexInline` spans.
#[derive(Debug, Default)]
struct LatexInlineAnalyzer {
    inlines: Vec<LatexInline>,
    // The unmatched opening math token, if the last math token opened a span.
    left: Option<Arc<LatexMath>>,
}

impl LatexVisitor for LatexInlineAnalyzer {
    fn visit_root(&mut self, root: Arc<LatexRoot>) {
        LatexWalker::walk_root(self, root);
    }

    fn visit_group(&mut self, group: Arc<LatexGroup>) {
        LatexWalker::walk_group(self, group);
    }

    fn visit_command(&mut self, command: Arc<LatexCommand>) {
        LatexWalker::walk_command(self, command);
    }

    fn visit_text(&mut self, text: Arc<LatexText>) {
        LatexWalker::walk_text(self, text);
    }

    fn visit_comma(&mut self, comma: Arc<LatexComma>) {
        LatexWalker::walk_comma(self, comma);
    }

    fn visit_math(&mut self, math: Arc<LatexMath>) {
        // Every second math token closes the span opened by the previous one.
        match self.left.take() {
            Some(left) => self.inlines.push(LatexInline {
                left,
                right: Arc::clone(&math),
            }),
            None => self.left = Some(Arc::clone(&math)),
        }
        LatexWalker::walk_math(self, math);
    }
}
/// A math operator declared via a `\DeclareMathOperator`-like command.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexMathOperator {
    pub command: Arc<LatexCommand>,
    // The command being defined (the operator name).
    pub definition: Arc<LatexCommand>,
    pub definition_index: usize,
    pub implementation_index: usize,
}

impl LatexMathOperator {
    /// Collects every command matching a known operator-definition command
    /// whose definition and implementation arguments are both present.
    fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
        let mut operators = Vec::new();
        for command in commands {
            for LatexMathOperatorCommand {
                name,
                definition_index,
                implementation_index,
            } in &LANGUAGE_DATA.math_operator_commands
            {
                if command.name.text() == name
                    && command.args.len() > *definition_index
                    && command.args.len() > *implementation_index
                {
                    // NOTE(review): reads `args[0]` rather than
                    // `args[*definition_index]` — confirm this is intended
                    // for commands whose definition index is not 0.
                    let definition = command.args[0].children.iter().next();
                    if let Some(LatexContent::Command(definition)) = definition {
                        operators.push(Self {
                            command: Arc::clone(command),
                            definition: Arc::clone(definition),
                            definition_index: *definition_index,
                            implementation_index: *implementation_index,
                        })
                    }
                }
            }
        }
        operators
    }
}

impl SyntaxNode for LatexMathOperator {
    fn range(&self) -> Range {
        self.command.range()
    }
}
/// A user command definition (`\newcommand`-like).
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexCommandDefinition {
    pub command: Arc<LatexCommand>,
    // The command being defined.
    pub definition: Arc<LatexCommand>,
    pub definition_index: usize,
    // The argument group holding the replacement text.
    pub implementation: Arc<LatexGroup>,
    pub implementation_index: usize,
    pub argument_count_index: usize,
}

impl LatexCommandDefinition {
    /// Collects every command matching a known definition command whose
    /// definition and implementation arguments are both present.
    fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
        let mut definitions = Vec::new();
        for command in commands {
            for LatexCommandDefinitionCommand {
                name,
                definition_index,
                argument_count_index,
                implementation_index,
            } in &LANGUAGE_DATA.command_definition_commands
            {
                if command.name.text() == name
                    && command.args.len() > *definition_index
                    && command.args.len() > *implementation_index
                {
                    // NOTE(review): reads `args[0]` rather than
                    // `args[*definition_index]` — confirm this is intended
                    // for commands whose definition index is not 0.
                    let definition = command.args[0].children.iter().next();
                    if let Some(LatexContent::Command(definition)) = definition {
                        definitions.push(Self {
                            command: Arc::clone(command),
                            definition: Arc::clone(definition),
                            definition_index: *definition_index,
                            implementation: Arc::clone(&command.args[*implementation_index]),
                            implementation_index: *implementation_index,
                            argument_count_index: *argument_count_index,
                        })
                    }
                }
            }
        }
        definitions
    }
}

impl SyntaxNode for LatexCommandDefinition {
    fn range(&self) -> Range {
        self.command.range()
    }
}
/// A theorem environment definition (`\newtheorem`-like).
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexTheoremDefinition {
    pub command: Arc<LatexCommand>,
    pub index: usize,
}

impl LatexTheoremDefinition {
    /// The name of the defined theorem environment.
    ///
    /// The `unwrap` cannot fail for instances built by `parse`, which only
    /// accepts commands where `has_word(index)` holds.
    pub fn name(&self) -> &LatexToken {
        self.command.extract_word(self.index).unwrap()
    }

    fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
        let mut definitions = Vec::new();
        for command in commands {
            for LatexTheoremDefinitionCommand { name, index } in
                &LANGUAGE_DATA.theorem_definition_commands
            {
                if command.name.text() != name || !command.has_word(*index) {
                    continue;
                }
                definitions.push(Self {
                    command: Arc::clone(&command),
                    index: *index,
                });
            }
        }
        definitions
    }
}

impl SyntaxNode for LatexTheoremDefinition {
    fn range(&self) -> Range {
        self.command.range()
    }
}
/// A `\caption{...}` command.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexCaption {
    pub command: Arc<LatexCommand>,
    // Index of the caption-text argument (always 0).
    pub index: usize,
}

impl LatexCaption {
    /// Collects every `\caption` command that has at least one argument.
    fn parse(commands: &[Arc<LatexCommand>]) -> Vec<Self> {
        let mut captions = Vec::new();
        for command in commands {
            // `!is_empty()` instead of `len() > 0` (idiomatic; clippy::len_zero).
            if command.name.text() == "\\caption" && !command.args.is_empty() {
                captions.push(Self {
                    command: Arc::clone(&command),
                    index: 0,
                });
            }
        }
        captions
    }
}

impl SyntaxNode for LatexCaption {
    fn range(&self) -> Range {
        self.command.range()
    }
}
/// A parsed LaTeX document together with the results of all analyses,
/// computed eagerly at parse time.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct LatexSyntaxTree {
    pub root: Arc<LatexRoot>,
    // All commands in document order.
    pub commands: Vec<Arc<LatexCommand>>,
    pub includes: Vec<LatexInclude>,
    // Component file names (e.g. "amsmath.sty") derived from the includes.
    pub components: Vec<String>,
    pub environments: Vec<LatexEnvironment>,
    // True if the document contains a `document` environment.
    pub is_standalone: bool,
    pub labels: Vec<LatexLabel>,
    pub sections: Vec<LatexSection>,
    pub citations: Vec<LatexCitation>,
    pub equations: Vec<LatexEquation>,
    pub inlines: Vec<LatexInline>,
    pub math_operators: Vec<LatexMathOperator>,
    pub command_definitions: Vec<LatexCommandDefinition>,
    pub theorem_definitions: Vec<LatexTheoremDefinition>,
    pub captions: Vec<LatexCaption>,
}
impl LatexSyntaxTree {
    /// Lexes and parses `text`, then runs every analysis pass.
    /// `uri` is needed to resolve include targets relative to the document.
    pub fn parse(uri: &Uri, text: &str) -> Self {
        let lexer = LatexLexer::new(text);
        let mut parser = LatexParser::new(lexer);
        let root = Arc::new(parser.root());
        let commands = LatexCommandAnalyzer::parse(Arc::clone(&root));
        let includes = LatexInclude::parse(uri, &commands);
        let components = includes.iter().flat_map(LatexInclude::components).collect();
        let environments = LatexEnvironment::parse(&commands);
        let is_standalone = environments.iter().any(LatexEnvironment::is_root);
        let labels = LatexLabel::parse(&commands);
        let sections = LatexSection::parse(&commands);
        let citations = LatexCitation::parse(&commands);
        let equations = LatexEquation::parse(&commands);
        let inlines = LatexInline::parse(Arc::clone(&root));
        let math_operators = LatexMathOperator::parse(&commands);
        let command_definitions = LatexCommandDefinition::parse(&commands);
        let theorem_definitions = LatexTheoremDefinition::parse(&commands);
        let captions = LatexCaption::parse(&commands);
        Self {
            root,
            commands,
            includes,
            components,
            environments,
            is_standalone,
            labels,
            sections,
            citations,
            equations,
            inlines,
            math_operators,
            command_definitions,
            theorem_definitions,
            captions,
        }
    }

    /// Returns all nodes that contain `position`, in the order produced by
    /// the finder visitor.
    pub fn find(&self, position: Position) -> Vec<LatexNode> {
        let mut finder = LatexFinder::new(position);
        finder.visit_root(Arc::clone(&self.root));
        finder.results
    }

    /// Returns the command whose *name token* contains `position`.
    /// A cursor placed directly before the backslash is excluded
    /// (start character must differ from the cursor character).
    pub fn find_command_by_name(&self, position: Position) -> Option<Arc<LatexCommand>> {
        for result in self.find(position) {
            if let LatexNode::Command(command) = result {
                if command.name.range().contains(position)
                    && command.name.start().character != position.character
                {
                    return Some(command);
                }
            }
        }
        None
    }
}

View file

@ -1,135 +0,0 @@
use super::ast::*;
use std::iter::Peekable;
use std::sync::Arc;
/// The delimiter context the parser is currently inside of.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum LatexScope {
    // Top level of the document.
    Root,
    // Inside a `{ ... }` group.
    Group,
    // Inside a `[ ... ]` options group.
    Options,
}
/// Recursive-descent parser over a peekable stream of LaTeX tokens.
pub struct LatexParser<I: Iterator<Item = LatexToken>> {
    tokens: Peekable<I>,
}
impl<I: Iterator<Item = LatexToken>> LatexParser<I> {
    pub fn new(tokens: I) -> Self {
        LatexParser {
            tokens: tokens.peekable(),
        }
    }

    /// Parses the entire token stream into a root node.
    pub fn root(&mut self) -> LatexRoot {
        let children = self.content(LatexScope::Root);
        LatexRoot::new(children)
    }

    /// Parses content nodes until the current scope closes or the stream is
    /// exhausted. Never consumes the token that closes the scope.
    fn content(&mut self, scope: LatexScope) -> Vec<LatexContent> {
        let mut children = Vec::new();
        while let Some(ref token) = self.tokens.peek() {
            match token.kind {
                LatexTokenKind::Word | LatexTokenKind::BeginOptions => {
                    children.push(LatexContent::Text(self.text(scope)));
                }
                LatexTokenKind::Command => {
                    children.push(LatexContent::Command(self.command()));
                }
                LatexTokenKind::Comma => {
                    let node = LatexComma::new(self.tokens.next().unwrap());
                    children.push(LatexContent::Comma(Arc::new(node)));
                }
                LatexTokenKind::Math => {
                    children.push(LatexContent::Math(self.math()));
                }
                LatexTokenKind::BeginGroup => {
                    children.push(LatexContent::Group(self.group(LatexGroupKind::Group)));
                }
                LatexTokenKind::EndGroup => {
                    // A stray `}` at the root is skipped; inside any other
                    // scope it terminates that scope.
                    if scope == LatexScope::Root {
                        self.tokens.next();
                    } else {
                        return children;
                    }
                }
                LatexTokenKind::EndOptions => {
                    // `]` only closes an options scope; elsewhere it is
                    // treated as ordinary text.
                    if scope == LatexScope::Options {
                        return children;
                    } else {
                        children.push(LatexContent::Text(self.text(scope)));
                    }
                }
            }
        }
        children
    }

    /// Parses a command token followed by any number of `{...}` argument
    /// groups and `[...]` option groups, in any order.
    fn command(&mut self) -> Arc<LatexCommand> {
        let name = self.tokens.next().unwrap();
        let mut options = Vec::new();
        let mut args = Vec::new();
        while let Some(token) = self.tokens.peek() {
            match token.kind {
                LatexTokenKind::BeginGroup => {
                    args.push(self.group(LatexGroupKind::Group));
                }
                LatexTokenKind::BeginOptions => {
                    options.push(self.group(LatexGroupKind::Options));
                }
                _ => {
                    break;
                }
            }
        }
        Arc::new(LatexCommand::new(name, options, args))
    }

    /// Parses a `{...}` or `[...]` group. The closing delimiter is optional
    /// so that incomplete documents still yield a tree (`right` is `None`).
    fn group(&mut self, kind: LatexGroupKind) -> Arc<LatexGroup> {
        let left = self.tokens.next().unwrap();
        let scope = match kind {
            LatexGroupKind::Group => LatexScope::Group,
            LatexGroupKind::Options => LatexScope::Options,
        };
        let children = self.content(scope);
        let right_kind = match kind {
            LatexGroupKind::Group => LatexTokenKind::EndGroup,
            LatexGroupKind::Options => LatexTokenKind::EndOptions,
        };
        let right = if self.next_of_kind(right_kind) {
            self.tokens.next()
        } else {
            None
        };
        Arc::new(LatexGroup::new(left, children, right, kind))
    }

    /// Parses a run of word-like tokens into one text node. An `]` outside
    /// an options scope is absorbed as text as well.
    fn text(&mut self, scope: LatexScope) -> Arc<LatexText> {
        let mut words = Vec::new();
        while let Some(ref token) = self.tokens.peek() {
            let kind = token.kind;
            let opts = kind == LatexTokenKind::EndOptions && scope != LatexScope::Options;
            if kind == LatexTokenKind::Word || kind == LatexTokenKind::BeginOptions || opts {
                words.push(self.tokens.next().unwrap());
            } else {
                break;
            }
        }
        Arc::new(LatexText::new(words))
    }

    /// Consumes a single math token.
    fn math(&mut self) -> Arc<LatexMath> {
        let token = self.tokens.next().unwrap();
        Arc::new(LatexMath::new(token))
    }

    /// Whether the next token has the given kind (without consuming it).
    fn next_of_kind(&mut self, kind: LatexTokenKind) -> bool {
        if let Some(ref token) = self.tokens.peek() {
            token.kind == kind
        } else {
            false
        }
    }
}

View file

@ -1,49 +0,0 @@
mod bibtex;
mod language;
mod latex;
mod text;
pub use self::bibtex::*;
pub use self::language::*;
pub use self::latex::*;
pub use self::text::*;
use lsp_types::Uri;
/// The document languages understood by the server.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum Language {
    Latex,
    Bibtex,
}

impl Language {
    /// Determines the language from a file extension, case-insensitively.
    pub fn by_extension(extension: &str) -> Option<Self> {
        match extension.to_lowercase().as_ref() {
            "tex" | "sty" | "cls" | "lco" => Some(Self::Latex),
            "bib" => Some(Self::Bibtex),
            _ => None,
        }
    }

    /// Determines the language from an LSP `languageId` (case-sensitive).
    pub fn by_language_id(language_id: &str) -> Option<Self> {
        match language_id {
            "latex" | "tex" => Some(Self::Latex),
            "bibtex" | "bib" => Some(Self::Bibtex),
            _ => None,
        }
    }
}
/// A parsed document of either supported language.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum SyntaxTree {
    Latex(LatexSyntaxTree),
    Bibtex(BibtexSyntaxTree),
}

impl SyntaxTree {
    /// Parses `text` as the given `language`.
    /// `uri` is only used by the LaTeX parser (to resolve includes).
    pub fn parse(uri: &Uri, text: &str, language: Language) -> Self {
        match language {
            Language::Latex => SyntaxTree::Latex(LatexSyntaxTree::parse(uri, text)),
            Language::Bibtex => SyntaxTree::Bibtex(text.into()),
        }
    }
}

View file

@ -1,257 +0,0 @@
use lsp_types::{Position, Range};
use std::iter::Peekable;
use std::str::CharIndices;
/// Anything that occupies a contiguous range of a document.
pub trait SyntaxNode {
    fn range(&self) -> Range;

    /// Start position of `range()`.
    fn start(&self) -> Position {
        self.range().start
    }

    /// End position of `range()`.
    fn end(&self) -> Position {
        self.range().end
    }
}
/// A piece of text together with its range in the document.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Span {
    pub range: Range,
    pub text: String,
}

impl Span {
    /// Creates a span covering `range` with the given `text`.
    pub fn new(range: Range, text: String) -> Self {
        Self { range, text }
    }
}

impl SyntaxNode for Span {
    fn range(&self) -> Range {
        self.range
    }
}
/// A character iterator over `text` that tracks both the byte index and the
/// line/character position, and can record spans between two marks.
pub struct CharStream<'a> {
    text: &'a str,
    chars: Peekable<CharIndices<'a>>,
    // Position/index just past the most recently consumed character.
    current_position: Position,
    current_index: usize,
    // Snapshot taken by `start_span`.
    start_position: Position,
    start_index: usize,
}

impl<'a> CharStream<'a> {
    pub fn new(text: &'a str) -> Self {
        CharStream {
            text,
            chars: text.char_indices().peekable(),
            current_position: Position::new(0, 0),
            current_index: 0,
            start_position: Position::new(0, 0),
            start_index: 0,
        }
    }

    /// Returns the next character without consuming it.
    pub fn peek(&mut self) -> Option<char> {
        self.chars.peek().map(|(_, c)| *c)
    }

    /// Whether a next character exists and satisfies `predicate`.
    /// NOTE(review): "satifies" is a typo for "satisfies"; the name is kept
    /// because external callers use it.
    pub fn satifies<P: FnOnce(&char) -> bool>(&mut self, predicate: P) -> bool {
        self.peek().filter(predicate).is_some()
    }

    /// Consumes characters up to and including the next `\n`
    /// (or to the end of input).
    pub fn skip_rest_of_line(&mut self) {
        loop {
            match self.peek() {
                Some('\n') => {
                    self.next();
                    break;
                }
                Some(_) => {
                    self.next();
                }
                None => {
                    break;
                }
            }
        }
    }

    /// Marks the current location as the start of the next span.
    pub fn start_span(&mut self) {
        self.start_index = self.current_index;
        self.start_position = self.current_position;
    }

    /// Returns the span from the last `start_span` to the current location.
    pub fn end_span(&mut self) -> Span {
        let range = Range::new(self.start_position, self.current_position);
        let text = &self.text[self.start_index..self.current_index];
        Span::new(range, text.to_owned())
    }

    /// Advances until the tracked position reaches `position`, stopping at
    /// the end of input.
    pub fn seek(&mut self, position: Position) {
        while self.current_position < position {
            // Fix: bail out at end of input. Previously a `position` past
            // the end looped forever, because `next()` returned `None`
            // without ever advancing `current_position`.
            if self.next().is_none() {
                break;
            }
        }
    }

    /// Consumes a command token starting at the current `\`.
    /// Reads the backslash, then a run of command characters; if no letters
    /// followed (an escaped symbol like `\%`) or the next char is `*`
    /// (a starred command), that one extra character is included too.
    pub fn command(&mut self) -> Span {
        self.start_span();
        self.next();
        let mut escape = true;
        while self.satifies(|c| is_command_char(*c)) {
            self.next();
            escape = false;
        }
        if let Some(c) = self.peek() {
            if c != '\r' && c != '\n' && (escape || c == '*') {
                self.next();
            }
        }
        self.end_span()
    }

    /// Advances the line/character position for the consumed character `c`.
    fn update_position(&mut self, c: char) {
        if c == '\n' {
            self.current_position.line += 1;
            self.current_position.character = 0;
        } else {
            self.current_position.character += 1;
        }
    }

    /// Returns the text covered by `range` inside `text`.
    pub fn extract(text: &'a str, range: Range) -> String {
        let mut stream = Self::new(text);
        stream.seek(range.start);
        stream.start_span();
        stream.seek(range.end);
        stream.end_span().text
    }
}
impl<'a> Iterator for CharStream<'a> {
    type Item = char;

    /// Consumes one character, updating the byte index and position.
    fn next(&mut self) -> Option<char> {
        let (index, c) = self.chars.next()?;
        self.current_index = index + c.len_utf8();
        self.update_position(c);
        Some(c)
    }
}
/// Whether `c` may appear in a LaTeX command name: ASCII letters plus `@`
/// (used by internal package macros). Uses `is_ascii_alphabetic` instead of
/// the manual range comparisons.
fn is_command_char(c: char) -> bool {
    c.is_ascii_alphabetic() || c == '@'
}
// Unit tests for `CharStream`: peeking, span recording (including UTF-8
// multi-byte handling), line skipping, seeking, and command tokenization.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_peek() {
        let mut stream = CharStream::new("ab\nc");
        assert_eq!(Some('a'), stream.peek());
        assert_eq!(Some('a'), stream.next());
        assert_eq!(Some('b'), stream.peek());
        assert_eq!(Some('b'), stream.next());
        assert_eq!(Some('\n'), stream.peek());
        assert_eq!(Some('\n'), stream.next());
        assert_eq!(Some('c'), stream.peek());
        assert_eq!(Some('c'), stream.next());
        assert_eq!(None, stream.peek());
        assert_eq!(None, stream.next());
    }

    #[test]
    fn test_span() {
        let mut stream = CharStream::new("abc\ndef");
        stream.next();
        stream.start_span();
        stream.next();
        stream.next();
        let span = stream.end_span();
        assert_eq!(
            Span::new(Range::new_simple(0, 1, 0, 3), "bc".to_owned()),
            span
        );
        assert_eq!(Position::new(0, 1), span.start());
        assert_eq!(Position::new(0, 3), span.end());
    }

    #[test]
    fn test_span_unicode() {
        // Characters outside the BMP: span columns count characters, while
        // the extracted text slices on byte indices.
        let mut stream = CharStream::new("😀😃😄😁");
        stream.next();
        stream.start_span();
        stream.next();
        stream.next();
        let span = stream.end_span();
        assert_eq!(
            Span::new(Range::new_simple(0, 1, 0, 3), "😃😄".to_owned()),
            span
        );
    }

    #[test]
    fn test_satifies() {
        let mut stream = CharStream::new("aBc");
        assert_eq!(true, stream.satifies(|c| c.is_lowercase()));
        stream.next();
        assert_eq!(false, stream.satifies(|c| c.is_lowercase()));
    }

    #[test]
    fn test_skip_rest_of_line() {
        let mut stream = CharStream::new("abc\ndef");
        stream.skip_rest_of_line();
        assert_eq!(Some('d'), stream.next());
        stream.skip_rest_of_line();
        assert_eq!(None, stream.next());
        stream.skip_rest_of_line();
        assert_eq!(None, stream.next());
    }

    #[test]
    fn test_seek() {
        let mut stream = CharStream::new("abc\ndefghi");
        let pos = Position::new(1, 2);
        stream.seek(pos);
        assert_eq!(Some('f'), stream.peek());
    }

    #[test]
    fn test_command_basic() {
        let mut stream = CharStream::new("\\foo@bar");
        let span = stream.command();
        assert_eq!(
            Span::new(Range::new_simple(0, 0, 0, 8), "\\foo@bar".to_owned()),
            span
        );
    }

    #[test]
    fn test_command_star() {
        // A trailing `*` belongs to the command (starred variants).
        let mut stream = CharStream::new("\\foo*");
        let span = stream.command();
        assert_eq!(
            Span::new(Range::new_simple(0, 0, 0, 5), "\\foo*".to_owned()),
            span
        );
    }

    #[test]
    fn test_command_escape() {
        // An escaped symbol consumes exactly one character after `\`.
        let mut stream = CharStream::new("\\**");
        let span = stream.command();
        assert_eq!(
            Span::new(Range::new_simple(0, 0, 0, 2), "\\*".to_owned()),
            span
        );
    }
}

View file

@ -1,14 +0,0 @@
[package]
name = "texlab-workspace"
version = "0.1.0"
authors = [
"Eric Förster <efoerster@users.noreply.github.com>",
"Patrick Förster <pfoerster@users.noreply.github.com>"]
edition = "2018"
[dependencies]
futures-boxed = { path = "../futures_boxed" }
futures-preview = { version = "0.3.0-alpha.17", features = ["compat"] }
log = "0.4.6"
lsp-types = { git = "https://github.com/latex-lsp/lsp-types", rev = "9fcc5d9b9d3013ce84e20ef566267754d594b268", features = ["proposed"] }
texlab-syntax = { path = "../texlab_syntax" }

View file

@ -1,216 +0,0 @@
use super::{Document, DocumentView, Workspace, WorkspaceBuilder};
use futures_boxed::boxed;
use lsp_types::*;
use std::sync::Arc;
/// An LSP request paired with the workspace state it should run against.
#[derive(Debug, PartialEq, Clone)]
pub struct FeatureRequest<P> {
    pub params: P,
    pub view: DocumentView,
    pub client_capabilities: Arc<ClientCapabilities>,
}

impl<P> FeatureRequest<P> {
    /// The workspace snapshot of the view.
    pub fn workspace(&self) -> &Workspace {
        &self.view.workspace
    }

    /// The document the request refers to.
    pub fn document(&self) -> &Document {
        &self.view.document
    }

    /// All documents reachable from `document()` via include edges.
    pub fn related_documents(&self) -> &[Arc<Document>] {
        &self.view.related_documents
    }
}
/// A single language feature (completion, folding, links, ...).
/// Implementations receive the request plus its document view and produce
/// the response payload. `#[boxed]` turns the async fn into a boxed future
/// so the trait stays object-safe.
pub trait FeatureProvider {
    type Params;
    type Output;

    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<Self::Params>) -> Self::Output;
}
/// A boxed provider producing a list of items.
/// `dyn` added: bare trait objects are deprecated in edition 2018,
/// which this crate uses.
type ListProvider<P, O> = Box<dyn FeatureProvider<Params = P, Output = Vec<O>> + Send + Sync>;

/// Combines several list providers into one by concatenating their results.
#[derive(Default)]
pub struct ConcatProvider<P, O> {
    providers: Vec<ListProvider<P, O>>,
}

impl<P, O> ConcatProvider<P, O> {
    pub fn new(providers: Vec<ListProvider<P, O>>) -> Self {
        Self { providers }
    }
}

impl<P, O> FeatureProvider for ConcatProvider<P, O>
where
    P: Send + Sync,
    O: Send + Sync,
{
    type Params = P;
    type Output = Vec<O>;

    /// Runs every provider in order and appends all results.
    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<P>) -> Vec<O> {
        let mut items = Vec::new();
        for provider in &self.providers {
            items.append(&mut provider.execute(request).await);
        }
        items
    }
}
/// A boxed provider producing at most one item.
/// `dyn` added: bare trait objects are deprecated in edition 2018,
/// which this crate uses.
type OptionProvider<P, O> = Box<dyn FeatureProvider<Params = P, Output = Option<O>> + Send + Sync>;

/// Tries several providers in order and returns the first `Some` result.
#[derive(Default)]
pub struct ChoiceProvider<P, O> {
    providers: Vec<OptionProvider<P, O>>,
}

impl<P, O> ChoiceProvider<P, O> {
    pub fn new(providers: Vec<OptionProvider<P, O>>) -> Self {
        Self { providers }
    }
}

impl<P, O> FeatureProvider for ChoiceProvider<P, O>
where
    P: Send + Sync,
    O: Send + Sync,
{
    type Params = P;
    type Output = Option<O>;

    /// Short-circuits on the first provider that yields a result.
    #[boxed]
    async fn execute<'a>(&'a self, request: &'a FeatureRequest<P>) -> Option<O> {
        for provider in &self.providers {
            let item = provider.execute(request).await;
            if item.is_some() {
                return item;
            }
        }
        None
    }
}
/// A named in-memory file used to assemble test workspaces.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct FeatureSpecFile {
    name: &'static str,
    text: &'static str,
}
/// Declarative description of a feature test: the files of the workspace,
/// the main file, and the request parameters to synthesize.
#[derive(Debug, PartialEq, Clone, Default)]
pub struct FeatureSpec {
    pub files: Vec<FeatureSpecFile>,
    pub main_file: &'static str,
    pub position: Position,
    // Only used by rename requests.
    pub new_name: &'static str,
    // Only used by reference requests.
    pub include_declaration: bool,
    pub client_capabilities: ClientCapabilities,
}

impl FeatureSpec {
    /// Convenience constructor for a spec file entry.
    pub fn file(name: &'static str, text: &'static str) -> FeatureSpecFile {
        FeatureSpecFile { name, text }
    }

    /// URI for a spec file; spec files live under the OS temp directory.
    pub fn uri(name: &str) -> Uri {
        let path = std::env::temp_dir().join(name);
        Uri::from_file_path(path).unwrap()
    }

    /// Identifier of the main file.
    fn identifier(&self) -> TextDocumentIdentifier {
        let uri = Self::uri(self.main_file);
        TextDocumentIdentifier::new(uri)
    }

    /// Builds the workspace from `files` and selects `main_file` as the
    /// request document.
    fn view(&self) -> DocumentView {
        let mut builder = WorkspaceBuilder::new();
        for file in &self.files {
            builder.document(file.name, file.text);
        }
        let workspace = builder.workspace;
        let main_uri = Self::uri(self.main_file);
        let main_document = workspace.find(&main_uri).unwrap();
        DocumentView::new(Arc::new(workspace), main_document)
    }

    /// Wraps `params` together with the view and client capabilities.
    fn request<T>(self, params: T) -> FeatureRequest<T> {
        FeatureRequest {
            params,
            view: self.view(),
            client_capabilities: Arc::new(self.client_capabilities),
        }
    }
}
// `From` instead of `Into`: the standard blanket `impl<T, U: From<T>> Into<U>
// for T` keeps `spec.into()` working for all existing callers.
impl From<FeatureSpec> for FeatureRequest<TextDocumentPositionParams> {
    fn from(spec: FeatureSpec) -> Self {
        let params = TextDocumentPositionParams::new(spec.identifier(), spec.position);
        spec.request(params)
    }
}
// `From` instead of `Into`: the standard blanket impl keeps `spec.into()`
// working for all existing callers.
impl From<FeatureSpec> for FeatureRequest<CompletionParams> {
    fn from(spec: FeatureSpec) -> Self {
        let params = CompletionParams {
            text_document: spec.identifier(),
            position: spec.position,
            context: None,
        };
        spec.request(params)
    }
}
// `From` instead of `Into`: the standard blanket impl keeps `spec.into()`
// working for all existing callers.
impl From<FeatureSpec> for FeatureRequest<FoldingRangeParams> {
    fn from(spec: FeatureSpec) -> Self {
        let params = FoldingRangeParams {
            text_document: spec.identifier(),
        };
        spec.request(params)
    }
}
// `From` instead of `Into`: the standard blanket impl keeps `spec.into()`
// working for all existing callers.
impl From<FeatureSpec> for FeatureRequest<DocumentLinkParams> {
    fn from(spec: FeatureSpec) -> Self {
        let params = DocumentLinkParams {
            text_document: spec.identifier(),
        };
        spec.request(params)
    }
}
// `From` instead of `Into`: the standard blanket impl keeps `spec.into()`
// working for all existing callers.
impl From<FeatureSpec> for FeatureRequest<ReferenceParams> {
    fn from(spec: FeatureSpec) -> Self {
        let params = ReferenceParams {
            text_document: spec.identifier(),
            position: spec.position,
            context: ReferenceContext {
                include_declaration: spec.include_declaration,
            },
        };
        spec.request(params)
    }
}
// `From` instead of `Into`: the standard blanket impl keeps `spec.into()`
// working for all existing callers.
impl From<FeatureSpec> for FeatureRequest<RenameParams> {
    fn from(spec: FeatureSpec) -> Self {
        let params = RenameParams {
            text_document: spec.identifier(),
            position: spec.position,
            new_name: spec.new_name.to_owned(),
        };
        spec.request(params)
    }
}
/// Runs `provider` synchronously against the workspace described by `spec`.
/// Intended for unit tests of feature providers.
pub fn test_feature<F, P, O, S>(provider: F, spec: S) -> O
where
    F: FeatureProvider<Params = P, Output = O>,
    S: Into<FeatureRequest<P>>,
{
    futures::executor::block_on(provider.execute(&spec.into()))
}

View file

@ -1,352 +0,0 @@
#![feature(async_await)]
mod feature;
mod outline;
pub use self::feature::*;
pub use self::outline::*;
use log::*;
use lsp_types::{TextDocumentItem, Uri};
use std::ffi::OsStr;
use std::fs;
use std::path::{Path, PathBuf};
use std::sync::{Arc, Mutex};
use texlab_syntax::*;
/// A single document: its URI, raw text, and parsed syntax tree.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Document {
    pub uri: Uri,
    pub text: String,
    pub tree: SyntaxTree,
}

impl Document {
    /// Wraps an already-parsed tree.
    pub fn new(uri: Uri, text: String, tree: SyntaxTree) -> Self {
        Self { uri, text, tree }
    }

    /// Parses `text` as `language` and wraps the result.
    pub fn parse(uri: Uri, text: String, language: Language) -> Self {
        let tree = SyntaxTree::parse(&uri, &text, language);
        Self::new(uri, text, tree)
    }

    /// Whether the document is backed by a file on disk.
    pub fn is_file(&self) -> bool {
        self.uri.scheme() == "file"
    }
}
/// An immutable snapshot of all loaded documents.
#[derive(Debug, PartialEq, Eq, Clone, Default)]
pub struct Workspace {
    pub documents: Vec<Arc<Document>>,
}

impl Workspace {
    pub fn new() -> Self {
        Workspace {
            documents: Vec::new(),
        }
    }

    /// Finds a document by its URI.
    pub fn find(&self, uri: &Uri) -> Option<Arc<Document>> {
        self.documents
            .iter()
            .find(|document| document.uri == *uri)
            .map(|document| Arc::clone(&document))
    }

    /// Returns all documents connected to `uri` through include edges in
    /// either direction, starting with the document itself.
    pub fn related_documents(&self, uri: &Uri) -> Vec<Arc<Document>> {
        // Build an undirected edge list: every resolvable include target of
        // a file-backed LaTeX document links parent and child both ways.
        let mut edges: Vec<(Arc<Document>, Arc<Document>)> = Vec::new();
        for parent in self.documents.iter().filter(|document| document.is_file()) {
            if let SyntaxTree::Latex(tree) = &parent.tree {
                for include in &tree.includes {
                    for targets in &include.all_targets {
                        for target in targets {
                            if let Some(ref child) = self.find(target) {
                                edges.push((Arc::clone(&parent), Arc::clone(&child)));
                                edges.push((Arc::clone(&child), Arc::clone(&parent)));
                            }
                        }
                    }
                }
            }
        }
        // Depth-first traversal of the connected component of `uri`.
        let mut results = Vec::new();
        if let Some(start) = self.find(uri) {
            let mut visited: Vec<Arc<Document>> = Vec::new();
            let mut stack = Vec::new();
            stack.push(start);
            while let Some(current) = stack.pop() {
                if visited.contains(&current) {
                    continue;
                }
                visited.push(Arc::clone(&current));
                results.push(Arc::clone(&current));
                for edge in &edges {
                    if edge.0 == current {
                        stack.push(Arc::clone(&edge.1));
                    }
                }
            }
        }
        results
    }

    /// Finds a related document that contains a `document` environment,
    /// i.e. one that can be compiled on its own.
    pub fn find_parent(&self, uri: &Uri) -> Option<Arc<Document>> {
        for document in self.related_documents(uri) {
            if let SyntaxTree::Latex(tree) = &document.tree {
                if tree.is_standalone {
                    return Some(document);
                }
            }
        }
        None
    }

    /// Paths of LaTeX/bibliography include targets that exist on disk but
    /// are not yet loaded into the workspace.
    pub fn unresolved_includes(&self) -> Vec<PathBuf> {
        let mut includes = Vec::new();
        for document in &self.documents {
            if let SyntaxTree::Latex(tree) = &document.tree {
                for include in &tree.includes {
                    if include.kind != LatexIncludeKind::Latex
                        && include.kind != LatexIncludeKind::Bibliography
                    {
                        continue;
                    }
                    // Skip includes that already resolve to a loaded document.
                    for targets in &include.all_targets {
                        if targets.iter().any(|target| self.find(target).is_some()) {
                            continue;
                        }
                        for target in targets {
                            // NOTE(review): assumes every target URI is a
                            // file URI; `unwrap` would panic otherwise —
                            // confirm targets are always built from paths.
                            let path = target.to_file_path().unwrap();
                            if path.exists() {
                                includes.push(path);
                            }
                        }
                    }
                }
            }
        }
        includes
    }
}
/// A document plus the workspace snapshot it belongs to and all documents
/// related to it via includes.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct DocumentView {
    pub workspace: Arc<Workspace>,
    pub document: Arc<Document>,
    pub related_documents: Vec<Arc<Document>>,
}

impl DocumentView {
    /// Computes the related documents eagerly from the snapshot.
    pub fn new(workspace: Arc<Workspace>, document: Arc<Document>) -> Self {
        let related_documents = workspace.related_documents(&document.uri);
        DocumentView {
            workspace,
            document,
            related_documents,
        }
    }
}
/// Owns the current workspace snapshot behind a mutex. Every mutation
/// builds a fresh `Arc<Workspace>` and swaps it in, so readers holding an
/// older snapshot stay consistent.
#[derive(Debug, Default)]
pub struct WorkspaceManager {
    workspace: Mutex<Arc<Workspace>>,
}

impl WorkspaceManager {
    /// Returns the current snapshot.
    pub fn get(&self) -> Arc<Workspace> {
        let workspace = self.workspace.lock().unwrap();
        Arc::clone(&workspace)
    }

    /// Adds a document opened via LSP; logs and ignores unknown language ids.
    pub fn add(&self, document: TextDocumentItem) {
        let language = match Language::by_language_id(&document.language_id) {
            Some(language) => language,
            None => {
                error!("Invalid language id: {}", &document.language_id);
                return;
            }
        };
        let mut workspace = self.workspace.lock().unwrap();
        *workspace = Self::add_or_update(&workspace, document.uri, document.text, language);
    }

    /// Loads a document from disk; logs and returns on any failure
    /// (unknown extension, bad path, unreadable file).
    pub fn load(&self, path: &Path) {
        let language = match path
            .extension()
            .and_then(OsStr::to_str)
            .and_then(Language::by_extension)
        {
            Some(language) => language,
            None => {
                warn!("Could not determine language: {}", path.to_string_lossy());
                return;
            }
        };
        let uri = match Uri::from_file_path(path) {
            Ok(uri) => uri,
            Err(_) => {
                error!("Invalid path: {}", path.to_string_lossy());
                return;
            }
        };
        let text = match fs::read_to_string(path) {
            Ok(text) => text,
            Err(_) => {
                warn!("Could not open file: {}", path.to_string_lossy());
                return;
            }
        };
        let mut workspace = self.workspace.lock().unwrap();
        *workspace = Self::add_or_update(&workspace, uri, text, language);
    }

    /// Re-parses an already known document with new text, keeping the
    /// language inferred from its previous tree.
    pub fn update(&self, uri: Uri, text: String) {
        let mut workspace = self.workspace.lock().unwrap();
        let old_document = match workspace.documents.iter().find(|x| x.uri == uri) {
            Some(document) => document,
            None => {
                warn!("Document not found: {}", uri);
                return;
            }
        };
        let language = match old_document.tree {
            SyntaxTree::Latex(_) => Language::Latex,
            SyntaxTree::Bibtex(_) => Language::Bibtex,
        };
        *workspace = Self::add_or_update(&workspace, uri, text, language);
    }

    /// Returns a new snapshot with the document at `uri` replaced (or added).
    fn add_or_update(
        workspace: &Workspace,
        uri: Uri,
        text: String,
        language: Language,
    ) -> Arc<Workspace> {
        let document = Document::parse(uri, text, language);
        let mut documents: Vec<Arc<Document>> = workspace
            .documents
            .iter()
            .filter(|x| x.uri != document.uri)
            .cloned()
            .collect();
        documents.push(Arc::new(document));
        Arc::new(Workspace { documents })
    }
}
/// Test helper that assembles a [`Workspace`] one document at a time
/// (see the `tests` module for usage).
pub struct WorkspaceBuilder {
    pub workspace: Workspace,
}
impl WorkspaceBuilder {
    /// Creates a builder holding an empty workspace.
    pub fn new() -> Self {
        Self {
            workspace: Workspace::default(),
        }
    }

    /// Parses `text` as a document named `name` under the system temporary
    /// directory, adds it to the workspace and returns its URI.
    ///
    /// Panics when `name` has no extension or the extension is not a known
    /// language — acceptable here because this is a test-only helper.
    pub fn document(&mut self, name: &str, text: &str) -> Uri {
        let path = std::env::temp_dir().join(name);
        let extension = path.extension().unwrap().to_str().unwrap();
        let language = Language::by_extension(extension).unwrap();
        let uri = Uri::from_file_path(path).unwrap();
        let document = Document::parse(uri.clone(), text.to_owned(), language);
        self.workspace.documents.push(Arc::new(document));
        uri
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Checks that `actual` consists of exactly the documents identified by
    /// `expected`, in the same order.
    fn verify_documents(expected: Vec<Uri>, actual: Vec<Arc<Document>>) {
        assert_eq!(expected.len(), actual.len());
        for (uri, document) in expected.iter().zip(actual.iter()) {
            assert_eq!(*uri, document.uri);
        }
    }

    #[test]
    fn test_related_documents_append_extensions() {
        let mut builder = WorkspaceBuilder::new();
        let parent = builder.document("foo.tex", "\\include{bar/baz}");
        let child = builder.document("bar/baz.tex", "");
        let related = builder.workspace.related_documents(&parent);
        verify_documents(vec![parent, child], related);
    }

    #[test]
    fn test_related_documents_relative_path() {
        let mut builder = WorkspaceBuilder::new();
        let target = builder.document("foo.tex", "");
        let source = builder.document("bar/baz.tex", "\\input{../foo.tex}");
        let related = builder.workspace.related_documents(&target);
        verify_documents(vec![target, source], related);
    }

    #[test]
    fn test_related_documents_invalid_includes() {
        let mut builder = WorkspaceBuilder::new();
        let uri = builder.document("foo.tex", "\\include{<foo>?|bar|:}\n\\include{}");
        let related = builder.workspace.related_documents(&uri);
        verify_documents(vec![uri], related);
    }

    #[test]
    fn test_related_documents_bibliographies() {
        let mut builder = WorkspaceBuilder::new();
        let tex = builder.document("foo.tex", "\\addbibresource{bar.bib}");
        let bib = builder.document("bar.bib", "");
        let related = builder.workspace.related_documents(&bib);
        verify_documents(vec![bib, tex], related);
    }

    #[test]
    fn test_related_documents_unresolvable_include() {
        let mut builder = WorkspaceBuilder::new();
        let uri = builder.document("foo.tex", "\\include{bar.tex}");
        builder.document("baz.tex", "");
        let related = builder.workspace.related_documents(&uri);
        verify_documents(vec![uri], related);
    }

    #[test]
    fn test_related_documents_include_cycles() {
        let mut builder = WorkspaceBuilder::new();
        let first = builder.document("foo.tex", "\\input{bar.tex}");
        let second = builder.document("bar.tex", "\\input{foo.tex}");
        let related = builder.workspace.related_documents(&first);
        verify_documents(vec![first, second], related);
    }

    #[test]
    fn test_find_parent() {
        let mut builder = WorkspaceBuilder::new();
        let child = builder.document("foo.tex", "");
        let parent = builder.document("bar.tex", "\\begin{document}\\include{foo}\\end{document}");
        let document = builder.workspace.find_parent(&child).unwrap();
        assert_eq!(parent, document.uri);
    }

    #[test]
    fn test_find_parent_no_parent() {
        let mut builder = WorkspaceBuilder::new();
        let uri = builder.document("foo.tex", "");
        builder.document("bar.tex", "\\begin{document}\\end{document}");
        assert_eq!(None, builder.workspace.find_parent(&uri));
    }
}

View file

@ -1,175 +0,0 @@
use super::{Document, DocumentView};
use lsp_types::*;
use std::collections::HashSet;
use texlab_syntax::*;
/// Flattened list of the sections of a document tree.
///
/// Built from a [`DocumentView`] via `From`; sections of included files are
/// interleaved at the position of their include statements, in source order
/// (see `OutlineSectionFinder::analyze`).
#[derive(Debug, PartialEq, Eq, Clone, Default)]
pub struct Outline<'a> {
    sections: Vec<OutlineSection<'a>>,
}
impl<'a> Outline<'a> {
    fn new(sections: Vec<OutlineSection<'a>>) -> Self {
        Self { sections }
    }

    /// Returns the section of document `uri` that `position` falls into,
    /// i.e. the last section of that document ending at or before `position`.
    pub fn find(&self, uri: &Uri, position: Position) -> Option<&'a LatexSection> {
        // Walking the flattened list back to front yields the most recently
        // opened section first; filtering inside the loop is equivalent to
        // the filter-then-reverse formulation since filtering preserves order.
        for section in self.sections.iter().rev() {
            if section.document.uri == *uri && section.item.end() <= position {
                return Some(section.item);
            }
        }
        None
    }
}
impl<'a> From<&'a DocumentView> for Outline<'a> {
    fn from(view: &'a DocumentView) -> Self {
        // Start the traversal at the parent (root) document when one exists,
        // so the outline covers the whole project; otherwise start at the
        // viewed document itself.
        let root = match view.workspace.find_parent(&view.document.uri) {
            Some(parent) => view
                .related_documents
                .iter()
                .find(|doc| doc.uri == parent.uri)
                .unwrap(),
            None => &view.document,
        };
        let mut finder = OutlineSectionFinder::default();
        finder.analyze(view, root);
        Outline::new(finder.sections)
    }
}
/// A section heading paired with the document that declares it.
#[derive(Debug, PartialEq, Eq, Clone)]
struct OutlineSection<'a> {
    pub document: &'a Document,
    pub item: &'a LatexSection,
}
impl<'a> OutlineSection<'a> {
fn new(document: &'a Document, item: &'a LatexSection) -> Self {
Self { document, item }
}
}
/// Traversal state for collecting [`OutlineSection`]s across included files.
///
/// `visited` guards against include cycles; `sections` accumulates the
/// sections in the order they are encountered.
#[derive(Debug, Default)]
struct OutlineSectionFinder<'a> {
    visited: HashSet<&'a Uri>,
    sections: Vec<OutlineSection<'a>>,
}
impl<'a> OutlineSectionFinder<'a> {
fn analyze(&mut self, view: &'a DocumentView, document: &'a Document) {
if !self.visited.insert(&document.uri) {
return;
}
if let SyntaxTree::Latex(tree) = &document.tree {
let mut items = Vec::new();
for section in &tree.sections {
items.push(OutlineItem::Section(section));
}
for include in &tree.includes {
items.push(OutlineItem::Include(include));
}
items.sort_by_key(SyntaxNode::start);
for item in items {
match item {
OutlineItem::Section(item) => {
let section = OutlineSection::new(document, item);
self.sections.push(section);
}
OutlineItem::Include(item) => {
for document in &view.related_documents {
for targets in &item.all_targets {
if targets.contains(&document.uri) {
self.analyze(view, document);
break;
}
}
}
}
}
}
}
}
}
/// A tree item considered while building the outline: either a section
/// heading or an include statement, so both can be sorted together by
/// source position.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
enum OutlineItem<'a> {
    Section(&'a LatexSection),
    Include(&'a LatexInclude),
}
impl<'a> SyntaxNode for OutlineItem<'a> {
    /// Delegates to the range of the wrapped section or include.
    fn range(&self) -> Range {
        // `OutlineItem` is `Copy`, so matching on `*self` is free.
        match *self {
            OutlineItem::Section(section) => section.range(),
            OutlineItem::Include(include) => include.range(),
        }
    }
}
/// Human-readable context for a position in a document: the text of the
/// enclosing section heading and of the nearest caption, both optional.
/// Rendered to Markdown by `documentation`.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct OutlineContext {
    pub section: Option<String>,
    pub caption: Option<String>,
}
impl OutlineContext {
    /// Resolves the section and caption context for `position` in `document`.
    pub fn find(outline: &Outline, document: &Document, position: Position) -> Self {
        Self {
            section: Self::find_section(outline, document, position),
            caption: Self::find_caption(document, position),
        }
    }

    /// Text of the section heading enclosing `position`, if any.
    fn find_section(outline: &Outline, document: &Document, position: Position) -> Option<String> {
        let section = outline.find(&document.uri, position)?;
        Self::extract(document, &section.command.args[section.index])
    }

    /// Text of the caption inside the innermost non-`document` environment
    /// containing `position`, if any.
    fn find_caption(document: &Document, position: Position) -> Option<String> {
        let tree = match &document.tree {
            SyntaxTree::Latex(tree) => tree,
            SyntaxTree::Bibtex(_) => return None,
        };
        let environment = tree
            .environments
            .iter()
            .filter(|env| env.left.name().map(LatexToken::text) != Some("document"))
            .find(|env| env.range().contains(position))?;
        let caption = tree
            .captions
            .iter()
            .find(|cap| environment.range().contains(cap.start()))?;
        Self::extract(document, &caption.command.args[caption.index])
    }

    /// Extracts the text between the braces of `content`, excluding the
    /// braces themselves; `None` when the group was never closed.
    fn extract(document: &Document, content: &LatexGroup) -> Option<String> {
        let right = content.right.as_ref()?;
        // Shift one column inward on each side to strip the delimiters.
        let range = Range::new_simple(
            content.left.start().line,
            content.left.start().character + 1,
            right.end().line,
            right.end().character - 1,
        );
        Some(CharStream::extract(&document.text, range))
    }

    /// Renders the available context as Markdown, or `None` when neither a
    /// section nor a caption is known.
    pub fn documentation(&self) -> Option<MarkupContent> {
        let value = match (&self.section, &self.caption) {
            (Some(section), Some(caption)) => format!("*{}* \n{}", section, caption),
            (Some(section), None) => format!("*{}*", section),
            (None, Some(caption)) => caption.to_owned(),
            (None, None) => return None,
        };
        Some(MarkupContent {
            kind: MarkupKind::Markdown,
            value,
        })
    }
}