Allow multi-word names in parser

This commit is contained in:
Patrick Förster 2021-05-29 11:18:44 +02:00
parent b3b0ad0e38
commit 1ef60b9cec
116 changed files with 819 additions and 558 deletions

View file

@ -67,7 +67,7 @@ fn convert_to_ris(root: &bibtex::SyntaxNode, key: &str) -> Option<RisReference>
let entry = root
.children()
.filter_map(bibtex::Entry::cast)
.find(|entry| entry.key().map(|key| key.text()) == Some(key))?;
.find(|entry| entry.key().map(|key| key.to_string()).as_deref() == Some(key))?;
bib_code.push_str(&entry.syntax().to_string());
bib_code = bib_code.replace("\\hypen", "-");

View file

@ -56,13 +56,13 @@ fn analyze_environment(
node: &latex::SyntaxNode,
) -> Option<()> {
let environment = latex::Environment::cast(node)?;
let name1 = environment.begin()?.name()?.word()?;
let name2 = environment.end()?.name()?.word()?;
if name1.text() != name2.text() {
let name1 = environment.begin()?.name()?.key()?;
let name2 = environment.end()?.name()?.key()?;
if name1 != name2 {
diagnostics_by_uri.insert(
Arc::clone(&document.uri),
Diagnostic {
range: document.line_index.line_col_lsp_range(name1.text_range()),
range: document.line_index.line_col_lsp_range(name1.small_range()),
severity: Some(DiagnosticSeverity::Error),
code: None,
code_description: None,
@ -98,8 +98,10 @@ fn analyze_curly_group(
.filter_map(latex::Environment::cast)
.filter_map(|env| env.begin())
.filter_map(|begin| begin.name())
.filter_map(|name| name.word())
.any(|name| ["asy", "lstlisting", "minted", "verbatim"].contains(&name.text()));
.filter_map(|name| name.key())
.any(|name| {
["asy", "lstlisting", "minted", "verbatim"].contains(&name.to_string().as_str())
});
if !is_inside_verbatim_environment
&& !node

View file

@ -19,6 +19,8 @@ mod user_command;
mod user_environment;
mod util;
use std::borrow::Cow;
use cancellation::CancellationToken;
use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
use lsp_types::{
@ -144,15 +146,29 @@ fn dedup(items: Vec<InternalCompletionItem>) -> Vec<InternalCompletionItem> {
}
fn score(context: &CursorContext<CompletionParams>, items: &mut Vec<InternalCompletionItem>) {
let pattern = match &context.cursor {
Cursor::Latex(token) if token.kind().is_command_name() => token.text().trim_end(),
Cursor::Latex(token) if token.kind() == latex::WORD => token.text(),
Cursor::Latex(_) => "",
Cursor::Bibtex(token) if token.kind().is_type() => token.text(),
Cursor::Bibtex(token) if token.kind() == bibtex::WORD => token.text(),
Cursor::Bibtex(token) if token.kind() == bibtex::COMMAND_NAME => token.text().trim_end(),
Cursor::Bibtex(_) => "",
Cursor::Nothing => "",
let pattern: Cow<str> = match &context.cursor {
Cursor::Latex(token) if token.kind().is_command_name() => token.text().trim_end().into(),
Cursor::Latex(token) if token.kind() == latex::WORD => {
if let Some(key) = latex::Key::cast(token.parent()) {
key.to_string().into()
} else {
token.text().into()
}
}
Cursor::Latex(_) => "".into(),
Cursor::Bibtex(token) if token.kind().is_type() => token.text().into(),
Cursor::Bibtex(token) if token.kind() == bibtex::WORD => {
if let Some(key) = bibtex::Key::cast(token.parent()) {
key.to_string().into()
} else {
token.text().into()
}
}
Cursor::Bibtex(token) if token.kind() == bibtex::COMMAND_NAME => {
token.text().trim_end().into()
}
Cursor::Bibtex(_) => "".into(),
Cursor::Nothing => "".into(),
};
let file_pattern = pattern.split('/').last().unwrap();
@ -211,7 +227,7 @@ fn preselect(
let group = latex::CurlyGroupWord::cast(name.parent())?;
let end = latex::End::cast(group.syntax().parent()?)?;
let environment = latex::Environment::cast(end.syntax().parent()?)?;
let name = environment.begin()?.name()?.word()?.text();
let name = environment.begin()?.name()?.key()?.to_string();
for item in items {
if item.data.label() == name {

View file

@ -1,5 +1,4 @@
use cancellation::CancellationToken;
use cstree::TextRange;
use lsp_types::CompletionParams;
use crate::{
@ -16,18 +15,9 @@ pub fn complete_acronyms<'a>(
) -> Option<()> {
cancellation_token.result().ok()?;
let token = context.cursor.as_latex()?;
let group = latex::CurlyGroupWord::cast(token.parent())
.filter(|group| context.is_inside_latex_curly(group))?;
let (_, range, group) = context.find_curly_group_word()?;
latex::AcronymReference::cast(group.syntax().parent()?)?;
let range = if token.kind() == latex::WORD {
token.text_range()
} else {
TextRange::empty(context.offset)
};
for document in &context.request.subset.documents {
if let Some(data) = document.data.as_latex() {
for name in data
@ -35,8 +25,8 @@ pub fn complete_acronyms<'a>(
.descendants()
.filter_map(latex::AcronymDefinition::cast)
.filter_map(|node| node.name())
.filter_map(|name| name.word())
.map(|name| name.text())
.filter_map(|name| name.key())
.map(|name| name.to_string())
{
items.push(InternalCompletionItem::new(
range,

View file

@ -26,12 +26,14 @@ pub fn complete_citations<'a>(
let token = context.cursor.as_latex()?;
let range = if token.kind() == latex::WORD {
token.text_range()
latex::Key::cast(token.parent())
.map(|key| key.small_range())
.or_else(|| latex::Text::cast(token.parent()).map(|text| text.small_range()))?
} else {
TextRange::empty(context.offset)
};
check_citation(context, token).or_else(|| check_acronym(token))?;
check_citation(context).or_else(|| check_acronym(context))?;
for document in &context.request.subset.documents {
if let Some(data) = document.data.as_bibtex() {
for entry in data.root.children().filter_map(bibtex::Entry::cast) {
@ -46,20 +48,17 @@ pub fn complete_citations<'a>(
Some(())
}
fn check_citation(
context: &CursorContext<CompletionParams>,
token: &latex::SyntaxToken,
) -> Option<()> {
let group = latex::CurlyGroupWordList::cast(token.parent())
.filter(|group| context.is_inside_latex_curly(group))?;
/// Succeeds (returns `Some(())`) only when the cursor sits inside the curly
/// group argument of a citation command, i.e. the group's parent node casts
/// to `latex::Citation`.
fn check_citation(context: &CursorContext<CompletionParams>) -> Option<()> {
// Name and range of the key are not needed here; only the group matters.
let (_, _, group) = context.find_curly_group_word_list()?;
latex::Citation::cast(group.syntax().parent()?)?;
Some(())
}
fn check_acronym(token: &latex::SyntaxToken) -> Option<()> {
fn check_acronym(context: &CursorContext<CompletionParams>) -> Option<()> {
let token = context.cursor.as_latex()?;
let pair = token.ancestors().find_map(latex::KeyValuePair::cast)?;
let mut key_words = pair.key()?.words();
if key_words.next()?.text() != "cite" || key_words.next().is_some() {
if pair.key()?.to_string() != "cite" {
return None;
}
@ -72,7 +71,7 @@ fn make_item<'a>(
entry: bibtex::Entry<'a>,
range: TextRange,
) -> Option<InternalCompletionItem<'a>> {
let key = entry.key()?.text();
let key = entry.key()?.to_string();
let ty = LANGUAGE_DATA
.find_entry_type(&entry.ty()?.text()[1..])
.map(|ty| Structure::Entry(ty.category))
@ -98,7 +97,7 @@ fn make_item<'a>(
range,
InternalCompletionItemData::Citation {
uri: Arc::clone(&document.uri),
key: key.into(),
key,
text,
ty,
},
@ -170,6 +169,29 @@ mod tests {
}
}
#[test]
// A BibTeX entry key containing whitespace ("foo bar") should still be offered
// as a citation completion, and the reported range should cover the partial
// key typed after "\cite{".
fn test_latex_two_words() {
let request = FeatureTester::builder()
.files(vec![
("main.tex", "\\addbibresource{main.bib}\n\\cite{foo}"),
("main.bib", "@article{foo bar,}"),
])
.main("main.tex")
.line(1)
.character(6)
.build()
.completion();
let context = CursorContext::new(request);
let mut actual_items = Vec::new();
complete_citations(&context, &mut actual_items, CancellationToken::none());
assert!(!actual_items.is_empty());
for item in actual_items {
// Offsets 32..35 are the span of "foo" inside "\cite{foo}" in main.tex.
assert_eq!(item.range, TextRange::new(32.into(), 35.into()));
}
}
#[test]
fn test_latex_open_brace() {
let request = FeatureTester::builder()
@ -241,4 +263,30 @@ mod tests {
assert_eq!(item.range, TextRange::new(54.into(), 54.into()));
}
}
#[test]
// Citation completion inside an acronym definition's cite={...} value must
// handle a multi-word key ("a b") that even spans a line break after the "{".
fn test_latex_acronym_two_words() {
let request = FeatureTester::builder()
.files(vec![
(
"main.tex",
"\\addbibresource{main.bib}\n\\DeclareAcronym{foo}{cite={\na b}}",
),
("main.bib", "@article{foo,}"),
])
.main("main.tex")
.line(2)
.character(0)
.build()
.completion();
let context = CursorContext::new(request);
let mut actual_items = Vec::new();
complete_citations(&context, &mut actual_items, CancellationToken::none());
assert!(!actual_items.is_empty());
for item in actual_items {
// Offsets 54..57 are the span of "a b" (the multi-word key) in main.tex.
assert_eq!(item.range, TextRange::new(54.into(), 57.into()));
}
}
}

View file

@ -1,5 +1,4 @@
use cancellation::CancellationToken;
use cstree::TextRange;
use lsp_types::CompletionParams;
use crate::{
@ -17,15 +16,7 @@ pub fn complete_colors<'a>(
) -> Option<()> {
cancellation_token.result().ok()?;
let token = context.cursor.as_latex()?;
let range = if token.kind() == latex::WORD {
token.text_range()
} else {
TextRange::empty(context.offset)
};
let group = latex::CurlyGroupWord::cast(token.parent())
.filter(|group| context.is_inside_latex_curly(group))?;
let (_, range, group) = context.find_curly_group_word()?;
latex::ColorReference::cast(group.syntax().parent()?)?;
for name in &LANGUAGE_DATA.colors {

View file

@ -18,15 +18,7 @@ pub fn complete_color_models<'a>(
) -> Option<()> {
cancellation_token.result().ok()?;
let token = context.cursor.as_latex()?;
let range = if token.kind() == latex::WORD {
token.text_range()
} else {
TextRange::empty(context.offset)
};
check_color_definition(context, token)
.or_else(|| check_color_definition_set(context, token))?;
let range = check_color_definition(context).or_else(|| check_color_definition_set(context))?;
for name in MODEL_NAMES {
items.push(InternalCompletionItem::new(
@ -38,30 +30,23 @@ pub fn complete_color_models<'a>(
Some(())
}
fn check_color_definition(
context: &CursorContext<CompletionParams>,
token: &latex::SyntaxToken,
) -> Option<()> {
let group = latex::CurlyGroupWord::cast(token.parent())
.filter(|group| context.is_inside_latex_curly(group))?;
fn check_color_definition(context: &CursorContext<CompletionParams>) -> Option<TextRange> {
let (_, range, group) = context.find_curly_group_word()?;
let definition = latex::ColorDefinition::cast(group.syntax().parent()?)?;
definition
.model()
.filter(|model| model.syntax().text_range() == group.syntax().text_range())?;
Some(())
Some(range)
}
fn check_color_definition_set(
context: &CursorContext<CompletionParams>,
token: &latex::SyntaxToken,
) -> Option<()> {
let group = latex::CurlyGroupWordList::cast(token.parent())
.filter(|group| context.is_inside_latex_curly(group))?;
fn check_color_definition_set(context: &CursorContext<CompletionParams>) -> Option<TextRange> {
let (_, range, group) = context.find_curly_group_word_list()?;
let definition = latex::ColorSetDefinition::cast(group.syntax().parent()?)?;
definition
.model_list()
.filter(|model| model.syntax().text_range() == group.syntax().text_range())?;
Some(())
Some(range)
}
#[cfg(test)]

View file

@ -1,12 +1,7 @@
use cancellation::CancellationToken;
use cstree::TextRange;
use lsp_types::CompletionParams;
use crate::{
component_db::COMPONENT_DATABASE,
features::cursor::CursorContext,
syntax::{latex, CstNode},
};
use crate::{component_db::COMPONENT_DATABASE, features::cursor::CursorContext};
use super::types::{InternalCompletionItem, InternalCompletionItemData};
@ -17,18 +12,7 @@ pub fn complete_component_environments<'a>(
) -> Option<()> {
cancellation_token.result().ok()?;
let token = context.cursor.as_latex()?;
let group = latex::CurlyGroupWord::cast(token.parent())
.filter(|group| context.is_inside_latex_curly(group))?;
if !matches!(group.syntax().parent()?.kind(), latex::BEGIN | latex::END) {
return None;
}
let range = if token.kind() == latex::WORD {
token.text_range()
} else {
TextRange::empty(context.offset)
};
let (_, range) = context.find_environment_name()?;
for component in COMPONENT_DATABASE.linked_components(&context.request.subset) {
cancellation_token.result().ok()?;

View file

@ -26,7 +26,7 @@ pub fn complete_fields<'a>(
let parent = token.parent();
if let Some(entry) = bibtex::Entry::cast(parent) {
if entry.key()?.text_range() == token.text_range() {
if entry.key()?.small_range() == token.text_range() {
return None;
}
} else {

View file

@ -1,5 +1,4 @@
use cancellation::CancellationToken;
use cstree::TextRange;
use lsp_types::CompletionParams;
use crate::{
@ -16,15 +15,8 @@ pub fn complete_glossary_entries<'a>(
) -> Option<()> {
cancellation_token.result().ok()?;
let token = context.cursor.as_latex()?;
let group = latex::CurlyGroupWord::cast(token.parent())
.filter(|group| context.is_inside_latex_curly(group))?;
let (_, range, group) = context.find_curly_group_word()?;
latex::GlossaryEntryReference::cast(group.syntax().parent()?)?;
let range = if token.kind() == latex::WORD {
token.text_range()
} else {
TextRange::empty(context.offset)
};
for document in &context.request.subset.documents {
if let Some(data) = document.data.as_latex() {
@ -33,8 +25,8 @@ pub fn complete_glossary_entries<'a>(
if let Some(name) = latex::GlossaryEntryDefinition::cast(node)
.and_then(|entry| entry.name())
.and_then(|name| name.word())
.map(|name| name.text())
.and_then(|name| name.key())
.map(|name| name.to_string())
{
items.push(InternalCompletionItem::new(
range,
@ -42,8 +34,8 @@ pub fn complete_glossary_entries<'a>(
));
} else if let Some(name) = latex::AcronymDefinition::cast(node)
.and_then(|entry| entry.name())
.and_then(|name| name.word())
.map(|name| name.text())
.and_then(|name| name.key())
.map(|name| name.to_string())
{
items.push(InternalCompletionItem::new(
range,

View file

@ -1,5 +1,4 @@
use cancellation::CancellationToken;
use cstree::TextRange;
use lsp_types::CompletionParams;
use rustc_hash::FxHashSet;
use smol_str::SmolStr;
@ -17,15 +16,7 @@ pub fn complete_imports<'a>(
items: &mut Vec<InternalCompletionItem<'a>>,
cancellation_token: &CancellationToken,
) -> Option<()> {
let token = context.cursor.as_latex()?;
let range = if token.kind() == latex::WORD {
token.text_range()
} else {
TextRange::empty(context.offset)
};
let group = latex::CurlyGroupWordList::cast(token.parent())
.filter(|group| context.is_inside_latex_curly(group))?;
let (_, range, group) = context.find_curly_group_word_list()?;
let (extension, mut factory): (
&str,
@ -76,6 +67,8 @@ pub fn complete_imports<'a>(
#[cfg(test)]
mod tests {
use cstree::TextRange;
use crate::features::testing::FeatureTester;
use super::*;

View file

@ -25,15 +25,8 @@ pub fn complete_includes<'a>(
return None;
}
let token = context.cursor.as_latex()?;
let (path_text, path_range) = if token.kind() == latex::WORD {
(token.text(), token.text_range())
} else {
("", TextRange::empty(context.offset))
};
let (path_text, path_range, group) = context.find_curly_group_word_list()?;
let group = latex::CurlyGroupWordList::cast(token.parent())
.filter(|group| context.is_inside_latex_curly(group))?;
let include = group.syntax().parent()?;
let (include_extension, extensions): (bool, &[&str]) = match include.kind() {
latex::PACKAGE_INCLUDE => (false, &["sty"]),
@ -62,7 +55,7 @@ pub fn complete_includes<'a>(
TextRange::new(start, path_range.end())
};
let current_dir = current_dir(context, path_text)?;
let current_dir = current_dir(context, &path_text)?;
for entry in fs::read_dir(current_dir).ok()?.filter_map(Result::ok) {
let mut path = entry.path();

View file

@ -18,26 +18,7 @@ pub fn complete_labels<'a>(
) -> Option<()> {
cancellation_token.result().ok()?;
let token = context.cursor.as_latex()?;
let is_math = latex::CurlyGroupWordList::cast(token.parent())
.filter(|group| context.is_inside_latex_curly(group))
.and_then(|group| group.syntax().parent())
.and_then(|reference| latex::LabelReference::cast(reference))
.and_then(|refernce| refernce.command())
.map(|reference| reference.text() == "\\eqref")
.or_else(|| {
latex::CurlyGroupWord::cast(token.parent())
.filter(|group| context.is_inside_latex_curly(group))
.and_then(|group| group.syntax().parent())
.and_then(|reference| latex::LabelReferenceRange::cast(reference))
.map(|_| false)
})?;
let range = if token.kind() == latex::WORD {
token.text_range()
} else {
TextRange::empty(context.offset)
};
let (range, is_math) = find_reference(context).or_else(|| find_reference_range(context))?;
for document in &context.request.subset.documents {
if let Some(data) = document.data.as_latex() {
@ -49,10 +30,10 @@ pub fn complete_labels<'a>(
{
if let Some(name) = label
.name()
.and_then(|name| name.word())
.map(|name| name.text())
.and_then(|name| name.key())
.map(|name| name.to_string())
{
match render_label(&context.request.subset, name, Some(label)) {
match render_label(&context.request.subset, &name, Some(label)) {
Some(rendered_label) => {
let kind = match &rendered_label.object {
LabelledObject::Section { .. } => Structure::Section,
@ -112,6 +93,19 @@ pub fn complete_labels<'a>(
Some(())
}
/// Returns the completion range for a label reference under the cursor, plus
/// whether the reference command is `\eqref` (used by callers to pick a
/// math-flavored rendering).
fn find_reference(context: &CursorContext<CompletionParams>) -> Option<(TextRange, bool)> {
let (_, range, group) = context.find_curly_group_word_list()?;
// The curly group must be the argument of a LabelReference node.
let reference = latex::LabelReference::cast(group.syntax().parent()?)?;
let is_math = reference.command()?.text() == "\\eqref";
Some((range, is_math))
}
/// Returns the completion range when the cursor is inside the argument of a
/// label-reference-range node; such references never use the `\eqref` math
/// rendering, hence the constant `false`.
fn find_reference_range(context: &CursorContext<CompletionParams>) -> Option<(TextRange, bool)> {
let (_, range, group) = context.find_curly_group_word()?;
latex::LabelReferenceRange::cast(group.syntax().parent()?)?;
Some((range, false))
}
#[cfg(test)]
mod tests {
use cstree::TextRange;
@ -193,4 +187,24 @@ mod tests {
assert_eq!(item.range, TextRange::new(11.into(), 11.into()));
}
}
#[test]
// A \label whose name contains a space ("foo bar") should be suggested when
// completing inside \ref{...}; the range must cover only the text already
// typed ("foo").
fn test_multi_word() {
let request = FeatureTester::builder()
.files(vec![("main.tex", "\\ref{foo}\\label{foo bar}")])
.main("main.tex")
.line(0)
.character(8)
.build()
.completion();
let context = CursorContext::new(request);
let mut actual_items = Vec::new();
complete_labels(&context, &mut actual_items, CancellationToken::none());
assert!(!actual_items.is_empty());
for item in actual_items {
// Offsets 5..8 are the span of "foo" inside "\ref{foo}".
assert_eq!(item.range, TextRange::new(5.into(), 8.into()));
}
}
}

View file

@ -1,11 +1,7 @@
use cancellation::CancellationToken;
use cstree::TextRange;
use lsp_types::CompletionParams;
use crate::{
features::cursor::CursorContext,
syntax::{latex, CstNode},
};
use crate::features::cursor::CursorContext;
use super::types::{InternalCompletionItem, InternalCompletionItemData};
@ -18,18 +14,7 @@ pub fn complete_theorem_environments<'a>(
return None;
}
let token = context.cursor.as_latex()?;
let group = latex::CurlyGroupWord::cast(token.parent())
.filter(|group| context.is_inside_latex_curly(group))?;
if !matches!(group.syntax().parent()?.kind(), latex::BEGIN | latex::END) {
return None;
}
let range = if token.kind() == latex::WORD {
token.text_range()
} else {
TextRange::empty(context.offset)
};
let (_, range) = context.find_environment_name()?;
for document in &context.request.subset.documents {
if let Some(data) = document.data.as_latex() {
@ -49,6 +34,8 @@ pub fn complete_theorem_environments<'a>(
#[cfg(test)]
mod tests {
use cstree::TextRange;
use crate::features::testing::FeatureTester;
use super::*;

View file

@ -1,5 +1,4 @@
use cancellation::CancellationToken;
use cstree::TextRange;
use lsp_types::CompletionParams;
use crate::{
@ -17,15 +16,8 @@ pub fn complete_tikz_libraries<'a>(
) -> Option<()> {
cancellation_token.result().ok()?;
let token = context.cursor.as_latex()?;
let range = if token.kind() == latex::WORD {
token.text_range()
} else {
TextRange::empty(context.offset)
};
let (_, range, group) = context.find_curly_group_word_list()?;
let group = latex::CurlyGroupWordList::cast(token.parent())
.filter(|group| context.is_inside_latex_curly(group))?;
let import = latex::TikzLibraryImport::cast(group.syntax().parent()?)?;
if import.command()?.text() == "\\usepgflibrary" {
@ -49,6 +41,8 @@ pub fn complete_tikz_libraries<'a>(
#[cfg(test)]
mod tests {
use cstree::TextRange;
use crate::features::testing::FeatureTester;
use super::*;

View file

@ -40,7 +40,7 @@ pub enum InternalCompletionItemData<'a> {
BeginCommand,
Citation {
uri: Arc<Uri>,
key: &'a str,
key: String,
text: String,
ty: Structure,
},
@ -67,10 +67,10 @@ pub enum InternalCompletionItemData<'a> {
name: &'a str,
},
Acronym {
name: &'a str,
name: String,
},
GlossaryEntry {
name: &'a str,
name: String,
},
File {
name: SmolStr,
@ -79,7 +79,7 @@ pub enum InternalCompletionItemData<'a> {
name: SmolStr,
},
Label {
name: &'a str,
name: String,
kind: Structure,
header: Option<String>,
footer: Option<String>,

View file

@ -1,11 +1,7 @@
use cancellation::CancellationToken;
use cstree::TextRange;
use lsp_types::CompletionParams;
use crate::{
features::cursor::CursorContext,
syntax::{latex, CstNode},
};
use crate::features::cursor::CursorContext;
use super::types::{InternalCompletionItem, InternalCompletionItemData};
@ -16,18 +12,7 @@ pub fn complete_user_environments<'a>(
) -> Option<()> {
cancellation_token.result().ok()?;
let token = context.cursor.as_latex()?;
let group = latex::CurlyGroupWord::cast(token.parent())
.filter(|group| context.is_inside_latex_curly(group))?;
if !matches!(group.syntax().parent()?.kind(), latex::BEGIN | latex::END) {
return None;
}
let range = if token.kind() == latex::WORD {
token.text_range()
} else {
TextRange::empty(context.offset)
};
let (name, range) = context.find_environment_name()?;
for document in &context.request.subset.documents {
if let Some(data) = document.data.as_latex() {
@ -35,7 +20,7 @@ pub fn complete_user_environments<'a>(
.extras
.environment_names
.iter()
.filter(|name| name.as_str() != token.text())
.filter(|n| n.as_str() != name)
{
cancellation_token.result().ok()?;
items.push(InternalCompletionItem::new(

View file

@ -1,7 +1,7 @@
use cstree::{TextRange, TextSize};
use lsp_types::{
CompletionParams, GotoDefinitionParams, HoverParams, Position, ReferenceParams, RenameParams,
TextDocumentPositionParams,
CompletionParams, DocumentHighlightParams, GotoDefinitionParams, HoverParams, Position,
ReferenceParams, RenameParams, TextDocumentPositionParams,
};
use crate::{
@ -42,6 +42,14 @@ impl Cursor {
return Some(Self::Latex(right));
}
if left.kind() == latex::WHITESPACE && left.parent().kind() == latex::KEY {
return Some(Self::Latex(left));
}
if right.kind() == latex::WHITESPACE && right.parent().kind() == latex::KEY {
return Some(Self::Latex(right));
}
Some(Self::Latex(right))
}
@ -149,66 +157,116 @@ impl<P: HasPosition> CursorContext<P> {
group.small_range().contains(self.offset) || group.right_curly().is_none()
}
pub fn find_citation_key_word(&self) -> Option<(&str, TextRange)> {
let key = self
pub fn find_citation_key_word(&self) -> Option<(String, TextRange)> {
let word = self
.cursor
.as_latex()
.filter(|token| token.kind() == latex::WORD)?;
let group = latex::CurlyGroupWordList::cast(key.parent())?;
let key = latex::Key::cast(word.parent())?;
let group = latex::CurlyGroupWordList::cast(key.syntax().parent()?)?;
latex::Citation::cast(group.syntax().parent()?)?;
Some((key.text(), key.text_range()))
Some((key.to_string(), key.small_range()))
}
pub fn find_citation_key_command(&self) -> Option<(&str, TextRange)> {
pub fn find_citation_key_command(&self) -> Option<(String, TextRange)> {
let command = self.cursor.as_latex()?;
let citation = latex::Citation::cast(command.parent())?;
let key = citation.key_list()?.words().next()?;
Some((key.text(), key.text_range()))
let key = citation.key_list()?.keys().next()?;
Some((key.to_string(), key.small_range()))
}
pub fn find_entry_key(&self) -> Option<(&str, TextRange)> {
let key = self
pub fn find_entry_key(&self) -> Option<(String, TextRange)> {
let word = self
.cursor
.as_bibtex()
.filter(|token| token.kind() == bibtex::WORD)?;
bibtex::Entry::cast(key.parent())?;
Some((key.text(), key.text_range()))
let key = bibtex::Key::cast(word.parent())?;
bibtex::Entry::cast(key.syntax().parent()?)?;
Some((key.to_string(), key.small_range()))
}
pub fn find_label_name_word(&self) -> Option<(&str, TextRange)> {
pub fn find_label_name_key(&self) -> Option<(String, TextRange)> {
let name = self
.cursor
.as_latex()
.filter(|token| token.kind() == latex::WORD)?;
let key = latex::Key::cast(name.parent())?;
if matches!(
name.parent().parent()?.kind(),
key.syntax().parent()?.parent()?.kind(),
latex::LABEL_DEFINITION | latex::LABEL_REFERENCE | latex::LABEL_REFERENCE_RANGE
) {
Some((name.text(), name.text_range()))
Some((key.to_string(), key.small_range()))
} else {
None
}
}
pub fn find_label_name_command(&self) -> Option<(&str, TextRange)> {
pub fn find_label_name_command(&self) -> Option<(String, TextRange)> {
let node = self.cursor.as_latex()?.parent();
if let Some(label) = latex::LabelDefinition::cast(node) {
let name = label.name()?.word()?;
Some((name.text(), name.text_range()))
let name = label.name()?.key()?;
Some((name.to_string(), name.small_range()))
} else if let Some(label) = latex::LabelReference::cast(node) {
let name = label.name_list()?.words().next()?;
Some((name.text(), name.text_range()))
let name = label.name_list()?.keys().next()?;
Some((name.to_string(), name.small_range()))
} else if let Some(label) = latex::LabelReferenceRange::cast(node) {
let name = label.from()?.word()?;
Some((name.text(), name.text_range()))
let name = label.from()?.key()?;
Some((name.to_string(), name.small_range()))
} else {
None
}
}
/// Returns the name text and range of the environment name under the cursor,
/// but only when the enclosing curly group is the direct argument of a
/// `\begin` or `\end` node.
pub fn find_environment_name(&self) -> Option<(String, TextRange)> {
let (name, range, group) = self.find_curly_group_word()?;
// Reject curly groups that are not attached to BEGIN/END nodes.
if !matches!(group.syntax().parent()?.kind(), latex::BEGIN | latex::END) {
return None;
}
Some((name, range))
}
/// Finds the `CurlyGroupWord` the cursor is inside and the key under the
/// cursor, if any.
///
/// Returns `(text, range, group)`: when the cursor token belongs to a
/// (possibly multi-word) `latex::Key`, `text`/`range` describe that key;
/// otherwise an empty string with an empty range at the cursor offset is
/// returned so callers can still complete inside an empty group.
pub fn find_curly_group_word(&self) -> Option<(String, TextRange, latex::CurlyGroupWord)> {
let token = self.cursor.as_latex()?;
let key = latex::Key::cast(token.parent());
// If the token is part of a key, the curly group is the key's parent;
// otherwise the token's own parent is taken as the candidate group.
let group = key
.as_ref()
.and_then(|key| key.syntax().parent())
.unwrap_or(token.parent());
let group =
latex::CurlyGroupWord::cast(group).filter(|group| self.is_inside_latex_curly(group))?;
key.map(|key| (key.to_string(), key.small_range(), group))
.or_else(|| Some((String::new(), TextRange::empty(self.offset), group)))
}
/// Same as `find_curly_group_word`, but for `CurlyGroupWordList` groups
/// (comma-separated arguments such as citation key lists).
///
/// Returns `(text, range, group)`; falls back to an empty string and an
/// empty range at the cursor offset when no key is under the cursor.
pub fn find_curly_group_word_list(
&self,
) -> Option<(String, TextRange, latex::CurlyGroupWordList)> {
let token = self.cursor.as_latex()?;
let key = latex::Key::cast(token.parent());
// Prefer the key's parent as the candidate group; otherwise use the
// token's parent directly.
let group = key
.as_ref()
.and_then(|key| key.syntax().parent())
.unwrap_or(token.parent());
let group = latex::CurlyGroupWordList::cast(group)
.filter(|group| self.is_inside_latex_curly(group))?;
key.map(|key| (key.to_string(), key.small_range(), group))
.or_else(|| Some((String::new(), TextRange::empty(self.offset), group)))
}
}
pub trait HasPosition {
@ -250,3 +308,9 @@ impl HasPosition for GotoDefinitionParams {
self.text_document_position_params.position
}
}
// Lets document-highlight requests drive a CursorContext by exposing the
// cursor position carried in the request parameters.
impl HasPosition for DocumentHighlightParams {
fn position(&self) -> Position {
self.text_document_position_params.position
}
}

View file

@ -13,29 +13,31 @@ pub fn goto_entry_definition(
) -> Option<Vec<LocationLink>> {
let main_document = context.request.main_document();
let key = context
let word = context
.cursor
.as_latex()
.filter(|token| token.kind() == latex::WORD)?;
latex::Citation::cast(key.parent().parent()?)?;
let key = latex::Key::cast(word.parent())?;
latex::Citation::cast(key.syntax().parent()?.parent()?)?;
let origin_selection_range = main_document
.line_index
.line_col_lsp_range(key.text_range());
.line_col_lsp_range(key.small_range());
for document in &context.request.subset.documents {
if let Some(data) = document.data.as_bibtex() {
for entry in data.root.children().filter_map(bibtex::Entry::cast) {
cancellation_token.result().ok()?;
if let Some(key) = entry.key().filter(|k| k.text() == key.text()) {
if let Some(key) = entry.key().filter(|k| k.to_string() == key.to_string()) {
return Some(vec![LocationLink {
origin_selection_range: Some(origin_selection_range),
target_uri: document.uri.as_ref().clone().into(),
target_selection_range: document
.line_index
.line_col_lsp_range(key.text_range()),
.line_col_lsp_range(key.small_range()),
target_range: document.line_index.line_col_lsp_range(entry.small_range()),
}]);
}

View file

@ -1,7 +1,10 @@
use cancellation::CancellationToken;
use lsp_types::{GotoDefinitionParams, LocationLink};
use crate::{features::cursor::CursorContext, find_label_definition, render_label, LineIndexExt};
use crate::{
features::cursor::CursorContext, find_label_definition, render_label, syntax::CstNode,
LineIndexExt,
};
pub fn goto_label_definition(
context: &CursorContext<GotoDefinitionParams>,
@ -10,7 +13,7 @@ pub fn goto_label_definition(
let main_document = context.request.main_document();
let (name_text, name_range) = context
.find_label_name_word()
.find_label_name_key()
.or_else(|| context.find_label_name_command())?;
let origin_selection_range = main_document.line_index.line_col_lsp_range(name_range);
@ -18,10 +21,10 @@ pub fn goto_label_definition(
for document in &context.request.subset.documents {
cancellation_token.result().ok()?;
if let Some(data) = document.data.as_latex() {
if let Some(definition) = find_label_definition(&data.root, name_text) {
let target_selection_range = definition.name()?.word()?.text_range();
if let Some(definition) = find_label_definition(&data.root, &name_text) {
let target_selection_range = definition.name()?.key()?.small_range();
let target_range =
render_label(&context.request.subset, name_text, Some(definition))
render_label(&context.request.subset, &name_text, Some(definition))
.map(|label| label.range)
.unwrap_or(target_selection_range);

View file

@ -148,7 +148,7 @@ impl<'a> Formatter<'a> {
self.visit_token_lowercase(entry.ty().unwrap());
self.output.push('{');
if let Some(key) = entry.key() {
self.output.push_str(key.text());
self.output.push_str(&key.to_string());
self.output.push(',');
self.output.push('\n');
for field in entry.fields() {

View file

@ -5,11 +5,12 @@ use lsp_types::{DocumentHighlight, DocumentHighlightParams};
use self::label::find_label_highlights;
use super::FeatureRequest;
use super::{cursor::CursorContext, FeatureRequest};
pub fn find_document_highlights(
request: FeatureRequest<DocumentHighlightParams>,
token: &CancellationToken,
cancellation_token: &CancellationToken,
) -> Option<Vec<DocumentHighlight>> {
find_label_highlights(&request, token)
let context = CursorContext::new(request);
find_label_highlights(&context, cancellation_token)
}

View file

@ -2,48 +2,32 @@ use cancellation::CancellationToken;
use lsp_types::{DocumentHighlight, DocumentHighlightKind, DocumentHighlightParams};
use crate::{
features::FeatureRequest,
features::cursor::CursorContext,
syntax::{latex, CstNode},
LineIndexExt,
};
pub fn find_label_highlights(
request: &FeatureRequest<DocumentHighlightParams>,
token: &CancellationToken,
context: &CursorContext<DocumentHighlightParams>,
cancellation_token: &CancellationToken,
) -> Option<Vec<DocumentHighlight>> {
let main_document = request.main_document();
let offset = main_document
.line_index
.offset_lsp(request.params.text_document_position_params.position);
let (name_text, _) = context.find_label_name_key()?;
let main_document = context.request.main_document();
let data = main_document.data.as_latex()?;
let name = data.root.token_at_offset(offset).right_biased()?;
if name.kind() != latex::WORD {
return None;
}
if !matches!(
name.parent().parent()?.kind(),
latex::LABEL_DEFINITION | latex::LABEL_REFERENCE | latex::LABEL_REFERENCE_RANGE
) {
return None;
}
let mut highlights = Vec::new();
for node in data.root.descendants() {
if token.is_canceled() {
return None;
}
cancellation_token.result().ok()?;
if let Some(label_name) = latex::LabelDefinition::cast(node)
.and_then(|label| label.name())
.and_then(|label_name| label_name.word())
.filter(|label_name| label_name.text() == name.text())
.and_then(|label_name| label_name.key())
.filter(|label_name| label_name.to_string() == name_text)
{
let range = main_document
.line_index
.line_col_lsp_range(label_name.text_range());
.line_col_lsp_range(label_name.small_range());
highlights.push(DocumentHighlight {
range,
@ -53,12 +37,12 @@ pub fn find_label_highlights(
for label_name in label
.name_list()
.into_iter()
.flat_map(|name| name.words())
.filter(|label_name| label_name.text() == name.text())
.flat_map(|name| name.keys())
.filter(|label_name| label_name.to_string() == name_text)
{
let range = main_document
.line_index
.line_col_lsp_range(label_name.text_range());
.line_col_lsp_range(label_name.small_range());
highlights.push(DocumentHighlight {
range,
@ -68,12 +52,12 @@ pub fn find_label_highlights(
} else if let Some(label) = latex::LabelReferenceRange::cast(node) {
if let Some(label_name) = label
.from()
.and_then(|label_name| label_name.word())
.filter(|label_name| label_name.text() == name.text())
.and_then(|label_name| label_name.key())
.filter(|label_name| label_name.to_string() == name_text)
{
let range = main_document
.line_index
.line_col_lsp_range(label_name.text_range());
.line_col_lsp_range(label_name.small_range());
highlights.push(DocumentHighlight {
range,
@ -83,12 +67,12 @@ pub fn find_label_highlights(
if let Some(label_name) = label
.to()
.and_then(|label_name| label_name.word())
.filter(|label_name| label_name.text() == name.text())
.and_then(|label_name| label_name.key())
.filter(|label_name| label_name.to_string() == name_text)
{
let range = main_document
.line_index
.line_col_lsp_range(label_name.text_range());
.line_col_lsp_range(label_name.small_range());
highlights.push(DocumentHighlight {
range,
@ -118,8 +102,9 @@ mod tests {
.character(0)
.build()
.highlight();
let context = CursorContext::new(request);
let actual_links = find_label_highlights(&request, CancellationToken::none());
let actual_links = find_label_highlights(&context, CancellationToken::none());
assert!(actual_links.is_none());
}
@ -133,8 +118,9 @@ mod tests {
.character(0)
.build()
.highlight();
let context = CursorContext::new(request);
let actual_links = find_label_highlights(&request, CancellationToken::none());
let actual_links = find_label_highlights(&context, CancellationToken::none());
assert!(actual_links.is_none());
}
@ -148,8 +134,9 @@ mod tests {
.character(7)
.build();
let request = tester.highlight();
let context = CursorContext::new(request);
let actual_highlights = find_label_highlights(&request, CancellationToken::none()).unwrap();
let actual_highlights = find_label_highlights(&context, CancellationToken::none()).unwrap();
let expected_highlights = vec![
DocumentHighlight {

View file

@ -10,10 +10,10 @@ pub fn find_label_hover(
let main_document = context.request.main_document();
let (name_text, name_range) = context
.find_label_name_word()
.find_label_name_key()
.or_else(|| context.find_label_name_command())?;
let label = render_label(&context.request.subset, name_text, None)?;
let label = render_label(&context.request.subset, &name_text, None)?;
Some(Hover {
range: Some(main_document.line_index.line_col_lsp_range(name_range)),

View file

@ -26,9 +26,9 @@ pub fn find_entry_references(
.descendants()
.filter_map(latex::Citation::cast)
.filter_map(|citation| citation.key_list())
.flat_map(|keys| keys.words())
.filter(|key| key.text() == key_text)
.map(|key| document.line_index.line_col_lsp_range(key.text_range()))
.flat_map(|keys| keys.keys())
.filter(|key| key.to_string() == key_text)
.map(|key| document.line_index.line_col_lsp_range(key.small_range()))
.for_each(|range| {
references.push(Location::new(document.uri.as_ref().clone().into(), range));
});
@ -38,8 +38,8 @@ pub fn find_entry_references(
.children()
.filter_map(bibtex::Entry::cast)
.filter_map(|entry| entry.key())
.filter(|key| key.text() == key_text)
.map(|key| document.line_index.line_col_lsp_range(key.text_range()))
.filter(|key| key.to_string() == key_text)
.map(|key| document.line_index.line_col_lsp_range(key.small_range()))
.for_each(|range| {
references.push(Location::new(document.uri.as_ref().clone().into(), range));
});

View file

@ -11,7 +11,7 @@ pub fn find_label_references(
cancellation_token.result().ok()?;
let (name_text, _) = context
.find_label_name_word()
.find_label_name_key()
.or_else(|| context.find_label_name_command())?;
for document in &context.request.subset.documents {

View file

@ -4,7 +4,7 @@ use cancellation::CancellationToken;
use lsp_types::{Range, RenameParams, TextEdit, WorkspaceEdit};
use crate::{
features::cursor::{Cursor, CursorContext, HasPosition},
features::cursor::{CursorContext, HasPosition},
syntax::{bibtex, latex, CstNode},
DocumentData, LineIndexExt,
};
@ -13,18 +13,9 @@ pub fn prepare_entry_rename<P: HasPosition>(
context: &CursorContext<P>,
_cancellation_token: &CancellationToken,
) -> Option<Range> {
let range = match &context.cursor {
Cursor::Latex(token) if token.kind() == latex::WORD => {
let group = latex::CurlyGroupWordList::cast(token.parent())?;
latex::Citation::cast(group.syntax().parent()?)?;
token.text_range()
}
Cursor::Bibtex(token) if token.kind() == bibtex::WORD => {
bibtex::Entry::cast(token.parent())?;
token.text_range()
}
_ => return None,
};
let (_, range) = context
.find_citation_key_word()
.or_else(|| context.find_entry_key())?;
Some(
context
@ -41,11 +32,9 @@ pub fn rename_entry(
) -> Option<WorkspaceEdit> {
cancellation_token.result().ok()?;
prepare_entry_rename(context, cancellation_token)?;
let key_text = context
.cursor
.as_latex()
.map(|token| token.text())
.or_else(|| context.cursor.as_bibtex().map(|token| token.text()))?;
let (key_text, _) = context
.find_citation_key_word()
.or_else(|| context.find_entry_key())?;
let mut changes = HashMap::new();
for document in &context.request.subset.documents {
@ -57,9 +46,9 @@ pub fn rename_entry(
.descendants()
.filter_map(latex::Citation::cast)
.filter_map(|citation| citation.key_list())
.flat_map(|keys| keys.words())
.filter(|key| key.text() == key_text)
.map(|key| document.line_index.line_col_lsp_range(key.text_range()))
.flat_map(|keys| keys.keys())
.filter(|key| key.to_string() == key_text)
.map(|key| document.line_index.line_col_lsp_range(key.small_range()))
.map(|range| TextEdit::new(range, context.request.params.new_name.clone()))
.collect();
changes.insert(document.uri.as_ref().clone().into(), edits);
@ -70,8 +59,8 @@ pub fn rename_entry(
.descendants()
.filter_map(bibtex::Entry::cast)
.filter_map(|entry| entry.key())
.filter(|key| key.text() == key_text)
.map(|key| document.line_index.line_col_lsp_range(key.text_range()))
.filter(|key| key.to_string() == key_text)
.map(|key| document.line_index.line_col_lsp_range(key.small_range()))
.map(|range| TextEdit::new(range, context.request.params.new_name.clone()))
.collect();
changes.insert(document.uri.as_ref().clone().into(), edits);

View file

@ -13,20 +13,14 @@ pub fn prepare_label_rename<P: HasPosition>(
context: &CursorContext<P>,
_cancellation_token: &CancellationToken,
) -> Option<Range> {
let name = context.cursor.as_latex()?;
name.parent().parent().filter(|node| {
matches!(
node.kind(),
latex::LABEL_DEFINITION | latex::LABEL_REFERENCE | latex::LABEL_REFERENCE_RANGE
)
})?;
let (_, range) = context.find_label_name_key()?;
Some(
context
.request
.main_document()
.line_index
.line_col_lsp_range(name.text_range()),
.line_col_lsp_range(range),
)
}
@ -35,7 +29,8 @@ pub fn rename_label(
cancellation_token: &CancellationToken,
) -> Option<WorkspaceEdit> {
prepare_label_rename(context, cancellation_token)?;
let name_text = context.cursor.as_latex()?.text();
let (name_text, _) = context.find_label_name_key()?;
let mut changes = HashMap::new();
for document in &context.request.subset.documents {
cancellation_token.result().ok()?;
@ -44,9 +39,9 @@ pub fn rename_label(
for node in data.root.descendants() {
if let Some(range) = latex::LabelDefinition::cast(node)
.and_then(|label| label.name())
.and_then(|name| name.word())
.filter(|name| name.text() == name_text)
.map(|name| document.line_index.line_col_lsp_range(name.text_range()))
.and_then(|name| name.key())
.filter(|name| name.to_string() == name_text)
.map(|name| document.line_index.line_col_lsp_range(name.small_range()))
{
edits.push(TextEdit::new(
range,
@ -57,9 +52,9 @@ pub fn rename_label(
latex::LabelReference::cast(node)
.and_then(|label| label.name_list())
.into_iter()
.flat_map(|label| label.words())
.filter(|name| name.text() == name_text)
.map(|name| document.line_index.line_col_lsp_range(name.text_range()))
.flat_map(|label| label.keys())
.filter(|name| name.to_string() == name_text)
.map(|name| document.line_index.line_col_lsp_range(name.small_range()))
.for_each(|range| {
edits.push(TextEdit::new(
range,
@ -70,22 +65,22 @@ pub fn rename_label(
if let Some(label) = latex::LabelReferenceRange::cast(node) {
if let Some(name1) = label
.from()
.and_then(|name| name.word())
.filter(|name| name.text() == name_text)
.and_then(|name| name.key())
.filter(|name| name.to_string() == name_text)
{
edits.push(TextEdit::new(
document.line_index.line_col_lsp_range(name1.text_range()),
document.line_index.line_col_lsp_range(name1.small_range()),
context.request.params.new_name.clone(),
));
}
if let Some(name2) = label
.from()
.and_then(|name| name.word())
.filter(|name| name.text() == name_text)
.and_then(|name| name.key())
.filter(|name| name.to_string() == name_text)
{
edits.push(TextEdit::new(
document.line_index.line_col_lsp_range(name2.text_range()),
document.line_index.line_col_lsp_range(name2.small_range()),
context.request.params.new_name.clone(),
));
}

View file

@ -68,7 +68,7 @@ pub fn find_bibtex_symbols(
.unwrap_or(BibtexEntryTypeCategory::Misc);
buf.push(InternalSymbol {
name: key.text().to_string(),
name: key.to_string(),
label: None,
kind: InternalSymbolKind::Entry(category),
deprecated: false,
@ -77,7 +77,7 @@ pub fn find_bibtex_symbols(
.line_col_lsp_range(entry.small_range()),
selection_range: main_document
.line_index
.line_col_lsp_range(key.text_range()),
.line_col_lsp_range(key.small_range()),
children,
});
}

View file

@ -59,25 +59,25 @@ fn visit(context: &mut Context, node: &latex::SyntaxNode) -> Vec<InternalSymbol>
latex::ENVIRONMENT => latex::Environment::cast(node)
.and_then(|env| env.begin())
.and_then(|begin| begin.name())
.and_then(|name| name.word())
.map(|name| name.text())
.and_then(|name| name.key())
.map(|name| name.to_string())
.and_then(|name| {
if LANGUAGE_DATA
.math_environments
.iter()
.any(|env| env == name)
.any(|env| env == &name)
{
visit_equation_environment(context, node)
} else if LANGUAGE_DATA
.enum_environments
.iter()
.any(|env| env == name)
.any(|env| env == &name)
{
visit_enumeration(context, node, name)
} else if let Ok(float_kind) = LabelledFloatKind::from_str(name) {
visit_enumeration(context, node, &name)
} else if let Ok(float_kind) = LabelledFloatKind::from_str(&name) {
visit_float(context, node, float_kind)
} else {
visit_theorem(context, node, name)
visit_theorem(context, node, &name)
}
}),
_ => None,
@ -161,12 +161,12 @@ fn visit_enum_item(context: &mut Context, node: &latex::SyntaxNode) -> Option<In
.filter_map(latex::Environment::cast)
.filter_map(|environment| environment.begin())
.filter_map(|begin| begin.name())
.filter_map(|name| name.word())
.filter_map(|name| name.key())
.any(|name| {
LANGUAGE_DATA
.enum_environments
.iter()
.any(|e| e == name.text())
.any(|e| e == &name.to_string())
})
{
return None;
@ -471,7 +471,7 @@ fn find_label_by_parent(
) -> Option<NumberedLabel> {
let node = parent.children().find_map(latex::LabelDefinition::cast)?;
let name = node.name()?.word()?.text();
let name = node.name()?.key()?.to_string();
let range = context
.subset
.documents
@ -479,7 +479,7 @@ fn find_label_by_parent(
.line_index
.line_col_lsp_range(node.small_range());
let number = find_label_number(&context.subset, name);
let number = find_label_number(&context.subset, &name);
Some(NumberedLabel {
name: name.to_string(),
range,

View file

@ -157,8 +157,9 @@ pub fn find_label_definition<'a>(
.find(|label| {
label
.name()
.and_then(|name| name.word())
.map(|name| name.text())
.and_then(|name| name.key())
.map(|name| name.to_string())
.as_deref()
== Some(label_name)
})
}
@ -178,7 +179,7 @@ fn render_label_float(
number: &mut Option<String>,
) -> Option<RenderedLabel> {
let environment = latex::Environment::cast(parent)?;
let environment_name = environment.begin()?.name()?.word()?.text();
let environment_name = environment.begin()?.name()?.key()?.to_string();
let kind = LabelledFloatKind::from_str(&environment_name).ok()?;
let caption = find_caption_by_parent(&parent)?;
Some(RenderedLabel {
@ -235,12 +236,12 @@ fn render_label_equation(
number: &mut Option<String>,
) -> Option<RenderedLabel> {
let environment = latex::Environment::cast(parent)?;
let environment_name = environment.begin()?.name()?.word()?.text();
let environment_name = environment.begin()?.name()?.key()?.to_string();
if !LANGUAGE_DATA
.math_environments
.iter()
.any(|name| name == environment_name)
.any(|name| name == &environment_name)
{
return None;
}
@ -261,7 +262,7 @@ fn render_label_theorem(
let begin = environment.begin()?;
let description = begin.options().and_then(|options| options.content_text());
let environment_name = begin.name()?.word()?.text();
let environment_name = begin.name()?.key()?.to_string();
let theorem = subset.documents.iter().find_map(|document| {
document.data.as_latex().and_then(|data| {

View file

@ -1,4 +1,5 @@
use cstree::TextRange;
use itertools::{EitherOrBoth, Itertools};
use crate::syntax::CstNode;
@ -159,11 +160,8 @@ impl<'a> HasType<'a> for Entry<'a> {}
impl<'a> HasDelimiters<'a> for Entry<'a> {}
impl<'a> Entry<'a> {
pub fn key(&self) -> Option<&'a SyntaxToken> {
self.syntax()
.children_with_tokens()
.filter_map(|node| node.into_token())
.find(|node| node.kind() == WORD)
pub fn key(&self) -> Option<Key<'a>> {
self.syntax().children().find_map(Key::cast)
}
pub fn fields(&self) -> impl Iterator<Item = Field<'a>> {
@ -171,6 +169,36 @@ impl<'a> Entry<'a> {
}
}
cst_node!(Key, KEY);

impl<'a> Key<'a> {
    /// Iterates over the `WORD` tokens that make up this (possibly
    /// multi-word) key, skipping whitespace and other trivia.
    pub fn words(&self) -> impl Iterator<Item = &'a SyntaxToken> {
        self.syntax()
            .children_with_tokens()
            .filter_map(|element| element.into_token())
            .filter(|token| token.kind() == WORD)
    }
}

// Two keys are equal when their word sequences match pairwise;
// the amount of whitespace between words is not significant.
impl<'a> PartialEq for Key<'a> {
    fn eq(&self, other: &Self) -> bool {
        let mut lhs = self.words();
        let mut rhs = other.words();
        loop {
            match (lhs.next(), rhs.next()) {
                (Some(left), Some(right)) if left.text() == right.text() => {}
                (None, None) => return true,
                _ => return false,
            }
        }
    }
}

impl<'a> Eq for Key<'a> {}

// Renders the key with its words joined by single spaces,
// normalizing whatever whitespace appeared in the source.
impl<'a> ToString for Key<'a> {
    fn to_string(&self) -> String {
        self.words()
            .map(|word| word.text())
            .collect::<Vec<_>>()
            .join(" ")
    }
}
cst_node!(Field, FIELD);
impl<'a> Field<'a> {

View file

@ -26,6 +26,7 @@ pub enum SyntaxKind {
STRING,
COMMENT,
ENTRY,
KEY,
FIELD,
VALUE,
TOKEN,

View file

@ -174,7 +174,7 @@ impl<'a> Parser<'a> {
self.builder.finish_node();
return;
}
self.eat();
self.key();
while let Some(kind) = self.peek() {
match kind {
@ -190,6 +190,18 @@ impl<'a> Parser<'a> {
self.builder.finish_node();
}
/// Parses a (possibly multi-word) entry key into a `KEY` node.
/// Consecutive `WORD` and `WHITESPACE` tokens are consumed so that
/// keys containing spaces end up inside a single node.
fn key(&mut self) {
    self.builder.start_node(KEY.into());
    while matches!(self.peek(), Some(WORD) | Some(WHITESPACE)) {
        self.eat();
    }
    self.builder.finish_node();
}
fn field(&mut self) {
self.builder.start_node(FIELD.into());
self.eat();

View file

@ -7,7 +7,8 @@ ROOT@0..50
ENTRY@0..50
ENTRY_TYPE@0..8 "@article"
L_PAREN@8..9 "("
WORD@9..12 "foo"
KEY@9..12
WORD@9..12 "foo"
COMMA@12..13 ","
WHITESPACE@13..14 " "
FIELD@14..32

View file

@ -7,7 +7,8 @@ ROOT@0..14
ENTRY@0..14
ENTRY_TYPE@0..8 "@article"
L_CURLY@8..9 "{"
WORD@9..12 "foo"
KEY@9..12
WORD@9..12 "foo"
COMMA@12..13 ","
R_CURLY@13..14 "}"

View file

@ -7,6 +7,7 @@ ROOT@0..13
ENTRY@0..13
ENTRY_TYPE@0..8 "@article"
L_CURLY@8..9 "{"
WORD@9..12 "foo"
KEY@9..12
WORD@9..12 "foo"
R_CURLY@12..13 "}"

View file

@ -7,7 +7,8 @@ ROOT@0..33
ENTRY@0..33
ENTRY_TYPE@0..8 "@article"
L_CURLY@8..9 "{"
WORD@9..12 "foo"
KEY@9..12
WORD@9..12 "foo"
COMMA@12..13 ","
WHITESPACE@13..14 " "
FIELD@14..32

View file

@ -7,7 +7,8 @@ ROOT@0..37
ENTRY@0..37
ENTRY_TYPE@0..8 "@article"
L_CURLY@8..9 "{"
WORD@9..16 "foo2021"
KEY@9..16
WORD@9..16 "foo2021"
COMMA@16..17 ","
WHITESPACE@17..18 " "
FIELD@18..36

View file

@ -7,7 +7,8 @@ ROOT@0..34
ENTRY@0..34
ENTRY_TYPE@0..8 "@article"
L_CURLY@8..9 "{"
WORD@9..12 "foo"
KEY@9..12
WORD@9..12 "foo"
COMMA@12..13 ","
WHITESPACE@13..14 " "
FIELD@14..32

View file

@ -7,7 +7,8 @@ ROOT@0..56
ENTRY@0..56
ENTRY_TYPE@0..8 "@article"
L_CURLY@8..9 "{"
WORD@9..12 "foo"
KEY@9..12
WORD@9..12 "foo"
COMMA@12..13 ","
WHITESPACE@13..14 " "
FIELD@14..32

View file

@ -7,7 +7,8 @@ ROOT@0..36
ENTRY@0..36
ENTRY_TYPE@0..8 "@article"
L_CURLY@8..9 "{"
WORD@9..12 "foo"
KEY@9..12
WORD@9..12 "foo"
COMMA@12..13 ","
WHITESPACE@13..14 " "
FIELD@14..32

View file

@ -4,7 +4,7 @@ use super::LatexAnalyzerContext;
pub fn analyze_begin(context: &mut LatexAnalyzerContext, node: &latex::SyntaxNode) -> Option<()> {
let begin = latex::Begin::cast(node)?;
let name = begin.name()?.word()?.text();
let name = begin.name()?.key()?.to_string();
let extras = &mut context.extras;
extras.environment_names.insert(name.into());
Some(())

View file

@ -23,22 +23,22 @@ pub fn analyze_include(context: &mut LatexAnalyzerContext, node: &latex::SyntaxN
ExplicitLinkKind::Class => &["cls"],
};
for path in include.path_list()?.words() {
let stem = path.text();
let mut targets = vec![Arc::new(context.base_uri.join(stem).ok()?.into())];
for path in include.path_list()?.keys() {
let stem = path.to_string();
let mut targets = vec![Arc::new(context.base_uri.join(&stem).ok()?.into())];
for extension in extensions {
let path = format!("{}.{}", stem, extension);
targets.push(Arc::new(context.base_uri.join(&path).ok()?.into()));
}
resolve_distro_file(&context.inner.resolver.lock().unwrap(), stem, extensions)
resolve_distro_file(&context.inner.resolver.lock().unwrap(), &stem, extensions)
.into_iter()
.for_each(|target| targets.push(Arc::new(target)));
context.extras.explicit_links.push(ExplicitLink {
kind,
stem: stem.into(),
stem_range: path.text_range(),
stem_range: path.small_range(),
targets,
});
}
@ -52,19 +52,19 @@ pub fn analyze_import(context: &mut LatexAnalyzerContext, node: &latex::SyntaxNo
let mut targets = Vec::new();
let directory = context
.base_uri
.join(import.directory()?.word()?.text())
.join(&import.directory()?.key()?.to_string())
.ok()?;
let file = import.file()?.word()?;
let stem = file.text();
targets.push(Arc::new(directory.join(stem).ok()?.into()));
let file = import.file()?.key()?;
let stem = file.to_string();
targets.push(Arc::new(directory.join(&stem).ok()?.into()));
targets.push(Arc::new(
directory.join(&format!("{}.tex", stem)).ok()?.into(),
));
context.extras.explicit_links.push(ExplicitLink {
stem: stem.into(),
stem_range: file.text_range(),
stem_range: file.small_range(),
targets,
kind: ExplicitLinkKind::Latex,
});

View file

@ -18,10 +18,10 @@ fn analyze_label_definition_name(
node: &latex::SyntaxNode,
) -> Option<()> {
let label = latex::LabelDefinition::cast(node)?;
let name = label.name()?.word()?;
let name = label.name()?.key()?;
context.extras.label_names.push(LabelName {
text: name.text().into(),
range: name.text_range(),
text: name.to_string().into(),
range: name.small_range(),
is_definition: true,
});
Some(())
@ -32,10 +32,10 @@ fn analyze_label_reference_name(
node: &latex::SyntaxNode,
) -> Option<()> {
let label = latex::LabelReference::cast(node)?;
for name in label.name_list()?.words() {
for name in label.name_list()?.keys() {
context.extras.label_names.push(LabelName {
text: name.text().into(),
range: name.text_range(),
text: name.to_string().into(),
range: name.small_range(),
is_definition: false,
});
}
@ -47,18 +47,18 @@ fn analyze_label_reference_range_name(
node: &latex::SyntaxNode,
) -> Option<()> {
let label = LabelReferenceRange::cast(node)?;
if let Some(name1) = label.from().and_then(|name| name.word()) {
if let Some(name1) = label.from().and_then(|name| name.key()) {
context.extras.label_names.push(LabelName {
text: name1.text().into(),
range: name1.text_range(),
text: name1.to_string().into(),
range: name1.small_range(),
is_definition: false,
});
}
if let Some(name2) = label.to().and_then(|name| name.word()) {
if let Some(name2) = label.to().and_then(|name| name.key()) {
context.extras.label_names.push(LabelName {
text: name2.text().into(),
range: name2.text_range(),
text: name2.to_string().into(),
range: name2.small_range(),
is_definition: false,
});
}

View file

@ -7,7 +7,7 @@ pub fn analyze_label_number(
node: &latex::SyntaxNode,
) -> Option<()> {
let number = latex::LabelNumber::cast(node)?;
let name = number.name()?.word()?.text().into();
let name = number.name()?.key()?.to_string();
let text = number
.text()?
.syntax()

View file

@ -10,7 +10,7 @@ pub fn analyze_theorem_definition(
node: &latex::SyntaxNode,
) -> Option<()> {
let theorem = latex::TheoremDefinition::cast(node)?;
let name = theorem.name()?.word()?.text().into();
let name = theorem.name()?.key()?.to_string();
let description = theorem.description()?;
let description = description.content_text()?;

View file

@ -20,9 +20,9 @@ pub struct Extras {
pub explicit_links: Vec<ExplicitLink>,
pub has_document_environment: bool,
pub command_names: FxHashSet<SmolStr>,
pub environment_names: FxHashSet<SmolStr>,
pub environment_names: FxHashSet<String>,
pub label_names: Vec<LabelName>,
pub label_numbers_by_name: FxHashMap<SmolStr, String>,
pub label_numbers_by_name: FxHashMap<String, String>,
pub theorem_environments: Vec<TheoremEnvironment>,
}
@ -61,7 +61,7 @@ impl ExplicitLink {
#[derive(Debug, PartialEq, Eq, Clone, Default, Hash)]
pub struct TheoremEnvironment {
pub name: SmolStr,
pub name: String,
pub description: String,
}

View file

@ -1,4 +1,5 @@
use cstree::TextRange;
use itertools::{EitherOrBoth, Itertools};
use crate::syntax::CstNode;
@ -6,7 +7,7 @@ use super::{Language, SyntaxKind::*, SyntaxNode, SyntaxToken};
macro_rules! cst_node {
($name:ident, $($kind:pat),+) => {
#[derive(Clone)]
#[derive(Clone, Copy)]
#[repr(transparent)]
pub struct $name<'a>(&'a SyntaxNode);
@ -45,6 +46,15 @@ macro_rules! cst_node {
cst_node!(Text, TEXT);

impl<'a> Text<'a> {
    /// Returns the `WORD` tokens that are direct children of this
    /// text node, skipping any non-token or non-word children.
    pub fn words(&self) -> impl Iterator<Item = &'a SyntaxToken> {
        self.syntax()
            .children_with_tokens()
            .filter_map(|node| node.into_token())
            .filter(|node| node.kind() == WORD)
    }
}
pub trait HasCurly<'a>: CstNode<'a, Lang = Language> {
fn left_curly(&self) -> Option<&'a SyntaxToken> {
self.syntax()
@ -168,11 +178,8 @@ cst_node!(CurlyGroupWord, CURLY_GROUP_WORD);
impl<'a> HasCurly<'a> for CurlyGroupWord<'a> {}
impl<'a> CurlyGroupWord<'a> {
pub fn word(&self) -> Option<&'a SyntaxToken> {
self.syntax()
.children_with_tokens()
.filter_map(|node| node.into_token())
.find(|node| node.kind() == WORD)
pub fn key(&self) -> Option<Key<'a>> {
self.syntax().children().find_map(Key::cast)
}
}
@ -181,11 +188,8 @@ cst_node!(BrackGroupWord, BRACK_GROUP_WORD);
impl<'a> HasBrack<'a> for BrackGroupWord<'a> {}
impl<'a> BrackGroupWord<'a> {
pub fn word(&self) -> Option<&'a SyntaxToken> {
self.syntax()
.children_with_tokens()
.filter_map(|node| node.into_token())
.find(|node| node.kind() == WORD)
pub fn key(&self) -> Option<Key<'a>> {
self.syntax().children().find_map(Key::cast)
}
}
@ -194,11 +198,8 @@ cst_node!(CurlyGroupWordList, CURLY_GROUP_WORD_LIST);
impl<'a> HasCurly<'a> for CurlyGroupWordList<'a> {}
impl<'a> CurlyGroupWordList<'a> {
pub fn words(&self) -> impl Iterator<Item = &'a SyntaxToken> {
self.syntax()
.children_with_tokens()
.filter_map(|node| node.into_token())
.filter(|node| node.kind() == WORD)
pub fn keys(&self) -> impl Iterator<Item = Key<'a>> {
self.syntax().children().filter_map(Key::cast)
}
}
@ -226,6 +227,25 @@ impl<'a> Key<'a> {
}
}
impl<'a> PartialEq for Key<'a> {
fn eq(&self, other: &Self) -> bool {
self.words()
.zip_longest(other.words())
.all(|result| match result {
EitherOrBoth::Both(left, right) => left.text() == right.text(),
EitherOrBoth::Left(_) | EitherOrBoth::Right(_) => false,
})
}
}
impl<'a> Eq for Key<'a> {}
impl<'a> ToString for Key<'a> {
fn to_string(&self) -> String {
self.words().map(|word| word.text()).join(" ")
}
}
cst_node!(Value, VALUE);
cst_node!(KeyValuePair, KEY_VALUE_PAIR);

View file

@ -173,20 +173,6 @@ impl<'a> Parser<'a> {
self.builder.finish_node();
}
fn group_with_token(
&mut self,
node_kind: SyntaxKind,
content_kind: SyntaxKind,
right_kind: SyntaxKind,
) {
self.builder.start_node(node_kind.into());
self.eat();
self.trivia();
self.expect(content_kind);
self.expect(right_kind);
self.builder.finish_node();
}
fn curly_group(&mut self) {
self.builder.start_node(CURLY_GROUP.into());
self.eat();
@ -219,7 +205,23 @@ impl<'a> Parser<'a> {
}
fn curly_group_word(&mut self) {
self.group_with_token(CURLY_GROUP_WORD, WORD, R_CURLY);
self.builder.start_node(CURLY_GROUP_WORD.into());
self.eat();
self.trivia();
match self.peek() {
Some(WORD) => {
self.key();
}
Some(PARAMETER) => {
self.eat();
self.trivia();
}
Some(_) | None => {
self.builder.token(MISSING.into(), "");
}
}
self.expect(R_CURLY);
self.builder.finish_node();
}
fn curly_group_word_list(&mut self) {
@ -228,10 +230,14 @@ impl<'a> Parser<'a> {
while self
.peek()
.filter(|&kind| matches!(kind, WHITESPACE | COMMENT | WORD | COMMA))
.filter(|&kind| matches!(kind, WHITESPACE | COMMENT | WORD | COMMA | PARAMETER))
.is_some()
{
self.eat();
if self.peek() == Some(WORD) {
self.key();
} else {
self.eat();
}
}
self.expect(R_CURLY);
@ -239,7 +245,24 @@ impl<'a> Parser<'a> {
}
fn curly_group_command(&mut self) {
self.group_with_token(CURLY_GROUP_COMMAND, GENERIC_COMMAND_NAME, R_CURLY);
self.builder.start_node(CURLY_GROUP_COMMAND.into());
self.eat();
self.trivia();
match self.peek() {
Some(kind) if kind.is_command_name() => {
self.eat();
self.trivia();
}
Some(PARAMETER) => {
self.eat();
self.trivia();
}
Some(_) | None => {
self.builder.token(MISSING.into(), "");
}
}
self.expect(R_CURLY);
self.builder.finish_node();
}
fn brack_group(&mut self) {
@ -271,38 +294,25 @@ impl<'a> Parser<'a> {
}
fn brack_group_word(&mut self) {
self.group_with_token(BRACK_GROUP_WORD, WORD, R_BRACK);
self.builder.start_node(BRACK_GROUP_WORD.into());
self.eat();
self.trivia();
match self.peek() {
Some(WORD) => {
self.key();
}
Some(PARAMETER) => {
self.eat();
self.trivia();
}
Some(_) | None => {
self.builder.token(MISSING.into(), "");
}
}
self.expect(R_BRACK);
self.builder.finish_node();
}
// fn paren_group(&mut self) {
// self.builder.start_node(PAREN_GROUP.into());
// self.eat();
// while self
// .peek()
// .filter(|&kind| {
// !matches!(
// kind,
// R_CURLY
// | R_BRACK
// | R_PAREN
// | PART_NAME
// | CHAPTER_NAME
// | SECTION_NAME
// | SUBSECTION_NAME
// | PARAGRAPH_NAME
// | SUBPARAGRAPH_NAME
// | ENUM_ITEM_NAME
// | END_ENVIRONMENT_NAME
// )
// })
// .is_some()
// {
// self.content();
// }
// self.expect(R_PAREN);
// self.builder.finish_node();
// }
fn mixed_group(&mut self) {
self.builder.start_node(MIXED_GROUP.into());
self.eat();

View file

@ -9,7 +9,8 @@ ROOT@0..64
ACRONYM_DECLARATION_NAME@0..15 "\\DeclareAcronym"
CURLY_GROUP_WORD@15..19
L_CURLY@15..16 "{"
WORD@16..18 "eg"
KEY@16..18
WORD@16..18 "eg"
R_CURLY@18..19 "}"
CURLY_GROUP_KEY_VALUE@19..64
L_CURLY@19..20 "{"

View file

@ -27,7 +27,8 @@ ROOT@0..76
R_BRACK@42..43 "]"
CURLY_GROUP_WORD@43..53
L_CURLY@43..44 "{"
WORD@44..52 "fpsLabel"
KEY@44..52
WORD@44..52 "fpsLabel"
R_CURLY@52..53 "}"
CURLY_GROUP@53..58
L_CURLY@53..54 "{"

View file

@ -9,7 +9,8 @@ ROOT@0..44
ACRONYM_DEFINITION_NAME@0..11 "\\newacronym"
CURLY_GROUP_WORD@11..21
L_CURLY@11..12 "{"
WORD@12..20 "fpsLabel"
KEY@12..20
WORD@12..20 "fpsLabel"
R_CURLY@20..21 "}"
CURLY_GROUP@21..26
L_CURLY@21..22 "{"

View file

@ -24,6 +24,7 @@ ROOT@0..32
R_BRACK@21..22 "]"
CURLY_GROUP_WORD@22..32
L_CURLY@22..23 "{"
WORD@23..31 "fpsLabel"
KEY@23..31
WORD@23..31 "fpsLabel"
R_CURLY@31..32 "}"

View file

@ -9,6 +9,7 @@ ROOT@0..19
ACRONYM_REFERENCE_NAME@0..9 "\\acrshort"
CURLY_GROUP_WORD@9..19
L_CURLY@9..10 "{"
WORD@10..18 "fpsLabel"
KEY@10..18
WORD@10..18 "fpsLabel"
R_CURLY@18..19 "}"

View file

@ -25,6 +25,7 @@ ROOT@0..42
R_BRACK@28..29 "]"
CURLY_GROUP_WORD_LIST@29..42
L_CURLY@29..30 "{"
WORD@30..41 "foo/bar.bib"
KEY@30..41
WORD@30..41 "foo/bar.bib"
R_CURLY@41..42 "}"

View file

@ -9,6 +9,7 @@ ROOT@0..28
BIBLATEX_INCLUDE_NAME@0..15 "\\addbibresource"
CURLY_GROUP_WORD_LIST@15..28
L_CURLY@15..16 "{"
WORD@16..27 "foo/bar.bib"
KEY@16..27
WORD@16..27 "foo/bar.bib"
R_CURLY@27..28 "}"

View file

@ -9,6 +9,7 @@ ROOT@0..22
BIBTEX_INCLUDE_NAME@0..13 "\\bibliography"
CURLY_GROUP_WORD_LIST@13..22
L_CURLY@13..14 "{"
WORD@14..21 "foo/bar"
KEY@14..21
WORD@14..21 "foo/bar"
R_CURLY@21..22 "}"

View file

@ -10,7 +10,8 @@ ROOT@0..39
BEGIN_ENVIRONMENT_NAME@0..6 "\\begin"
CURLY_GROUP_WORD@6..14
L_CURLY@6..7 "{"
WORD@7..13 "figure"
KEY@7..13
WORD@7..13 "figure"
R_CURLY@13..14 "}"
CAPTION@14..27
CAPTION_NAME@14..22 "\\caption"
@ -23,6 +24,7 @@ ROOT@0..39
END_ENVIRONMENT_NAME@27..31 "\\end"
CURLY_GROUP_WORD@31..39
L_CURLY@31..32 "{"
WORD@32..38 "figure"
KEY@32..38
WORD@32..38 "figure"
R_CURLY@38..39 "}"

View file

@ -9,6 +9,7 @@ ROOT@0..9
CITATION_NAME@0..5 "\\cite"
CURLY_GROUP_WORD_LIST@5..9
L_CURLY@5..6 "{"
WORD@6..9 "foo"
KEY@6..9
WORD@6..9 "foo"
MISSING@9..9 ""

View file

@ -9,9 +9,11 @@ ROOT@0..15
CITATION_NAME@0..5 "\\cite"
CURLY_GROUP_WORD_LIST@5..15
L_CURLY@5..6 "{"
WORD@6..9 "foo"
KEY@6..9
WORD@6..9 "foo"
COMMA@9..10 ","
WHITESPACE@10..11 " "
WORD@11..14 "bar"
KEY@11..14
WORD@11..14 "bar"
R_CURLY@14..15 "}"

View file

@ -14,6 +14,7 @@ ROOT@0..15
R_BRACK@9..10 "]"
CURLY_GROUP_WORD_LIST@10..15
L_CURLY@10..11 "{"
WORD@11..14 "bar"
KEY@11..14
WORD@11..14 "bar"
R_CURLY@14..15 "}"

View file

@ -19,6 +19,7 @@ ROOT@0..20
R_BRACK@14..15 "]"
CURLY_GROUP_WORD_LIST@15..20
L_CURLY@15..16 "{"
WORD@16..19 "baz"
KEY@16..19
WORD@16..19 "baz"
R_CURLY@19..20 "}"

View file

@ -10,7 +10,8 @@ ROOT@0..12
CURLY_GROUP_WORD_LIST@5..12
L_CURLY@5..6 "{"
COMMA@6..7 ","
WORD@7..10 "foo"
KEY@7..10
WORD@7..10 "foo"
COMMA@10..11 ","
R_CURLY@11..12 "}"

View file

@ -9,6 +9,7 @@ ROOT@0..10
CITATION_NAME@0..5 "\\cite"
CURLY_GROUP_WORD_LIST@5..10
L_CURLY@5..6 "{"
WORD@6..9 "foo"
KEY@6..9
WORD@6..9 "foo"
R_CURLY@9..10 "}"

View file

@ -9,6 +9,7 @@ ROOT@0..10
CITATION_NAME@0..7 "\\nocite"
CURLY_GROUP_WORD_LIST@7..10
L_CURLY@7..8 "{"
WORD@8..9 "*"
KEY@8..9
WORD@8..9 "*"
R_CURLY@9..10 "}"

View file

@ -32,6 +32,7 @@ ROOT@0..44
R_BRACK@34..35 "]"
CURLY_GROUP_WORD_LIST@35..44
L_CURLY@35..36 "{"
WORD@36..43 "article"
KEY@36..43
WORD@36..43 "article"
R_CURLY@43..44 "}"

View file

@ -9,6 +9,7 @@ ROOT@0..23
CLASS_INCLUDE_NAME@0..14 "\\documentclass"
CURLY_GROUP_WORD_LIST@14..23
L_CURLY@14..15 "{"
WORD@15..22 "article"
KEY@15..22
WORD@15..22 "article"
R_CURLY@22..23 "}"

View file

@ -9,11 +9,13 @@ ROOT@0..33
COLOR_DEFINITION_NAME@0..12 "\\definecolor"
CURLY_GROUP_WORD@12..17
L_CURLY@12..13 "{"
WORD@13..16 "foo"
KEY@13..16
WORD@13..16 "foo"
R_CURLY@16..17 "}"
CURLY_GROUP_WORD@17..22
L_CURLY@17..18 "{"
WORD@18..21 "rgb"
KEY@18..21
WORD@18..21 "rgb"
R_CURLY@21..22 "}"
CURLY_GROUP@22..33
L_CURLY@22..23 "{"

View file

@ -9,6 +9,7 @@ ROOT@0..13
COLOR_REFERENCE_NAME@0..6 "\\color"
CURLY_GROUP_WORD@6..13
L_CURLY@6..7 "{"
WORD@7..12 "black"
KEY@7..12
WORD@7..12 "black"
R_CURLY@12..13 "}"

View file

@ -9,21 +9,26 @@ ROOT@0..39
COLOR_SET_DEFINITION_NAME@0..15 "\\definecolorset"
BRACK_GROUP_WORD@15..19
L_BRACK@15..16 "["
WORD@16..18 "ty"
KEY@16..18
WORD@16..18 "ty"
R_BRACK@18..19 "]"
CURLY_GROUP_WORD_LIST@19..29
L_CURLY@19..20 "{"
WORD@20..23 "rgb"
KEY@20..23
WORD@20..23 "rgb"
COMMA@23..24 ","
WORD@24..28 "HTML"
KEY@24..28
WORD@24..28 "HTML"
R_CURLY@28..29 "}"
CURLY_GROUP_WORD@29..34
L_CURLY@29..30 "{"
WORD@30..33 "foo"
KEY@30..33
WORD@30..33 "foo"
R_CURLY@33..34 "}"
CURLY_GROUP_WORD@34..39
L_CURLY@34..35 "{"
WORD@35..38 "bar"
KEY@35..38
WORD@35..38 "bar"
R_CURLY@38..39 "}"
MISSING@39..39 ""

View file

@ -9,13 +9,16 @@ ROOT@0..30
COLOR_SET_DEFINITION_NAME@0..15 "\\definecolorset"
CURLY_GROUP_WORD_LIST@15..25
L_CURLY@15..16 "{"
WORD@16..19 "rgb"
KEY@16..19
WORD@16..19 "rgb"
COMMA@19..20 ","
WORD@20..24 "HTML"
KEY@20..24
WORD@20..24 "HTML"
R_CURLY@24..25 "}"
CURLY_GROUP_WORD@25..30
L_CURLY@25..26 "{"
WORD@26..29 "foo"
KEY@26..29
WORD@26..29 "foo"
R_CURLY@29..30 "}"
MISSING@30..30 ""
MISSING@30..30 ""

View file

@ -9,9 +9,11 @@ ROOT@0..25
COLOR_SET_DEFINITION_NAME@0..15 "\\definecolorset"
CURLY_GROUP_WORD_LIST@15..25
L_CURLY@15..16 "{"
WORD@16..19 "rgb"
KEY@16..19
WORD@16..19 "rgb"
COMMA@19..20 ","
WORD@20..24 "HTML"
KEY@20..24
WORD@20..24 "HTML"
R_CURLY@24..25 "}"
MISSING@25..25 ""
MISSING@25..25 ""

View file

@ -9,24 +9,30 @@ ROOT@0..44
COLOR_SET_DEFINITION_NAME@0..15 "\\definecolorset"
BRACK_GROUP_WORD@15..19
L_BRACK@15..16 "["
WORD@16..18 "ty"
KEY@16..18
WORD@16..18 "ty"
R_BRACK@18..19 "]"
CURLY_GROUP_WORD_LIST@19..29
L_CURLY@19..20 "{"
WORD@20..23 "rgb"
KEY@20..23
WORD@20..23 "rgb"
COMMA@23..24 ","
WORD@24..28 "HTML"
KEY@24..28
WORD@24..28 "HTML"
R_CURLY@28..29 "}"
CURLY_GROUP_WORD@29..34
L_CURLY@29..30 "{"
WORD@30..33 "foo"
KEY@30..33
WORD@30..33 "foo"
R_CURLY@33..34 "}"
CURLY_GROUP_WORD@34..39
L_CURLY@34..35 "{"
WORD@35..38 "bar"
KEY@35..38
WORD@35..38 "bar"
R_CURLY@38..39 "}"
CURLY_GROUP_WORD@39..44
L_CURLY@39..40 "{"
WORD@40..43 "baz"
KEY@40..43
WORD@40..43 "baz"
R_CURLY@43..44 "}"

View file

@ -10,7 +10,8 @@ ROOT@0..23
MISSING@11..11 ""
BRACK_GROUP_WORD@11..14
L_BRACK@11..12 "["
WORD@12..13 "1"
KEY@12..13
WORD@12..13 "1"
R_BRACK@13..14 "]"
CURLY_GROUP@14..19
L_CURLY@14..15 "{"

View file

@ -9,11 +9,13 @@ ROOT@0..47
ENVIRONMENT_DEFINIITION_NAME@0..15 "\\newenvironment"
CURLY_GROUP_WORD@15..20
L_CURLY@15..16 "{"
WORD@16..19 "bar"
KEY@16..19
WORD@16..19 "bar"
R_CURLY@19..20 "}"
BRACK_GROUP_WORD@20..23
L_BRACK@20..21 "["
WORD@21..22 "1"
KEY@21..22
WORD@21..22 "1"
R_BRACK@22..23 "]"
CURLY_GROUP@23..36
L_CURLY@23..24 "{"

View file

@ -10,7 +10,8 @@ ROOT@0..43
BEGIN_ENVIRONMENT_NAME@0..6 "\\begin"
CURLY_GROUP_WORD@6..12
L_CURLY@6..7 "{"
WORD@7..10 "foo"
KEY@7..10
WORD@7..10 "foo"
R_CURLY@10..11 "}"
WHITESPACE@11..12 " "
ENVIRONMENT@12..34
@ -18,20 +19,23 @@ ROOT@0..43
BEGIN_ENVIRONMENT_NAME@12..18 "\\begin"
CURLY_GROUP_WORD@18..24
L_CURLY@18..19 "{"
WORD@19..22 "qux"
KEY@19..22
WORD@19..22 "qux"
R_CURLY@22..23 "}"
WHITESPACE@23..24 " "
END@24..34
END_ENVIRONMENT_NAME@24..28 "\\end"
CURLY_GROUP_WORD@28..34
L_CURLY@28..29 "{"
WORD@29..32 "baz"
KEY@29..32
WORD@29..32 "baz"
R_CURLY@32..33 "}"
WHITESPACE@33..34 " "
END@34..43
END_ENVIRONMENT_NAME@34..38 "\\end"
CURLY_GROUP_WORD@38..43
L_CURLY@38..39 "{"
WORD@39..42 "bar"
KEY@39..42
WORD@39..42 "bar"
R_CURLY@42..43 "}"

View file

@ -10,33 +10,36 @@ ROOT@0..52
BEGIN_ENVIRONMENT_NAME@0..6 "\\begin"
CURLY_GROUP_WORD@6..11
L_CURLY@6..7 "{"
WORD@7..10 "foo"
WHITESPACE@10..11 " "
KEY@7..11
WORD@7..10 "foo"
WHITESPACE@10..11 " "
MISSING@11..11 ""
ENVIRONMENT@11..44
BEGIN@11..22
BEGIN@11..34
BEGIN_ENVIRONMENT_NAME@11..17 "\\begin"
CURLY_GROUP_WORD@17..22
CURLY_GROUP_WORD@17..34
L_CURLY@17..18 "{"
WORD@18..21 "qux"
WHITESPACE@21..22 " "
MISSING@22..22 ""
TEXT@22..34
WORD@22..27 "Hello"
WHITESPACE@27..28 " "
WORD@28..33 "World"
WHITESPACE@33..34 " "
KEY@18..34
WORD@18..21 "qux"
WHITESPACE@21..22 " "
WORD@22..27 "Hello"
WHITESPACE@27..28 " "
WORD@28..33 "World"
WHITESPACE@33..34 " "
MISSING@34..34 ""
END@34..44
END_ENVIRONMENT_NAME@34..38 "\\end"
CURLY_GROUP_WORD@38..44
L_CURLY@38..39 "{"
WORD@39..42 "baz"
KEY@39..42
WORD@39..42 "baz"
R_CURLY@42..43 "}"
WHITESPACE@43..44 " "
END@44..52
END_ENVIRONMENT_NAME@44..48 "\\end"
CURLY_GROUP_WORD@48..52
L_CURLY@48..49 "{"
WORD@49..52 "bar"
KEY@49..52
WORD@49..52 "bar"
MISSING@52..52 ""

View file

@ -10,7 +10,8 @@ ROOT@0..33
BEGIN_ENVIRONMENT_NAME@0..6 "\\begin"
CURLY_GROUP_WORD@6..12
L_CURLY@6..7 "{"
WORD@7..10 "foo"
KEY@7..10
WORD@7..10 "foo"
R_CURLY@10..11 "}"
WHITESPACE@11..12 " "
TEXT@12..24
@ -22,6 +23,7 @@ ROOT@0..33
END_ENVIRONMENT_NAME@24..28 "\\end"
CURLY_GROUP_WORD@28..33
L_CURLY@28..29 "{"
WORD@29..32 "bar"
KEY@29..32
WORD@29..32 "bar"
R_CURLY@32..33 "}"

View file

@ -9,6 +9,7 @@ ROOT@0..11
LABEL_REFERENCE_NAME@0..6 "\\eqref"
CURLY_GROUP_WORD_LIST@6..11
L_CURLY@6..7 "{"
WORD@7..10 "foo"
KEY@7..10
WORD@7..10 "foo"
R_CURLY@10..11 "}"

View file

@ -10,7 +10,8 @@ ROOT@0..28
BEGIN_ENVIRONMENT_NAME@0..6 "\\begin"
CURLY_GROUP_WORD@6..10
L_CURLY@6..7 "{"
WORD@7..8 "a"
KEY@7..8
WORD@7..8 "a"
R_CURLY@8..9 "}"
WHITESPACE@9..10 " "
TEXT@10..18
@ -25,6 +26,7 @@ ROOT@0..28
END_ENVIRONMENT_NAME@21..25 "\\end"
CURLY_GROUP_WORD@25..28
L_CURLY@25..26 "{"
WORD@26..27 "b"
KEY@26..27
WORD@26..27 "b"
R_CURLY@27..28 "}"

View file

@ -10,7 +10,8 @@ ROOT@0..28
BEGIN_ENVIRONMENT_NAME@0..6 "\\begin"
CURLY_GROUP_WORD@6..10
L_CURLY@6..7 "{"
WORD@7..8 "a"
KEY@7..8
WORD@7..8 "a"
R_CURLY@8..9 "}"
WHITESPACE@9..10 " "
EQUATION@10..21
@ -26,6 +27,7 @@ ROOT@0..28
END_ENVIRONMENT_NAME@21..25 "\\end"
CURLY_GROUP_WORD@25..28
L_CURLY@25..26 "{"
WORD@26..27 "b"
KEY@26..27
WORD@26..27 "b"
R_CURLY@27..28 "}"

View file

@ -9,7 +9,8 @@ ROOT@0..39
GLOSSARY_ENTRY_DEFINITION_NAME@0..17 "\\newglossaryentry"
CURLY_GROUP_WORD@17..22
L_CURLY@17..18 "{"
WORD@18..21 "foo"
KEY@18..21
WORD@18..21 "foo"
R_CURLY@21..22 "}"
CURLY_GROUP_KEY_VALUE@22..39
L_CURLY@22..23 "{"

View file

@ -27,6 +27,7 @@ ROOT@0..25
R_BRACK@19..20 "]"
CURLY_GROUP_WORD@20..25
L_CURLY@20..21 "{"
WORD@21..24 "baz"
KEY@21..24
WORD@21..24 "baz"
R_CURLY@24..25 "}"

View file

@ -9,6 +9,7 @@ ROOT@0..9
GLOSSARY_ENTRY_REFERENCE_NAME@0..4 "\\gls"
CURLY_GROUP_WORD@4..9
L_CURLY@4..5 "{"
WORD@5..8 "foo"
KEY@5..8
WORD@5..8 "foo"
R_CURLY@8..9 "}"

View file

@ -20,6 +20,7 @@ ROOT@0..39
R_BRACK@25..26 "]"
CURLY_GROUP_WORD_LIST@26..39
L_CURLY@26..27 "{"
WORD@27..38 "foo/bar.pdf"
KEY@27..38
WORD@27..38 "foo/bar.pdf"
R_CURLY@38..39 "}"

View file

@ -9,6 +9,7 @@ ROOT@0..29
GRAPHICS_INCLUDE_NAME@0..16 "\\includegraphics"
CURLY_GROUP_WORD_LIST@16..29
L_CURLY@16..17 "{"
WORD@17..28 "foo/bar.pdf"
KEY@17..28
WORD@17..28 "foo/bar.pdf"
R_CURLY@28..29 "}"

View file

@ -9,7 +9,8 @@ ROOT@0..11
IMPORT_NAME@0..7 "\\import"
CURLY_GROUP_WORD@7..11
L_CURLY@7..8 "{"
WORD@8..11 "foo"
KEY@8..11
WORD@8..11 "foo"
MISSING@11..11 ""
MISSING@11..11 ""

View file

@ -9,10 +9,12 @@ ROOT@0..17
IMPORT_NAME@0..7 "\\import"
CURLY_GROUP_WORD@7..12
L_CURLY@7..8 "{"
WORD@8..11 "foo"
KEY@8..11
WORD@8..11 "foo"
R_CURLY@11..12 "}"
CURLY_GROUP_WORD@12..17
L_CURLY@12..13 "{"
WORD@13..16 "bar"
KEY@13..16
WORD@13..16 "bar"
R_CURLY@16..17 "}"

View file

@ -20,6 +20,7 @@ ROOT@0..30
R_BRACK@20..21 "]"
CURLY_GROUP_WORD_LIST@21..30
L_CURLY@21..22 "{"
WORD@22..29 "foo/bar"
KEY@22..29
WORD@22..29 "foo/bar"
R_CURLY@29..30 "}"

View file

@ -9,6 +9,7 @@ ROOT@0..20
SVG_INCLUDE_NAME@0..11 "\\includesvg"
CURLY_GROUP_WORD_LIST@11..20
L_CURLY@11..12 "{"
WORD@12..19 "foo/bar"
KEY@12..19
WORD@12..19 "foo/bar"
R_CURLY@19..20 "}"

View file

@ -9,6 +9,7 @@ ROOT@0..11
LABEL_DEFINITION_NAME@0..6 "\\label"
CURLY_GROUP_WORD@6..11
L_CURLY@6..7 "{"
WORD@7..10 "foo"
KEY@7..10
WORD@7..10 "foo"
R_CURLY@10..11 "}"

View file

@ -9,7 +9,8 @@ ROOT@0..21
LABEL_NUMBER_NAME@0..9 "\\newlabel"
CURLY_GROUP_WORD@9..14
L_CURLY@9..10 "{"
WORD@10..13 "foo"
KEY@10..13
WORD@10..13 "foo"
R_CURLY@13..14 "}"
CURLY_GROUP@14..21
L_CURLY@14..15 "{"

View file

@ -9,9 +9,11 @@ ROOT@0..14
LABEL_REFERENCE_NAME@0..4 "\\ref"
CURLY_GROUP_WORD_LIST@4..14
L_CURLY@4..5 "{"
WORD@5..8 "foo"
KEY@5..8
WORD@5..8 "foo"
COMMA@8..9 ","
WHITESPACE@9..10 " "
WORD@10..13 "bar"
KEY@10..13
WORD@10..13 "bar"
R_CURLY@13..14 "}"

View file

@ -9,10 +9,12 @@ ROOT@0..19
LABEL_REFERENCE_RANGE_NAME@0..10 "\\crefrange"
CURLY_GROUP_WORD@10..14
L_CURLY@10..11 "{"
WORD@11..14 "foo"
KEY@11..14
WORD@11..14 "foo"
MISSING@14..14 ""
CURLY_GROUP_WORD@14..19
L_CURLY@14..15 "{"
WORD@15..18 "bar"
KEY@15..18
WORD@15..18 "bar"
R_CURLY@18..19 "}"

View file

@ -9,7 +9,8 @@ ROOT@0..15
LABEL_REFERENCE_RANGE_NAME@0..10 "\\crefrange"
CURLY_GROUP_WORD@10..15
L_CURLY@10..11 "{"
WORD@11..14 "foo"
KEY@11..14
WORD@11..14 "foo"
R_CURLY@14..15 "}"
MISSING@15..15 ""

View file

@ -9,10 +9,12 @@ ROOT@0..20
LABEL_REFERENCE_RANGE_NAME@0..10 "\\crefrange"
CURLY_GROUP_WORD@10..15
L_CURLY@10..11 "{"
WORD@11..14 "foo"
KEY@11..14
WORD@11..14 "foo"
R_CURLY@14..15 "}"
CURLY_GROUP_WORD@15..20
L_CURLY@15..16 "{"
WORD@16..19 "bar"
KEY@16..19
WORD@16..19 "bar"
R_CURLY@19..20 "}"

View file

@ -9,6 +9,7 @@ ROOT@0..9
LABEL_REFERENCE_NAME@0..4 "\\ref"
CURLY_GROUP_WORD_LIST@4..9
L_CURLY@4..5 "{"
WORD@5..8 "foo"
KEY@5..8
WORD@5..8 "foo"
R_CURLY@8..9 "}"

View file

@ -9,6 +9,7 @@ ROOT@0..17
LATEX_INCLUDE_NAME@0..8 "\\include"
CURLY_GROUP_WORD_LIST@8..17
L_CURLY@8..9 "{"
WORD@9..16 "foo/bar"
KEY@9..16
WORD@9..16 "foo/bar"
R_CURLY@16..17 "}"

View file

@ -9,6 +9,7 @@ ROOT@0..19
LATEX_INCLUDE_NAME@0..6 "\\input"
CURLY_GROUP_WORD_LIST@6..19
L_CURLY@6..7 "{"
WORD@7..18 "foo/bar.tex"
KEY@7..18
WORD@7..18 "foo/bar.tex"
R_CURLY@18..19 "}"

Some files were not shown because too many files have changed in this diff Show more