Add a lexing-based 'highlight' mode to the parser

basic syntax highlighting

added more syntax highlighting coverage

add example of a markdown table with styling

move FIXED_TOKEN logic into highlight

refactor highlight, add support for backpassing

escape html from source code

fix bug with <pre> tag ordering

refactor out html from roc_parse

remove test, put highlight functionality into separate file

fix typo
This commit is contained in:
Luke Boswell 2023-02-28 17:03:49 +11:00
parent 7ccc23ca06
commit 1590b30b19
No known key found for this signature in database
GPG key ID: F6DB3C9DB47377B0
12 changed files with 1413 additions and 114 deletions

View file

@ -1,5 +1,6 @@
use crate::ast::CommentOrNewline;
use crate::ast::Spaceable;
use crate::parser::Progress;
use crate::parser::SpaceProblem;
use crate::parser::{self, and, backtrackable, BadInputError, Parser, Progress::*};
use crate::state::State;
@ -7,6 +8,7 @@ use bumpalo::collections::vec::Vec;
use bumpalo::Bump;
use roc_region::all::Loc;
use roc_region::all::Position;
use roc_region::all::Region;
pub fn space0_around_ee<'a, P, S, E>(
parser: P,
@ -386,98 +388,132 @@ pub fn spaces<'a, E>() -> impl Parser<'a, &'a [CommentOrNewline<'a>], E>
where
E: 'a + SpaceProblem,
{
move |arena, mut state: State<'a>, _min_indent: u32| {
move |arena, state: State<'a>, _min_indent: u32| {
let mut newlines = Vec::new_in(arena);
let mut progress = NoProgress;
loop {
let whitespace = fast_eat_whitespace(state.bytes());
if whitespace > 0 {
state.advance_mut(whitespace);
progress = MadeProgress;
}
match state.bytes().first() {
Some(b'#') => {
state.advance_mut(1);
let is_doc_comment = state.bytes().first() == Some(&b'#')
&& (state.bytes().get(1) == Some(&b' ')
|| state.bytes().get(1) == Some(&b'\n')
|| begins_with_crlf(&state.bytes()[1..])
|| Option::is_none(&state.bytes().get(1)));
if is_doc_comment {
state.advance_mut(1);
if state.bytes().first() == Some(&b' ') {
state.advance_mut(1);
}
}
let len = fast_eat_until_control_character(state.bytes());
// We already checked that the string is valid UTF-8
debug_assert!(std::str::from_utf8(&state.bytes()[..len]).is_ok());
let text = unsafe { std::str::from_utf8_unchecked(&state.bytes()[..len]) };
let comment = if is_doc_comment {
CommentOrNewline::DocComment(text)
} else {
CommentOrNewline::LineComment(text)
};
newlines.push(comment);
state.advance_mut(len);
if begins_with_crlf(state.bytes()) {
state.advance_mut(1);
state = state.advance_newline();
} else if state.bytes().first() == Some(&b'\n') {
state = state.advance_newline();
}
progress = MadeProgress;
}
Some(b'\r') => {
if state.bytes().get(1) == Some(&b'\n') {
newlines.push(CommentOrNewline::Newline);
state.advance_mut(1);
state = state.advance_newline();
progress = MadeProgress;
} else {
return Err((
progress,
E::space_problem(
BadInputError::HasMisplacedCarriageReturn,
state.pos(),
),
));
}
}
Some(b'\n') => {
newlines.push(CommentOrNewline::Newline);
state = state.advance_newline();
progress = MadeProgress;
}
Some(b'\t') => {
return Err((
progress,
E::space_problem(BadInputError::HasTab, state.pos()),
));
}
Some(x) if *x < b' ' => {
return Err((
progress,
E::space_problem(BadInputError::HasAsciiControl, state.pos()),
));
}
_ => {
if !newlines.is_empty() {
state = state.mark_current_indent();
}
break;
}
}
match consume_spaces(state, |_, space, _| newlines.push(space)) {
Ok((progress, state)) => Ok((progress, newlines.into_bump_slice(), state)),
Err((progress, err)) => Err((progress, err)),
}
Ok((progress, newlines.into_bump_slice(), state))
}
}
/// Parser that consumes leading whitespace, newlines, and comments,
/// returning each comment/newline together with its source `Region`.
///
/// Same lexing behavior as `spaces`, but keeps location info so callers
/// (e.g. the syntax highlighter) know where each item came from.
pub fn loc_spaces<'a, E>() -> impl Parser<'a, &'a [Loc<CommentOrNewline<'a>>], E>
where
    E: 'a + SpaceProblem,
{
    move |arena, state: State<'a>, _min_indent: u32| {
        let mut located = Vec::new_in(arena);
        // Record every consumed space item with the region it spanned.
        let result = consume_spaces(state, |start, space, end| {
            located.push(Loc::at(Region::between(start, end), space))
        });
        // On success, hand back the collected items as a bump-allocated slice;
        // errors pass through untouched.
        result.map(|(progress, state)| (progress, located.into_bump_slice(), state))
    }
}
/// Shared whitespace/comment lexer backing `spaces` and `loc_spaces`.
///
/// Consumes runs of whitespace, newlines, and line/doc comments from
/// `state`, calling `on_space` with the start position, the consumed
/// `CommentOrNewline`, and the end position for each item.
///
/// Returns the advanced `State` on success. Errors on tabs, lone carriage
/// returns, and other ASCII control characters, reporting the `Progress`
/// made before the error.
fn consume_spaces<'a, E, F>(
mut state: State<'a>,
mut on_space: F,
) -> Result<(Progress, State<'a>), (Progress, E)>
where
E: 'a + SpaceProblem,
F: FnMut(Position, CommentOrNewline<'a>, Position),
{
let mut progress = NoProgress;
let mut found_newline = false;
loop {
// Skip plain spaces in bulk before inspecting the next byte.
let whitespace = fast_eat_whitespace(state.bytes());
if whitespace > 0 {
state.advance_mut(whitespace);
progress = MadeProgress;
}
let start = state.pos();
match state.bytes().first() {
Some(b'#') => {
state.advance_mut(1);
// `##` followed by a space, LF, CRLF, or end of input is a doc
// comment; any other `##` is treated as a plain line comment.
let is_doc_comment = state.bytes().first() == Some(&b'#')
&& (state.bytes().get(1) == Some(&b' ')
|| state.bytes().get(1) == Some(&b'\n')
|| begins_with_crlf(&state.bytes()[1..])
|| Option::is_none(&state.bytes().get(1)));
if is_doc_comment {
state.advance_mut(1);
// Drop the single leading space after `##`, if present.
if state.bytes().first() == Some(&b' ') {
state.advance_mut(1);
}
}
// Comment text runs until the next control character (e.g. '\n').
let len = fast_eat_until_control_character(state.bytes());
// We already checked that the string is valid UTF-8
debug_assert!(std::str::from_utf8(&state.bytes()[..len]).is_ok());
let text = unsafe { std::str::from_utf8_unchecked(&state.bytes()[..len]) };
let comment = if is_doc_comment {
CommentOrNewline::DocComment(text)
} else {
CommentOrNewline::LineComment(text)
};
state.advance_mut(len);
on_space(start, comment, state.pos());
found_newline = true;
// Consume the newline that terminates the comment, handling CRLF.
if begins_with_crlf(state.bytes()) {
state.advance_mut(1);
state = state.advance_newline();
} else if state.bytes().first() == Some(&b'\n') {
state = state.advance_newline();
}
progress = MadeProgress;
}
Some(b'\r') => {
// A carriage return is only valid as the first half of CRLF.
if state.bytes().get(1) == Some(&b'\n') {
state.advance_mut(1);
state = state.advance_newline();
on_space(start, CommentOrNewline::Newline, state.pos());
found_newline = true;
progress = MadeProgress;
} else {
return Err((
progress,
E::space_problem(BadInputError::HasMisplacedCarriageReturn, state.pos()),
));
}
}
Some(b'\n') => {
state = state.advance_newline();
on_space(start, CommentOrNewline::Newline, state.pos());
found_newline = true;
progress = MadeProgress;
}
Some(b'\t') => {
// Tabs are rejected outright by the parser.
return Err((
progress,
E::space_problem(BadInputError::HasTab, state.pos()),
));
}
Some(x) if *x < b' ' => {
return Err((
progress,
E::space_problem(BadInputError::HasAsciiControl, state.pos()),
));
}
_ => {
// Non-space byte (or end of input): if we crossed at least one
// newline, record the current column as the line's indent.
if found_newline {
state = state.mark_current_indent();
}
break;
}
}
}
Ok((progress, state))
}

View file

@ -0,0 +1,565 @@
use encode_unicode::CharExt;
use std::collections::HashSet;
use bumpalo::Bump;
use roc_region::all::{Loc, Region};
use crate::{
ast::CommentOrNewline,
blankspace::loc_spaces,
keyword::KEYWORDS,
number_literal::positive_number_literal,
parser::{EExpr, ParseResult, Parser},
state::State,
string_literal::{parse_str_like_literal, StrLikeLiteral},
};
/// A lexical token category produced by `highlight`, used to drive
/// syntax highlighting of Roc source code. Each token is paired with a
/// source `Region` via `Loc<Token>`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Token {
/// A `# ...` comment.
LineComment,
/// A `## ...` doc comment.
DocComment,
/// A span that failed to lex (see `fast_forward_to`).
Error,
/// A single-quoted character literal.
SingleQuote,
/// A string literal.
String,
// NOTE(review): the next three variants are not produced by
// `highlight_inner` in this file — presumably reserved for finer-grained
// string-literal highlighting; confirm against other callers.
UnicodeEscape,
EscapedChar,
Interpolated,
/// A header or body keyword (see `HEADER_KEYWORDS` / `KEYWORDS`).
Keyword,
/// An identifier starting with an uppercase letter.
UpperIdent,
/// An identifier starting with a lowercase letter.
LowerIdent,
/// A number literal.
Number,
/// Any byte not matched by another rule.
Other,
// Single-character punctuation tokens.
Minus,
Plus,
Colon,
Bar,
Equals,
GreaterThan,
LessThan,
Comma,
Backslash,
Brace,
Bracket,
Paren,
// Combined operators produced by `combine_tokens`.
/// `->`
Arrow,
/// `|>`
Pipe,
/// `<-`
Backpass,
}
/// Lexes `text` into located highlight tokens.
///
/// First tries to parse a module header. If one parses, the header span is
/// re-lexed with the header keyword set and the remainder with the body
/// keyword set, so e.g. `app`/`provides` are only keywords in the header.
/// If no header parses, the whole input is lexed with body keywords.
/// Adjacent punctuation is then merged by `combine_tokens` (`->`, `|>`, `<-`).
pub fn highlight(text: &str) -> Vec<Loc<Token>> {
let mut tokens = Vec::new();
let state = State::new(text.as_bytes());
let arena = Bump::new();
let header_keywords = HEADER_KEYWORDS.iter().copied().collect::<HashSet<_>>();
let body_keywords = KEYWORDS.iter().copied().collect::<HashSet<_>>();
if let Ok((_prog, _, new_state)) = crate::module::header().parse(&arena, state.clone(), 0) {
// The header's byte length is the difference between the original and
// remaining input; lex just that prefix with header keywords.
let inner_state =
State::new(text[..state.bytes().len() - new_state.bytes().len()].as_bytes());
highlight_inner(&arena, inner_state, &mut tokens, &header_keywords);
highlight_inner(&arena, new_state, &mut tokens, &body_keywords);
} else {
highlight_inner(&arena, state, &mut tokens, &body_keywords);
}
tokens = combine_tokens(tokens);
tokens
}
/// Merges adjacent single-character tokens into multi-character operator
/// tokens: `-` + `>` becomes `Arrow`, `|` + `>` becomes `Pipe`, and
/// `<` + `-` becomes `Backpass`. All other tokens pass through unchanged.
fn combine_tokens(locations: Vec<Loc<Token>>) -> Vec<Loc<Token>> {
    // True for tokens that can begin a two-token operator.
    fn starts_combo(token: Token) -> bool {
        matches!(token, Token::Minus | Token::Bar | Token::LessThan)
    }

    // The combined operator formed by `prev` followed by `next`, if any.
    fn combined(prev: Token, next: Token) -> Option<Token> {
        match (prev, next) {
            (Token::Minus, Token::GreaterThan) => Some(Token::Arrow), // "->"
            (Token::Bar, Token::GreaterThan) => Some(Token::Pipe),    // "|>"
            (Token::LessThan, Token::Minus) => Some(Token::Backpass), // "<-"
            _ => None,
        }
    }

    let mut tokens: Vec<Loc<Token>> = Vec::new();
    // A candidate first half of a combined operator, not yet emitted.
    let mut pending: Option<Loc<Token>> = None;

    for location in locations {
        if let Some(prev) = pending.take() {
            if let Some(op) = combined(prev.value, location.value) {
                // The merged token spans from the start of `prev` to the
                // end of `location`.
                tokens.push(Loc::at(
                    Region::between(prev.region.start(), location.region.end()),
                    op,
                ));
                continue;
            }
            // `prev` did not combine with `location`; emit it on its own.
            tokens.push(prev);
        }
        if starts_combo(location.value) {
            // FIX: previously, a combo-starting token that followed a
            // non-combining pending token was emitted immediately, so
            // sequences like `Bar, Minus, GreaterThan` (e.g. Roc's
            // `A | B ->`) never produced an Arrow. Always hold starters.
            pending = Some(location);
        } else {
            tokens.push(location);
        }
    }

    // FIX: previously, a pending token left over at end of input (e.g. a
    // trailing `-`) was silently dropped instead of emitted.
    if let Some(prev) = pending {
        tokens.push(prev);
    }

    tokens
}
/// Core lexing loop: repeatedly dispatches on the next character of `state`,
/// pushing one `Loc<Token>` per lexed item into `tokens` until input is
/// exhausted (or the next bytes are not valid UTF-8).
///
/// Whitespace/comments, string-like literals, and numbers are delegated to
/// the real parsers (`loc_spaces`, `parse_str_like_literal`,
/// `positive_number_literal`); on parser failure the bad span is recorded
/// as a single `Token::Error` via `fast_forward_to`.
fn highlight_inner<'a>(
arena: &'a Bump,
mut state: State<'a>,
tokens: &mut Vec<Loc<Token>>,
keywords: &HashSet<&str>,
) {
loop {
let start = state.pos();
if let Ok((b, _width)) = char::from_utf8_slice_start(state.bytes()) {
match b {
// Whitespace and comments: reuse the blankspace parser so the
// comment rules stay in sync with the real parser.
' ' | '\n' | '\t' | '\r' | '#' => {
let res: ParseResult<'a, _, EExpr<'a>> =
loc_spaces().parse(arena, state.clone(), 0);
if let Ok((_, spaces, new_state)) = res {
state = new_state;
for space in spaces {
let token = match space.value {
// Plain newlines carry no highlight information.
CommentOrNewline::Newline => {
continue;
}
CommentOrNewline::LineComment(_) => Token::LineComment,
CommentOrNewline::DocComment(_) => Token::DocComment,
};
tokens.push(Loc::at(space.region, token));
}
} else {
// e.g. a tab or stray control character: mark the rest of
// the line as an error span.
fast_forward_to(&mut state, tokens, start, |c| c == b'\n');
}
}
// String and single-quote literals, via the real literal parser.
'"' | '\'' => {
if let Ok((_, item, new_state)) =
parse_str_like_literal().parse(arena, state.clone(), 0)
{
state = new_state;
match item {
StrLikeLiteral::SingleQuote(_) => {
tokens.push(Loc::at(
Region::between(start, state.pos()),
Token::SingleQuote,
));
}
StrLikeLiteral::Str(_) => {
tokens.push(Loc::at(
Region::between(start, state.pos()),
Token::String,
));
}
}
} else {
fast_forward_to(&mut state, tokens, start, |c| c == b'\n');
}
}
// Identifiers and keywords. Case of the first letter decides
// Upper vs Lower ident.
// NOTE(review): only alphabetic chars and ASCII digits are
// chomped — underscores would end the identifier; confirm this
// matches Roc's identifier syntax.
c if c.is_alphabetic() => {
let buffer = state.bytes();
let mut chomped = 0;
let is_upper = c.is_uppercase();
while let Ok((ch, width)) = char::from_utf8_slice_start(&buffer[chomped..]) {
if ch.is_alphabetic() || ch.is_ascii_digit() {
chomped += width;
} else {
// we're done
break;
}
}
let ident = std::str::from_utf8(&buffer[..chomped]).unwrap();
state.advance_mut(chomped);
if keywords.contains(ident) {
tokens.push(Loc::at(Region::between(start, state.pos()), Token::Keyword));
} else {
tokens.push(Loc::at(
Region::between(start, state.pos()),
if is_upper {
Token::UpperIdent
} else {
Token::LowerIdent
},
));
}
}
// Numbers, via the real number-literal parser.
'0'..='9' => {
if let Ok((_, _item, new_state)) =
positive_number_literal().parse(arena, state.clone(), 0)
{
state = new_state;
tokens.push(Loc::at(Region::between(start, state.pos()), Token::Number));
} else {
fast_forward_to(&mut state, tokens, start, |b| !b.is_ascii_digit());
}
}
// Single-character punctuation tokens. `-`, `|`, `<`, `>` may
// later be merged into `->`, `|>`, `<-` by `combine_tokens`.
':' => {
state.advance_mut(1);
tokens.push(Loc::at(Region::between(start, state.pos()), Token::Colon));
}
'|' => {
state.advance_mut(1);
tokens.push(Loc::at(Region::between(start, state.pos()), Token::Bar));
}
'-' => {
state.advance_mut(1);
tokens.push(Loc::at(Region::between(start, state.pos()), Token::Minus));
}
'+' => {
state.advance_mut(1);
tokens.push(Loc::at(Region::between(start, state.pos()), Token::Plus));
}
'=' => {
state.advance_mut(1);
tokens.push(Loc::at(Region::between(start, state.pos()), Token::Equals));
}
'>' => {
state.advance_mut(1);
tokens.push(Loc::at(
Region::between(start, state.pos()),
Token::GreaterThan,
));
}
'<' => {
state.advance_mut(1);
tokens.push(Loc::at(
Region::between(start, state.pos()),
Token::LessThan,
));
}
',' => {
state.advance_mut(1);
tokens.push(Loc::at(Region::between(start, state.pos()), Token::Comma));
}
'\\' => {
state.advance_mut(1);
tokens.push(Loc::at(
Region::between(start, state.pos()),
Token::Backslash,
));
}
'{' | '}' => {
state.advance_mut(1);
tokens.push(Loc::at(Region::between(start, state.pos()), Token::Brace));
}
'[' | ']' => {
state.advance_mut(1);
tokens.push(Loc::at(Region::between(start, state.pos()), Token::Bracket));
}
'(' | ')' => {
state.advance_mut(1);
tokens.push(Loc::at(Region::between(start, state.pos()), Token::Paren));
}
// Anything else becomes a one-byte `Other` token.
_ => {
state.advance_mut(1);
tokens.push(Loc::at(Region::between(start, state.pos()), Token::Other));
}
}
} else {
// End of input, or bytes that are not valid UTF-8.
break;
}
}
}
/// Skips bytes until `end` returns true for the next byte (or the input is
/// exhausted), then records the entire skipped span as one `Token::Error`.
fn fast_forward_to(
    state: &mut State,
    tokens: &mut Vec<Loc<Token>>,
    start: roc_region::all::Position,
    end: impl Fn(u8) -> bool,
) {
    loop {
        match state.bytes().first() {
            // Keep advancing while the terminator has not been reached.
            Some(&byte) if !end(byte) => state.advance_mut(1),
            _ => break,
        }
    }
    tokens.push(Loc::at(Region::between(start, state.pos()), Token::Error));
}
/// Keywords that may appear in a Roc module header. `highlight` collects
/// these into a `HashSet` and applies them only within the header region.
///
/// FIX: the original array listed "package" twice (and declared length 14);
/// the duplicate has been removed.
pub const HEADER_KEYWORDS: [&str; 13] = [
    "interface",
    "app",
    "package",
    "platform",
    "hosted",
    "exposes",
    "imports",
    "with",
    "generates",
    "packages",
    "requires",
    "provides",
    "to",
];
// Unit tests for the highlighter. All regions are byte offsets into the
// input text.
#[cfg(test)]
mod tests {
use roc_region::all::Position;
use super::*;
// `#` comments lex as LineComment; newlines between them are dropped.
#[test]
fn test_highlight_comments() {
let text = "# a\n#b\n#c";
let tokens = highlight(text);
assert_eq!(
tokens,
vec![
Loc::at(
Region::between(Position::new(0), Position::new(3)),
Token::LineComment
),
Loc::at(
Region::between(Position::new(4), Position::new(6)),
Token::LineComment
),
Loc::at(
Region::between(Position::new(7), Position::new(9)),
Token::LineComment
),
]
);
}
// `## ` (with a space) is a DocComment; `##x` falls back to LineComment.
#[test]
fn test_highlight_doc_comments() {
let text = "## a\n##b\n##c";
let tokens = highlight(text);
assert_eq!(
tokens,
vec![
Loc::at(
Region::between(Position::new(0), Position::new(4)),
Token::DocComment
),
// the next two are line comments because there's not a space at the beginning
Loc::at(
Region::between(Position::new(5), Position::new(8)),
Token::LineComment
),
Loc::at(
Region::between(Position::new(9), Position::new(12)),
Token::LineComment
),
]
);
}
// A string literal spans its quotes.
#[test]
fn test_highlight_strings() {
let text = r#""a""#;
let tokens = highlight(text);
assert_eq!(
tokens,
vec![Loc::at(
Region::between(Position::new(0), Position::new(3)),
Token::String
)]
);
}
// A char literal spans its quotes.
#[test]
fn test_highlight_single_quotes() {
let text = r#"'a'"#;
let tokens = highlight(text);
assert_eq!(
tokens,
vec![Loc::at(
Region::between(Position::new(0), Position::new(3)),
Token::SingleQuote
)]
);
}
// Header keywords (`app`, `provides`, `to`) are highlighted as keywords
// because the module header parses successfully.
#[test]
fn test_highlight_header() {
let text = r#"app "test-app" provides [] to "./blah""#;
let tokens = highlight(text);
assert_eq!(
tokens,
vec![
Loc::at(
Region::between(Position::new(0), Position::new(3)),
Token::Keyword
),
Loc::at(
Region::between(Position::new(4), Position::new(14)),
Token::String
),
Loc::at(
Region::between(Position::new(15), Position::new(23)),
Token::Keyword
),
Loc::at(
Region::between(Position::new(24), Position::new(25)),
Token::Bracket
),
Loc::at(
Region::between(Position::new(25), Position::new(26)),
Token::Bracket
),
Loc::at(
Region::between(Position::new(27), Position::new(29)),
Token::Keyword
),
Loc::at(
Region::between(Position::new(30), Position::new(38)),
Token::String
),
]
);
}
// Several number-literal shapes, each lexed as one Number token.
#[test]
fn test_highlight_numbers() {
let text = "123.0 123 123. 123.0e10 123e10 123e-10 0x123";
let tokens = highlight(text);
assert_eq!(
tokens,
vec![
Loc::at(
Region::between(Position::new(0), Position::new(5)),
Token::Number
),
Loc::at(
Region::between(Position::new(6), Position::new(9)),
Token::Number
),
Loc::at(
Region::between(Position::new(10), Position::new(14)),
Token::Number
),
Loc::at(
Region::between(Position::new(15), Position::new(23)),
Token::Number
),
Loc::at(
Region::between(Position::new(24), Position::new(30)),
Token::Number
),
Loc::at(
Region::between(Position::new(31), Position::new(38)),
Token::Number
),
Loc::at(
Region::between(Position::new(39), Position::new(44)),
Token::Number
),
]
);
}
// Adjacent punctuation pairs merge into Arrow / Pipe / Backpass, with the
// merged region spanning both original tokens.
#[test]
fn test_combine_tokens() {
let input: Vec<Loc<Token>> = vec![
// arrow operator "->"
Loc::at(
Region::between(Position::new(0), Position::new(5)),
Token::Minus,
),
Loc::at(
Region::between(Position::new(6), Position::new(7)),
Token::GreaterThan,
),
// pipe operator "|>"
Loc::at(
Region::between(Position::new(8), Position::new(9)),
Token::Bar,
),
Loc::at(
Region::between(Position::new(10), Position::new(11)),
Token::GreaterThan,
),
// backpass operator "<-"
Loc::at(
Region::between(Position::new(12), Position::new(13)),
Token::LessThan,
),
Loc::at(
Region::between(Position::new(14), Position::new(15)),
Token::Minus,
),
];
let actual = combine_tokens(input);
let expected = vec![
Loc::at(
Region::between(Position::new(0), Position::new(7)),
Token::Arrow,
),
Loc::at(
Region::between(Position::new(8), Position::new(11)),
Token::Pipe,
),
Loc::at(
Region::between(Position::new(12), Position::new(15)),
Token::Backpass,
),
];
assert_eq!(actual, expected);
}
}

View file

@ -10,6 +10,7 @@ pub mod ast;
pub mod blankspace;
pub mod expr;
pub mod header;
pub mod highlight;
pub mod ident;
pub mod keyword;
pub mod module;

View file

@ -129,6 +129,10 @@ impl Position {
offset: self.offset - count as u32,
}
}
/// Returns this position's offset from the start of the source, in bytes.
pub fn byte_offset(&self) -> usize {
self.offset as usize
}
}
impl Debug for Position {
@ -322,6 +326,10 @@ impl<T> Loc<T> {
value: transform(self.value),
}
}
/// Returns the half-open byte range `[start, end)` that this `Loc`'s
/// region covers in the source text.
pub fn byte_range(&self) -> std::ops::Range<usize> {
self.region.start.byte_offset()..self.region.end.byte_offset()
}
}
impl<T> fmt::Debug for Loc<T>

View file

@ -568,6 +568,15 @@ mod test_snapshots {
Err(err) => Err(format!("{:?}", err)),
};
if expect == TestExpectation::Pass {
let tokens = roc_parse::highlight::highlight(&source);
for token in tokens {
if token.value == roc_parse::highlight::Token::Error {
panic!("Found an error highlight token in the input: {:?}", token);
}
}
}
let actual_result =
if expect == TestExpectation::Pass || expect == TestExpectation::Malformed {
result.expect("The source code for this test did not successfully parse!")