parser tests work

Aleksey Kladov 2021-12-12 17:58:45 +03:00
parent 26bfd6023f
commit 6ce587ba5a
8 changed files with 92 additions and 140 deletions


@@ -2,12 +2,10 @@
 //! incremental reparsing.
 pub(crate) mod lexer;
-mod text_token_source;
 mod text_tree_sink;
 mod reparsing;
 use parser::SyntaxKind;
-use text_token_source::TextTokenSource;
 use text_tree_sink::TextTreeSink;
 use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode};
@@ -15,12 +13,12 @@ use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode};
 pub(crate) use crate::parsing::{lexer::*, reparsing::incremental_reparse};
 pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
-    let (tokens, lexer_errors) = tokenize(text);
+    let (lexer_tokens, lexer_errors) = tokenize(text);
+    let parser_tokens = to_parser_tokens(text, &lexer_tokens);
-    let mut token_source = TextTokenSource::new(text, &tokens);
-    let mut tree_sink = TextTreeSink::new(text, &tokens);
+    let mut tree_sink = TextTreeSink::new(text, &lexer_tokens);
-    parser::parse_source_file(&mut token_source, &mut tree_sink);
+    parser::parse_source_file(&parser_tokens, &mut tree_sink);
     let (tree, mut parser_errors) = tree_sink.finish();
     parser_errors.extend(lexer_errors);
@@ -33,26 +31,47 @@
     text: &str,
     entry_point: parser::ParserEntryPoint,
 ) -> Result<T, ()> {
-    let (tokens, lexer_errors) = tokenize(text);
+    let (lexer_tokens, lexer_errors) = tokenize(text);
     if !lexer_errors.is_empty() {
         return Err(());
     }
-    let mut token_source = TextTokenSource::new(text, &tokens);
-    let mut tree_sink = TextTreeSink::new(text, &tokens);
+    let parser_tokens = to_parser_tokens(text, &lexer_tokens);
+    let mut tree_sink = TextTreeSink::new(text, &lexer_tokens);
     // TextTreeSink assumes that there's at least some root node to which it can attach errors and
     // tokens. We arbitrarily give it a SourceFile.
     use parser::TreeSink;
     tree_sink.start_node(SyntaxKind::SOURCE_FILE);
-    parser::parse(&mut token_source, &mut tree_sink, entry_point);
+    parser::parse(&parser_tokens, &mut tree_sink, entry_point);
     tree_sink.finish_node();
-    let (tree, parser_errors) = tree_sink.finish();
-    use parser::TokenSource;
-    if !parser_errors.is_empty() || token_source.current().kind != SyntaxKind::EOF {
+    let (tree, parser_errors, eof) = tree_sink.finish_eof();
+    if !parser_errors.is_empty() || !eof {
         return Err(());
     }
     SyntaxNode::new_root(tree).first_child().and_then(T::cast).ok_or(())
 }
+pub(crate) fn to_parser_tokens(text: &str, lexer_tokens: &[lexer::Token]) -> ::parser::Tokens {
+    let mut off = 0;
+    let mut res = parser::Tokens::default();
+    let mut was_joint = true;
+    for t in lexer_tokens {
+        if t.kind.is_trivia() {
+            was_joint = false;
+        } else if t.kind == SyntaxKind::IDENT {
+            let token_text = &text[off..][..usize::from(t.len)];
+            let contextual_kw =
+                SyntaxKind::from_contextual_keyword(token_text).unwrap_or(SyntaxKind::IDENT);
+            res.push_ident(contextual_kw);
+        } else {
+            res.push(was_joint, t.kind);
+            was_joint = true;
+        }
+        off += usize::from(t.len);
+    }
+    res
+}
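
For orientation, here is a minimal sketch (not part of the commit; the function name and sample input are made up) of how the pieces fit together after this change, assuming it sits in the same module so the pub(crate) helpers are in scope: the lexer output is converted up front by to_parser_tokens, and the parser consumes those tokens directly instead of pulling them through a TextTokenSource.

// Hypothetical sketch, not part of the commit: exercising the new conversion
// from the same module, where tokenize, to_parser_tokens and TextTreeSink are in scope.
fn demo_to_parser_tokens() {
    let text = "union U { a: u32 }";

    // Lex first; lexer errors are tracked separately from parse errors.
    let (lexer_tokens, lexer_errors) = tokenize(text);
    assert!(lexer_errors.is_empty());

    // Convert to the parser's input: trivia (whitespace, comments) is dropped,
    // tokens not separated by trivia are marked as joint, and an IDENT that
    // spells a contextual keyword (here `union`) is pushed via `push_ident`
    // with its contextual-keyword kind instead of plain IDENT.
    let parser_tokens = to_parser_tokens(text, &lexer_tokens);

    // The parser now consumes `parser_tokens` directly; the tree sink still
    // works off the original text and lexer tokens.
    let mut tree_sink = TextTreeSink::new(text, &lexer_tokens);
    parser::parse_source_file(&parser_tokens, &mut tree_sink);
    let (_green_node, _syntax_errors) = tree_sink.finish();
}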