remove TokenPos

Aleksey Kladov 2019-02-20 22:58:56 +03:00
parent cce23fddba
commit 4c1f9b8d4e
3 changed files with 28 additions and 49 deletions


@@ -69,24 +69,7 @@ trait TreeSink {
 ///
 /// Hopefully this will allow us to treat text and token trees in the same way!
 trait TokenSource {
-    fn token_kind(&self, pos: TokenPos) -> SyntaxKind;
-    fn is_token_joint_to_next(&self, pos: TokenPos) -> bool;
-    fn is_keyword(&self, pos: TokenPos, kw: &str) -> bool;
-}
-
-#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Default)]
-pub(crate) struct TokenPos(pub u32);
-
-impl std::ops::Add<u32> for TokenPos {
-    type Output = TokenPos;
-
-    fn add(self, rhs: u32) -> TokenPos {
-        TokenPos(self.0 + rhs)
-    }
-}
-
-impl std::ops::AddAssign<u32> for TokenPos {
-    fn add_assign(&mut self, rhs: u32) {
-        self.0 += rhs
-    }
+    fn token_kind(&self, pos: usize) -> SyntaxKind;
+    fn is_token_joint_to_next(&self, pos: usize) -> bool;
+    fn is_keyword(&self, pos: usize, kw: &str) -> bool;
 }
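
For orientation, here is a small standalone sketch (not part of the commit) of how a TokenSource implementer looks once positions are plain `usize` indices; `Kind` and `VecSource` are made-up stand-ins for rust-analyzer's `SyntaxKind` and its real token sources.

// Illustrative only: a toy TokenSource over plain `usize` indices.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum Kind { Ident, Whitespace, Eof } // stand-in for SyntaxKind

trait TokenSource {
    fn token_kind(&self, pos: usize) -> Kind;
    fn is_token_joint_to_next(&self, pos: usize) -> bool;
    fn is_keyword(&self, pos: usize, kw: &str) -> bool;
}

struct VecSource {
    kinds: Vec<Kind>,
    texts: Vec<String>,
}

impl TokenSource for VecSource {
    fn token_kind(&self, pos: usize) -> Kind {
        // Out-of-range lookups answer EOF instead of panicking, the same
        // convention ParserInput follows in the next file.
        self.kinds.get(pos).copied().unwrap_or(Kind::Eof)
    }
    fn is_token_joint_to_next(&self, pos: usize) -> bool {
        // Toy rule: tokens are joint unless whitespace follows.
        self.kinds.get(pos + 1).map_or(true, |k| *k != Kind::Whitespace)
    }
    fn is_keyword(&self, pos: usize, kw: &str) -> bool {
        self.texts.get(pos).map_or(false, |t| t.as_str() == kw)
    }
}

fn main() {
    let src = VecSource {
        kinds: vec![Kind::Ident, Kind::Whitespace, Kind::Ident],
        texts: vec!["union".into(), " ".into(), "U".into()],
    };
    assert_eq!(src.token_kind(99), Kind::Eof); // past the end -> EOF
    assert!(src.is_keyword(0, "union"));       // contextual keyword check
    assert!(!src.is_token_joint_to_next(0));   // whitespace follows
}

The `get(..)`-based fallbacks here mirror the explicit bounds checks that `ParserInput` performs below.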


@@ -1,33 +1,29 @@
 use crate::{
     SyntaxKind, SyntaxKind::EOF, TextRange, TextUnit,
     parsing::{
-        TokenPos, TokenSource,
+        TokenSource,
         lexer::Token,
     },
 };
 
 impl<'t> TokenSource for ParserInput<'t> {
-    fn token_kind(&self, pos: TokenPos) -> SyntaxKind {
-        let idx = pos.0 as usize;
-        if !(idx < self.tokens.len()) {
+    fn token_kind(&self, pos: usize) -> SyntaxKind {
+        if !(pos < self.tokens.len()) {
             return EOF;
         }
-        self.tokens[idx].kind
+        self.tokens[pos].kind
     }
-    fn is_token_joint_to_next(&self, pos: TokenPos) -> bool {
-        let idx_curr = pos.0 as usize;
-        let idx_next = pos.0 as usize + 1;
-        if !(idx_next < self.tokens.len()) {
+    fn is_token_joint_to_next(&self, pos: usize) -> bool {
+        if !(pos + 1 < self.tokens.len()) {
             return true;
         }
-        self.start_offsets[idx_curr] + self.tokens[idx_curr].len == self.start_offsets[idx_next]
+        self.start_offsets[pos] + self.tokens[pos].len == self.start_offsets[pos + 1]
     }
-    fn is_keyword(&self, pos: TokenPos, kw: &str) -> bool {
-        let idx = pos.0 as usize;
-        if !(idx < self.tokens.len()) {
+    fn is_keyword(&self, pos: usize, kw: &str) -> bool {
+        if !(pos < self.tokens.len()) {
             return false;
         }
-        let range = TextRange::offset_len(self.start_offsets[idx], self.tokens[idx].len);
+        let range = TextRange::offset_len(self.start_offsets[pos], self.tokens[pos].len);
         self.text[range] == *kw
     }
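
A minimal standalone illustration of the adjacency test above (not from the commit; it uses bare `u32` offsets where the real code uses `TextUnit`): two tokens are joint exactly when the first one ends where the second one starts.

// Hypothetical helper mirroring ParserInput::is_token_joint_to_next.
fn is_joint(start_offsets: &[u32], lens: &[u32], pos: usize) -> bool {
    if pos + 1 >= start_offsets.len() {
        return true; // past the end: treat as joint, like the code above
    }
    start_offsets[pos] + lens[pos] == start_offsets[pos + 1]
}

fn main() {
    // ">>" lexes as two `>` tokens at offsets 0 and 1 -> joint.
    assert!(is_joint(&[0, 1], &[1, 1], 0));
    // "> >" leaves a gap between the two `>` tokens -> not joint.
    assert!(!is_joint(&[0, 2], &[1, 1], 0));
}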


@@ -6,7 +6,7 @@ use crate::{
     syntax_error::ParseError,
     SyntaxKind::{self, ERROR, EOF, TOMBSTONE},
     parsing::{
-        TokenSource, TokenPos,
+        TokenSource,
         token_set::TokenSet,
         event::Event,
     },
@@ -23,14 +23,14 @@ use crate::{
 /// finish expression". See `Event` docs for more.
 pub(crate) struct Parser<'t> {
     token_source: &'t dyn TokenSource,
-    pos: TokenPos,
+    token_pos: usize,
     events: Vec<Event>,
     steps: Cell<u32>,
 }
 
 impl<'t> Parser<'t> {
     pub(super) fn new(token_source: &'t dyn TokenSource) -> Parser<'t> {
-        Parser { token_source, pos: TokenPos::default(), events: Vec::new(), steps: Cell::new(0) }
+        Parser { token_source, token_pos: 0, events: Vec::new(), steps: Cell::new(0) }
     }
 
     pub(crate) fn finish(self) -> Vec<Event> {
@@ -49,9 +49,9 @@ impl<'t> Parser<'t> {
     ///
     /// Useful for parsing things like `>>`.
     pub(crate) fn current2(&self) -> Option<(SyntaxKind, SyntaxKind)> {
-        let c1 = self.token_source.token_kind(self.pos);
-        let c2 = self.token_source.token_kind(self.pos + 1);
-        if self.token_source.is_token_joint_to_next(self.pos) {
+        let c1 = self.token_source.token_kind(self.token_pos);
+        let c2 = self.token_source.token_kind(self.token_pos + 1);
+        if self.token_source.is_token_joint_to_next(self.token_pos) {
             Some((c1, c2))
         } else {
             None
@@ -63,11 +63,11 @@ impl<'t> Parser<'t> {
     ///
     /// Useful for parsing things like `=>>`.
     pub(crate) fn current3(&self) -> Option<(SyntaxKind, SyntaxKind, SyntaxKind)> {
-        let c1 = self.token_source.token_kind(self.pos);
-        let c2 = self.token_source.token_kind(self.pos + 1);
-        let c3 = self.token_source.token_kind(self.pos + 2);
-        if self.token_source.is_token_joint_to_next(self.pos)
-            && self.token_source.is_token_joint_to_next(self.pos + 1)
+        let c1 = self.token_source.token_kind(self.token_pos);
+        let c2 = self.token_source.token_kind(self.token_pos + 1);
+        let c3 = self.token_source.token_kind(self.token_pos + 2);
+        if self.token_source.is_token_joint_to_next(self.token_pos)
+            && self.token_source.is_token_joint_to_next(self.token_pos + 1)
         {
             Some((c1, c2, c3))
         } else {
@@ -77,11 +77,11 @@ impl<'t> Parser<'t> {
     /// Lookahead operation: returns the kind of the next nth
     /// token.
-    pub(crate) fn nth(&self, n: u32) -> SyntaxKind {
+    pub(crate) fn nth(&self, n: usize) -> SyntaxKind {
         let steps = self.steps.get();
         assert!(steps <= 10_000_000, "the parser seems stuck");
         self.steps.set(steps + 1);
-        self.token_source.token_kind(self.pos + n)
+        self.token_source.token_kind(self.token_pos + n)
     }
 
     /// Checks if the current token is `kind`.
@@ -96,7 +96,7 @@ impl<'t> Parser<'t> {
     /// Checks if the current token is contextual keyword with text `t`.
     pub(crate) fn at_contextual_kw(&self, kw: &str) -> bool {
-        self.token_source.is_keyword(self.pos, kw)
+        self.token_source.is_keyword(self.token_pos, kw)
     }
 
     /// Starts a new node in the syntax tree. All nodes and tokens
@@ -184,7 +184,7 @@ impl<'t> Parser<'t> {
     }
 
     fn do_bump(&mut self, kind: SyntaxKind, n_raw_tokens: u8) {
-        self.pos += u32::from(n_raw_tokens);
+        self.token_pos += usize::from(n_raw_tokens);
         self.push_event(Event::Token { kind, n_raw_tokens });
     }
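
Finally, a standalone sketch of the cursor mechanics after the rename (not from the commit; `Cursor` and the `u16` kinds are invented for illustration): lookahead reads at `token_pos + n`, and `do_bump` advances `token_pos` by the number of raw tokens folded into one syntax-level token.

// Illustrative only: a reduced model of the parser's cursor after the change.
struct Cursor {
    kinds: Vec<u16>,   // stand-in for a stream of SyntaxKind values
    token_pos: usize,  // the field formerly wrapped in TokenPos
}

impl Cursor {
    /// Lookahead: kind of the n-th token past the cursor (0 = current).
    fn nth(&self, n: usize) -> Option<u16> {
        self.kinds.get(self.token_pos + n).copied()
    }

    /// Advance like `Parser::do_bump`: e.g. 2 raw tokens for a glued `>>`.
    fn do_bump(&mut self, n_raw_tokens: u8) {
        self.token_pos += usize::from(n_raw_tokens);
    }
}

fn main() {
    let mut c = Cursor { kinds: vec![10, 20, 30], token_pos: 0 };
    assert_eq!(c.nth(1), Some(20));
    c.do_bump(2);                 // consumed two raw tokens at once
    assert_eq!(c.nth(0), Some(30));
    assert_eq!(c.nth(1), None);   // past the end
}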