create CharSeq struct instead of using Peekable

This commit is contained in:
Andy Grove 2018-09-02 18:59:30 -06:00
parent 7ff4133327
commit 69a140a820
5 changed files with 64 additions and 59 deletions

View file

@ -35,40 +35,30 @@ impl SQLTokenizer<AcmeToken> for AcmeTokenizer {
unimplemented!()
}
fn peek_token(&mut self) -> Result<Option<SQLToken<AcmeToken>>, TokenizerError<AcmeToken>> {
fn next_token(&mut self) -> Result<Option<SQLToken<AcmeToken>>, TokenizerError> {
// let mut arc = self.ansi_tokenizer.lock().unwrap();
// match arc.peek_char() {
// Some(&ch) => match ch {
// '!' => {
// arc.next_char(); // consume the first `!`
// match arc.peek_char() {
// Some(&ch) => match ch {
// '!' => {
// arc.next_char(); // consume the second `!`
// Ok(Some(SQLToken::Custom(AcmeToken::Factorial)))
// },
// _ => Err(TokenizerError::UnexpectedChar(ch,Position::new(0,0)))
// },
// None => Ok(Some(SQLToken::Not))
// }
// }
// _ => arc.next_token()
// }
// _ => arc.next_token()
// }
unimplemented!()
}
fn next_token(&mut self) -> Result<Option<SQLToken<AcmeToken>>, TokenizerError<AcmeToken>> {
let mut arc = self.ansi_tokenizer.lock().unwrap();
match arc.peek_char() {
Some(&ch) => match ch {
'!' => {
arc.next_char(); // consume the first `!`
match arc.peek_char() {
Some(&ch) => match ch {
'!' => {
arc.next_char(); // consume the second `!`
Ok(Some(SQLToken::Custom(AcmeToken::Factorial)))
},
_ => Err(TokenizerError::UnexpectedChar(ch,Position::new(0,0)))
},
None => Ok(Some(SQLToken::Not))
}
}
_ => arc.next_token()
}
_ => arc.next_token()
}
}
fn peek_char(&mut self) -> Option<&char> {
unimplemented!()
}
fn next_char(&mut self) -> Option<&char> {
unimplemented!()
}
}
struct AcmeParser {

View file

@ -24,7 +24,7 @@ impl<TokenType, ExprType> SQLParser<TokenType, ExprType> for ANSISQLParser<Token
fn parse_prefix(&mut self) -> Result<Box<SQLExpr<ExprType>>, ParserError<TokenType>> {
match self.tokenizer.lock().unwrap().peek_token()? {
match self.tokenizer.lock().unwrap().next_token()? {
Some(SQLToken::Keyword(ref k)) => match k.to_uppercase().as_ref() {
"INSERT" => unimplemented!(),
"UPDATE" => unimplemented!(),

View file

@ -16,11 +16,7 @@ impl<'a, TokenType> SQLTokenizer<TokenType> for ANSISQLTokenizer<'a>
unimplemented!()
}
fn peek_token(&mut self) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>> {
unimplemented!()
}
fn next_token(&mut self) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>> {
fn next_token(&mut self) -> Result<Option<SQLToken<TokenType>>, TokenizerError> {
match self.chars.next() {
Some(ch) => match ch {
' ' | '\t' | '\n' => Ok(Some(SQLToken::Whitespace(ch))),
@ -48,12 +44,5 @@ impl<'a, TokenType> SQLTokenizer<TokenType> for ANSISQLTokenizer<'a>
}
}
fn peek_char(&mut self) -> Option<&char> {
unimplemented!()
}
fn next_char(&mut self) -> Option<&char> {
unimplemented!()
}
}

View file

@ -1,8 +1,6 @@
use std::cmp::PartialEq;
use std::fmt::Debug;
use std::rc::Rc;
use std::str::Chars;
use std::iter::Peekable;
use std::sync::{Arc, Mutex};
use super::tokenizer::*;
@ -99,10 +97,10 @@ pub enum ParserError<TokenType>
Custom(String)
}
impl<TokenType> From<TokenizerError<TokenType>> for ParserError<TokenType>
impl<TokenType> From<TokenizerError> for ParserError<TokenType>
where TokenType: Debug + PartialEq {
fn from(_: TokenizerError<TokenType>) -> Self {
fn from(_: TokenizerError) -> Self {
unimplemented!()
}
}

View file

@ -3,6 +3,41 @@ use std::fmt::Debug;
//use std::iter::Peekable;
//use std::str::Chars;
/// A sequence of characters with a movable cursor and a single saved
/// mark position, used by the tokenizer for backtracking.
pub struct CharSeq {
    chars: Vec<char>,
    i: usize,
    m: usize
}

impl CharSeq {
    /// Build a `CharSeq` over the characters of the given SQL text,
    /// with the cursor and mark both at the start.
    pub fn new(sql: &str) -> Self {
        CharSeq { chars: sql.chars().collect(), i: 0, m: 0 }
    }

    /// Remember the current cursor position so it can be restored later.
    pub fn mark(&mut self) {
        self.m = self.i;
    }

    /// Move the cursor back to the most recently marked position.
    pub fn reset(&mut self) {
        self.i = self.m;
    }

    /// Return the character at the cursor and advance, or `None`
    /// when the end of the sequence has been reached.
    pub fn next(&mut self) -> Option<char> {
        // `get` performs the bounds check; only advance on a hit.
        let ch = self.chars.get(self.i).map(|&c| c);
        if ch.is_some() {
            self.i += 1;
        }
        ch
    }
}
#[derive(Debug)]
pub struct Position {
line: usize,
@ -15,11 +50,11 @@ impl Position {
}
#[derive(Debug)]
pub enum TokenizerError<T> {
pub enum TokenizerError {
UnexpectedChar(char,Position),
UnexpectedEof(Position),
UnterminatedStringLiteral(Position),
Custom(T)
Custom(String)
}
/// SQL Tokens
@ -43,7 +78,7 @@ pub enum SQLToken<T: Debug + PartialEq> {
LParen,
RParen,
Comma,
/// Custom token
/// Custom token (dialect-specific)
Custom(T)
}
@ -53,15 +88,8 @@ pub trait SQLTokenizer<TokenType>
/// get the precendence of a token
fn precedence(&self, token: &SQLToken<TokenType>) -> usize;
/// return a reference to the next token but do not advance the index
fn peek_token(&mut self) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>>;
/// return a reference to the next token and advance the index
fn next_token(&mut self) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>>;
fn peek_char(&mut self) -> Option<&char>;
fn next_char(&mut self) -> Option<&char>;
fn next_token(&mut self) -> Result<Option<SQLToken<TokenType>>, TokenizerError>;
}
//