create Chars struct instead of using Peekable

This commit is contained in:
Andy Grove 2018-09-02 18:59:30 -06:00
parent 7ff4133327
commit 69a140a820
5 changed files with 64 additions and 59 deletions

View file

@ -35,40 +35,30 @@ impl SQLTokenizer<AcmeToken> for AcmeTokenizer {
unimplemented!() unimplemented!()
} }
fn peek_token(&mut self) -> Result<Option<SQLToken<AcmeToken>>, TokenizerError<AcmeToken>> { fn next_token(&mut self) -> Result<Option<SQLToken<AcmeToken>>, TokenizerError> {
// let mut arc = self.ansi_tokenizer.lock().unwrap();
// match arc.peek_char() {
// Some(&ch) => match ch {
// '!' => {
// arc.next_char(); // consume the first `!`
// match arc.peek_char() {
// Some(&ch) => match ch {
// '!' => {
// arc.next_char(); // consume the second `!`
// Ok(Some(SQLToken::Custom(AcmeToken::Factorial)))
// },
// _ => Err(TokenizerError::UnexpectedChar(ch,Position::new(0,0)))
// },
// None => Ok(Some(SQLToken::Not))
// }
// }
// _ => arc.next_token()
// }
// _ => arc.next_token()
// }
unimplemented!() unimplemented!()
} }
fn next_token(&mut self) -> Result<Option<SQLToken<AcmeToken>>, TokenizerError<AcmeToken>> {
let mut arc = self.ansi_tokenizer.lock().unwrap();
match arc.peek_char() {
Some(&ch) => match ch {
'!' => {
arc.next_char(); // consume the first `!`
match arc.peek_char() {
Some(&ch) => match ch {
'!' => {
arc.next_char(); // consume the second `!`
Ok(Some(SQLToken::Custom(AcmeToken::Factorial)))
},
_ => Err(TokenizerError::UnexpectedChar(ch,Position::new(0,0)))
},
None => Ok(Some(SQLToken::Not))
}
}
_ => arc.next_token()
}
_ => arc.next_token()
}
}
fn peek_char(&mut self) -> Option<&char> {
unimplemented!()
}
fn next_char(&mut self) -> Option<&char> {
unimplemented!()
}
} }
struct AcmeParser { struct AcmeParser {

View file

@ -24,7 +24,7 @@ impl<TokenType, ExprType> SQLParser<TokenType, ExprType> for ANSISQLParser<Token
fn parse_prefix(&mut self) -> Result<Box<SQLExpr<ExprType>>, ParserError<TokenType>> { fn parse_prefix(&mut self) -> Result<Box<SQLExpr<ExprType>>, ParserError<TokenType>> {
match self.tokenizer.lock().unwrap().peek_token()? { match self.tokenizer.lock().unwrap().next_token()? {
Some(SQLToken::Keyword(ref k)) => match k.to_uppercase().as_ref() { Some(SQLToken::Keyword(ref k)) => match k.to_uppercase().as_ref() {
"INSERT" => unimplemented!(), "INSERT" => unimplemented!(),
"UPDATE" => unimplemented!(), "UPDATE" => unimplemented!(),

View file

@ -16,11 +16,7 @@ impl<'a, TokenType> SQLTokenizer<TokenType> for ANSISQLTokenizer<'a>
unimplemented!() unimplemented!()
} }
fn peek_token(&mut self) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>> { fn next_token(&mut self) -> Result<Option<SQLToken<TokenType>>, TokenizerError> {
unimplemented!()
}
fn next_token(&mut self) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>> {
match self.chars.next() { match self.chars.next() {
Some(ch) => match ch { Some(ch) => match ch {
' ' | '\t' | '\n' => Ok(Some(SQLToken::Whitespace(ch))), ' ' | '\t' | '\n' => Ok(Some(SQLToken::Whitespace(ch))),
@ -48,12 +44,5 @@ impl<'a, TokenType> SQLTokenizer<TokenType> for ANSISQLTokenizer<'a>
} }
} }
fn peek_char(&mut self) -> Option<&char> {
unimplemented!()
}
fn next_char(&mut self) -> Option<&char> {
unimplemented!()
}
} }

View file

@ -1,8 +1,6 @@
use std::cmp::PartialEq; use std::cmp::PartialEq;
use std::fmt::Debug; use std::fmt::Debug;
use std::rc::Rc; use std::rc::Rc;
use std::str::Chars;
use std::iter::Peekable;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use super::tokenizer::*; use super::tokenizer::*;
@ -99,10 +97,10 @@ pub enum ParserError<TokenType>
Custom(String) Custom(String)
} }
impl<TokenType> From<TokenizerError<TokenType>> for ParserError<TokenType> impl<TokenType> From<TokenizerError> for ParserError<TokenType>
where TokenType: Debug + PartialEq { where TokenType: Debug + PartialEq {
fn from(_: TokenizerError<TokenType>) -> Self { fn from(_: TokenizerError) -> Self {
unimplemented!() unimplemented!()
} }
} }

View file

@ -3,6 +3,41 @@ use std::fmt::Debug;
//use std::iter::Peekable; //use std::iter::Peekable;
//use std::str::Chars; //use std::str::Chars;
/// A character sequence over a SQL string, supporting mark/reset
/// so callers can back up after speculative reads.
pub struct CharSeq {
    chars: Vec<char>,
    i: usize,
    m: usize
}
impl CharSeq {
    /// Build a new sequence from the given SQL text.
    pub fn new(sql: &str) -> Self {
        CharSeq {
            chars: sql.chars().collect(),
            i: 0,
            m: 0
        }
    }
    /// Remember the current position so `reset` can return to it.
    pub fn mark(&mut self) {
        self.m = self.i;
    }
    /// Rewind to the most recently marked position (index 0 if never marked).
    pub fn reset(&mut self) {
        self.i = self.m;
    }
    /// Return the next character and advance, or `None` at end of input.
    pub fn next(&mut self) -> Option<char> {
        let ch = self.chars.get(self.i).copied();
        if ch.is_some() {
            self.i += 1;
        }
        ch
    }
}
#[derive(Debug)] #[derive(Debug)]
pub struct Position { pub struct Position {
line: usize, line: usize,
@ -15,11 +50,11 @@ impl Position {
} }
#[derive(Debug)] #[derive(Debug)]
pub enum TokenizerError<T> { pub enum TokenizerError {
UnexpectedChar(char,Position), UnexpectedChar(char,Position),
UnexpectedEof(Position), UnexpectedEof(Position),
UnterminatedStringLiteral(Position), UnterminatedStringLiteral(Position),
Custom(T) Custom(String)
} }
/// SQL Tokens /// SQL Tokens
@ -43,7 +78,7 @@ pub enum SQLToken<T: Debug + PartialEq> {
LParen, LParen,
RParen, RParen,
Comma, Comma,
/// Custom token /// Custom token (dialect-specific)
Custom(T) Custom(T)
} }
@ -53,15 +88,8 @@ pub trait SQLTokenizer<TokenType>
/// get the precendence of a token /// get the precendence of a token
fn precedence(&self, token: &SQLToken<TokenType>) -> usize; fn precedence(&self, token: &SQLToken<TokenType>) -> usize;
/// return a reference to the next token but do not advance the index
fn peek_token(&mut self) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>>;
/// return a reference to the next token and advance the index /// return a reference to the next token and advance the index
fn next_token(&mut self) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>>; fn next_token(&mut self) -> Result<Option<SQLToken<TokenType>>, TokenizerError>;
fn peek_char(&mut self) -> Option<&char>;
fn next_char(&mut self) -> Option<&char>;
} }
// //