Refactoring use of generics, added peek_token

Andy Grove 2018-09-01 08:11:26 -06:00
parent eed0261297
commit e6e9c8d2cc
5 changed files with 56 additions and 34 deletions
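
For orientation, the public trait surface after this refactor, collected from the additions in the diffs below (a single TokenType parameter replaces the separate token/error type parameters, the parser gains an ExprType parameter, and the tokenizer gains a non-consuming peek_token):

    pub trait SQLTokenizer<TokenType>
        where TokenType: Debug + PartialEq {
        fn peek_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>>;
        fn next_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>>;
    }

    pub trait SQLParser<TokenType, ExprType>
        where TokenType: Debug + PartialEq, ExprType: Debug + PartialEq {
        fn parse_prefix(&mut self) -> Result<Box<SQLExpr<ExprType>>, ParserError<TokenType>>;
        fn parse_infix(&mut self, left: SQLExpr<ExprType>) -> Result<Option<Box<SQLExpr<ExprType>>>, ParserError<TokenType>>;
    }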

@@ -32,9 +32,13 @@ struct AcmeTokenizer {
 }
 /// The ACME tokenizer looks for the factorial operator `!!` but delegates everything else
-impl SQLTokenizer<AcmeToken, AcmeTokenizerError> for AcmeTokenizer {
+impl SQLTokenizer<AcmeToken> for AcmeTokenizer {
-    fn next_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<SQLToken<AcmeToken>>, TokenizerError<AcmeTokenizerError>> {
+    fn peek_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<SQLToken<AcmeToken>>, TokenizerError<AcmeToken>> {
+        unimplemented!()
+    }
+    fn next_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<SQLToken<AcmeToken>>, TokenizerError<AcmeToken>> {
         match chars.peek() {
             Some(&ch) => match ch {
                 '!' => {

@@ -6,18 +6,30 @@ use std::str::Chars;
 use super::super::tokenizer::*;
 use super::super::parser::*;
-pub struct ANSISQLParser {
+pub struct ANSISQLParser<'a, TokenType> {
+    chars: Peekable<Chars<'a>>,
+    tokenizer: SQLTokenizer<TokenType>
 }
-impl<S,TE> SQLParser<S,TE> for ANSISQLParser
-    where S: Debug + PartialEq {
+impl<'a, TokenType, ExprType> SQLParser<TokenType, ExprType> for ANSISQLParser<'a, TokenType>
+    where TokenType: Debug + PartialEq, ExprType: Debug + PartialEq {
-    fn parse_prefix(&mut self) -> Result<Box<SQLExpr<S>>, ParserError<S, TE>> {
-        unimplemented!()
+    fn parse_prefix(&mut self) -> Result<Box<SQLExpr<ExprType>>, ParserError<TokenType>> {
+        match self.tokenizer.peek_token(&mut self.chars)? {
+            Some(SQLToken::Keyword(ref k)) => match k.to_uppercase().as_ref() {
+                "INSERT" => unimplemented!(),
+                "UPDATE" => unimplemented!(),
+                "DELETE" => unimplemented!(),
+                "SELECT" => unimplemented!(),
+                "CREATE" => unimplemented!(),
+                _ => unimplemented!()
+            },
+            _ => unimplemented!()
+        }
     }
-    fn parse_infix(&mut self, left: SQLExpr<S>) -> Result<Option<Box<SQLExpr<S>>>, ParserError<S, TE>> {
+    fn parse_infix(&mut self, left: SQLExpr<ExprType>) -> Result<Option<Box<SQLExpr<ExprType>>>, ParserError<TokenType>> {
         unimplemented!()
     }
 }

@@ -7,10 +7,14 @@ use super::super::tokenizer::*;
 pub struct ANSISQLTokenizer {}
-impl<S,TE> SQLTokenizer<S,TE> for ANSISQLTokenizer
-    where S: Debug + PartialEq {
+impl<TokenType> SQLTokenizer<TokenType> for ANSISQLTokenizer
+    where TokenType: Debug + PartialEq {
-    fn next_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<SQLToken<S>>, TokenizerError<TE>> {
+    fn peek_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>> {
+        unimplemented!()
+    }
+    fn next_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>> {
         match chars.next() {
             Some(ch) => match ch {
                 ' ' | '\t' | '\n' => Ok(Some(SQLToken::Whitespace(ch))),
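
The peek_token stubs above are left as unimplemented!(). A minimal sketch of one way to fill them, assuming it is acceptable to re-tokenize on every peek (this is not part of the commit): run next_token on a clone of the iterator, which leaves the caller's position untouched because Peekable<Chars> is cheap to clone.

    // Drop-in body for the unimplemented!() stub in the impl above.
    fn peek_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>> {
        // Scan ahead on a throwaway clone; `chars` itself does not advance.
        let mut lookahead = chars.clone();
        self.next_token(&mut lookahead)
    }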

@@ -70,48 +70,47 @@ pub enum SQLOperator<T> {
 /// SQL Expressions
 #[derive(Debug)]
-pub enum SQLExpr<T> {
+pub enum SQLExpr<ExprType> {
     /// Identifier e.g. table name or column name
     Identifier(String),
     /// Literal value
     Literal(String),
     /// Binary expression e.g. `1 + 2` or `fname LIKE "A%"`
-    Binary(Box<SQLExpr<T>>, SQLOperator<T>, Box<SQLExpr<T>>),
+    Binary(Box<SQLExpr<ExprType>>, SQLOperator<ExprType>, Box<SQLExpr<ExprType>>),
     /// Function invocation with function name and list of argument expressions
-    FunctionCall(String, Vec<SQLExpr<T>>),
+    FunctionCall(String, Vec<SQLExpr<ExprType>>),
     Insert,
     Update,
     Delete,
     Select,
     CreateTable,
     /// Custom expression (vendor-specific)
-    Custom(T)
+    Custom(ExprType)
 }
 #[derive(Debug)]
-pub enum ParserError<S, PE>
-    where S: Debug + PartialEq {
-    WrongToken { expected: Vec<SQLToken<S>>, actual: SQLToken<S>, line: usize, col: usize },
-    Custom(PE)
+pub enum ParserError<TokenType>
+    where TokenType: Debug + PartialEq {
+    WrongToken { expected: Vec<SQLToken<TokenType>>, actual: SQLToken<TokenType>, line: usize, col: usize },
+    Custom(String)
 }
-impl<S, TE> From<TokenizerError<TE>> for ParserError<S, TE>
-    where S: Debug + PartialEq {
+impl<TokenType> From<TokenizerError<TokenType>> for ParserError<TokenType>
+    where TokenType: Debug + PartialEq {
-    fn from(_: TokenizerError<TE>) -> Self {
+    fn from(_: TokenizerError<TokenType>) -> Self {
         unimplemented!()
     }
 }
-pub trait SQLParser<S, PE>
-    where S: Debug + PartialEq {
+pub trait SQLParser<TokenType, ExprType>
+    where TokenType: Debug + PartialEq, ExprType: Debug + PartialEq {
     /// parse the prefix and stop once an infix operator is reached
-    fn parse_prefix(&mut self) -> Result<Box<SQLExpr<S>>, ParserError<S, PE>> ;
+    fn parse_prefix(&mut self) -> Result<Box<SQLExpr<ExprType>>, ParserError<TokenType>> ;
     /// parse the next infix expression, returning None if the precedence has changed
-    fn parse_infix(&mut self, left: SQLExpr<S>) -> Result<Option<Box<SQLExpr<S>>>, ParserError<S, PE>>;
+    fn parse_infix(&mut self, left: SQLExpr<ExprType>) -> Result<Option<Box<SQLExpr<ExprType>>>, ParserError<TokenType>>;
 }
 //
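
A usage note on the reshaped generics (a sketch, not from this commit; module paths and a wrapping fn are omitted, and AcmeExpr is a hypothetical dialect type): ExprType is what lets a dialect hang vendor-specific nodes off SQLExpr::Custom, while custom parser errors now travel as a plain String instead of a dedicated error type parameter.

    // Hypothetical vendor extension type; Debug + PartialEq as SQLParser requires.
    #[derive(Debug, PartialEq)]
    enum AcmeExpr {
        Factorial,
    }

    // An ordinary node: the function call SQRT(1).
    let call: SQLExpr<AcmeExpr> = SQLExpr::FunctionCall(
        "SQRT".to_string(),
        vec![SQLExpr::Literal("1".to_string())],
    );

    // A vendor-specific node goes through the Custom variant.
    let custom: SQLExpr<AcmeExpr> = SQLExpr::Custom(AcmeExpr::Factorial);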

@@ -47,21 +47,24 @@ pub enum SQLToken<T: Debug + PartialEq> {
     Custom(T)
 }
-pub trait SQLTokenizer<S, TE>
-    where S: Debug + PartialEq {
+pub trait SQLTokenizer<TokenType>
+    where TokenType: Debug + PartialEq {
+    /// return a reference to the next token but do not advance the index
+    fn peek_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>>;
     /// return a reference to the next token and advance the index
-    fn next_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<SQLToken<S>>, TokenizerError<TE>>;
+    fn next_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>>;
 }
-pub fn tokenize<S,TE>(sql: &str, tokenizer: &mut SQLTokenizer<S,TE>) -> Result<Vec<SQLToken<S>>, TokenizerError<TE>>
-    where S: Debug + PartialEq
+pub fn tokenize<TokenType>(sql: &str, tokenizer: &mut SQLTokenizer<TokenType>) -> Result<Vec<SQLToken<TokenType>>, TokenizerError<TokenType>>
+    where TokenType: Debug + PartialEq
 {
     let mut peekable = sql.chars().peekable();
-    let mut tokens : Vec<SQLToken<S>> = vec![];
+    let mut tokens : Vec<SQLToken<TokenType>> = vec![];
     loop {
         match tokenizer.next_token(&mut peekable)? {
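
Finally, a sketch of the intended call shape for the tokenize function above (a sketch only: the module paths and the NoCustomToken placeholder are assumptions, and most tokenizer branches still hit unimplemented!() at this commit):

    // Placeholder for dialects that add no custom tokens; any Debug + PartialEq type works.
    #[derive(Debug, PartialEq)]
    enum NoCustomToken {}

    fn example() {
        // ANSISQLTokenizer implements SQLTokenizer<TokenType> for any suitable TokenType,
        // so the token type is picked at the call site via the turbofish.
        match tokenize::<NoCustomToken>("SELECT 1 + 1", &mut ANSISQLTokenizer {}) {
            Ok(tokens) => {
                let _: Vec<SQLToken<NoCustomToken>> = tokens;
            }
            Err(_) => {
                // TokenizerError<NoCustomToken>
            }
        }
    }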