Refactoring

This commit is contained in:
Andy Grove 2018-09-01 08:58:00 -06:00
parent 72cff632c1
commit 2ddc9f7a49
4 changed files with 71 additions and 49 deletions

View file

@ -29,7 +29,7 @@ impl<'a, TokenType, ExprType> SQLParser<TokenType, ExprType> for ANSISQLParser<'
}
}
fn parse_infix(&mut self, left: &SQLExpr<ExprType>, precedence: usize) -> Result<Option<Box<SQLExpr<ExprType>>>, ParserError<TokenType>> {
fn parse_infix(&mut self, _left: &SQLExpr<ExprType>, _precedence: usize) -> Result<Option<Box<SQLExpr<ExprType>>>, ParserError<TokenType>> {
unimplemented!()
}
}

View file

@ -10,11 +10,11 @@ pub struct ANSISQLTokenizer {}
impl<TokenType> SQLTokenizer<TokenType> for ANSISQLTokenizer
where TokenType: Debug + PartialEq {
fn precedence(&self, token: &SQLToken<TokenType>) -> usize {
fn precedence(&self, _token: &SQLToken<TokenType>) -> usize {
unimplemented!()
}
fn peek_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>> {
fn peek_token(&self, _chars: &mut Peekable<Chars>) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>> {
unimplemented!()
}

View file

@ -117,37 +117,43 @@ pub trait SQLParser<TokenType, ExprType>
}
struct PrattParser<'a, TokenType, ExprType> {
chars: Peekable<Chars<'a>>,
tokenizer: Rc<SQLTokenizer<TokenType>>,
parser: SQLParser<TokenType, ExprType>
/// Parse a single expression from the character stream, driving the supplied
/// tokenizer and parser.
///
/// NOTE(review): currently a stub — this function always panics via
/// `unimplemented!()`. The intended precedence-climbing logic appears to be
/// the commented-out `PrattParser` implementation elsewhere in this file;
/// confirm before relying on this entry point.
pub fn parse_expr<'a, TokenType, ExprType>(_tokenizer: Rc<SQLTokenizer<TokenType>>, _parser: Rc<SQLParser<TokenType, ExprType>>, _chars: &mut Peekable<Chars<'a>>)
    -> Result<Box<SQLExpr<ExprType>>, ParserError<TokenType>> where TokenType: Debug + PartialEq, ExprType: Debug {
    unimplemented!()
}
/// Pratt-style (top-down operator precedence) expression-parsing driver:
/// repeatedly extends a prefix expression with infix expressions while the
/// next token binds more tightly than the current context.
impl<'a, TokenType, ExprType> PrattParser<'a, TokenType, ExprType>
    where TokenType: Debug + PartialEq, ExprType: Debug {

    /// Parse one expression from `self.chars`.
    ///
    /// Returns the parsed expression tree, or a `ParserError` propagated
    /// from the underlying parser/tokenizer.
    fn parse_expr(&mut self) -> Result<Box<SQLExpr<ExprType>>, ParserError<TokenType>> {
        // Precedence of the enclosing context. NOTE(review): this is fixed
        // at 0 and never updated inside the loop, so the loop only stops
        // when a peeked token itself has precedence 0 — confirm this is
        // the intended termination condition.
        let precedence: usize = 0;
        // Start with a prefix expression (literal, identifier, unary op, ...).
        let mut expr = self.parser.parse_prefix()?;
        // Keep consuming infix operators while they bind tighter than the
        // current context.
        while let Some(token) = self.tokenizer.peek_token(&mut self.chars)? {
            let next_precedence = self.tokenizer.precedence(&token);
            if precedence >= next_precedence {
                break;
            }
            // parse_infix returns Option; the None case (token is not a
            // valid infix operator) is not handled and will panic here.
            expr = self.parser.parse_infix(&expr, next_precedence)?.unwrap(); //TODO: fix me
        }
        Ok(expr)
    }
}
//pub struct PrattParser<'a, TokenType, ExprType> {
// chars: Peekable<Chars<'a>>,
// tokenizer: Rc<SQLTokenizer<TokenType>>,
// parser: Rc<SQLParser<TokenType, ExprType>>
//}
//
//impl<'a, TokenType, ExprType> PrattParser<'a, TokenType, ExprType>
// where TokenType: Debug + PartialEq, ExprType: Debug {
//
// pub fn parse_expr(&mut self) -> Result<Box<SQLExpr<ExprType>>, ParserError<TokenType>> {
//
// let precedence: usize = 0;
// let parser_ref = self.parser.as_ref();
//
// let mut expr = parser_ref.parse_prefix()?;
//
// while let Some(token) = self.tokenizer.peek_token(&mut self.chars)? {
//
// let next_precedence = self.tokenizer.precedence(&token);
//
// if precedence >= next_precedence {
// break;
// }
//
// expr = parser_ref.parse_infix(&expr, next_precedence)?.unwrap(); //TODO: fix me
// }
//
// Ok(expr)
// }
//
//}
// fn parse_prefix(&mut self) -> Result<Box<SQLExpr>, ParserError> {
//