Refactoring

Andy Grove 2018-09-01 08:58:00 -06:00
parent 72cff632c1
commit 2ddc9f7a49
4 changed files with 71 additions and 49 deletions
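
In outline, the refactoring drops the lifetime-bound AcmeParser that owned a Peekable<Chars> and instead has the parser hold its tokenizer behind a shared Rc handle, with the expression-parsing entry point moving to a free parse_expr function. The sketch below shows that shape in isolation; the traits here are trimmed-down, hypothetical stand-ins (String in place of the crate's SQLToken/SQLExpr/ParserError type parameters, and the AcmeTokenizer-wraps-ANSISQLTokenizer layering collapsed into a single tokenizer), not the crate's real API.

use std::iter::Peekable;
use std::rc::Rc;
use std::str::Chars;

// Hypothetical, simplified stand-ins for the crate's traits.
trait SQLTokenizer {
    fn peek_token(&self, chars: &mut Peekable<Chars>) -> Option<String>;
}

trait SQLParser {
    fn parse_prefix(&mut self) -> Result<String, String>;
}

struct ANSISQLTokenizer;

impl SQLTokenizer for ANSISQLTokenizer {
    fn peek_token(&self, _chars: &mut Peekable<Chars>) -> Option<String> {
        None // stub
    }
}

// Before: struct AcmeParser<'a> { chars: Peekable<Chars<'a>> }
// After: the dialect parser no longer owns the character stream; it holds a
// shared, reference-counted handle to the tokenizer instead.
struct AcmeParser {
    tokenizer: Rc<dyn SQLTokenizer>,
}

impl SQLParser for AcmeParser {
    fn parse_prefix(&mut self) -> Result<String, String> {
        Err("not implemented".to_string()) // stub, as in the diff
    }
}

// The expression-parsing entry point becomes a free function that is handed
// the tokenizer, the dialect parser, and the character stream explicitly.
fn parse_expr(
    _tokenizer: Rc<dyn SQLTokenizer>,
    _parser: Rc<dyn SQLParser>,
    _chars: &mut Peekable<Chars>,
) -> Result<String, String> {
    Err("not implemented".to_string()) // stub, as in the diff
}

fn main() {
    let sql = "1 + !! 5 * 2";
    let tokenizer: Rc<dyn SQLTokenizer> = Rc::new(ANSISQLTokenizer);
    let parser: Rc<dyn SQLParser> = Rc::new(AcmeParser { tokenizer: tokenizer.clone() });
    let mut chars = sql.chars().peekable();
    println!("Parsed: {:?}", parse_expr(tokenizer, parser, &mut chars));
}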

@@ -1,5 +1,6 @@
use std::str::Chars;
use std::iter::Peekable;
use std::rc::Rc;
extern crate datafusion_sql;
@@ -62,43 +63,58 @@ impl SQLTokenizer<AcmeToken> for AcmeTokenizer {
}
}
struct AcmeParser<'a> {
chars: Peekable<Chars<'a>>
struct AcmeParser {
tokenizer: Rc<SQLTokenizer<AcmeToken>>
}
//
//impl<'a> AcmeParser<'a> {
//
// pub fn new(sql: &'a str) -> Self {
// AcmeParser {
// chars: sql.chars().peekable()
// }
// }
//}
impl<'a> AcmeParser<'a> {
pub fn new(sql: &'a str) -> Self {
AcmeParser {
chars: sql.chars().peekable()
}
}
}
impl<'a> SQLParser<AcmeToken, AcmeExpr> for AcmeParser<'a> {
impl SQLParser<AcmeToken, AcmeExpr> for AcmeParser {
fn parse_prefix(&mut self) -> Result<Box<SQLExpr<AcmeExpr>>, ParserError<AcmeToken>> {
unimplemented!()
}
fn parse_infix(&mut self, left: &SQLExpr<AcmeExpr>, precedence: usize) -> Result<Option<Box<SQLExpr<AcmeExpr>>>, ParserError<AcmeToken>> {
fn parse_infix(&mut self, left: &SQLExpr<AcmeExpr>, _precedence: usize) -> Result<Option<Box<SQLExpr<AcmeExpr>>>, ParserError<AcmeToken>> {
unimplemented!()
}
}
fn main() {
let sql = "1 + !! 5 * 2";
let acme_parser = AcmeParser::new(sql);
// let acme_parser = AcmeParser::new(sql);
//acme_parser
// let mut acme_tokenizer = AcmeTokenizer {
// generic: ANSISQLTokenizer { }
// };
let mut acme_tokenizer: Rc<SQLTokenizer<AcmeToken>> = Rc::new(AcmeTokenizer {
generic: ANSISQLTokenizer { }
});
let mut acme_parser: Rc<SQLParser<AcmeToken, AcmeExpr>> = Rc::new(AcmeParser {
tokenizer: acme_tokenizer.clone()
});
// let mut pratt_parser = Rc::new(PrattParser {
// chars: sql.chars().peekable(),
// tokenizer: acme_tokenizer.clone(),
// parser: acme_parser.clone()
// });
let mut chars = sql.chars().peekable();
let expr = parse_expr(acme_tokenizer, acme_parser, &mut chars);
println!("Parsed: {:?}", expr);
//
// let tokens = tokenize(&sql, &mut acme_tokenizer).unwrap();
//

@@ -29,7 +29,7 @@ impl<'a, TokenType, ExprType> SQLParser<TokenType, ExprType> for ANSISQLParser<'
}
}
fn parse_infix(&mut self, left: &SQLExpr<ExprType>, precedence: usize) -> Result<Option<Box<SQLExpr<ExprType>>>, ParserError<TokenType>> {
fn parse_infix(&mut self, _left: &SQLExpr<ExprType>, _precedence: usize) -> Result<Option<Box<SQLExpr<ExprType>>>, ParserError<TokenType>> {
unimplemented!()
}
}

@@ -10,11 +10,11 @@ pub struct ANSISQLTokenizer {}
impl<TokenType> SQLTokenizer<TokenType> for ANSISQLTokenizer
where TokenType: Debug + PartialEq {
fn precedence(&self, token: &SQLToken<TokenType>) -> usize {
fn precedence(&self, _token: &SQLToken<TokenType>) -> usize {
unimplemented!()
}
fn peek_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>> {
fn peek_token(&self, _chars: &mut Peekable<Chars>) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>> {
unimplemented!()
}

@@ -117,37 +117,43 @@ pub trait SQLParser<TokenType, ExprType>
}
struct PrattParser<'a, TokenType, ExprType> {
chars: Peekable<Chars<'a>>,
tokenizer: Rc<SQLTokenizer<TokenType>>,
parser: SQLParser<TokenType, ExprType>
pub fn parse_expr<'a, TokenType, ExprType>(_tokenizer: Rc<SQLTokenizer<TokenType>>, _parser: Rc<SQLParser<TokenType, ExprType>>, _chars: &mut Peekable<Chars<'a>>)
-> Result<Box<SQLExpr<ExprType>>, ParserError<TokenType>> where TokenType: Debug + PartialEq, ExprType: Debug {
unimplemented!()
}
impl<'a, TokenType, ExprType> PrattParser<'a, TokenType, ExprType>
where TokenType: Debug + PartialEq, ExprType: Debug {
fn parse_expr(&mut self) -> Result<Box<SQLExpr<ExprType>>, ParserError<TokenType>> {
let precedence: usize = 0;
let mut expr = self.parser.parse_prefix()?;
while let Some(token) = self.tokenizer.peek_token(&mut self.chars)? {
let next_precedence = self.tokenizer.precedence(&token);
if precedence >= next_precedence {
break;
}
expr = self.parser.parse_infix(&expr, next_precedence)?.unwrap(); //TODO: fix me
}
Ok(expr)
}
}
//pub struct PrattParser<'a, TokenType, ExprType> {
// chars: Peekable<Chars<'a>>,
// tokenizer: Rc<SQLTokenizer<TokenType>>,
// parser: Rc<SQLParser<TokenType, ExprType>>
//}
//
//impl<'a, TokenType, ExprType> PrattParser<'a, TokenType, ExprType>
// where TokenType: Debug + PartialEq, ExprType: Debug {
//
// pub fn parse_expr(&mut self) -> Result<Box<SQLExpr<ExprType>>, ParserError<TokenType>> {
//
// let precedence: usize = 0;
// let parser_ref = self.parser.as_ref();
//
// let mut expr = parser_ref.parse_prefix()?;
//
// while let Some(token) = self.tokenizer.peek_token(&mut self.chars)? {
//
// let next_precedence = self.tokenizer.precedence(&token);
//
// if precedence >= next_precedence {
// break;
// }
//
// expr = parser_ref.parse_infix(&expr, next_precedence)?.unwrap(); //TODO: fix me
// }
//
// Ok(expr)
// }
//
//}
// fn parse_prefix(&mut self) -> Result<Box<SQLExpr>, ParserError> {
//
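
The PrattParser::parse_expr loop shown above (removed here and kept only as a commented-out copy, in favour of the new free parse_expr function) is a standard precedence-climbing loop: parse a prefix expression, then fold in infix operators while the next token binds more tightly than the current precedence. Below is a self-contained sketch of that loop with hypothetical Token/Expr types and a token vector standing in for the crate's tokenizer and peekable character stream; parse_prefix and parse_infix are toy implementations, since the diff leaves the real ones as unimplemented!().

#[derive(Debug, Clone)]
enum Token {
    Num(i64),
    Plus,
    Star,
}

#[derive(Debug)]
enum Expr {
    Num(i64),
    Binary(Box<Expr>, Token, Box<Expr>),
}

// Higher value binds tighter; operands get precedence 0 so the loop stops on them.
fn precedence_of(token: &Token) -> usize {
    match token {
        Token::Plus => 10,
        Token::Star => 20,
        Token::Num(_) => 0,
    }
}

struct Parser {
    tokens: Vec<Token>,
    pos: usize,
}

impl Parser {
    fn peek(&self) -> Option<Token> {
        self.tokens.get(self.pos).cloned()
    }

    fn advance(&mut self) -> Option<Token> {
        let t = self.tokens.get(self.pos).cloned();
        self.pos += 1;
        t
    }

    // parse_prefix: consume a single operand.
    fn parse_prefix(&mut self) -> Result<Expr, String> {
        match self.advance() {
            Some(Token::Num(n)) => Ok(Expr::Num(n)),
            other => Err(format!("expected operand, found {:?}", other)),
        }
    }

    // parse_infix: consume the operator and its right-hand side at the given precedence.
    fn parse_infix(&mut self, left: Expr, prec: usize) -> Result<Expr, String> {
        let op = self.advance().ok_or_else(|| "expected operator".to_string())?;
        let right = self.parse_expr(prec)?;
        Ok(Expr::Binary(Box::new(left), op, Box::new(right)))
    }

    // The same loop as PrattParser::parse_expr in the diff: parse a prefix
    // expression, then keep folding in infix operators while the next token
    // binds more tightly than the current precedence.
    fn parse_expr(&mut self, precedence: usize) -> Result<Expr, String> {
        let mut expr = self.parse_prefix()?;
        while let Some(token) = self.peek() {
            let next_precedence = precedence_of(&token);
            if precedence >= next_precedence {
                break;
            }
            expr = self.parse_infix(expr, next_precedence)?;
        }
        Ok(expr)
    }
}

fn main() {
    // 1 + 2 * 3 parses as 1 + (2 * 3) because * binds tighter than +.
    let mut parser = Parser {
        tokens: vec![
            Token::Num(1),
            Token::Plus,
            Token::Num(2),
            Token::Star,
            Token::Num(3),
        ],
        pos: 0,
    };
    println!("{:?}", parser.parse_expr(0));
}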