Roughing out pratt parser logic

This commit is contained in:
Andy Grove 2018-09-01 08:39:27 -06:00
parent e6e9c8d2cc
commit 72cff632c1
5 changed files with 91 additions and 46 deletions

View file

@ -7,24 +7,21 @@ use datafusion_sql::ansi::tokenizer::ANSISQLTokenizer;
use datafusion_sql::tokenizer::*;
use datafusion_sql::parser::*;
///
/// This example demonstrates building a custom ACME parser that extends the generic parser
/// by adding support for a factorial expression `!! expr`.
///
/// Custom token type for the ACME dialect, extending the generic
/// tokenizer's token set.
#[derive(Debug,PartialEq)]
enum AcmeToken {
    /// Factorial token `!!`
    Factorial
}
/// Custom operator type for the ACME dialect.
#[derive(Debug)]
enum AcmeOperator {
    /// Factorial operator `!!`
    Factorial
}
#[derive(Debug)]
enum AcmeTokenizerError {
enum AcmeExpr {
/// Factorial expression
Factorial(Box<SQLExpr<AcmeExpr>>)
}
struct AcmeTokenizer {
@ -34,6 +31,10 @@ struct AcmeTokenizer {
/// The ACME tokenizer looks for the factorial operator `!!` but delegates everything else
impl SQLTokenizer<AcmeToken> for AcmeTokenizer {
fn precedence(&self, token: &SQLToken<AcmeToken>) -> usize {
unimplemented!()
}
fn peek_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<SQLToken<AcmeToken>>, TokenizerError<AcmeToken>> {
unimplemented!()
}
@ -53,7 +54,7 @@ impl SQLTokenizer<AcmeToken> for AcmeTokenizer {
},
None => Ok(Some(SQLToken::Not))
}
},
}
_ => self.generic.next_token(chars)
}
_ => self.generic.next_token(chars)
@ -61,19 +62,47 @@ impl SQLTokenizer<AcmeToken> for AcmeTokenizer {
}
}
/// Custom parser for the ACME dialect; consumes characters from the SQL
/// input through a peekable iterator.
struct AcmeParser<'a> {
    /// Peekable character stream over the SQL text being parsed.
    chars: Peekable<Chars<'a>>
}
impl<'a> AcmeParser<'a> {
    /// Creates a parser over the given SQL text.
    pub fn new(sql: &'a str) -> Self {
        let chars = sql.chars().peekable();
        AcmeParser { chars }
    }
}
/// Parser entry points for the ACME dialect; both methods are still stubs.
impl<'a> SQLParser<AcmeToken, AcmeExpr> for AcmeParser<'a> {
    /// Parses a prefix expression and stops at the first infix operator.
    /// Not yet implemented.
    fn parse_prefix(&mut self) -> Result<Box<SQLExpr<AcmeExpr>>, ParserError<AcmeToken>> {
        unimplemented!()
    }
    /// Parses the next infix expression, returning `None` when the
    /// precedence has changed (per the `SQLParser` trait contract).
    /// Not yet implemented.
    fn parse_infix(&mut self, left: &SQLExpr<AcmeExpr>, precedence: usize) -> Result<Option<Box<SQLExpr<AcmeExpr>>>, ParserError<AcmeToken>> {
        unimplemented!()
    }
}
fn main() {
let sql = "1 + !! 5 * 2";
let mut acme_tokenizer = AcmeTokenizer {
generic: ANSISQLTokenizer { }
};
let acme_parser = AcmeParser::new(sql);
let tokens = tokenize(&sql, &mut acme_tokenizer).unwrap();
println!("tokens = {:?}", tokens);
//acme_parser
// let mut acme_tokenizer = AcmeTokenizer {
// generic: ANSISQLTokenizer { }
// };
//
// let tokens = tokenize(&sql, &mut acme_tokenizer).unwrap();
//
// println!("tokens = {:?}", tokens);

View file

@ -29,7 +29,7 @@ impl<'a, TokenType, ExprType> SQLParser<TokenType, ExprType> for ANSISQLParser<'
}
}
fn parse_infix(&mut self, left: SQLExpr<ExprType>) -> Result<Option<Box<SQLExpr<ExprType>>>, ParserError<TokenType>> {
fn parse_infix(&mut self, left: &SQLExpr<ExprType>, precedence: usize) -> Result<Option<Box<SQLExpr<ExprType>>>, ParserError<TokenType>> {
unimplemented!()
}
}

View file

@ -10,6 +10,10 @@ pub struct ANSISQLTokenizer {}
impl<TokenType> SQLTokenizer<TokenType> for ANSISQLTokenizer
where TokenType: Debug + PartialEq {
fn precedence(&self, token: &SQLToken<TokenType>) -> usize {
unimplemented!()
}
fn peek_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>> {
unimplemented!()
}

View file

@ -1,5 +1,8 @@
use std::cmp::PartialEq;
use std::fmt::Debug;
use std::rc::Rc;
use std::str::Chars;
use std::iter::Peekable;
use super::tokenizer::*;
@ -105,41 +108,47 @@ impl<TokenType> From<TokenizerError<TokenType>> for ParserError<TokenType>
pub trait SQLParser<TokenType, ExprType>
where TokenType: Debug + PartialEq, ExprType: Debug + PartialEq {
where TokenType: Debug + PartialEq, ExprType: Debug {
/// parse the prefix and stop once an infix operator is reached
fn parse_prefix(&mut self) -> Result<Box<SQLExpr<ExprType>>, ParserError<TokenType>> ;
/// parse the next infix expression, returning None if the precedence has changed
fn parse_infix(&mut self, left: SQLExpr<ExprType>) -> Result<Option<Box<SQLExpr<ExprType>>>, ParserError<TokenType>>;
fn parse_infix(&mut self, left: &SQLExpr<ExprType>, precedence: usize) -> Result<Option<Box<SQLExpr<ExprType>>>, ParserError<TokenType>>;
}
/// Pratt parser that drives a tokenizer plus a dialect-specific parser,
/// combining prefix and infix parsing by operator precedence.
struct PrattParser<'a, TokenType, ExprType> {
    /// Character stream being tokenized/parsed.
    chars: Peekable<Chars<'a>>,
    /// Tokenizer, shared so the dialect parser can hold a reference too.
    tokenizer: Rc<SQLTokenizer<TokenType>>,
    /// Dialect-specific prefix/infix parser. Boxed: a bare trait-object
    /// field is unsized, which made the struct impossible to construct.
    parser: Box<SQLParser<TokenType, ExprType>>
}
impl<'a, TokenType, ExprType> PrattParser<'a, TokenType, ExprType>
    where TokenType: Debug + PartialEq, ExprType: Debug {

    /// Parses a complete expression using the Pratt (precedence-climbing)
    /// technique: parse a prefix expression, then repeatedly fold in infix
    /// expressions while the next token binds more tightly.
    ///
    /// Returns the parsed expression, or a `ParserError` propagated from
    /// the tokenizer or the dialect parser.
    fn parse_expr(&mut self) -> Result<Box<SQLExpr<ExprType>>, ParserError<TokenType>> {

        // Top-level call starts at the lowest precedence.
        let precedence: usize = 0;

        let mut expr = self.parser.parse_prefix()?;

        // Loop while there are more tokens and until the precedence changes.
        while let Some(token) = self.tokenizer.peek_token(&mut self.chars)? {

            let next_precedence = self.tokenizer.precedence(&token);

            if precedence >= next_precedence {
                break;
            }

            // `parse_infix` returns None when the precedence has changed;
            // stop folding instead of panicking on unwrap.
            match self.parser.parse_infix(&expr, next_precedence)? {
                Some(infix_expr) => expr = infix_expr,
                None => break,
            }
        }

        Ok(expr)
    }
}
//
//
//struct GenericParser {
// tokenizer: SQLTokenizer
//}
//
//impl GenericParser {
//
// fn parse_expr(&mut self, precedence: u8) -> Result<Box<SQLExpr>, ParserError> {
//
// let mut expr = self.parse_prefix()?;
//
// // loop while there are more tokens and until the precedence changes
// while let Some(token) = self.tokenizer.peek_token()? {
//
// let next_precedence = self.get_precedence(&token);
//
// if precedence >= next_precedence {
// break;
// }
//
// expr = self.parse_infix(expr, next_precedence)?;
// }
//
// Ok(expr)
// }
//
// fn parse_prefix(&mut self) -> Result<Box<SQLExpr>, ParserError> {
//
// match self.tokenizer.peek_token()? {

View file

@ -50,6 +50,9 @@ pub enum SQLToken<T: Debug + PartialEq> {
pub trait SQLTokenizer<TokenType>
where TokenType: Debug + PartialEq {
/// get the precedence of a token
fn precedence(&self, token: &SQLToken<TokenType>) -> usize;
/// return a reference to the next token but do not advance the index
fn peek_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<SQLToken<TokenType>>, TokenizerError<TokenType>>;