Mirror of https://github.com/apache/datafusion-sqlparser-rs.git, synced 2025-08-18 13:10:15 +00:00
Refactoring

parent 69a140a820
commit 2e20b15c2e

5 changed files with 68 additions and 58 deletions
@@ -1,9 +1,7 @@
 use std::cmp::PartialEq;
 use std::fmt::Debug;
-//use std::iter::Peekable;
-//use std::str::Chars;
-
 
+/// Simple holder for a sequence of characters that supports iteration and mark/reset methods
 pub struct CharSeq {
     chars: Vec<char>,
     i: usize,
@@ -12,6 +10,7 @@ pub struct CharSeq {
 
 impl CharSeq {
 
+    /// Create a CharSeq from a string
     pub fn new(sql: &str) -> Self {
         CharSeq {
             chars: sql.chars().collect(),
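Collecting the input into a Vec<char> up front is the design choice that the removed Peekable/Chars imports point at: Peekable gives exactly one char of lookahead and can never rewind, while an index into a Vec can be saved and restored at will. A small comparison sketch, plain std only:

fn main() {
    // Peekable offers one char of lookahead and no way back.
    let mut it = "SELECT".chars().peekable();
    assert_eq!(it.peek(), Some(&'S'));
    assert_eq!(it.next(), Some('S'));
    // 'S' is consumed; the iterator cannot rewind to it.

    // With a Vec<char> and an index, rewinding is just resetting an integer,
    // which is all CharSeq's mark/reset need to do.
    let chars: Vec<char> = "SELECT".chars().collect();
    let mark = 0usize;
    let mut i = mark;
    i += 2; // consume 'S', 'E'
    assert_eq!(chars[i], 'L');
    i = mark; // rewind to the mark
    assert_eq!(chars[i], 'S');
}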
@@ -20,14 +19,26 @@ impl CharSeq {
         }
     }
 
+    /// Mark the current index
+    pub fn mark(&mut self) {
+        self.m = self.i;
+    }
+
+    /// Reset the index
+    pub fn reset(&mut self) {
+        self.i = self.m;
+    }
+
+    /// Peek the next char
     pub fn peek(&mut self) -> Option<&char> {
         if self.i < self.chars.len() {
             Some(&self.chars[self.i])
         } else {
             None
         }
     }
 
+    /// Get the next char
     pub fn next(&mut self) -> Option<char> {
         if self.i < self.chars.len() {
             self.i += 1;
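The new mark/reset pair gives the cursor cheap backtracking: record the index, scan ahead speculatively, and rewind on failure. A minimal self-contained sketch, assuming an m: usize field (implied by self.m but declared outside the hunks shown) and a plausible completion of next(), which the hunk cuts off right after self.i += 1:

/// Stand-in for the CharSeq above; the `m` field is an assumption,
/// implied by `self.m` in mark()/reset() but not visible in this diff.
pub struct CharSeq {
    chars: Vec<char>,
    i: usize,
    m: usize,
}

impl CharSeq {
    pub fn new(sql: &str) -> Self {
        CharSeq { chars: sql.chars().collect(), i: 0, m: 0 }
    }

    pub fn mark(&mut self) { self.m = self.i; }
    pub fn reset(&mut self) { self.i = self.m; }

    pub fn peek(&mut self) -> Option<&char> {
        if self.i < self.chars.len() { Some(&self.chars[self.i]) } else { None }
    }

    /// Plausible completion: advance, then return the char just passed.
    pub fn next(&mut self) -> Option<char> {
        if self.i < self.chars.len() {
            self.i += 1;
            Some(self.chars[self.i - 1])
        } else {
            None
        }
    }
}

fn main() {
    let mut chars = CharSeq::new("<=");
    chars.mark();                         // remember the current index
    assert_eq!(chars.next(), Some('<'));
    assert_eq!(chars.next(), Some('='));  // speculative two-char read succeeds
    chars.reset();                        // or rewind and re-read from the mark
    assert_eq!(chars.peek(), Some(&'<'));
}

This is exactly the shape a tokenizer needs for multi-char operators like <= or !=: mark, try the long form, reset if it does not pan out.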
@@ -61,8 +72,8 @@ pub enum TokenizerError {
 #[derive(Debug,PartialEq)]
 pub enum SQLToken<T: Debug + PartialEq> {
     Whitespace(char),
-    Keyword(String), //TODO: &str ?
-    Identifier(String), //TODO: &str ?
+    Keyword(String),
+    Identifier(String),
     Literal(String), //TODO: need to model different types of literal
     Plus,
     Minus,
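Dropping the two //TODO: &str ? notes settles Keyword and Identifier on owned Strings. The type parameter T is the extension point for dialect-specific tokens; the variant that actually uses it sits outside this hunk, so the sketch below is trimmed to the visible variants plus a hypothetical Custom(T) stand-in:

use std::fmt::Debug;

#[derive(Debug, PartialEq)]
pub enum SQLToken<T: Debug + PartialEq> {
    Whitespace(char),
    Keyword(String),
    Identifier(String),
    Literal(String),
    Plus,
    Minus,
    Custom(T), // hypothetical stand-in for the variant that uses T
}

// A dialect with no extra tokens can plug in an uninhabited type.
#[derive(Debug, PartialEq)]
pub enum NoCustomToken {}

fn main() {
    let token: SQLToken<NoCustomToken> = SQLToken::Keyword("SELECT".to_string());
    // The PartialEq derive is what lets a parser compare lookahead tokens directly.
    assert_eq!(token, SQLToken::Keyword("SELECT".to_string()));
    println!("{:?}", token);
}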
@@ -89,28 +100,28 @@ pub trait SQLTokenizer<TokenType>
     fn precedence(&self, token: &SQLToken<TokenType>) -> usize;
 
     /// return a reference to the next token and advance the index
-    fn next_token(&mut self) -> Result<Option<SQLToken<TokenType>>, TokenizerError>;
+    fn next_token(&mut self, chars: &mut CharSeq) -> Result<Option<SQLToken<TokenType>>, TokenizerError>;
 }
 
-//
-//pub fn tokenize<TokenType>(sql: &str, tokenizer: &mut SQLTokenizer<TokenType>) -> Result<Vec<SQLToken<TokenType>>, TokenizerError<TokenType>>
-//    where TokenType: Debug + PartialEq
-//    {
-//
-//    let mut peekable = sql.chars().peekable();
-//
-//    let mut tokens : Vec<SQLToken<TokenType>> = vec![];
-//
-//    loop {
-//        match tokenizer.next_token(&mut peekable)? {
-//            Some(SQLToken::Whitespace(_)) => { /* ignore */ },
-//            Some(token) => {
-//                println!("Token: {:?}", token);
-//                tokens.push(token)
-//            },
-//            None => break
-//        }
-//    }
-//
-//    Ok(tokens)
-//}
+
+pub fn tokenize<TokenType>(sql: &str, tokenizer: &mut SQLTokenizer<TokenType>) -> Result<Vec<SQLToken<TokenType>>, TokenizerError>
+    where TokenType: Debug + PartialEq
+{
+
+    let mut chars = CharSeq::new(sql);
+
+    let mut tokens : Vec<SQLToken<TokenType>> = vec![];
+
+    loop {
+        match tokenizer.next_token(&mut chars)? {
+            Some(SQLToken::Whitespace(_)) => { /* ignore */ },
+            Some(token) => {
+                println!("Token: {:?}", token);
+                tokens.push(token)
+            },
+            None => break
+        }
+    }
+
+    Ok(tokens)
+}
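The trait change threads the shared CharSeq through next_token, and the new tokenize is the commented-out Peekable version revived on top of it. Below is a self-contained sketch of the round trip, under assumptions: the types are trimmed restatements of the hunks above, TokenizerError's shape is invented for the example, and ToyTokenizer is a hypothetical dialect handling only identifiers, '+', '-', and whitespace (the debug println from the diff is omitted):

use std::fmt::Debug;

// Trimmed restatement of CharSeq; mark/reset omitted since this sketch never rewinds.
pub struct CharSeq { chars: Vec<char>, i: usize }

impl CharSeq {
    pub fn new(sql: &str) -> Self { CharSeq { chars: sql.chars().collect(), i: 0 } }
    pub fn peek(&mut self) -> Option<&char> { self.chars.get(self.i) }
    pub fn next(&mut self) -> Option<char> {
        let c = self.chars.get(self.i).copied();
        if c.is_some() { self.i += 1; }
        c
    }
}

#[derive(Debug, PartialEq)]
pub enum SQLToken<T: Debug + PartialEq> {
    Whitespace(char),
    Identifier(String),
    Plus,
    Minus,
    Custom(T), // hypothetical stand-in for the variant that uses T
}

#[derive(Debug)]
pub struct TokenizerError(char); // invented shape, just for this sketch

pub trait SQLTokenizer<TokenType: Debug + PartialEq> {
    fn next_token(&mut self, chars: &mut CharSeq)
        -> Result<Option<SQLToken<TokenType>>, TokenizerError>;
}

// The driver loop from the diff above.
pub fn tokenize<TokenType: Debug + PartialEq>(
    sql: &str,
    tokenizer: &mut dyn SQLTokenizer<TokenType>,
) -> Result<Vec<SQLToken<TokenType>>, TokenizerError> {
    let mut chars = CharSeq::new(sql);
    let mut tokens = vec![];
    loop {
        match tokenizer.next_token(&mut chars)? {
            Some(SQLToken::Whitespace(_)) => { /* ignore */ }
            Some(token) => tokens.push(token),
            None => break,
        }
    }
    Ok(tokens)
}

#[derive(Debug, PartialEq)]
pub enum NoCustomToken {}

struct ToyTokenizer; // hypothetical dialect for the example

impl SQLTokenizer<NoCustomToken> for ToyTokenizer {
    fn next_token(&mut self, chars: &mut CharSeq)
        -> Result<Option<SQLToken<NoCustomToken>>, TokenizerError> {
        match chars.peek().copied() {
            None => Ok(None),
            Some(c) if c.is_whitespace() => { chars.next(); Ok(Some(SQLToken::Whitespace(c))) }
            Some('+') => { chars.next(); Ok(Some(SQLToken::Plus)) }
            Some('-') => { chars.next(); Ok(Some(SQLToken::Minus)) }
            Some(c) if c.is_alphabetic() => {
                let mut s = String::new();
                while let Some(&c) = chars.peek() {
                    if !c.is_alphanumeric() { break; }
                    s.push(c);
                    chars.next();
                }
                Ok(Some(SQLToken::Identifier(s)))
            }
            Some(c) => Err(TokenizerError(c)),
        }
    }
}

fn main() {
    let mut t = ToyTokenizer;
    let tokens = tokenize::<NoCustomToken>("a + b", &mut t).unwrap();
    assert_eq!(tokens, vec![
        SQLToken::Identifier("a".into()),
        SQLToken::Plus,
        SQLToken::Identifier("b".into()),
    ]);
    println!("{:?}", tokens);
}

Passing the cursor in as an argument keeps tokenizer implementations stateless: tokenize owns the loop and the position, and a caller could in principle hand the same CharSeq to a different tokenizer mid-stream.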