Mirror of https://github.com/apache/datafusion-sqlparser-rs.git, synced 2025-08-08 08:18:02 +00:00

cargo fmt

parent d58e59324b
commit cc725791de

4 changed files with 43 additions and 54 deletions
@@ -32,7 +32,6 @@
 //! println!("AST: {:?}", ast);
 //! ```
 
-
 extern crate fnv;
 
 #[macro_use]
@@ -54,15 +54,7 @@ pub enum ASTNode {
     /// Literal string
     SQLLiteralString(String),
     /// Scalar function call e.g. `LEFT(foo, 5)`
-    SQLFunction {
-        id: String,
-        args: Vec<ASTNode>,
-    },
-    /// Expression with ASC/DESC attribute e.g. `foo ASC` or `foo DESC`.
-    SQLOrderBy {
-        expr: Box<ASTNode>,
-        asc: bool,
-    },
+    SQLFunction { id: String, args: Vec<ASTNode> },
     /// SELECT
     SQLSelect {
         /// projection expressions
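For context, a minimal standalone sketch of the collapsed `SQLFunction` struct-variant syntax in use (only the two variants visible in this hunk, not the crate's full `ASTNode` enum):

#[derive(Debug, Clone, PartialEq)]
pub enum ASTNode {
    /// Literal string
    SQLLiteralString(String),
    /// Scalar function call e.g. `LEFT(foo, 5)`
    SQLFunction { id: String, args: Vec<ASTNode> },
}

fn main() {
    // The one-line variant declaration constructs and matches exactly like
    // the multi-line form rustfmt collapsed above.
    let call = ASTNode::SQLFunction {
        id: "LEFT".to_string(),
        args: vec![ASTNode::SQLLiteralString("foo".to_string())],
    };
    if let ASTNode::SQLFunction { id, args } = &call {
        println!("{} called with {} argument(s)", id, args.len());
    }
}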
@@ -121,19 +113,18 @@ pub enum ASTNode {
 #[derive(Debug, Clone, PartialEq)]
 pub struct SQLAssigment {
     id: String,
-    value: Box<ASTNode>
+    value: Box<ASTNode>,
 }
 
 /// SQL ORDER BY expression
 #[derive(Debug, Clone, PartialEq)]
 pub struct SQLOrderByExpr {
     pub expr: Box<ASTNode>,
-    pub asc: bool
+    pub asc: bool,
 }
 
 impl SQLOrderByExpr {
-    pub fn new(expr: Box<ASTNode>,
-               asc: bool) -> Self {
+    pub fn new(expr: Box<ASTNode>, asc: bool) -> Self {
         SQLOrderByExpr { expr, asc }
     }
 }
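Similarly, a sketch of the reformatted `SQLOrderByExpr` constructor in use; the one-variant `ASTNode` stub here is a placeholder for the crate's real enum:

#[derive(Debug, Clone, PartialEq)]
pub enum ASTNode {
    SQLLiteralString(String),
}

/// SQL ORDER BY expression
#[derive(Debug, Clone, PartialEq)]
pub struct SQLOrderByExpr {
    pub expr: Box<ASTNode>,
    pub asc: bool,
}

impl SQLOrderByExpr {
    // rustfmt joined the two-line signature into one line; behavior is unchanged.
    pub fn new(expr: Box<ASTNode>, asc: bool) -> Self {
        SQLOrderByExpr { expr, asc }
    }
}

fn main() {
    let order_by = SQLOrderByExpr::new(
        Box::new(ASTNode::SQLLiteralString("foo".to_string())),
        true,
    );
    println!("{:?}", order_by);
}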
@@ -442,8 +442,7 @@ impl Parser {
     }
 
     pub fn parse_delete(&mut self) -> Result<ASTNode, ParserError> {
-
         let relation: Option<Box<ASTNode>> = if self.parse_keyword("FROM") {
             Some(Box::new(self.parse_expr(0)?))
         } else {
             None
@@ -481,7 +480,6 @@ impl Parser {
             limit,
         })
     }
-
     }
 
     /// Parse a SELECT statement
@@ -590,7 +588,7 @@ impl Parser {
             None => true,
         };
 
-        expr_list.push(SQLOrderByExpr::new(Box::new(expr),asc));
+        expr_list.push(SQLOrderByExpr::new(Box::new(expr), asc));
 
         if let Some(t) = self.peek_token() {
             if t == Token::Comma {
@@ -624,17 +622,15 @@ mod tests {
 
     #[test]
     fn parse_delete_statement() {
-
         let sql: &str = "DELETE FROM 'table'";
-
         match parse_sql(&sql) {
-            ASTNode::SQLDelete {
-                relation,
-                ..
-            } => {
-                assert_eq!(Some(Box::new(ASTNode::SQLLiteralString("table".to_string()))), relation);
-            },
+            ASTNode::SQLDelete { relation, .. } => {
+                assert_eq!(
+                    Some(Box::new(ASTNode::SQLLiteralString("table".to_string()))),
+                    relation
+                );
+            }
 
             _ => assert!(false),
         }
     }
@@ -642,20 +638,21 @@ mod tests {
 
     #[test]
     fn parse_where_delete_statement() {
-
         let sql: &str = "DELETE FROM 'table' WHERE name = 5";
-
         use self::ASTNode::*;
         use self::SQLOperator::*;
 
         match parse_sql(&sql) {
 
             ASTNode::SQLDelete {
                 relation,
                 selection,
                 ..
             } => {
-                assert_eq!(Some(Box::new(ASTNode::SQLLiteralString("table".to_string()))), relation);
+                assert_eq!(
+                    Some(Box::new(ASTNode::SQLLiteralString("table".to_string()))),
+                    relation
+                );
 
                 assert_eq!(
                     SQLBinaryExpr {
@@ -665,8 +662,7 @@ mod tests {
                 },
                 *selection.unwrap(),
             );
-            },
-
+            }
 
             _ => assert!(false),
         }
@@ -932,17 +928,12 @@ mod tests {
         let sql = "SELECT 'one'";
         match parse_sql(&sql) {
             ASTNode::SQLSelect { ref projection, .. } => {
-                assert_eq!(
-                    projection[0],
-                    ASTNode::SQLLiteralString("one".to_string())
-                );
+                assert_eq!(projection[0], ASTNode::SQLLiteralString("one".to_string()));
             }
             _ => panic!(),
         }
-
     }
-
 
     #[test]
     fn parse_select_version() {
         let sql = "SELECT @@version";
@@ -162,13 +162,11 @@ impl Tokenizer {
         let mut tokens: Vec<Token> = vec![];
 
         while let Some(token) = self.next_token(&mut peekable)? {
-
             match &token {
-
                 Token::Whitespace('\n') => {
                     self.line += 1;
                     self.col = 1;
-                },
+                }
 
                 Token::Whitespace('\t') => self.col += 4,
                 Token::Identifier(s) => self.col += s.len() as u64,
@@ -179,7 +177,6 @@ impl Tokenizer {
             }
-
             tokens.push(token);
 
         }
 
         Ok(tokens
@@ -303,9 +300,15 @@ impl Tokenizer {
                         chars.next();
                         Ok(Some(Token::Neq))
                     }
-                    _ => Err(TokenizerError(format!("Tokenizer Error at Line: {}, Col: {}", self.line, self.col))),
+                    _ => Err(TokenizerError(format!(
+                        "Tokenizer Error at Line: {}, Col: {}",
+                        self.line, self.col
+                    ))),
                 },
-                None => Err(TokenizerError(format!("Tokenizer Error at Line: {}, Col: {}", self.line, self.col))),
+                None => Err(TokenizerError(format!(
+                    "Tokenizer Error at Line: {}, Col: {}",
+                    self.line, self.col
+                ))),
             }
         }
         '<' => {
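The reflowed `format!` calls in this hunk are purely cosmetic; a standalone sketch (with a local stand-in for the crate's `TokenizerError`) showing that both layouts produce identical messages:

#[derive(Debug, PartialEq)]
struct TokenizerError(String);

fn main() {
    let (line, col) = (2u64, 1u64);
    // One-line layout, as it appeared before `cargo fmt`.
    let before = TokenizerError(format!("Tokenizer Error at Line: {}, Col: {}", line, col));
    // Multi-line layout produced by rustfmt; same arguments, identical output.
    let after = TokenizerError(format!(
        "Tokenizer Error at Line: {}, Col: {}",
        line, col
    ));
    assert_eq!(before, after);
}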
@@ -340,9 +343,7 @@ impl Tokenizer {
                 }
                 _ => Err(TokenizerError(format!(
                     "Tokenizer Error at Line: {}, Column: {}, unhandled char '{}'",
-                    self.line,
-                    self.col,
-                    ch
+                    self.line, self.col, ch
                 ))),
             },
             None => Ok(None),
@@ -435,24 +436,31 @@ mod tests {
         let tokens = tokenizer.tokenize();
 
         match tokens {
-            Err(e) => assert_eq!(TokenizerError("Tokenizer Error at Line: 2, Column: 1, unhandled char \'م\'".to_string()), e),
+            Err(e) => assert_eq!(
+                TokenizerError(
+                    "Tokenizer Error at Line: 2, Column: 1, unhandled char \'م\'".to_string()
+                ),
+                e
+            ),
             _ => panic!("Test Failure in tokenize_invalid_string"),
         }
-
     }
 
     #[test]
     fn tokenize_invalid_string_cols() {
-
         let sql = String::from("\n\nSELECT * FROM table\tمصطفىh");
-
         let mut tokenizer = Tokenizer::new(&sql);
         let tokens = tokenizer.tokenize();
         match tokens {
-            Err(e) => assert_eq!(TokenizerError("Tokenizer Error at Line: 3, Column: 24, unhandled char \'م\'".to_string()), e),
+            Err(e) => assert_eq!(
+                TokenizerError(
+                    "Tokenizer Error at Line: 3, Column: 24, unhandled char \'م\'".to_string()
+                ),
+                e
+            ),
             _ => panic!("Test Failure in tokenize_invalid_string_cols"),
         }
 
     }
 
     #[test]