cargo fmt

This commit is contained in:
Andy Grove 2018-09-08 08:10:05 -06:00
parent d58e59324b
commit cc725791de
4 changed files with 43 additions and 54 deletions

View file

@@ -32,7 +32,6 @@
//! println!("AST: {:?}", ast);
//! ```
extern crate fnv;
#[macro_use]

View file

@@ -54,15 +54,7 @@ pub enum ASTNode {
/// Literal string
SQLLiteralString(String),
/// Scalar function call e.g. `LEFT(foo, 5)`
SQLFunction {
id: String,
args: Vec<ASTNode>,
},
/// Expression with ASC/DESC attribute e.g. `foo ASC` or `foo DESC`.
SQLOrderBy {
expr: Box<ASTNode>,
asc: bool,
},
SQLFunction { id: String, args: Vec<ASTNode> },
/// SELECT
SQLSelect {
/// projection expressions
@@ -121,19 +113,18 @@ pub enum ASTNode {
#[derive(Debug, Clone, PartialEq)]
pub struct SQLAssigment {
id: String,
value: Box<ASTNode>
value: Box<ASTNode>,
}
/// SQL ORDER BY expression
#[derive(Debug, Clone, PartialEq)]
pub struct SQLOrderByExpr {
pub expr: Box<ASTNode>,
pub asc: bool
pub asc: bool,
}
impl SQLOrderByExpr {
pub fn new(expr: Box<ASTNode>,
asc: bool) -> Self {
pub fn new(expr: Box<ASTNode>, asc: bool) -> Self {
SQLOrderByExpr { expr, asc }
}
}

View file

@@ -442,7 +442,6 @@ impl Parser {
}
pub fn parse_delete(&mut self) -> Result<ASTNode, ParserError> {
let relation: Option<Box<ASTNode>> = if self.parse_keyword("FROM") {
Some(Box::new(self.parse_expr(0)?))
} else {
@@ -481,7 +480,6 @@ impl Parser {
limit,
})
}
}
/// Parse a SELECT statement
@@ -624,17 +622,15 @@ mod tests {
#[test]
fn parse_delete_statement() {
let sql: &str = "DELETE FROM 'table'";
match parse_sql(&sql) {
ASTNode::SQLDelete {
relation,
..
} => {
assert_eq!(Some(Box::new(ASTNode::SQLLiteralString("table".to_string()))), relation);
},
ASTNode::SQLDelete { relation, .. } => {
assert_eq!(
Some(Box::new(ASTNode::SQLLiteralString("table".to_string()))),
relation
);
}
_ => assert!(false),
}
@@ -642,20 +638,21 @@ mod tests {
#[test]
fn parse_where_delete_statement() {
let sql: &str = "DELETE FROM 'table' WHERE name = 5";
use self::ASTNode::*;
use self::SQLOperator::*;
match parse_sql(&sql) {
ASTNode::SQLDelete {
relation,
selection,
..
} => {
assert_eq!(Some(Box::new(ASTNode::SQLLiteralString("table".to_string()))), relation);
assert_eq!(
Some(Box::new(ASTNode::SQLLiteralString("table".to_string()))),
relation
);
assert_eq!(
SQLBinaryExpr {
@@ -665,8 +662,7 @@ mod tests {
},
*selection.unwrap(),
);
},
}
_ => assert!(false),
}
@@ -932,17 +928,12 @@ mod tests {
let sql = "SELECT 'one'";
match parse_sql(&sql) {
ASTNode::SQLSelect { ref projection, .. } => {
assert_eq!(
projection[0],
ASTNode::SQLLiteralString("one".to_string())
);
assert_eq!(projection[0], ASTNode::SQLLiteralString("one".to_string()));
}
_ => panic!(),
}
}
#[test]
fn parse_select_version() {
let sql = "SELECT @@version";

View file

@@ -162,13 +162,11 @@ impl Tokenizer {
let mut tokens: Vec<Token> = vec![];
while let Some(token) = self.next_token(&mut peekable)? {
match &token {
Token::Whitespace('\n') => {
self.line += 1;
self.col = 1;
},
}
Token::Whitespace('\t') => self.col += 4,
Token::Identifier(s) => self.col += s.len() as u64,
@@ -179,7 +177,6 @@ impl Tokenizer {
}
tokens.push(token);
}
Ok(tokens
@@ -303,9 +300,15 @@ impl Tokenizer {
chars.next();
Ok(Some(Token::Neq))
}
_ => Err(TokenizerError(format!("Tokenizer Error at Line: {}, Col: {}", self.line, self.col))),
_ => Err(TokenizerError(format!(
"Tokenizer Error at Line: {}, Col: {}",
self.line, self.col
))),
},
None => Err(TokenizerError(format!("Tokenizer Error at Line: {}, Col: {}", self.line, self.col))),
None => Err(TokenizerError(format!(
"Tokenizer Error at Line: {}, Col: {}",
self.line, self.col
))),
}
}
'<' => {
@@ -340,9 +343,7 @@ impl Tokenizer {
}
_ => Err(TokenizerError(format!(
"Tokenizer Error at Line: {}, Column: {}, unhandled char '{}'",
self.line,
self.col,
ch
self.line, self.col, ch
))),
},
None => Ok(None),
@@ -435,24 +436,31 @@ mod tests {
let tokens = tokenizer.tokenize();
match tokens {
Err(e) => assert_eq!(TokenizerError("Tokenizer Error at Line: 2, Column: 1, unhandled char \'م\'".to_string()), e),
Err(e) => assert_eq!(
TokenizerError(
"Tokenizer Error at Line: 2, Column: 1, unhandled char \'م\'".to_string()
),
e
),
_ => panic!("Test Failure in tokenize_invalid_string"),
}
}
#[test]
fn tokenize_invalid_string_cols() {
let sql = String::from("\n\nSELECT * FROM table\tمصطفىh");
let mut tokenizer = Tokenizer::new(&sql);
let tokens = tokenizer.tokenize();
match tokens {
Err(e) => assert_eq!(TokenizerError("Tokenizer Error at Line: 3, Column: 24, unhandled char \'م\'".to_string()), e),
Err(e) => assert_eq!(
TokenizerError(
"Tokenizer Error at Line: 3, Column: 24, unhandled char \'م\'".to_string()
),
e
),
_ => panic!("Test Failure in tokenize_invalid_string_cols"),
}
}
#[test]