Merge pull request #51 from nickolay/rust-2018

Rust 2018 and clippy fixes
Andy Grove 2019-04-27 08:51:43 -06:00 committed by GitHub
commit 4cdd003cd4
16 changed files with 180 additions and 247 deletions

View file

@@ -12,6 +12,7 @@ include = [
     "src/**/*.rs",
     "Cargo.toml",
 ]
+edition = "2018"
 
 [lib]
 name = "sqlparser"
@@ -24,3 +25,4 @@ uuid = "0.7.1"
 
 [dev-dependencies]
 simple_logger = "1.0.1"
+matches = "0.1"

View file

@@ -1,5 +1,7 @@
-extern crate simple_logger;
-extern crate sqlparser;
+#![warn(clippy::all)]
+
+use simple_logger;
+
 ///! A small command-line app to run the parser.
 /// Run with `cargo run --example cli`
 use std::fs;
@@ -14,8 +16,8 @@ fn main() {
         .nth(1)
         .expect("No arguments provided!\n\nUsage: cargo run --example cli FILENAME.sql");
 
-    let contents =
-        fs::read_to_string(&filename).expect(&format!("Unable to read the file {}", &filename));
+    let contents = fs::read_to_string(&filename)
+        .unwrap_or_else(|_| panic!("Unable to read the file {}", &filename));
     let without_bom = if contents.chars().nth(0).unwrap() as u64 != 0xfeff {
         contents.as_str()
     } else {
@@ -31,7 +33,7 @@ fn main() {
         "Round-trip:\n'{}'",
         statements
             .iter()
-            .map(|s| s.to_string())
+            .map(std::string::ToString::to_string)
            .collect::<Vec<_>>()
            .join("\n")
     );
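The `expect(&format!(...))` → `unwrap_or_else(|_| panic!(...))` change is what clippy's `expect_fun_call` lint asks for: with `expect`, the formatted message `String` is allocated even when the read succeeds. A minimal, self-contained sketch of the same pattern (the file name here is just a placeholder, not taken from the PR):

```rust
use std::fs;

fn main() {
    let filename = "example.sql"; // placeholder input

    // Eager: the message String is built even on the success path.
    // let contents = fs::read_to_string(filename)
    //     .expect(&format!("Unable to read the file {}", filename));

    // Lazy: the panic message is only formatted if the read fails.
    let contents = fs::read_to_string(filename)
        .unwrap_or_else(|_| panic!("Unable to read the file {}", filename));

    println!("read {} bytes", contents.len());
}
```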

View file

@@ -1,4 +1,4 @@
-extern crate sqlparser;
+#![warn(clippy::all)]
 
 use sqlparser::dialect::GenericSqlDialect;
 use sqlparser::sqlparser::*;

View file

@@ -1,4 +1,4 @@
-use dialect::Dialect;
+use crate::dialect::Dialect;
 
 pub struct AnsiSqlDialect {}

View file

@@ -1,4 +1,4 @@
-use dialect::Dialect;
+use crate::dialect::Dialect;
 
 pub struct GenericSqlDialect {}
 impl Dialect for GenericSqlDialect {

View file

@@ -365,9 +365,9 @@ keyword!(
 );
 
 /// special case of keyword where the it is an invalid identifier
-pub const END_EXEC: &'static str = "END-EXEC";
+pub const END_EXEC: &str = "END-EXEC";
 
-pub const ALL_KEYWORDS: &'static [&'static str] = &[
+pub const ALL_KEYWORDS: &[&str] = &[
     ABS,
     ADD,
     ASC,
@@ -716,7 +716,7 @@ pub const ALL_KEYWORDS: &'static [&'static str] = &[
 
 /// These keywords can't be used as a table alias, so that `FROM table_name alias`
 /// can be parsed unambiguously without looking ahead.
-pub const RESERVED_FOR_TABLE_ALIAS: &'static [&'static str] = &[
+pub const RESERVED_FOR_TABLE_ALIAS: &[&str] = &[
     // Reserved as both a table and a column alias:
     WITH, SELECT, WHERE, GROUP, ORDER, UNION, EXCEPT, INTERSECT,
     // Reserved only as a table alias in the `FROM`/`JOIN` clauses:
@@ -725,7 +725,7 @@ pub const RESERVED_FOR_TABLE_ALIAS: &'static [&'static str] = &[
 
 /// Can't be used as a column alias, so that `SELECT <expr> alias`
 /// can be parsed unambiguously without looking ahead.
-pub const RESERVED_FOR_COLUMN_ALIAS: &'static [&'static str] = &[
+pub const RESERVED_FOR_COLUMN_ALIAS: &[&str] = &[
     // Reserved as both a table and a column alias:
     WITH, SELECT, WHERE, GROUP, ORDER, UNION, EXCEPT, INTERSECT,
     // Reserved only as a column alias in the `SELECT` clause:
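Dropping `'static` from these constants addresses the clippy lint for redundant static lifetimes on `const`/`static` items: the lifetime is implied there, so `&'static str` and `&str` mean the same thing. A tiny sketch:

```rust
// Before: the explicit lifetime is redundant on a const item.
// pub const END_EXEC: &'static str = "END-EXEC";

// After: `'static` is implied for constants.
pub const END_EXEC: &str = "END-EXEC";
pub const ALL_KEYWORDS: &[&str] = &[END_EXEC];

fn main() {
    assert!(ALL_KEYWORDS.contains(&"END-EXEC"));
}
```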

View file

@@ -1,4 +1,4 @@
-use dialect::Dialect;
+use crate::dialect::Dialect;
 
 pub struct PostgreSqlDialect {}

View file

@@ -34,11 +34,7 @@
 //!
 //! println!("AST: {:?}", ast);
 //! ```
-
-#[macro_use]
-extern crate log;
-extern crate chrono;
-extern crate uuid;
-
+#![warn(clippy::all)]
+
 pub mod dialect;
 pub mod sqlast;
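In the 2018 edition, crates listed in Cargo.toml are in scope without `extern crate`, and macros can be imported with a plain `use` instead of `#[macro_use]`, which is why the crate root shrinks to the lint attribute and the module declarations. A hedged sketch of the idiom (assumes a crate that depends on `log`):

```rust
// Rust 2015 style:
// #[macro_use]
// extern crate log;

// Rust 2018 style: import the macro like any other item.
use log::debug;

fn trace(sql: &str) {
    debug!("parsing: {}", sql);
}

fn main() {
    trace("SELECT 1");
}
```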

View file

@@ -202,7 +202,7 @@ impl ToString for ASTNode {
 #[derive(Debug, Clone, PartialEq)]
 pub enum SQLStatement {
     /// SELECT
-    SQLSelect(SQLQuery),
+    SQLQuery(Box<SQLQuery>),
     /// INSERT
     SQLInsert {
         /// TABLE
@@ -240,7 +240,7 @@ pub enum SQLStatement {
     SQLCreateView {
         /// View name
         name: SQLObjectName,
-        query: SQLQuery,
+        query: Box<SQLQuery>,
         materialized: bool,
     },
     /// CREATE TABLE
@@ -264,17 +264,17 @@ pub enum SQLStatement {
 impl ToString for SQLStatement {
     fn to_string(&self) -> String {
         match self {
-            SQLStatement::SQLSelect(s) => s.to_string(),
+            SQLStatement::SQLQuery(s) => s.to_string(),
             SQLStatement::SQLInsert {
                 table_name,
                 columns,
                 values,
             } => {
                 let mut s = format!("INSERT INTO {}", table_name.to_string());
-                if columns.len() > 0 {
+                if !columns.is_empty() {
                     s += &format!(" ({})", columns.join(", "));
                 }
-                if values.len() > 0 {
+                if !values.is_empty() {
                     s += &format!(
                         " VALUES({})",
                         values
@@ -307,12 +307,12 @@ impl ToString for SQLStatement {
                     );
                 }
                 s += " FROM stdin; ";
-                if values.len() > 0 {
+                if !values.is_empty() {
                     s += &format!(
                         "\n{}",
                         values
                             .iter()
-                            .map(|v| v.clone().unwrap_or("\\N".to_string()))
+                            .map(|v| v.clone().unwrap_or_else(|| "\\N".to_string()))
                             .collect::<Vec<String>>()
                             .join("\t")
                     );
@@ -381,13 +381,7 @@ impl ToString for SQLStatement {
                 file_format.as_ref().map(|f| f.to_string()).unwrap(),
                 location.as_ref().unwrap()
             ),
-            SQLStatement::SQLCreateTable {
-                name,
-                columns,
-                external: _,
-                file_format: _,
-                location: _,
-            } => format!(
+            SQLStatement::SQLCreateTable { name, columns, .. } => format!(
                 "CREATE TABLE {} ({})",
                 name.to_string(),
                 columns
@@ -483,7 +477,7 @@ impl ToString for FileFormat {
     }
 }
 
-use sqlparser::ParserError;
+use crate::sqlparser::ParserError;
 use std::str::FromStr;
 
 impl FromStr for FileFormat {
     type Err = ParserError;
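Boxing the query in `SQLStatement::SQLQuery(Box<SQLQuery>)` keeps the enum itself small: an enum is as large as its largest variant, and a full `SQLQuery` dwarfs the other variants, which is what clippy's `large_enum_variant` lint flags. A self-contained sketch with hypothetical stand-in types (not the crate's real AST):

```rust
#![allow(dead_code)]

struct Query {
    body: [u64; 16], // stand-in for a large parsed query
}

enum StatementInline {
    Query(Query), // the enum grows to the size of Query
    Drop,
}

enum StatementBoxed {
    Query(Box<Query>), // the enum only stores a pointer
    Drop,
}

fn main() {
    assert!(std::mem::size_of::<StatementBoxed>() < std::mem::size_of::<StatementInline>());
}
```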

View file

@@ -50,7 +50,7 @@ impl ToString for SQLQuery {
 #[derive(Debug, Clone, PartialEq)]
 pub enum SQLSetExpr {
     /// Restricted SELECT .. FROM .. HAVING (no ORDER BY or set operations)
-    Select(SQLSelect),
+    Select(Box<SQLSelect>),
     /// Parenthesized SELECT subquery, which may include more set operations
     /// in its body and an optional ORDER BY / LIMIT.
     Query(Box<SQLQuery>),

View file

@@ -19,14 +19,14 @@ pub enum SQLType {
     Blob(usize),
     /// Decimal type with optional precision and scale e.g. DECIMAL(10,2)
     Decimal(Option<usize>, Option<usize>),
+    /// Floating point with optional precision e.g. FLOAT(8)
+    Float(Option<usize>),
     /// Small integer
     SmallInt,
     /// Integer
     Int,
     /// Big integer
     BigInt,
-    /// Floating point with optional precision e.g. FLOAT(8)
-    Float(Option<usize>),
     /// Floating point e.g. REAL
     Real,
     /// Double e.g. DOUBLE PRECISION
@@ -54,20 +54,8 @@ pub enum SQLType {
 impl ToString for SQLType {
     fn to_string(&self) -> String {
         match self {
-            SQLType::Char(size) => {
-                if let Some(size) = size {
-                    format!("char({})", size)
-                } else {
-                    "char".to_string()
-                }
-            }
-            SQLType::Varchar(size) => {
-                if let Some(size) = size {
-                    format!("character varying({})", size)
-                } else {
-                    "character varying".to_string()
-                }
-            }
+            SQLType::Char(size) => format_type_with_optional_length("char", size),
+            SQLType::Varchar(size) => format_type_with_optional_length("character varying", size),
             SQLType::Uuid => "uuid".to_string(),
             SQLType::Clob(size) => format!("clob({})", size),
             SQLType::Binary(size) => format!("binary({})", size),
@@ -76,22 +64,14 @@ impl ToString for SQLType {
             SQLType::Decimal(precision, scale) => {
                 if let Some(scale) = scale {
                     format!("numeric({},{})", precision.unwrap(), scale)
-                } else if let Some(precision) = precision {
-                    format!("numeric({})", precision)
                 } else {
-                    format!("numeric")
+                    format_type_with_optional_length("numeric", precision)
                 }
             }
+            SQLType::Float(size) => format_type_with_optional_length("float", size),
             SQLType::SmallInt => "smallint".to_string(),
             SQLType::Int => "int".to_string(),
             SQLType::BigInt => "bigint".to_string(),
-            SQLType::Float(size) => {
-                if let Some(size) = size {
-                    format!("float({})", size)
-                } else {
-                    "float".to_string()
-                }
-            }
             SQLType::Real => "real".to_string(),
             SQLType::Double => "double".to_string(),
             SQLType::Boolean => "boolean".to_string(),
@@ -106,3 +86,11 @@ impl ToString for SQLType {
         }
     }
 }
+
+fn format_type_with_optional_length(sql_type: &str, len: &Option<usize>) -> String {
+    let mut s = sql_type.to_string();
+    if let Some(len) = len {
+        s += &format!("({})", len);
+    }
+    s
+}
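The new `format_type_with_optional_length` helper replaces four near-identical `if let Some(..)` blocks (CHAR, VARCHAR, FLOAT, and the precision-only NUMERIC case). Its behaviour, shown standalone with the body copied from the hunk above:

```rust
fn format_type_with_optional_length(sql_type: &str, len: &Option<usize>) -> String {
    let mut s = sql_type.to_string();
    if let Some(len) = len {
        s += &format!("({})", len);
    }
    s
}

fn main() {
    assert_eq!("char(10)", format_type_with_optional_length("char", &Some(10)));
    assert_eq!("float", format_type_with_optional_length("float", &None));
}
```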

View file

@@ -14,6 +14,8 @@
 //! SQL Parser
 
+use log::debug;
+
 use super::dialect::keywords;
 use super::dialect::Dialect;
 use super::sqlast::*;
@@ -47,14 +49,11 @@ pub struct Parser {
 impl Parser {
     /// Parse the specified tokens
     pub fn new(tokens: Vec<Token>) -> Self {
-        Parser {
-            tokens: tokens,
-            index: 0,
-        }
+        Parser { tokens, index: 0 }
     }
 
     /// Parse a SQL statement and produce an Abstract Syntax Tree (AST)
-    pub fn parse_sql(dialect: &Dialect, sql: String) -> Result<Vec<SQLStatement>, ParserError> {
+    pub fn parse_sql(dialect: &dyn Dialect, sql: String) -> Result<Vec<SQLStatement>, ParserError> {
         let mut tokenizer = Tokenizer::new(dialect, &sql);
         let tokens = tokenizer.tokenize()?;
         let mut parser = Parser::new(tokens);
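`&Dialect` becomes `&dyn Dialect` because Rust 2018 warns on bare trait objects (the `bare_trait_objects` lint); `dyn` makes the dynamic dispatch explicit without changing behaviour. A minimal sketch with a hypothetical trait, not the crate's real `Dialect`:

```rust
trait Dialect {
    fn is_identifier_start(&self, ch: char) -> bool;
}

struct GenericDialect;

impl Dialect for GenericDialect {
    fn is_identifier_start(&self, ch: char) -> bool {
        ch.is_ascii_alphabetic() || ch == '_'
    }
}

// 2015 style would be `dialect: &Dialect`; 2018 spells out the trait object.
fn starts_identifier(dialect: &dyn Dialect, ch: char) -> bool {
    dialect.is_identifier_start(ch)
}

fn main() {
    assert!(starts_identifier(&GenericDialect, '_'));
}
```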
@@ -91,7 +90,7 @@ impl Parser {
             Token::SQLWord(ref w) if w.keyword != "" => match w.keyword.as_ref() {
                 "SELECT" | "WITH" => {
                     self.prev_token();
-                    Ok(SQLStatement::SQLSelect(self.parse_query()?))
+                    Ok(SQLStatement::SQLQuery(Box::new(self.parse_query()?)))
                 }
                 "CREATE" => Ok(self.parse_create()?),
                 "DELETE" => Ok(self.parse_delete()?),
@@ -233,7 +232,7 @@ impl Parser {
                     t
                 )),
             },
-            None => parser_err!(format!("Prefix parser expected a keyword but hit EOF")),
+            None => parser_err!("Prefix parser expected a keyword but hit EOF"),
         }
     }
@@ -403,21 +402,21 @@ impl Parser {
     /// Convert a token operator to an AST operator
     pub fn to_sql_operator(&self, tok: &Token) -> Result<SQLOperator, ParserError> {
         match tok {
-            &Token::Eq => Ok(SQLOperator::Eq),
-            &Token::Neq => Ok(SQLOperator::NotEq),
-            &Token::Lt => Ok(SQLOperator::Lt),
-            &Token::LtEq => Ok(SQLOperator::LtEq),
-            &Token::Gt => Ok(SQLOperator::Gt),
-            &Token::GtEq => Ok(SQLOperator::GtEq),
-            &Token::Plus => Ok(SQLOperator::Plus),
-            &Token::Minus => Ok(SQLOperator::Minus),
-            &Token::Mult => Ok(SQLOperator::Multiply),
-            &Token::Div => Ok(SQLOperator::Divide),
-            &Token::Mod => Ok(SQLOperator::Modulus),
-            &Token::SQLWord(ref k) if k.keyword == "AND" => Ok(SQLOperator::And),
-            &Token::SQLWord(ref k) if k.keyword == "OR" => Ok(SQLOperator::Or),
-            //&Token::SQLWord(ref k) if k.keyword == "NOT" => Ok(SQLOperator::Not),
-            &Token::SQLWord(ref k) if k.keyword == "LIKE" => Ok(SQLOperator::Like),
+            Token::Eq => Ok(SQLOperator::Eq),
+            Token::Neq => Ok(SQLOperator::NotEq),
+            Token::Lt => Ok(SQLOperator::Lt),
+            Token::LtEq => Ok(SQLOperator::LtEq),
+            Token::Gt => Ok(SQLOperator::Gt),
+            Token::GtEq => Ok(SQLOperator::GtEq),
+            Token::Plus => Ok(SQLOperator::Plus),
+            Token::Minus => Ok(SQLOperator::Minus),
+            Token::Mult => Ok(SQLOperator::Multiply),
+            Token::Div => Ok(SQLOperator::Divide),
+            Token::Mod => Ok(SQLOperator::Modulus),
+            Token::SQLWord(ref k) if k.keyword == "AND" => Ok(SQLOperator::And),
+            Token::SQLWord(ref k) if k.keyword == "OR" => Ok(SQLOperator::Or),
+            //Token::SQLWord(ref k) if k.keyword == "NOT" => Ok(SQLOperator::Not),
+            Token::SQLWord(ref k) if k.keyword == "LIKE" => Ok(SQLOperator::Like),
             _ => parser_err!(format!("Unsupported SQL operator {:?}", tok)),
         }
     }
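Removing the leading `&` from every arm relies on match ergonomics (default binding modes): when the scrutinee is a reference, variant patterns match through it and bindings become references automatically. A self-contained sketch with a hypothetical token type:

```rust
#[derive(Debug)]
enum Token {
    Eq,
    Word(String),
}

fn describe(tok: &Token) -> String {
    // No `&Token::...` or `ref` needed: `tok` is a reference, so the
    // binding mode is inferred and `w` below is a `&String`.
    match tok {
        Token::Eq => "equals".to_string(),
        Token::Word(w) => format!("word: {}", w),
    }
}

fn main() {
    assert_eq!("equals", describe(&Token::Eq));
    assert_eq!("word: AND", describe(&Token::Word("AND".to_string())));
}
```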
@@ -436,19 +435,17 @@ impl Parser {
         debug!("get_precedence() {:?}", tok);
 
         match tok {
-            &Token::SQLWord(ref k) if k.keyword == "OR" => Ok(5),
-            &Token::SQLWord(ref k) if k.keyword == "AND" => Ok(10),
-            &Token::SQLWord(ref k) if k.keyword == "NOT" => Ok(15),
-            &Token::SQLWord(ref k) if k.keyword == "IS" => Ok(17),
-            &Token::SQLWord(ref k) if k.keyword == "IN" => Ok(20),
-            &Token::SQLWord(ref k) if k.keyword == "BETWEEN" => Ok(20),
-            &Token::SQLWord(ref k) if k.keyword == "LIKE" => Ok(20),
-            &Token::Eq | &Token::Lt | &Token::LtEq | &Token::Neq | &Token::Gt | &Token::GtEq => {
-                Ok(20)
-            }
-            &Token::Plus | &Token::Minus => Ok(30),
-            &Token::Mult | &Token::Div | &Token::Mod => Ok(40),
-            &Token::DoubleColon => Ok(50),
+            Token::SQLWord(k) if k.keyword == "OR" => Ok(5),
+            Token::SQLWord(k) if k.keyword == "AND" => Ok(10),
+            Token::SQLWord(k) if k.keyword == "NOT" => Ok(15),
+            Token::SQLWord(k) if k.keyword == "IS" => Ok(17),
+            Token::SQLWord(k) if k.keyword == "IN" => Ok(20),
+            Token::SQLWord(k) if k.keyword == "BETWEEN" => Ok(20),
+            Token::SQLWord(k) if k.keyword == "LIKE" => Ok(20),
+            Token::Eq | Token::Lt | Token::LtEq | Token::Neq | Token::Gt | Token::GtEq => Ok(20),
+            Token::Plus | Token::Minus => Ok(30),
+            Token::Mult | Token::Div | Token::Mod => Ok(40),
+            Token::DoubleColon => Ok(50),
             _ => Ok(0),
         }
     }
@@ -482,7 +479,7 @@ impl Parser {
         loop {
             match self.token_at(index) {
                 Some(Token::Whitespace(_)) => {
-                    index = index + 1;
+                    index += 1;
                 }
                 Some(_) => {
                     return Some(index);
@@ -505,7 +502,7 @@ impl Parser {
     pub fn next_token_no_skip(&mut self) -> Option<Token> {
         if self.index < self.tokens.len() {
-            self.index = self.index + 1;
+            self.index += 1;
             Some(self.tokens[self.index - 1].clone())
         } else {
             None
@@ -530,7 +527,7 @@ impl Parser {
     /// Get the previous token and decrement the token index
     fn prev_token_no_skip(&mut self) -> Option<Token> {
         if self.index > 0 {
-            self.index = self.index - 1;
+            self.index -= 1;
             Some(self.tokens[self.index].clone())
         } else {
             None
@@ -658,7 +655,7 @@ impl Parser {
         // Some dialects allow WITH here, followed by some keywords (e.g. MS SQL)
         // or `(k1=v1, k2=v2, ...)` (Postgres)
         self.expect_keyword("AS")?;
-        let query = self.parse_query()?;
+        let query = Box::new(self.parse_query()?);
         // Optional `WITH [ CASCADED | LOCAL ] CHECK OPTION` is widely supported here.
         Ok(SQLStatement::SQLCreateView {
             name,
@@ -701,9 +698,8 @@ impl Parser {
         };
         let allow_null = if self.parse_keywords(vec!["NOT", "NULL"]) {
             false
-        } else if self.parse_keyword("NULL") {
-            true
         } else {
+            let _ = self.parse_keyword("NULL");
             true
         };
         debug!("default: {:?}", default);
@@ -877,7 +873,7 @@ impl Parser {
                 }
             },
             //TODO: parse the timestamp here (see parse_timestamp_value())
-            Token::Number(ref n) if n.contains(".") => match n.parse::<f64>() {
+            Token::Number(ref n) if n.contains('.') => match n.parse::<f64>() {
                 Ok(n) => Ok(Value::Double(n)),
                 Err(e) => parser_err!(format!("Could not parse '{}' as f64: {}", n, e)),
             },
@@ -952,7 +948,7 @@ impl Parser {
             Some(token) => match token {
                 Token::Plus | Token::Minus => {
                     let tz = self.parse_timezone_offset()?;
-                    let offset = FixedOffset::east(tz as i32 * 3600);
+                    let offset = FixedOffset::east(i32::from(tz) * 3600);
                     Ok(Value::Timestamp(DateTime::from_utc(date_time, offset)))
                 }
                 _ => Ok(Value::DateTime(date_time)),
@@ -1014,11 +1010,8 @@ impl Parser {
             "FLOAT" => Ok(SQLType::Float(self.parse_optional_precision()?)),
             "REAL" => Ok(SQLType::Real),
             "DOUBLE" => {
-                if self.parse_keyword("PRECISION") {
-                    Ok(SQLType::Double)
-                } else {
-                    Ok(SQLType::Double)
-                }
+                let _ = self.parse_keyword("PRECISION");
+                Ok(SQLType::Double)
             }
             "SMALLINT" => Ok(SQLType::SmallInt),
             "INT" | "INTEGER" => Ok(SQLType::Int),
@@ -1034,50 +1027,20 @@ impl Parser {
             "UUID" => Ok(SQLType::Uuid),
             "DATE" => Ok(SQLType::Date),
             "TIMESTAMP" => {
-                if self.parse_keyword("WITH") {
-                    if self.parse_keywords(vec!["TIME", "ZONE"]) {
-                        Ok(SQLType::Timestamp)
-                    } else {
-                        parser_err!(format!(
-                            "Expecting 'time zone', found: {:?}",
-                            self.peek_token()
-                        ))
-                    }
-                } else if self.parse_keyword("WITHOUT") {
-                    if self.parse_keywords(vec!["TIME", "ZONE"]) {
-                        Ok(SQLType::Timestamp)
-                    } else {
-                        parser_err!(format!(
-                            "Expecting 'time zone', found: {:?}",
-                            self.peek_token()
-                        ))
-                    }
-                } else {
-                    Ok(SQLType::Timestamp)
+                // TBD: we throw away "with/without timezone" information
+                if self.parse_keyword("WITH") || self.parse_keyword("WITHOUT") {
+                    self.expect_keyword("TIME")?;
+                    self.expect_keyword("ZONE")?;
                 }
+                Ok(SQLType::Timestamp)
             }
             "TIME" => {
-                if self.parse_keyword("WITH") {
-                    if self.parse_keywords(vec!["TIME", "ZONE"]) {
-                        Ok(SQLType::Time)
-                    } else {
-                        parser_err!(format!(
-                            "Expecting 'time zone', found: {:?}",
-                            self.peek_token()
-                        ))
-                    }
-                } else if self.parse_keyword("WITHOUT") {
-                    if self.parse_keywords(vec!["TIME", "ZONE"]) {
-                        Ok(SQLType::Time)
-                    } else {
-                        parser_err!(format!(
-                            "Expecting 'time zone', found: {:?}",
-                            self.peek_token()
-                        ))
-                    }
-                } else {
-                    Ok(SQLType::Timestamp)
+                // TBD: we throw away "with/without timezone" information
+                if self.parse_keyword("WITH") || self.parse_keyword("WITHOUT") {
+                    self.expect_keyword("TIME")?;
+                    self.expect_keyword("ZONE")?;
                 }
+                Ok(SQLType::Time)
             }
             "REGCLASS" => Ok(SQLType::Regclass),
             "TEXT" => {
@@ -1288,7 +1251,7 @@ impl Parser {
                 break;
             }
         }
-        return Ok(cte);
+        Ok(cte)
     }
 
     /// Parse a "query body", which is an expression with roughly the
@@ -1303,7 +1266,7 @@ impl Parser {
         // We parse the expression using a Pratt parser, as in `parse_expr()`.
         // Start by parsing a restricted SELECT or a `(subquery)`:
         let mut expr = if self.parse_keyword("SELECT") {
-            SQLSetExpr::Select(self.parse_select()?)
+            SQLSetExpr::Select(Box::new(self.parse_select()?))
         } else if self.consume_token(&Token::LParen) {
             // CTEs are not allowed here, but the parser currently accepts them
             let subquery = self.parse_query()?;

View file

@@ -138,9 +138,9 @@ impl Token {
         let is_keyword = quote_style == None && ALL_KEYWORDS.contains(&word_uppercase.as_str());
         Token::SQLWord(SQLWord {
             value: word.to_string(),
-            quote_style: quote_style,
+            quote_style,
             keyword: if is_keyword {
-                word_uppercase.to_string()
+                word_uppercase
             } else {
                 "".to_string()
             },
@@ -212,7 +212,7 @@ pub struct TokenizerError(String);
 
 /// SQL Tokenizer
 pub struct Tokenizer<'a> {
-    dialect: &'a Dialect,
+    dialect: &'a dyn Dialect,
     pub query: String,
     pub line: u64,
     pub col: u64,
@@ -220,7 +220,7 @@ pub struct Tokenizer<'a> {
 impl<'a> Tokenizer<'a> {
     /// Create a new SQL tokenizer for the specified SQL statement
-    pub fn new(dialect: &'a Dialect, query: &str) -> Self {
+    pub fn new(dialect: &'a dyn Dialect, query: &str) -> Self {
         Self {
             dialect,
             query: query.to_string(),
@@ -256,7 +256,7 @@ impl<'a> Tokenizer<'a> {
     }
 
     /// Get the next token or return None
-    fn next_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<Token>, TokenizerError> {
+    fn next_token(&self, chars: &mut Peekable<Chars<'_>>) -> Result<Option<Token>, TokenizerError> {
         //println!("next_token: {:?}", chars.peek());
         match chars.peek() {
             Some(&ch) => match ch {
@@ -312,11 +312,11 @@ impl<'a> Tokenizer<'a> {
                     Ok(Some(Token::make_word(&s, Some(quote_start))))
                 }
                 // numbers
-                '0'...'9' => {
+                '0'..='9' => {
                     let mut s = String::new();
                     while let Some(&ch) = chars.peek() {
                         match ch {
-                            '0'...'9' | '.' => {
+                            '0'..='9' | '.' => {
                                 chars.next(); // consume
                                 s.push(ch);
                             }
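`'0'...'9'` becomes `'0'..='9'` because the three-dot form of an inclusive range pattern is deprecated; `..=` is the stable spelling in both patterns and expressions. A short sketch:

```rust
fn is_number_char(ch: char) -> bool {
    match ch {
        '0'..='9' | '.' => true, // inclusive range pattern, replaces '0'...'9'
        _ => false,
    }
}

fn main() {
    assert!(is_number_char('7'));
    assert!(is_number_char('.'));
    assert!(!is_number_char('x'));
}
```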
@@ -436,7 +436,7 @@ impl<'a> Tokenizer<'a> {
     }
 
     /// Tokenize an identifier or keyword, after the first char is already consumed.
-    fn tokenize_word(&self, first_char: char, chars: &mut Peekable<Chars>) -> String {
+    fn tokenize_word(&self, first_char: char, chars: &mut Peekable<Chars<'_>>) -> String {
         let mut s = String::new();
         s.push(first_char);
         while let Some(&ch) = chars.peek() {
@@ -451,7 +451,7 @@ impl<'a> Tokenizer<'a> {
     }
 
     /// Read a single quoted string, starting with the opening quote.
-    fn tokenize_single_quoted_string(&self, chars: &mut Peekable<Chars>) -> String {
+    fn tokenize_single_quoted_string(&self, chars: &mut Peekable<Chars<'_>>) -> String {
         //TODO: handle escaped quotes in string
         //TODO: handle newlines in string
         //TODO: handle EOF before terminating quote
@@ -475,7 +475,7 @@ impl<'a> Tokenizer<'a> {
     fn tokenize_multiline_comment(
         &self,
-        chars: &mut Peekable<Chars>,
+        chars: &mut Peekable<Chars<'_>>,
     ) -> Result<Option<Token>, TokenizerError> {
         let mut s = String::new();
         let mut maybe_closing_comment = false;
@@ -506,7 +506,7 @@ impl<'a> Tokenizer<'a> {
     fn consume_and_return(
         &self,
-        chars: &mut Peekable<Chars>,
+        chars: &mut Peekable<Chars<'_>>,
         t: Token,
     ) -> Result<Option<Token>, TokenizerError> {
         chars.next();
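The repeated `Peekable<Chars>` → `Peekable<Chars<'_>>` edits spell out the elided lifetime with `'_`, as the `rust_2018_idioms` group (`elided_lifetimes_in_paths`) recommends, making it visible that `Chars` borrows from the query string. A minimal sketch:

```rust
use std::iter::Peekable;
use std::str::Chars;

// With the anonymous lifetime it is obvious that the iterator borrows
// from the underlying &str; behaviour is identical to the elided form.
fn peek_first(chars: &mut Peekable<Chars<'_>>) -> Option<char> {
    chars.peek().copied()
}

fn main() {
    let query = "SELECT 1";
    let mut chars = query.chars().peekable();
    assert_eq!(Some('S'), peek_first(&mut chars));
}
```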

View file

@@ -1,5 +1,4 @@
-extern crate log;
-extern crate sqlparser;
+#![warn(clippy::all)]
 
 use sqlparser::dialect::AnsiSqlDialect;
 use sqlparser::sqlast::*;
@@ -8,15 +7,18 @@ use sqlparser::sqlparser::*;
 #[test]
 fn parse_simple_select() {
     let sql = String::from("SELECT id, fname, lname FROM customer WHERE id = 1");
-    let ast = Parser::parse_sql(&AnsiSqlDialect {}, sql).unwrap();
+    let mut ast = Parser::parse_sql(&AnsiSqlDialect {}, sql).unwrap();
     assert_eq!(1, ast.len());
-    match ast.first().unwrap() {
-        SQLStatement::SQLSelect(SQLQuery {
-            body: SQLSetExpr::Select(SQLSelect { projection, .. }),
-            ..
-        }) => {
-            assert_eq!(3, projection.len());
-        }
-        _ => assert!(false),
+    match ast.pop().unwrap() {
+        SQLStatement::SQLQuery(q) => match *q {
+            SQLQuery {
+                body: SQLSetExpr::Select(select),
+                ..
+            } => {
+                assert_eq!(3, select.projection.len());
+            }
+            _ => unreachable!(),
+        },
+        _ => unreachable!(),
     }
 }
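Because the statement now carries a `Box<SQLQuery>` and box patterns are unstable, the test pops the statement by value and matches `*q` in a nested match instead of destructuring everything in one pattern. A hedged, self-contained sketch of the same shape with stand-in types:

```rust
#![allow(dead_code)]

struct Query {
    projection: Vec<&'static str>,
}

enum Statement {
    Query(Box<Query>),
    Other,
}

fn main() {
    let mut ast = vec![Statement::Query(Box::new(Query {
        projection: vec!["id", "fname", "lname"],
    }))];

    // Take ownership of the statement, then match through the Box via `*q`.
    match ast.pop().unwrap() {
        Statement::Query(q) => match *q {
            Query { projection } => assert_eq!(3, projection.len()),
        },
        _ => unreachable!(),
    }
}
```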

View file

@@ -1,5 +1,6 @@
-extern crate log;
-extern crate sqlparser;
+#![warn(clippy::all)]
 
+use matches::assert_matches;
+
 use sqlparser::dialect::*;
 use sqlparser::sqlast::*;
@@ -13,8 +14,7 @@ fn parse_delete_statement() {
         SQLStatement::SQLDelete { table_name, .. } => {
             assert_eq!(SQLObjectName(vec!["\"table\"".to_string()]), table_name);
         }
-        _ => assert!(false),
+        _ => unreachable!(),
     }
 }
@@ -41,8 +41,7 @@ fn parse_where_delete_statement() {
                 selection.unwrap(),
             );
         }
-        _ => assert!(false),
+        _ => unreachable!(),
     }
 }
@@ -232,23 +231,17 @@ fn parse_not_precedence() {
     use self::ASTNode::*;
 
     // NOT has higher precedence than OR/AND, so the following must parse as (NOT true) OR true
     let sql = "NOT true OR true";
-    match verified_expr(sql) {
-        SQLBinaryExpr {
-            op: SQLOperator::Or,
-            ..
-        } => assert!(true),
-        _ => assert!(false),
-    };
+    assert_matches!(verified_expr(sql), SQLBinaryExpr {
+        op: SQLOperator::Or,
+        ..
+    });
 
     // But NOT has lower precedence than comparison operators, so the following parses as NOT (a IS NULL)
     let sql = "NOT a IS NULL";
-    match verified_expr(sql) {
-        SQLUnary {
-            operator: SQLOperator::Not,
-            ..
-        } => assert!(true),
-        _ => assert!(false),
-    };
+    assert_matches!(verified_expr(sql), SQLUnary {
+        operator: SQLOperator::Not,
+        ..
+    });
 }
 
 #[test]
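`assert_matches!` comes from the `matches` crate added to `[dev-dependencies]` above; it panics with a useful message when the expression does not match the pattern, replacing the `=> assert!(true)` / `_ => assert!(false)` boilerplate. A standalone usage sketch with a hypothetical expression type (requires the `matches` dependency):

```rust
use matches::assert_matches;

#[derive(Debug)]
enum Expr {
    Binary { op: char },
    Unary { op: char },
}

fn parsed() -> Expr {
    Expr::Binary { op: '+' } // stand-in for verified_expr(sql)
}

fn main() {
    // Passes: the expression matches the pattern.
    assert_matches!(parsed(), Expr::Binary { op: '+' });
    assert_matches!(Expr::Unary { op: '-' }, Expr::Unary { .. });
}
```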
@@ -449,9 +442,9 @@ fn parse_create_table() {
         SQLStatement::SQLCreateTable {
             name,
             columns,
-            external: _,
-            file_format: _,
-            location: _,
+            external: false,
+            file_format: None,
+            location: None,
         } => {
             assert_eq!("uk_cities", name.to_string());
             assert_eq!(3, columns.len());
@@ -471,7 +464,7 @@ fn parse_create_table() {
             assert_eq!(SQLType::Double, c_lng.data_type);
             assert_eq!(true, c_lng.allow_null);
         }
-        _ => assert!(false),
+        _ => unreachable!(),
     }
 }
@@ -522,7 +515,7 @@ fn parse_create_external_table() {
             assert_eq!(FileFormat::TEXTFILE, file_format.unwrap());
             assert_eq!("/tmp/example.csv", location.unwrap());
         }
-        _ => assert!(false),
+        _ => unreachable!(),
     }
 }
@@ -844,14 +837,13 @@ fn parse_ctes() {
         cte_sqls[0], cte_sqls[1]
     );
 
-    fn assert_ctes_in_select(expected: &Vec<&str>, sel: &SQLQuery) {
-        for i in 0..1 {
-            let Cte {
-                ref query,
-                ref alias,
-            } = sel.ctes[i];
-            assert_eq!(expected[i], query.to_string());
+    fn assert_ctes_in_select(expected: &[&str], sel: &SQLQuery) {
+        let mut i = 0;
+        for exp in expected {
+            let Cte { query, alias } = &sel.ctes[i];
+            assert_eq!(*exp, query.to_string());
             assert_eq!(if i == 0 { "a" } else { "b" }, alias);
+            i += 1;
         }
     }
@@ -861,7 +853,7 @@ fn parse_ctes() {
     let sql = &format!("SELECT ({})", with);
     let select = verified_only_select(sql);
     match expr_from_projection(only(&select.projection)) {
-        &ASTNode::SQLSubquery(ref subquery) => {
+        ASTNode::SQLSubquery(ref subquery) => {
             assert_ctes_in_select(&cte_sqls, subquery.as_ref());
         }
         _ => panic!("Expected subquery"),
@@ -956,14 +948,11 @@ fn parse_multiple_statements() {
 fn parse_scalar_subqueries() {
     use self::ASTNode::*;
     let sql = "(SELECT 1) + (SELECT 2)";
-    match verified_expr(sql) {
-        SQLBinaryExpr {
-            op: SQLOperator::Plus, ..
-            //left: box SQLSubquery { .. },
-            //right: box SQLSubquery { .. },
-        } => assert!(true),
-        _ => assert!(false),
-    };
+    assert_matches!(verified_expr(sql), SQLBinaryExpr {
+        op: SQLOperator::Plus, ..
+        //left: box SQLSubquery { .. },
+        //right: box SQLSubquery { .. },
+    });
 }
 
 #[test]
@@ -979,7 +968,7 @@ fn parse_create_view() {
             assert_eq!("SELECT foo FROM bar", query.to_string());
             assert!(!materialized);
         }
-        _ => assert!(false),
+        _ => unreachable!(),
     }
 }
@@ -996,7 +985,7 @@ fn parse_create_materialized_view() {
             assert_eq!("SELECT foo FROM bar", query.to_string());
             assert!(materialized);
         }
-        _ => assert!(false),
+        _ => unreachable!(),
     }
 }
@@ -1009,15 +998,15 @@ fn parse_invalid_subquery_without_parens() {
     );
 }
 
-fn only<'a, T>(v: &'a Vec<T>) -> &'a T {
+fn only<T>(v: &[T]) -> &T {
     assert_eq!(1, v.len());
     v.first().unwrap()
 }
 
 fn verified_query(query: &str) -> SQLQuery {
     match verified_stmt(query) {
-        SQLStatement::SQLSelect(select) => select,
-        _ => panic!("Expected SELECT"),
+        SQLStatement::SQLQuery(query) => *query,
+        _ => panic!("Expected SQLQuery"),
     }
 }
@@ -1030,7 +1019,7 @@ fn expr_from_projection(item: &SQLSelectItem) -> &ASTNode {
 fn verified_only_select(query: &str) -> SQLSelect {
     match verified_query(query).body {
-        SQLSetExpr::Select(s) => s,
+        SQLSetExpr::Select(s) => *s,
         _ => panic!("Expected SQLSetExpr::Select"),
     }
 }
@@ -1073,10 +1062,9 @@ fn parse_sql_expr(sql: &str) -> ASTNode {
     generic_ast
 }
 
-fn parse_sql_expr_with(dialect: &Dialect, sql: &str) -> ASTNode {
+fn parse_sql_expr_with(dialect: &dyn Dialect, sql: &str) -> ASTNode {
     let mut tokenizer = Tokenizer::new(dialect, &sql);
     let tokens = tokenizer.tokenize().unwrap();
     let mut parser = Parser::new(tokens);
-    let ast = parser.parse_expr().unwrap();
-    ast
+    parser.parse_expr().unwrap()
 }

View file

@@ -1,13 +1,12 @@
-extern crate log;
-extern crate sqlparser;
+#![warn(clippy::all)]
 
+use log::debug;
+
 use sqlparser::dialect::PostgreSqlDialect;
 use sqlparser::sqlast::*;
 use sqlparser::sqlparser::*;
 use sqlparser::sqltokenizer::*;
-
-use log::*;
 
 #[test]
 fn test_prev_index() {
     let sql: &str = "SELECT version()";
@@ -42,7 +41,7 @@ fn parse_simple_insert() {
                 values
             );
         }
-        _ => assert!(false),
+        _ => unreachable!(),
     }
 }
@@ -67,7 +66,7 @@ fn parse_common_insert() {
                 values
             );
         }
-        _ => assert!(false),
+        _ => unreachable!(),
     }
 }
@@ -92,7 +91,7 @@ fn parse_complex_insert() {
                 values
            );
         }
-        _ => assert!(false),
+        _ => unreachable!(),
     }
 }
@@ -134,7 +133,7 @@ fn parse_insert_with_columns() {
                 values
             );
         }
-        _ => assert!(false),
+        _ => unreachable!(),
     }
 }
@@ -143,7 +142,7 @@ fn parse_insert_invalid() {
     let sql = String::from("INSERT public.customer (id, name, active) VALUES (1, 2, 3)");
     match Parser::parse_sql(&PostgreSqlDialect {}, sql) {
         Err(_) => {}
-        _ => assert!(false),
+        _ => unreachable!(),
     }
 }
@@ -166,9 +165,9 @@ fn parse_create_table_with_defaults() {
         SQLStatement::SQLCreateTable {
             name,
             columns,
-            external: _,
-            file_format: _,
-            location: _,
+            external: false,
+            file_format: None,
+            location: None,
         } => {
             assert_eq!("public.customer", name.to_string());
             assert_eq!(10, columns.len());
@@ -188,7 +187,7 @@ fn parse_create_table_with_defaults() {
             assert_eq!(SQLType::Varchar(Some(45)), c_lng.data_type);
             assert_eq!(false, c_lng.allow_null);
         }
-        _ => assert!(false),
+        _ => unreachable!(),
     }
 }
@@ -213,9 +212,9 @@ fn parse_create_table_from_pg_dump() {
         SQLStatement::SQLCreateTable {
             name,
             columns,
-            external: _,
-            file_format: _,
-            location: _,
+            external: false,
+            file_format: None,
+            location: None,
         } => {
             assert_eq!("public.customer", name.to_string());
@@ -257,7 +256,7 @@ fn parse_create_table_from_pg_dump() {
                 c_release_year.data_type
             );
         }
-        _ => assert!(false),
+        _ => unreachable!(),
     }
 }
@@ -276,9 +275,9 @@ fn parse_create_table_with_inherit() {
         SQLStatement::SQLCreateTable {
             name,
             columns,
-            external: _,
-            file_format: _,
-            location: _,
+            external: false,
+            file_format: None,
+            location: None,
         } => {
             assert_eq!("bazaar.settings", name.to_string());
@@ -296,7 +295,7 @@ fn parse_create_table_with_inherit() {
             assert_eq!(false, c_name.is_primary);
             assert_eq!(true, c_name.is_unique);
         }
-        _ => assert!(false),
+        _ => unreachable!(),
     }
 }
@@ -311,7 +310,7 @@ fn parse_alter_table_constraint_primary_key() {
         SQLStatement::SQLAlterTable { name, .. } => {
             assert_eq!(name.to_string(), "bazaar.address");
         }
-        _ => assert!(false),
+        _ => unreachable!(),
     }
 }
@@ -324,7 +323,7 @@ fn parse_alter_table_constraint_foreign_key() {
         SQLStatement::SQLAlterTable { name, .. } => {
             assert_eq!(name.to_string(), "public.customer");
         }
-        _ => assert!(false),
+        _ => unreachable!(),
     }
 }
@@ -410,8 +409,7 @@ fn parse_sql_statements(sql: &str) -> Result<Vec<SQLStatement>, ParserError> {
 fn parse_sql_expr(sql: &str) -> ASTNode {
     debug!("sql: {}", sql);
     let mut parser = parser(sql);
-    let ast = parser.parse_expr().unwrap();
-    ast
+    parser.parse_expr().unwrap()
 }
 
 fn parser(sql: &str) -> Parser {
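The final hunks in both test files fix clippy's `let_and_return` lint: binding the result to a variable only to return it on the next line adds nothing, so the expression is returned directly. A tiny sketch:

```rust
fn trimmed_len(s: &str) -> usize {
    // Before (warns with clippy::let_and_return):
    // let len = s.trim().len();
    // len

    // After: return the expression directly.
    s.trim().len()
}

fn main() {
    assert_eq!(6, trimmed_len("  SELECT  "));
}
```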