Merge pull request #51 from nickolay/rust-2018

Rust 2018 and clippy fixes
Andy Grove 2019-04-27 08:51:43 -06:00 committed by GitHub
commit 4cdd003cd4
16 changed files with 180 additions and 247 deletions


@ -12,6 +12,7 @@ include = [
"src/**/*.rs",
"Cargo.toml",
]
edition = "2018"
[lib]
name = "sqlparser"
@ -24,3 +25,4 @@ uuid = "0.7.1"
[dev-dependencies]
simple_logger = "1.0.1"
matches = "0.1"
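
The `edition = "2018"` line above is what enables the rest of the diff: 2018-edition crates drop `extern crate` declarations and spell in-crate paths with a `crate::` prefix, as the later hunks do with `use crate::dialect::Dialect`. A minimal standalone sketch of that idiom (hypothetical module and type names, not code from this repo):

    mod dialect {
        pub trait Dialect {
            fn is_identifier_start(&self, ch: char) -> bool;
        }

        // Hypothetical dialect, for illustration only.
        pub struct GenericDialect;

        impl Dialect for GenericDialect {
            fn is_identifier_start(&self, ch: char) -> bool {
                ch.is_ascii_alphabetic() || ch == '_'
            }
        }
    }

    mod tokenizer {
        // Rust 2018 in-crate path; the 2015 edition spelled this `use dialect::Dialect;`.
        use crate::dialect::Dialect;

        pub fn starts_identifier(d: &dyn Dialect, ch: char) -> bool {
            d.is_identifier_start(ch)
        }
    }

    fn main() {
        let d = dialect::GenericDialect;
        assert!(tokenizer::starts_identifier(&d, '_'));
        assert!(!tokenizer::starts_identifier(&d, '7'));
    }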


@ -1,5 +1,7 @@
extern crate simple_logger;
extern crate sqlparser;
#![warn(clippy::all)]
use simple_logger;
///! A small command-line app to run the parser.
/// Run with `cargo run --example cli`
use std::fs;
@ -14,8 +16,8 @@ fn main() {
.nth(1)
.expect("No arguments provided!\n\nUsage: cargo run --example cli FILENAME.sql");
let contents =
fs::read_to_string(&filename).expect(&format!("Unable to read the file {}", &filename));
let contents = fs::read_to_string(&filename)
.unwrap_or_else(|_| panic!("Unable to read the file {}", &filename));
let without_bom = if contents.chars().nth(0).unwrap() as u64 != 0xfeff {
contents.as_str()
} else {
@ -31,7 +33,7 @@ fn main() {
"Round-trip:\n'{}'",
statements
.iter()
.map(|s| s.to_string())
.map(std::string::ToString::to_string)
.collect::<Vec<_>>()
.join("\n")
);
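
The cli.rs hunk swaps `.expect(&format!(...))` for `.unwrap_or_else(|_| panic!(...))`, which is what clippy suggests because the former builds the panic message even when the read succeeds. A small self-contained sketch of the two forms (the filename is a placeholder, and the program panics if that file is missing):

    use std::fs;

    fn main() {
        let filename = "example.sql"; // placeholder path, not a file from this repo

        // Before (the message is formatted even on the success path):
        // let contents = fs::read_to_string(filename)
        //     .expect(&format!("Unable to read the file {}", filename));

        // After: the panic message is only built if the read actually fails.
        let contents = fs::read_to_string(filename)
            .unwrap_or_else(|_| panic!("Unable to read the file {}", filename));

        println!("read {} bytes", contents.len());
    }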


@ -1,4 +1,4 @@
extern crate sqlparser;
#![warn(clippy::all)]
use sqlparser::dialect::GenericSqlDialect;
use sqlparser::sqlparser::*;


@ -1,4 +1,4 @@
use dialect::Dialect;
use crate::dialect::Dialect;
pub struct AnsiSqlDialect {}


@ -1,4 +1,4 @@
use dialect::Dialect;
use crate::dialect::Dialect;
pub struct GenericSqlDialect {}
impl Dialect for GenericSqlDialect {


@ -365,9 +365,9 @@ keyword!(
);
/// special case of a keyword where it is an invalid identifier
pub const END_EXEC: &'static str = "END-EXEC";
pub const END_EXEC: &str = "END-EXEC";
pub const ALL_KEYWORDS: &'static [&'static str] = &[
pub const ALL_KEYWORDS: &[&str] = &[
ABS,
ADD,
ASC,
@ -716,7 +716,7 @@ pub const ALL_KEYWORDS: &'static [&'static str] = &[
/// These keywords can't be used as a table alias, so that `FROM table_name alias`
/// can be parsed unambiguously without looking ahead.
pub const RESERVED_FOR_TABLE_ALIAS: &'static [&'static str] = &[
pub const RESERVED_FOR_TABLE_ALIAS: &[&str] = &[
// Reserved as both a table and a column alias:
WITH, SELECT, WHERE, GROUP, ORDER, UNION, EXCEPT, INTERSECT,
// Reserved only as a table alias in the `FROM`/`JOIN` clauses:
@ -725,7 +725,7 @@ pub const RESERVED_FOR_TABLE_ALIAS: &'static [&'static str] = &[
/// Can't be used as a column alias, so that `SELECT <expr> alias`
/// can be parsed unambiguously without looking ahead.
pub const RESERVED_FOR_COLUMN_ALIAS: &'static [&'static str] = &[
pub const RESERVED_FOR_COLUMN_ALIAS: &[&str] = &[
// Reserved as both a table and a column alias:
WITH, SELECT, WHERE, GROUP, ORDER, UNION, EXCEPT, INTERSECT,
// Reserved only as a column alias in the `SELECT` clause:
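
The keyword constants drop their explicit `'static` lifetimes: for a `const`, the lifetime of a string slice is always `'static`, so clippy flags the annotation as redundant. A tiny illustration (the constant names here are made up):

    // Both constants have the same type and value; the explicit `'static` on the
    // first is what clippy warns about.
    pub const END_EXEC_OLD: &'static str = "END-EXEC";
    pub const END_EXEC_NEW: &str = "END-EXEC";

    pub const SOME_KEYWORDS: &[&str] = &["SELECT", "WITH", "WHERE"];

    fn main() {
        assert_eq!(END_EXEC_OLD, END_EXEC_NEW);
        assert!(SOME_KEYWORDS.contains(&"SELECT"));
    }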


@ -1,4 +1,4 @@
use dialect::Dialect;
use crate::dialect::Dialect;
pub struct PostgreSqlDialect {}


@ -34,11 +34,7 @@
//!
//! println!("AST: {:?}", ast);
//! ```
#[macro_use]
extern crate log;
extern crate chrono;
extern crate uuid;
#![warn(clippy::all)]
pub mod dialect;
pub mod sqlast;
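
With `#![warn(clippy::all)]` replacing the `#[macro_use] extern crate log;` block, macros such as `debug!` are now brought in with an ordinary `use`, as the sqlparser.rs hunk further down does with `use log::debug;`. A minimal sketch, assuming only a dependency on the `log` crate:

    // Rust 2018: import the macro with a plain `use` instead of
    // `#[macro_use] extern crate log;` at the crate root.
    use log::debug;

    fn tokenize(sql: &str) {
        // With no logger installed this is a no-op, but it compiles and runs.
        debug!("tokenizing: {}", sql);
    }

    fn main() {
        tokenize("SELECT 1");
    }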


@ -202,7 +202,7 @@ impl ToString for ASTNode {
#[derive(Debug, Clone, PartialEq)]
pub enum SQLStatement {
/// SELECT
SQLSelect(SQLQuery),
SQLQuery(Box<SQLQuery>),
/// INSERT
SQLInsert {
/// TABLE
@ -240,7 +240,7 @@ pub enum SQLStatement {
SQLCreateView {
/// View name
name: SQLObjectName,
query: SQLQuery,
query: Box<SQLQuery>,
materialized: bool,
},
/// CREATE TABLE
@ -264,17 +264,17 @@ pub enum SQLStatement {
impl ToString for SQLStatement {
fn to_string(&self) -> String {
match self {
SQLStatement::SQLSelect(s) => s.to_string(),
SQLStatement::SQLQuery(s) => s.to_string(),
SQLStatement::SQLInsert {
table_name,
columns,
values,
} => {
let mut s = format!("INSERT INTO {}", table_name.to_string());
if columns.len() > 0 {
if !columns.is_empty() {
s += &format!(" ({})", columns.join(", "));
}
if values.len() > 0 {
if !values.is_empty() {
s += &format!(
" VALUES({})",
values
@ -307,12 +307,12 @@ impl ToString for SQLStatement {
);
}
s += " FROM stdin; ";
if values.len() > 0 {
if !values.is_empty() {
s += &format!(
"\n{}",
values
.iter()
.map(|v| v.clone().unwrap_or("\\N".to_string()))
.map(|v| v.clone().unwrap_or_else(|| "\\N".to_string()))
.collect::<Vec<String>>()
.join("\t")
);
@ -381,13 +381,7 @@ impl ToString for SQLStatement {
file_format.as_ref().map(|f| f.to_string()).unwrap(),
location.as_ref().unwrap()
),
SQLStatement::SQLCreateTable {
name,
columns,
external: _,
file_format: _,
location: _,
} => format!(
SQLStatement::SQLCreateTable { name, columns, .. } => format!(
"CREATE TABLE {} ({})",
name.to_string(),
columns
@ -483,7 +477,7 @@ impl ToString for FileFormat {
}
}
use sqlparser::ParserError;
use crate::sqlparser::ParserError;
use std::str::FromStr;
impl FromStr for FileFormat {
type Err = ParserError;
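
`SQLSelect(SQLQuery)` becomes `SQLQuery(Box<SQLQuery>)`: besides the rename, boxing the large query payload keeps every `SQLStatement` value small, which is what clippy's large-enum-variant lint asks for. A simplified sketch of the pattern (toy types, not the real AST):

    #[derive(Debug)]
    struct Query {
        body: String,
        limit: Option<u64>,
        // ...imagine many more fields here, making the struct large
    }

    #[derive(Debug)]
    #[allow(dead_code)]
    enum Statement {
        // Boxing keeps `Statement` itself small (roughly pointer-sized for this
        // variant) instead of as large as its biggest payload.
        Query(Box<Query>),
        Drop { name: String },
    }

    fn main() {
        let stmt = Statement::Query(Box::new(Query {
            body: "SELECT 1".into(),
            limit: None,
        }));
        if let Statement::Query(q) = stmt {
            // The Box dereferences transparently when reading fields.
            println!("{} (limit: {:?})", q.body, q.limit);
        }
    }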


@ -50,7 +50,7 @@ impl ToString for SQLQuery {
#[derive(Debug, Clone, PartialEq)]
pub enum SQLSetExpr {
/// Restricted SELECT .. FROM .. HAVING (no ORDER BY or set operations)
Select(SQLSelect),
Select(Box<SQLSelect>),
/// Parenthesized SELECT subquery, which may include more set operations
/// in its body and an optional ORDER BY / LIMIT.
Query(Box<SQLQuery>),


@ -19,14 +19,14 @@ pub enum SQLType {
Blob(usize),
/// Decimal type with optional precision and scale e.g. DECIMAL(10,2)
Decimal(Option<usize>, Option<usize>),
/// Floating point with optional precision e.g. FLOAT(8)
Float(Option<usize>),
/// Small integer
SmallInt,
/// Integer
Int,
/// Big integer
BigInt,
/// Floating point with optional precision e.g. FLOAT(8)
Float(Option<usize>),
/// Floating point e.g. REAL
Real,
/// Double e.g. DOUBLE PRECISION
@ -54,20 +54,8 @@ pub enum SQLType {
impl ToString for SQLType {
fn to_string(&self) -> String {
match self {
SQLType::Char(size) => {
if let Some(size) = size {
format!("char({})", size)
} else {
"char".to_string()
}
}
SQLType::Varchar(size) => {
if let Some(size) = size {
format!("character varying({})", size)
} else {
"character varying".to_string()
}
}
SQLType::Char(size) => format_type_with_optional_length("char", size),
SQLType::Varchar(size) => format_type_with_optional_length("character varying", size),
SQLType::Uuid => "uuid".to_string(),
SQLType::Clob(size) => format!("clob({})", size),
SQLType::Binary(size) => format!("binary({})", size),
@ -76,22 +64,14 @@ impl ToString for SQLType {
SQLType::Decimal(precision, scale) => {
if let Some(scale) = scale {
format!("numeric({},{})", precision.unwrap(), scale)
} else if let Some(precision) = precision {
format!("numeric({})", precision)
} else {
format!("numeric")
format_type_with_optional_length("numeric", precision)
}
}
SQLType::Float(size) => format_type_with_optional_length("float", size),
SQLType::SmallInt => "smallint".to_string(),
SQLType::Int => "int".to_string(),
SQLType::BigInt => "bigint".to_string(),
SQLType::Float(size) => {
if let Some(size) = size {
format!("float({})", size)
} else {
"float".to_string()
}
}
SQLType::Real => "real".to_string(),
SQLType::Double => "double".to_string(),
SQLType::Boolean => "boolean".to_string(),
@ -106,3 +86,11 @@ impl ToString for SQLType {
}
}
}
fn format_type_with_optional_length(sql_type: &str, len: &Option<usize>) -> String {
let mut s = sql_type.to_string();
if let Some(len) = len {
s += &format!("({})", len);
}
s
}
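
The new `format_type_with_optional_length` helper replaces four near-identical `if let Some(size)` blocks. It can be exercised on its own; a minimal check of the two shapes it produces:

    fn format_type_with_optional_length(sql_type: &str, len: &Option<usize>) -> String {
        let mut s = sql_type.to_string();
        if let Some(len) = len {
            s += &format!("({})", len);
        }
        s
    }

    fn main() {
        assert_eq!("char(10)", format_type_with_optional_length("char", &Some(10)));
        assert_eq!("float", format_type_with_optional_length("float", &None));
    }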


@ -14,6 +14,8 @@
//! SQL Parser
use log::debug;
use super::dialect::keywords;
use super::dialect::Dialect;
use super::sqlast::*;
@ -47,14 +49,11 @@ pub struct Parser {
impl Parser {
/// Parse the specified tokens
pub fn new(tokens: Vec<Token>) -> Self {
Parser {
tokens: tokens,
index: 0,
}
Parser { tokens, index: 0 }
}
/// Parse a SQL statement and produce an Abstract Syntax Tree (AST)
pub fn parse_sql(dialect: &Dialect, sql: String) -> Result<Vec<SQLStatement>, ParserError> {
pub fn parse_sql(dialect: &dyn Dialect, sql: String) -> Result<Vec<SQLStatement>, ParserError> {
let mut tokenizer = Tokenizer::new(dialect, &sql);
let tokens = tokenizer.tokenize()?;
let mut parser = Parser::new(tokens);
@ -91,7 +90,7 @@ impl Parser {
Token::SQLWord(ref w) if w.keyword != "" => match w.keyword.as_ref() {
"SELECT" | "WITH" => {
self.prev_token();
Ok(SQLStatement::SQLSelect(self.parse_query()?))
Ok(SQLStatement::SQLQuery(Box::new(self.parse_query()?)))
}
"CREATE" => Ok(self.parse_create()?),
"DELETE" => Ok(self.parse_delete()?),
@ -233,7 +232,7 @@ impl Parser {
t
)),
},
None => parser_err!(format!("Prefix parser expected a keyword but hit EOF")),
None => parser_err!("Prefix parser expected a keyword but hit EOF"),
}
}
@ -403,21 +402,21 @@ impl Parser {
/// Convert a token operator to an AST operator
pub fn to_sql_operator(&self, tok: &Token) -> Result<SQLOperator, ParserError> {
match tok {
&Token::Eq => Ok(SQLOperator::Eq),
&Token::Neq => Ok(SQLOperator::NotEq),
&Token::Lt => Ok(SQLOperator::Lt),
&Token::LtEq => Ok(SQLOperator::LtEq),
&Token::Gt => Ok(SQLOperator::Gt),
&Token::GtEq => Ok(SQLOperator::GtEq),
&Token::Plus => Ok(SQLOperator::Plus),
&Token::Minus => Ok(SQLOperator::Minus),
&Token::Mult => Ok(SQLOperator::Multiply),
&Token::Div => Ok(SQLOperator::Divide),
&Token::Mod => Ok(SQLOperator::Modulus),
&Token::SQLWord(ref k) if k.keyword == "AND" => Ok(SQLOperator::And),
&Token::SQLWord(ref k) if k.keyword == "OR" => Ok(SQLOperator::Or),
//&Token::SQLWord(ref k) if k.keyword == "NOT" => Ok(SQLOperator::Not),
&Token::SQLWord(ref k) if k.keyword == "LIKE" => Ok(SQLOperator::Like),
Token::Eq => Ok(SQLOperator::Eq),
Token::Neq => Ok(SQLOperator::NotEq),
Token::Lt => Ok(SQLOperator::Lt),
Token::LtEq => Ok(SQLOperator::LtEq),
Token::Gt => Ok(SQLOperator::Gt),
Token::GtEq => Ok(SQLOperator::GtEq),
Token::Plus => Ok(SQLOperator::Plus),
Token::Minus => Ok(SQLOperator::Minus),
Token::Mult => Ok(SQLOperator::Multiply),
Token::Div => Ok(SQLOperator::Divide),
Token::Mod => Ok(SQLOperator::Modulus),
Token::SQLWord(ref k) if k.keyword == "AND" => Ok(SQLOperator::And),
Token::SQLWord(ref k) if k.keyword == "OR" => Ok(SQLOperator::Or),
//Token::SQLWord(ref k) if k.keyword == "NOT" => Ok(SQLOperator::Not),
Token::SQLWord(ref k) if k.keyword == "LIKE" => Ok(SQLOperator::Like),
_ => parser_err!(format!("Unsupported SQL operator {:?}", tok)),
}
}
@ -436,19 +435,17 @@ impl Parser {
debug!("get_precedence() {:?}", tok);
match tok {
&Token::SQLWord(ref k) if k.keyword == "OR" => Ok(5),
&Token::SQLWord(ref k) if k.keyword == "AND" => Ok(10),
&Token::SQLWord(ref k) if k.keyword == "NOT" => Ok(15),
&Token::SQLWord(ref k) if k.keyword == "IS" => Ok(17),
&Token::SQLWord(ref k) if k.keyword == "IN" => Ok(20),
&Token::SQLWord(ref k) if k.keyword == "BETWEEN" => Ok(20),
&Token::SQLWord(ref k) if k.keyword == "LIKE" => Ok(20),
&Token::Eq | &Token::Lt | &Token::LtEq | &Token::Neq | &Token::Gt | &Token::GtEq => {
Ok(20)
}
&Token::Plus | &Token::Minus => Ok(30),
&Token::Mult | &Token::Div | &Token::Mod => Ok(40),
&Token::DoubleColon => Ok(50),
Token::SQLWord(k) if k.keyword == "OR" => Ok(5),
Token::SQLWord(k) if k.keyword == "AND" => Ok(10),
Token::SQLWord(k) if k.keyword == "NOT" => Ok(15),
Token::SQLWord(k) if k.keyword == "IS" => Ok(17),
Token::SQLWord(k) if k.keyword == "IN" => Ok(20),
Token::SQLWord(k) if k.keyword == "BETWEEN" => Ok(20),
Token::SQLWord(k) if k.keyword == "LIKE" => Ok(20),
Token::Eq | Token::Lt | Token::LtEq | Token::Neq | Token::Gt | Token::GtEq => Ok(20),
Token::Plus | Token::Minus => Ok(30),
Token::Mult | Token::Div | Token::Mod => Ok(40),
Token::DoubleColon => Ok(50),
_ => Ok(0),
}
}
@ -482,7 +479,7 @@ impl Parser {
loop {
match self.token_at(index) {
Some(Token::Whitespace(_)) => {
index = index + 1;
index += 1;
}
Some(_) => {
return Some(index);
@ -505,7 +502,7 @@ impl Parser {
pub fn next_token_no_skip(&mut self) -> Option<Token> {
if self.index < self.tokens.len() {
self.index = self.index + 1;
self.index += 1;
Some(self.tokens[self.index - 1].clone())
} else {
None
@ -530,7 +527,7 @@ impl Parser {
/// Get the previous token and decrement the token index
fn prev_token_no_skip(&mut self) -> Option<Token> {
if self.index > 0 {
self.index = self.index - 1;
self.index -= 1;
Some(self.tokens[self.index].clone())
} else {
None
@ -658,7 +655,7 @@ impl Parser {
// Some dialects allow WITH here, followed by some keywords (e.g. MS SQL)
// or `(k1=v1, k2=v2, ...)` (Postgres)
self.expect_keyword("AS")?;
let query = self.parse_query()?;
let query = Box::new(self.parse_query()?);
// Optional `WITH [ CASCADED | LOCAL ] CHECK OPTION` is widely supported here.
Ok(SQLStatement::SQLCreateView {
name,
@ -701,9 +698,8 @@ impl Parser {
};
let allow_null = if self.parse_keywords(vec!["NOT", "NULL"]) {
false
} else if self.parse_keyword("NULL") {
true
} else {
let _ = self.parse_keyword("NULL");
true
};
debug!("default: {:?}", default);
@ -877,7 +873,7 @@ impl Parser {
}
},
//TODO: parse the timestamp here (see parse_timestamp_value())
Token::Number(ref n) if n.contains(".") => match n.parse::<f64>() {
Token::Number(ref n) if n.contains('.') => match n.parse::<f64>() {
Ok(n) => Ok(Value::Double(n)),
Err(e) => parser_err!(format!("Could not parse '{}' as f64: {}", n, e)),
},
@ -952,7 +948,7 @@ impl Parser {
Some(token) => match token {
Token::Plus | Token::Minus => {
let tz = self.parse_timezone_offset()?;
let offset = FixedOffset::east(tz as i32 * 3600);
let offset = FixedOffset::east(i32::from(tz) * 3600);
Ok(Value::Timestamp(DateTime::from_utc(date_time, offset)))
}
_ => Ok(Value::DateTime(date_time)),
@ -1014,11 +1010,8 @@ impl Parser {
"FLOAT" => Ok(SQLType::Float(self.parse_optional_precision()?)),
"REAL" => Ok(SQLType::Real),
"DOUBLE" => {
if self.parse_keyword("PRECISION") {
Ok(SQLType::Double)
} else {
Ok(SQLType::Double)
}
let _ = self.parse_keyword("PRECISION");
Ok(SQLType::Double)
}
"SMALLINT" => Ok(SQLType::SmallInt),
"INT" | "INTEGER" => Ok(SQLType::Int),
@ -1034,50 +1027,20 @@ impl Parser {
"UUID" => Ok(SQLType::Uuid),
"DATE" => Ok(SQLType::Date),
"TIMESTAMP" => {
if self.parse_keyword("WITH") {
if self.parse_keywords(vec!["TIME", "ZONE"]) {
Ok(SQLType::Timestamp)
} else {
parser_err!(format!(
"Expecting 'time zone', found: {:?}",
self.peek_token()
))
}
} else if self.parse_keyword("WITHOUT") {
if self.parse_keywords(vec!["TIME", "ZONE"]) {
Ok(SQLType::Timestamp)
} else {
parser_err!(format!(
"Expecting 'time zone', found: {:?}",
self.peek_token()
))
}
} else {
Ok(SQLType::Timestamp)
// TBD: we throw away "with/without timezone" information
if self.parse_keyword("WITH") || self.parse_keyword("WITHOUT") {
self.expect_keyword("TIME")?;
self.expect_keyword("ZONE")?;
}
Ok(SQLType::Timestamp)
}
"TIME" => {
if self.parse_keyword("WITH") {
if self.parse_keywords(vec!["TIME", "ZONE"]) {
Ok(SQLType::Time)
} else {
parser_err!(format!(
"Expecting 'time zone', found: {:?}",
self.peek_token()
))
}
} else if self.parse_keyword("WITHOUT") {
if self.parse_keywords(vec!["TIME", "ZONE"]) {
Ok(SQLType::Time)
} else {
parser_err!(format!(
"Expecting 'time zone', found: {:?}",
self.peek_token()
))
}
} else {
Ok(SQLType::Timestamp)
// TBD: we throw away "with/without timezone" information
if self.parse_keyword("WITH") || self.parse_keyword("WITHOUT") {
self.expect_keyword("TIME")?;
self.expect_keyword("ZONE")?;
}
Ok(SQLType::Time)
}
"REGCLASS" => Ok(SQLType::Regclass),
"TEXT" => {
@ -1288,7 +1251,7 @@ impl Parser {
break;
}
}
return Ok(cte);
Ok(cte)
}
/// Parse a "query body", which is an expression with roughly the
@ -1303,7 +1266,7 @@ impl Parser {
// We parse the expression using a Pratt parser, as in `parse_expr()`.
// Start by parsing a restricted SELECT or a `(subquery)`:
let mut expr = if self.parse_keyword("SELECT") {
SQLSetExpr::Select(self.parse_select()?)
SQLSetExpr::Select(Box::new(self.parse_select()?))
} else if self.consume_token(&Token::LParen) {
// CTEs are not allowed here, but the parser currently accepts them
let subquery = self.parse_query()?;
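
With `parse_sql` now taking `&dyn Dialect` (Rust 2018 makes the `dyn` explicit), callers still pass a dialect reference exactly as before. A short usage sketch against the crate as it stands after this commit:

    use sqlparser::dialect::GenericSqlDialect;
    use sqlparser::sqlparser::Parser;

    fn main() {
        let sql = String::from("SELECT a, b FROM t WHERE a = 1");
        // `&GenericSqlDialect {}` coerces to `&dyn Dialect`, matching the new signature.
        let statements = Parser::parse_sql(&GenericSqlDialect {}, sql).unwrap();
        println!("parsed {} statement(s)", statements.len());
    }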


@ -138,9 +138,9 @@ impl Token {
let is_keyword = quote_style == None && ALL_KEYWORDS.contains(&word_uppercase.as_str());
Token::SQLWord(SQLWord {
value: word.to_string(),
quote_style: quote_style,
quote_style,
keyword: if is_keyword {
word_uppercase.to_string()
word_uppercase
} else {
"".to_string()
},
@ -212,7 +212,7 @@ pub struct TokenizerError(String);
/// SQL Tokenizer
pub struct Tokenizer<'a> {
dialect: &'a Dialect,
dialect: &'a dyn Dialect,
pub query: String,
pub line: u64,
pub col: u64,
@ -220,7 +220,7 @@ pub struct Tokenizer<'a> {
impl<'a> Tokenizer<'a> {
/// Create a new SQL tokenizer for the specified SQL statement
pub fn new(dialect: &'a Dialect, query: &str) -> Self {
pub fn new(dialect: &'a dyn Dialect, query: &str) -> Self {
Self {
dialect,
query: query.to_string(),
@ -256,7 +256,7 @@ impl<'a> Tokenizer<'a> {
}
/// Get the next token or return None
fn next_token(&self, chars: &mut Peekable<Chars>) -> Result<Option<Token>, TokenizerError> {
fn next_token(&self, chars: &mut Peekable<Chars<'_>>) -> Result<Option<Token>, TokenizerError> {
//println!("next_token: {:?}", chars.peek());
match chars.peek() {
Some(&ch) => match ch {
@ -312,11 +312,11 @@ impl<'a> Tokenizer<'a> {
Ok(Some(Token::make_word(&s, Some(quote_start))))
}
// numbers
'0'...'9' => {
'0'..='9' => {
let mut s = String::new();
while let Some(&ch) = chars.peek() {
match ch {
'0'...'9' | '.' => {
'0'..='9' | '.' => {
chars.next(); // consume
s.push(ch);
}
@ -436,7 +436,7 @@ impl<'a> Tokenizer<'a> {
}
/// Tokenize an identifier or keyword, after the first char is already consumed.
fn tokenize_word(&self, first_char: char, chars: &mut Peekable<Chars>) -> String {
fn tokenize_word(&self, first_char: char, chars: &mut Peekable<Chars<'_>>) -> String {
let mut s = String::new();
s.push(first_char);
while let Some(&ch) = chars.peek() {
@ -451,7 +451,7 @@ impl<'a> Tokenizer<'a> {
}
/// Read a single quoted string, starting with the opening quote.
fn tokenize_single_quoted_string(&self, chars: &mut Peekable<Chars>) -> String {
fn tokenize_single_quoted_string(&self, chars: &mut Peekable<Chars<'_>>) -> String {
//TODO: handle escaped quotes in string
//TODO: handle newlines in string
//TODO: handle EOF before terminating quote
@ -475,7 +475,7 @@ impl<'a> Tokenizer<'a> {
fn tokenize_multiline_comment(
&self,
chars: &mut Peekable<Chars>,
chars: &mut Peekable<Chars<'_>>,
) -> Result<Option<Token>, TokenizerError> {
let mut s = String::new();
let mut maybe_closing_comment = false;
@ -506,7 +506,7 @@ impl<'a> Tokenizer<'a> {
fn consume_and_return(
&self,
chars: &mut Peekable<Chars>,
chars: &mut Peekable<Chars<'_>>,
t: Token,
) -> Result<Option<Token>, TokenizerError> {
chars.next();
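
The tokenizer also moves from the deprecated `'0'...'9'` range-pattern syntax to the inclusive `'0'..='9'` form. A self-contained example of the pattern:

    fn classify(ch: char) -> &'static str {
        match ch {
            // `..=` is the inclusive range pattern; the older `...` form still
            // compiles but is deprecated in the 2018 edition.
            '0'..='9' => "digit",
            'a'..='z' | 'A'..='Z' => "letter",
            _ => "other",
        }
    }

    fn main() {
        assert_eq!("digit", classify('7'));
        assert_eq!("letter", classify('x'));
        assert_eq!("other", classify('*'));
    }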


@ -1,5 +1,4 @@
extern crate log;
extern crate sqlparser;
#![warn(clippy::all)]
use sqlparser::dialect::AnsiSqlDialect;
use sqlparser::sqlast::*;
@ -8,15 +7,18 @@ use sqlparser::sqlparser::*;
#[test]
fn parse_simple_select() {
let sql = String::from("SELECT id, fname, lname FROM customer WHERE id = 1");
let ast = Parser::parse_sql(&AnsiSqlDialect {}, sql).unwrap();
let mut ast = Parser::parse_sql(&AnsiSqlDialect {}, sql).unwrap();
assert_eq!(1, ast.len());
match ast.first().unwrap() {
SQLStatement::SQLSelect(SQLQuery {
body: SQLSetExpr::Select(SQLSelect { projection, .. }),
..
}) => {
assert_eq!(3, projection.len());
}
_ => assert!(false),
match ast.pop().unwrap() {
SQLStatement::SQLQuery(q) => match *q {
SQLQuery {
body: SQLSetExpr::Select(select),
..
} => {
assert_eq!(3, select.projection.len());
}
_ => unreachable!(),
},
_ => unreachable!(),
}
}


@ -1,5 +1,6 @@
extern crate log;
extern crate sqlparser;
#![warn(clippy::all)]
use matches::assert_matches;
use sqlparser::dialect::*;
use sqlparser::sqlast::*;
@ -13,8 +14,7 @@ fn parse_delete_statement() {
SQLStatement::SQLDelete { table_name, .. } => {
assert_eq!(SQLObjectName(vec!["\"table\"".to_string()]), table_name);
}
_ => assert!(false),
_ => unreachable!(),
}
}
@ -41,8 +41,7 @@ fn parse_where_delete_statement() {
selection.unwrap(),
);
}
_ => assert!(false),
_ => unreachable!(),
}
}
@ -232,23 +231,17 @@ fn parse_not_precedence() {
use self::ASTNode::*;
// NOT has higher precedence than OR/AND, so the following must parse as (NOT true) OR true
let sql = "NOT true OR true";
match verified_expr(sql) {
SQLBinaryExpr {
op: SQLOperator::Or,
..
} => assert!(true),
_ => assert!(false),
};
assert_matches!(verified_expr(sql), SQLBinaryExpr {
op: SQLOperator::Or,
..
});
// But NOT has lower precedence than comparison operators, so the following parses as NOT (a IS NULL)
let sql = "NOT a IS NULL";
match verified_expr(sql) {
SQLUnary {
operator: SQLOperator::Not,
..
} => assert!(true),
_ => assert!(false),
};
assert_matches!(verified_expr(sql), SQLUnary {
operator: SQLOperator::Not,
..
});
}
#[test]
@ -449,9 +442,9 @@ fn parse_create_table() {
SQLStatement::SQLCreateTable {
name,
columns,
external: _,
file_format: _,
location: _,
external: false,
file_format: None,
location: None,
} => {
assert_eq!("uk_cities", name.to_string());
assert_eq!(3, columns.len());
@ -471,7 +464,7 @@ fn parse_create_table() {
assert_eq!(SQLType::Double, c_lng.data_type);
assert_eq!(true, c_lng.allow_null);
}
_ => assert!(false),
_ => unreachable!(),
}
}
@ -522,7 +515,7 @@ fn parse_create_external_table() {
assert_eq!(FileFormat::TEXTFILE, file_format.unwrap());
assert_eq!("/tmp/example.csv", location.unwrap());
}
_ => assert!(false),
_ => unreachable!(),
}
}
@ -844,14 +837,13 @@ fn parse_ctes() {
cte_sqls[0], cte_sqls[1]
);
fn assert_ctes_in_select(expected: &Vec<&str>, sel: &SQLQuery) {
for i in 0..1 {
let Cte {
ref query,
ref alias,
} = sel.ctes[i];
assert_eq!(expected[i], query.to_string());
fn assert_ctes_in_select(expected: &[&str], sel: &SQLQuery) {
let mut i = 0;
for exp in expected {
let Cte { query, alias } = &sel.ctes[i];
assert_eq!(*exp, query.to_string());
assert_eq!(if i == 0 { "a" } else { "b" }, alias);
i += 1;
}
}
@ -861,7 +853,7 @@ fn parse_ctes() {
let sql = &format!("SELECT ({})", with);
let select = verified_only_select(sql);
match expr_from_projection(only(&select.projection)) {
&ASTNode::SQLSubquery(ref subquery) => {
ASTNode::SQLSubquery(ref subquery) => {
assert_ctes_in_select(&cte_sqls, subquery.as_ref());
}
_ => panic!("Expected subquery"),
@ -956,14 +948,11 @@ fn parse_multiple_statements() {
fn parse_scalar_subqueries() {
use self::ASTNode::*;
let sql = "(SELECT 1) + (SELECT 2)";
match verified_expr(sql) {
SQLBinaryExpr {
op: SQLOperator::Plus, ..
//left: box SQLSubquery { .. },
//right: box SQLSubquery { .. },
} => assert!(true),
_ => assert!(false),
};
assert_matches!(verified_expr(sql), SQLBinaryExpr {
op: SQLOperator::Plus, ..
//left: box SQLSubquery { .. },
//right: box SQLSubquery { .. },
});
}
#[test]
@ -979,7 +968,7 @@ fn parse_create_view() {
assert_eq!("SELECT foo FROM bar", query.to_string());
assert!(!materialized);
}
_ => assert!(false),
_ => unreachable!(),
}
}
@ -996,7 +985,7 @@ fn parse_create_materialized_view() {
assert_eq!("SELECT foo FROM bar", query.to_string());
assert!(materialized);
}
_ => assert!(false),
_ => unreachable!(),
}
}
@ -1009,15 +998,15 @@ fn parse_invalid_subquery_without_parens() {
);
}
fn only<'a, T>(v: &'a Vec<T>) -> &'a T {
fn only<T>(v: &[T]) -> &T {
assert_eq!(1, v.len());
v.first().unwrap()
}
fn verified_query(query: &str) -> SQLQuery {
match verified_stmt(query) {
SQLStatement::SQLSelect(select) => select,
_ => panic!("Expected SELECT"),
SQLStatement::SQLQuery(query) => *query,
_ => panic!("Expected SQLQuery"),
}
}
@ -1030,7 +1019,7 @@ fn expr_from_projection(item: &SQLSelectItem) -> &ASTNode {
fn verified_only_select(query: &str) -> SQLSelect {
match verified_query(query).body {
SQLSetExpr::Select(s) => s,
SQLSetExpr::Select(s) => *s,
_ => panic!("Expected SQLSetExpr::Select"),
}
}
@ -1073,10 +1062,9 @@ fn parse_sql_expr(sql: &str) -> ASTNode {
generic_ast
}
fn parse_sql_expr_with(dialect: &Dialect, sql: &str) -> ASTNode {
fn parse_sql_expr_with(dialect: &dyn Dialect, sql: &str) -> ASTNode {
let mut tokenizer = Tokenizer::new(dialect, &sql);
let tokens = tokenizer.tokenize().unwrap();
let mut parser = Parser::new(tokens);
let ast = parser.parse_expr().unwrap();
ast
parser.parse_expr().unwrap()
}
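
The tests replace hand-rolled `match ... { pat => assert!(true), _ => assert!(false) }` blocks with `assert_matches!` from the `matches` crate added to `[dev-dependencies]` above. A standalone sketch of the macro (toy enum, not the parser's AST):

    use matches::assert_matches;

    #[derive(Debug)]
    #[allow(dead_code)]
    enum Expr {
        Binary { op: char },
        Unary { op: char },
    }

    fn main() {
        let e = Expr::Binary { op: '+' };
        // Passes when the value matches the pattern, panics otherwise; this
        // reads more directly than the old match-and-assert idiom.
        assert_matches!(e, Expr::Binary { op: '+' });
    }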


@ -1,13 +1,12 @@
extern crate log;
extern crate sqlparser;
#![warn(clippy::all)]
use log::debug;
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::sqlast::*;
use sqlparser::sqlparser::*;
use sqlparser::sqltokenizer::*;
use log::*;
#[test]
fn test_prev_index() {
let sql: &str = "SELECT version()";
@ -42,7 +41,7 @@ fn parse_simple_insert() {
values
);
}
_ => assert!(false),
_ => unreachable!(),
}
}
@ -67,7 +66,7 @@ fn parse_common_insert() {
values
);
}
_ => assert!(false),
_ => unreachable!(),
}
}
@ -92,7 +91,7 @@ fn parse_complex_insert() {
values
);
}
_ => assert!(false),
_ => unreachable!(),
}
}
@ -134,7 +133,7 @@ fn parse_insert_with_columns() {
values
);
}
_ => assert!(false),
_ => unreachable!(),
}
}
@ -143,7 +142,7 @@ fn parse_insert_invalid() {
let sql = String::from("INSERT public.customer (id, name, active) VALUES (1, 2, 3)");
match Parser::parse_sql(&PostgreSqlDialect {}, sql) {
Err(_) => {}
_ => assert!(false),
_ => unreachable!(),
}
}
@ -166,9 +165,9 @@ fn parse_create_table_with_defaults() {
SQLStatement::SQLCreateTable {
name,
columns,
external: _,
file_format: _,
location: _,
external: false,
file_format: None,
location: None,
} => {
assert_eq!("public.customer", name.to_string());
assert_eq!(10, columns.len());
@ -188,7 +187,7 @@ fn parse_create_table_with_defaults() {
assert_eq!(SQLType::Varchar(Some(45)), c_lng.data_type);
assert_eq!(false, c_lng.allow_null);
}
_ => assert!(false),
_ => unreachable!(),
}
}
@ -213,9 +212,9 @@ fn parse_create_table_from_pg_dump() {
SQLStatement::SQLCreateTable {
name,
columns,
external: _,
file_format: _,
location: _,
external: false,
file_format: None,
location: None,
} => {
assert_eq!("public.customer", name.to_string());
@ -257,7 +256,7 @@ fn parse_create_table_from_pg_dump() {
c_release_year.data_type
);
}
_ => assert!(false),
_ => unreachable!(),
}
}
@ -276,9 +275,9 @@ fn parse_create_table_with_inherit() {
SQLStatement::SQLCreateTable {
name,
columns,
external: _,
file_format: _,
location: _,
external: false,
file_format: None,
location: None,
} => {
assert_eq!("bazaar.settings", name.to_string());
@ -296,7 +295,7 @@ fn parse_create_table_with_inherit() {
assert_eq!(false, c_name.is_primary);
assert_eq!(true, c_name.is_unique);
}
_ => assert!(false),
_ => unreachable!(),
}
}
@ -311,7 +310,7 @@ fn parse_alter_table_constraint_primary_key() {
SQLStatement::SQLAlterTable { name, .. } => {
assert_eq!(name.to_string(), "bazaar.address");
}
_ => assert!(false),
_ => unreachable!(),
}
}
@ -324,7 +323,7 @@ fn parse_alter_table_constraint_foreign_key() {
SQLStatement::SQLAlterTable { name, .. } => {
assert_eq!(name.to_string(), "public.customer");
}
_ => assert!(false),
_ => unreachable!(),
}
}
@ -410,8 +409,7 @@ fn parse_sql_statements(sql: &str) -> Result<Vec<SQLStatement>, ParserError> {
fn parse_sql_expr(sql: &str) -> ASTNode {
debug!("sql: {}", sql);
let mut parser = parser(sql);
let ast = parser.parse_expr().unwrap();
ast
parser.parse_expr().unwrap()
}
fn parser(sql: &str) -> Parser {