Make clippy happy (#330)

Signed-off-by: koushiro <koushiro.cqx@gmail.com>
Author: Qinxuan Chen, 2021-08-20 05:02:15 +08:00 (committed by GitHub)
parent e5991f3ae5
commit 67e17b27f5
14 changed files with 56 additions and 43 deletions
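The diffs below are mechanical fixes for a handful of clippy lints: redundant `&` on values that are already references (`clippy::needless_borrow`), a `///!` doc-comment typo, `use` statements reordered and switched from `super::`-relative to `crate::`-absolute paths, and one enum variant that was imported instead of qualified. The borrow fixes all follow the shape sketched here; `tokenize` is a hypothetical stand-in, not this crate's API:

```rust
// Minimal sketch of the `clippy::needless_borrow` pattern fixed below.
fn tokenize(sql: &str) -> Vec<String> {
    sql.split_whitespace().map(str::to_owned).collect()
}

fn main() {
    let sql: &str = "SELECT 1";
    // Before: `&sql` is a `&&str`; the call only compiles because the
    // compiler auto-derefs it, and clippy flags the extra borrow.
    let _tokens = tokenize(&sql);
    // After: `sql` is already a `&str`, so it is passed as-is.
    let _tokens = tokenize(sql);
}
```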

CHANGELOG.md

@@ -9,7 +9,7 @@ Given that the parser produces a typed AST, any changes to the AST will technica
 Check https://github.com/ballista-compute/sqlparser-rs/commits/main for undocumented changes.

-## [0.8.0] 2020-03-21
+## [0.9.0] 2020-03-21

 ### Added
 * Add support for `TRY_CAST` syntax (#299) - Thanks @seddonm1!

examples/cli.rs

@@ -12,13 +12,14 @@
 #![warn(clippy::all)]

-///! A small command-line app to run the parser.
+/// A small command-line app to run the parser.
 /// Run with `cargo run --example cli`
 use std::fs;

 use simple_logger::SimpleLogger;
 use sqlparser::dialect::*;
 use sqlparser::parser::Parser;

 fn main() {
     SimpleLogger::new().init().unwrap();
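The `///!` fixed above is a near-miss on Rust's doc-comment syntax: `//!` documents the enclosing file or module, `///` documents the item that follows, and `///!` is merely an outer `///` comment whose text starts with `!`. A standalone sketch of the two valid forms (the `run` function is invented for illustration):

```rust
//! Inner doc comment: `//!` documents the enclosing file or module.

/// Outer doc comment: `///` documents the item right after it.
/// `///!` is neither form; it parses as an outer comment whose
/// text simply begins with `!`.
pub fn run() {}

fn main() {
    run();
}
```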

src/ast/data_type.rs

@@ -10,10 +10,12 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-use super::ObjectName;
-use std::fmt;
 #[cfg(feature = "serde")]
 use serde::{Deserialize, Serialize};
+use std::fmt;
+
+use crate::ast::ObjectName;

 /// SQL data types
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]

src/ast/ddl.rs

@@ -12,12 +12,14 @@
 //! AST types specific to CREATE/ALTER variants of [Statement]
 //! (commonly referred to as Data Definition Language, or DDL)
-use super::{display_comma_separated, DataType, Expr, Ident, ObjectName};
-use crate::ast::display_separated;
-use crate::tokenizer::Token;
-use std::fmt;
 #[cfg(feature = "serde")]
 use serde::{Deserialize, Serialize};
+use std::fmt;
+
+use crate::ast::{display_comma_separated, display_separated, DataType, Expr, Ident, ObjectName};
+use crate::tokenizer::Token;

 /// An `ALTER TABLE` (`Statement::AlterTable`) operation
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
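Several of these import rewrites swap `use super::…` for `use crate::…`. Both resolve to the same items; the absolute `crate::` path just stays valid if the module is later moved or nested differently. A toy layout showing the equivalence (names invented, not the crate's real types):

```rust
mod ast {
    pub struct ObjectName(pub String);

    pub mod ddl {
        // Relative form: `use super::ObjectName;` resolves against the
        // parent module (`ast`) and breaks if `ddl` is re-parented.
        // Absolute form: resolves from the crate root regardless.
        use crate::ast::ObjectName;

        pub fn describe(name: &ObjectName) -> String {
            format!("TABLE {}", name.0)
        }
    }
}

fn main() {
    let name = ast::ObjectName("t1".to_owned());
    println!("{}", ast::ddl::describe(&name));
}
```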

src/ast/mod.rs

@@ -18,9 +18,10 @@ mod operator;
 mod query;
 mod value;

-use std::fmt;
 #[cfg(feature = "serde")]
 use serde::{Deserialize, Serialize};
+use std::fmt;

 pub use self::data_type::DataType;
 pub use self::ddl::{
@@ -993,16 +994,16 @@ impl fmt::Display for Statement {
                 }
                 match hive_distribution {
                     HiveDistributionStyle::PARTITIONED { columns } => {
-                        write!(f, " PARTITIONED BY ({})", display_comma_separated(&columns))?;
+                        write!(f, " PARTITIONED BY ({})", display_comma_separated(columns))?;
                     }
                     HiveDistributionStyle::CLUSTERED {
                         columns,
                         sorted_by,
                         num_buckets,
                     } => {
-                        write!(f, " CLUSTERED BY ({})", display_comma_separated(&columns))?;
+                        write!(f, " CLUSTERED BY ({})", display_comma_separated(columns))?;
                         if !sorted_by.is_empty() {
-                            write!(f, " SORTED BY ({})", display_comma_separated(&sorted_by))?;
+                            write!(f, " SORTED BY ({})", display_comma_separated(sorted_by))?;
                         }
                         if *num_buckets > 0 {
                             write!(f, " INTO {} BUCKETS", num_buckets)?;
@@ -1016,8 +1017,8 @@ impl fmt::Display for Statement {
                         write!(
                             f,
                             " SKEWED BY ({})) ON ({})",
-                            display_comma_separated(&columns),
-                            display_comma_separated(&on)
+                            display_comma_separated(columns),
+                            display_comma_separated(on)
                         )?;
                         if *stored_as_directories {
                             write!(f, " STORED AS DIRECTORIES")?;

src/ast/operator.rs

@@ -10,9 +10,10 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-use std::fmt;
 #[cfg(feature = "serde")]
 use serde::{Deserialize, Serialize};
+use std::fmt;

 /// Unary operators
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]

src/ast/query.rs

@@ -10,10 +10,11 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-use super::*;
 #[cfg(feature = "serde")]
 use serde::{Deserialize, Serialize};
+
+use crate::ast::*;

 /// The most complete variant of a `SELECT` query expression, optionally
 /// including `WITH`, `UNION` / other set operations, and `ORDER BY`.
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]

src/ast/value.rs

@@ -10,11 +10,12 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
-use std::fmt;
 #[cfg(feature = "bigdecimal")]
 use bigdecimal::BigDecimal;
 #[cfg(feature = "serde")]
 use serde::{Deserialize, Serialize};
+use std::fmt;

 /// Primitive SQL values such as number and string
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]

src/dialect/keywords.rs

@@ -10,7 +10,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-///! This module defines
+/// This module defines
 /// 1) a list of constants for every keyword that
 /// can appear in [Word::keyword]:
 ///    pub const KEYWORD = "KEYWORD"

src/parser.rs

@@ -12,15 +12,16 @@
 //! SQL Parser

-use log::debug;
-use super::ast::*;
-use super::dialect::keywords::Keyword;
-use super::dialect::*;
-use super::tokenizer::*;
 use std::error::Error;
 use std::fmt;

+use log::debug;
+
+use crate::ast::*;
+use crate::dialect::keywords::Keyword;
+use crate::dialect::*;
+use crate::tokenizer::*;

 #[derive(Debug, Clone, PartialEq)]
 pub enum ParserError {
     TokenizerError(String),
@@ -56,7 +57,6 @@ pub enum IsLateral {
     NotLateral,
 }

-use crate::ast::Statement::CreateVirtualTable;
 use IsLateral::*;

 impl From<TokenizerError> for ParserError {
@@ -102,7 +102,7 @@ impl<'a> Parser<'a> {
     /// Parse a SQL statement and produce an Abstract Syntax Tree (AST)
     pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
-        let mut tokenizer = Tokenizer::new(dialect, &sql);
+        let mut tokenizer = Tokenizer::new(dialect, sql);
         let tokens = tokenizer.tokenize()?;
         let mut parser = Parser::new(tokens, dialect);
         let mut stmts = Vec::new();
@@ -297,6 +297,7 @@ impl<'a> Parser<'a> {
         }
         Ok(expr)
     }
+
     pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
         let condition = self.parse_expr()?;
         let message = if self.parse_keyword(Keyword::AS) {
@@ -1248,7 +1249,7 @@ impl<'a> Parser<'a> {
         // definitions in a traditional CREATE TABLE statement", but
         // we don't implement that.
         let module_args = self.parse_parenthesized_column_list(Optional)?;
-        Ok(CreateVirtualTable {
+        Ok(Statement::CreateVirtualTable {
             name: table_name,
             if_not_exists,
             module_name,
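The parser change above pairs with dropping the single-use `use crate::ast::Statement::CreateVirtualTable;` earlier in the file: the enum is now named at the construction site. Importing a lone variant compiles, but the qualified form keeps the owning enum visible where the value is built. A small sketch of the two styles (toy types, not the crate's):

```rust
enum Statement {
    CreateVirtualTable { name: String },
    Commit,
}

fn make() -> Statement {
    // Qualified style kept by the commit: the enum is named where the
    // variant is constructed, instead of importing the variant with
    // `use Statement::CreateVirtualTable;` and writing it bare.
    Statement::CreateVirtualTable { name: "t".to_owned() }
}

fn main() {
    for stmt in [make(), Statement::Commit] {
        match stmt {
            Statement::CreateVirtualTable { name } => println!("vtab {}", name),
            Statement::Commit => println!("commit"),
        }
    }
}
```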

src/test_utils.rs

@@ -18,10 +18,10 @@
 // via `tests/test_utils/mod.rs`.
 use std::fmt::Debug;

-use super::ast::*;
-use super::dialect::*;
-use super::parser::{Parser, ParserError};
-use super::tokenizer::Tokenizer;
+use crate::ast::*;
+use crate::dialect::*;
+use crate::parser::{Parser, ParserError};
+use crate::tokenizer::Tokenizer;

 /// Tests use the methods on this struct to invoke the parser on one or
 /// multiple dialects.
@@ -64,7 +64,7 @@ impl TestedDialects {
     }

     pub fn parse_sql_statements(&self, sql: &str) -> Result<Vec<Statement>, ParserError> {
-        self.one_of_identical_results(|dialect| Parser::parse_sql(dialect, &sql))
+        self.one_of_identical_results(|dialect| Parser::parse_sql(dialect, sql))
         // To fail the `ensure_multiple_dialects_are_tested` test:
         // Parser::parse_sql(&**self.dialects.first().unwrap(), sql)
     }
@@ -75,11 +75,11 @@ impl TestedDialects {
     /// tree as parsing `canonical`, and that serializing it back to string
     /// results in the `canonical` representation.
     pub fn one_statement_parses_to(&self, sql: &str, canonical: &str) -> Statement {
-        let mut statements = self.parse_sql_statements(&sql).unwrap();
+        let mut statements = self.parse_sql_statements(sql).unwrap();
         assert_eq!(statements.len(), 1);
         if !canonical.is_empty() && sql != canonical {
-            assert_eq!(self.parse_sql_statements(&canonical).unwrap(), statements);
+            assert_eq!(self.parse_sql_statements(canonical).unwrap(), statements);
         }
         let only_statement = statements.pop().unwrap();

src/tokenizer.rs

@@ -16,15 +16,16 @@
 //!
 //! The tokens then form the input for the parser, which outputs an Abstract Syntax Tree (AST).

-use std::fmt;
 use std::iter::Peekable;
 use std::str::Chars;

-use super::dialect::keywords::{Keyword, ALL_KEYWORDS, ALL_KEYWORDS_INDEX};
-use super::dialect::Dialect;
-use super::dialect::SnowflakeDialect;
 #[cfg(feature = "serde")]
 use serde::{Deserialize, Serialize};
+use std::fmt;
+
+use crate::dialect::keywords::{Keyword, ALL_KEYWORDS, ALL_KEYWORDS_INDEX};
+use crate::dialect::Dialect;
+use crate::dialect::SnowflakeDialect;

 /// SQL Token enumeration
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -184,6 +185,7 @@ impl Token {
     pub fn make_keyword(keyword: &str) -> Self {
         Token::make_word(keyword, None)
     }
+
     pub fn make_word(word: &str, quote_style: Option<char>) -> Self {
         let word_uppercase = word.to_uppercase();
         Token::Word(Word {
@@ -226,6 +228,7 @@ impl fmt::Display for Word {
         }
     }
 }
+
 impl Word {
     fn matching_end_quote(ch: char) -> char {
         match ch {

tests/sqlparser_common.rs

@@ -102,7 +102,7 @@ fn parse_insert_sqlite() {
     let dialect = SQLiteDialect {};
     let check = |sql: &str, expected_action: Option<SqliteOnConflict>| match Parser::parse_sql(
-        &dialect, &sql,
+        &dialect, sql,
     )
     .unwrap()
     .pop()
@@ -340,7 +340,7 @@ fn parse_column_aliases() {
     }

     // alias without AS is parsed correctly:
-    one_statement_parses_to("SELECT a.col + 1 newname FROM foo AS a", &sql);
+    one_statement_parses_to("SELECT a.col + 1 newname FROM foo AS a", sql);
 }

 #[test]
@@ -2685,7 +2685,7 @@ fn parse_multiple_statements() {
         let res = parse_sql_statements(&(sql1.to_owned() + ";" + sql2_kw + sql2_rest));
         assert_eq!(
             vec![
-                one_statement_parses_to(&sql1, ""),
+                one_statement_parses_to(sql1, ""),
                 one_statement_parses_to(&(sql2_kw.to_owned() + sql2_rest), ""),
             ],
             res.unwrap()

tests/sqlparser_snowflake.rs

@@ -38,7 +38,7 @@ fn test_snowflake_create_table() {
 fn test_snowflake_single_line_tokenize() {
     let sql = "CREATE TABLE# this is a comment \ntable_1";
     let dialect = SnowflakeDialect {};
-    let mut tokenizer = Tokenizer::new(&dialect, &sql);
+    let mut tokenizer = Tokenizer::new(&dialect, sql);
     let tokens = tokenizer.tokenize().unwrap();

     let expected = vec![
@@ -55,7 +55,7 @@ fn test_snowflake_single_line_tokenize() {
     assert_eq!(expected, tokens);

     let sql = "CREATE TABLE// this is a comment \ntable_1";
-    let mut tokenizer = Tokenizer::new(&dialect, &sql);
+    let mut tokenizer = Tokenizer::new(&dialect, sql);
     let tokens = tokenizer.tokenize().unwrap();

     let expected = vec![