Mirror of https://github.com/apache/datafusion-sqlparser-rs.git (synced 2025-07-07 17:04:59 +00:00)
Fix a bunch of trivial clippy lints
This commit is contained in:
parent
3df2223d95
commit
c223eaf0aa
7 changed files with 70 additions and 84 deletions
@@ -14,8 +14,8 @@ fn main() {
         .nth(1)
         .expect("No arguments provided!\n\nUsage: cargo run --example cli FILENAME.sql");
 
-    let contents =
-        fs::read_to_string(&filename).expect(&format!("Unable to read the file {}", &filename));
+    let contents = fs::read_to_string(&filename)
+        .unwrap_or_else(|_| panic!("Unable to read the file {}", &filename));
     let without_bom = if contents.chars().nth(0).unwrap() as u64 != 0xfeff {
         contents.as_str()
     } else {
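The hunk above follows the pattern clippy's expect_fun_call lint suggests: the message handed to expect() is built eagerly even when the read succeeds, while unwrap_or_else only constructs it on the error path. A minimal sketch, with a hypothetical helper name that is not part of this commit:

    // Illustrative only; `read_or_die` is not from the repository.
    fn read_or_die(path: &str) -> String {
        // eager form allocates the formatted message even on success:
        //   std::fs::read_to_string(path).expect(&format!("Unable to read {}", path))
        // lazy form builds the panic message only if the read fails:
        std::fs::read_to_string(path).unwrap_or_else(|_| panic!("Unable to read {}", path))
    }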
@@ -31,7 +31,7 @@ fn main() {
         "Round-trip:\n'{}'",
         statements
             .iter()
-            .map(|s| s.to_string())
+            .map(std::string::ToString::to_string)
             .collect::<Vec<_>>()
             .join("\n")
     );

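Replacing `.map(|s| s.to_string())` with the method path is the shape clippy asks for when a closure only forwards its argument (covered by redundant_closure and, for method calls, redundant_closure_for_method_calls). A small sketch with illustrative data:

    fn join_lines(items: &[String]) -> String {
        // `|s| s.to_string()` and `std::string::ToString::to_string` behave the same here;
        // the path form avoids the needless closure.
        items
            .iter()
            .map(std::string::ToString::to_string)
            .collect::<Vec<_>>()
            .join("\n")
    }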
@@ -365,9 +365,9 @@ keyword!(
 );
 
 /// special case of keyword where the it is an invalid identifier
-pub const END_EXEC: &'static str = "END-EXEC";
+pub const END_EXEC: &str = "END-EXEC";
 
-pub const ALL_KEYWORDS: &'static [&'static str] = &[
+pub const ALL_KEYWORDS: &[&str] = &[
     ABS,
     ADD,
     ASC,
@@ -716,7 +716,7 @@ pub const ALL_KEYWORDS: &'static [&'static str] = &[
 
 /// These keywords can't be used as a table alias, so that `FROM table_name alias`
 /// can be parsed unambiguously without looking ahead.
-pub const RESERVED_FOR_TABLE_ALIAS: &'static [&'static str] = &[
+pub const RESERVED_FOR_TABLE_ALIAS: &[&str] = &[
     // Reserved as both a table and a column alias:
     WITH, SELECT, WHERE, GROUP, ORDER, UNION, EXCEPT, INTERSECT,
     // Reserved only as a table alias in the `FROM`/`JOIN` clauses:
@@ -725,7 +725,7 @@ pub const RESERVED_FOR_TABLE_ALIAS: &'static [&'static str] = &[
 
 /// Can't be used as a column alias, so that `SELECT <expr> alias`
 /// can be parsed unambiguously without looking ahead.
-pub const RESERVED_FOR_COLUMN_ALIAS: &'static [&'static str] = &[
+pub const RESERVED_FOR_COLUMN_ALIAS: &[&str] = &[
     // Reserved as both a table and a column alias:
     WITH, SELECT, WHERE, GROUP, ORDER, UNION, EXCEPT, INTERSECT,
     // Reserved only as a column alias in the `SELECT` clause:

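The keyword hunks all drop `'static` from const declarations, which is what clippy's redundant-static-lifetime lint suggests (at the time the lint was named const_static_lifetime, later redundant_static_lifetimes): `'static` is already implied for references in a `const`. A minimal sketch with illustrative names, not the crate's actual constants:

    // The commented-out form is equivalent but carries a redundant lifetime:
    // pub const EXAMPLE_KEYWORD: &'static str = "END-EXEC";
    pub const EXAMPLE_KEYWORD: &str = "END-EXEC";
    pub const EXAMPLE_LIST: &[&str] = &["SELECT", "FROM"];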
@@ -271,10 +271,10 @@ impl ToString for SQLStatement {
                 values,
             } => {
                 let mut s = format!("INSERT INTO {}", table_name.to_string());
-                if columns.len() > 0 {
+                if !columns.is_empty() {
                     s += &format!(" ({})", columns.join(", "));
                 }
-                if values.len() > 0 {
+                if !values.is_empty() {
                     s += &format!(
                         " VALUES({})",
                         values
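`columns.len() > 0` becoming `!columns.is_empty()` is clippy's len_zero lint: is_empty states the intent directly instead of comparing a length against zero. A small sketch of the same pattern:

    fn render_columns(columns: &[String]) -> String {
        // clearer than `columns.len() > 0`
        if !columns.is_empty() {
            format!(" ({})", columns.join(", "))
        } else {
            String::new()
        }
    }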
@@ -307,12 +307,12 @@ impl ToString for SQLStatement {
                     );
                 }
                 s += " FROM stdin; ";
-                if values.len() > 0 {
+                if !values.is_empty() {
                     s += &format!(
                         "\n{}",
                         values
                             .iter()
-                            .map(|v| v.clone().unwrap_or("\\N".to_string()))
+                            .map(|v| v.clone().unwrap_or_else(|| "\\N".to_string()))
                             .collect::<Vec<String>>()
                             .join("\t")
                     );
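The unwrap_or change matches clippy's or_fun_call lint: the argument of unwrap_or is evaluated even when the Option is Some, so an allocating default belongs in a closure. Sketch:

    fn copy_field(v: &Option<String>) -> String {
        // the "\\N" String is only allocated when `v` is None
        v.clone().unwrap_or_else(|| "\\N".to_string())
    }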
@@ -381,13 +381,7 @@ impl ToString for SQLStatement {
                 file_format.as_ref().map(|f| f.to_string()).unwrap(),
                 location.as_ref().unwrap()
             ),
-            SQLStatement::SQLCreateTable {
-                name,
-                columns,
-                external: _,
-                file_format: _,
-                location: _,
-            } => format!(
+            SQLStatement::SQLCreateTable { name, columns, .. } => format!(
                 "CREATE TABLE {} ({})",
                 name.to_string(),
                 columns
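Collapsing `external: _, file_format: _, location: _` into `..` is probably clippy's unneeded_field_pattern lint, or simply a readability cleanup: `..` ignores all remaining fields at once. A sketch with a hypothetical struct, not the crate's actual SQLStatement type:

    struct CreateTable { name: String, columns: Vec<String>, external: bool } // illustrative

    fn describe(t: &CreateTable) -> String {
        // `..` replaces listing every ignored field as `field: _`
        let CreateTable { name, columns, .. } = t;
        format!("CREATE TABLE {} ({})", name, columns.join(", "))
    }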
@@ -49,10 +49,7 @@ pub struct Parser {
 impl Parser {
     /// Parse the specified tokens
     pub fn new(tokens: Vec<Token>) -> Self {
-        Parser {
-            tokens: tokens,
-            index: 0,
-        }
+        Parser { tokens, index: 0 }
     }
 
     /// Parse a SQL statement and produce an Abstract Syntax Tree (AST)
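`tokens: tokens` becoming the field-init shorthand is clippy's redundant_field_names lint. Sketch with an illustrative stand-in struct:

    struct TokenBuffer { tokens: Vec<String>, index: usize } // not the crate's Parser

    impl TokenBuffer {
        fn new(tokens: Vec<String>) -> Self {
            // shorthand instead of `tokens: tokens`
            TokenBuffer { tokens, index: 0 }
        }
    }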
@@ -235,7 +232,7 @@ impl Parser {
                     t
                 )),
             },
-            None => parser_err!(format!("Prefix parser expected a keyword but hit EOF")),
+            None => parser_err!("Prefix parser expected a keyword but hit EOF"),
         }
     }
 
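Wrapping a plain string literal in format! with no placeholders is what clippy's useless_format lint flags; the literal (or a .to_string()) is enough. Sketch:

    fn eof_message() -> String {
        // no placeholders, so format! added nothing:
        //   format!("Prefix parser expected a keyword but hit EOF")
        "Prefix parser expected a keyword but hit EOF".to_string()
    }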
@@ -405,21 +402,21 @@ impl Parser {
     /// Convert a token operator to an AST operator
     pub fn to_sql_operator(&self, tok: &Token) -> Result<SQLOperator, ParserError> {
         match tok {
-            &Token::Eq => Ok(SQLOperator::Eq),
-            &Token::Neq => Ok(SQLOperator::NotEq),
-            &Token::Lt => Ok(SQLOperator::Lt),
-            &Token::LtEq => Ok(SQLOperator::LtEq),
-            &Token::Gt => Ok(SQLOperator::Gt),
-            &Token::GtEq => Ok(SQLOperator::GtEq),
-            &Token::Plus => Ok(SQLOperator::Plus),
-            &Token::Minus => Ok(SQLOperator::Minus),
-            &Token::Mult => Ok(SQLOperator::Multiply),
-            &Token::Div => Ok(SQLOperator::Divide),
-            &Token::Mod => Ok(SQLOperator::Modulus),
-            &Token::SQLWord(ref k) if k.keyword == "AND" => Ok(SQLOperator::And),
-            &Token::SQLWord(ref k) if k.keyword == "OR" => Ok(SQLOperator::Or),
-            //&Token::SQLWord(ref k) if k.keyword == "NOT" => Ok(SQLOperator::Not),
-            &Token::SQLWord(ref k) if k.keyword == "LIKE" => Ok(SQLOperator::Like),
+            Token::Eq => Ok(SQLOperator::Eq),
+            Token::Neq => Ok(SQLOperator::NotEq),
+            Token::Lt => Ok(SQLOperator::Lt),
+            Token::LtEq => Ok(SQLOperator::LtEq),
+            Token::Gt => Ok(SQLOperator::Gt),
+            Token::GtEq => Ok(SQLOperator::GtEq),
+            Token::Plus => Ok(SQLOperator::Plus),
+            Token::Minus => Ok(SQLOperator::Minus),
+            Token::Mult => Ok(SQLOperator::Multiply),
+            Token::Div => Ok(SQLOperator::Divide),
+            Token::Mod => Ok(SQLOperator::Modulus),
+            Token::SQLWord(ref k) if k.keyword == "AND" => Ok(SQLOperator::And),
+            Token::SQLWord(ref k) if k.keyword == "OR" => Ok(SQLOperator::Or),
+            //Token::SQLWord(ref k) if k.keyword == "NOT" => Ok(SQLOperator::Not),
+            Token::SQLWord(ref k) if k.keyword == "LIKE" => Ok(SQLOperator::Like),
             _ => parser_err!(format!("Unsupported SQL operator {:?}", tok)),
         }
     }
@@ -438,19 +435,17 @@ impl Parser {
         debug!("get_precedence() {:?}", tok);
 
         match tok {
-            &Token::SQLWord(ref k) if k.keyword == "OR" => Ok(5),
-            &Token::SQLWord(ref k) if k.keyword == "AND" => Ok(10),
-            &Token::SQLWord(ref k) if k.keyword == "NOT" => Ok(15),
-            &Token::SQLWord(ref k) if k.keyword == "IS" => Ok(17),
-            &Token::SQLWord(ref k) if k.keyword == "IN" => Ok(20),
-            &Token::SQLWord(ref k) if k.keyword == "BETWEEN" => Ok(20),
-            &Token::SQLWord(ref k) if k.keyword == "LIKE" => Ok(20),
-            &Token::Eq | &Token::Lt | &Token::LtEq | &Token::Neq | &Token::Gt | &Token::GtEq => {
-                Ok(20)
-            }
-            &Token::Plus | &Token::Minus => Ok(30),
-            &Token::Mult | &Token::Div | &Token::Mod => Ok(40),
-            &Token::DoubleColon => Ok(50),
+            Token::SQLWord(k) if k.keyword == "OR" => Ok(5),
+            Token::SQLWord(k) if k.keyword == "AND" => Ok(10),
+            Token::SQLWord(k) if k.keyword == "NOT" => Ok(15),
+            Token::SQLWord(k) if k.keyword == "IS" => Ok(17),
+            Token::SQLWord(k) if k.keyword == "IN" => Ok(20),
+            Token::SQLWord(k) if k.keyword == "BETWEEN" => Ok(20),
+            Token::SQLWord(k) if k.keyword == "LIKE" => Ok(20),
+            Token::Eq | Token::Lt | Token::LtEq | Token::Neq | Token::Gt | Token::GtEq => Ok(20),
+            Token::Plus | Token::Minus => Ok(30),
+            Token::Mult | Token::Div | Token::Mod => Ok(40),
+            Token::DoubleColon => Ok(50),
            _ => Ok(0),
         }
     }
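The two match rewrites above drop the leading `&` from every pattern, the shape clippy's match_ref_pats lint suggests: when a match scrutinee is a reference and every arm starts with `&`, the patterns can be written without it and the default binding modes take care of the rest. Sketch with a hypothetical token type, not the crate's Token:

    enum Tok { Plus, Minus, Other } // illustrative

    fn precedence(tok: &Tok) -> u8 {
        match tok {
            // no `&Tok::Plus | &Tok::Minus` needed when matching through a reference
            Tok::Plus | Tok::Minus => 30,
            Tok::Other => 0,
        }
    }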
@@ -484,7 +479,7 @@ impl Parser {
         loop {
             match self.token_at(index) {
                 Some(Token::Whitespace(_)) => {
-                    index = index + 1;
+                    index += 1;
                 }
                 Some(_) => {
                     return Some(index);
@@ -507,7 +502,7 @@ impl Parser {
 
     pub fn next_token_no_skip(&mut self) -> Option<Token> {
         if self.index < self.tokens.len() {
-            self.index = self.index + 1;
+            self.index += 1;
             Some(self.tokens[self.index - 1].clone())
         } else {
             None
@@ -532,7 +527,7 @@ impl Parser {
     /// Get the previous token and decrement the token index
     fn prev_token_no_skip(&mut self) -> Option<Token> {
        if self.index > 0 {
-            self.index = self.index - 1;
+            self.index -= 1;
             Some(self.tokens[self.index].clone())
         } else {
             None
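The three index hunks above all switch `x = x + 1` (or `- 1`) to compound assignment, which is clippy's assign_op_pattern lint. Sketch:

    fn advance(index: &mut usize) {
        // compound assignment instead of `*index = *index + 1`
        *index += 1;
    }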
@@ -878,7 +873,7 @@ impl Parser {
                 }
             },
             //TODO: parse the timestamp here (see parse_timestamp_value())
-            Token::Number(ref n) if n.contains(".") => match n.parse::<f64>() {
+            Token::Number(ref n) if n.contains('.') => match n.parse::<f64>() {
                 Ok(n) => Ok(Value::Double(n)),
                 Err(e) => parser_err!(format!("Could not parse '{}' as f64: {}", n, e)),
             },
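`n.contains(".")` becoming `n.contains('.')` matches clippy's single_char_pattern lint: a char literal is the cheaper pattern when only one character is being searched for. Sketch:

    fn looks_like_float(n: &str) -> bool {
        // char pattern instead of a one-character &str
        n.contains('.')
    }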
@@ -953,7 +948,7 @@ impl Parser {
             Some(token) => match token {
                 Token::Plus | Token::Minus => {
                     let tz = self.parse_timezone_offset()?;
-                    let offset = FixedOffset::east(tz as i32 * 3600);
+                    let offset = FixedOffset::east(i32::from(tz) * 3600);
                     Ok(Value::Timestamp(DateTime::from_utc(date_time, offset)))
                 }
                 _ => Ok(Value::DateTime(date_time)),
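`tz as i32` becoming `i32::from(tz)` is the fix clippy's cast_lossless lint proposes: using From documents that the conversion only widens and can never truncate. A sketch, assuming `tz` is a small integer type such as i8 (an assumption; the original type is not shown in this hunk):

    fn offset_seconds(tz: i8) -> i32 {
        // i32::from makes the lossless widening explicit
        i32::from(tz) * 3600
    }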
@@ -1256,7 +1251,7 @@ impl Parser {
                 break;
             }
         }
-        return Ok(cte);
+        Ok(cte)
     }
 
     /// Parse a "query body", which is an expression with roughly the

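Dropping the trailing `return` keyword is clippy's needless_return lint: the last expression of a block is already its value. Sketch:

    fn finish(cte: Vec<String>) -> Result<Vec<String>, String> {
        // tail expression instead of `return Ok(cte);`
        Ok(cte)
    }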
@@ -138,9 +138,9 @@ impl Token {
         let is_keyword = quote_style == None && ALL_KEYWORDS.contains(&word_uppercase.as_str());
         Token::SQLWord(SQLWord {
             value: word.to_string(),
-            quote_style: quote_style,
+            quote_style,
             keyword: if is_keyword {
-                word_uppercase.to_string()
+                word_uppercase
             } else {
                 "".to_string()
             },

@@ -446,9 +446,9 @@ fn parse_create_table() {
         SQLStatement::SQLCreateTable {
             name,
             columns,
-            external: _,
-            file_format: _,
-            location: _,
+            external: false,
+            file_format: None,
+            location: None,
         } => {
             assert_eq!("uk_cities", name.to_string());
             assert_eq!(3, columns.len());
@@ -841,14 +841,13 @@ fn parse_ctes() {
         cte_sqls[0], cte_sqls[1]
     );
 
-    fn assert_ctes_in_select(expected: &Vec<&str>, sel: &SQLQuery) {
-        for i in 0..1 {
-            let Cte {
-                ref query,
-                ref alias,
-            } = sel.ctes[i];
-            assert_eq!(expected[i], query.to_string());
+    fn assert_ctes_in_select(expected: &[&str], sel: &SQLQuery) {
+        let mut i = 0;
+        for exp in expected {
+            let Cte { query, alias } = &sel.ctes[i];
+            assert_eq!(*exp, query.to_string());
             assert_eq!(if i == 0 { "a" } else { "b" }, alias);
+            i += 1;
         }
     }
 
@@ -858,7 +857,7 @@ fn parse_ctes() {
     let sql = &format!("SELECT ({})", with);
     let select = verified_only_select(sql);
     match expr_from_projection(only(&select.projection)) {
-        &ASTNode::SQLSubquery(ref subquery) => {
+        ASTNode::SQLSubquery(ref subquery) => {
             assert_ctes_in_select(&cte_sqls, subquery.as_ref());
         }
         _ => panic!("Expected subquery"),
@@ -1006,7 +1005,7 @@ fn parse_invalid_subquery_without_parens() {
     );
 }
 
-fn only<'a, T>(v: &'a Vec<T>) -> &'a T {
+fn only<T>(v: &[T]) -> &T {
     assert_eq!(1, v.len());
     v.first().unwrap()
 }
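The `&Vec<T>` parameters becoming `&[T]` (here and in assert_ctes_in_select above) is clippy's ptr_arg lint: a slice accepts more callers and avoids the extra indirection of a reference to a Vec. A sketch of the same pattern with an illustrative name:

    fn first_and_only<T>(v: &[T]) -> &T { // illustrative copy of the pattern
        assert_eq!(1, v.len());
        v.first().unwrap()
    }

    fn demo() {
        let from_vec = vec![42];
        let from_arr = [42];
        // both coerce to &[i32]; an &Vec<i32> parameter would reject the array
        assert_eq!(first_and_only(&from_vec), first_and_only(&from_arr));
    }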
@@ -1074,6 +1073,5 @@ fn parse_sql_expr_with(dialect: &dyn Dialect, sql: &str) -> ASTNode {
     let mut tokenizer = Tokenizer::new(dialect, &sql);
     let tokens = tokenizer.tokenize().unwrap();
     let mut parser = Parser::new(tokens);
-    let ast = parser.parse_expr().unwrap();
-    ast
+    parser.parse_expr().unwrap()
 }

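Binding the result to `ast` and immediately returning it is clippy's let_and_return lint; the call can simply be the tail expression. Sketch:

    fn square(x: i32) -> i32 {
        // instead of `let y = x * x; y`
        x * x
    }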
@@ -163,9 +163,9 @@ fn parse_create_table_with_defaults() {
         SQLStatement::SQLCreateTable {
             name,
             columns,
-            external: _,
-            file_format: _,
-            location: _,
+            external: false,
+            file_format: None,
+            location: None,
         } => {
             assert_eq!("public.customer", name.to_string());
             assert_eq!(10, columns.len());
@@ -210,9 +210,9 @@ fn parse_create_table_from_pg_dump() {
         SQLStatement::SQLCreateTable {
             name,
             columns,
-            external: _,
-            file_format: _,
-            location: _,
+            external: false,
+            file_format: None,
+            location: None,
         } => {
             assert_eq!("public.customer", name.to_string());
 
@@ -273,9 +273,9 @@ fn parse_create_table_with_inherit() {
         SQLStatement::SQLCreateTable {
             name,
             columns,
-            external: _,
-            file_format: _,
-            location: _,
+            external: false,
+            file_format: None,
+            location: None,
         } => {
             assert_eq!("bazaar.settings", name.to_string());
 
@@ -407,8 +407,7 @@ fn parse_sql_statements(sql: &str) -> Result<Vec<SQLStatement>, ParserError> {
 fn parse_sql_expr(sql: &str) -> ASTNode {
     debug!("sql: {}", sql);
     let mut parser = parser(sql);
-    let ast = parser.parse_expr().unwrap();
-    ast
+    parser.parse_expr().unwrap()
 }
 
 fn parser(sql: &str) -> Parser {